openmedialibrary/oml/update.py
2019-03-05 18:23:28 +01:00

738 lines
26 KiB
Python

# -*- coding: utf-8 -*-
from contextlib import closing
import base64
import json
import os
import tarfile
from threading import Thread
import urllib.request
import urllib.error
import urllib.parse
import shutil
import subprocess
import sys
import time
import OpenSSL.crypto
import ox
from oxtornado import actions
from sqlalchemy.sql.expression import text
import settings
import utils
import db
from integration import get_trayicon_version
import logging
logger = logging.getLogger(__name__)
# Encoding used for the ed25519 update key and signature (see verify()).
ENCODING = 'base64'
def verify(release):
    """Check the cryptographic signature of a release manifest.

    Builds a canonical message of '<version>/<sha1>' lines, one per module
    sorted by module name, and verifies it against either a TLS-certificate
    signature ('signature_sha512'/'signature_sha256'/'signature_sha1',
    strongest first) or, as a fallback, a legacy ed25519 'signature'.
    Returns True only if one of the checks passes.
    """
    verified = False
    value = []
    for module in sorted(release['modules']):
        value += [str('%s/%s' % (release['modules'][module]['version'], release['modules'][module]['sha1']))]
    value = '\n'.join(value)
    value = value.encode()
    # Only the strongest digest present in the release is checked; the
    # break stops after the first matching signature_* key.
    for digest in ('sha512', 'sha256', 'sha1'):
        if 'signature_%s' % digest in release:
            tls_sig = base64.b64decode(release['signature_%s' % digest].encode())
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, settings.OML_UPDATE_CERT)
            try:
                OpenSSL.crypto.verify(cert, tls_sig, value, digest)
                verified = True
            except OpenSSL.crypto.Error:
                logger.debug('invalid tls signature')
                verified = False
            break
    # Legacy ed25519 signature, only consulted if no TLS signature verified.
    if 'signature' in release and not verified:
        import ed25519
        vk = ed25519.VerifyingKey(settings.OML_UPDATE_KEY, encoding=ENCODING)
        sig = release['signature'].encode()
        try:
            vk.verify(sig, value, encoding=ENCODING)
            verified = True
        except ed25519.BadSignatureError:
            verified = False
    return verified
def get(url, filename=None):
    """Fetch *url* with the OML user agent.

    Without *filename* the response body is returned as bytes; with
    *filename* the body is streamed to that file (parent directories are
    created as needed) and None is returned.
    """
    req = urllib.request.Request(url, headers={
        'User-Agent': settings.USER_AGENT
    })
    with closing(urllib.request.urlopen(req)) as response:
        if not filename:
            return response.read()
        target_dir = os.path.dirname(filename)
        if target_dir and not os.path.exists(target_dir):
            os.makedirs(target_dir)
        # Stream in 4k chunks to avoid holding the whole body in memory.
        with open(filename, 'wb') as out:
            while True:
                chunk = response.read(4096)
                if not chunk:
                    break
                out.write(chunk)
def check():
    """Return True if a verified, newer openmedialibrary release exists."""
    if not settings.release:
        return False
    url = settings.server.get('release_url',
        'https://downloads.openmedialibrary.com/release.json')
    release = json.loads(get(url).decode('utf-8'))
    installed = current_version('openmedialibrary')
    available = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < available
def current_version(module):
    """Return the installed version string for *module*, or '' if unknown."""
    modules = settings.release.get('modules', {}) if 'modules' in settings.release else {}
    info = modules.get(module) or {}
    return info.get('version', '')
def get_latest_release():
    """Download, verify and cache the latest release manifest.

    On success the raw manifest is written to
    <updates_path>/release.json and the parsed dict is returned.
    Returns None if the download fails or the signature does not verify.
    """
    try:
        release_data = get(settings.server.get('release_url'))
        release = json.loads(release_data.decode('utf-8'))
        if verify(release):
            ox.makedirs(settings.updates_path)
            with open(os.path.join(settings.updates_path, 'release.json'), 'wb') as fd:
                fd.write(release_data)
            return release
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit are
        # not swallowed; include the traceback so failures are diagnosable.
        logger.debug('failed to get latest release', exc_info=True)
def get_platform():
    """Map sys.platform (plus the CPU on Linux) to a release platform name.

    Unknown platforms fall through to the raw sys.platform value
    (e.g. 'win32').
    """
    if sys.platform.startswith('darwin'):
        return 'darwin64'
    if sys.platform.startswith('linux'):
        import platform
        machine_names = {
            'armv7l': 'linux_armv7l',
            'aarch64': 'linux_aarch64',
            'x86_64': 'linux64',
        }
        return machine_names.get(platform.machine(), 'linux32')
    return sys.platform
def download():
    """Download all module archives of the latest release into updates_path.

    Returns False only when a downloaded archive fails its sha1 check
    (the corrupt file is deleted); returns True otherwise, including the
    first-run case where no installed release.json exists yet.
    """
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        # Nothing installed to compare against; treat as success.
        return True
    release = get_latest_release()
    platform = get_platform()
    if release:
        ox.makedirs(settings.updates_path)
        os.chdir(os.path.dirname(settings.base_dir))
        current_files = {'release.json'}
        for module in release['modules']:
            module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
            # Only fetch modules for this platform that are newer than
            # what is currently installed.
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                base_url = settings.server.get('release_url').rsplit('/', 1)[0]
                url = '/'.join([base_url, release['modules'][module]['name']])
                if not os.path.exists(module_tar):
                    logger.debug('download %s', os.path.basename(module_tar))
                    get(url, module_tar)
                if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
                    logger.debug('invalid checksum %s', os.path.basename(module_tar))
                    os.unlink(module_tar)
                    return False
                current_files.add(os.path.basename(module_tar))
        # Purge files in updates_path that are not part of this release.
        for f in set(next(os.walk(settings.updates_path))[2])-current_files:
            os.unlink(os.path.join(settings.updates_path, f))
        return True
    return True
def install():
    """Install previously downloaded modules, swapping them in on disk.

    For each applicable module the archive is extracted into
    '<module>_new', the live directory is rotated to '<module>_old', and
    the new tree is moved into place. Returns False if an archive is
    missing or fails its sha1 check; True otherwise (including when there
    is nothing to install).
    """
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return True
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    if verify(release) and old_version < new_version:
        base = os.path.dirname(settings.base_dir)
        os.chdir(base)
        platform = get_platform()
        for module in release['modules']:
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                if os.path.exists(module_tar) and ox.sha1sum(module_tar) == release['modules'][module]['sha1']:
                    #tar fails if old platform is moved before extract
                    new = '%s_new' % module
                    ox.makedirs(new)
                    os.chdir(new)
                    # NOTE(review): extractall() without member filtering
                    # trusts the archive; contents are signature-verified
                    # via verify() above.
                    tar = tarfile.open(module_tar)
                    tar.extractall()
                    tar.close()
                    os.chdir(base)
                    module_old = '%s_old' % module
                    if os.path.exists(module_old):
                        rmtree(module_old)
                    if os.path.exists(module):
                        move(module, module_old)
                    move(os.path.join(new, module), module)
                    # On win32 the running binaries keep the old tree
                    # locked, so it is only removed on other platforms.
                    if platform != 'win32' and os.path.exists(module_old):
                        rmtree(module_old)
                    rmtree(new)
                else:
                    # Missing/corrupt archive: drop it so download() retries.
                    if os.path.exists(module_tar):
                        os.unlink(module_tar)
                    return False
        shutil.copy(os.path.join(settings.updates_path, 'release.json'), os.path.join(settings.data_path, 'release.json'))
        upgrade_app()
        # FIXME: still needed?
        if old_version < '20160112-651-de984a3' and platform != 'win32':
            subprocess.call(['./ctl', 'postupdate', '-o', old_version, '-n', new_version])
        return True
    return True
def move(src, dst):
    """shutil.move wrapper that logs failures before re-raising."""
    try:
        shutil.move(src, dst)
    except Exception:
        # Narrowed from a bare except: BaseException still propagates,
        # just without the debug log line.
        logger.debug('failed to move %s to %s', src, dst)
        raise
def rmtree(path):
    """shutil.rmtree wrapper that logs failures before re-raising."""
    try:
        shutil.rmtree(path)
    except Exception:
        # Narrowed from a bare except: BaseException still propagates,
        # just without the debug log line.
        logger.debug('failed to remove %s', path)
        raise
def update_available():
    """Return True if a DB migration or a verified newer release is pending."""
    if settings.server.get('db_version', 0) < settings.DB_VERSION:
        return True
    pending_json = os.path.join(settings.updates_path, 'release.json')
    installed_json = os.path.join(settings.data_path, 'release.json')
    if not os.path.exists(pending_json) or not os.path.exists(installed_json):
        return False
    with open(pending_json) as fd:
        release = json.load(fd)
    installed = current_version('openmedialibrary')
    pending = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < pending
def restart_oml(update=False):
    """Restart the service via ctl; optionally fetch the latest release first."""
    if update:
        get_latest_release()
    utils.ctl('restart')
def get_app_version(app):
    """Read CFBundleShortVersionString from a macOS .app bundle.

    Returns the stripped `defaults read` output (bytes), or None when the
    bundle has no Info.plist.
    """
    plist = app + '/Contents/Info.plist'
    if not os.path.exists(plist):
        return None
    output = subprocess.check_output(['defaults', 'read', plist, 'CFBundleShortVersionString'])
    return output.strip()
def upgrade_app():
    """On macOS, replace /Applications/Open Media Library.app with the
    bundled copy when their CFBundle versions differ. No-op elsewhere."""
    base = os.path.dirname(settings.base_dir)
    if sys.platform == 'darwin':
        bundled_app = os.path.join(base, 'platform_darwin64/Applications/Open Media Library.app')
        app = '/Applications/Open Media Library.app'
        version = get_app_version(app)
        current_version = get_app_version(bundled_app)
        if version and current_version and version != current_version:
            try:
                shutil.rmtree(app)
                shutil.copytree(bundled_app, app)
            except:
                logger.debug('Failed to update Application', exc_info=True)
    # Disabled win32 MSI upgrade path, deliberately kept as a dead string
    # literal for reference.
    '''
    elif sys.platform == 'win32':
        current_version = get_trayicon_version()
        if current_version != '0.2.0.0':
            msi = os.path.normpath(os.path.join(base, 'platform_win32', 'Open Media Library.msi'))
            cmd = ['msiexec.exe', '/qb', '/I', msi]
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            startupinfo.wShowWindow = subprocess.SW_HIDE
            subprocess.Popen(cmd, cwd=settings.base_dir, start_new_session=True, startupinfo=startupinfo)
    '''
def getVersion(data):
    '''
    check if new version is available

    Returns a dict with 'current', 'version' (latest known) and 'update'
    (True when current < version). Git checkouts never report an update.
    '''
    response = {
        'current': settings.MINOR_VERSION,
        'version': settings.MINOR_VERSION,
        'upgrade': False,
    }
    if settings.MINOR_VERSION == 'git':
        # Remote git comparison is disabled; kept as a dead string literal.
        '''
        cmd = ['git', 'rev-parse', '@']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        current = stdout.strip()
        cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/master']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        new = stdout.strip()[:40]
        response['update'] = len(new) == 40 and current != new
        '''
        response['update'] = False
    else:
        # Refresh the cached manifest, then compare installed vs pending.
        get_latest_release()
        if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
            return response
        if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
            return response
        with open(os.path.join(settings.updates_path, 'release.json')) as fd:
            release = json.load(fd)
        current = current_version('openmedialibrary')
        response['current'] = current
        new = release['modules']['openmedialibrary']['version']
        response['version'] = new
        response['update'] = current < new
    return response
actions.register(getVersion, cache=False)
actions.register(getVersion, cache=False, version='public')
def restart(data):
    '''
    restart (and upgrade if upgrades are available)
    '''
    # A truthy data['update'] fetches the latest release before restarting.
    restart_oml(data.get('update'))
    return {}
actions.register(restart, cache=False)
class Update(Thread):
    """Self-starting daemon thread that migrates the database and then
    downloads/installs any pending release, reporting progress over the
    websocket."""

    # Class-level default; status() shadows it with an instance dict.
    _status = {
        'reload': False,
        'status': 'Updating Open Media Library...'
    }

    def __init__(self):
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def status(self, status, reload=False):
        """Record the current status and broadcast it to clients."""
        # Imported lazily, presumably to avoid a circular import at module
        # load time — TODO confirm.
        from websocket import trigger_event
        self._status = {
            'reload': reload,
            'status': status,
        }
        trigger_event('updatestatus', self._status)

    def install(self):
        """Download and install updates; return True if installation failed
        (caller restarts without reloading) and False when up to date."""
        while update_available():
            self.status('Downloading...')
            # Retry a failed download up to 5 times, 5 seconds apart.
            max_retry = 5
            while max_retry > 0 and not download():
                max_retry -= 1
                self.status('Download failed, retrying...')
                time.sleep(5)
                self.status('Downloading...')
            self.status('Installing...')
            if not install():
                self.status('Installation failed.')
                return True
        return False

    def update_database(self):
        """Run pending data migrations up to settings.DB_VERSION."""
        db_version = settings.server.get('db_version', 0)
        if db_version < settings.DB_VERSION:
            self.status('Updating...')
            if db_version < 3:
                db_version = migrate_3()
            if db_version < 4:
                db_version = migrate_4()
            if db_version < 5:
                db_version = migrate_5()
            if db_version < 6:
                db_version = migrate_6()
            if db_version < 7:
                db_version = migrate_7()
            # Note: migrate_8 returns 8, so versions 8 and 9 both re-run it
            # here (threshold is 9, not 8).
            if db_version < 9:
                db_version = migrate_8()
            if db_version < 10:
                db_version = migrate_10()
            if db_version < 11:
                db_version = migrate_11()
            if db_version < 12:
                db_version = migrate_12()
            if db_version < 13:
                db_version = migrate_13()
            if db_version < 15:
                db_version = migrate_15()
            if db_version < 16:
                db_version = migrate_16()
            if db_version < 17:
                db_version = migrate_17()
            if db_version < 18:
                db_version = migrate_18()
            if db_version < 19:
                db_version = migrate_19()
            if db_version < 20:
                db_version = migrate_20()
            settings.server['db_version'] = db_version

    def run(self):
        self.status('Checking for updates...')
        self.update_database()
        if self.install():
            # Install failed: restart without asking clients to reload.
            restart_oml()
            return
        self.status('Relaunching...', True)
        restart_oml()
def migrate_3():
    """Fix 'available' items: drop the file state of items without files,
    and copy page counts from the first file into item info."""
    with db.session():
        import item.models
        for i in item.models.Item.find({
            'query': {
                'conditions': [{
                    'key':'mediastate',
                    'value':'available',
                    'operator': '=='
                }]
            }
        }):
            if not i.files.all():
                # Marked available but has no files on disk.
                i.remove_file()
            else:
                f = i.files.all()[0]
                if not 'pages' in i.info and 'pages' in f.info:
                    i.info['pages'] = f.info['pages']
                    i.save()
    return 3
def migrate_4():
    """Normalize item ISBN metadata: collapse list values to one ISBN-13
    string, dropping the key entirely when no valid ISBN remains."""
    with db.session() as session:
        import item.models
        from meta.utils import to_isbn13
        for i in item.models.Item.query:
            update = False
            if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                isbns = [isbn for isbn in isbns if isbn]
                if isbns:
                    # Keep the first valid ISBN-13 and mirror it into info.
                    i.meta['isbn'] = isbns[0]
                    if 'isbn' in i.info:
                        i.info['isbn'] = i.meta['isbn']
                else:
                    del i.meta['isbn']
                    if 'isbn' in i.info:
                        del i.info['isbn']
                update = True
            # Also remove empty-string/empty values left behind.
            if 'isbn' in i.meta and not i.meta['isbn']:
                del i.meta['isbn']
                update = True
            if update:
                session.add(i)
        session.commit()
    return 4
def migrate_5():
    """Rebuild the user_metadata unique index, refresh stored metadata
    hashes, drop the legacy '_from' info key and re-sync item metadata."""
    db.run_sql([
        'DROP INDEX IF EXISTS user_metadata_index',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'UPDATE sort SET sharemetadata = 0',
    ])
    with db.session() as session:
        import user.models
        for m in user.models.Metadata.query:
            data_hash = m.get_hash()
            if m.data_hash != data_hash:
                m.data_hash = data_hash
                session.add(m)
        session.commit()
        import item.models
        for i in item.models.Item.query:
            update = False
            if '_from' in i.info:
                del i.info['_from']
                update = True
            if update:
                session.add(i)
        session.commit()
        for i in item.models.Item.query:
            i.sync_metadata()
    return 5
def migrate_6():
    """Recompute sort values for items whose sort columns are empty."""
    with db.session() as session:
        import item.models
        # Same columns, same order as the original six copy-pasted loops.
        for column in ('author', 'publisher', 'language', 'place', 'isbn', 'date'):
            for row in item.models.Sort.query.filter_by(**{column: ''}):
                row.item.update_sort()
        session.commit()
    return 6
def migrate_7():
    """Delete legacy editmeta/resetmeta changelog entries and drop the
    obsolete metadata/scrape tables, then vacuum the database."""
    with db.session() as session:
        import changelog
        for entry in changelog.Changelog.query:
            if 'editmeta' in entry.data or 'resetmeta' in entry.data:
                session.delete(entry)
        session.commit()
    for statement in (
        'DROP TABLE IF EXISTS metadata',
        'DROP TABLE IF EXISTS scrape',
        'VACUUM',
    ):
        db.run_sql(statement)
    return 7
def migrate_8():
    """Drop obsolete server settings and the list cache, prune unknown
    meta keys, rebuild this user's changelog and delete malformed peers."""
    for key in ('directory_service', 'meta_service', 'local_lookup', 'cert'):
        if key in settings.server:
            del settings.server[key]
    list_cache = os.path.join(settings.data_path, 'list_cache.json')
    if os.path.exists(list_cache):
        os.unlink(list_cache)
    with db.session() as session:
        import item.models
        for i in item.models.Item.query:
            # Remove meta keys no longer declared in the item schema.
            delta = set(i.meta)-set(i.meta_keys)
            if delta:
                for key in delta:
                    del i.meta[key]
                session.add(i)
        session.commit()
        import changelog
        import user.models
        changelog.Changelog.query.delete()
        u = user.models.User.get(settings.USER_ID)
        u.rebuild_changelog()
        # Remove peers whose ids are not the expected 16 characters.
        for peer in user.models.User.query:
            if peer.id != u.id:
                if len(peer.id) != 16:
                    session.delete(peer)
        session.commit()
    return 8
def migrate_10():
    """Populate Find.sortvalue with normalized sort strings, using
    person-style sort names for keys whose sortType is 'person'."""
    with db.session() as session:
        from item.models import Item, Find
        from utils import get_by_id
        from item.person import get_sort_name
        import unicodedata
        sort_names = {}  # cache: display name -> computed sort name
        updates = {}     # key -> {value: sortvalue} accumulated per row
        for f in Find.query.filter(Find.key.in_(Item.filter_keys)):
            sort_type = get_by_id(settings.config['itemKeys'], f.key).get('sortType')
            if sort_type == 'person':
                if f.value in sort_names:
                    sortvalue = sort_names[f.value]
                else:
                    sortvalue = sort_names[f.value] = get_sort_name(f.value)
            else:
                sortvalue = f.value
            if sortvalue:
                # NFKD-normalize and lowercase for locale-stable sorting.
                sortvalue = ox.sort_string(unicodedata.normalize('NFKD', sortvalue)).lower()
            if not f.key in updates:
                updates[f.key] = {}
            updates[f.key][f.value] = sortvalue
        # Apply one bulk UPDATE per (key, value) pair.
        for key in updates:
            for value in updates[key]:
                Find.query.filter_by(key=key, value=value).update({'sortvalue': updates[key][value]})
        session.commit()
    return 10
def migrate_11():
    """Move per-peer state out of the relational tables into the peer
    key/value stores (revision, list contents, metadata), then migrate
    the legacy transfer table into the downloads store."""
    with db.session() as session:
        from user.models import User, Metadata, List
        from changelog import Changelog
        import utils
        for u in User.query.filter_by(peered=True):
            peer = utils.get_peer(u.id)
            # Highest changelog revision seen for this peer.
            last = Changelog.query.filter_by(user_id=u.id).order_by(text('-revision')).first()
            if last:
                peer.info['revision'] = last.revision
            listorder = []
            for l in List.query.filter_by(user_id=u.id).order_by('index_'):
                if l.name:
                    peer.info['lists'][l.name] = [i.id for i in l.get_items()]
                    listorder.append(l.name)
            if 'listorder' not in peer.info:
                peer.info['listorder'] = listorder
            for m in Metadata.query.filter_by(user_id=u.id):
                peer.library[m.item_id] = {
                    'meta': dict(m.data),
                    'meta_hash': m.data_hash,
                    'modified': m.modified,
                }
            peer.library.commit()
            peer.sync_info()
            peer.sync_db()
            # The relational rows are now redundant.
            Changelog.query.filter_by(user_id=u.id).delete()
            Metadata.query.filter_by(user_id=u.id).delete()
            session.commit()
    if db.table_exists('transfer'):
        import state
        import downloads
        state.online = False
        state.downloads = downloads.Downloads()
        r = state.db.session.execute('SELECT item_id, added, progress FROM transfer')
        for t in r:
            item_id, added, progress = t
            if added:
                state.downloads.transfers[item_id] = {
                    'added': added,
                    'progress': progress
                }
        state.db.session.commit()
        state.downloads.transfers.commit()
        state.downloads = None
    return 11
def migrate_12():
    """Drop the legacy transfer table (its data moved in migrate_11)."""
    db.run_sql(['DROP TABLE IF EXISTS transfer'])
    return 12
def migrate_13():
    """Export this user's changelog to peers/<USER_ID>.log (one JSON array
    per line) and record the highest exported revision, if no log exists."""
    import settings
    import changelog
    import os
    import json
    path = os.path.join(settings.data_path, 'peers', '%s.log' % settings.USER_ID)
    if not os.path.exists(path):
        folder = os.path.dirname(path)
        if not os.path.exists(folder):
            os.makedirs(folder)
        with db.session():
            revision = -1
            qs = changelog.Changelog.query.filter_by(user_id=settings.USER_ID)
            with open(path, 'wb') as fd:
                for c in qs.order_by('timestamp'):
                    # Each line: [revision, timestamp, payload] as UTF-8 JSON.
                    data = json.dumps([c.revision, c.timestamp, json.loads(c.data)], ensure_ascii=False).encode('utf-8')
                    fd.write(data + b'\n')
                    revision = c.revision
            if revision > -1:
                settings.server['revision'] = revision
    return 13
def migrate_15():
    """Remove an empty ':Public' list and drop the legacy 'local' flag
    from all users."""
    from user.models import List, User
    with db.session():
        l = List.get(':Public')
        if l and not len(l.items):
            l.remove()
        for u in User.query:
            if 'local' in u.info:
                del u.info['local']
                u.save()
    return 15
def migrate_16():
    """Rebuild the user table via a temporary user2 table to apply the
    current column definitions/constraints, then restore the nickname index."""
    db.run_sql([
        '''CREATE TABLE user2 (
            created DATETIME,
            modified DATETIME,
            id VARCHAR(43) NOT NULL,
            info BLOB,
            nickname VARCHAR(256),
            pending VARCHAR(64),
            queued BOOLEAN,
            peered BOOLEAN,
            online BOOLEAN,
            PRIMARY KEY (id),
            CHECK (queued IN (0, 1)),
            CHECK (peered IN (0, 1)),
            CHECK (online IN (0, 1))
        )''',
        '''INSERT INTO user2 (created, modified, id, info, nickname, pending, queued, peered, online)
            SELECT created, modified, id, info, nickname, pending, queued, peered, online FROM user''',
        'DROP TABLE user',
        'ALTER TABLE user2 RENAME TO user',
        'CREATE INDEX IF NOT EXISTS ix_user_nickname ON user (nickname)'
    ])
    return 16
def migrate_17():
    """Record the removal of ':Public' (if absent) and the current order
    of named static lists in the changelog."""
    from user.models import List, User
    from changelog import add_record
    with db.session():
        l = List.get(':Public')
        if not l:
            add_record('removelist', 'Public')
        lists = []
        for l in List.query.filter_by(user_id=settings.USER_ID).order_by('index_'):
            if l.type == 'static' and l.name not in ('', 'Inbox'):
                lists.append(l.name)
        add_record('orderlists', lists)
    return 17
def migrate_18():
    """Create the annotation table and its full-text find indexes."""
    db.run_sql([
        '''CREATE TABLE annotation (
            _id INTEGER NOT NULL,
            id VARCHAR(43),
            created DATETIME,
            modified DATETIME,
            user_id VARCHAR(43),
            item_id VARCHAR(43),
            data BLOB,
            findquotes TEXT,
            findnotes TEXT,
            PRIMARY KEY (_id),
            FOREIGN KEY(user_id) REFERENCES user (id),
            FOREIGN KEY(item_id) REFERENCES item (id)
        )'''])
    db.run_sql([
        'CREATE INDEX ix_annotation_findquotes ON annotation (findquotes)',
        'CREATE INDEX ix_annotation_findnotes ON annotation (findnotes)'
    ])
    return 18
def migrate_19():
    """Apply pending peer logs for peers that have no recorded revision
    yet but do have a non-empty log file on disk."""
    from user.models import User
    with db.session():
        peers = [u for u in User.query.filter_by(peered=True)]
        peers.sort(key=lambda u: utils.user_sort_key(u.json()))
        for u in peers:
            peer = utils.get_peer(u.id)
            if not peer.info.get('revision') and os.path.exists(peer._logpath) and os.path.getsize(peer._logpath):
                logger.debug('try to apply pending logs for %s', u.id)
                try:
                    peer.apply_log()
                except Exception:
                    # Narrowed from a bare except; log the traceback so the
                    # failure cause is visible.
                    logger.error('failed to apply log for %s', u.id, exc_info=True)
    return 19
def migrate_20():
    """Re-encode any peer log that is not valid UTF-8 (assumed to be
    Windows-1252) and, if anything changed, re-apply pending logs."""
    from glob import glob
    changed = False
    pattern = os.path.join(settings.data_path, 'peers', '*.log')
    for log in glob(pattern):
        with open(log, 'rb') as fd:
            raw = fd.read()
        try:
            raw.decode('utf-8')
        except UnicodeDecodeError:
            decoded = raw.decode('Windows-1252')
            logger.error('convert %s to utf-8', log)
            with open(log, 'wb') as fd:
                fd.write(decoded.encode('utf-8'))
            changed = True
    if changed:
        migrate_19()
    return 20