# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

from contextlib import closing
from threading import Thread
import base64
import json
import logging
import os
import shutil
import subprocess
import sys
import tarfile
import time
import urllib.request, urllib.error, urllib.parse

import OpenSSL.crypto
import ox
from oxtornado import actions

import settings
import utils
import db
from integration import get_trayicon_version

logger = logging.getLogger(__name__)


ENCODING = 'base64'

def verify(release):
    '''
    Check a release manifest against the bundled update certificate
    (TLS signature over the sorted module version/sha1 list) or, failing
    that, against the ed25519 update key.
    '''
    verified = False
    value = []
    for module in sorted(release['modules']):
        value += [str('%s/%s' % (release['modules'][module]['version'], release['modules'][module]['sha1']))]
    value = '\n'.join(value)
    value = value.encode()
    for digest in ('sha512', 'sha256', 'sha1'):
        if 'signature_%s' % digest in release:
            tls_sig = base64.b64decode(release['signature_%s' % digest].encode())
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, settings.OML_UPDATE_CERT)
            try:
                OpenSSL.crypto.verify(cert, tls_sig, value, digest)
                verified = True
            except OpenSSL.crypto.Error:
                logger.debug('invalid tls signature')
                verified = False
            break
    if 'signature' in release and not verified:
        import ed25519
        vk = ed25519.VerifyingKey(settings.OML_UPDATE_KEY, encoding=ENCODING)
        sig = release['signature'].encode()
        try:
            vk.verify(sig, value, encoding=ENCODING)
            verified = True
        except ed25519.BadSignatureError:
            verified = False
    return verified
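
# For reference, verify() expects a release manifest shaped roughly like this
# (values are illustrative, not a real release):
#     {
#         "modules": {
#             "openmedialibrary": {
#                 "name": "openmedialibrary.tar.bz2",
#                 "version": "20160112-651-de984a3",
#                 "sha1": "...",
#                 "platform": "linux64"  # optional, used by download()/install()
#             }
#         },
#         "signature_sha512": "<base64 signature>"
#     }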


def get(url, filename=None):
    '''
    Fetch url; return the body, or stream it to filename in 4 KiB chunks.
    '''
    request = urllib.request.Request(url, headers={
        'User-Agent': settings.USER_AGENT
    })
    with closing(urllib.request.urlopen(request)) as u:
        if not filename:
            data = u.read()
            return data
        else:
            dirname = os.path.dirname(filename)
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            with open(filename, 'wb') as fd:
                data = u.read(4096)
                while data:
                    fd.write(data)
                    data = u.read(4096)
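
# Usage sketch (both patterns appear below in check() and download()):
#     data = get('http://downloads.openmedialibrary.com/release.json')
#     get(url, module_tar)  # streams the response to disk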


def check():
    if settings.release:
        release_data = get(settings.server.get('release_url',
            'http://downloads.openmedialibrary.com/release.json'))
        release = json.loads(release_data.decode('utf-8'))
        old = current_version('openmedialibrary')
        new = release['modules']['openmedialibrary']['version']
        return verify(release) and old < new
    return False


def current_version(module):
    if 'modules' in settings.release \
            and module in settings.release['modules'] \
            and 'version' in settings.release['modules'][module]:
        version = settings.release['modules'][module]['version']
    else:
        version = ''
    return version


def get_latest_release():
    try:
        release_data = get(settings.server.get('release_url'))
        release = json.loads(release_data.decode('utf-8'))
        if verify(release):
            ox.makedirs(settings.updates_path)
            with open(os.path.join(settings.updates_path, 'release.json'), 'wb') as fd:
                fd.write(release_data)
            return release
    except Exception:
        logger.debug('failed to get latest release')


def get_platform():
    name = sys.platform
    if name.startswith('darwin'):
        name = 'darwin64'
    elif name.startswith('linux'):
        import platform
        machine = platform.machine()
        if machine == 'armv7l':
            name = 'linux_armv7l'
        elif machine == 'aarch64':
            name = 'linux_aarch64'
        elif machine == 'x86_64':
            name = 'linux64'
        else:
            name = 'linux32'
    return name
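
# e.g. sys.platform 'linux' with platform.machine() 'x86_64' maps to
# 'linux64'; unrecognized Linux machines fall back to 'linux32'. These names
# must match the optional 'platform' field of release modules (see
# download() and install()).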


def download():
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    release = get_latest_release()
    platform = get_platform()
    if release:
        ox.makedirs(settings.updates_path)
        os.chdir(os.path.dirname(settings.base_dir))
        current_files = {'release.json'}
        for module in release['modules']:
            module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                base_url = settings.server.get('release_url').rsplit('/', 1)[0]
                url = '/'.join([base_url, release['modules'][module]['name']])
                if not os.path.exists(module_tar):
                    logger.debug('download %s', os.path.basename(module_tar))
                    get(url, module_tar)
                if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
                    logger.debug('invalid checksum %s', os.path.basename(module_tar))
                    os.unlink(module_tar)
                    return False
                current_files.add(os.path.basename(module_tar))
        # drop stale downloads that are no longer part of the release
        for f in set(next(os.walk(settings.updates_path))[2]) - current_files:
            os.unlink(os.path.join(settings.updates_path, f))
        return True
    return True


def install():
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return True
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    if verify(release) and old_version < new_version:
        base = os.path.dirname(settings.base_dir)
        os.chdir(base)
        platform = get_platform()
        for module in release['modules']:
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                if os.path.exists(module_tar) and ox.sha1sum(module_tar) == release['modules'][module]['sha1']:
                    # tar fails if the old platform dir is moved before extraction,
                    # so extract into <module>_new first, then swap directories
                    new = '%s_new' % module
                    ox.makedirs(new)
                    os.chdir(new)
                    tar = tarfile.open(module_tar)
                    tar.extractall()
                    tar.close()
                    os.chdir(base)
                    module_old = '%s_old' % module
                    if os.path.exists(module_old):
                        rmtree(module_old)
                    if os.path.exists(module):
                        move(module, module_old)
                    move(os.path.join(new, module), module)
                    if platform != 'win32' and os.path.exists(module_old):
                        rmtree(module_old)
                    rmtree(new)
                else:
                    if os.path.exists(module_tar):
                        os.unlink(module_tar)
                    return False
        shutil.copy(os.path.join(settings.updates_path, 'release.json'), os.path.join(settings.data_path, 'release.json'))
        upgrade_app()
        # FIXME: still needed?
        if old_version < '20160112-651-de984a3':
            subprocess.call(['./ctl', 'postupdate', '-o', old_version, '-n', new_version])
        return True
    return True
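
# During the swap, install() briefly leaves three sibling directories per
# module (names derived from the module name, e.g. for 'openmedialibrary'):
#     openmedialibrary_new/  freshly extracted tarball
#     openmedialibrary_old/  previous version, kept on win32 until relaunch
#     openmedialibrary/      the new version, moved into place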


def move(src, dst):
    try:
        shutil.move(src, dst)
    except Exception:
        logger.debug('failed to move %s to %s', src, dst)
        raise


def rmtree(path):
    try:
        shutil.rmtree(path)
    except Exception:
        logger.debug('failed to remove %s', path)
        raise


def update_available():
    db_version = settings.server.get('db_version', 0)
    if db_version < settings.DB_VERSION:
        return True
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return False
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return False
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    return verify(release) and old_version < new_version


def restart_oml(update=False):
    if update:
        get_latest_release()
    utils.ctl('restart')


def get_app_version(app):
    plist = app + '/Contents/Info.plist'
    if os.path.exists(plist):
        cmd = ['defaults', 'read', plist, 'CFBundleShortVersionString']
        return subprocess.check_output(cmd).strip()
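
# Note: subprocess.check_output() returns bytes, so get_app_version() yields
# bytes (or None if the Info.plist is missing); upgrade_app() only ever
# compares two such values against each other, which keeps this consistent.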


def upgrade_app():
    base = os.path.dirname(settings.base_dir)
    if sys.platform == 'darwin':
        bundled_app = os.path.join(base, 'platform_darwin64/Applications/Open Media Library.app')
        app = '/Applications/Open Media Library.app'
        version = get_app_version(app)
        current_version = get_app_version(bundled_app)
        if version and current_version and version != current_version:
            try:
                shutil.rmtree(app)
                shutil.copytree(bundled_app, app)
            except Exception:
                logger.debug('Failed to update Application', exc_info=True)
    elif sys.platform == 'win32':
        current_version = get_trayicon_version()
        if current_version != '0.2.0.0':
            msi = os.path.normpath(os.path.join(base, 'platform_win32', 'Open Media Library.msi'))
            cmd = ['msiexec.exe', '/qb', '/I', msi]
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            startupinfo.wShowWindow = subprocess.SW_HIDE
            subprocess.Popen(cmd, cwd=settings.base_dir, start_new_session=True, startupinfo=startupinfo)


def getVersion(data):
    '''
    check if a new version is available
    '''
    response = {
        'current': settings.MINOR_VERSION,
        'version': settings.MINOR_VERSION,
        'upgrade': False,
    }
    if settings.MINOR_VERSION == 'git':
        '''
        cmd = ['git', 'rev-parse', '@']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        current = stdout.strip()
        cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/master']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        new = stdout.strip()[:40]
        response['update'] = len(new) == 40 and current != new
        '''
        response['update'] = False
    else:
        get_latest_release()
        if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
            return response
        if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
            return response
        with open(os.path.join(settings.updates_path, 'release.json')) as fd:
            release = json.load(fd)
        current = current_version('openmedialibrary')
        response['current'] = current
        new = release['modules']['openmedialibrary']['version']
        response['version'] = new
        response['update'] = current < new
    return response
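
# A typical getVersion() response (values illustrative; 'upgrade' is a legacy
# key, callers read 'update'):
#     {'current': '20160110-640-0000000', 'version': '20160112-651-de984a3',
#      'upgrade': False, 'update': True}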

actions.register(getVersion, cache=False)


def restart(data):
    '''
    restart (and upgrade if upgrades are available)
    '''
    restart_oml(data.get('update'))
    return {}

actions.register(restart, cache=False)


class Update(Thread):
    _status = {
        'reload': False,
        'status': 'Updating Open Media Library...'
    }

    def __init__(self):
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def status(self, status, reload=False):
        from websocket import trigger_event
        self._status = {
            'reload': reload,
            'status': status,
        }
        trigger_event('updatestatus', self._status)

    def install(self):
        while update_available():
            self.status('Downloading...')
            while not download():
                self.status('Download failed, retrying...')
                time.sleep(5)
                self.status('Downloading...')
            self.status('Installing...')
            if not install():
                self.status('Installation failed.')
                return True
        return False

    def update_database(self):
        db_version = settings.server.get('db_version', 0)
        if db_version < settings.DB_VERSION:
            self.status('Updating...')
            if db_version < 3:
                db_version = migrate_3()
            if db_version < 4:
                db_version = migrate_4()
            if db_version < 5:
                db_version = migrate_5()
            if db_version < 6:
                db_version = migrate_6()
            if db_version < 7:
                db_version = migrate_7()
            if db_version < 9:
                db_version = migrate_8()
            if db_version < 10:
                db_version = migrate_10()
            if db_version < 11:
                db_version = migrate_11()
            if db_version < 12:
                db_version = migrate_12()
        settings.server['db_version'] = settings.DB_VERSION

    def run(self):
        self.status('Checking for updates...')
        self.update_database()
        if self.install():
            restart_oml()
            return
        self.status('Relaunching...', True)
        restart_oml()
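
# Note: Update() starts itself from __init__(), so constructing the object is
# enough to kick off the check / migrate / install / relaunch sequence in run().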


def migrate_3():
    with db.session():
        import item.models
        for i in item.models.Item.find({
            'query': {
                'conditions': [{
                    'key': 'mediastate',
                    'value': 'available',
                    'operator': '=='
                }]
            }
        }):
            if not i.files.all():
                i.remove_file()
            else:
                f = i.files.all()[0]
                if 'pages' not in i.info and 'pages' in f.info:
                    i.info['pages'] = f.info['pages']
                    i.save()
    return 3


def migrate_4():
    with db.session() as session:
        import item.models
        from meta.utils import to_isbn13
        for i in item.models.Item.query:
            update = False
            if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                isbns = [isbn for isbn in isbns if isbn]
                if isbns:
                    i.meta['isbn'] = isbns[0]
                    if 'isbn' in i.info:
                        i.info['isbn'] = i.meta['isbn']
                else:
                    del i.meta['isbn']
                    if 'isbn' in i.info:
                        del i.info['isbn']
                update = True
            if 'isbn' in i.meta and not i.meta['isbn']:
                del i.meta['isbn']
                update = True
            if update:
                session.add(i)
        session.commit()
    return 4


def migrate_5():
    db.run_sql([
        'DROP INDEX IF EXISTS user_metadata_index',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'UPDATE sort SET sharemetadata = 0',
    ])
    with db.session() as session:
        import user.models
        for m in user.models.Metadata.query:
            data_hash = m.get_hash()
            if m.data_hash != data_hash:
                m.data_hash = data_hash
                session.add(m)
        session.commit()
        import item.models
        for i in item.models.Item.query:
            update = False
            if '_from' in i.info:
                del i.info['_from']
                update = True
            if update:
                session.add(i)
        session.commit()
        for i in item.models.Item.query:
            i.sync_metadata()
    return 5


def migrate_6():
    with db.session() as session:
        import item.models
        # re-run update_sort() for items whose sort columns were left empty
        for key in ('author', 'publisher', 'language', 'place', 'isbn', 'date'):
            for s in item.models.Sort.query.filter_by(**{key: ''}):
                s.item.update_sort()
        session.commit()
    return 6


def migrate_7():
    with db.session() as session:
        import changelog
        for c in changelog.Changelog.query:
            if 'editmeta' in c.data or 'resetmeta' in c.data:
                session.delete(c)
        session.commit()
    db.run_sql('DROP TABLE IF EXISTS metadata')
    db.run_sql('DROP TABLE IF EXISTS scrape')
    db.run_sql('VACUUM')
    return 7


def migrate_8():
    for key in ('directory_service', 'meta_service', 'local_lookup', 'cert'):
        if key in settings.server:
            del settings.server[key]
    list_cache = os.path.join(settings.data_path, 'list_cache.json')
    if os.path.exists(list_cache):
        os.unlink(list_cache)
    with db.session() as session:
        import item.models
        for i in item.models.Item.query:
            delta = set(i.meta) - set(i.meta_keys)
            if delta:
                for key in delta:
                    del i.meta[key]
                session.add(i)
        session.commit()
        import changelog
        import user.models
        changelog.Changelog.query.delete()
        u = user.models.User.get(settings.USER_ID)
        u.rebuild_changelog()
        for peer in user.models.User.query:
            if peer.id != u.id:
                if len(peer.id) != 16:
                    session.delete(peer)
        session.commit()
    return 8


def migrate_10():
    with db.session() as session:
        from item.models import Item, Find
        from utils import get_by_id
        from item.person import get_sort_name
        import unicodedata
        sort_names = {}
        updates = {}
        for f in Find.query.filter(Find.key.in_(Item.filter_keys)):
            sort_type = get_by_id(settings.config['itemKeys'], f.key).get('sortType')
            if sort_type == 'person':
                if f.value in sort_names:
                    sortvalue = sort_names[f.value]
                else:
                    sortvalue = sort_names[f.value] = get_sort_name(f.value)
            else:
                sortvalue = f.value
            if sortvalue:
                sortvalue = ox.sort_string(unicodedata.normalize('NFKD', sortvalue)).lower()
            if f.key not in updates:
                updates[f.key] = {}
            updates[f.key][f.value] = sortvalue
        for key in updates:
            for value in updates[key]:
                Find.query.filter_by(key=key, value=value).update({'sortvalue': updates[key][value]})
        session.commit()
    return 10


def migrate_11():
    with db.session() as session:
        from user.models import User, Metadata, List
        from changelog import Changelog
        import utils
        for u in User.query.filter_by(peered=True):
            peer = utils.get_peer(u.id)
            last = Changelog.query.filter_by(user_id=u.id).order_by('-revision').first()
            if last:
                peer.info['revision'] = last.revision
            listorder = []
            for l in List.query.filter_by(user_id=u.id).order_by('index_'):
                if l.name:
                    peer.info['lists'][l.name] = [i.id for i in l.get_items()]
                    listorder.append(l.name)
            if 'listorder' not in peer.info:
                peer.info['listorder'] = listorder
            for m in Metadata.query.filter_by(user_id=u.id):
                peer.library[m.item_id] = {
                    'meta': dict(m.data),
                    'meta_hash': m.data_hash,
                    'modified': m.modified,
                }
            peer.library.commit()
            peer.sync_info()
            peer.sync_db()
            Changelog.query.filter_by(user_id=u.id).delete()
            Metadata.query.filter_by(user_id=u.id).delete()
        session.commit()
    if db.table_exists('transfer'):
        import state
        import downloads
        state.online = False
        state.downloads = downloads.Downloads()
        r = state.db.session.execute('SELECT item_id, added, progress FROM transfer')
        for t in r:
            item_id, added, progress = t
            if added:
                state.downloads.transfers[item_id] = {
                    'added': added,
                    'progress': progress
                }
        state.db.session.commit()
        state.downloads.transfers.commit()
        state.downloads = None
    return 11


def migrate_12():
    db.run_sql([
        'DROP TABLE IF EXISTS transfer'
    ])
    return 12