
update.py

# -*- coding: utf-8 -*-

from contextlib import closing
import base64
import json
import os
import tarfile
from threading import Thread
import urllib.request
import urllib.error
import urllib.parse
import shutil
import subprocess
import sys
import time

import OpenSSL.crypto
import ox
from oxtornado import actions
from sqlalchemy.sql.expression import text

import settings
import utils
import db
from integration import get_trayicon_version

import logging
logger = logging.getLogger(__name__)

ENCODING = 'base64'
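
# This module handles Open Media Library's self-update and database migrations:
# a signed release.json is fetched and verified (verify/check/get_latest_release),
# per-platform module tarballs are downloaded and swapped into place
# (download/install), and the Update thread applies pending migrate_*() steps
# before restarting the service.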


def verify(release):
    verified = False
    value = []
    for module in sorted(release['modules']):
        value += [str('%s/%s' % (release['modules'][module]['version'], release['modules'][module]['sha1']))]
    value = '\n'.join(value)
    value = value.encode()
    for digest in ('sha512', 'sha256', 'sha1'):
        if 'signature_%s' % digest in release:
            tls_sig = base64.b64decode(release['signature_%s' % digest].encode())
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, settings.OML_UPDATE_CERT)
            try:
                OpenSSL.crypto.verify(cert, tls_sig, value, digest)
                verified = True
            except OpenSSL.crypto.Error:
                logger.debug('invalid tls signature')
                verified = False
            break
    if 'signature' in release and not verified:
        import ed25519
        vk = ed25519.VerifyingKey(settings.OML_UPDATE_KEY, encoding=ENCODING)
        sig = release['signature'].encode()
        try:
            vk.verify(sig, value, encoding=ENCODING)
            verified = True
        except ed25519.BadSignatureError:
            verified = False
    return verified
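
# For reference, the release.json handled by verify() and download() is roughly
# of this shape (field values are illustrative placeholders, not from a real
# release; a module entry may also carry an optional 'platform' key):
#
#     {
#         "modules": {
#             "openmedialibrary": {
#                 "name": "openmedialibrary.tar.bz2",
#                 "version": "20160112-651-de984a3",
#                 "sha1": "<sha1 of the tarball>"
#             }
#         },
#         "signature_sha512": "<base64 signature over the sorted version/sha1 lines>"
#     }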


def get(url, filename=None):
    request = urllib.request.Request(url, headers={
        'User-Agent': settings.USER_AGENT
    })
    with closing(urllib.request.urlopen(request)) as u:
        if not filename:
            data = u.read()
            return data
        else:
            dirname = os.path.dirname(filename)
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            with open(filename, 'wb') as fd:
                data = u.read(4096)
                while data:
                    fd.write(data)
                    data = u.read(4096)


def check():
    if settings.release:
        release_data = get(settings.server.get('release_url',
            'https://downloads.openmedialibrary.com/release.json'))
        release = json.loads(release_data.decode('utf-8'))
        old = current_version('openmedialibrary')
        new = release['modules']['openmedialibrary']['version']
        return verify(release) and old < new
    return False


def current_version(module):
    if 'modules' in settings.release \
            and module in settings.release['modules'] \
            and 'version' in settings.release['modules'][module]:
        version = settings.release['modules'][module]['version']
    else:
        version = ''
    return version
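
# Note: versions are plain strings compared with '<', so ordering is
# lexicographic; this works because releases use sortable, date-based
# identifiers (e.g. '20160112-651-de984a3').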


def get_latest_release():
    try:
        release_data = get(settings.server.get('release_url'))
        release = json.loads(release_data.decode('utf-8'))
        if verify(release):
            ox.makedirs(settings.updates_path)
            with open(os.path.join(settings.updates_path, 'release.json'), 'wb') as fd:
                fd.write(release_data)
            return release
    except:
        logger.debug('failed to get latest release')


def get_platform():
    name = sys.platform
    if name.startswith('darwin'):
        name = 'darwin64'
    elif name.startswith('linux'):
        import platform
        machine = platform.machine()
        if machine == 'armv7l':
            name = 'linux_armv7l'
        elif machine == 'aarch64':
            name = 'linux_aarch64'
        elif machine == 'x86_64':
            name = 'linux64'
        else:
            name = 'linux32'
    return name


def download():
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    release = get_latest_release()
    platform = get_platform()
    if release:
        ox.makedirs(settings.updates_path)
        os.chdir(os.path.dirname(settings.base_dir))
        current_files = {'release.json'}
        for module in release['modules']:
            module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                base_url = settings.server.get('release_url').rsplit('/', 1)[0]
                url = '/'.join([base_url, release['modules'][module]['name']])
                if not os.path.exists(module_tar):
                    logger.debug('download %s', os.path.basename(module_tar))
                    get(url, module_tar)
                if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
                    logger.debug('invalid checksum %s', os.path.basename(module_tar))
                    os.unlink(module_tar)
                    return False
                current_files.add(os.path.basename(module_tar))
        for f in set(next(os.walk(settings.updates_path))[2]) - current_files:
            os.unlink(os.path.join(settings.updates_path, f))
        return True
    return True
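
# download() returns False when a tarball fails its sha1 check (the file is
# deleted so it can be re-fetched); Update.install() below uses this to retry
# the download up to five times before giving up.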


def install():
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return True
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    if verify(release) and old_version < new_version:
        base = os.path.dirname(settings.base_dir)
        os.chdir(base)
        platform = get_platform()
        for module in release['modules']:
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                if os.path.exists(module_tar) and ox.sha1sum(module_tar) == release['modules'][module]['sha1']:
                    # tar fails if the old platform dir is moved away before extracting
                    new = '%s_new' % module
                    ox.makedirs(new)
                    os.chdir(new)
                    tar = tarfile.open(module_tar)
                    tar.extractall()
                    tar.close()
                    os.chdir(base)
                    module_old = '%s_old' % module
                    if os.path.exists(module_old):
                        rmtree(module_old)
                    if os.path.exists(module):
                        move(module, module_old)
                    move(os.path.join(new, module), module)
                    if platform != 'win32' and os.path.exists(module_old):
                        rmtree(module_old)
                    rmtree(new)
                else:
                    if os.path.exists(module_tar):
                        os.unlink(module_tar)
                    return False
        shutil.copy(os.path.join(settings.updates_path, 'release.json'), os.path.join(settings.data_path, 'release.json'))
        upgrade_app()
        # FIXME: still needed?
        if old_version < '20160112-651-de984a3' and platform != 'win32':
            subprocess.call(['./ctl', 'postupdate', '-o', old_version, '-n', new_version])
        return True
    return True
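
# install() swaps modules in place next to settings.base_dir: each tarball is
# extracted into '<module>_new', the running '<module>' directory is moved to
# '<module>_old', and the new tree is moved in. On win32 the old tree is left
# behind (presumably because its files may still be in use); on other platforms
# it is removed.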


def move(src, dst):
    try:
        shutil.move(src, dst)
    except:
        logger.debug('failed to move %s to %s', src, dst)
        raise


def rmtree(path):
    try:
        shutil.rmtree(path)
    except:
        logger.debug('failed to remove %s', path)
        raise


def update_available():
    db_version = settings.server.get('db_version', 0)
    if db_version < settings.DB_VERSION:
        return True
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return False
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return False
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    return verify(release) and old_version < new_version


def restart_oml(update=False):
    if update:
        get_latest_release()
    utils.ctl('restart')


def get_app_version(app):
    plist = app + '/Contents/Info.plist'
    if os.path.exists(plist):
        cmd = ['defaults', 'read', plist, 'CFBundleShortVersionString']
        return subprocess.check_output(cmd).strip()
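
# On macOS, upgrade_app() below compares the CFBundleShortVersionString of the
# installed '/Applications/Open Media Library.app' with the copy bundled under
# platform_darwin64 (read via `defaults read`) and replaces the installed app
# when they differ.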


def upgrade_app():
    base = os.path.dirname(settings.base_dir)
    if sys.platform == 'darwin':
        bundled_app = os.path.join(base, 'platform_darwin64/Applications/Open Media Library.app')
        app = '/Applications/Open Media Library.app'
        version = get_app_version(app)
        current_version = get_app_version(bundled_app)
        if version and current_version and version != current_version:
            try:
                shutil.rmtree(app)
                shutil.copytree(bundled_app, app)
            except:
                logger.debug('Failed to update Application', exc_info=True)
    # the win32 branch is currently disabled, kept for reference as a string literal
    '''
    elif sys.platform == 'win32':
        current_version = get_trayicon_version()
        if current_version != '0.2.0.0':
            msi = os.path.normpath(os.path.join(base, 'platform_win32', 'Open Media Library.msi'))
            cmd = ['msiexec.exe', '/qb', '/I', msi]
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            startupinfo.wShowWindow = subprocess.SW_HIDE
            subprocess.Popen(cmd, cwd=settings.base_dir, start_new_session=True, startupinfo=startupinfo)
    '''


def getVersion(data):
    '''
    check if new version is available
    '''
    response = {
        'current': settings.MINOR_VERSION,
        'version': settings.MINOR_VERSION,
        'upgrade': False,
    }
    if settings.MINOR_VERSION == 'git':
        '''
        cmd = ['git', 'rev-parse', '@']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        current = stdout.strip()
        cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/master']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        new = stdout.strip()[:40]
        response['update'] = len(new) == 40 and current != new
        '''
        response['update'] = False
    else:
        get_latest_release()
        if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
            return response
        if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
            return response
        with open(os.path.join(settings.updates_path, 'release.json')) as fd:
            release = json.load(fd)
        current = current_version('openmedialibrary')
        response['current'] = current
        new = release['modules']['openmedialibrary']['version']
        response['version'] = new
        response['update'] = current < new
    return response


actions.register(getVersion, cache=False)
actions.register(getVersion, cache=False, version='public')


def restart(data):
    '''
    restart (and upgrade if upgrades are available)
    '''
    restart_oml(data.get('update'))
    return {}


actions.register(restart, cache=False)
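
# Both handlers are exposed through oxtornado's action registry and return
# plain dicts. A getVersion() response has roughly this shape (values
# illustrative):
#
#     {'current': '20160101-100-abcdef0',
#      'version': '20160112-651-de984a3',
#      'upgrade': False,
#      'update': True}
#
# Note that 'upgrade' is only initialised, while the checks above set 'update';
# restart() always returns {}.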


class Update(Thread):
    _status = {
        'reload': False,
        'status': 'Updating Open Media Library...'
    }

    def __init__(self):
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def status(self, status, reload=False):
        from websocket import trigger_event
        self._status = {
            'reload': reload,
            'status': status,
        }
        trigger_event('updatestatus', self._status)

    def install(self):
        while update_available():
            self.status('Downloading...')
            max_retry = 5
            while max_retry > 0 and not download():
                max_retry -= 1
                self.status('Download failed, retrying...')
                time.sleep(5)
                self.status('Downloading...')
            self.status('Installing...')
            if not install():
                self.status('Installation failed.')
                return True
        return False

    def update_database(self):
        db_version = settings.server.get('db_version', 0)
        if db_version < settings.DB_VERSION:
            self.status('Updating...')
            if db_version < 3:
                db_version = migrate_3()
            if db_version < 4:
                db_version = migrate_4()
            if db_version < 5:
                db_version = migrate_5()
            if db_version < 6:
                db_version = migrate_6()
            if db_version < 7:
                db_version = migrate_7()
            if db_version < 9:
                db_version = migrate_8()
            if db_version < 10:
                db_version = migrate_10()
            if db_version < 11:
                db_version = migrate_11()
            if db_version < 12:
                db_version = migrate_12()
            if db_version < 13:
                db_version = migrate_13()
            if db_version < 15:
                db_version = migrate_15()
            if db_version < 16:
                db_version = migrate_16()
            if db_version < 17:
                db_version = migrate_17()
            if db_version < 18:
                db_version = migrate_18()
            if db_version < 19:
                db_version = migrate_19()
            if db_version < 20:
                db_version = migrate_20()
            settings.server['db_version'] = db_version

    def run(self):
        self.status('Checking for updates...')
        self.update_database()
        if self.install():
            restart_oml()
            return
        self.status('Relaunching...', True)
        restart_oml()
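
# Update is self-starting: constructing Update() spawns a daemon thread that
# reports progress via the 'updatestatus' websocket event, applies pending
# database migrations, installs any downloaded release and then restarts OML.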


def migrate_3():
    with db.session():
        import item.models
        for i in item.models.Item.find({
            'query': {
                'conditions': [{
                    'key': 'mediastate',
                    'value': 'available',
                    'operator': '=='
                }]
            }
        }):
            if not i.files.all():
                i.remove_file()
            else:
                f = i.files.all()[0]
                if not 'pages' in i.info and 'pages' in f.info:
                    i.info['pages'] = f.info['pages']
                    i.save()
    return 3


def migrate_4():
    with db.session() as session:
        import item.models
        from meta.utils import to_isbn13
        for i in item.models.Item.query:
            update = False
            if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                isbns = [isbn for isbn in isbns if isbn]
                if isbns:
                    i.meta['isbn'] = isbns[0]
                    if 'isbn' in i.info:
                        i.info['isbn'] = i.meta['isbn']
                else:
                    del i.meta['isbn']
                    if 'isbn' in i.info:
                        del i.info['isbn']
                update = True
            if 'isbn' in i.meta and not i.meta['isbn']:
                del i.meta['isbn']
                update = True
            if update:
                session.add(i)
        session.commit()
    return 4


def migrate_5():
    db.run_sql([
        'DROP INDEX IF EXISTS user_metadata_index',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'UPDATE sort SET sharemetadata = 0',
    ])
    with db.session() as session:
        import user.models
        for m in user.models.Metadata.query:
            data_hash = m.get_hash()
            if m.data_hash != data_hash:
                m.data_hash = data_hash
                session.add(m)
        session.commit()
        import item.models
        for i in item.models.Item.query:
            update = False
            if '_from' in i.info:
                del i.info['_from']
                update = True
            if update:
                session.add(i)
        session.commit()
        for i in item.models.Item.query:
            i.sync_metadata()
    return 5


def migrate_6():
    with db.session() as session:
        import item.models
        for s in item.models.Sort.query.filter_by(author=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(publisher=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(language=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(place=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(isbn=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(date=''):
            s.item.update_sort()
        session.commit()
    return 6


def migrate_7():
    with db.session() as session:
        import changelog
        for c in changelog.Changelog.query:
            if 'editmeta' in c.data or 'resetmeta' in c.data:
                session.delete(c)
        session.commit()
    db.run_sql('DROP TABLE IF EXISTS metadata')
    db.run_sql('DROP TABLE IF EXISTS scrape')
    db.run_sql('VACUUM')
    return 7


def migrate_8():
    for key in ('directory_service', 'meta_service', 'local_lookup', 'cert'):
        if key in settings.server:
            del settings.server[key]
    list_cache = os.path.join(settings.data_path, 'list_cache.json')
    if os.path.exists(list_cache):
        os.unlink(list_cache)
    with db.session() as session:
        import item.models
        for i in item.models.Item.query:
            delta = set(i.meta) - set(i.meta_keys)
            if delta:
                for key in delta:
                    del i.meta[key]
                session.add(i)
        session.commit()
        import changelog
        import user.models
        changelog.Changelog.query.delete()
        u = user.models.User.get(settings.USER_ID)
        u.rebuild_changelog()
        for peer in user.models.User.query:
            if peer.id != u.id:
                if len(peer.id) != 16:
                    session.delete(peer)
        session.commit()
    return 8


def migrate_10():
    with db.session() as session:
        from item.models import Item, Find
        from utils import get_by_id
        from item.person import get_sort_name
        import unicodedata
        sort_names = {}
        updates = {}
        for f in Find.query.filter(Find.key.in_(Item.filter_keys)):
            sort_type = get_by_id(settings.config['itemKeys'], f.key).get('sortType')
            if sort_type == 'person':
                if f.value in sort_names:
                    sortvalue = sort_names[f.value]
                else:
                    sortvalue = sort_names[f.value] = get_sort_name(f.value)
            else:
                sortvalue = f.value
            if sortvalue:
                sortvalue = ox.sort_string(unicodedata.normalize('NFKD', sortvalue)).lower()
            if not f.key in updates:
                updates[f.key] = {}
            updates[f.key][f.value] = sortvalue
        for key in updates:
            for value in updates[key]:
                Find.query.filter_by(key=key, value=value).update({'sortvalue': updates[key][value]})
        session.commit()
    return 10


def migrate_11():
    with db.session() as session:
        from user.models import User, Metadata, List
        from changelog import Changelog
        import utils
        for u in User.query.filter_by(peered=True):
            peer = utils.get_peer(u.id)
            last = Changelog.query.filter_by(user_id=u.id).order_by(text('-revision')).first()
            if last:
                peer.info['revision'] = last.revision
            listorder = []
            for l in List.query.filter_by(user_id=u.id).order_by('index_'):
                if l.name:
                    peer.info['lists'][l.name] = [i.id for i in l.get_items()]
                    listorder.append(l.name)
            if 'listorder' not in peer.info:
                peer.info['listorder'] = listorder
            for m in Metadata.query.filter_by(user_id=u.id):
                peer.library[m.item_id] = {
                    'meta': dict(m.data),
                    'meta_hash': m.data_hash,
                    'modified': m.modified,
                }
            peer.library.commit()
            peer.sync_info()
            peer.sync_db()
            Changelog.query.filter_by(user_id=u.id).delete()
            Metadata.query.filter_by(user_id=u.id).delete()
        session.commit()
    if db.table_exists('transfer'):
        import state
        import downloads
        state.online = False
        state.downloads = downloads.Downloads()
        r = state.db.session.execute('SELECT item_id, added, progress FROM transfer')
        for t in r:
            item_id, added, progress = t
            if added:
                state.downloads.transfers[item_id] = {
                    'added': added,
                    'progress': progress
                }
        state.db.session.commit()
        state.downloads.transfers.commit()
        state.downloads = None
    return 11


def migrate_12():
    db.run_sql([
        'DROP TABLE IF EXISTS transfer'
    ])
    return 12


def migrate_13():
    import settings
    import changelog
    import os
    import json
    path = os.path.join(settings.data_path, 'peers', '%s.log' % settings.USER_ID)
    if not os.path.exists(path):
        folder = os.path.dirname(path)
        if not os.path.exists(folder):
            os.makedirs(folder)
        with db.session() as session:
            revision = -1
            qs = changelog.Changelog.query.filter_by(user_id=settings.USER_ID)
            with open(path, 'wb') as fd:
                for c in qs.order_by('timestamp'):
                    data = json.dumps([c.revision, c.timestamp, json.loads(c.data)], ensure_ascii=False).encode('utf-8')
                    fd.write(data + b'\n')
                    revision = c.revision
            if revision > -1:
                settings.server['revision'] = revision
    return 13


def migrate_15():
    from user.models import List, User
    with db.session():
        l = List.get(':Public')
        if l and not len(l.items):
            l.remove()
        for u in User.query:
            if 'local' in u.info:
                del u.info['local']
                u.save()
    return 15


def migrate_16():
    db.run_sql([
        '''CREATE TABLE user2 (
            created DATETIME,
            modified DATETIME,
            id VARCHAR(43) NOT NULL,
            info BLOB,
            nickname VARCHAR(256),
            pending VARCHAR(64),
            queued BOOLEAN,
            peered BOOLEAN,
            online BOOLEAN,
            PRIMARY KEY (id),
            CHECK (queued IN (0, 1)),
            CHECK (peered IN (0, 1)),
            CHECK (online IN (0, 1))
        )''',
        '''INSERT INTO user2 (created, modified, id, info, nickname, pending, queued, peered, online)
            SELECT created, modified, id, info, nickname, pending, queued, peered, online FROM user''',
        'DROP TABLE user',
        'ALTER TABLE user2 RENAME TO user',
        'CREATE INDEX IF NOT EXISTS ix_user_nickname ON user (nickname)'
    ])
    return 16


def migrate_17():
    from user.models import List, User
    from changelog import add_record
    with db.session():
        l = List.get(':Public')
        if not l:
            add_record('removelist', 'Public')
        lists = []
        for l in List.query.filter_by(user_id=settings.USER_ID).order_by('index_'):
            if l.type == 'static' and l.name not in ('', 'Inbox'):
                lists.append(l.name)
        add_record('orderlists', lists)
    return 17


def migrate_18():
    db.run_sql([
        '''CREATE TABLE annotation (
            _id INTEGER NOT NULL,
            id VARCHAR(43),
            created DATETIME,
            modified DATETIME,
            user_id VARCHAR(43),
            item_id VARCHAR(43),
            data BLOB,
            findquotes TEXT,
            findnotes TEXT,
            PRIMARY KEY (_id),
            FOREIGN KEY(user_id) REFERENCES user (id),
            FOREIGN KEY(item_id) REFERENCES item (id)
        )'''])
    db.run_sql([
        'CREATE INDEX ix_annotation_findquotes ON annotation (findquotes)',
        'CREATE INDEX ix_annotation_findnotes ON annotation (findnotes)'
    ])
    return 18


def migrate_19():
    from user.models import User
    with db.session():
        peers = [u for u in User.query.filter_by(peered=True)]
        peers.sort(key=lambda u: utils.user_sort_key(u.json()))
        for u in peers:
            peer = utils.get_peer(u.id)
            if not peer.info.get('revision') and os.path.exists(peer._logpath) and os.path.getsize(peer._logpath):
                logger.debug('try to apply pending logs for %s', u.id)
                try:
                    peer.apply_log()
                except:
                    logger.error('failed to apply log for %s', u.id)
    return 19


def migrate_20():
    from glob import glob
    changed = False
    for log in glob(os.path.join(settings.data_path, 'peers', '*.log')):
        with open(log, 'rb') as fd:
            data = fd.read()
        try:
            data.decode('utf-8')
        except UnicodeDecodeError:
            data = data.decode('Windows-1252')
            logger.error('convert %s to utf-8', log)
            with open(log, 'wb') as fd:
                fd.write(data.encode('utf-8'))
            changed = True
    if changed:
        migrate_19()
    return 20
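
# Migration pattern: each migrate_N() performs one upgrade step and returns the
# schema version it migrated to; Update.update_database() chains these checks
# in order and finally stores the result in settings.server['db_version'].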