diff --git a/ctl b/ctl
index 74d9d76..6e46c91 100755
--- a/ctl
+++ b/ctl
@@ -1,6 +1,5 @@
#!/usr/bin/env bash
NAME="openmedialibrary"
-PID="/tmp/$NAME.$USER.pid"
cd "`dirname "$0"`"
if [ -e oml ]; then
@@ -25,16 +24,7 @@ else
mv "$BASE/config/release.json" "$BASE/data/release.json"
fi
fi
-if [ ! -e "$PID" ]; then
-if [ -e "$DATA/tor/hostname" ]; then
- onion=$(cat "$DATA/tor/hostname")
- id=${onion/.onion/}
- PID="/tmp/$NAME.$USER.$id.pid"
-fi
-fi
-if [ ! -e "$PID" ]; then
- PID="$DATA/$NAME.pid"
-fi
+PID="$DATA/$NAME.pid"
PLATFORM_PYTHON=3.4
SHARED_PYTHON=3.7
@@ -44,7 +34,7 @@ else
if [ $SYSTEM == "Linux" ]; then
if [ $PLATFORM == "x86_64" ]; then
ARCH=64
- PLATFORM_PYTHON=3.7
+ PLATFORM_PYTHON=3.11
else
ARCH=32
fi
@@ -74,6 +64,10 @@ PATH="$PLATFORM_ENV/bin:$PATH"
SHARED_ENV="$BASE/platform/Shared"
export SHARED_ENV
+if [ -e "$SHARED_ENV/etc/openssl/openssl.cnf" ]; then
+ export OPENSSL_CONF="$SHARED_ENV/etc/openssl/openssl.cnf"
+fi
+
PATH="$SHARED_ENV/bin:$PATH"
export PATH
diff --git a/oml/changelog.py b/oml/changelog.py
index f641fd4..025611a 100644
--- a/oml/changelog.py
+++ b/oml/changelog.py
@@ -297,7 +297,7 @@ class Changelog(db.Model):
return True
def action_addpeer(self, user, timestamp, peerid, username):
- if len(peerid) == 16:
+ if len(peerid) == settings.ID_LENGTH:
from user.models import User
if not 'users' in user.info:
user.info['users'] = {}
@@ -318,7 +318,7 @@ class Changelog(db.Model):
return True
def action_editpeer(self, user, timestamp, peerid, data):
- if len(peerid) == 16:
+ if len(peerid) == settings.ID_LENGTH:
from user.models import User
peer = User.get_or_create(peerid)
update = False
@@ -466,7 +466,7 @@ class Changelog(db.Model):
elif op == 'addpeer':
peer_id = data[1]
username = data[2]
- if len(peer_id) == 16:
+ if len(peer_id) == settings.ID_LENGTH:
peer = User.get(peer_id)
if peer:
username = peer.json().get('username', 'anonymous')
diff --git a/oml/commands.py b/oml/commands.py
index 25e1fa8..c252666 100644
--- a/oml/commands.py
+++ b/oml/commands.py
@@ -154,7 +154,7 @@ def command_update_static(*args):
import utils
setup.create_db()
old_oxjs = os.path.join(settings.static_path, 'oxjs')
- oxjs = os.path.join(settings.base_dir, '..', 'oxjs')
+ oxjs = os.path.join(settings.top_dir, 'oxjs')
if os.path.exists(old_oxjs) and not os.path.exists(oxjs):
shutil.move(old_oxjs, oxjs)
if not os.path.exists(oxjs):
@@ -163,7 +163,7 @@ def command_update_static(*args):
os.system('cd "%s" && git pull' % oxjs)
r('python3', os.path.join(oxjs, 'tools', 'build', 'build.py'), '-nogeo')
utils.update_static()
- reader = os.path.join(settings.base_dir, '..', 'reader')
+ reader = os.path.join(settings.top_dir, 'reader')
if not os.path.exists(reader):
r('git', 'clone', '--depth', '1', 'https://code.0x2620.org/0x2620/openmedialibrary_reader.git', reader)
elif os.path.exists(os.path.join(reader, '.git')):
diff --git a/oml/item/api.py b/oml/item/api.py
index d0c04a5..db9d102 100644
--- a/oml/item/api.py
+++ b/oml/item/api.py
@@ -5,7 +5,7 @@ import os
import unicodedata
from sqlalchemy.orm import load_only
-from sqlalchemy.sql.expression import text
+from sqlalchemy.sql.expression import text, column
from sqlalchemy import func
from oxtornado import actions
@@ -58,8 +58,13 @@ def find(data):
qs = models.Find.query.filter_by(key=q['group'])
if items is None or items.first():
if items is not None:
- qs = qs.filter(models.Find.item_id.in_(items))
- values = list(qs.values('value', 'findvalue', 'sortvalue'))
+ ids = [i[0] for i in items.with_entities(column('id'))]
+ qs = qs.filter(models.Find.item_id.in_(ids))
+ values = list(qs.values(
+ column('value'),
+ column('findvalue'),
+ column('sortvalue'),
+ ))
for f in values:
value = f[0]
findvalue = f[1]
@@ -167,7 +172,7 @@ actions.register(edit, cache=False)
def remove(data):
'''
takes {
- id
+ ids
}
'''
if 'ids' in data and data['ids']:
diff --git a/oml/item/handlers.py b/oml/item/handlers.py
index 19d6460..901e579 100644
--- a/oml/item/handlers.py
+++ b/oml/item/handlers.py
@@ -17,7 +17,8 @@ import db
import settings
import tornado.web
import tornado.gen
-import tornado.concurrent
+from concurrent.futures import ThreadPoolExecutor
+from tornado.concurrent import run_on_executor
from oxtornado import json_dumps, json_response
@@ -27,6 +28,8 @@ import state
import logging
logger = logging.getLogger(__name__)
+MAX_WORKERS = 4
+
class OptionalBasicAuthMixin(object):
class SendChallenge(Exception):
@@ -90,6 +93,23 @@ class EpubHandler(OMLHandler):
self.set_header('Content-Type', content_type)
self.write(z.read(filename))
+class CropHandler(OMLHandler):
+
+ def get(self, id, page, left, top, right, bottom):
+ from media.pdf import crop
+ with db.session():
+ item = Item.get(id)
+ path = item.get_path()
+ data = crop(path, page, left, top, right, bottom)
+ if path and data:
+ self.set_header('Content-Type', 'image/jpeg')
+ self.set_header('Content-Length', str(len(data)))
+ self.write(data)
+ return
+ self.set_status(404)
+ return
+
+
def serve_static(handler, path, mimetype, include_body=True, disposition=None):
handler.set_header('Content-Type', mimetype)
size = os.stat(path).st_size
@@ -177,6 +197,7 @@ class ReaderHandler(OMLHandler):
return serve_static(self, path, 'text/html')
class UploadHandler(OMLHandler):
+ executor = ThreadPoolExecutor(max_workers=MAX_WORKERS)
def initialize(self, context=None):
self._context = context
@@ -184,7 +205,60 @@ class UploadHandler(OMLHandler):
def get(self):
self.write('use POST')
- @tornado.web.asynchronous
+ @run_on_executor
+ def save_files(self, request):
+ listname = request.arguments.get('list', None)
+ if listname:
+ listname = listname[0]
+ if isinstance(listname, bytes):
+ listname = listname.decode('utf-8')
+ with self._context():
+ prefs = settings.preferences
+ ids = []
+ for upload in request.files.get('files', []):
+ filename = upload.filename
+ id = get_id(data=upload.body)
+ ids.append(id)
+ file = File.get(id)
+ if not file or not os.path.exists(file.fullpath()):
+ logger.debug('add %s to library', id)
+ prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep)
+ prefix_imported = os.path.join(prefix_books, '.import' + os.sep)
+ ox.makedirs(prefix_imported)
+ import_name = os.path.join(prefix_imported, filename)
+ n = 1
+ while os.path.exists(import_name):
+ n += 1
+ name, extension = filename.rsplit('.', 1)
+ if extension == 'kepub':
+ extension = 'epub'
+ import_name = os.path.join(prefix_imported, '%s [%d].%s' % (name, n, extension))
+ with open(import_name, 'wb') as fd:
+ fd.write(upload.body)
+ file = add_file(id, import_name, prefix_books)
+ file.move()
+ else:
+ user = state.user()
+ if not file.item:
+ item = Item.get_or_create(id=file.sha1, info=file.info)
+ file.item_id = item.id
+ state.db.session.add(file)
+ state.db.session.commit()
+ else:
+ item = file.item
+ if user not in item.users:
+ logger.debug('add %s to local user', id)
+ item.add_user(user)
+ add_record('additem', item.id, file.info)
+ add_record('edititem', item.id, item.meta)
+ item.update()
+ if listname and ids:
+ list_ = List.get(settings.USER_ID, listname)
+ if list_:
+ list_.add_items(ids)
+ response = json_response({'ids': ids})
+ return response
+
@tornado.gen.coroutine
def post(self):
if 'origin' in self.request.headers and self.request.host not in self.request.headers['origin']:
@@ -193,60 +267,7 @@ class UploadHandler(OMLHandler):
self.write('')
return
- def save_files(context, request, callback):
- listname = request.arguments.get('list', None)
- if listname:
- listname = listname[0]
- if isinstance(listname, bytes):
- listname = listname.decode('utf-8')
- with context():
- prefs = settings.preferences
- ids = []
- for upload in request.files.get('files', []):
- filename = upload.filename
- id = get_id(data=upload.body)
- ids.append(id)
- file = File.get(id)
- if not file or not os.path.exists(file.fullpath()):
- logger.debug('add %s to library', id)
- prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep)
- prefix_imported = os.path.join(prefix_books, '.import' + os.sep)
- ox.makedirs(prefix_imported)
- import_name = os.path.join(prefix_imported, filename)
- n = 1
- while os.path.exists(import_name):
- n += 1
- name, extension = filename.rsplit('.', 1)
- if extension == 'kepub':
- extension = 'epub'
- import_name = os.path.join(prefix_imported, '%s [%d].%s' % (name, n, extension))
- with open(import_name, 'wb') as fd:
- fd.write(upload.body)
- file = add_file(id, import_name, prefix_books)
- file.move()
- else:
- user = state.user()
- if not file.item:
- item = Item.get_or_create(id=file.sha1, info=file.info)
- file.item_id = item.id
- state.db.session.add(file)
- state.db.session.commit()
- else:
- item = file.item
- if user not in item.users:
- logger.debug('add %s to local user', id)
- item.add_user(user)
- add_record('additem', item.id, file.info)
- add_record('edititem', item.id, item.meta)
- item.update()
- if listname and ids:
- l = List.get(settings.USER_ID, listname)
- if l:
- l.add_items(ids)
- response = json_response({'ids': ids})
- callback(response)
-
- response = yield tornado.gen.Task(save_files, self._context, self.request)
+ response = yield self.save_files(self.request)
if 'status' not in response:
response = json_response(response)
response = json_dumps(response)
diff --git a/oml/item/models.py b/oml/item/models.py
index 508f46e..61732d1 100644
--- a/oml/item/models.py
+++ b/oml/item/models.py
@@ -322,7 +322,7 @@ class Item(db.Model):
def remove_annotations(self):
from annotation.models import Annotation
- for a in Annotation.query.filter_by(item_id=self.id, user_id=state.user()):
+ for a in Annotation.query.filter_by(item_id=self.id, user_id=settings.USER_ID):
a.add_record('removeannotation')
a.delete()
@@ -733,7 +733,13 @@ class File(db.Model):
return re.sub(r'^\.|\.$|:|/|\?|<|>|\\|\*|\||"', '_', string)
prefs = settings.preferences
prefix = os.sep.join(os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/').split('/'))
- if not self.item:
+ not_item = False
+ try:
+ not_item = not self.item
+        except Exception:
+ logger.debug('trying to move an item that was just deleted', exc_info=True)
+ not_item = True
+ if not_item:
return
j = self.item.json(keys=['title', 'author', 'publisher', 'date', 'extension'])
diff --git a/oml/item/query.py b/oml/item/query.py
index 9683dee..91be5e1 100644
--- a/oml/item/query.py
+++ b/oml/item/query.py
@@ -23,7 +23,6 @@ def parse(data):
if [r for r in query['range'] if not isinstance(r, int)]:
logger.error('range must be 2 integers! got this: %s', query['range'])
query['range'] = [0, 0]
- #print data
query['qs'] = models.Item.find(data)
if 'group' not in query:
query['qs'] = order(query['qs'], query['sort'])
diff --git a/oml/item/scan.py b/oml/item/scan.py
index 78c8fe6..e4a97a8 100644
--- a/oml/item/scan.py
+++ b/oml/item/scan.py
@@ -225,6 +225,7 @@ def run_scan():
missing = ids - library_items
if missing:
logger.debug('%s items in library without a record', len(missing))
+        settings.server['last_scan'] = time.time()  # mktime(gmtime()) would skew the epoch by the UTC offset
def change_path(old, new):
old_icons = os.path.join(old, 'Metadata', 'icons.db')
diff --git a/oml/library.py b/oml/library.py
index eee921e..b490e16 100644
--- a/oml/library.py
+++ b/oml/library.py
@@ -153,7 +153,7 @@ class Peer(object):
self.info['lists'][name] = list(set(self.info['lists'][name]) - set(ids))
elif action == 'addpeer':
peerid, username = args
- if len(peerid) == 16:
+ if len(peerid) == settings.ID_LENGTH:
self.info['peers'][peerid] = {'username': username}
# fixme, just trigger peer update here
from user.models import User
@@ -164,7 +164,7 @@ class Peer(object):
peer.save()
elif action == 'editpeer':
peerid, data = args
- if len(peerid) == 16:
+ if len(peerid) == settings.ID_LENGTH:
if peerid not in self.info['peers']:
self.info['peers'][peerid] = {}
for key in ('username', 'contact'):
@@ -377,6 +377,10 @@ def sync_db():
from sqlalchemy.orm import load_only
import item.models
first = True
+ missing_previews = []
+ state.sync_db = True
+
+ #FIXME: why is this loop needed
with db.session():
sort_ids = {i.item_id for i in item.models.Sort.query.options(load_only('item_id'))}
if sort_ids:
@@ -387,9 +391,11 @@ def sync_db():
if first:
first = False
logger.debug('sync items')
- i.update(commit=False)
- if i.info.get('mediastate') == 'unavailable' and state.tasks:
- state.tasks.queue('getpreview', i.id)
+ #why?
+ #i.update(commit=False)
+ i.update_sort(commit=False)
+ if i.info.get('mediastate') == 'unavailable':
+ missing_previews.append(i.id)
commit = True
#logger.debug('sync:%s', i)
t0 = maybe_commit(t0)
@@ -397,6 +403,7 @@ def sync_db():
break
if commit:
state.db.session.commit()
+
if not first:
logger.debug('synced items')
if not state.shutdown:
@@ -408,6 +415,12 @@ def sync_db():
item.models.Sort.query.filter_by(item_id=None).delete()
item.models.Find.query.filter_by(item_id=None).delete()
+ if missing_previews and state.tasks:
+ logger.debug('queueing download of %s missing previews', len(missing_previews))
+ for id in missing_previews:
+ state.tasks.queue('getpreview', id)
+ state.sync_db = False
+
def cleanup_lists():
import item.models
import user.models
diff --git a/oml/localnodes.py b/oml/localnodes.py
index 5178f0a..dcd2751 100644
--- a/oml/localnodes.py
+++ b/oml/localnodes.py
@@ -1,22 +1,26 @@
# -*- coding: utf-8 -*-
+import asyncio
import socket
import netifaces
from zeroconf import (
- ServiceBrowser, ServiceInfo, ServiceStateChange, Zeroconf
+ ServiceBrowser, ServiceInfo, ServiceStateChange
)
+from zeroconf.asyncio import AsyncZeroconf
from tornado.ioloop import PeriodicCallback
import settings
import state
from tor_request import get_opener
+from utils import time_cache
import logging
logger = logging.getLogger(__name__)
-def can_connect(data):
+@time_cache(3)
+def can_connect(**data):
try:
opener = get_opener(data['id'])
headers = {
@@ -60,100 +64,120 @@ class LocalNodes(dict):
return
self.setup()
self._ip_changed = PeriodicCallback(self._update_if_ip_changed, 60000)
+ state.main.add_callback(self._ip_changed.start)
def setup(self):
self.local_ips = get_broadcast_interfaces()
- self.zeroconf = {ip: Zeroconf(interfaces=[ip]) for ip in self.local_ips}
- self.register_service()
+ self.zeroconf = {ip: AsyncZeroconf(interfaces=[ip]) for ip in self.local_ips}
+ asyncio.create_task(self.register_service())
self.browse()
- def _update_if_ip_changed(self):
+ async def _update_if_ip_changed(self):
local_ips = get_broadcast_interfaces()
username = settings.preferences.get('username', 'anonymous')
if local_ips != self.local_ips or self.username != username:
- self.close()
+ await self.close()
self.setup()
def browse(self):
self.browser = {
- ip: ServiceBrowser(self.zeroconf[ip], self.service_type, handlers=[self.on_service_state_change])
+ ip: ServiceBrowser(self.zeroconf[ip].zeroconf, self.service_type, handlers=[self.on_service_state_change])
for ip in self.zeroconf
}
- def register_service(self):
+ async def register_service(self):
if self.local_info:
for local_ip, local_info in self.local_info:
- self.zeroconf[local_ip].unregister_service(local_info)
+            await self.zeroconf[local_ip].async_unregister_service(local_info)
self.local_info = None
local_name = socket.gethostname().partition('.')[0] + '.local.'
port = settings.server['node_port']
+ self.local_info = []
self.username = settings.preferences.get('username', 'anonymous')
desc = {
- 'username': self.username
+ 'username': self.username,
+ 'id': settings.USER_ID,
}
- self.local_info = []
+ tasks = []
for i, local_ip in enumerate(get_broadcast_interfaces()):
if i:
- name = '%s-%s [%s].%s' % (desc['username'], i+1, settings.USER_ID, self.service_type)
+ name = '%s [%s].%s' % (desc['username'], i, self.service_type)
else:
- name = '%s [%s].%s' % (desc['username'], settings.USER_ID, self.service_type)
- local_info = ServiceInfo(self.service_type, name,
- socket.inet_aton(local_ip), port, 0, 0, desc, local_name)
- self.zeroconf[local_ip].register_service(local_info)
+ name = '%s.%s' % (desc['username'], self.service_type)
+
+ addresses = [socket.inet_aton(local_ip)]
+ local_info = ServiceInfo(self.service_type, name, port, 0, 0, desc, local_name, addresses=addresses)
+ task = self.zeroconf[local_ip].async_register_service(local_info)
+ tasks.append(task)
self.local_info.append((local_ip, local_info))
+ await asyncio.gather(*tasks)
def __del__(self):
self.close()
- def close(self):
+ async def close(self):
if self.local_info:
+ tasks = []
for local_ip, local_info in self.local_info:
try:
- self.zeroconf[local_ip].unregister_service(local_info)
+ task = self.zeroconf[local_ip].async_unregister_service(local_info)
+ tasks.append(task)
except:
logger.debug('exception closing zeroconf', exc_info=True)
self.local_info = None
if self.zeroconf:
for local_ip in self.zeroconf:
try:
- self.zeroconf[local_ip].close()
+ task = self.zeroconf[local_ip].async_close()
+ tasks.append(task)
except:
logger.debug('exception closing zeroconf', exc_info=True)
self.zeroconf = None
for id in list(self):
self.pop(id, None)
+ await asyncio.gather(*tasks)
def on_service_state_change(self, zeroconf, service_type, name, state_change):
- if '[' not in name:
- id = name.split('.')[0]
- else:
- id = name.split('[')[1].split(']')[0]
- if id == settings.USER_ID:
- return
- if state_change is ServiceStateChange.Added:
+ try:
info = zeroconf.get_service_info(service_type, name)
- if info:
+        except Exception:  # NotRunningException; 'zeroconf' arg shadows the module, so it can't be named here
+ return
+ if info and b'id' in info.properties:
+ id = info.properties[b'id'].decode()
+ if id == settings.USER_ID:
+ return
+ if len(id) != settings.ID_LENGTH:
+ return
+ if state_change is ServiceStateChange.Added:
+ new = id not in self
self[id] = {
'id': id,
- 'host': socket.inet_ntoa(info.address),
+ 'host': socket.inet_ntoa(info.addresses[0]),
'port': info.port
}
if info.properties:
for key, value in info.properties.items():
key = key.decode()
self[id][key] = value.decode()
- logger.debug('add: %s [%s] (%s:%s)', self[id].get('username', 'anon'), id, self[id]['host'], self[id]['port'])
+ logger.debug(
+ '%s: %s [%s] (%s:%s)',
+ 'add' if new else 'update',
+ self[id].get('username', 'anon'),
+ id,
+ self[id]['host'],
+ self[id]['port']
+ )
if state.tasks and id in self:
state.tasks.queue('addlocalinfo', self[id])
- elif state_change is ServiceStateChange.Removed:
- logger.debug('remove: %s', id)
- self.pop(id, None)
- if state.tasks:
- state.tasks.queue('removelocalinfo', id)
+ elif state_change is ServiceStateChange.Removed:
+ logger.debug('remove: %s', id)
+ self.pop(id, None)
+ if state.tasks:
+ state.tasks.queue('removelocalinfo', id)
def get_data(self, user_id):
data = self.get(user_id)
- if data and can_connect(data):
+ if data and can_connect(**data):
return data
return None
diff --git a/oml/media/epub.py b/oml/media/epub.py
index 552a134..92ab289 100644
--- a/oml/media/epub.py
+++ b/oml/media/epub.py
@@ -68,17 +68,17 @@ def cover(path):
if manifest:
manifest = manifest[0]
if metadata and manifest:
- for e in metadata.getchildren():
+ for e in list(metadata):
if e.tag == '{http://www.idpf.org/2007/opf}meta' and e.attrib.get('name') == 'cover':
cover_id = e.attrib['content']
- for e in manifest.getchildren():
+ for e in list(manifest):
if e.attrib['id'] == cover_id:
filename = unquote(e.attrib['href'])
filename = normpath(os.path.join(os.path.dirname(opf[0]), filename))
if filename in files:
return use(filename)
if manifest:
- images = [e for e in manifest.getchildren() if 'image' in e.attrib['media-type']]
+ images = [e for e in list(manifest) if 'image' in e.attrib['media-type']]
if images:
image_data = []
for e in images:
@@ -89,7 +89,7 @@ def cover(path):
if image_data:
image_data.sort(key=lambda name: z.getinfo(name).file_size)
return use(image_data[-1])
- for e in manifest.getchildren():
+ for e in list(manifest):
if 'html' in e.attrib['media-type']:
filename = unquote(e.attrib['href'])
filename = normpath(os.path.join(os.path.dirname(opf[0]), filename))
@@ -118,7 +118,7 @@ def info(epub):
metadata = info.findall('{http://www.idpf.org/2007/opf}metadata')
if metadata:
metadata = metadata[0]
- for e in metadata.getchildren():
+ for e in list(metadata):
if e.text and e.text.strip() and e.text not in ('unknown', 'none'):
key = e.tag.split('}')[-1]
key = {
@@ -148,7 +148,7 @@ def info(epub):
for point in nav_map.findall('{http://www.daisy.org/z3986/2005/ncx/}navPoint'):
label = point.find('{http://www.daisy.org/z3986/2005/ncx/}navLabel')
if label:
- txt = label.getchildren()[0].text
+ txt = list(label)[0].text
if txt:
contents.append(txt)
if contents:
diff --git a/oml/media/pdf.py b/oml/media/pdf.py
index 75313b2..2f58d9a 100644
--- a/oml/media/pdf.py
+++ b/oml/media/pdf.py
@@ -10,6 +10,7 @@ from glob import glob
from datetime import datetime
from PyPDF2 import PdfFileReader
+from PIL import Image
import ox
import settings
@@ -24,13 +25,13 @@ def cover(pdf):
else:
return page(pdf, 1)
-def ql_cover(pdf):
+def ql_cover(pdf, size=1024):
tmp = tempfile.mkdtemp()
cmd = [
'qlmanage',
'-t',
'-s',
- '1024',
+ str(size),
'-o',
tmp,
pdf
@@ -48,7 +49,7 @@ def ql_cover(pdf):
shutil.rmtree(tmp)
return data
-def page(pdf, page):
+def page(pdf, page, size=1024):
tmp = tempfile.mkdtemp()
if sys.platform == 'win32':
pdf = get_short_path_name(pdf)
@@ -56,8 +57,8 @@ def page(pdf, page):
'pdftocairo',
pdf,
'-jpeg',
- '-f', str(page), '-l', str(page),
- '-scale-to', '1024', '-cropbox',
+ '-f', str(page), '-l', str(page),
+ '-scale-to', str(size), '-cropbox',
os.path.join(tmp, 'page')
]
if sys.platform == 'win32':
@@ -79,6 +80,46 @@ def page(pdf, page):
shutil.rmtree(tmp)
return data
+def crop(pdf, page, left, top, right, bottom):
+ size = 2048
+ tmp = tempfile.mkdtemp()
+ if sys.platform == 'win32':
+ pdf = get_short_path_name(pdf)
+ cmd = [
+ 'pdftocairo',
+ pdf,
+ '-jpeg',
+ '-f', str(page), '-l', str(page),
+ '-scale-to', str(size), '-cropbox',
+ os.path.join(tmp, 'page')
+ ]
+ if sys.platform == 'win32':
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = subprocess.SW_HIDE
+ p = subprocess.Popen(cmd, close_fds=True, startupinfo=startupinfo)
+ else:
+ p = subprocess.Popen(cmd, close_fds=True)
+ p.wait()
+ image = glob('%s/*' % tmp)
+ if image:
+ image = image[0]
+ crop = [int(p) for p in (left, top, right, bottom)]
+ img = Image.open(image).crop(crop)
+ img.save(image)
+ with open(image, 'rb') as fd:
+ data = fd.read()
+ else:
+ logger.debug('pdftocairo %s %s', pdf, ' '.join(cmd))
+ data = None
+ shutil.rmtree(tmp)
+ return data
+
+
+
+
+
+
'''
def page(pdf, page):
image = tempfile.mkstemp('.jpg')[1]
@@ -281,3 +322,4 @@ def extract_isbn(text):
isbns = find_isbns(text)
if isbns:
return isbns[0]
+
diff --git a/oml/media/txt.py b/oml/media/txt.py
index ca76253..29a61c7 100644
--- a/oml/media/txt.py
+++ b/oml/media/txt.py
@@ -7,8 +7,9 @@ import tempfile
import subprocess
def cover(path):
+ import settings
image = tempfile.mkstemp('.jpg')[1]
- cmd = ['python3', '../reader/txt.js/txt.py', '-i', path, '-o', image]
+ cmd = ['python3', os.path.join(settings.top_dir, 'reader/txt.js/txt.py'), '-i', path, '-o', image]
p = subprocess.Popen(cmd, close_fds=True)
p.wait()
with open(image, 'rb') as fd:
diff --git a/oml/node/server.py b/oml/node/server.py
index bc2e816..ae5e8df 100644
--- a/oml/node/server.py
+++ b/oml/node/server.py
@@ -12,18 +12,20 @@ import socket
import socketserver
import time
-from Crypto.PublicKey import RSA
-from Crypto.Util.asn1 import DerSequence
-from OpenSSL.crypto import dump_privatekey, FILETYPE_ASN1
from OpenSSL.SSL import (
- Context, Connection, TLSv1_2_METHOD,
- VERIFY_PEER, VERIFY_FAIL_IF_NO_PEER_CERT, VERIFY_CLIENT_ONCE
+ Connection,
+ Context,
+ TLSv1_2_METHOD,
+ VERIFY_CLIENT_ONCE,
+ VERIFY_FAIL_IF_NO_PEER_CERT,
+ VERIFY_PEER,
)
import db
import settings
import state
import user
+import utils
from changelog import changelog_size, changelog_path
from websocket import trigger_event
@@ -34,16 +36,15 @@ import logging
logger = logging.getLogger(__name__)
-def get_service_id(key):
- '''
- service_id is the first half of the sha1 of the rsa public key encoded in base32
- '''
- # compute sha1 of public key and encode first half in base32
- pub_der = DerSequence()
- pub_der.decode(dump_privatekey(FILETYPE_ASN1, key))
- public_key = RSA.construct((pub_der._seq[1], pub_der._seq[2])).exportKey('DER')[22:]
- service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode()
- return service_id
+def get_service_id(connection):
+ certs = connection.get_peer_cert_chain()
+ for cert in certs:
+ if cert.get_signature_algorithm().decode() == "ED25519":
+ pubkey = cert.get_pubkey()
+ public_key = pubkey.to_cryptography_key().public_bytes_raw()
+ service_id = utils.get_onion(public_key)
+ return service_id
+ raise Exception("connection with invalid certificate")
class TLSTCPServer(socketserver.TCPServer):
@@ -55,7 +56,7 @@ class TLSTCPServer(socketserver.TCPServer):
socketserver.TCPServer.__init__(self, server_address, HandlerClass)
ctx = Context(TLSv1_2_METHOD)
ctx.use_privatekey_file(settings.ssl_key_path)
- ctx.use_certificate_file(settings.ssl_cert_path)
+ ctx.use_certificate_chain_file(settings.ssl_cert_path)
# only allow clients with cert:
ctx.set_verify(VERIFY_PEER | VERIFY_CLIENT_ONCE | VERIFY_FAIL_IF_NO_PEER_CERT, self._accept)
#ctx.set_verify(VERIFY_PEER | VERIFY_CLIENT_ONCE, self._accept)
@@ -111,8 +112,7 @@ class Handler(http.server.SimpleHTTPRequestHandler):
return self.do_GET()
def do_GET(self):
- #x509 = self.connection.get_peer_certificate()
- #user_id = get_service_id(x509.get_pubkey()) if x509 else None
+ user_id = get_service_id(self.connection)
import item.models
parts = self.path.split('/')
if len(parts) == 3 and parts[1] in ('get', 'preview'):
@@ -185,8 +185,7 @@ class Handler(http.server.SimpleHTTPRequestHandler):
self.end_headers()
def _changelog(self):
- x509 = self.connection.get_peer_certificate()
- user_id = get_service_id(x509.get_pubkey()) if x509 else None
+ user_id = get_service_id(self.connection)
with db.session():
u = user.models.User.get(user_id)
if not u:
@@ -257,8 +256,7 @@ class Handler(http.server.SimpleHTTPRequestHandler):
ping responds public ip
'''
- x509 = self.connection.get_peer_certificate()
- user_id = get_service_id(x509.get_pubkey()) if x509 else None
+ user_id = get_service_id(self.connection)
content = {}
try:
diff --git a/oml/nodes.py b/oml/nodes.py
index 27d7658..d6c1112 100644
--- a/oml/nodes.py
+++ b/oml/nodes.py
@@ -448,6 +448,9 @@ class Node(Thread):
except socket.timeout:
logger.debug('timeout %s', url)
return False
+ except urllib.error.URLError as e:
+ logger.debug('urllib.error.URLError %s', e)
+ return False
except socks.GeneralProxyError:
logger.debug('download failed %s', url)
return False
@@ -598,6 +601,8 @@ class Nodes(Thread):
def _pull(self):
if not state.sync_enabled or settings.preferences.get('downloadRate') == 0:
return
+ if state.sync_db:
+ return
if state.activity and state.activity.get('activity') == 'import':
return
self._pulling = True
@@ -617,12 +622,12 @@ class Nodes(Thread):
node.pullChanges()
self._pulling = False
- def join(self):
+ async def join(self):
self._q.put(None)
for node in list(self._nodes.values()):
node.join()
if self.local:
- self.local.close()
+ await self.local.close()
return super().join(1)
def publish_node():
diff --git a/oml/oxtornado.py b/oml/oxtornado.py
index b34cdfe..23b4b91 100644
--- a/oml/oxtornado.py
+++ b/oml/oxtornado.py
@@ -83,9 +83,18 @@ class ApiHandler(tornado.web.RequestHandler):
context = self._context
if context is None:
context = defaultcontext
- action = request.arguments.get('action', [None])[0].decode('utf-8')
- data = request.arguments.get('data', [b'{}'])[0]
- data = json.loads(data.decode('utf-8')) if data else {}
+ action = None
+        if request.headers.get('Content-Type', '').startswith('application/json'):
+ try:
+ r = json.loads(request.body.decode())
+ action = r['action']
+ data = r['data']
+            except (ValueError, KeyError, TypeError):
+ action = None
+ if not action:
+ action = request.arguments.get('action', [None])[0].decode('utf-8')
+ data = request.arguments.get('data', [b'{}'])[0]
+ data = json.loads(data.decode('utf-8')) if data else {}
if not action:
methods = list(actions.keys())
api = []
diff --git a/oml/queryparser.py b/oml/queryparser.py
index 8562218..b6e7dcb 100644
--- a/oml/queryparser.py
+++ b/oml/queryparser.py
@@ -2,9 +2,10 @@
from datetime import datetime
import unicodedata
+import sqlalchemy.orm.exc
from sqlalchemy.sql import operators
from sqlalchemy.orm import load_only
-from sqlalchemy.sql.expression import text
+from sqlalchemy.sql.expression import text, column
import utils
import settings
@@ -13,6 +14,7 @@ from fulltext import find_fulltext
import logging
logger = logging.getLogger(__name__)
+
def get_operator(op, type='str'):
return {
'str': {
@@ -134,7 +136,8 @@ class Parser(object):
q = get_operator(op)(self._find.findvalue, v)
if k != '*':
q &= (self._find.key == k)
- ids = self._model.query.join(self._find).filter(q).options(load_only('id'))
+ ids = self._find.query.filter(q).with_entities(column('item_id'))
+ ids = [i[0] for i in ids]
return self.in_ids(ids, exclude)
elif k == 'list':
nickname, name = v.split(':', 1)
@@ -220,7 +223,7 @@ class Parser(object):
for condition in conditions:
if 'conditions' in condition:
q = self.parse_conditions(condition['conditions'],
- condition.get('operator', '&'))
+ condition.get('operator', '&'))
else:
q = self.parse_condition(condition)
if isinstance(q, list):
@@ -265,8 +268,7 @@ class Parser(object):
qs = self._model.query
#only include items that have hard metadata
conditions = self.parse_conditions(data.get('query', {}).get('conditions', []),
- data.get('query', {}).get('operator', '&'))
+ data.get('query', {}).get('operator', '&'))
for c in conditions:
qs = qs.filter(c)
- #print(qs)
return qs
diff --git a/oml/server.py b/oml/server.py
index fbae2fc..8d39872 100644
--- a/oml/server.py
+++ b/oml/server.py
@@ -1,16 +1,19 @@
# -*- coding: utf-8 -*-
+import asyncio
import os
import sys
import signal
import time
from tornado.ioloop import IOLoop
-from tornado.web import StaticFileHandler, Application
+import tornado.web
+from tornado.web import Application
from cache import Cache
from item.handlers import EpubHandler, ReaderHandler, FileHandler
from item.handlers import OMLHandler, UploadHandler
+from item.handlers import CropHandler
from item.icons import IconHandler
import db
import node.server
@@ -28,6 +31,12 @@ import logging
logger = logging.getLogger(__name__)
+class StaticFileHandler(tornado.web.StaticFileHandler):
+ def get_content_type(self):
+ if self.request.path.split('?')[0].endswith('.mjs'):
+ return 'application/javascript'
+ return super().get_content_type()
+
class MainHandler(OMLHandler):
def get(self, path):
@@ -59,13 +68,13 @@ def log_request(handler):
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time)
-def shutdown():
+async def shutdown():
state.shutdown = True
if state.tor:
state.tor._shutdown = True
if state.nodes:
logger.debug('shutdown nodes')
- state.nodes.join()
+ await state.nodes.join()
if state.downloads:
logger.debug('shutdown downloads')
state.downloads.join()
@@ -111,11 +120,11 @@ def run():
common_handlers = [
(r'/(favicon.ico)', StaticFileHandler, {'path': settings.static_path}),
- (r'/static/oxjs/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'oxjs')}),
- (r'/static/cbr.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'cbr.js')}),
- (r'/static/epub.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'epub.js')}),
- (r'/static/pdf.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'pdf.js')}),
- (r'/static/txt.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'txt.js')}),
+ (r'/static/oxjs/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'oxjs')}),
+ (r'/static/cbr.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'cbr.js')}),
+ (r'/static/epub.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'epub.js')}),
+ (r'/static/pdf.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'pdf.js')}),
+ (r'/static/txt.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'txt.js')}),
(r'/static/(.*)', StaticFileHandler, {'path': settings.static_path}),
(r'/(.*)/epub/(.*)', EpubHandler),
(r'/(.*?)/reader/', ReaderHandler),
@@ -125,6 +134,7 @@ def run():
(r'/(.*?)/get/', FileHandler, {
'attachment': True
}),
+ (r'/(.*)/2048p(\d*),(\d*),(\d*),(\d*),(\d*).jpg', CropHandler),
(r'/(.*)/(cover|preview)(\d*).jpg', IconHandler),
]
handlers = common_handlers + [
@@ -146,13 +156,12 @@ def run():
http_server.listen(settings.server['port'], settings.server['address'], max_buffer_size=max_buffer_size)
# public server
- '''
- public_port = settings.server.get('public_port')
- public_address = settings.server['public_address']
- if public_port:
- public_server = Application(public_handlers, **options)
- public_server.listen(public_port, public_address)
- '''
+ if settings.preferences.get('enableReadOnlyService'):
+ public_port = settings.server.get('public_port')
+ public_address = settings.server['public_address']
+ if public_port:
+ public_server = Application(public_handlers, **options)
+ public_server.listen(public_port, public_address)
if PID:
with open(PID, 'w') as pid:
@@ -198,10 +207,10 @@ def run():
print('open browser at %s' % url)
logger.debug('Starting OML %s at %s', settings.VERSION, url)
- signal.signal(signal.SIGTERM, shutdown)
+ signal.signal(signal.SIGTERM, lambda _, __: sys.exit(0))
try:
state.main.start()
except:
print('shutting down...')
- shutdown()
+ asyncio.run(shutdown())
diff --git a/oml/settings.py b/oml/settings.py
index 5c06f74..0fad9ee 100644
--- a/oml/settings.py
+++ b/oml/settings.py
@@ -8,15 +8,17 @@ from oml.utils import get_user_id
from oml import fulltext
base_dir = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..'))
+top_dir = os.path.dirname(base_dir)
+
static_path = os.path.join(base_dir, 'static')
-updates_path = os.path.normpath(os.path.join(base_dir, '..', 'updates'))
+updates_path = os.path.normpath(os.path.join(top_dir, 'updates'))
oml_data_path = os.path.join(base_dir, 'config.json')
-data_path = os.path.normpath(os.path.join(base_dir, '..', 'data'))
+data_path = os.path.normpath(os.path.join(top_dir, 'data'))
if not os.path.exists(data_path):
- config_path = os.path.normpath(os.path.join(base_dir, '..', 'config'))
+ config_path = os.path.normpath(os.path.join(top_dir, 'config'))
if os.path.exists(config_path):
data_path = config_path
else:
@@ -24,9 +26,11 @@ if not os.path.exists(data_path):
db_path = os.path.join(data_path, 'data.db')
log_path = os.path.join(data_path, 'debug.log')
-ssl_cert_path = os.path.join(data_path, 'node.ssl.crt')
-ssl_key_path = os.path.join(data_path, 'tor', 'private_key')
+ca_key_path = os.path.join(data_path, 'node.ca.key')
+ca_cert_path = os.path.join(data_path, 'node.ca.crt')
+ssl_cert_path = os.path.join(data_path, 'node.tls.crt')
+ssl_key_path = os.path.join(data_path, 'node.tls.key')
if os.path.exists(oml_data_path):
with open(oml_data_path) as fd:
@@ -57,7 +61,7 @@ for key in server_defaults:
release = pdict(os.path.join(data_path, 'release.json'))
-USER_ID = get_user_id(ssl_key_path, ssl_cert_path)
+USER_ID = get_user_id(ssl_key_path, ssl_cert_path, ca_key_path, ca_cert_path)
OML_UPDATE_KEY = 'K55EZpPYbP3X+3mA66cztlw1sSaUMqGwfTDKQyP2qOU'
OML_UPDATE_CERT = '''-----BEGIN CERTIFICATE-----
@@ -96,3 +100,5 @@ if not FULLTEXT_SUPPORT:
config['itemKeys'] = [k for k in config['itemKeys'] if k['id'] != 'fulltext']
DB_VERSION = 20
+
+ID_LENGTH = 56
diff --git a/oml/setup.py b/oml/setup.py
index e676292..ee62cb6 100644
--- a/oml/setup.py
+++ b/oml/setup.py
@@ -423,6 +423,27 @@ def upgrade_db(old, new=None):
)''')
run_sql('CREATE UNIQUE INDEX IF NOT EXISTS user_metadata_index ON user_metadata(id, user_id)')
run_sql('CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)')
+ if old <= '20240608-1469-647a8b9':
+ old_hostname = os.path.join(settings.data_path, 'tor/hostname')
+ if os.path.exists(old_hostname):
+ with open(old_hostname) as fd:
+ OLD_USER_ID = fd.read().split('.')[0]
+ statements = [
+ "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
+ "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'",
+ "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'",
+ "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
+ ]
+ run_sql([
+ sql.format(oid=OLD_USER_ID, nid=settings.USER_ID)
+ for sql in statements
+ ])
+ for ext in ('log', 'db', 'json'):
+ old_log = os.path.join(settings.data_path, 'peers/%s.%s' % (OLD_USER_ID, ext))
+ new_log = os.path.join(settings.data_path, 'peers/%s.%s' % (USER_ID, ext))
+ if os.path.exists(old_log) and not os.path.exists(new_log):
+ os.rename(old_log, new_log)
+
def create_default_lists(user_id=None):
with db.session():
diff --git a/oml/state.py b/oml/state.py
index cc1e804..4dbf2da 100644
--- a/oml/state.py
+++ b/oml/state.py
@@ -16,6 +16,7 @@ websockets = []
uisockets = []
peers = {}
changelog_size = None
+sync_db = False
activity = {}
removepeer = {}
diff --git a/oml/tasks.py b/oml/tasks.py
index 93d3b45..d506853 100644
--- a/oml/tasks.py
+++ b/oml/tasks.py
@@ -36,16 +36,15 @@ class Tasks(Thread):
def run(self):
self.load_tasks()
- if time.mktime(time.gmtime()) - settings.server.get('last_scan', 0) > 24*60*60:
- settings.server['last_scan'] = time.mktime(time.gmtime())
+ if (time.mktime(time.gmtime()) - settings.server.get('last_scan', 0)) > 24*60*60:
self.queue('scan')
import item.scan
from item.models import sync_metadata, get_preview, get_cover
from user.models import (
- export_list, update_user_peering,
- add_local_info, remove_local_info,
- upload
+ export_list, update_user_peering,
+ add_local_info, remove_local_info,
+ upload
)
shutdown = False
while not shutdown:
@@ -98,6 +97,7 @@ class Tasks(Thread):
def load_tasks(self):
if os.path.exists(self._taskspath):
+ logger.debug('loading tasks')
try:
with open(self._taskspath) as f:
tasks = json.load(f)
diff --git a/oml/tor.py b/oml/tor.py
index 17056ca..830701b 100644
--- a/oml/tor.py
+++ b/oml/tor.py
@@ -22,9 +22,11 @@ import logging
logging.getLogger('stem').setLevel(logging.ERROR)
logger = logging.getLogger(__name__)
+
class TorDaemon(Thread):
installing = False
running = True
+ ended = False
p = None
def __init__(self):
@@ -105,6 +107,8 @@ DirReqStatistics 0
logger.debug(line)
self.p.communicate()
time.sleep(0.5)
+ self.ended = True
+ self.running = False
self.p = None
def kill(self):
@@ -142,6 +146,10 @@ class Tor(object):
logger.debug("Start tor")
self.daemon = TorDaemon()
return self.connect()
+ elif self.daemon.ended:
+ logger.debug("Try starting tor again")
+ self.daemon = TorDaemon()
+ return self.connect()
if not self.daemon.installing:
logger.debug("Failed to connect to tor")
return False
@@ -201,18 +209,20 @@ class Tor(object):
return False
controller = self.controller
if controller.get_version() >= stem.version.Requirement.ADD_ONION:
- with open(settings.ssl_key_path, 'rb') as fd:
- private_key = fd.read()
- key_content = RSA.importKey(private_key).exportKey().decode()
- key_content = ''.join(key_content.strip().split('\n')[1:-1])
+ private_key, public_key = utils.load_pem_key(settings.ca_key_path)
+ key_type, key_content = utils.get_onion_key(private_key)
ports = {9851: settings.server['node_port']}
if settings.preferences.get('enableReadOnlyService'):
ports[80] = settings.server['public_port']
controller.remove_ephemeral_hidden_service(settings.USER_ID)
- response = controller.create_ephemeral_hidden_service(ports,
- key_type='RSA1024', key_content=key_content,
- detached=True)
+ response = controller.create_ephemeral_hidden_service(
+ ports,
+ key_type=key_type, key_content=key_content,
+ detached=True
+ )
if response.is_ok():
+ if response.service_id != settings.USER_ID:
+ logger.error("Something is wrong with tor id %s vs %s", response.service_id, settings.USER_ID)
logger.debug('published node as https://%s.onion:%s',
settings.USER_ID, settings.server_defaults['node_port'])
if settings.preferences.get('enableReadOnlyService'):
@@ -259,65 +269,71 @@ def torbrowser_url(sys_platform=None):
data = read_url(base_url, timeout=3*24*60*60).decode()
versions = []
for r in (
- re.compile('href="(\d\.\d\.\d/)"'),
- re.compile('href="(\d\.\d/)"'),
+ re.compile('href="(\d+\.\d+\.\d+/)"'),
+ re.compile('href="(\d+\.\d+/)"'),
):
versions += r.findall(data)
+ if not versions:
+ return None
current = sorted(versions)[-1]
url = base_url + current
- language = '.*?en'
if sys_platform.startswith('linux'):
if platform.architecture()[0] == '64bit':
- osname = 'linux64'
+ osname = 'linux-x86_64'
else:
- osname = 'linux32'
+ osname = 'linux-x86_32'
ext = 'xz'
elif sys_platform == 'darwin':
- osname = 'osx64'
+ osname = 'macos'
ext = 'dmg'
elif sys_platform == 'win32':
- language = ''
- osname = ''
- ext = 'zip'
+ osname = 'windows-x86_64-portable'
+ ext = 'exe'
else:
logger.debug('no way to get torbrowser url for %s', sys.platform)
return None
- r = re.compile('href="(.*?{osname}{language}.*?{ext})"'.format(osname=osname,language=language,ext=ext))
- torbrowser = sorted(r.findall(read_url(url).decode()))[-1]
+ data = read_url(url).decode()
+ r = re.compile('href="(.*?{osname}.*?{ext})"'.format(osname=osname, ext=ext)).findall(data)
+ if not r:
+ r = re.compile('href="(.*?{ext})"'.format(ext=ext)).findall(data)
+ torbrowser = sorted(r)[-1]
url += torbrowser
return url
def get_tor():
if sys.platform == 'darwin':
for path in (
- os.path.join(settings.base_dir, '..', 'platform_darwin64', 'tor', 'tor'),
+ os.path.join(settings.top_dir, 'platform_darwin64', 'tor', 'tor'),
'/Applications/TorBrowser.app/TorBrowser/Tor/tor',
- os.path.join(settings.base_dir, '..', 'tor', 'TorBrowser.app', 'TorBrowser', 'Tor', 'tor')
+ os.path.join(settings.top_dir, 'tor', 'TorBrowser.app', 'TorBrowser', 'Tor', 'tor')
):
if os.path.isfile(path) and os.access(path, os.X_OK):
return path
elif sys.platform == 'win32':
paths = [
- os.path.join(settings.base_dir, '..', 'platform_win32', 'tor', 'tor.exe')
+ os.path.join(settings.top_dir, 'platform_win32', 'tor', 'tor.exe')
]
- exe = os.path.join('Tor Browser', 'Browser', 'TorBrowser', 'Tor', 'tor.exe')
- for prefix in (
- os.path.join(os.path.expanduser('~'), 'Desktop'),
- os.path.join('C:', 'Program Files'),
- os.path.join('C:', 'Program Files (x86)'),
+ for exe in (
+ os.path.join('Tor Browser', 'Browser', 'TorBrowser', 'Tor', 'tor.exe'),
+ os.path.join('Tor Browser', 'Browser', 'TorBrowser', 'Tor', 'Tor', 'tor.exe'),
):
- path = os.path.join(prefix, exe)
- paths.append(path)
- paths.append(os.path.join(settings.base_dir, '..', 'tor', 'Tor', 'tor.exe'))
+ for prefix in (
+ os.path.join(os.path.expanduser('~'), 'Desktop'),
+ os.path.join('C:', 'Program Files'),
+ os.path.join('C:', 'Program Files (x86)'),
+ ):
+ path = os.path.join(prefix, exe)
+ paths.append(path)
+ paths.append(os.path.join(settings.top_dir, 'tor', 'Tor', 'tor.exe'))
for path in paths:
if os.path.isfile(path) and os.access(path, os.X_OK):
return os.path.normpath(path)
elif sys.platform.startswith('linux'):
for path in (
- os.path.join(settings.base_dir, '..', 'platform_linux64', 'tor', 'tor'),
- os.path.join(settings.base_dir, '..', 'platform_linux32', 'tor', 'tor'),
- os.path.join(settings.base_dir, '..', 'platform_linux_armv7l', 'tor', 'tor'),
- os.path.join(settings.base_dir, '..', 'platform_linux_aarch64', 'tor', 'tor'),
+ os.path.join(settings.top_dir, 'platform_linux64', 'tor', 'tor'),
+ os.path.join(settings.top_dir, 'platform_linux32', 'tor', 'tor'),
+ os.path.join(settings.top_dir, 'platform_linux_armv7l', 'tor', 'tor'),
+ os.path.join(settings.top_dir, 'platform_linux_aarch64', 'tor', 'tor'),
):
if os.path.isfile(path) and os.access(path, os.X_OK):
return os.path.normpath(path)
@@ -331,9 +347,12 @@ def get_tor():
path = os.path.join(base, 'TorBrowser', 'Tor', 'tor')
if os.path.isfile(path) and os.access(path, os.X_OK):
return path
+ path = os.path.join(base, 'TorBrowser', 'Tor', 'Tor', 'tor')
+ if os.path.isfile(path) and os.access(path, os.X_OK):
+ return path
except:
pass
- local_tor = os.path.normpath(os.path.join(settings.base_dir, '..',
+ local_tor = os.path.normpath(os.path.join(settings.top_dir,
'tor', 'tor-browser_en-US', 'Browser', 'TorBrowser', 'Tor', 'tor'))
if os.path.exists(local_tor):
return local_tor
@@ -342,7 +361,7 @@ def get_tor():
def get_geoip(tor):
geo = []
for tordir in (
- os.path.normpath(os.path.join(settings.base_dir, '..', 'platform', 'tor')),
+ os.path.normpath(os.path.join(settings.top_dir, 'platform', 'tor')),
os.path.join(os.path.dirname(os.path.dirname(tor)), 'Data', 'Tor')
):
gepipfile = os.path.join(tordir, 'geoip')
@@ -364,7 +383,7 @@ def install_tor():
logger.debug('found existing tor installation')
return
url = torbrowser_url()
- target = os.path.normpath(os.path.join(settings.base_dir, '..', 'tor'))
+ target = os.path.normpath(os.path.join(settings.top_dir, 'tor'))
if url:
logger.debug('downloading and installing tor')
if sys.platform.startswith('linux'):
diff --git a/oml/tor_request.py b/oml/tor_request.py
index 0fd6455..d5b819f 100644
--- a/oml/tor_request.py
+++ b/oml/tor_request.py
@@ -32,7 +32,7 @@ def getaddrinfo(*args):
return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
def create_tor_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
- source_address=None):
+ source_address=None):
host, port = address
err = None
af = socket.AF_INET
@@ -40,6 +40,9 @@ def create_tor_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
proto = 6
sa = address
sock = None
+
+ logger.debug('make tor connection to: %s', address)
+
try:
sock = socks.socksocket(af, socktype, proto)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
@@ -66,27 +69,30 @@ class TorHTTPSConnection(http.client.HTTPSConnection):
def __init__(self, host, port=None, service_id=None, check_hostname=None, context=None, **kwargs):
self._service_id = service_id
if self._service_id:
- if hasattr(ssl, '_create_default_https_context'):
- context = ssl._create_default_https_context()
- elif hasattr(ssl, '_create_stdlib_context'):
- context = ssl._create_stdlib_context()
+ context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
if context:
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
- # tor keys are still 1024 bit, debian started to require 2048 by default,
- # try to lower requirements to 1024 if needed
- try:
- context.load_cert_chain(settings.ssl_cert_path, settings.ssl_key_path)
- except ssl.SSLError:
- context.set_ciphers('DEFAULT@SECLEVEL=1')
- context.load_cert_chain(settings.ssl_cert_path, settings.ssl_key_path)
+ context.load_cert_chain(settings.ssl_cert_path, settings.ssl_key_path)
context.load_default_certs()
+ context.set_alpn_protocols(['http/1.1'])
+ context.post_handshake_auth = True
http.client.HTTPSConnection.__init__(self, host, port,
check_hostname=check_hostname, context=context, **kwargs)
if not is_local(host):
self._create_connection = create_tor_connection
+ def get_service_id_cert(self):
+ for cert in self.sock._sslobj.get_verified_chain():
+ info = cert.get_info()
+ subject = info.get("subject")
+ if subject:
+ CN = subject[0][0][1]
+ if CN == self._service_id:
+ cert = cert.public_bytes()
+ return cert
+
def _check_service_id(self, cert):
service_id = get_service_id(cert=cert)
if service_id != self._service_id:
@@ -96,11 +102,9 @@ class TorHTTPSConnection(http.client.HTTPSConnection):
def connect(self):
http.client.HTTPSConnection.connect(self)
if self._service_id:
- cert = self.sock.getpeercert(binary_form=True)
+ cert = self.get_service_id_cert()
if not self._check_service_id(cert):
- raise InvalidCertificateException(self._service_id, cert,
- 'service_id mismatch')
- #logger.debug('CIPHER %s VERSION %s', self.sock.cipher(), self.sock.ssl_version)
+ raise InvalidCertificateException(self._service_id, cert, 'service_id mismatch')
class TorHTTPSHandler(urllib.request.HTTPSHandler):
def __init__(self, debuglevel=0, context=None, check_hostname=None, service_id=None):
diff --git a/oml/user/api.py b/oml/user/api.py
index 53448b3..a1c9a8f 100644
--- a/oml/user/api.py
+++ b/oml/user/api.py
@@ -411,7 +411,7 @@ def requestPeering(data):
nickname (optional)
}
'''
- if len(data.get('id', '')) != 16:
+ if len(data.get('id', '')) != settings.ID_LENGTH:
logger.debug('invalid user id')
return {}
u = models.User.get_or_create(data['id'])
@@ -434,7 +434,7 @@ def acceptPeering(data):
message
}
'''
- if len(data.get('id', '')) != 16:
+ if len(data.get('id', '')) != settings.ID_LENGTH:
logger.debug('invalid user id')
return {}
logger.debug('acceptPeering... %s', data)
@@ -453,8 +453,8 @@ def rejectPeering(data):
message
}
'''
- if len(data.get('id', '')) not in (16, 43):
- logger.debug('invalid user id')
+ if len(data.get('id', '')) not in (16, 43, 56):
+ logger.debug('invalid user id: %s', data)
return {}
u = models.User.get_or_create(data['id'])
u.info['message'] = data.get('message', '')
@@ -471,8 +471,8 @@ def removePeering(data):
message
}
'''
- if len(data.get('id', '')) not in (16, 43):
- logger.debug('invalid user id')
+ if len(data.get('id', '')) not in (16, 43, 56):
+ logger.debug('invalid user id: %s', data)
return {}
u = models.User.get(data['id'], for_update=True)
if u:
@@ -488,8 +488,8 @@ def cancelPeering(data):
takes {
}
'''
- if len(data.get('id', '')) != 16:
- logger.debug('invalid user id')
+ if len(data.get('id', '')) != settings.ID_LENGTH:
+ logger.debug('invalid user id: %s', data)
return {}
u = models.User.get_or_create(data['id'])
u.info['message'] = data.get('message', '')
diff --git a/oml/user/models.py b/oml/user/models.py
index ffe60f5..bb737a4 100644
--- a/oml/user/models.py
+++ b/oml/user/models.py
@@ -27,7 +27,7 @@ class User(db.Model):
created = sa.Column(sa.DateTime())
modified = sa.Column(sa.DateTime())
- id = sa.Column(sa.String(43), primary_key=True)
+ id = sa.Column(sa.String(128), primary_key=True)
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
nickname = sa.Column(sa.String(256), index=True)
@@ -256,7 +256,7 @@ class List(db.Model):
type = sa.Column(sa.String(64))
_query = sa.Column('query', MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
- user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id'))
+ user_id = sa.Column(sa.String(128), sa.ForeignKey('user.id'))
user = sa.orm.relationship('User', backref=sa.orm.backref('lists', lazy='dynamic'))
items = sa.orm.relationship('Item', secondary=list_items,
@@ -456,7 +456,7 @@ class Metadata(db.Model):
id = sa.Column(sa.Integer(), primary_key=True)
item_id = sa.Column(sa.String(32))
- user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id'))
+ user_id = sa.Column(sa.String(128), sa.ForeignKey('user.id'))
data_hash = sa.Column(sa.String(40), index=True)
data = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
diff --git a/oml/utils.py b/oml/utils.py
index ad2b65a..c8477c0 100644
--- a/oml/utils.py
+++ b/oml/utils.py
@@ -5,6 +5,7 @@ from datetime import datetime
from io import StringIO, BytesIO
from PIL import Image, ImageFile
import base64
+import functools
import hashlib
import json
import os
@@ -17,19 +18,26 @@ import time
import unicodedata
import ox
+import OpenSSL.crypto
from OpenSSL.crypto import (
- load_privatekey, load_certificate,
- dump_privatekey, dump_certificate,
- FILETYPE_ASN1, FILETYPE_PEM, PKey, TYPE_RSA,
- X509, X509Extension
+ dump_certificate,
+ dump_privatekey,
+ FILETYPE_PEM,
+ load_certificate,
+ load_privatekey,
+ PKey,
+ TYPE_RSA,
+ X509,
+ X509Extension
)
-from Crypto.PublicKey import RSA
-from Crypto.Util.asn1 import DerSequence
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import ed25519
from meta.utils import normalize_isbn, find_isbns, get_language, to_isbn13
from win32utils import get_short_path_name
+
import logging
logging.getLogger('PIL').setLevel(logging.ERROR)
logger = logging.getLogger(__name__)
@@ -92,7 +100,7 @@ def resize_image(data, width=None, size=None):
height = max(height, 1)
if width < source_width:
- resize_method = Image.ANTIALIAS
+ resize_method = Image.LANCZOS
else:
resize_method = Image.BICUBIC
output = source.resize((width, height), resize_method)
@@ -119,78 +127,157 @@ def get_position_by_id(list, key):
return i
return -1
-def get_user_id(private_key, cert_path):
- if os.path.exists(private_key):
- with open(private_key) as fd:
- key = load_privatekey(FILETYPE_PEM, fd.read())
- if key.bits() != 1024:
- os.unlink(private_key)
+def sign_cert(cert, key):
+ # pyOpenSSL sign api does not allow NULL hash
+ # return cert.sign(key, None)
+ return OpenSSL.crypto._lib.X509_sign(cert._x509, key._pkey, OpenSSL.crypto._ffi.NULL)
+
+def load_pem_key(pem):
+ with open(pem) as fd:
+ ca_key_pem = fd.read()
+ key = load_privatekey(FILETYPE_PEM, ca_key_pem)
+ if key.bits() != 256:
+ raise Exception("Invalid key %s" % pem)
+ key = key.to_cryptography_key()
+ private_key = key.private_bytes_raw()
+ public_key = key.public_key().public_bytes_raw()
+ return private_key, public_key
+
+
+def expand_private_key(secret_key) -> bytes:
+ hash = hashlib.sha512(secret_key).digest()
+ hash = bytearray(hash)
+ hash[0] &= 248
+ hash[31] &= 127
+ hash[31] |= 64
+ return bytes(hash)
+
+def get_onion(pubkey):
+ version_byte = b"\x03"
+ checksum_str = ".onion checksum".encode()
+ checksum = hashlib.sha3_256(checksum_str + pubkey + version_byte).digest()[:2]
+ return base64.b32encode(pubkey + checksum + version_byte).decode().lower()
+
+def get_onion_key(private_key):
+ onion_key = expand_private_key(private_key)
+ key_type = 'ED25519-V3'
+ key_content = base64.encodebytes(onion_key).decode().strip().replace('\n', '')
+ return key_type, key_content
+
+def get_user_id(key_path, cert_path, ca_key_path, ca_cert_path):
+ if os.path.exists(ca_key_path):
+ try:
+ private_key, public_key = load_pem_key(ca_key_path)
+ except:
+ os.unlink(ca_key_path)
else:
- user_id = get_service_id(private_key)
- if not os.path.exists(private_key):
- if os.path.exists(cert_path):
- os.unlink(cert_path)
- folder = os.path.dirname(private_key)
- if not os.path.exists(folder):
- os.makedirs(folder)
- os.chmod(folder, 0o700)
- key = PKey()
- key.generate_key(TYPE_RSA, 1024)
- with open(private_key, 'wb') as fd:
- os.chmod(private_key, 0o600)
- fd.write(dump_privatekey(FILETYPE_PEM, key))
- os.chmod(private_key, 0o400)
- user_id = get_service_id(private_key)
- if not os.path.exists(cert_path) or \
- (datetime.now() - datetime.fromtimestamp(os.path.getmtime(cert_path))).days > 60:
+ user_id = get_onion(public_key)
+
+ if not os.path.exists(ca_key_path):
+ private_key = ed25519.Ed25519PrivateKey.generate()
+ private_bytes = private_key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.PKCS8,
+ encryption_algorithm=serialization.NoEncryption()
+ )
+ with open(ca_key_path, 'wb') as fd:
+ fd.write(private_bytes)
+
+ public_key = private_key.public_key().public_bytes_raw()
+ user_id = get_onion(public_key)
+
+ if not os.path.exists(ca_cert_path) or \
+ (datetime.now() - datetime.fromtimestamp(os.path.getmtime(ca_cert_path))).days > 5*365:
+ with open(ca_key_path, 'rb') as key_file:
+ key_data = key_file.read()
+ cakey = load_privatekey(FILETYPE_PEM, key_data)
ca = X509()
ca.set_version(2)
ca.set_serial_number(1)
ca.get_subject().CN = user_id
ca.gmtime_adj_notBefore(0)
- ca.gmtime_adj_notAfter(90 * 24 * 60 * 60)
+ ca.gmtime_adj_notAfter(10 * 356 * 24 * 60 * 60)
ca.set_issuer(ca.get_subject())
- ca.set_pubkey(key)
+ ca.set_pubkey(cakey)
ca.add_extensions([
- X509Extension(b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
- X509Extension(b"nsCertType", True, b"sslCA"),
+ X509Extension(b"basicConstraints", False, b"CA:TRUE"),
+ X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"),
+ X509Extension(
+ b"subjectKeyIdentifier", False, b"hash", subject=ca
+ ),
+ ])
+ ca.add_extensions([
+ X509Extension(
+ b"authorityKeyIdentifier", False, b"keyid:always", issuer=ca
+ )
+ ])
+
+ sign_cert(ca, cakey)
+
+ with open(ca_cert_path, 'wb') as fd:
+ fd.write(dump_certificate(FILETYPE_PEM, ca))
+
+ if os.path.exists(cert_path):
+ os.unlink(cert_path)
+ if os.path.exists(key_path):
+ os.unlink(key_path)
+ else:
+ with open(ca_cert_path) as fd:
+ ca = load_certificate(FILETYPE_PEM, fd.read())
+ with open(ca_key_path) as fd:
+ cakey = load_privatekey(FILETYPE_PEM, fd.read())
+
+
+ # create RSA intermediate certificate since clients don't quite like Ed25519 yet
+ if not os.path.exists(cert_path) or \
+ (datetime.now() - datetime.fromtimestamp(os.path.getmtime(cert_path))).days > 60:
+
+ key = PKey()
+ key.generate_key(TYPE_RSA, 2048)
+
+ cert = X509()
+ cert.set_version(2)
+ cert.set_serial_number(2)
+ cert.get_subject().CN = user_id + ".onion"
+ cert.gmtime_adj_notBefore(0)
+ cert.gmtime_adj_notAfter(90 * 24 * 60 * 60)
+ cert.set_issuer(ca.get_subject())
+ cert.set_pubkey(key)
+ subject_alt_names = b"DNS: %s.onion" % user_id.encode()
+ cert.add_extensions([
+ X509Extension(b"basicConstraints", True, b"CA:FALSE"),
X509Extension(b"extendedKeyUsage", True,
b"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC"),
X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"),
X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=ca),
+ X509Extension(b"subjectAltName", critical=True, value=subject_alt_names),
])
- ca.sign(key, "sha256")
+ sign_cert(cert, cakey)
with open(cert_path, 'wb') as fd:
+ fd.write(dump_certificate(FILETYPE_PEM, cert))
fd.write(dump_certificate(FILETYPE_PEM, ca))
+ with open(key_path, 'wb') as fd:
+ fd.write(dump_privatekey(FILETYPE_PEM, key))
return user_id
+
def get_service_id(private_key_file=None, cert=None):
'''
service_id is the first half of the sha1 of the rsa public key encoded in base32
'''
if private_key_file:
- with open(private_key_file, 'rb') as fd:
- private_key = fd.read()
- public_key = RSA.importKey(private_key).publickey().exportKey('DER')[22:]
- # compute sha1 of public key and encode first half in base32
- service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode()
- '''
- # compute public key from priate key and export in DER format
- # ignoring the SPKI header(22 bytes)
- key = load_privatekey(FILETYPE_PEM, private_key)
- cert = X509()
- cert.set_pubkey(key)
- public_key = dump_privatekey(FILETYPE_ASN1, cert.get_pubkey())[22:]
- # compute sha1 of public key and encode first half in base32
- service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode()
- '''
+ with open(private_key_file, 'rb') as key_file:
+ key_type, key_content = key_file.read().split(b':', 1)
+ private_key = base64.decodebytes(key_content)
+ public_key = Ed25519().public_key_from_hash(private_key)
+ service_id = get_onion(public_key)
elif cert:
- # compute sha1 of public key and encode first half in base32
- key = load_certificate(FILETYPE_ASN1, cert).get_pubkey()
- pub_der = DerSequence()
- pub_der.decode(dump_privatekey(FILETYPE_ASN1, key))
- public_key = RSA.construct((pub_der._seq[1], pub_der._seq[2])).exportKey('DER')[22:]
- service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode()
+ cert_ = load_certificate(FILETYPE_PEM, cert)
+ key = cert_.get_pubkey()
+ public_key = key.to_cryptography_key().public_bytes_raw()
+ service_id = get_onion(public_key)
+ else:
+ service_id = None
return service_id
def update_dict(root, data):
@@ -398,7 +485,7 @@ def check_pidfile(pid):
def ctl(*args):
import settings
if sys.platform == 'win32':
- platform_win32 = os.path.normpath(os.path.join(settings.base_dir, '..', 'platform_win32'))
+ platform_win32 = os.path.normpath(os.path.join(settings.top_dir, 'platform_win32'))
python = os.path.join(platform_win32, 'pythonw.exe')
cmd = [python, 'oml'] + list(args)
startupinfo = subprocess.STARTUPINFO()
@@ -495,3 +582,36 @@ def iexists(path):
def same_path(f1, f2):
return unicodedata.normalize('NFC', f1) == unicodedata.normalize('NFC', f2)
+
+def time_cache(max_age, maxsize=128, typed=False):
+ def _decorator(fn):
+ @functools.lru_cache(maxsize=maxsize, typed=typed)
+ def _new(*args, __time_salt, **kwargs):
+ return fn(*args, **kwargs)
+
+ @functools.wraps(fn)
+ def _wrapped(*args, **kwargs):
+ return _new(*args, **kwargs, __time_salt=int(time.time() / max_age))
+
+ return _wrapped
+
+ return _decorator
+
+def migrate_userid(old_id, new_id):
+ from db import run_sql
+ import settings
+ statements = [
+ "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
+ "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'",
+ "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'",
+ "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
+ ]
+ run_sql([
+ sql.format(oid=old_id, nid=new_id)
+ for sql in statements
+ ])
+ for ext in ('log', 'db', 'json'):
+ old_file = os.path.join(settings.data_path, 'peers/%s.%s' % (old_id, ext))
+ new_file = os.path.join(settings.data_path, 'peers/%s.%s' % (new_id, ext))
+ if os.path.exists(old_file) and not os.path.exists(new_file):
+ os.rename(old_file, new_file)
diff --git a/requirements-shared.txt b/requirements-shared.txt
index 3b4600f..6e03575 100644
--- a/requirements-shared.txt
+++ b/requirements-shared.txt
@@ -2,7 +2,7 @@ requests==2.21.0
chardet
html5lib
#ox>=2.0.666
-git+http://git.0x2620.org/python-ox.git#egg=python-ox
+git+https://code.0x2620.org/0x2620/python-ox.git#egg=ox
python-stdnum==1.2
PyPDF2==1.25.1
pysocks
diff --git a/requirements.txt b/requirements.txt
index 16e5d69..0dec41d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,9 +1,9 @@
lxml
simplejson
ed25519>=1.4
-SQLAlchemy==1.0.12
+SQLAlchemy==1.4.46
pyopenssl>=0.15
-pyCrypto>=2.6.1
+pycryptodome
pillow
netifaces
-tornado==5.1.1
+tornado==6.0.3
diff --git a/static/css/oml.css b/static/css/oml.css
index 375341b..dabc0a9 100644
--- a/static/css/oml.css
+++ b/static/css/oml.css
@@ -10,6 +10,10 @@
font-size: 14px;
line-height: 21px;
}
+.OMLQuote img {
+ max-width: 100%;
+ margin: auto;
+}
.OMLAnnotation .OMLQuoteBackground {
position: absolute;
diff --git a/static/html/pdf.html b/static/html/pdf.html
index 36e1a60..706041d 100644
--- a/static/html/pdf.html
+++ b/static/html/pdf.html
@@ -1,4 +1,4 @@
-
+
-
-
-
-
-
-
+
-
+
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -75,119 +81,195 @@ See https://github.com/adobe-type-tools/cmap-resources
+
+
-
+
-
+
+
+
-
-
+
-
+
-
-
-
+
+
+
+
-
+
+
+
+
+
+ Highlight color
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Add image
+
+
+
+
-
- Presentation Mode
+
+ Open
-
- Open
+
+ Print
-
- Print
+
+ Save
-
- Download
+
+
+
+ Presentation Mode
-
- Current View
+
+ Current Page
-
+
-
- Go to First Page
+
+ Go to First Page
-
- Go to Last Page
+
+ Go to Last Page
-
- Rotate Clockwise
+
+ Rotate Clockwise
-
- Rotate Counterclockwise
+
+ Rotate Counterclockwise
-
- Text Selection Tool
-
-
- Hand Tool
-
+