diff --git a/ctl b/ctl index 6e46c91..74d9d76 100755 --- a/ctl +++ b/ctl @@ -1,5 +1,6 @@ #!/usr/bin/env bash NAME="openmedialibrary" +PID="/tmp/$NAME.$USER.pid" cd "`dirname "$0"`" if [ -e oml ]; then @@ -24,7 +25,16 @@ else mv "$BASE/config/release.json" "$BASE/data/release.json" fi fi -PID="$DATA/$NAME.pid" +if [ ! -e "$PID" ]; then +if [ -e "$DATA/tor/hostname" ]; then + onion=$(cat "$DATA/tor/hostname") + id=${onion/.onion/} + PID="/tmp/$NAME.$USER.$id.pid" +fi +fi +if [ ! -e "$PID" ]; then + PID="$DATA/$NAME.pid" +fi PLATFORM_PYTHON=3.4 SHARED_PYTHON=3.7 @@ -34,7 +44,7 @@ else if [ $SYSTEM == "Linux" ]; then if [ $PLATFORM == "x86_64" ]; then ARCH=64 - PLATFORM_PYTHON=3.11 + PLATFORM_PYTHON=3.7 else ARCH=32 fi @@ -64,10 +74,6 @@ PATH="$PLATFORM_ENV/bin:$PATH" SHARED_ENV="$BASE/platform/Shared" export SHARED_ENV -if [ -e "$SHARED_ENV/etc/openssl/openssl.cnf" ]; then - export OPENSSL_CONF="$SHARED_ENV/etc/openssl/openssl.cnf" -fi - PATH="$SHARED_ENV/bin:$PATH" export PATH diff --git a/oml/changelog.py b/oml/changelog.py index 025611a..f641fd4 100644 --- a/oml/changelog.py +++ b/oml/changelog.py @@ -297,7 +297,7 @@ class Changelog(db.Model): return True def action_addpeer(self, user, timestamp, peerid, username): - if len(peerid) == settings.ID_LENGTH: + if len(peerid) == 16: from user.models import User if not 'users' in user.info: user.info['users'] = {} @@ -318,7 +318,7 @@ class Changelog(db.Model): return True def action_editpeer(self, user, timestamp, peerid, data): - if len(peerid) == settings.ID_LENGTH: + if len(peerid) == 16: from user.models import User peer = User.get_or_create(peerid) update = False @@ -466,7 +466,7 @@ class Changelog(db.Model): elif op == 'addpeer': peer_id = data[1] username = data[2] - if len(peer_id) == settings.ID_LENGTH: + if len(peer_id) == 16: peer = User.get(peer_id) if peer: username = peer.json().get('username', 'anonymous') diff --git a/oml/commands.py b/oml/commands.py index c252666..25e1fa8 100644 --- a/oml/commands.py +++ b/oml/commands.py @@ -154,7 +154,7 @@ def command_update_static(*args): import utils setup.create_db() old_oxjs = os.path.join(settings.static_path, 'oxjs') - oxjs = os.path.join(settings.top_dir, 'oxjs') + oxjs = os.path.join(settings.base_dir, '..', 'oxjs') if os.path.exists(old_oxjs) and not os.path.exists(oxjs): shutil.move(old_oxjs, oxjs) if not os.path.exists(oxjs): @@ -163,7 +163,7 @@ def command_update_static(*args): os.system('cd "%s" && git pull' % oxjs) r('python3', os.path.join(oxjs, 'tools', 'build', 'build.py'), '-nogeo') utils.update_static() - reader = os.path.join(settings.top_dir, 'reader') + reader = os.path.join(settings.base_dir, '..', 'reader') if not os.path.exists(reader): r('git', 'clone', '--depth', '1', 'https://code.0x2620.org/0x2620/openmedialibrary_reader.git', reader) elif os.path.exists(os.path.join(reader, '.git')): diff --git a/oml/item/api.py b/oml/item/api.py index db9d102..d0c04a5 100644 --- a/oml/item/api.py +++ b/oml/item/api.py @@ -5,7 +5,7 @@ import os import unicodedata from sqlalchemy.orm import load_only -from sqlalchemy.sql.expression import text, column +from sqlalchemy.sql.expression import text from sqlalchemy import func from oxtornado import actions @@ -58,13 +58,8 @@ def find(data): qs = models.Find.query.filter_by(key=q['group']) if items is None or items.first(): if items is not None: - ids = [i[0] for i in items.with_entities(column('id'))] - qs = qs.filter(models.Find.item_id.in_(ids)) - values = list(qs.values( - column('value'), - column('findvalue'), - 
column('sortvalue'), - )) + qs = qs.filter(models.Find.item_id.in_(items)) + values = list(qs.values('value', 'findvalue', 'sortvalue')) for f in values: value = f[0] findvalue = f[1] @@ -172,7 +167,7 @@ actions.register(edit, cache=False) def remove(data): ''' takes { - ids + id } ''' if 'ids' in data and data['ids']: diff --git a/oml/item/handlers.py b/oml/item/handlers.py index 901e579..19d6460 100644 --- a/oml/item/handlers.py +++ b/oml/item/handlers.py @@ -17,8 +17,7 @@ import db import settings import tornado.web import tornado.gen -from concurrent.futures import ThreadPoolExecutor -from tornado.concurrent import run_on_executor +import tornado.concurrent from oxtornado import json_dumps, json_response @@ -28,8 +27,6 @@ import state import logging logger = logging.getLogger(__name__) -MAX_WORKERS = 4 - class OptionalBasicAuthMixin(object): class SendChallenge(Exception): @@ -93,23 +90,6 @@ class EpubHandler(OMLHandler): self.set_header('Content-Type', content_type) self.write(z.read(filename)) -class CropHandler(OMLHandler): - - def get(self, id, page, left, top, right, bottom): - from media.pdf import crop - with db.session(): - item = Item.get(id) - path = item.get_path() - data = crop(path, page, left, top, right, bottom) - if path and data: - self.set_header('Content-Type', 'image/jpeg') - self.set_header('Content-Length', str(len(data))) - self.write(data) - return - self.set_status(404) - return - - def serve_static(handler, path, mimetype, include_body=True, disposition=None): handler.set_header('Content-Type', mimetype) size = os.stat(path).st_size @@ -197,7 +177,6 @@ class ReaderHandler(OMLHandler): return serve_static(self, path, 'text/html') class UploadHandler(OMLHandler): - executor = ThreadPoolExecutor(max_workers=MAX_WORKERS) def initialize(self, context=None): self._context = context @@ -205,60 +184,7 @@ class UploadHandler(OMLHandler): def get(self): self.write('use POST') - @run_on_executor - def save_files(self, request): - listname = request.arguments.get('list', None) - if listname: - listname = listname[0] - if isinstance(listname, bytes): - listname = listname.decode('utf-8') - with self._context(): - prefs = settings.preferences - ids = [] - for upload in request.files.get('files', []): - filename = upload.filename - id = get_id(data=upload.body) - ids.append(id) - file = File.get(id) - if not file or not os.path.exists(file.fullpath()): - logger.debug('add %s to library', id) - prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep) - prefix_imported = os.path.join(prefix_books, '.import' + os.sep) - ox.makedirs(prefix_imported) - import_name = os.path.join(prefix_imported, filename) - n = 1 - while os.path.exists(import_name): - n += 1 - name, extension = filename.rsplit('.', 1) - if extension == 'kepub': - extension = 'epub' - import_name = os.path.join(prefix_imported, '%s [%d].%s' % (name, n, extension)) - with open(import_name, 'wb') as fd: - fd.write(upload.body) - file = add_file(id, import_name, prefix_books) - file.move() - else: - user = state.user() - if not file.item: - item = Item.get_or_create(id=file.sha1, info=file.info) - file.item_id = item.id - state.db.session.add(file) - state.db.session.commit() - else: - item = file.item - if user not in item.users: - logger.debug('add %s to local user', id) - item.add_user(user) - add_record('additem', item.id, file.info) - add_record('edititem', item.id, item.meta) - item.update() - if listname and ids: - list_ = List.get(settings.USER_ID, listname) - if list_: - 
list_.add_items(ids) - response = json_response({'ids': ids}) - return response - + @tornado.web.asynchronous @tornado.gen.coroutine def post(self): if 'origin' in self.request.headers and self.request.host not in self.request.headers['origin']: @@ -267,7 +193,60 @@ class UploadHandler(OMLHandler): self.write('') return - response = yield self.save_files(self.request) + def save_files(context, request, callback): + listname = request.arguments.get('list', None) + if listname: + listname = listname[0] + if isinstance(listname, bytes): + listname = listname.decode('utf-8') + with context(): + prefs = settings.preferences + ids = [] + for upload in request.files.get('files', []): + filename = upload.filename + id = get_id(data=upload.body) + ids.append(id) + file = File.get(id) + if not file or not os.path.exists(file.fullpath()): + logger.debug('add %s to library', id) + prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep) + prefix_imported = os.path.join(prefix_books, '.import' + os.sep) + ox.makedirs(prefix_imported) + import_name = os.path.join(prefix_imported, filename) + n = 1 + while os.path.exists(import_name): + n += 1 + name, extension = filename.rsplit('.', 1) + if extension == 'kepub': + extension = 'epub' + import_name = os.path.join(prefix_imported, '%s [%d].%s' % (name, n, extension)) + with open(import_name, 'wb') as fd: + fd.write(upload.body) + file = add_file(id, import_name, prefix_books) + file.move() + else: + user = state.user() + if not file.item: + item = Item.get_or_create(id=file.sha1, info=file.info) + file.item_id = item.id + state.db.session.add(file) + state.db.session.commit() + else: + item = file.item + if user not in item.users: + logger.debug('add %s to local user', id) + item.add_user(user) + add_record('additem', item.id, file.info) + add_record('edititem', item.id, item.meta) + item.update() + if listname and ids: + l = List.get(settings.USER_ID, listname) + if l: + l.add_items(ids) + response = json_response({'ids': ids}) + callback(response) + + response = yield tornado.gen.Task(save_files, self._context, self.request) if 'status' not in response: response = json_response(response) response = json_dumps(response) diff --git a/oml/item/models.py b/oml/item/models.py index 61732d1..508f46e 100644 --- a/oml/item/models.py +++ b/oml/item/models.py @@ -322,7 +322,7 @@ class Item(db.Model): def remove_annotations(self): from annotation.models import Annotation - for a in Annotation.query.filter_by(item_id=self.id, user_id=settings.USER_ID): + for a in Annotation.query.filter_by(item_id=self.id, user_id=state.user()): a.add_record('removeannotation') a.delete() @@ -733,13 +733,7 @@ class File(db.Model): return re.sub(r'^\.|\.$|:|/|\?|<|>|\\|\*|\||"', '_', string) prefs = settings.preferences prefix = os.sep.join(os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/').split('/')) - not_item = False - try: - not_item = not self.item - except: - logger.debug('trying to move an item that was just deleted', exc_info=True) - not_item = True - if not_item: + if not self.item: return j = self.item.json(keys=['title', 'author', 'publisher', 'date', 'extension']) diff --git a/oml/item/query.py b/oml/item/query.py index 91be5e1..9683dee 100644 --- a/oml/item/query.py +++ b/oml/item/query.py @@ -23,6 +23,7 @@ def parse(data): if [r for r in query['range'] if not isinstance(r, int)]: logger.error('range must be 2 integers! 
got this: %s', query['range']) query['range'] = [0, 0] + #print data query['qs'] = models.Item.find(data) if 'group' not in query: query['qs'] = order(query['qs'], query['sort']) diff --git a/oml/item/scan.py b/oml/item/scan.py index e4a97a8..78c8fe6 100644 --- a/oml/item/scan.py +++ b/oml/item/scan.py @@ -225,7 +225,6 @@ def run_scan(): missing = ids - library_items if missing: logger.debug('%s items in library without a record', len(missing)) - settings.server['last_scan'] = time.mktime(time.gmtime()) def change_path(old, new): old_icons = os.path.join(old, 'Metadata', 'icons.db') diff --git a/oml/library.py b/oml/library.py index b490e16..eee921e 100644 --- a/oml/library.py +++ b/oml/library.py @@ -153,7 +153,7 @@ class Peer(object): self.info['lists'][name] = list(set(self.info['lists'][name]) - set(ids)) elif action == 'addpeer': peerid, username = args - if len(peerid) == settings.ID_LENGTH: + if len(peerid) == 16: self.info['peers'][peerid] = {'username': username} # fixme, just trigger peer update here from user.models import User @@ -164,7 +164,7 @@ class Peer(object): peer.save() elif action == 'editpeer': peerid, data = args - if len(peerid) == settings.ID_LENGTH: + if len(peerid) == 16: if peerid not in self.info['peers']: self.info['peers'][peerid] = {} for key in ('username', 'contact'): @@ -377,10 +377,6 @@ def sync_db(): from sqlalchemy.orm import load_only import item.models first = True - missing_previews = [] - state.sync_db = True - - #FIXME: why is this loop needed with db.session(): sort_ids = {i.item_id for i in item.models.Sort.query.options(load_only('item_id'))} if sort_ids: @@ -391,11 +387,9 @@ def sync_db(): if first: first = False logger.debug('sync items') - #why? - #i.update(commit=False) - i.update_sort(commit=False) - if i.info.get('mediastate') == 'unavailable': - missing_previews.append(i.id) + i.update(commit=False) + if i.info.get('mediastate') == 'unavailable' and state.tasks: + state.tasks.queue('getpreview', i.id) commit = True #logger.debug('sync:%s', i) t0 = maybe_commit(t0) @@ -403,7 +397,6 @@ def sync_db(): break if commit: state.db.session.commit() - if not first: logger.debug('synced items') if not state.shutdown: @@ -415,12 +408,6 @@ def sync_db(): item.models.Sort.query.filter_by(item_id=None).delete() item.models.Find.query.filter_by(item_id=None).delete() - if missing_previews and state.tasks: - logger.debug('queueing download of %s missing previews', len(missing_previews)) - for id in missing_previews: - state.tasks.queue('getpreview', id) - state.sync_db = False - def cleanup_lists(): import item.models import user.models diff --git a/oml/localnodes.py b/oml/localnodes.py index dcd2751..5178f0a 100644 --- a/oml/localnodes.py +++ b/oml/localnodes.py @@ -1,26 +1,22 @@ # -*- coding: utf-8 -*- -import asyncio import socket import netifaces from zeroconf import ( - ServiceBrowser, ServiceInfo, ServiceStateChange + ServiceBrowser, ServiceInfo, ServiceStateChange, Zeroconf ) -from zeroconf.asyncio import AsyncZeroconf from tornado.ioloop import PeriodicCallback import settings import state from tor_request import get_opener -from utils import time_cache import logging logger = logging.getLogger(__name__) -@time_cache(3) -def can_connect(**data): +def can_connect(data): try: opener = get_opener(data['id']) headers = { @@ -64,120 +60,100 @@ class LocalNodes(dict): return self.setup() self._ip_changed = PeriodicCallback(self._update_if_ip_changed, 60000) - state.main.add_callback(self._ip_changed.start) def setup(self): self.local_ips = 
get_broadcast_interfaces() - self.zeroconf = {ip: AsyncZeroconf(interfaces=[ip]) for ip in self.local_ips} - asyncio.create_task(self.register_service()) + self.zeroconf = {ip: Zeroconf(interfaces=[ip]) for ip in self.local_ips} + self.register_service() self.browse() - async def _update_if_ip_changed(self): + def _update_if_ip_changed(self): local_ips = get_broadcast_interfaces() username = settings.preferences.get('username', 'anonymous') if local_ips != self.local_ips or self.username != username: - await self.close() + self.close() self.setup() def browse(self): self.browser = { - ip: ServiceBrowser(self.zeroconf[ip].zeroconf, self.service_type, handlers=[self.on_service_state_change]) + ip: ServiceBrowser(self.zeroconf[ip], self.service_type, handlers=[self.on_service_state_change]) for ip in self.zeroconf } - async def register_service(self): + def register_service(self): if self.local_info: for local_ip, local_info in self.local_info: - self.zeroconf[local_ip].async_unregister_service(local_info) + self.zeroconf[local_ip].unregister_service(local_info) self.local_info = None local_name = socket.gethostname().partition('.')[0] + '.local.' port = settings.server['node_port'] - self.local_info = [] self.username = settings.preferences.get('username', 'anonymous') desc = { - 'username': self.username, - 'id': settings.USER_ID, + 'username': self.username } - tasks = [] + self.local_info = [] for i, local_ip in enumerate(get_broadcast_interfaces()): if i: - name = '%s [%s].%s' % (desc['username'], i, self.service_type) + name = '%s-%s [%s].%s' % (desc['username'], i+1, settings.USER_ID, self.service_type) else: - name = '%s.%s' % (desc['username'], self.service_type) - - addresses = [socket.inet_aton(local_ip)] - local_info = ServiceInfo(self.service_type, name, port, 0, 0, desc, local_name, addresses=addresses) - task = self.zeroconf[local_ip].async_register_service(local_info) - tasks.append(task) + name = '%s [%s].%s' % (desc['username'], settings.USER_ID, self.service_type) + local_info = ServiceInfo(self.service_type, name, + socket.inet_aton(local_ip), port, 0, 0, desc, local_name) + self.zeroconf[local_ip].register_service(local_info) self.local_info.append((local_ip, local_info)) - await asyncio.gather(*tasks) def __del__(self): self.close() - async def close(self): + def close(self): if self.local_info: - tasks = [] for local_ip, local_info in self.local_info: try: - task = self.zeroconf[local_ip].async_unregister_service(local_info) - tasks.append(task) + self.zeroconf[local_ip].unregister_service(local_info) except: logger.debug('exception closing zeroconf', exc_info=True) self.local_info = None if self.zeroconf: for local_ip in self.zeroconf: try: - task = self.zeroconf[local_ip].async_close() - tasks.append(task) + self.zeroconf[local_ip].close() except: logger.debug('exception closing zeroconf', exc_info=True) self.zeroconf = None for id in list(self): self.pop(id, None) - await asyncio.gather(*tasks) def on_service_state_change(self, zeroconf, service_type, name, state_change): - try: - info = zeroconf.get_service_info(service_type, name) - except zeroconf._exceptions.NotRunningException: + if '[' not in name: + id = name.split('.')[0] + else: + id = name.split('[')[1].split(']')[0] + if id == settings.USER_ID: return - if info and b'id' in info.properties: - id = info.properties[b'id'].decode() - if id == settings.USER_ID: - return - if len(id) != settings.ID_LENGTH: - return - if state_change is ServiceStateChange.Added: - new = id not in self + if state_change is 
ServiceStateChange.Added: + info = zeroconf.get_service_info(service_type, name) + if info: self[id] = { 'id': id, - 'host': socket.inet_ntoa(info.addresses[0]), + 'host': socket.inet_ntoa(info.address), 'port': info.port } if info.properties: for key, value in info.properties.items(): key = key.decode() self[id][key] = value.decode() - logger.debug( - '%s: %s [%s] (%s:%s)', - 'add' if new else 'update', - self[id].get('username', 'anon'), - id, - self[id]['host'], - self[id]['port'] - ) + logger.debug('add: %s [%s] (%s:%s)', self[id].get('username', 'anon'), id, self[id]['host'], self[id]['port']) if state.tasks and id in self: state.tasks.queue('addlocalinfo', self[id]) - elif state_change is ServiceStateChange.Removed: - logger.debug('remove: %s', id) - self.pop(id, None) - if state.tasks: - state.tasks.queue('removelocalinfo', id) + elif state_change is ServiceStateChange.Removed: + logger.debug('remove: %s', id) + self.pop(id, None) + if state.tasks: + state.tasks.queue('removelocalinfo', id) def get_data(self, user_id): data = self.get(user_id) - if data and can_connect(**data): + if data and can_connect(data): return data return None diff --git a/oml/media/epub.py b/oml/media/epub.py index 92ab289..552a134 100644 --- a/oml/media/epub.py +++ b/oml/media/epub.py @@ -68,17 +68,17 @@ def cover(path): if manifest: manifest = manifest[0] if metadata and manifest: - for e in list(metadata): + for e in metadata.getchildren(): if e.tag == '{http://www.idpf.org/2007/opf}meta' and e.attrib.get('name') == 'cover': cover_id = e.attrib['content'] - for e in list(manifest): + for e in manifest.getchildren(): if e.attrib['id'] == cover_id: filename = unquote(e.attrib['href']) filename = normpath(os.path.join(os.path.dirname(opf[0]), filename)) if filename in files: return use(filename) if manifest: - images = [e for e in list(manifest) if 'image' in e.attrib['media-type']] + images = [e for e in manifest.getchildren() if 'image' in e.attrib['media-type']] if images: image_data = [] for e in images: @@ -89,7 +89,7 @@ def cover(path): if image_data: image_data.sort(key=lambda name: z.getinfo(name).file_size) return use(image_data[-1]) - for e in list(manifest): + for e in manifest.getchildren(): if 'html' in e.attrib['media-type']: filename = unquote(e.attrib['href']) filename = normpath(os.path.join(os.path.dirname(opf[0]), filename)) @@ -118,7 +118,7 @@ def info(epub): metadata = info.findall('{http://www.idpf.org/2007/opf}metadata') if metadata: metadata = metadata[0] - for e in list(metadata): + for e in metadata.getchildren(): if e.text and e.text.strip() and e.text not in ('unknown', 'none'): key = e.tag.split('}')[-1] key = { @@ -148,7 +148,7 @@ def info(epub): for point in nav_map.findall('{http://www.daisy.org/z3986/2005/ncx/}navPoint'): label = point.find('{http://www.daisy.org/z3986/2005/ncx/}navLabel') if label: - txt = list(label)[0].text + txt = label.getchildren()[0].text if txt: contents.append(txt) if contents: diff --git a/oml/media/pdf.py b/oml/media/pdf.py index 2f58d9a..75313b2 100644 --- a/oml/media/pdf.py +++ b/oml/media/pdf.py @@ -10,7 +10,6 @@ from glob import glob from datetime import datetime from PyPDF2 import PdfFileReader -from PIL import Image import ox import settings @@ -25,13 +24,13 @@ def cover(pdf): else: return page(pdf, 1) -def ql_cover(pdf, size=1024): +def ql_cover(pdf): tmp = tempfile.mkdtemp() cmd = [ 'qlmanage', '-t', '-s', - str(size), + '1024', '-o', tmp, pdf @@ -49,7 +48,7 @@ def ql_cover(pdf, size=1024): shutil.rmtree(tmp) return data -def page(pdf, 
page, size=1024): +def page(pdf, page): tmp = tempfile.mkdtemp() if sys.platform == 'win32': pdf = get_short_path_name(pdf) @@ -57,8 +56,8 @@ def page(pdf, page, size=1024): 'pdftocairo', pdf, '-jpeg', - '-f', str(page), '-l', str(page), - '-scale-to', str(size), '-cropbox', + '-f', str(page), '-l', str(page), + '-scale-to', '1024', '-cropbox', os.path.join(tmp, 'page') ] if sys.platform == 'win32': @@ -80,46 +79,6 @@ def page(pdf, page, size=1024): shutil.rmtree(tmp) return data -def crop(pdf, page, left, top, right, bottom): - size = 2048 - tmp = tempfile.mkdtemp() - if sys.platform == 'win32': - pdf = get_short_path_name(pdf) - cmd = [ - 'pdftocairo', - pdf, - '-jpeg', - '-f', str(page), '-l', str(page), - '-scale-to', str(size), '-cropbox', - os.path.join(tmp, 'page') - ] - if sys.platform == 'win32': - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - startupinfo.wShowWindow = subprocess.SW_HIDE - p = subprocess.Popen(cmd, close_fds=True, startupinfo=startupinfo) - else: - p = subprocess.Popen(cmd, close_fds=True) - p.wait() - image = glob('%s/*' % tmp) - if image: - image = image[0] - crop = [int(p) for p in (left, top, right, bottom)] - img = Image.open(image).crop(crop) - img.save(image) - with open(image, 'rb') as fd: - data = fd.read() - else: - logger.debug('pdftocairo %s %s', pdf, ' '.join(cmd)) - data = None - shutil.rmtree(tmp) - return data - - - - - - ''' def page(pdf, page): image = tempfile.mkstemp('.jpg')[1] @@ -322,4 +281,3 @@ def extract_isbn(text): isbns = find_isbns(text) if isbns: return isbns[0] - diff --git a/oml/media/txt.py b/oml/media/txt.py index 29a61c7..ca76253 100644 --- a/oml/media/txt.py +++ b/oml/media/txt.py @@ -7,9 +7,8 @@ import tempfile import subprocess def cover(path): - import settings image = tempfile.mkstemp('.jpg')[1] - cmd = ['python3', os.path.join(settings.top_dir, 'reader/txt.js/txt.py'), '-i', path, '-o', image] + cmd = ['python3', '../reader/txt.js/txt.py', '-i', path, '-o', image] p = subprocess.Popen(cmd, close_fds=True) p.wait() with open(image, 'rb') as fd: diff --git a/oml/node/server.py b/oml/node/server.py index ae5e8df..bc2e816 100644 --- a/oml/node/server.py +++ b/oml/node/server.py @@ -12,20 +12,18 @@ import socket import socketserver import time +from Crypto.PublicKey import RSA +from Crypto.Util.asn1 import DerSequence +from OpenSSL.crypto import dump_privatekey, FILETYPE_ASN1 from OpenSSL.SSL import ( - Connection, - Context, - TLSv1_2_METHOD, - VERIFY_CLIENT_ONCE, - VERIFY_FAIL_IF_NO_PEER_CERT, - VERIFY_PEER, + Context, Connection, TLSv1_2_METHOD, + VERIFY_PEER, VERIFY_FAIL_IF_NO_PEER_CERT, VERIFY_CLIENT_ONCE ) import db import settings import state import user -import utils from changelog import changelog_size, changelog_path from websocket import trigger_event @@ -36,15 +34,16 @@ import logging logger = logging.getLogger(__name__) -def get_service_id(connection): - certs = connection.get_peer_cert_chain() - for cert in certs: - if cert.get_signature_algorithm().decode() == "ED25519": - pubkey = cert.get_pubkey() - public_key = pubkey.to_cryptography_key().public_bytes_raw() - service_id = utils.get_onion(public_key) - return service_id - raise Exception("connection with invalid certificate") +def get_service_id(key): + ''' + service_id is the first half of the sha1 of the rsa public key encoded in base32 + ''' + # compute sha1 of public key and encode first half in base32 + pub_der = DerSequence() + pub_der.decode(dump_privatekey(FILETYPE_ASN1, key)) + public_key = 
RSA.construct((pub_der._seq[1], pub_der._seq[2])).exportKey('DER')[22:] + service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode() + return service_id class TLSTCPServer(socketserver.TCPServer): @@ -56,7 +55,7 @@ class TLSTCPServer(socketserver.TCPServer): socketserver.TCPServer.__init__(self, server_address, HandlerClass) ctx = Context(TLSv1_2_METHOD) ctx.use_privatekey_file(settings.ssl_key_path) - ctx.use_certificate_chain_file(settings.ssl_cert_path) + ctx.use_certificate_file(settings.ssl_cert_path) # only allow clients with cert: ctx.set_verify(VERIFY_PEER | VERIFY_CLIENT_ONCE | VERIFY_FAIL_IF_NO_PEER_CERT, self._accept) #ctx.set_verify(VERIFY_PEER | VERIFY_CLIENT_ONCE, self._accept) @@ -112,7 +111,8 @@ class Handler(http.server.SimpleHTTPRequestHandler): return self.do_GET() def do_GET(self): - user_id = get_service_id(self.connection) + #x509 = self.connection.get_peer_certificate() + #user_id = get_service_id(x509.get_pubkey()) if x509 else None import item.models parts = self.path.split('/') if len(parts) == 3 and parts[1] in ('get', 'preview'): @@ -185,7 +185,8 @@ class Handler(http.server.SimpleHTTPRequestHandler): self.end_headers() def _changelog(self): - user_id = get_service_id(self.connection) + x509 = self.connection.get_peer_certificate() + user_id = get_service_id(x509.get_pubkey()) if x509 else None with db.session(): u = user.models.User.get(user_id) if not u: @@ -256,7 +257,8 @@ class Handler(http.server.SimpleHTTPRequestHandler): ping responds public ip ''' - user_id = get_service_id(self.connection) + x509 = self.connection.get_peer_certificate() + user_id = get_service_id(x509.get_pubkey()) if x509 else None content = {} try: diff --git a/oml/nodes.py b/oml/nodes.py index d6c1112..27d7658 100644 --- a/oml/nodes.py +++ b/oml/nodes.py @@ -448,9 +448,6 @@ class Node(Thread): except socket.timeout: logger.debug('timeout %s', url) return False - except urllib.error.URLError as e: - logger.debug('urllib.error.URLError %s', e) - return False except socks.GeneralProxyError: logger.debug('download failed %s', url) return False @@ -601,8 +598,6 @@ class Nodes(Thread): def _pull(self): if not state.sync_enabled or settings.preferences.get('downloadRate') == 0: return - if state.sync_db: - return if state.activity and state.activity.get('activity') == 'import': return self._pulling = True @@ -622,12 +617,12 @@ class Nodes(Thread): node.pullChanges() self._pulling = False - async def join(self): + def join(self): self._q.put(None) for node in list(self._nodes.values()): node.join() if self.local: - await self.local.close() + self.local.close() return super().join(1) def publish_node(): diff --git a/oml/oxtornado.py b/oml/oxtornado.py index 23b4b91..b34cdfe 100644 --- a/oml/oxtornado.py +++ b/oml/oxtornado.py @@ -83,18 +83,9 @@ class ApiHandler(tornado.web.RequestHandler): context = self._context if context is None: context = defaultcontext - action = None - if request.headers.get('Content-Type') == 'application/json': - try: - r = json.loads(request.body.decode()) - action = r['action'] - data = r['data'] - except: - action = None - if not action: - action = request.arguments.get('action', [None])[0].decode('utf-8') - data = request.arguments.get('data', [b'{}'])[0] - data = json.loads(data.decode('utf-8')) if data else {} + action = request.arguments.get('action', [None])[0].decode('utf-8') + data = request.arguments.get('data', [b'{}'])[0] + data = json.loads(data.decode('utf-8')) if data else {} if not action: methods = list(actions.keys()) api 
= [] diff --git a/oml/queryparser.py b/oml/queryparser.py index b6e7dcb..8562218 100644 --- a/oml/queryparser.py +++ b/oml/queryparser.py @@ -2,10 +2,9 @@ from datetime import datetime import unicodedata -import sqlalchemy.orm.exc from sqlalchemy.sql import operators from sqlalchemy.orm import load_only -from sqlalchemy.sql.expression import text, column +from sqlalchemy.sql.expression import text import utils import settings @@ -14,7 +13,6 @@ from fulltext import find_fulltext import logging logger = logging.getLogger(__name__) - def get_operator(op, type='str'): return { 'str': { @@ -136,8 +134,7 @@ class Parser(object): q = get_operator(op)(self._find.findvalue, v) if k != '*': q &= (self._find.key == k) - ids = self._find.query.filter(q).with_entities(column('item_id')) - ids = [i[0] for i in ids] + ids = self._model.query.join(self._find).filter(q).options(load_only('id')) return self.in_ids(ids, exclude) elif k == 'list': nickname, name = v.split(':', 1) @@ -223,7 +220,7 @@ class Parser(object): for condition in conditions: if 'conditions' in condition: q = self.parse_conditions(condition['conditions'], - condition.get('operator', '&')) + condition.get('operator', '&')) else: q = self.parse_condition(condition) if isinstance(q, list): @@ -268,7 +265,8 @@ class Parser(object): qs = self._model.query #only include items that have hard metadata conditions = self.parse_conditions(data.get('query', {}).get('conditions', []), - data.get('query', {}).get('operator', '&')) + data.get('query', {}).get('operator', '&')) for c in conditions: qs = qs.filter(c) + #print(qs) return qs diff --git a/oml/server.py b/oml/server.py index 8d39872..fbae2fc 100644 --- a/oml/server.py +++ b/oml/server.py @@ -1,19 +1,16 @@ # -*- coding: utf-8 -*- -import asyncio import os import sys import signal import time from tornado.ioloop import IOLoop -import tornado.web -from tornado.web import Application +from tornado.web import StaticFileHandler, Application from cache import Cache from item.handlers import EpubHandler, ReaderHandler, FileHandler from item.handlers import OMLHandler, UploadHandler -from item.handlers import CropHandler from item.icons import IconHandler import db import node.server @@ -31,12 +28,6 @@ import logging logger = logging.getLogger(__name__) -class StaticFileHandler(tornado.web.StaticFileHandler): - def get_content_type(self): - if self.request.path.split('?')[0].endswith('.mjs'): - return 'application/javascript' - return super().get_content_type() - class MainHandler(OMLHandler): def get(self, path): @@ -68,13 +59,13 @@ def log_request(handler): log_method("%d %s %.2fms", handler.get_status(), handler._request_summary(), request_time) -async def shutdown(): +def shutdown(): state.shutdown = True if state.tor: state.tor._shutdown = True if state.nodes: logger.debug('shutdown nodes') - await state.nodes.join() + state.nodes.join() if state.downloads: logger.debug('shutdown downloads') state.downloads.join() @@ -120,11 +111,11 @@ def run(): common_handlers = [ (r'/(favicon.ico)', StaticFileHandler, {'path': settings.static_path}), - (r'/static/oxjs/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'oxjs')}), - (r'/static/cbr.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'cbr.js')}), - (r'/static/epub.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'epub.js')}), - (r'/static/pdf.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'pdf.js')}), - (r'/static/txt.js/(.*)', 
StaticFileHandler, {'path': os.path.join(settings.top_dir, 'reader', 'txt.js')}), + (r'/static/oxjs/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'oxjs')}), + (r'/static/cbr.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'cbr.js')}), + (r'/static/epub.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'epub.js')}), + (r'/static/pdf.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'pdf.js')}), + (r'/static/txt.js/(.*)', StaticFileHandler, {'path': os.path.join(settings.base_dir, '..', 'reader', 'txt.js')}), (r'/static/(.*)', StaticFileHandler, {'path': settings.static_path}), (r'/(.*)/epub/(.*)', EpubHandler), (r'/(.*?)/reader/', ReaderHandler), @@ -134,7 +125,6 @@ def run(): (r'/(.*?)/get/', FileHandler, { 'attachment': True }), - (r'/(.*)/2048p(\d*),(\d*),(\d*),(\d*),(\d*).jpg', CropHandler), (r'/(.*)/(cover|preview)(\d*).jpg', IconHandler), ] handlers = common_handlers + [ @@ -156,12 +146,13 @@ def run(): http_server.listen(settings.server['port'], settings.server['address'], max_buffer_size=max_buffer_size) # public server - if settings.preferences.get('enableReadOnlyService'): - public_port = settings.server.get('public_port') - public_address = settings.server['public_address'] - if public_port: - public_server = Application(public_handlers, **options) - public_server.listen(public_port, public_address) + ''' + public_port = settings.server.get('public_port') + public_address = settings.server['public_address'] + if public_port: + public_server = Application(public_handlers, **options) + public_server.listen(public_port, public_address) + ''' if PID: with open(PID, 'w') as pid: @@ -207,10 +198,10 @@ def run(): print('open browser at %s' % url) logger.debug('Starting OML %s at %s', settings.VERSION, url) - signal.signal(signal.SIGTERM, lambda _, __: sys.exit(0)) + signal.signal(signal.SIGTERM, shutdown) try: state.main.start() except: print('shutting down...') - asyncio.run(shutdown()) + shutdown() diff --git a/oml/settings.py b/oml/settings.py index 0fad9ee..5c06f74 100644 --- a/oml/settings.py +++ b/oml/settings.py @@ -8,17 +8,15 @@ from oml.utils import get_user_id from oml import fulltext base_dir = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')) -top_dir = os.path.dirname(base_dir) - static_path = os.path.join(base_dir, 'static') -updates_path = os.path.normpath(os.path.join(top_dir, 'updates')) +updates_path = os.path.normpath(os.path.join(base_dir, '..', 'updates')) oml_data_path = os.path.join(base_dir, 'config.json') -data_path = os.path.normpath(os.path.join(top_dir, 'data')) +data_path = os.path.normpath(os.path.join(base_dir, '..', 'data')) if not os.path.exists(data_path): - config_path = os.path.normpath(os.path.join(top_dir, 'config')) + config_path = os.path.normpath(os.path.join(base_dir, '..', 'config')) if os.path.exists(config_path): data_path = config_path else: @@ -26,11 +24,9 @@ if not os.path.exists(data_path): db_path = os.path.join(data_path, 'data.db') log_path = os.path.join(data_path, 'debug.log') +ssl_cert_path = os.path.join(data_path, 'node.ssl.crt') +ssl_key_path = os.path.join(data_path, 'tor', 'private_key') -ca_key_path = os.path.join(data_path, 'node.ca.key') -ca_cert_path = os.path.join(data_path, 'node.ca.crt') -ssl_cert_path = os.path.join(data_path, 'node.tls.crt') -ssl_key_path = os.path.join(data_path, 'node.tls.key') if os.path.exists(oml_data_path): with open(oml_data_path) 
as fd: @@ -61,7 +57,7 @@ for key in server_defaults: release = pdict(os.path.join(data_path, 'release.json')) -USER_ID = get_user_id(ssl_key_path, ssl_cert_path, ca_key_path, ca_cert_path) +USER_ID = get_user_id(ssl_key_path, ssl_cert_path) OML_UPDATE_KEY = 'K55EZpPYbP3X+3mA66cztlw1sSaUMqGwfTDKQyP2qOU' OML_UPDATE_CERT = '''-----BEGIN CERTIFICATE----- @@ -100,5 +96,3 @@ if not FULLTEXT_SUPPORT: config['itemKeys'] = [k for k in config['itemKeys'] if k['id'] != 'fulltext'] DB_VERSION = 20 - -ID_LENGTH = 56 diff --git a/oml/setup.py b/oml/setup.py index ee62cb6..e676292 100644 --- a/oml/setup.py +++ b/oml/setup.py @@ -423,27 +423,6 @@ def upgrade_db(old, new=None): )''') run_sql('CREATE UNIQUE INDEX IF NOT EXISTS user_metadata_index ON user_metadata(id, user_id)') run_sql('CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)') - if old <= '20240608-1469-647a8b9': - old_hostname = os.path.join(settings.data_path, 'tor/hostname') - if os.path.exists(old_hostname): - with open(old_hostname) as fd: - OLD_USER_ID = fd.read().split('.')[0] - statements = [ - "UPDATE user SET id = '{nid}' WHERE id = '{oid}'", - "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'", - "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'", - "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'", - ] - run_sql([ - sql.format(oid=OLD_USER_ID, nid=settings.USER_ID) - for sql in statements - ]) - for ext in ('log', 'db', 'json'): - old_log = os.path.join(settings.data_path, 'peers/%s.%s' % (OLD_USER_ID, ext)) - new_log = os.path.join(settings.data_path, 'peers/%s.%s' % (USER_ID, ext)) - if os.path.exists(old_log) and not os.path.exists(new_log): - os.rename(old_log, new_log) - def create_default_lists(user_id=None): with db.session(): diff --git a/oml/state.py b/oml/state.py index 4dbf2da..cc1e804 100644 --- a/oml/state.py +++ b/oml/state.py @@ -16,7 +16,6 @@ websockets = [] uisockets = [] peers = {} changelog_size = None -sync_db = False activity = {} removepeer = {} diff --git a/oml/tasks.py b/oml/tasks.py index d506853..93d3b45 100644 --- a/oml/tasks.py +++ b/oml/tasks.py @@ -36,15 +36,16 @@ class Tasks(Thread): def run(self): self.load_tasks() - if (time.mktime(time.gmtime()) - settings.server.get('last_scan', 0)) > 24*60*60: + if time.mktime(time.gmtime()) - settings.server.get('last_scan', 0) > 24*60*60: + settings.server['last_scan'] = time.mktime(time.gmtime()) self.queue('scan') import item.scan from item.models import sync_metadata, get_preview, get_cover from user.models import ( - export_list, update_user_peering, - add_local_info, remove_local_info, - upload + export_list, update_user_peering, + add_local_info, remove_local_info, + upload ) shutdown = False while not shutdown: @@ -97,7 +98,6 @@ class Tasks(Thread): def load_tasks(self): if os.path.exists(self._taskspath): - logger.debug('loading tasks') try: with open(self._taskspath) as f: tasks = json.load(f) diff --git a/oml/tor.py b/oml/tor.py index 830701b..17056ca 100644 --- a/oml/tor.py +++ b/oml/tor.py @@ -22,11 +22,9 @@ import logging logging.getLogger('stem').setLevel(logging.ERROR) logger = logging.getLogger(__name__) - class TorDaemon(Thread): installing = False running = True - ended = False p = None def __init__(self): @@ -107,8 +105,6 @@ DirReqStatistics 0 logger.debug(line) self.p.communicate() time.sleep(0.5) - self.ended = True - self.running = False self.p = None def kill(self): @@ -146,10 +142,6 @@ class Tor(object): logger.debug("Start tor") self.daemon = TorDaemon() return self.connect() - elif 
self.daemon.ended: - logger.debug("Try starting tor again") - self.daemon = TorDaemon() - return self.connect() if not self.daemon.installing: logger.debug("Failed to connect to tor") return False @@ -209,20 +201,18 @@ class Tor(object): return False controller = self.controller if controller.get_version() >= stem.version.Requirement.ADD_ONION: - private_key, public_key = utils.load_pem_key(settings.ca_key_path) - key_type, key_content = utils.get_onion_key(private_key) + with open(settings.ssl_key_path, 'rb') as fd: + private_key = fd.read() + key_content = RSA.importKey(private_key).exportKey().decode() + key_content = ''.join(key_content.strip().split('\n')[1:-1]) ports = {9851: settings.server['node_port']} if settings.preferences.get('enableReadOnlyService'): ports[80] = settings.server['public_port'] controller.remove_ephemeral_hidden_service(settings.USER_ID) - response = controller.create_ephemeral_hidden_service( - ports, - key_type=key_type, key_content=key_content, - detached=True - ) + response = controller.create_ephemeral_hidden_service(ports, + key_type='RSA1024', key_content=key_content, + detached=True) if response.is_ok(): - if response.service_id != settings.USER_ID: - logger.error("Something is wrong with tor id %s vs %s", response.service_id, settings.USER_ID) logger.debug('published node as https://%s.onion:%s', settings.USER_ID, settings.server_defaults['node_port']) if settings.preferences.get('enableReadOnlyService'): @@ -269,71 +259,65 @@ def torbrowser_url(sys_platform=None): data = read_url(base_url, timeout=3*24*60*60).decode() versions = [] for r in ( - re.compile('href="(\d+\.\d+\.\d+/)"'), - re.compile('href="(\d+\.\d+/)"'), + re.compile('href="(\d\.\d\.\d/)"'), + re.compile('href="(\d\.\d/)"'), ): versions += r.findall(data) - if not versions: - return None current = sorted(versions)[-1] url = base_url + current + language = '.*?en' if sys_platform.startswith('linux'): if platform.architecture()[0] == '64bit': - osname = 'linux-x86_64' + osname = 'linux64' else: - osname = 'linux-x86_32' + osname = 'linux32' ext = 'xz' elif sys_platform == 'darwin': - osname = 'macos' + osname = 'osx64' ext = 'dmg' elif sys_platform == 'win32': - osname = 'windows-x86_64-portable' - ext = 'exe' + language = '' + osname = '' + ext = 'zip' else: logger.debug('no way to get torbrowser url for %s', sys.platform) return None - data = read_url(url).decode() - r = re.compile('href="(.*?{osname}.*?{ext})"'.format(osname=osname, ext=ext)).findall(data) - if not r: - r = re.compile('href="(.*?{ext})"'.format(ext=ext)).findall(data) - torbrowser = sorted(r)[-1] + r = re.compile('href="(.*?{osname}{language}.*?{ext})"'.format(osname=osname,language=language,ext=ext)) + torbrowser = sorted(r.findall(read_url(url).decode()))[-1] url += torbrowser return url def get_tor(): if sys.platform == 'darwin': for path in ( - os.path.join(settings.top_dir, 'platform_darwin64', 'tor', 'tor'), + os.path.join(settings.base_dir, '..', 'platform_darwin64', 'tor', 'tor'), '/Applications/TorBrowser.app/TorBrowser/Tor/tor', - os.path.join(settings.top_dir, 'tor', 'TorBrowser.app', 'TorBrowser', 'Tor', 'tor') + os.path.join(settings.base_dir, '..', 'tor', 'TorBrowser.app', 'TorBrowser', 'Tor', 'tor') ): if os.path.isfile(path) and os.access(path, os.X_OK): return path elif sys.platform == 'win32': paths = [ - os.path.join(settings.top_dir, 'platform_win32', 'tor', 'tor.exe') + os.path.join(settings.base_dir, '..', 'platform_win32', 'tor', 'tor.exe') ] - for exe in ( - os.path.join('Tor Browser', 'Browser', 
'TorBrowser', 'Tor', 'tor.exe'), - os.path.join('Tor Browser', 'Browser', 'TorBrowser', 'Tor', 'Tor', 'tor.exe'), + exe = os.path.join('Tor Browser', 'Browser', 'TorBrowser', 'Tor', 'tor.exe') + for prefix in ( + os.path.join(os.path.expanduser('~'), 'Desktop'), + os.path.join('C:', 'Program Files'), + os.path.join('C:', 'Program Files (x86)'), ): - for prefix in ( - os.path.join(os.path.expanduser('~'), 'Desktop'), - os.path.join('C:', 'Program Files'), - os.path.join('C:', 'Program Files (x86)'), - ): - path = os.path.join(prefix, exe) - paths.append(path) - paths.append(os.path.join(settings.top_dir, 'tor', 'Tor', 'tor.exe')) + path = os.path.join(prefix, exe) + paths.append(path) + paths.append(os.path.join(settings.base_dir, '..', 'tor', 'Tor', 'tor.exe')) for path in paths: if os.path.isfile(path) and os.access(path, os.X_OK): return os.path.normpath(path) elif sys.platform.startswith('linux'): for path in ( - os.path.join(settings.top_dir, 'platform_linux64', 'tor', 'tor'), - os.path.join(settings.top_dir, 'platform_linux32', 'tor', 'tor'), - os.path.join(settings.top_dir, 'platform_linux_armv7l', 'tor', 'tor'), - os.path.join(settings.top_dir, 'platform_linux_aarch64', 'tor', 'tor'), + os.path.join(settings.base_dir, '..', 'platform_linux64', 'tor', 'tor'), + os.path.join(settings.base_dir, '..', 'platform_linux32', 'tor', 'tor'), + os.path.join(settings.base_dir, '..', 'platform_linux_armv7l', 'tor', 'tor'), + os.path.join(settings.base_dir, '..', 'platform_linux_aarch64', 'tor', 'tor'), ): if os.path.isfile(path) and os.access(path, os.X_OK): return os.path.normpath(path) @@ -347,12 +331,9 @@ def get_tor(): path = os.path.join(base, 'TorBrowser', 'Tor', 'tor') if os.path.isfile(path) and os.access(path, os.X_OK): return path - path = os.path.join(base, 'TorBrowser', 'Tor', 'Tor', 'tor') - if os.path.isfile(path) and os.access(path, os.X_OK): - return path except: pass - local_tor = os.path.normpath(os.path.join(settings.top_dir, + local_tor = os.path.normpath(os.path.join(settings.base_dir, '..', 'tor', 'tor-browser_en-US', 'Browser', 'TorBrowser', 'Tor', 'tor')) if os.path.exists(local_tor): return local_tor @@ -361,7 +342,7 @@ def get_tor(): def get_geoip(tor): geo = [] for tordir in ( - os.path.normpath(os.path.join(settings.top_dir, 'platform', 'tor')), + os.path.normpath(os.path.join(settings.base_dir, '..', 'platform', 'tor')), os.path.join(os.path.dirname(os.path.dirname(tor)), 'Data', 'Tor') ): gepipfile = os.path.join(tordir, 'geoip') @@ -383,7 +364,7 @@ def install_tor(): logger.debug('found existing tor installation') return url = torbrowser_url() - target = os.path.normpath(os.path.join(settings.top_dir, 'tor')) + target = os.path.normpath(os.path.join(settings.base_dir, '..', 'tor')) if url: logger.debug('downloading and installing tor') if sys.platform.startswith('linux'): diff --git a/oml/tor_request.py b/oml/tor_request.py index d5b819f..0fd6455 100644 --- a/oml/tor_request.py +++ b/oml/tor_request.py @@ -32,7 +32,7 @@ def getaddrinfo(*args): return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))] def create_tor_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None): + source_address=None): host, port = address err = None af = socket.AF_INET @@ -40,9 +40,6 @@ def create_tor_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, proto = 6 sa = address sock = None - - logger.debug('make tor connection to: %s', address) - try: sock = socks.socksocket(af, socktype, proto) if timeout is not 
socket._GLOBAL_DEFAULT_TIMEOUT: @@ -69,30 +66,27 @@ class TorHTTPSConnection(http.client.HTTPSConnection): def __init__(self, host, port=None, service_id=None, check_hostname=None, context=None, **kwargs): self._service_id = service_id if self._service_id: - context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) + if hasattr(ssl, '_create_default_https_context'): + context = ssl._create_default_https_context() + elif hasattr(ssl, '_create_stdlib_context'): + context = ssl._create_stdlib_context() if context: context.check_hostname = False context.verify_mode = ssl.CERT_NONE - context.load_cert_chain(settings.ssl_cert_path, settings.ssl_key_path) + # tor keys are still 1024 bit, debian started to require 2048 by default, + # try to lower requirements to 1024 if needed + try: + context.load_cert_chain(settings.ssl_cert_path, settings.ssl_key_path) + except ssl.SSLError: + context.set_ciphers('DEFAULT@SECLEVEL=1') + context.load_cert_chain(settings.ssl_cert_path, settings.ssl_key_path) context.load_default_certs() - context.set_alpn_protocols(['http/1.1']) - context.post_handshake_auth = True http.client.HTTPSConnection.__init__(self, host, port, check_hostname=check_hostname, context=context, **kwargs) if not is_local(host): self._create_connection = create_tor_connection - def get_service_id_cert(self): - for cert in self.sock._sslobj.get_verified_chain(): - info = cert.get_info() - subject = info.get("subject") - if subject: - CN = subject[0][0][1] - if CN == self._service_id: - cert = cert.public_bytes() - return cert - def _check_service_id(self, cert): service_id = get_service_id(cert=cert) if service_id != self._service_id: @@ -102,9 +96,11 @@ class TorHTTPSConnection(http.client.HTTPSConnection): def connect(self): http.client.HTTPSConnection.connect(self) if self._service_id: - cert = self.get_service_id_cert() + cert = self.sock.getpeercert(binary_form=True) if not self._check_service_id(cert): - raise InvalidCertificateException(self._service_id, cert, 'service_id mismatch') + raise InvalidCertificateException(self._service_id, cert, + 'service_id mismatch') + #logger.debug('CIPHER %s VERSION %s', self.sock.cipher(), self.sock.ssl_version) class TorHTTPSHandler(urllib.request.HTTPSHandler): def __init__(self, debuglevel=0, context=None, check_hostname=None, service_id=None): diff --git a/oml/user/api.py b/oml/user/api.py index a1c9a8f..53448b3 100644 --- a/oml/user/api.py +++ b/oml/user/api.py @@ -411,7 +411,7 @@ def requestPeering(data): nickname (optional) } ''' - if len(data.get('id', '')) != settings.ID_LENGTH: + if len(data.get('id', '')) != 16: logger.debug('invalid user id') return {} u = models.User.get_or_create(data['id']) @@ -434,7 +434,7 @@ def acceptPeering(data): message } ''' - if len(data.get('id', '')) != settings.ID_LENGTH: + if len(data.get('id', '')) != 16: logger.debug('invalid user id') return {} logger.debug('acceptPeering... 
%s', data) @@ -453,8 +453,8 @@ def rejectPeering(data): message } ''' - if len(data.get('id', '')) not in (16, 43, 56): - logger.debug('invalid user id: %s', data) + if len(data.get('id', '')) not in (16, 43): + logger.debug('invalid user id') return {} u = models.User.get_or_create(data['id']) u.info['message'] = data.get('message', '') @@ -471,8 +471,8 @@ def removePeering(data): message } ''' - if len(data.get('id', '')) not in (16, 43, 56): - logger.debug('invalid user id: %s', data) + if len(data.get('id', '')) not in (16, 43): + logger.debug('invalid user id') return {} u = models.User.get(data['id'], for_update=True) if u: @@ -488,8 +488,8 @@ def cancelPeering(data): takes { } ''' - if len(data.get('id', '')) != settings.ID_LENGTH: - logger.debug('invalid user id: %s', data) + if len(data.get('id', '')) != 16: + logger.debug('invalid user id') return {} u = models.User.get_or_create(data['id']) u.info['message'] = data.get('message', '') diff --git a/oml/user/models.py b/oml/user/models.py index bb737a4..ffe60f5 100644 --- a/oml/user/models.py +++ b/oml/user/models.py @@ -27,7 +27,7 @@ class User(db.Model): created = sa.Column(sa.DateTime()) modified = sa.Column(sa.DateTime()) - id = sa.Column(sa.String(128), primary_key=True) + id = sa.Column(sa.String(43), primary_key=True) info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler))) nickname = sa.Column(sa.String(256), index=True) @@ -256,7 +256,7 @@ class List(db.Model): type = sa.Column(sa.String(64)) _query = sa.Column('query', MutableDict.as_mutable(sa.PickleType(pickler=json_pickler))) - user_id = sa.Column(sa.String(128), sa.ForeignKey('user.id')) + user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id')) user = sa.orm.relationship('User', backref=sa.orm.backref('lists', lazy='dynamic')) items = sa.orm.relationship('Item', secondary=list_items, @@ -456,7 +456,7 @@ class Metadata(db.Model): id = sa.Column(sa.Integer(), primary_key=True) item_id = sa.Column(sa.String(32)) - user_id = sa.Column(sa.String(128), sa.ForeignKey('user.id')) + user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id')) data_hash = sa.Column(sa.String(40), index=True) data = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler))) diff --git a/oml/utils.py b/oml/utils.py index c8477c0..ad2b65a 100644 --- a/oml/utils.py +++ b/oml/utils.py @@ -5,7 +5,6 @@ from datetime import datetime from io import StringIO, BytesIO from PIL import Image, ImageFile import base64 -import functools import hashlib import json import os @@ -18,26 +17,19 @@ import time import unicodedata import ox -import OpenSSL.crypto from OpenSSL.crypto import ( - dump_certificate, - dump_privatekey, - FILETYPE_PEM, - load_certificate, - load_privatekey, - PKey, - TYPE_RSA, - X509, - X509Extension + load_privatekey, load_certificate, + dump_privatekey, dump_certificate, + FILETYPE_ASN1, FILETYPE_PEM, PKey, TYPE_RSA, + X509, X509Extension ) -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import ed25519 +from Crypto.PublicKey import RSA +from Crypto.Util.asn1 import DerSequence from meta.utils import normalize_isbn, find_isbns, get_language, to_isbn13 from win32utils import get_short_path_name - import logging logging.getLogger('PIL').setLevel(logging.ERROR) logger = logging.getLogger(__name__) @@ -100,7 +92,7 @@ def resize_image(data, width=None, size=None): height = max(height, 1) if width < source_width: - resize_method = Image.LANCZOS + resize_method = Image.ANTIALIAS else: resize_method = 
Image.BICUBIC output = source.resize((width, height), resize_method) @@ -127,157 +119,78 @@ def get_position_by_id(list, key): return i return -1 -def sign_cert(cert, key): - # pyOpenSSL sgin api does not allow NULL hash - # return cert.sign(key, None) - return OpenSSL.crypto._lib.X509_sign(cert._x509, key._pkey, OpenSSL.crypto._ffi.NULL) - -def load_pem_key(pem): - with open(pem) as fd: - ca_key_pem = fd.read() - key = load_privatekey(FILETYPE_PEM, ca_key_pem) - if key.bits() != 256: - raise Exception("Invalid key %s" % pem) - key = key.to_cryptography_key() - private_key = key.private_bytes_raw() - public_key = key.public_key().public_bytes_raw() - return private_key, public_key - - -def expand_private_key(secret_key) -> bytes: - hash = hashlib.sha512(secret_key).digest() - hash = bytearray(hash) - hash[0] &= 248 - hash[31] &= 127 - hash[31] |= 64 - return bytes(hash) - -def get_onion(pubkey): - version_byte = b"\x03" - checksum_str = ".onion checksum".encode() - checksum = hashlib.sha3_256(checksum_str + pubkey + version_byte).digest()[:2] - return base64.b32encode(pubkey + checksum + version_byte).decode().lower() - -def get_onion_key(private_key): - onion_key = expand_private_key(private_key) - key_type = 'ED25519-V3' - key_content = base64.encodebytes(onion_key).decode().strip().replace('\n', '') - return key_type, key_content - -def get_user_id(key_path, cert_path, ca_key_path, ca_cert_path): - if os.path.exists(ca_key_path): - try: - private_key, public_key = load_pem_key(ca_key_path) - except: - os.unlink(ca_key_path) +def get_user_id(private_key, cert_path): + if os.path.exists(private_key): + with open(private_key) as fd: + key = load_privatekey(FILETYPE_PEM, fd.read()) + if key.bits() != 1024: + os.unlink(private_key) else: - user_id = get_onion(public_key) - - if not os.path.exists(ca_key_path): - private_key = ed25519.Ed25519PrivateKey.generate() - private_bytes = private_key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption() - ) - with open(ca_key_path, 'wb') as fd: - fd.write(private_bytes) - - public_key = private_key.public_key().public_bytes_raw() - user_id = get_onion(public_key) - - if not os.path.exists(ca_cert_path) or \ - (datetime.now() - datetime.fromtimestamp(os.path.getmtime(ca_cert_path))).days > 5*365: - with open(ca_key_path, 'rb') as key_file: - key_data = key_file.read() - cakey = load_privatekey(FILETYPE_PEM, key_data) + user_id = get_service_id(private_key) + if not os.path.exists(private_key): + if os.path.exists(cert_path): + os.unlink(cert_path) + folder = os.path.dirname(private_key) + if not os.path.exists(folder): + os.makedirs(folder) + os.chmod(folder, 0o700) + key = PKey() + key.generate_key(TYPE_RSA, 1024) + with open(private_key, 'wb') as fd: + os.chmod(private_key, 0o600) + fd.write(dump_privatekey(FILETYPE_PEM, key)) + os.chmod(private_key, 0o400) + user_id = get_service_id(private_key) + if not os.path.exists(cert_path) or \ + (datetime.now() - datetime.fromtimestamp(os.path.getmtime(cert_path))).days > 60: ca = X509() ca.set_version(2) ca.set_serial_number(1) ca.get_subject().CN = user_id ca.gmtime_adj_notBefore(0) - ca.gmtime_adj_notAfter(10 * 356 * 24 * 60 * 60) + ca.gmtime_adj_notAfter(90 * 24 * 60 * 60) ca.set_issuer(ca.get_subject()) - ca.set_pubkey(cakey) + ca.set_pubkey(key) ca.add_extensions([ - X509Extension(b"basicConstraints", False, b"CA:TRUE"), - X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"), - X509Extension( - 
b"subjectKeyIdentifier", False, b"hash", subject=ca - ), - ]) - ca.add_extensions([ - X509Extension( - b"authorityKeyIdentifier", False, b"keyid:always", issuer=ca - ) - ]) - - sign_cert(ca, cakey) - - with open(ca_cert_path, 'wb') as fd: - fd.write(dump_certificate(FILETYPE_PEM, ca)) - - if os.path.exists(cert_path): - os.unlink(cert_path) - if os.path.exists(key_path): - os.unlink(key_path) - else: - with open(ca_cert_path) as fd: - ca = load_certificate(FILETYPE_PEM, fd.read()) - with open(ca_key_path) as fd: - cakey = load_privatekey(FILETYPE_PEM, fd.read()) - - - # create RSA intermediate certificate since clients don't quite like Ed25519 yet - if not os.path.exists(cert_path) or \ - (datetime.now() - datetime.fromtimestamp(os.path.getmtime(cert_path))).days > 60: - - key = PKey() - key.generate_key(TYPE_RSA, 2048) - - cert = X509() - cert.set_version(2) - cert.set_serial_number(2) - cert.get_subject().CN = user_id + ".onion" - cert.gmtime_adj_notBefore(0) - cert.gmtime_adj_notAfter(90 * 24 * 60 * 60) - cert.set_issuer(ca.get_subject()) - cert.set_pubkey(key) - subject_alt_names = b"DNS: %s.onion" % user_id.encode() - cert.add_extensions([ - X509Extension(b"basicConstraints", True, b"CA:FALSE"), + X509Extension(b"basicConstraints", True, b"CA:TRUE, pathlen:0"), + X509Extension(b"nsCertType", True, b"sslCA"), X509Extension(b"extendedKeyUsage", True, b"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC"), X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"), X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=ca), - X509Extension(b"subjectAltName", critical=True, value=subject_alt_names), ]) - sign_cert(cert, cakey) + ca.sign(key, "sha256") with open(cert_path, 'wb') as fd: - fd.write(dump_certificate(FILETYPE_PEM, cert)) fd.write(dump_certificate(FILETYPE_PEM, ca)) - with open(key_path, 'wb') as fd: - fd.write(dump_privatekey(FILETYPE_PEM, key)) return user_id - def get_service_id(private_key_file=None, cert=None): ''' service_id is the first half of the sha1 of the rsa public key encoded in base32 ''' if private_key_file: - with open(private_key_file, 'rb') as key_file: - key_type, key_content = key_file.read().split(b':', 1) - private_key = base64.decodebytes(key_content) - public_key = Ed25519().public_key_from_hash(private_key) - service_id = get_onion(public_key) + with open(private_key_file, 'rb') as fd: + private_key = fd.read() + public_key = RSA.importKey(private_key).publickey().exportKey('DER')[22:] + # compute sha1 of public key and encode first half in base32 + service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode() + ''' + # compute public key from priate key and export in DER format + # ignoring the SPKI header(22 bytes) + key = load_privatekey(FILETYPE_PEM, private_key) + cert = X509() + cert.set_pubkey(key) + public_key = dump_privatekey(FILETYPE_ASN1, cert.get_pubkey())[22:] + # compute sha1 of public key and encode first half in base32 + service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode() + ''' elif cert: - cert_ = load_certificate(FILETYPE_PEM, cert) - key = cert_.get_pubkey() - public_key = key.to_cryptography_key().public_bytes_raw() - service_id = get_onion(public_key) - else: - service_id = None + # compute sha1 of public key and encode first half in base32 + key = load_certificate(FILETYPE_ASN1, cert).get_pubkey() + pub_der = DerSequence() + pub_der.decode(dump_privatekey(FILETYPE_ASN1, key)) + public_key = 
RSA.construct((pub_der._seq[1], pub_der._seq[2])).exportKey('DER')[22:] + service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode() return service_id def update_dict(root, data): @@ -485,7 +398,7 @@ def check_pidfile(pid): def ctl(*args): import settings if sys.platform == 'win32': - platform_win32 = os.path.normpath(os.path.join(settings.top_dir, 'platform_win32')) + platform_win32 = os.path.normpath(os.path.join(settings.base_dir, '..', 'platform_win32')) python = os.path.join(platform_win32, 'pythonw.exe') cmd = [python, 'oml'] + list(args) startupinfo = subprocess.STARTUPINFO() @@ -582,36 +495,3 @@ def iexists(path): def same_path(f1, f2): return unicodedata.normalize('NFC', f1) == unicodedata.normalize('NFC', f2) - -def time_cache(max_age, maxsize=128, typed=False): - def _decorator(fn): - @functools.lru_cache(maxsize=maxsize, typed=typed) - def _new(*args, __time_salt, **kwargs): - return fn(*args, **kwargs) - - @functools.wraps(fn) - def _wrapped(*args, **kwargs): - return _new(*args, **kwargs, __time_salt=int(time.time() / max_age)) - - return _wrapped - - return _decorator - -def migrate_userid(old_id, new_id): - from db import run_sql - import settings - statements = [ - "UPDATE user SET id = '{nid}' WHERE id = '{oid}'", - "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'", - "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'", - "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'", - ] - run_sql([ - sql.format(oid=old_id, nid=new_id) - for sql in statements - ]) - for ext in ('log', 'db', 'json'): - old_file = os.path.join(settings.data_path, 'peers/%s.%s' % (old_id, ext)) - new_file = os.path.join(settings.data_path, 'peers/%s.%s' % (new_id, ext)) - if os.path.exists(old_file) and not os.path.exists(new_file): - os.rename(old_file, new_file) diff --git a/requirements-shared.txt b/requirements-shared.txt index 6e03575..3b4600f 100644 --- a/requirements-shared.txt +++ b/requirements-shared.txt @@ -2,7 +2,7 @@ requests==2.21.0 chardet html5lib #ox>=2.0.666 -git+https://code.0x2620.org/0x2620/python-ox.git#egg=ox +git+http://git.0x2620.org/python-ox.git#egg=python-ox python-stdnum==1.2 PyPDF2==1.25.1 pysocks diff --git a/requirements.txt b/requirements.txt index 0dec41d..16e5d69 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,9 @@ lxml simplejson ed25519>=1.4 -SQLAlchemy==1.4.46 +SQLAlchemy==1.0.12 pyopenssl>=0.15 -pycryptodome +pyCrypto>=2.6.1 pillow netifaces -tornado==6.0.3 +tornado==5.1.1 diff --git a/static/css/oml.css b/static/css/oml.css index dabc0a9..375341b 100644 --- a/static/css/oml.css +++ b/static/css/oml.css @@ -10,10 +10,6 @@ font-size: 14px; line-height: 21px; } -.OMLQuote img { - max-width: 100%; - margin: auto; -} .OMLAnnotation .OMLQuoteBackground { position: absolute; diff --git a/static/html/pdf.html b/static/html/pdf.html index 706041d..36e1a60 100644 --- a/static/html/pdf.html +++ b/static/html/pdf.html @@ -1,4 +1,4 @@ - + + + + + + + - +
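A note on the Ed25519 path removed from oml/utils.py above: the deleted get_onion() helper derives the peer id the same way Tor builds a v3 onion address. A minimal standalone sketch of that derivation (function name is mine; only hashlib and base64 are needed):

    import base64
    import hashlib

    def onion_v3(pubkey: bytes) -> str:
        # mirrors the removed get_onion(): id = base32(pubkey || checksum || version),
        # where checksum = SHA3-256(".onion checksum" || pubkey || version)[:2]
        version = b"\x03"
        checksum = hashlib.sha3_256(b".onion checksum" + pubkey + version).digest()[:2]
        return base64.b32encode(pubkey + checksum + version).decode().lower()

For a 32-byte Ed25519 public key this yields the 56-character id that the removed get_user_id() stores, with ".onion" appended where a hostname or certificate CN is needed.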
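The RSA path this diff switches back to computes the peer id via get_service_id(); a sketch of the core of that computation, assuming the caller already holds the DER public key with the 22-byte SPKI header stripped (as exportKey('DER')[22:] does in the diff):

    import base64
    import hashlib

    def service_id(public_key_der: bytes) -> str:
        # first half (10 bytes / 80 bits) of SHA-1 over the DER-encoded RSA public key,
        # base32-encoded and lower-cased -- the legacy v2 onion-style service id
        return base64.b32encode(hashlib.sha1(public_key_der).digest()[:10]).decode().lower()

This is why ids on the RSA path come out 16 characters long.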
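For reference, the time_cache decorator deleted at the end of the oml/utils.py hunk is an lru_cache keyed by an extra time-bucket salt. A self-contained usage sketch (the decorator body is copied from the removed code; slow_lookup is a made-up example):

    import functools
    import time

    def time_cache(max_age, maxsize=128, typed=False):
        def _decorator(fn):
            @functools.lru_cache(maxsize=maxsize, typed=typed)
            def _new(*args, __time_salt, **kwargs):
                return fn(*args, **kwargs)
            @functools.wraps(fn)
            def _wrapped(*args, **kwargs):
                # the salt changes every max_age seconds, so old entries stop matching
                return _new(*args, **kwargs, __time_salt=int(time.time() / max_age))
            return _wrapped
        return _decorator

    @time_cache(60)
    def slow_lookup(key):
        # stand-in for an expensive call; repeated within ~60s it returns the cached value
        return (key, time.time())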
-
[pdf.html hunk body garbled in extraction: the element markup on both sides of the change is not recoverable. The surviving text shows the hunk rewrites the document properties overlay (File name, File size, Title, Author, Subject, Keywords, Creation Date, Modification Date, Creator, PDF Producer, PDF Version, Page Count, Page Size, Fast Web View), an image alt-text dialog ("Choose an option", "Alt text (alternative text) helps when people can't see the image or when it doesn't load."), and the "Preparing document for printing…" progress message.]
- diff --git a/static/js/annotation.js b/static/js/annotation.js index d82a584..6df2090 100644 --- a/static/js/annotation.js +++ b/static/js/annotation.js @@ -1,18 +1,9 @@ 'use strict'; -oml.SELECTION = 0 -oml.HIGHLIGHT = 1 - oml.ui.annotation = function(annotation, $iframe) { - var value = Ox.encodeHTMLEntities(annotation.text).replace(/\n/g, '
') - if (annotation.type == oml.HIGHLIGHT) { - let coord = annotation.coords[0].map(p => parseInt(p)).join(',') - let image = `/${oml.user.ui.item}/2048p${parseInt(annotation.page)},${coord}.jpg` - value = `` - } var $quoteText = Ox.Element() .addClass('OxSelectable OMLQuote') - .html(value) + .html(Ox.encodeHTMLEntities(annotation.text).replace(/\n/g, '
')) .on({ click: function(event) { var id diff --git a/static/js/folders.js b/static/js/folders.js index 21eee1e..8a00703 100644 --- a/static/js/folders.js +++ b/static/js/folders.js @@ -128,10 +128,6 @@ oml.ui.folders = function() { .appendTo(that) ); oml.$ui.librariesList.$body.css({height: '16px'}); // FIXME! - oml.$ui.librariesList.find('.OxColumnItems').css({ - 'text-overflow': 'reset', - 'direction': 'rtl' - }) users.forEach(function(user, index) { diff --git a/static/js/oml.js b/static/js/oml.js index 021c534..12db1ef 100644 --- a/static/js/oml.js +++ b/static/js/oml.js @@ -219,7 +219,7 @@ function loadOML(browserSupported) { window.oml = Ox.App({ name: 'oml', - socket: document.location.protocol.replace('http', 'ws') + '//' + document.location.host + '/ws', + socket: 'ws://' + document.location.host + '/ws', url: '/api/' }).bindEvent({ load: function(data) { diff --git a/static/js/utils.js b/static/js/utils.js index a78aac3..f4bb2b9 100644 --- a/static/js/utils.js +++ b/static/js/utils.js @@ -1028,7 +1028,7 @@ oml.updateFilterMenus = function() { }; oml.validatePublicKey = function(value) { - return /^[a-z0-9+\/]{56}$/.test(value); + return /^[a-z0-9+\/]{16}$/.test(value); }; oml.updateDebugMenu = function() { diff --git a/static/js/viewer.js b/static/js/viewer.js index 89bc798..616aacf 100644 --- a/static/js/viewer.js +++ b/static/js/viewer.js @@ -151,7 +151,7 @@ oml.ui.viewer = function() { height: '100%', border: 0 }).onMessage(function(data, event) { - // console.log('got', event, data, data.page) + console.log('got', event, data) if (event == 'addAnnotation') { addAnnotation(data); var $annotation = oml.ui.annotation(data, $iframe).bindEvent(annotationEvents) @@ -215,11 +215,7 @@ oml.ui.viewer = function() { }) return } - var map = {} - map[sortKey] = function(value) { - return value ? 
value.toString() : ''; - } - annotations = Ox.sortBy(annotations, sortKey, map) + annotations = Ox.sortBy(annotations, sortKey) oml.$ui.annotationFolder.empty(); var visibleAnnotations = []; var hasAnnotations = false; diff --git a/static/reader/pdf.css b/static/reader/pdf.css deleted file mode 100644 index 467b305..0000000 --- a/static/reader/pdf.css +++ /dev/null @@ -1,62 +0,0 @@ - -.toolbarButton.cropFile::before, - .secondaryToolbarButton.cropFile::before { - mask-image: url(pdf/toolbarButton-crop.png); -} -.toolbarButton.embedPage::before, - .secondaryToolbarButton.embedPage::before { - mask-image: url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNTYiIGhlaWdodD0iMjU2IiB2aWV3Qm94PSIwIDAgMjU2IDI1NiI+PGxpbmUgeDE9Ijg4IiB5MT0iNTYiIHgyPSIyNCIgeTI9IjEyOCIgc3Ryb2tlPSIjMDAwMDAwIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS13aWR0aD0iNDgiLz48bGluZSB4MT0iMjQiIHkxPSIxMjgiIHgyPSI4OCIgeTI9IjIwMCIgc3Ryb2tlPSIjMDAwMDAwIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS13aWR0aD0iNDgiLz48bGluZSB4MT0iMTY4IiB5MT0iNTYiIHgyPSIyMzIiIHkyPSIxMjgiIHN0cm9rZT0iIzAwMDAwMCIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2Utd2lkdGg9IjQ4Ii8+PGxpbmUgeDE9IjIzMiIgeTE9IjEyOCIgeDI9IjE2OCIgeTI9IjIwMCIgc3Ryb2tlPSIjMDAwMDAwIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS13aWR0aD0iNDgiLz48L3N2Zz48IS0teyJjb2xvciI6ImRlZmF1bHQiLCJuYW1lIjoic3ltYm9sRW1iZWQiLCJ0aGVtZSI6Im94bWVkaXVtIn0tLT4=); -} -@media screen and (min-resolution: 2dppx) { - .toolbarButton.cropFile::before, - .secondaryToolbarButton.cropFile::before { - mask-image: url(pdf/toolbarButton-crop@2x.png); - } - .toolbarButton.embedPage::before, - .secondaryToolbarButton.embedPage::before { - mask-image: url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNTYiIGhlaWdodD0iMjU2IiB2aWV3Qm94PSIwIDAgMjU2IDI1NiI+PGxpbmUgeDE9Ijg4IiB5MT0iNTYiIHgyPSIyNCIgeTI9IjEyOCIgc3Ryb2tlPSIjMDAwMDAwIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS13aWR0aD0iNDgiLz48bGluZSB4MT0iMjQiIHkxPSIxMjgiIHgyPSI4OCIgeTI9IjIwMCIgc3Ryb2tlPSIjMDAwMDAwIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS13aWR0aD0iNDgiLz48bGluZSB4MT0iMTY4IiB5MT0iNTYiIHgyPSIyMzIiIHkyPSIxMjgiIHN0cm9rZT0iIzAwMDAwMCIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2Utd2lkdGg9IjQ4Ii8+PGxpbmUgeDE9IjIzMiIgeTE9IjEyOCIgeDI9IjE2OCIgeTI9IjIwMCIgc3Ryb2tlPSIjMDAwMDAwIiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS13aWR0aD0iNDgiLz48L3N2Zz48IS0teyJjb2xvciI6ImRlZmF1bHQiLCJuYW1lIjoic3ltYm9sRW1iZWQiLCJ0aGVtZSI6Im94bWVkaXVtIn0tLT4=); - } -} - -.verticalToolbarSeparator.hiddenMediumView, -#print, -#secondaryPrint, -#openFile, -#secondaryOpenFile, -#editorModeButtons { - display: none !important; -} - -.page .crop-overlay { - position: absolute; - top: 0; - bottom: 0; - left: 0; - right: 0; - //background: rgba(0,0,0,0.5); - cursor: crosshair; - z-index: 100; -} -.page .crop-overlay.inactive { - pointer-events: none; - cursor: default; -} - -.page .crop-overlay canvas { - width: 100%; - height: 100%; -} -.page .highlights { - position: absolute; - top: 0; - bottom: 0; - left: 0; - right: 0; - //background: rgba(0,0,0,0.5); - z-index: 101; - pointer-events: none; -} -.page .highlights canvas { - width: 100%; - height: 100%; -} diff --git a/static/reader/pdf.js b/static/reader/pdf.js index 8ac01eb..0a0b50b 100644 --- a/static/reader/pdf.js +++ b/static/reader/pdf.js @@ -1,97 +1,6 @@ var id = document.location.pathname.split('/')[1]; var annotations = []; var currentPage = 1, rendered = false -var highlightInactive = true -var selectedAnnotation - -const SELECTION = 0 -const HIGHLIGHT = 1 - - -var div = 
document.createElement("div") -div.innerHTML = ` - -` -var cropFile = div.querySelector("#cropFile") - -document.querySelector('#toolbarViewerRight').insertBefore(cropFile, document.querySelector('#toolbarViewerRight').firstChild) - -// secondary menu -div.innerHTML = ` - -` -var secondaryCropFile = div.querySelector("#secondaryCropFile") -document.querySelector('#secondaryToolbarButtonContainer').insertBefore( - secondaryCropFile, - document.querySelector('#secondaryToolbarButtonContainer').firstChild -) - -function initOverlay() { - document.querySelectorAll('#cropFile,.secondaryToolbarButton.cropFile').forEach(btn => { - btn.addEventListener('click', event=> { - if (highlightInactive) { - event.target.style.background = 'red' - highlightInactive = false - document.querySelectorAll('.crop-overlay.inactive').forEach(element => { - element.classList.remove('inactive') - }) - } else { - event.target.style.background = '' - highlightInactive = true - document.querySelectorAll('.crop-overlay').forEach(element => { - element.classList.add('inactive') - }) - } - }) - }) - PDFViewerApplication.initializedPromise.then(function() { - PDFViewerApplication.pdfViewer.eventBus.on("pagesinit", function(event) { - /* - document.querySelector('#viewerContainer').addEventListener('scroll', event => { - if (window.parent && window.parent.postMessage) { - if (first) { - first = false - } else { - window.parent.postMessage({event: 'scrolled', top: event.target.scrollTop}) - } - } - }) - */ - }) - PDFViewerApplication.pdfViewer.eventBus.on("pagerender", function(event) { - var page = event.pageNumber.toString() - var div = event.source.div - var overlay = document.createElement('div') - overlay.classList.add('crop-overlay') - overlay.id = 'overlay' + page - if (highlightInactive) { - overlay.classList.add('inactive') - } - div.appendChild(overlay) - - renderHighlightSelectionOverlay(overlay, id, page, event.source) - var highlights = document.createElement('div') - highlights.classList.add('highlights') - highlights.id = 'highlights' + page - var canvas = document.createElement('canvas') - highlights.appendChild(canvas) - div.appendChild(highlights) - renderHighlights(page) - }) - PDFViewerApplication.eventBus.on('pagerendered', function(event) { - loadAnnotations(event.pageNumber) - }) - }) -} - -document.addEventListener('DOMContentLoaded', function() { - window.PDFViewerApplication ? 
initOverlay() : document.addEventListener("webviewerloaded", initOverlay) -}) - Ox.load({ 'UI': { @@ -116,15 +25,7 @@ Ox.load({ document.querySelector('#viewerContainer').scrollTop = el.offsetTop + el.parentElement.offsetTop - 64; } }, delay) - var oldSelection = selectedAnnotation - selectedAnnotation = data.id selectAnnotation(data.id) - if (oldSelection) { - var old = annotations.filter(function(a) { return a.id == oldSelection })[0] - if (old && old.type == HIGHLIGHT) { - renderHighlights(old.page) - } - } } else if (event == 'addAnnotation') { createAnnotation() } else if (event == 'addAnnotations') { @@ -136,11 +37,7 @@ Ox.load({ } data.annotations.forEach(function(annotation) { annotations.push(annotation) - if (annotation.type == HIGHLIGHT) { - renderHighlights(annotation.page) - } else { - renderAnnotation(annotation) - } + renderAnnotation(annotation) }) } else if (event == 'removeAnnotation') { removeAnnotation(data.id) @@ -178,6 +75,18 @@ window.addEventListener('mouseup', function(event) { } }) +function bindEvents() { + if (!window.PDFViewerApplication || !window.PDFViewerApplication.eventBus) { + setTimeout(bindEvents, 10) + return + } + PDFViewerApplication.eventBus.on('pagerendered', function(event) { + loadAnnotations(event.pageNumber) + }) +} + +bindEvents() + function getHighlight() { var pageNumber = PDFViewerApplication.pdfViewer.currentPageNumber; var pageIndex = pageNumber - 1; @@ -193,7 +102,6 @@ function getHighlight() { var text = selection.toString(); var position = [pageNumber].concat(Ox.sort(selected.map(function(c) { return [c[1], c[0]]}))[0]); return { - type: SELECTION, page: pageNumber, pageLabel: PDFViewerApplication.pdfViewer.currentPageLabel, position: position, @@ -226,32 +134,25 @@ function renderAnnotation(annotation) { } var pageElement = page.canvas.parentElement.parentElement; var viewport = page.viewport; - if (annotation.type == HIGHLIGHT) { - renderHighlights(annotation.page) - } else if (annotation.coords) { - pageElement.querySelectorAll('.oml-annotation').forEach(el => el.remove()) - annotation.coords.forEach(function (rect) { - var bounds = viewport.convertToViewportRectangle(rect); - var el = document.createElement('div'); - el.classList.add('oml-annotation') - el.classList.add('a' + annotation.id) - el.classList.add('page' + annotation.page) - el.dataset.id = annotation.id - el.setAttribute('style', 'position: absolute; background-color: yellow;opacity:0.3;' + - 'left:' + Math.min(bounds[0], bounds[2]) + 'px; top:' + Math.min(bounds[1], bounds[3]) + 'px;' + - 'width:' + Math.abs(bounds[0] - bounds[2]) + 'px; height:' + Math.abs(bounds[1] - bounds[3]) + 'px;'); + annotation.coords.forEach(function (rect) { + var bounds = viewport.convertToViewportRectangle(rect); + var el = document.createElement('div'); + el.classList.add('oml-annotation') + el.classList.add('a' + annotation.id) + el.classList.add('page' + annotation.page) + el.dataset.id = annotation.id + el.setAttribute('style', 'position: absolute; background-color: yellow;opacity:0.3;' + + 'left:' + Math.min(bounds[0], bounds[2]) + 'px; top:' + Math.min(bounds[1], bounds[3]) + 'px;' + + 'width:' + Math.abs(bounds[0] - bounds[2]) + 'px; height:' + Math.abs(bounds[1] - bounds[3]) + 'px;'); - el.addEventListener('click', function() { - if (!el.classList.contains('selected')) { - selectAnnotation(annotation.id) - Ox.$parent.postMessage('selectAnnotation', {id: annotation.id}) - } - }); - pageElement.appendChild(el); + el.addEventListener('click', function() { + if 
(!el.classList.contains('selected')) { + selectAnnotation(annotation.id) + Ox.$parent.postMessage('selectAnnotation', {id: annotation.id}) + } }); - } else { - // console.log("annotation without position", annotation) - } + pageElement.appendChild(el); + }); } function addAnnotation(annotation) { @@ -268,11 +169,6 @@ function selectAnnotation(id) { g.classList.add('selected') g.style.backgroundColor = 'blue' }) - annotations.forEach(a => { - if (a.id == id && a.type == HIGHLIGHT) { - renderHighlights(a.page) - } - }) } function deselectAnnotation(id) { @@ -311,7 +207,7 @@ function loadAnnotations(page) { e.remove() }) annotations.filter(function(a) { - return a.page == page && !a.type == HIGHLIGHT + return a.page == page }).forEach(function(annot) { renderAnnotation(annot) }) @@ -324,145 +220,3 @@ function isInView(element) { var elementBottom = elementTop + $(element).height(); return elementTop < docViewBottom && elementBottom > docViewTop; } - -function renderHighlightSelectionOverlay(root, documentId, page, source) { - var canvas = document.createElement('canvas') - root.appendChild(canvas) - canvas.width = canvas.clientWidth - canvas.height = canvas.clientHeight - var ctx = canvas.getContext('2d'); - var viewerContainer = document.querySelector('#viewerContainer') - var bounds = root.getBoundingClientRect(); - var base = 2048 - var scale = Math.max(bounds.height, bounds.width) / base - var last_mousex = last_mousey = 0; - var mousex = mousey = 0; - var mousedown = false; - var p = { - top: 0, - left: 0, - bottom: 0, - right: 0 - } - var inside = false - - canvas.addEventListener('mousedown', function(e) { - if (inside) { - const coords = [ - [p.left, p.top, p.right, p.bottom] - ] - addAnnotation({ - type: HIGHLIGHT, - id: Ox.SHA1(pageNumber.toString() + JSON.stringify(p)), - text: "", - page: parseInt(page), - pageLabel: source.pageLabel, - coords: coords, - }) - return - } - let bounds = root.getBoundingClientRect(); - last_mousex = e.clientX - bounds.left; - last_mousey = e.clientY - bounds.top; - p.top = parseInt(last_mousey / scale) - p.left = parseInt(last_mousex / scale) - mousedown = true; - }); - - document.addEventListener('mouseup', function(e) { - if (mousedown) { - mousedown = false; - p.bottom = parseInt(mousey / scale) - p.right = parseInt(mousex / scale) - - if (p.top > p.bottom) { - var t = p.top - p.top = p.bottom - p.bottom = t - } - if (p.left > p.right) { - var t = p.left - p.left = p.right - p.right = t - } - /* - var url = `${baseUrl}/documents/${documentId}/2048p${page},${p.left},${p.top},${p.right},${p.bottom}.jpg` - info.url = `${baseUrl}/document/${documentId}/${page}` - info.page = page - if (p.left != p.right && p.top != p.bottom) { - var context = formatOutput(info, url) - copyToClipboard(context) - addToRecent({ - document: documentId, - page: parseInt(page), - title: info.title, - type: 'fragment', - link: `${baseUrl}/documents/${documentId}/${page}`, - src: url - }) - } - */ - } - }); - - canvas.addEventListener('mousemove', function(e) { - let bounds = root.getBoundingClientRect(); - mousex = e.clientX - bounds.left; - mousey = e.clientY - bounds.top; - - if(mousedown) { - ctx.clearRect(0, 0, canvas.clientWidth, canvas.clientHeight) - ctx.beginPath() - var width = mousex - last_mousex - var height = mousey - last_mousey - ctx.rect(last_mousex, last_mousey, width, height) - ctx.strokeStyle = 'black'; - ctx.lineWidth = 2; - ctx.stroke(); - } else { - let py = parseInt(mousey / scale) - let px = parseInt(mousex / scale) - if (py > p.top && py < p.bottom 
&& px > p.left && px < p.right) { - inside = true - canvas.style.cursor = 'pointer' - canvas.title = 'Click to add highlight' - } else { - inside = false - canvas.style.cursor = '' - canvas.title = '' - } - } - }); -} -function renderHighlights(page) { - var pageAnnotations = annotations.filter(annotation => { - return annotation.type == HIGHLIGHT && (!page || (annotation.page == page)) - }) - pageAnnotations.forEach(annotation => { - let page = annotation.page - var canvas = document.querySelector(`#highlights${page} canvas`) - if (canvas) { - canvas.width = canvas.clientWidth - canvas.height = canvas.clientHeight - var ctx = canvas.getContext('2d'); - var viewerContainer = document.querySelector('#viewerContainer') - var bounds = canvas.parentElement.getBoundingClientRect(); - var base = 2048 - var scale = Math.max(bounds.height, bounds.width) / base - ctx.clearRect(0, 0, canvas.clientWidth, canvas.clientHeight) - pageAnnotations.forEach(annotation => { - ctx.beginPath() - ctx.strokeStyle = annotation.id == selectedAnnotation ? 'blue' : 'yellow'; - ctx.lineWidth = 2; - annotation.coords.forEach(coord => { - const width = coord[2] - coord[0], - height = coord[3] - coord[1]; - ctx.rect(coord[0] * scale, coord[1] * scale, width * scale, height * scale) - }) - ctx.stroke(); - }) - - } - }) - -} diff --git a/static/reader/pdf/toolbarButton-crop.png b/static/reader/pdf/toolbarButton-crop.png deleted file mode 100644 index 00c56d5..0000000 Binary files a/static/reader/pdf/toolbarButton-crop.png and /dev/null differ diff --git a/static/reader/pdf/toolbarButton-crop.svg b/static/reader/pdf/toolbarButton-crop.svg deleted file mode 100644 index 618195a..0000000 --- a/static/reader/pdf/toolbarButton-crop.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/static/reader/pdf/toolbarButton-crop@10.png b/static/reader/pdf/toolbarButton-crop@10.png deleted file mode 100644 index 7cc05e5..0000000 Binary files a/static/reader/pdf/toolbarButton-crop@10.png and /dev/null differ diff --git a/static/reader/pdf/toolbarButton-crop@2x.png b/static/reader/pdf/toolbarButton-crop@2x.png deleted file mode 100644 index 950d275..0000000 Binary files a/static/reader/pdf/toolbarButton-crop@2x.png and /dev/null differ
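On the id-length change visible in static/js/utils.js (validatePublicKey dropping from 56 to 16 characters): both lengths fall straight out of the base32 encodings used above. A quick check, assuming standard padding-free base32 lengths:

    import base64

    # RSA-derived service ids encode 10 bytes (SHA-1 truncated to 80 bits) -> 16 chars;
    # Ed25519-derived onion ids encode 32-byte pubkey + 2-byte checksum + 1 version byte -> 56 chars
    assert len(base64.b32encode(b"\x00" * 10)) == 16
    assert len(base64.b32encode(b"\x00" * 35)) == 56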
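The highlight feature removed from static/js/annotation.js and static/reader/pdf.js stores rectangle coordinates against a 2048px rendering of the page and turns them into a crop-image URL. A sketch of that URL construction (crop_url is a hypothetical helper name; the path layout follows the removed annotation.js code):

    def crop_url(item_id, page, left, top, right, bottom):
        # e.g. /abc123/2048p4,100,200,600,350.jpg -- coordinates are in the 2048px base space
        return "/%s/2048p%d,%d,%d,%d,%d.jpg" % (item_id, page, left, top, right, bottom)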