port to python3

j 2014-09-03 00:32:44 +02:00
parent 14f426afd4
commit 8e27b9f76e
51 changed files with 272 additions and 248 deletions

View file

@ -39,9 +39,9 @@ To update to latest version:
./ctl update ./ctl update
On Linux you need a working python2 installation with PIL, pyhon-lxml and poppler-utils: On Linux you need a working python2 installation with pillow, pyhon-lxml and poppler-utils:
apt-get install python2.7 python-imaging python-lxml poppler-utils apt-get install python3.4 python3-pil python3-lxml poppler-utils
Platform Platform

ctl
View file

@ -26,7 +26,7 @@ export SHARED_ENV
PATH="$SHARED_ENV/bin:$PATH" PATH="$SHARED_ENV/bin:$PATH"
export PATH export PATH
PYTHONPATH="$PLATFORM_ENV/lib/python2.7/site-packages:$SHARED_ENV/lib/python2.7/site-packages:$BASE/$NAME" PYTHONPATH="$PLATFORM_ENV/lib/python3.4/site-packages:$SHARED_ENV/lib/python3.4/site-packages:$BASE/$NAME"
export PYTHONPATH export PYTHONPATH
oxCACHE="$BASE/config/ox" oxCACHE="$BASE/config/ox"
@ -45,10 +45,10 @@ if [ "$1" == "start" ]; then
exit 1 exit 1
fi fi
if [ ! -d "$BASE/$NAME/.git" ]; then if [ ! -d "$BASE/$NAME/.git" ]; then
python2 oml install_update python3 oml install_update
cd "$BASE/$NAME" cd "$BASE/$NAME"
fi fi
python2 oml server $PID python3 oml server $PID
rm -f $PID rm -f $PID
exit $? exit $?
fi fi
@ -59,7 +59,7 @@ if [ "$1" == "debug" ]; then
exit 1 exit 1
fi fi
shift shift
python2 oml server $@ python3 oml server $@
exit $? exit $?
fi fi
if [ "$1" == "stop" ]; then if [ "$1" == "stop" ]; then
@ -89,7 +89,7 @@ if [ "$1" == "open" ]; then
fi fi
if [ "$1" == "ui" ]; then if [ "$1" == "ui" ]; then
shift shift
python2 $NAME/oml/ui.py $@ python3 $NAME/oml/ui.py $@
exit $? exit $?
fi fi
if [ "$1" == "update" ]; then if [ "$1" == "update" ]; then
@ -107,17 +107,17 @@ if [ "$1" == "update" ]; then
NEW=`"$0" version` NEW=`"$0" version`
"$0" postupdate -o $OLD -n $NEW "$0" postupdate -o $OLD -n $NEW
else else
python2 oml update python3 oml update
fi fi
exit $? exit $?
fi fi
if [ "$1" == "python" ]; then if [ "$1" == "python" ]; then
cd "$BASE/$NAME" cd "$BASE/$NAME"
shift shift
python2 $@ python3 $@
exit $? exit $?
fi fi
cd "$BASE/$NAME" cd "$BASE/$NAME"
python2 oml $@ python3 oml $@
exit $? exit $?

View file

@ -1,7 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, print_function
import sys import sys

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import subprocess import subprocess
import json import json
@ -67,7 +67,7 @@ def autocompleteFolder(data):
else: else:
folder, name = os.path.split(path) folder, name = os.path.split(path)
if os.path.exists(folder): if os.path.exists(folder):
prefix, folders, files = os.walk(folder).next() prefix, folders, files = next(os.walk(folder))
folders = [os.path.join(prefix, f) for f in folders if (not name or f.startswith(name)) and not f.startswith('.')] folders = [os.path.join(prefix, f) for f in folders if (not name or f.startswith(name)) and not f.startswith('.')]
if prefix == path: if prefix == path:
folders = [path] + folders folders = [path] + folders
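The change in this hunk is the removal of the Python 2-only .next() method on generators: Python 3 code calls the next() builtin instead. A minimal sketch of the same pattern, using a placeholder path:

import os

# Python 3: generator objects have no .next() method, use the next() builtin
prefix, folders, files = next(os.walk('/tmp'))
print(prefix, folders, files)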

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime from datetime import datetime
import json import json
@ -100,7 +100,7 @@ class Changelog(db.Model):
return True return True
else: else:
logger.debug('INVLAID SIGNATURE ON CHANGE %s', change) logger.debug('INVLAID SIGNATURE ON CHANGE %s', change)
raise Exception, 'invalid signature' raise Exception('invalid signature')
else: else:
logger.debug('revsion does not match! got %s expecting %s', revision, next_revision) logger.debug('revsion does not match! got %s expecting %s', revision, next_revision)
return False return False
@ -168,7 +168,7 @@ class Changelog(db.Model):
if i.timestamp > timestamp: if i.timestamp > timestamp:
logger.debug('ignore edititem change %s %s %s', timestamp, itemid, meta) logger.debug('ignore edititem change %s %s %s', timestamp, itemid, meta)
return True return True
keys = filter(lambda k: k in Item.id_keys, meta.keys()) keys = [k for k in list(meta.keys()) if k in Item.id_keys]
if keys: if keys:
key = keys[0] key = keys[0]
primary = [key, meta[key]] primary = [key, meta[key]]
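Two separate Python 3 issues are fixed in these hunks: the statement form raise Exception, 'msg' no longer parses (exceptions are raised with a call), and filter() now returns a lazy iterator, which the code replaces with a list comprehension so the result can be indexed. A small illustration with made-up metadata:

meta = {'isbn': '9780140449136', 'title': 'Example'}
id_keys = ['isbn', 'asin']

# list comprehension instead of filter(), so keys[0] below still works
keys = [k for k in meta.keys() if k in id_keys]
if not keys:
    raise Exception('no primary id found')   # call form, not "raise Exception, 'msg'"
primary = [keys[0], meta[keys[0]]]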

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import subprocess import subprocess
from os.path import join, exists, dirname from os.path import join, exists, dirname
@ -22,7 +22,7 @@ def get(*cmd):
return stdout return stdout
def r(*cmd): def r(*cmd):
print ' '.join(cmd) print(' '.join(cmd))
return subprocess.call(cmd) return subprocess.call(cmd)
def version(module): def version(module):
@ -40,7 +40,7 @@ def command_version(*args):
""" """
Print current version Print current version
""" """
print version('openmedialibrary') print(version('openmedialibrary'))
def command_debug(*args): def command_debug(*args):
""" """
@ -66,7 +66,7 @@ def command_install_update(*args):
""" """
import update import update
if not update.install(): if not update.install():
print "UPDATE FAILED" print("UPDATE FAILED")
sys.exit(1) sys.exit(1)
def command_update(*args): def command_update(*args):
@ -75,7 +75,7 @@ def command_update(*args):
""" """
import update import update
if not (update.download() and update.install()): if not (update.download() and update.install()):
print "UPDATE FAILED" print("UPDATE FAILED")
def command_postupdate(*args): def command_postupdate(*args):
""" """
@ -84,7 +84,7 @@ def command_postupdate(*args):
def run(*args): def run(*args):
o, old, n, new = args o, old, n, new = args
if o != '-o' or n != '-n': if o != '-o' or n != '-n':
print 'usage: -o oldversion -n newversion' print('usage: -o oldversion -n newversion')
sys.exit(1) sys.exit(1)
if old <= '20140521-65-e14c686' and new > '20140521-65-e14c686': if old <= '20140521-65-e14c686' and new > '20140521-65-e14c686':
if not os.path.exists(settings.db_path): if not os.path.exists(settings.db_path):
@ -117,7 +117,7 @@ def command_release(*args):
""" """
Release new version Release new version
""" """
print 'checking...' print('checking...')
import os import os
import json import json
import hashlib import hashlib
@ -171,7 +171,7 @@ def command_release(*args):
sign(release) sign(release)
with open('updates/release.json', 'w') as fd: with open('updates/release.json', 'w') as fd:
json.dump(release, fd, indent=2) json.dump(release, fd, indent=2)
print 'signed latest release in updates/release.json' print('signed latest release in updates/release.json')
def command_shell(*args): def command_shell(*args):
''' '''
@ -223,5 +223,5 @@ def main():
info = actions["command_%s"%command].__doc__.split('\n') info = actions["command_%s"%command].__doc__.split('\n')
info = [' %s%s' % (' ' * indent, i.strip()) for i in info] info = [' %s%s' % (' ' * indent, i.strip()) for i in info]
info = '\n'.join(info).strip() info = '\n'.join(info).strip()
print(" %s%s%s" % (command, space, info)) print((" %s%s%s" % (command, space, info)))
sys.exit(1) sys.exit(1)

View file

@ -1,13 +1,14 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
# DHT placeholder # DHT placeholder
from __future__ import division
import logging import logging
import requests import requests
import ed25519 import ed25519
import json import json
import settings import settings
logger = logging.getLogger('oml.directory') logger = logging.getLogger('oml.directory')
@ -27,7 +28,7 @@ def get(vk):
vk = ed25519.VerifyingKey(id, encoding='base64') vk = ed25519.VerifyingKey(id, encoding='base64')
try: try:
vk.verify(sig, data, encoding='base64') vk.verify(sig, data, encoding='base64')
data = json.loads(data) data = json.loads(data.decode('utf-8'))
except ed25519.BadSignatureError: except ed25519.BadSignatureError:
logger.debug('invalid signature') logger.debug('invalid signature')
@ -36,7 +37,7 @@ def get(vk):
def put(sk, data): def put(sk, data):
id = sk.get_verifying_key().to_ascii(encoding='base64') id = sk.get_verifying_key().to_ascii(encoding='base64')
data = json.dumps(data) data = json.dumps(data).encode('utf-8')
sig = sk.sign(data, encoding='base64') sig = sk.sign(data, encoding='base64')
url ='%s/%s' % (base, id) url ='%s/%s' % (base, id)
headers = { headers = {
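json.dumps() returns str in Python 3 while the ed25519 signing and verification calls operate on bytes, hence the .encode('utf-8') on put and the .decode('utf-8') before json.loads() on get. A rough sketch of the round trip with the ed25519 package imported above:

import json
import ed25519

sk, vk = ed25519.create_keypair()
data = json.dumps({'host': 'example.com', 'port': 9851}).encode('utf-8')   # bytes for signing
sig = sk.sign(data, encoding='base64')
vk.verify(sig, data, encoding='base64')     # raises ed25519.BadSignatureError if tampered with
info = json.loads(data.decode('utf-8'))     # json.loads on Python 3.4 wants str, not bytes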

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from threading import Thread from threading import Thread
import time import time

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import json import json
import hashlib import hashlib
@ -11,8 +11,8 @@ from oxtornado import actions
from utils import cleanup_id from utils import cleanup_id
from websocket import trigger_event from websocket import trigger_event
import metaremote as meta import metaremote as meta
import models from . import models
import query from . import query
import settings import settings
import state import state
import utils import utils
@ -183,7 +183,7 @@ def getMetadata(data):
include_edits = data.pop('includeEdits') include_edits = data.pop('includeEdits')
else: else:
include_edits = False include_edits = False
key, value = data.iteritems().next() key, value = next(iter(data.items()))
value = cleanup_id(key, value) value = cleanup_id(key, value)
response = meta.lookup(key, value) response = meta.lookup(key, value)
if include_edits: if include_edits:
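dict.iteritems() is gone and dict.items() returns a view, so fetching the single key/value pair from the request goes through next(iter(...)). For example, with a one-entry payload:

data = {'isbn': '9780140449136'}          # example request payload
key, value = next(iter(data.items()))     # replaces data.iteritems().next()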

View file

@ -1,13 +1,13 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from datetime import datetime from datetime import datetime
import mimetypes import mimetypes
import os import os
import zipfile import zipfile
from models import Item from .models import Item
import db import db
import settings import settings
import tornado.web import tornado.web
@ -46,7 +46,7 @@ def serve_static(handler, path, mimetype, include_body=True):
handler.set_header('Content-Type', mimetype) handler.set_header('Content-Type', mimetype)
handler.set_header('Content-Length', str(os.stat(path).st_size)) handler.set_header('Content-Length', str(os.stat(path).st_size))
if include_body: if include_body:
with open(path) as fd: with open(path, 'rb') as fd:
handler.write(fd.read()) handler.write(fd.read())
return return

View file

@ -1,8 +1,8 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from StringIO import StringIO
from io import BytesIO
from PIL import Image from PIL import Image
import sqlite3 import sqlite3
@ -32,30 +32,30 @@ class Icons(dict):
def create(self): def create(self):
conn = self.connect() conn = self.connect()
c = conn.cursor() c = conn.cursor()
c.execute(u'CREATE TABLE IF NOT EXISTS icon (id varchar(64) unique, data blob)') c.execute('CREATE TABLE IF NOT EXISTS icon (id varchar(64) unique, data blob)')
c.execute(u'CREATE TABLE IF NOT EXISTS setting (key varchar(256) unique, value text)') c.execute('CREATE TABLE IF NOT EXISTS setting (key varchar(256) unique, value text)')
if int(self.get_setting(c, 'version', 0)) < 1: if int(self.get_setting(c, 'version', 0)) < 1:
self.set_setting(c, 'version', 1) self.set_setting(c, 'version', 1)
def get_setting(self, c, key, default=None): def get_setting(self, c, key, default=None):
c.execute(u'SELECT value FROM setting WHERE key = ?', (key, )) c.execute('SELECT value FROM setting WHERE key = ?', (key, ))
for row in c: for row in c:
return row[0] return row[0]
return default return default
def set_setting(self, c, key, value): def set_setting(self, c, key, value):
c.execute(u'INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value))) c.execute('INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value)))
def black(self): def black(self):
img = Image.new('RGB', (80, 128)) img = Image.new('RGB', (80, 128))
o = StringIO() o = BytesIO()
img.save(o, format='jpeg') img.save(o, format='jpeg')
data = o.getvalue() data = o.getvalue()
o.close() o.close()
return data return data
def __getitem__(self, id, default=None): def __getitem__(self, id, default=None):
sql = u'SELECT data FROM icon WHERE id=?' sql = 'SELECT data FROM icon WHERE id=?'
conn = self.connect() conn = self.connect()
c = conn.cursor() c = conn.cursor()
c.execute(sql, (id, )) c.execute(sql, (id, ))
@ -68,7 +68,7 @@ class Icons(dict):
return data return data
def __setitem__(self, id, data): def __setitem__(self, id, data):
sql = u'INSERT OR REPLACE INTO icon values (?, ?)' sql = 'INSERT OR REPLACE INTO icon values (?, ?)'
conn = self.connect() conn = self.connect()
c = conn.cursor() c = conn.cursor()
data = sqlite3.Binary(data) data = sqlite3.Binary(data)
@ -78,7 +78,7 @@ class Icons(dict):
conn.close() conn.close()
def __delitem__(self, id): def __delitem__(self, id):
sql = u'DELETE FROM icon WHERE id = ?' sql = 'DELETE FROM icon WHERE id = ?'
conn = self.connect() conn = self.connect()
c = conn.cursor() c = conn.cursor()
c.execute(sql, (id, )) c.execute(sql, (id, ))
@ -94,7 +94,7 @@ def get_icon(id, type_, size, callback):
skey = '%s:%s:%s' % (type_, id, size) skey = '%s:%s:%s' % (type_, id, size)
data = icons[skey] data = icons[skey]
if data: if data:
callback(str(data)) callback(bytes(data))
return return
key = '%s:%s' % (type_, id) key = '%s:%s' % (type_, id)
data = icons[key] data = icons[key]
@ -114,7 +114,7 @@ def get_icon(id, type_, size, callback):
size = None size = None
if size: if size:
data = icons[skey] = resize_image(data, size=size) data = icons[skey] = resize_image(data, size=size)
data = str(data) or '' data = bytes(data) or ''
callback(data) callback(data)
@run_async @run_async
@ -144,7 +144,7 @@ def get_icon_app(id, type_, size, callback):
size = None size = None
if size: if size:
data = icons[skey] = resize_image(data, size=size) data = icons[skey] = resize_image(data, size=size)
data = str(data) or '' data = bytes(data) or ''
callback(data) callback(data)
class IconHandler(tornado.web.RequestHandler): class IconHandler(tornado.web.RequestHandler):
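StringIO only accepts text in Python 3, so the JPEG data produced by PIL is buffered in io.BytesIO, and the blob coming back from sqlite3 is converted with bytes() rather than str(). A minimal sketch of the image side:

from io import BytesIO
from PIL import Image

img = Image.new('RGB', (80, 128))
o = BytesIO()                  # binary buffer; StringIO would raise TypeError here
img.save(o, format='jpeg')
data = o.getvalue()            # bytes, ready to store as a sqlite3 blob
o.close()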

View file

@ -1,9 +1,9 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime from datetime import datetime
from StringIO import StringIO from io import StringIO
import base64 import base64
import hashlib import hashlib
import json import json
@ -18,8 +18,9 @@ import sqlalchemy as sa
from changelog import Changelog from changelog import Changelog
from db import MutableDict from db import MutableDict
from icons import icons import json_pickler
from person import get_sort_name from .icons import icons
from .person import get_sort_name
from settings import config from settings import config
from utils import remove_empty_folders from utils import remove_empty_folders
from websocket import trigger_event from websocket import trigger_event
@ -46,8 +47,8 @@ class Item(db.Model):
id = sa.Column(sa.String(32), primary_key=True) id = sa.Column(sa.String(32), primary_key=True)
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json))) info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
meta = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json))) meta = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
# why is this in db and not in i.e. info? # why is this in db and not in i.e. info?
added = sa.Column(sa.DateTime()) # added to local library added = sa.Column(sa.DateTime()) # added to local library
@ -104,7 +105,7 @@ class Item(db.Model):
if t: if t:
j['transferadded'] = t.added j['transferadded'] = t.added
j['transferprogress'] = t.progress j['transferprogress'] = t.progress
j['users'] = map(str, list(self.users)) j['users'] = list(map(str, list(self.users)))
if self.info: if self.info:
j.update(self.info) j.update(self.info)
@ -115,7 +116,7 @@ class Item(db.Model):
if key not in self.meta and key in j: if key not in self.meta and key in j:
del j[key] del j[key]
if keys: if keys:
for k in j.keys(): for k in list(j.keys()):
if k not in keys: if k not in keys:
del j[k] del j[k]
return j return j
@ -140,19 +141,19 @@ class Item(db.Model):
elif sort_type == 'name': elif sort_type == 'name':
if not isinstance(value, list): if not isinstance(value, list):
value = [value] value = [value]
value = map(get_sort_name, value) value = list(map(get_sort_name, value))
value = ox.sort_string(u'\n'.join(value)) value = ox.sort_string('\n'.join(value))
elif sort_type == 'title': elif sort_type == 'title':
if isinstance(value, dict): if isinstance(value, dict):
value = value.values() value = list(value.values())
if isinstance(value, list): if isinstance(value, list):
value = u''.join(value) value = ''.join(value)
value = utils.sort_title(value).lower() value = utils.sort_title(value).lower()
else: else:
if isinstance(value, list): if isinstance(value, list):
value = u'\n'.join(value) value = '\n'.join(value)
if value: if value:
value = unicode(value) value = str(value)
value = ox.sort_string(value).lower() value = ox.sort_string(value).lower()
elif isinstance(value, list): #empty list elif isinstance(value, list): #empty list
value = '' value = ''
@ -178,7 +179,7 @@ class Item(db.Model):
if value: if value:
Find.query.filter_by(item_id=self.id, key=key['id']).delete() Find.query.filter_by(item_id=self.id, key=key['id']).delete()
if isinstance(value, dict): if isinstance(value, dict):
value = ' '.join(value.values()) value = ' '.join(list(value.values()))
if not isinstance(value, list): if not isinstance(value, list):
value = [value] value = [value]
for v in value: for v in value:
@ -194,7 +195,7 @@ class Item(db.Model):
if key not in self.info: if key not in self.info:
self.info[key] = self.meta[key] self.info[key] = self.meta[key]
del self.meta[key] del self.meta[key]
users = map(str, list(self.users)) users = list(map(str, list(self.users)))
self.info['mediastate'] = 'available' # available, unavailable, transferring self.info['mediastate'] = 'available' # available, unavailable, transferring
t = Transfer.get(self.id) t = Transfer.get(self.id)
if t and t.added and t.progress < 1: if t and t.added and t.progress < 1:
@ -230,7 +231,7 @@ class Item(db.Model):
record[key] = data[key] record[key] = data[key]
self.meta[key] = data[key] self.meta[key] = data[key]
update = True update = True
for key in self.meta.keys(): for key in list(self.meta.keys()):
if key not in self.meta_keys: if key not in self.meta_keys:
del self.meta[key] del self.meta[key]
update = True update = True
@ -446,7 +447,7 @@ class Find(db.Model):
findvalue = sa.Column(sa.Text(), index=True) findvalue = sa.Column(sa.Text(), index=True)
def __repr__(self): def __repr__(self):
return (u'%s=%s' % (self.key, self.findvalue)).encode('utf-8') return ('%s=%s' % (self.key, self.findvalue)).encode('utf-8')
@classmethod @classmethod
def get(cls, item, key): def get(cls, item, key):
@ -470,7 +471,7 @@ class File(db.Model):
sha1 = sa.Column(sa.String(32), primary_key=True) sha1 = sa.Column(sa.String(32), primary_key=True)
path = sa.Column(sa.String(2048)) path = sa.Column(sa.String(2048))
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json))) info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id')) item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'))
item = sa.orm.relationship('Item', backref=sa.orm.backref('files', lazy='dynamic')) item = sa.orm.relationship('Item', backref=sa.orm.backref('files', lazy='dynamic'))
@ -591,7 +592,7 @@ class Metadata(db.Model):
key = sa.Column(sa.String(256)) key = sa.Column(sa.String(256))
value = sa.Column(sa.String(256)) value = sa.Column(sa.String(256))
data = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json))) data = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
def __repr__(self): def __repr__(self):
return '='.join([self.key, self.value]) return '='.join([self.key, self.value])
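Most edits in this file are the standard Python 3 substitutions: unicode becomes str, map() is wrapped in list() where a real list is needed, and dict views are copied with list() before keys are deleted during iteration. A sketch of the delete-while-iterating pattern, with example data:

j = {'title': 'A Book', 'author': ['Someone'], 'internal': 1}
keys = ['title', 'author']

# iterate over a snapshot; deleting from the live view raises RuntimeError in Python 3
for k in list(j.keys()):
    if k not in keys:
        del j[k]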

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import unicodedata import unicodedata
@ -19,7 +19,7 @@ def get_sort_name(name, sortname=None):
person.save() person.save()
sortname = unicodedata.normalize('NFKD', person.sortname) sortname = unicodedata.normalize('NFKD', person.sortname)
else: else:
sortname = u'' sortname = ''
return sortname return sortname
class Person(db.Model): class Person(db.Model):

View file

@ -1,12 +1,12 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
#does not work in sqlite #does not work in sqlite
#from sqlalchemy.sql.expression import nullslast #from sqlalchemy.sql.expression import nullslast
from queryparser import Parser from queryparser import Parser
import models from . import models
import settings import settings
import utils import utils

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime from datetime import datetime
import os import os
@ -67,7 +67,7 @@ def run_scan():
prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/') prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
if not prefix[-1] == '/': if not prefix[-1] == '/':
prefix += '/' prefix += '/'
assert isinstance(prefix, unicode) assert isinstance(prefix, str)
books = [] books = []
for root, folders, files in os.walk(prefix): for root, folders, files in os.walk(prefix):
for f in files: for f in files:
@ -121,7 +121,7 @@ def run_import(options=None):
listname = options.get('list') listname = options.get('list')
if listname: if listname:
listitems = [] listitems = []
assert isinstance(prefix, unicode) assert isinstance(prefix, str)
books = [] books = []
count = 0 count = 0
for root, folders, files in os.walk(prefix): for root, folders, files in os.walk(prefix):

oml/json_pickler.py (new file)
View file

@ -0,0 +1,11 @@
import json
def loads(*args, **kargs):
#print('loads', args, kargs)
if isinstance(args[0], bytes):
args = (args[0].decode('utf-8'),) + args[1:]
return json.loads(*args, **kargs)
def dumps(*args, **kargs):
#print('dumps', args, kargs)
return json.dumps(*args, **kargs).encode('utf-8')
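The new oml/json_pickler.py exists because SQLAlchemy's PickleType expects a pickler object with dumps()/loads() that round-trips through bytes, while the standard json module returns str from dumps() under Python 3. A quick check of what the wrapper guarantees:

import json_pickler

blob = json_pickler.dumps({'title': 'A Book'})        # bytes, as PickleType stores blobs
assert isinstance(blob, bytes)
assert json_pickler.loads(blob) == {'title': 'A Book'}

The models then pass it as sa.PickleType(pickler=json_pickler), as shown in the item.py hunks above.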

View file

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import json import json
import socket import socket
import struct import struct
import thread import _thread
from threading import Thread from threading import Thread
import time import time
@ -57,8 +57,8 @@ class LocalNodesBase(Thread):
'port': server['node_port'], 'port': server['node_port'],
'cert': server['cert'] 'cert': server['cert']
}) })
sig = sk.sign(message, encoding='base64') sig = sk.sign(message.encode('utf-8'), encoding='base64')
packet = json.dumps([sig, USER_ID, message]) packet = json.dumps([sig, USER_ID, message]).encode('utf-8')
else: else:
packet = None packet = None
return packet return packet
@ -89,7 +89,7 @@ class LocalNodesBase(Thread):
now = time.mktime(time.localtime()) now = time.mktime(time.localtime())
if now - last > 60: if now - last > 60:
last = now last = now
thread.start_new_thread(self.send, ()) _thread.start_new_thread(self.send, ())
except: except:
if self._active: if self._active:
logger.debug('receive failed. restart later', exc_info=1) logger.debug('receive failed. restart later', exc_info=1)
@ -115,7 +115,7 @@ class LocalNodesBase(Thread):
#print addr #print addr
if data['id'] != USER_ID: if data['id'] != USER_ID:
if data['id'] not in self._nodes: if data['id'] not in self._nodes:
thread.start_new_thread(self.new_node, (data, )) _thread.start_new_thread(self.new_node, (data, ))
elif can_connect(data): elif can_connect(data):
self._nodes[data['id']] = data self._nodes[data['id']] = data
@ -166,7 +166,7 @@ class LocalNodes4(LocalNodesBase):
s = socket.socket (socket.AF_INET, socket.SOCK_DGRAM) s = socket.socket (socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt (socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self._TTL) s.setsockopt (socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self._TTL)
try: try:
s.sendto(packet + '\0', sockaddr) s.sendto(packet + b'\0', sockaddr)
except: except:
logger.debug('LocalNodes4.send failed', exc_info=1) logger.debug('LocalNodes4.send failed', exc_info=1)
s.close() s.close()
@ -198,7 +198,7 @@ class LocalNodes6(LocalNodesBase):
s = socket.socket(family, socktype, proto) s = socket.socket(family, socktype, proto)
s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, ttl) s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, ttl)
try: try:
s.sendto(packet + '\0', sockaddr) s.sendto(packet + b'\0', sockaddr)
except: except:
logger.debug('LocalNodes6.send failed', exc_info=1) logger.debug('LocalNodes6.send failed', exc_info=1)
s.close() s.close()
@ -206,7 +206,7 @@ class LocalNodes6(LocalNodesBase):
def get_socket(self): def get_socket(self):
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
group_bin = socket.inet_pton(socket.AF_INET6, self._BROADCAST) + '\0'*4 group_bin = socket.inet_pton(socket.AF_INET6, self._BROADCAST) + b'\0'*4
s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, group_bin) s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, group_bin)
self._socket = s self._socket = s
return s return s
@ -229,7 +229,7 @@ class LocalNodes(object):
def cleanup(self): def cleanup(self):
if self._active: if self._active:
for id in self._nodes.keys(): for id in list(self._nodes.keys()):
if not can_connect(self._nodes[id]): if not can_connect(self._nodes[id]):
del self._nodes[id] del self._nodes[id]
if not self._active: if not self._active:
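Besides the thread to _thread rename, the multicast announcements now have to be bytes: the signed JSON packet is encoded as UTF-8 and the trailing terminator becomes b'\0'. A small sketch of the send side, with a placeholder group and port:

import json
import socket

message = json.dumps({'username': 'node', 'port': 9851})
packet = message.encode('utf-8')                    # sockets only accept bytes in Python 3

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.sendto(packet + b'\0', ('239.255.0.1', 9851))     # placeholder multicast group and port
s.close()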

View file

@ -1,23 +1,24 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import base64 import base64
import hashlib import hashlib
import os import os
import codecs
import ox import ox
import pdf from . import pdf
import epub from . import epub
import txt from . import txt
import opf from . import opf
def get_id(f=None, data=None): def get_id(f=None, data=None):
if data: if data:
return base64.b32encode(hashlib.sha1(data).digest()) return base64.b32encode(hashlib.sha1(data).digest()).decode()
else: else:
return base64.b32encode(ox.sha1sum(f, cached=True).decode('hex')) return base64.b32encode(codecs.decode(ox.sha1sum(f, cached=True), 'hex')).decode()
def metadata(f, from_=None): def metadata(f, from_=None):
@ -55,16 +56,16 @@ def metadata(f, from_=None):
if key in opf_info: if key in opf_info:
data[key] = opf_info[key] data[key] = opf_info[key]
if key in data: if key in data:
if isinstance(data[key], basestring): if isinstance(data[key], str):
data[key] = data[key].replace('\x00', '') data[key] = data[key].replace('\x00', '')
elif isinstance(data[key], list): elif isinstance(data[key], list):
data[key] = [e.replace('\x00', '') if isinstance(e, basestring) else e for e in data[key]] data[key] = [e.replace('\x00', '') if isinstance(e, str) else e for e in data[key]]
if 'isbn' in data: if 'isbn' in data:
data['primaryid'] = ['isbn', data['isbn'][0]] data['primaryid'] = ['isbn', data['isbn'][0]]
elif 'asin' in data: elif 'asin' in data:
data['primaryid'] = ['asin', data['asin'][0]] data['primaryid'] = ['asin', data['asin'][0]]
if 'author' in data: if 'author' in data:
if isinstance(data['author'], basestring): if isinstance(data['author'], str):
if data['author'].strip(): if data['author'].strip():
data['author'] = data['author'].strip().split('; ') data['author'] = data['author'].strip().split('; ')
else: else:
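base64.b32encode() and hashlib digests deal in bytes under Python 3, and the old 'hex' codec on str objects is gone, so the id helpers add .decode() on the result and route the hex step through codecs.decode(). The pattern, roughly:

import base64
import codecs
import hashlib

data = b'example file contents'
item_id = base64.b32encode(hashlib.sha1(data).digest()).decode()         # str id, not bytes

hex_digest = hashlib.sha1(data).hexdigest()                              # what ox.sha1sum returns
same_id = base64.b32encode(codecs.decode(hex_digest, 'hex')).decode()
assert item_id == same_id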

View file

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import os import os
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
import zipfile import zipfile
from StringIO import StringIO from io import StringIO
import re import re
from PIL import Image from PIL import Image

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import sys import sys
import tempfile import tempfile
@ -9,7 +9,7 @@ import os
import shutil import shutil
from glob import glob from glob import glob
from pyPdf import PdfFileReader #from pyPdf import PdfFileReader
import stdnum.isbn import stdnum.isbn
import settings import settings
@ -139,9 +139,9 @@ def info(pdf):
if stdnum.isbn.is_valid(value): if stdnum.isbn.is_valid(value):
data['isbn'] = [value] data['isbn'] = [value]
del data['identifier'] del data['identifier']
for key, value in data.iteritems(): for key, value in data.items():
if isinstance(value, dict): if isinstance(value, dict):
value = ' '.join(value.values()) value = ' '.join(list(value.values()))
data[key] = value data[key] = value
text = extract_text(pdf) text = extract_text(pdf)
data['textsize'] = len(text) data['textsize'] = len(text)
@ -150,7 +150,7 @@ def info(pdf):
isbn = extract_isbn(text) isbn = extract_isbn(text)
if isbn: if isbn:
data['isbn'] = [isbn] data['isbn'] = [isbn]
if 'isbn' in data and isinstance(data['isbn'], basestring): if 'isbn' in data and isinstance(data['isbn'], str):
data['isbn'] = [data['isbn']] data['isbn'] = [data['isbn']]
if 'date' in data and len(data['date']) == 8 and data['date'].isdigit(): if 'date' in data and len(data['date']) == 8 and data['date'].isdigit():
d = data['date'] d = data['date']

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import os import os
from utils import find_isbns from utils import find_isbns

View file

@ -1,17 +1,17 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import stdnum.isbn import stdnum.isbn
import ox import ox
import abebooks from . import abebooks
import loc from . import loc
import lookupbyisbn from . import lookupbyisbn
import openlibrary from . import openlibrary
import worldcat from . import worldcat
import google from . import google
import duckduckgo from . import duckduckgo
import logging import logging
logger = logging.getLogger('meta') logger = logging.getLogger('meta')
@ -51,22 +51,22 @@ def lookup(key, value):
ids.append(kv) ids.append(kv)
done = False done = False
logger.debug('FIXME: sort ids') logger.debug('FIXME: sort ids')
ids.sort(key=lambda i: ox.sort_string(u''.join(i))) ids.sort(key=lambda i: ox.sort_string(''.join(i)))
logger.debug('IDS %s', ids) logger.debug('IDS %s', ids)
for k, v in ids: for k, v in ids:
for provider, id in providers: for provider, id in providers:
if id == k: if id == k:
if provider not in provider_data: if provider not in provider_data:
provider_data[provider] = {} provider_data[provider] = {}
for k_, v_ in globals()[provider].lookup(v).iteritems(): for k_, v_ in globals()[provider].lookup(v).items():
if k_ not in provider_data[provider]: if k_ not in provider_data[provider]:
provider_data[provider][k_] = v_ provider_data[provider][k_] = v_
for provider in sorted( for provider in sorted(
provider_data.keys(), list(provider_data.keys()),
key=lambda x: -len(provider_data[x]) key=lambda x: -len(provider_data[x])
): ):
logger.debug('%s %s %s', provider, len(provider_data[provider]), provider_data[provider].keys()) logger.debug('%s %s %s', provider, len(provider_data[provider]), list(provider_data[provider].keys()))
for k_, v_ in provider_data[provider].iteritems(): for k_, v_ in provider_data[provider].items():
if not k_ in data: if not k_ in data:
data[k_] = v_ data[k_] = v_
for k, v in ids: for k, v in ids:

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re import re

View file

@ -2,10 +2,10 @@
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
def get_classification(id): def get_classification(id):
name = u'%s' % id name = '%s' % id
base = ''.join([s for s in id.split('/')[0].split('.')[0] if s.isdigit()]) base = ''.join([s for s in id.split('/')[0].split('.')[0] if s.isdigit()])
if base in DEWEY: if base in DEWEY:
name = u'%s %s' % (name, DEWEY[base].decode('utf-8')) name = '%s %s' % (name, DEWEY[base].decode('utf-8'))
return name return name
DEWEY = { DEWEY = {
@ -941,9 +941,9 @@ if __name__ == '__main__':
dewey = {} dewey = {}
for i in range(0, 1000): for i in range(0, 1000):
url = 'http://dewey.info/class/%s/about.en.json' % i url = 'http://dewey.info/class/%s/about.en.json' % i
print url print(url)
data = json.loads(read_url(url)) data = json.loads(read_url(url))
for d in data.values(): for d in list(data.values()):
if 'http://www.w3.org/2004/02/skos/core#prefLabel' in d: if 'http://www.w3.org/2004/02/skos/core#prefLabel' in d:
value = d['http://www.w3.org/2004/02/skos/core#prefLabel'][0]['value'] value = d['http://www.w3.org/2004/02/skos/core#prefLabel'][0]['value']
dewey[str(i)] = value dewey[str(i)] = value

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import ox.web.duckduckgo import ox.web.duckduckgo
import stdnum.isbn import stdnum.isbn

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import ox.web.google import ox.web.google
import stdnum.isbn import stdnum.isbn

View file

@ -1,15 +1,15 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from ox.cache import read_url from ox.cache import read_url
import ox import ox
import re import re
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
from dewey import get_classification from .dewey import get_classification
from marc_countries import COUNTRIES from .marc_countries import COUNTRIES
from utils import normalize_isbn from .utils import normalize_isbn
import logging import logging
logger = logging.getLogger('meta.loc') logger = logging.getLogger('meta.loc')
@ -86,7 +86,7 @@ def lookup(id):
toc = mods.findall(ns + 'tableOfContents') toc = mods.findall(ns + 'tableOfContents')
if toc: if toc:
info['description'] = toc[0].text.strip() info['description'] = toc[0].text.strip()
for key in info.keys(): for key in list(info.keys()):
if not info[key]: if not info[key]:
del info[key] del info[key]
return info return info

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re import re
@ -8,7 +8,7 @@ from ox.cache import read_url
from ox import find_re, strip_tags, decode_html from ox import find_re, strip_tags, decode_html
import stdnum.isbn import stdnum.isbn
from utils import find_isbns from .utils import find_isbns
import logging import logging
logger = logging.getLogger('meta.lookupbyisbn') logger = logging.getLogger('meta.lookupbyisbn')
@ -78,13 +78,13 @@ def lookup(id):
r['description'] = decode_html(strip_tags(desc)) r['description'] = decode_html(strip_tags(desc))
r['cover'] = find_re(data, '<img src="(.*?)" alt="Book cover').replace('._SL160_', '') r['cover'] = find_re(data, '<img src="(.*?)" alt="Book cover').replace('._SL160_', '')
for key in r: for key in r:
if isinstance(r[key], basestring): if isinstance(r[key], str):
r[key] = decode_html(strip_tags(r[key])).strip() r[key] = decode_html(strip_tags(r[key])).strip()
if 'author' in r and isinstance(r['author'], basestring) and r['author']: if 'author' in r and isinstance(r['author'], str) and r['author']:
r['author'] = [r['author']] r['author'] = [r['author']]
else: else:
r['author'] = [] r['author'] = []
if r['description'].lower() == u'Description of this item is not available at this time.'.lower(): if r['description'].lower() == 'Description of this item is not available at this time.'.lower():
r['description'] = '' r['description'] = ''
return r return r

View file

@ -1,16 +1,16 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime from datetime import datetime
from urllib import urlencode from urllib.parse import urlencode
import json import json
from ox.cache import read_url from ox.cache import read_url
from dewey import get_classification from .dewey import get_classification
from marc_countries import COUNTRIES from .marc_countries import COUNTRIES
from utils import normalize_isbn from .utils import normalize_isbn
import logging import logging
logger = logging.getLogger('meta.openlibrary') logger = logging.getLogger('meta.openlibrary')
@ -41,7 +41,7 @@ def find(query):
results = [] results = []
ids = [b for b in r.get('result', []) if b.startswith('/books')] ids = [b for b in r.get('result', []) if b.startswith('/books')]
books = api.get_many(ids).get('result', []) books = api.get_many(ids).get('result', [])
for olid, value in books.iteritems(): for olid, value in books.items():
olid = olid.split('/')[-1] olid = olid.split('/')[-1]
book = format(value) book = format(value)
book['olid'] = [olid] book['olid'] = [olid]
@ -84,7 +84,7 @@ def lookup(id, return_all=False):
data['olid'] = [] data['olid'] = []
if id not in data['olid']: if id not in data['olid']:
data['olid'] = [id] data['olid'] = [id]
logger.debug('lookup %s => %s', id, data.keys()) logger.debug('lookup %s => %s', id, list(data.keys()))
return data return data
def get_type(obj): def get_type(obj):
@ -129,7 +129,7 @@ def format(info, return_all=False):
elif key in ('isbn_10', 'isbn_13'): elif key in ('isbn_10', 'isbn_13'):
if not isinstance(value, list): if not isinstance(value, list):
value = [value] value = [value]
value = map(normalize_isbn, value) value = list(map(normalize_isbn, value))
if KEYS[key] in data: if KEYS[key] in data:
value = data[KEYS[key]] + value value = data[KEYS[key]] + value
elif isinstance(value, list) and key not in ('publish_places', 'lccn', 'oclc_numbers'): elif isinstance(value, list) and key not in ('publish_places', 'lccn', 'oclc_numbers'):
@ -149,7 +149,7 @@ def format(info, return_all=False):
def resolve_names(objects, key='name'): def resolve_names(objects, key='name'):
r = [] r = []
data = api.get_many([k['key'] for k in objects]).get('result', {}) data = api.get_many([k['key'] for k in objects]).get('result', {})
for k, value in data.iteritems(): for k, value in data.items():
if 'location' in value and value.get('type', {}).get('key') == '/type/redirect': if 'location' in value and value.get('type', {}).get('key') == '/type/redirect':
value = api.get(value['location']).get('result', {}) value = api.get(value['location']).get('result', {})
r.append(value[key]) r.append(value[key])
@ -160,7 +160,7 @@ class API(object):
def _request(self, action, data, timeout=None): def _request(self, action, data, timeout=None):
for key in data: for key in data:
if not isinstance(data[key], basestring): if not isinstance(data[key], str):
data[key] = json.dumps(data[key]) data[key] = json.dumps(data[key])
url = self.base + '/' + action + '?' + urlencode(data) url = self.base + '/' + action + '?' + urlencode(data)
if timeout is None: if timeout is None:
@ -181,7 +181,7 @@ class API(object):
return data return data
def search(self, query): def search(self, query):
if isinstance(query, basestring): if isinstance(query, str):
query = { query = {
'query': query 'query': query
} }
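urlencode moved from urllib to urllib.parse, and the basestring checks become plain str checks. A sketch of building the API request URL (the endpoint path is illustrative):

import json
from urllib.parse import urlencode                    # was: from urllib import urlencode

data = {'query': json.dumps({'title': 'Ulysses'})}    # values are JSON-encoded as in _request()
url = 'https://openlibrary.org/api/example?' + urlencode(data)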

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re import re
import stdnum.isbn import stdnum.isbn
@ -10,6 +10,8 @@ def normalize_isbn(value):
return ''.join([s for s in value if s.isdigit() or s == 'X']) return ''.join([s for s in value if s.isdigit() or s == 'X'])
def find_isbns(text): def find_isbns(text):
if isinstance(text, bytes):
text = text.decode()
matches = re.compile('\d[\d\-X\ ]+').findall(text) matches = re.compile('\d[\d\-X\ ]+').findall(text)
matches = [normalize_isbn(value) for value in matches] matches = [normalize_isbn(value) for value in matches]
return [isbn for isbn in matches if stdnum.isbn.is_valid(isbn) return [isbn for isbn in matches if stdnum.isbn.is_valid(isbn)
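Text extracted from PDFs can now arrive as bytes, and re will not apply a str pattern to bytes, so find_isbns() decodes first. For example:

import re

text = b'ISBN 978-0-14-044913-6'
if isinstance(text, bytes):
    text = text.decode()                        # a str pattern needs str input
candidates = re.findall(r'\d[\d\-X\ ]+', text)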

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re import re
import hashlib import hashlib
@ -99,7 +99,7 @@ def lookup(id):
if m: if m:
data['date'] = m[0] data['date'] = m[0]
logger.debug('lookup %s => %s', id, data.keys()) logger.debug('lookup %s => %s', id, list(data.keys()))
return data return data
info = lookup info = lookup

View file

@ -1,9 +1,9 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import json import json
from urllib import urlencode from urllib.parse import urlencode
from ox.cache import read_url from ox.cache import read_url

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, print_function
import os import os
@ -8,11 +8,11 @@ from tornado.web import Application
from tornado.httpserver import HTTPServer from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop from tornado.ioloop import IOLoop
import oxtornado from . import oxtornado
from oxtornado import actions from .oxtornado import actions
import meta from . import meta
import utils from . import utils
import logging import logging
logger = logging.getLogger('metaoml') logger = logging.getLogger('metaoml')
@ -49,7 +49,7 @@ def getMetadata(data):
include_edits = data.pop('includeEdits') include_edits = data.pop('includeEdits')
else: else:
include_edits = False include_edits = False
key, value = data.iteritems().next() key, value = next(iter(data.items()))
if key == 'isbn': if key == 'isbn':
value = utils.normalize_isbn(value) value = utils.normalize_isbn(value)
response = meta.lookup(key, value) response = meta.lookup(key, value)

View file

@ -19,9 +19,9 @@ def generate_ssl():
key = OpenSSL.crypto.PKey() key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048) key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
with open(settings.ssl_key_path, 'wb') as fd: with open(settings.ssl_key_path, 'wb') as fd:
os.chmod(settings.ssl_key_path, 0600) os.chmod(settings.ssl_key_path, 0o600)
fd.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)) fd.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
os.chmod(settings.ssl_key_path, 0400) os.chmod(settings.ssl_key_path, 0o400)
ca = OpenSSL.crypto.X509() ca = OpenSSL.crypto.X509()
ca.set_version(2) ca.set_version(2)

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from changelog import Changelog from changelog import Changelog
from user.models import User from user.models import User

View file

@ -11,11 +11,11 @@ from tornado.ioloop import PeriodicCallback
from oxtornado import run_async from oxtornado import run_async
from utils import valid, get_public_ipv6 from utils import valid, get_public_ipv6
from websocket import trigger_event from websocket import trigger_event
import cert from . import cert
import db import db
import directory import directory
import json import json
import nodeapi from . import nodeapi
import settings import settings
import state import state
import user import user

View file

@ -1,14 +1,14 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from Queue import Queue
from queue import Queue
from threading import Thread from threading import Thread
import json import json
import socket import socket
from StringIO import StringIO from io import StringIO
import gzip import gzip
import urllib2 import urllib.request, urllib.error, urllib.parse
from datetime import datetime from datetime import datetime
import os import os
import time import time
@ -135,10 +135,10 @@ class Node(Thread):
'X-Ed25519-Key': settings.USER_ID, 'X-Ed25519-Key': settings.USER_ID,
'X-Ed25519-Signature': sig, 'X-Ed25519-Signature': sig,
} }
self._opener.addheaders = zip(headers.keys(), headers.values()) self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
try: try:
r = self._opener.open(url, data=content, timeout=self.TIMEOUT) r = self._opener.open(url, data=content, timeout=self.TIMEOUT)
except urllib2.HTTPError as e: except urllib.error.HTTPError as e:
if e.code == 403: if e.code == 403:
logger.debug('REMOTE ENDED PEERING') logger.debug('REMOTE ENDED PEERING')
with db.session(): with db.session():
@ -150,7 +150,7 @@ class Node(Thread):
logger.debug('urllib2.HTTPError %s %s', e, e.code) logger.debug('urllib2.HTTPError %s %s', e, e.code)
self.online = False self.online = False
return None return None
except urllib2.URLError as e: except urllib.error.URLError as e:
logger.debug('urllib2.URLError %s', e) logger.debug('urllib2.URLError %s', e)
self.online = False self.online = False
return None return None
@ -201,7 +201,7 @@ class Node(Thread):
'X-Node-Protocol': settings.NODE_PROTOCOL, 'X-Node-Protocol': settings.NODE_PROTOCOL,
'Accept-Encoding': 'gzip', 'Accept-Encoding': 'gzip',
} }
self._opener.addheaders = zip(headers.keys(), headers.values()) self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
r = self._opener.open(url, timeout=1) r = self._opener.open(url, timeout=1)
version = r.headers.get('X-Node-Protocol', None) version = r.headers.get('X-Node-Protocol', None)
if version != settings.NODE_PROTOCOL: if version != settings.NODE_PROTOCOL:
@ -297,7 +297,7 @@ class Node(Thread):
} }
t1 = datetime.utcnow() t1 = datetime.utcnow()
logger.debug('download %s', url) logger.debug('download %s', url)
self._opener.addheaders = zip(headers.keys(), headers.values()) self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
r = self._opener.open(url, timeout=self.TIMEOUT*2) r = self._opener.open(url, timeout=self.TIMEOUT*2)
if r.getcode() == 200: if r.getcode() == 200:
if r.headers.get('content-encoding', None) == 'gzip': if r.headers.get('content-encoding', None) == 'gzip':
@ -338,7 +338,7 @@ class Node(Thread):
headers = { headers = {
'User-Agent': settings.USER_AGENT, 'User-Agent': settings.USER_AGENT,
} }
self._opener.addheaders = zip(headers.keys(), headers.values()) self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
r = self._opener.open(url) r = self._opener.open(url)
if r.getcode() == 200: if r.getcode() == 200:
with open(path, 'w') as fd: with open(path, 'w') as fd:
@ -379,11 +379,11 @@ class Nodes(Thread):
def _call(self, target, action, *args): def _call(self, target, action, *args):
if target == 'all': if target == 'all':
nodes = self._nodes.values() nodes = list(self._nodes.values())
elif target == 'peered': elif target == 'peered':
nodes = [n for n in self._nodes.values() if n.user.peered] nodes = [n for n in list(self._nodes.values()) if n.user.peered]
elif target == 'online': elif target == 'online':
nodes = [n for n in self._nodes.values() if n.online] nodes = [n for n in list(self._nodes.values()) if n.online]
else: else:
nodes = [self._nodes[target]] nodes = [self._nodes[target]]
for node in nodes: for node in nodes:
@ -412,7 +412,7 @@ class Nodes(Thread):
def join(self): def join(self):
self._running = False self._running = False
self._q.put(None) self._q.put(None)
for node in self._nodes.values(): for node in list(self._nodes.values()):
node.join() node.join()
self._local.join() self._local.join()
return Thread.join(self) return Thread.join(self)
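The node client picks up the renamed standard-library modules (queue, io, urllib.request and urllib.error) and materializes zip() with list() because OpenerDirector.addheaders must be a real list. A condensed sketch of the request pattern with a placeholder URL:

import urllib.request
import urllib.error

opener = urllib.request.build_opener()
headers = {'User-Agent': 'oml-example', 'Accept-Encoding': 'gzip'}
opener.addheaders = list(zip(headers.keys(), headers.values()))    # zip() is lazy in Python 3

try:
    r = opener.open('https://example.com/', timeout=10)            # placeholder URL
    body = r.read()
except urllib.error.HTTPError as e:
    print('http error', e.code)
except urllib.error.URLError as e:
    print('connection failed', e)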

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from contextlib import contextmanager from contextlib import contextmanager
import inspect import inspect
@ -30,7 +30,7 @@ def _to_json(python_object):
tt = python_object.timetuple() tt = python_object.timetuple()
return '%d-%02d-%02dT%02d:%02d%02dZ' % tuple(list(tt)[:6]) return '%d-%02d-%02dT%02d:%02d%02dZ' % tuple(list(tt)[:6])
return python_object.strftime('%Y-%m-%dT%H:%M:%SZ') return python_object.strftime('%Y-%m-%dT%H:%M:%SZ')
raise TypeError(u'%s %s is not JSON serializable' % (repr(python_object), type(python_object))) raise TypeError('%s %s is not JSON serializable' % (repr(python_object), type(python_object)))
def json_dumps(obj): def json_dumps(obj):
indent = 2 indent = 2
@ -52,14 +52,14 @@ def trim(docstring):
# and split into a list of lines: # and split into a list of lines:
lines = docstring.expandtabs().splitlines() lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count): # Determine minimum indentation (first line doesn't count):
indent = sys.maxint indent = sys.maxsize
for line in lines[1:]: for line in lines[1:]:
stripped = line.lstrip() stripped = line.lstrip()
if stripped: if stripped:
indent = min(indent, len(line) - len(stripped)) indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special): # Remove indentation (first line is special):
trimmed = [lines[0].strip()] trimmed = [lines[0].strip()]
if indent < sys.maxint: if indent < sys.maxsize:
for line in lines[1:]: for line in lines[1:]:
trimmed.append(line[indent:].rstrip()) trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines: # Strip off trailing and leading blank lines:
@ -78,11 +78,11 @@ def defaultcontext():
def api_task(context, request, callback): def api_task(context, request, callback):
if context == None: if context == None:
context = defaultcontext context = defaultcontext
action = request.arguments.get('action', [None])[0] action = request.arguments.get('action', [None])[0].decode('utf-8')
data = request.arguments.get('data', ['{}'])[0] data = request.arguments.get('data', [b'{}'])[0]
data = json.loads(data) if data else {} data = json.loads(data.decode('utf-8')) if data else {}
if not action: if not action:
methods = actions.keys() methods = list(actions.keys())
api = [] api = []
for f in sorted(methods): for f in sorted(methods):
api.append({'name': f, api.append({'name': f,
@ -154,7 +154,7 @@ class ApiActions(dict):
data = data or {} data = data or {}
docs = data.get('docs', False) docs = data.get('docs', False)
code = data.get('code', False) code = data.get('code', False)
_actions = self.keys() _actions = list(self.keys())
_actions.sort() _actions.sort()
actions = {} actions = {}
for a in _actions: for a in _actions:
@ -172,16 +172,16 @@ class ApiActions(dict):
def code(self, name, version=None): def code(self, name, version=None):
f = self[name] f = self[name]
if name != 'api' and hasattr(f, 'func_closure') and f.func_closure: if name != 'api' and hasattr(f, 'func_closure') and f.__closure__:
fc = filter(lambda c: hasattr(c.cell_contents, '__call__'), f.func_closure) fc = [c for c in f.__closure__ if hasattr(c.cell_contents, '__call__')]
f = fc[len(fc)-1].cell_contents f = fc[len(fc)-1].cell_contents
info = f.func_code.co_filename info = f.__code__.co_filename
info = u'%s:%s' % (info, f.func_code.co_firstlineno) info = '%s:%s' % (info, f.__code__.co_firstlineno)
return info, trim(inspect.getsource(f)) return info, trim(inspect.getsource(f))
def register(self, method, action=None, cache=True, version=None): def register(self, method, action=None, cache=True, version=None):
if not action: if not action:
action = method.func_name action = method.__name__
if version: if version:
if not version in self.versions: if not version in self.versions:
self.versions[version] = {} self.versions[version] = {}
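Two smaller Python 3 details close out the Tornado glue: request argument values arrive as bytes and are decoded before json.loads() (which on 3.4 only accepts str), and the old function attributes func_name/func_code/func_closure are spelled __name__/__code__/__closure__. For example:

def example():
    pass

# Python 2 attribute -> Python 3 attribute
print(example.__name__)                 # was example.func_name
print(example.__code__.co_filename)     # was example.func_code.co_filename
print(example.__closure__)              # was example.func_closure (None for a plain function)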

View file

@ -101,7 +101,7 @@ class Parser(object):
q = ~q q = ~q
return q return q
elif key_type in ("string", "text"): elif key_type in ("string", "text"):
if isinstance(v, unicode): if isinstance(v, str):
v = unicodedata.normalize('NFKD', v).lower() v = unicodedata.normalize('NFKD', v).lower()
else: else:
v = v.lower() v = v.lower()

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, print_function
import os import os
import sys import sys

View file

@ -52,15 +52,15 @@ for key in server_defaults:
release = pdict(os.path.join(config_path, 'release.json')) release = pdict(os.path.join(config_path, 'release.json'))
if os.path.exists(key_path): if os.path.exists(key_path):
with open(key_path) as fd: with open(key_path, 'rb') as fd:
sk = ed25519.SigningKey(fd.read()) sk = ed25519.SigningKey(fd.read())
vk = sk.get_verifying_key() vk = sk.get_verifying_key()
else: else:
sk, vk = ed25519.create_keypair() sk, vk = ed25519.create_keypair()
with open(key_path, 'w') as fd: with open(key_path, 'wb') as fd:
os.chmod(key_path, 0600) os.chmod(key_path, 0o600)
fd.write(sk.to_bytes()) fd.write(sk.to_bytes())
os.chmod(key_path, 0400) os.chmod(key_path, 0o400)
USER_ID = vk.to_ascii(encoding='base64') USER_ID = vk.to_ascii(encoding='base64')
OML_UPDATE_KEY='K55EZpPYbP3X+3mA66cztlw1sSaUMqGwfTDKQyP2qOU' OML_UPDATE_KEY='K55EZpPYbP3X+3mA66cztlw1sSaUMqGwfTDKQyP2qOU'
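Octal literals need the 0o prefix in Python 3, and the ed25519 key material is raw bytes, so the key file is opened in binary mode in both directions. A sketch of the key bootstrap with a placeholder path:

import os
import ed25519

key_path = '/tmp/example-node.key'             # placeholder path
if os.path.exists(key_path):
    with open(key_path, 'rb') as fd:           # binary read
        sk = ed25519.SigningKey(fd.read())
else:
    sk, vk = ed25519.create_keypair()
    with open(key_path, 'wb') as fd:           # binary write
        os.chmod(key_path, 0o600)              # 0600 is a SyntaxError in Python 3
        fd.write(sk.to_bytes())
    os.chmod(key_path, 0o400)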

View file

@ -1,14 +1,14 @@
import httplib import http.client
import socket import socket
import urllib2 import urllib.request, urllib.error, urllib.parse
import ssl import ssl
import hashlib import hashlib
import logging import logging
logger = logging.getLogger('oml.ssl_request') logger = logging.getLogger('oml.ssl_request')
class InvalidCertificateException(httplib.HTTPException, urllib2.URLError): class InvalidCertificateException(http.client.HTTPException, urllib.error.URLError):
def __init__(self, fingerprint, cert, reason): def __init__(self, fingerprint, cert, reason):
httplib.HTTPException.__init__(self) http.client.HTTPException.__init__(self)
self.fingerprint = fingerprint self.fingerprint = fingerprint
self.cert_fingerprint = hashlib.sha1(cert).hexdigest() self.cert_fingerprint = hashlib.sha1(cert).hexdigest()
self.reason = reason self.reason = reason
@ -17,11 +17,11 @@ class InvalidCertificateException(httplib.HTTPException, urllib2.URLError):
return ('%s (local) != %s (remote) (%s)\n' % return ('%s (local) != %s (remote) (%s)\n' %
(self.fingerprint, self.cert_fingerprint, self.reason)) (self.fingerprint, self.cert_fingerprint, self.reason))
class CertValidatingHTTPSConnection(httplib.HTTPConnection): class CertValidatingHTTPSConnection(http.client.HTTPConnection):
default_port = httplib.HTTPS_PORT default_port = http.client.HTTPS_PORT
def __init__(self, host, port=None, fingerprint=None, strict=None, **kwargs): def __init__(self, host, port=None, fingerprint=None, strict=None, **kwargs):
httplib.HTTPConnection.__init__(self, host, port, strict, **kwargs) http.client.HTTPConnection.__init__(self, host, port, strict, **kwargs)
self.fingerprint = fingerprint self.fingerprint = fingerprint
if self.fingerprint: if self.fingerprint:
self.cert_reqs = ssl.CERT_REQUIRED self.cert_reqs = ssl.CERT_REQUIRED
@ -44,9 +44,9 @@ class CertValidatingHTTPSConnection(httplib.HTTPConnection):
'fingerprint mismatch') 'fingerprint mismatch')
#logger.debug('CIPHER %s VERSION %s', self.sock.cipher(), self.sock.ssl_version) #logger.debug('CIPHER %s VERSION %s', self.sock.cipher(), self.sock.ssl_version)
class VerifiedHTTPSHandler(urllib2.HTTPSHandler): class VerifiedHTTPSHandler(urllib.request.HTTPSHandler):
def __init__(self, **kwargs): def __init__(self, **kwargs):
urllib2.AbstractHTTPHandler.__init__(self) urllib.request.AbstractHTTPHandler.__init__(self)
self._connection_args = kwargs self._connection_args = kwargs
def https_open(self, req): def https_open(self, req):
@@ -57,15 +57,15 @@ class VerifiedHTTPSHandler(urllib2.HTTPSHandler):
try: try:
return self.do_open(http_class_wrapper, req) return self.do_open(http_class_wrapper, req)
except urllib2.URLError, e: except urllib.error.URLError as e:
if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1: if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1:
raise InvalidCertificateException(self.fingerprint, '', raise InvalidCertificateException(self.fingerprint, '',
e.reason.args[1]) e.reason.args[1])
raise raise
https_request = urllib2.HTTPSHandler.do_request_ https_request = urllib.request.HTTPSHandler.do_request_
def get_opener(fingerprint): def get_opener(fingerprint):
handler = VerifiedHTTPSHandler(fingerprint=fingerprint) handler = VerifiedHTTPSHandler(fingerprint=fingerprint)
opener = urllib2.build_opener(handler) opener = urllib.request.build_opener(handler)
return opener return opener
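
Note: the renames above follow the standard urllib2/httplib split in Python 3. A minimal sketch of the new idioms (URL and User-Agent are placeholders, not the module's actual request path):

    import http.client         # was httplib
    import urllib.request      # was urllib2: Request, urlopen, build_opener
    import urllib.error        # was urllib2: URLError

    default_port = http.client.HTTPS_PORT   # 443, as in the class above

    req = urllib.request.Request('https://example.com/',
                                 headers={'User-Agent': 'oml'})
    try:
        with closing(urllib.request.urlopen(req)) if False else urllib.request.urlopen(req) as u:
            data = u.read()                  # bytes in Python 3
    except urllib.error.URLError as e:       # "except URLError, e" no longer parses
        print('request failed:', e.reason)
        data = None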

View file

@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from Queue import Queue from queue import Queue
from threading import Thread from threading import Thread
from websocket import trigger_event from websocket import trigger_event

View file

@@ -6,8 +6,8 @@ try:
GObject.threads_init() GObject.threads_init()
use_Gtk = True use_Gtk = True
except: except:
from Tkinter import Tk from tkinter import Tk
import tkFileDialog import tkinter.filedialog
use_Gtk = False use_Gtk = False
class GtkUI: class GtkUI:
@@ -23,16 +23,16 @@ class GtkUI:
if response == Gtk.ResponseType.OK: if response == Gtk.ResponseType.OK:
filename = dialog.get_filename() filename = dialog.get_filename()
if DEBUG: if DEBUG:
print filename, 'selected' print(filename, 'selected')
elif response == Gtk.ResponseType.CANCEL: elif response == Gtk.ResponseType.CANCEL:
if DEBUG: if DEBUG:
print 'Closed, no files selected' print('Closed, no files selected')
filename = None filename = None
dialog.destroy() dialog.destroy()
while Gtk.events_pending(): while Gtk.events_pending():
Gtk.main_iteration() Gtk.main_iteration()
if DEBUG: if DEBUG:
print "done" print("done")
return filename return filename
def selectFile(self, data): def selectFile(self, data):
@@ -47,16 +47,16 @@ class GtkUI:
if response == Gtk.ResponseType.OK: if response == Gtk.ResponseType.OK:
filename = dialog.get_filename() filename = dialog.get_filename()
if DEBUG: if DEBUG:
print filename, 'selected' print(filename, 'selected')
elif response == Gtk.ResponseType.CANCEL: elif response == Gtk.ResponseType.CANCEL:
if DEBUG: if DEBUG:
print 'Closed, no files selected' print('Closed, no files selected')
filename = None filename = None
dialog.destroy() dialog.destroy()
while Gtk.events_pending(): while Gtk.events_pending():
Gtk.main_iteration() Gtk.main_iteration()
if DEBUG: if DEBUG:
print "done" print("done")
return filename return filename
class TkUI: class TkUI:
@@ -64,10 +64,10 @@ class TkUI:
self.root = Tk() self.root = Tk()
self.root.withdraw() #hiding tkinter window self.root.withdraw() #hiding tkinter window
def selectFolder(self, data): def selectFolder(self, data):
return tkFileDialog.askdirectory(title=data.get("title", "Select Folder")) return tkinter.filedialog.askdirectory(title=data.get("title", "Select Folder"))
def selectFile(self, data): def selectFile(self, data):
return tkFileDialog.askopenfilename(title=data.get("title", "Select File")) return tkinter.filedialog.askopenfilename(title=data.get("title", "Select File"))
if use_Gtk: if use_Gtk:
ui = GtkUI() ui = GtkUI()
@@ -77,6 +77,6 @@ else:
if __name__ == '__main__': if __name__ == '__main__':
import sys import sys
if len(sys.argv) == 2 and sys.argv[1] == 'folder': if len(sys.argv) == 2 and sys.argv[1] == 'folder':
print ui.selectFolder({}) print(ui.selectFolder({}))
else: else:
print ui.selectFile({}) print(ui.selectFile({}))
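
Note: the Tk fallback now uses the renamed tkinter package and print() as a function. A self-contained sketch of the dialog calls (the titles are just examples):

    from tkinter import Tk           # was: from Tkinter import Tk
    import tkinter.filedialog        # was: import tkFileDialog

    root = Tk()
    root.withdraw()                  # hide the empty root window
    folder = tkinter.filedialog.askdirectory(title='Select Folder')
    name = tkinter.filedialog.askopenfilename(title='Select File')
    print(folder, name)              # print is a function in Python 3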

View file

@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from contextlib import closing from contextlib import closing
import json import json
import os import os
import tarfile import tarfile
import urllib2 import urllib.request, urllib.error, urllib.parse
import shutil import shutil
import subprocess import subprocess
@@ -34,10 +34,10 @@ def verify(release):
return True return True
def get(url, filename=None): def get(url, filename=None):
request = urllib2.Request(url, headers={ request = urllib.request.Request(url, headers={
'User-Agent': settings.USER_AGENT 'User-Agent': settings.USER_AGENT
}) })
with closing(urllib2.urlopen(request)) as u: with closing(urllib.request.urlopen(request)) as u:
if not filename: if not filename:
data = u.read() data = u.read()
return data return data
@@ -76,7 +76,7 @@ def download():
module_tar = os.path.join(settings.updates_path, release['modules'][module]['name']) module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
url = RELEASE_URL.replace('release.json', release['modules'][module]['name']) url = RELEASE_URL.replace('release.json', release['modules'][module]['name'])
if not os.path.exists(module_tar): if not os.path.exists(module_tar):
print 'download', os.path.basename(module_tar) print('download', os.path.basename(module_tar))
get(url, module_tar) get(url, module_tar)
if ox.sha1sum(module_tar) != release['modules'][module]['sha1']: if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
os.unlink(module_tar) os.unlink(module_tar)
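
Note: urlopen() now returns bytes, so the downloaded release data and module tarballs want binary handling throughout. A rough sketch of the helper above (URL, User-Agent and the file-writing branch are assumptions, not repository code):

    from contextlib import closing
    import urllib.request

    def get(url, filename=None):
        request = urllib.request.Request(url, headers={'User-Agent': 'oml'})
        with closing(urllib.request.urlopen(request)) as u:
            if not filename:
                return u.read()               # bytes; decode before json.loads() on 3.4
            with open(filename, 'wb') as fd:  # binary mode for the tarball
                chunk = u.read(4096)
                while chunk:
                    fd.write(chunk)
                    chunk = u.read(4096)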

View file

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from copy import deepcopy from copy import deepcopy
import json import json
@@ -11,7 +11,7 @@ import ox
from changelog import Changelog from changelog import Changelog
from oxtornado import actions from oxtornado import actions
from utils import update_dict from utils import update_dict
import models from . import models
import settings import settings
import state import state

View file

@@ -9,6 +9,7 @@ from changelog import Changelog
from db import MutableDict from db import MutableDict
from queryparser import Parser from queryparser import Parser
import db import db
import json_pickler
import settings import settings
import state import state
@@ -22,7 +23,7 @@ class User(db.Model):
modified = sa.Column(sa.DateTime()) modified = sa.Column(sa.DateTime())
id = sa.Column(sa.String(43), primary_key=True) id = sa.Column(sa.String(43), primary_key=True)
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json))) info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
nickname = sa.Column(sa.String(256), unique=True) nickname = sa.Column(sa.String(256), unique=True)
@@ -140,7 +141,7 @@ class List(db.Model):
index_ = sa.Column(sa.Integer()) index_ = sa.Column(sa.Integer())
type = sa.Column(sa.String(64)) type = sa.Column(sa.String(64))
_query = sa.Column('query', MutableDict.as_mutable(sa.PickleType(pickler=json))) _query = sa.Column('query', MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id')) user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id'))
user = sa.orm.relationship('User', backref=sa.orm.backref('lists', lazy='dynamic')) user = sa.orm.relationship('User', backref=sa.orm.backref('lists', lazy='dynamic'))
@@ -250,7 +251,7 @@ class List(db.Model):
id = '' id = ''
if self.user_id != settings.USER_ID: if self.user_id != settings.USER_ID:
id += self.user.nickname id += self.user.nickname
id = u'%s:%s' % (id, self.name) id = '%s:%s' % (id, self.name)
return id return id
@property @property
@@ -258,7 +259,7 @@ class List(db.Model):
id = '' id = ''
if self.user_id != settings.USER_ID: if self.user_id != settings.USER_ID:
id += self.user_id id += self.user_id
id = u'%s:%s' % (id, self.id) id = '%s:%s' % (id, self.id)
return id return id
def __repr__(self): def __repr__(self):
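
Note: SQLAlchemy's PickleType only needs a pickle-like object exposing dumps(value, protocol) and loads(data); plain json stops being a drop-in here because json.dumps() returns str under Python 3 while the underlying binary column round-trips bytes. The json_pickler module itself is not part of this diff; one plausible minimal shape for it:

    # hypothetical json_pickler.py -- an assumption, not taken from the repository
    import json

    def dumps(obj, protocol=None):            # protocol is accepted and ignored
        return json.dumps(obj, ensure_ascii=False).encode('utf-8')

    def loads(data):
        if isinstance(data, bytes):
            data = data.decode('utf-8')
        return json.loads(data)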

View file

@@ -1,15 +1,15 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import os import os
import sys import sys
from PIL import Image from PIL import Image
from StringIO import StringIO from io import StringIO, BytesIO
import re import re
import stdnum.isbn import stdnum.isbn
import socket import socket
import cStringIO import io
import gzip import gzip
import time import time
from datetime import datetime from datetime import datetime
@@ -49,14 +49,18 @@ def get_positions(ids, pos):
return positions return positions
def get_by_key(objects, key, value): def get_by_key(objects, key, value):
obj = filter(lambda o: o.get(key) == value, objects) obj = [o for o in objects if o.get(key) == value]
return obj and obj[0] or None return obj and obj[0] or None
def get_by_id(objects, id): def get_by_id(objects, id):
return get_by_key(objects, 'id', id) return get_by_key(objects, 'id', id)
def resize_image(data, width=None, size=None): def resize_image(data, width=None, size=None):
source = Image.open(StringIO(data)) if isinstance(data, bytes):
data = BytesIO(data)
else:
data = StringIO(data)
source = Image.open(data)
if source.mode == 'P': if source.mode == 'P':
source = source.convert('RGB') source = source.convert('RGB')
source_width = source.size[0] source_width = source.size[0]
@@ -83,7 +87,7 @@ def resize_image(data, width=None, size=None):
else: else:
resize_method = Image.BICUBIC resize_method = Image.BICUBIC
output = source.resize((width, height), resize_method) output = source.resize((width, height), resize_method)
o = StringIO() o = BytesIO()
output.save(o, format='jpeg') output.save(o, format='jpeg')
data = o.getvalue() data = o.getvalue()
o.close() o.close()
@@ -91,13 +95,13 @@ def resize_image(data, width=None, size=None):
def sort_title(title): def sort_title(title):
title = title.replace(u'Æ', 'Ae') title = title.replace('Æ', 'Ae')
if isinstance(title, str): if isinstance(title, str):
title = unicode(title) title = str(title)
title = ox.sort_string(title) title = ox.sort_string(title)
#title #title
title = re.sub(u'[\'!¿¡,\.;\-"\:\*\[\]]', '', title) title = re.sub('[\'!¿¡,\.;\-"\:\*\[\]]', '', title)
return title.strip() return title.strip()
def get_position_by_id(list, key): def get_position_by_id(list, key):
@@ -150,6 +154,7 @@ def get_local_ipv4():
cmd = ['/sbin/route', '-n', 'get', 'default'] cmd = ['/sbin/route', '-n', 'get', 'default']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True) p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
stdout = stdout.decode('utf-8')
interface = [[p.strip() for p in s.split(':', 1)] interface = [[p.strip() for p in s.split(':', 1)]
for s in stdout.strip().split('\n') if 'interface' in s] for s in stdout.strip().split('\n') if 'interface' in s]
if interface: if interface:
@@ -157,6 +162,7 @@ def get_local_ipv4():
cmd = ['ifconfig', interface] cmd = ['ifconfig', interface]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True) p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
stdout = stdout.decode('utf-8')
ips = [l for l in stdout.split('\n') if 'inet ' in l] ips = [l for l in stdout.split('\n') if 'inet ' in l]
if ips: if ips:
ip = ips[0].strip().split(' ')[1] ip = ips[0].strip().split(' ')[1]
@@ -164,6 +170,7 @@ def get_local_ipv4():
cmd = ['ip', 'route', 'show'] cmd = ['ip', 'route', 'show']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True) p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
stdout = stdout.decode('utf-8')
local = [l for l in stdout.split('\n') if 'default' in l] local = [l for l in stdout.split('\n') if 'default' in l]
if local: if local:
dev = local[0].split(' ')[4] dev = local[0].split(' ')[4]
@@ -174,7 +181,7 @@ def get_local_ipv4():
def update_dict(root, data): def update_dict(root, data):
for key in data: for key in data:
keys = map(lambda part: part.replace('\0', '\\.'), key.replace('\\.', '\0').split('.')) keys = [part.replace('\0', '\\.') for part in key.replace('\\.', '\0').split('.')]
value = data[key] value = data[key]
p = root p = root
while len(keys)>1: while len(keys)>1:
@@ -208,7 +215,7 @@ def remove_empty_tree(leaf):
else: else:
break break
utc_0 = int(time.mktime(datetime(1970, 01, 01).timetuple())) utc_0 = int(time.mktime(datetime(1970, 0o1, 0o1).timetuple()))
def datetime2ts(dt): def datetime2ts(dt):
return int(time.mktime(dt.utctimetuple())) - utc_0 return int(time.mktime(dt.utctimetuple())) - utc_0
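
Note: the changes in this file are mostly the bytes/str split: PIL gets encoded image data through BytesIO instead of StringIO, subprocess pipes return bytes that must be decoded before splitting into lines, and filter()/map() become list comprehensions where a real list is needed. A small sketch of those patterns (the command and sizes are placeholders):

    from io import BytesIO
    import subprocess
    from PIL import Image

    def thumbnail(data, width=96):
        # "data" is encoded JPEG/PNG bytes, so it goes through BytesIO
        source = Image.open(BytesIO(data)).convert('RGB')
        height = max(1, int(width * source.size[1] / source.size[0]))
        out = BytesIO()
        source.resize((width, height), Image.BICUBIC).save(out, format='jpeg')
        return out.getvalue()

    p = subprocess.Popen(['uname', '-a'], stdout=subprocess.PIPE, close_fds=True)
    stdout, _ = p.communicate()
    stdout = stdout.decode('utf-8')                 # bytes -> str before .split('\n')
    lines = [l for l in stdout.split('\n') if l]    # list comprehension, not filter()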

View file

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from tornado.websocket import WebSocketHandler from tornado.websocket import WebSocketHandler
from tornado.ioloop import IOLoop from tornado.ioloop import IOLoop

View file

@@ -1,5 +1,5 @@
Twisted
simplejson simplejson
ed25519 ed25519
SQLAlchemy==0.9.4 SQLAlchemy==0.9.7
pyopenssl>=0.13.1 pyopenssl>=0.14
pillow