port to python3

j 2014-09-03 00:32:44 +02:00
parent 14f426afd4
commit 8e27b9f76e
51 changed files with 272 additions and 248 deletions

View file

@ -39,9 +39,9 @@ To update to latest version:
./ctl update
On Linux you need a working python2 installation with PIL, pyhon-lxml and poppler-utils:
On Linux you need a working python2 installation with pillow, pyhon-lxml and poppler-utils:
apt-get install python2.7 python-imaging python-lxml poppler-utils
apt-get install python3.4 python3-pil python3-lxml poppler-utils
Platform

ctl (16 changed lines)
View file

@ -26,7 +26,7 @@ export SHARED_ENV
PATH="$SHARED_ENV/bin:$PATH"
export PATH
PYTHONPATH="$PLATFORM_ENV/lib/python2.7/site-packages:$SHARED_ENV/lib/python2.7/site-packages:$BASE/$NAME"
PYTHONPATH="$PLATFORM_ENV/lib/python3.4/site-packages:$SHARED_ENV/lib/python3.4/site-packages:$BASE/$NAME"
export PYTHONPATH
oxCACHE="$BASE/config/ox"
@ -45,10 +45,10 @@ if [ "$1" == "start" ]; then
exit 1
fi
if [ ! -d "$BASE/$NAME/.git" ]; then
python2 oml install_update
python3 oml install_update
cd "$BASE/$NAME"
fi
python2 oml server $PID
python3 oml server $PID
rm -f $PID
exit $?
fi
@ -59,7 +59,7 @@ if [ "$1" == "debug" ]; then
exit 1
fi
shift
python2 oml server $@
python3 oml server $@
exit $?
fi
if [ "$1" == "stop" ]; then
@ -89,7 +89,7 @@ if [ "$1" == "open" ]; then
fi
if [ "$1" == "ui" ]; then
shift
python2 $NAME/oml/ui.py $@
python3 $NAME/oml/ui.py $@
exit $?
fi
if [ "$1" == "update" ]; then
@ -107,17 +107,17 @@ if [ "$1" == "update" ]; then
NEW=`"$0" version`
"$0" postupdate -o $OLD -n $NEW
else
python2 oml update
python3 oml update
fi
exit $?
fi
if [ "$1" == "python" ]; then
cd "$BASE/$NAME"
shift
python2 $@
python3 $@
exit $?
fi
cd "$BASE/$NAME"
python2 oml $@
python3 oml $@
exit $?

View file

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, print_function
import sys

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import subprocess
import json
@ -67,7 +67,7 @@ def autocompleteFolder(data):
else:
folder, name = os.path.split(path)
if os.path.exists(folder):
prefix, folders, files = os.walk(folder).next()
prefix, folders, files = next(os.walk(folder))
folders = [os.path.join(prefix, f) for f in folders if (not name or f.startswith(name)) and not f.startswith('.')]
if prefix == path:
folders = [path] + folders
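
Note: a minimal sketch of the iterator change above; in Python 3 generators no longer expose a .next() method, so the next() builtin is used instead ('/tmp' is only a placeholder path).

    import os

    # Python 2: prefix, folders, files = os.walk(folder).next()
    prefix, folders, files = next(os.walk('/tmp'))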

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime
import json
@ -100,7 +100,7 @@ class Changelog(db.Model):
return True
else:
logger.debug('INVLAID SIGNATURE ON CHANGE %s', change)
raise Exception, 'invalid signature'
raise Exception('invalid signature')
else:
logger.debug('revsion does not match! got %s expecting %s', revision, next_revision)
return False
@ -168,7 +168,7 @@ class Changelog(db.Model):
if i.timestamp > timestamp:
logger.debug('ignore edititem change %s %s %s', timestamp, itemid, meta)
return True
keys = filter(lambda k: k in Item.id_keys, meta.keys())
keys = [k for k in list(meta.keys()) if k in Item.id_keys]
if keys:
key = keys[0]
primary = [key, meta[key]]
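
Note: the two incompatibilities fixed in this hunk, shown in isolation; id_keys and meta are stand-ins for Item.id_keys and the change payload.

    id_keys = ('isbn', 'asin')                    # stand-in for Item.id_keys
    meta = {'isbn': '9780141439600', 'title': 'x'}
    keys = [k for k in meta if k in id_keys]      # filter() returns a lazy iterator in Python 3
    if not keys:
        raise Exception('invalid signature')      # only the call form of raise is valid in Python 3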

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import subprocess
from os.path import join, exists, dirname
@ -22,7 +22,7 @@ def get(*cmd):
return stdout
def r(*cmd):
print ' '.join(cmd)
print(' '.join(cmd))
return subprocess.call(cmd)
def version(module):
@ -40,7 +40,7 @@ def command_version(*args):
"""
Print current version
"""
print version('openmedialibrary')
print(version('openmedialibrary'))
def command_debug(*args):
"""
@ -66,7 +66,7 @@ def command_install_update(*args):
"""
import update
if not update.install():
print "UPDATE FAILED"
print("UPDATE FAILED")
sys.exit(1)
def command_update(*args):
@ -75,7 +75,7 @@ def command_update(*args):
"""
import update
if not (update.download() and update.install()):
print "UPDATE FAILED"
print("UPDATE FAILED")
def command_postupdate(*args):
"""
@ -84,7 +84,7 @@ def command_postupdate(*args):
def run(*args):
o, old, n, new = args
if o != '-o' or n != '-n':
print 'usage: -o oldversion -n newversion'
print('usage: -o oldversion -n newversion')
sys.exit(1)
if old <= '20140521-65-e14c686' and new > '20140521-65-e14c686':
if not os.path.exists(settings.db_path):
@ -117,7 +117,7 @@ def command_release(*args):
"""
Release new version
"""
print 'checking...'
print('checking...')
import os
import json
import hashlib
@ -171,7 +171,7 @@ def command_release(*args):
sign(release)
with open('updates/release.json', 'w') as fd:
json.dump(release, fd, indent=2)
print 'signed latest release in updates/release.json'
print('signed latest release in updates/release.json')
def command_shell(*args):
'''
@ -223,5 +223,5 @@ def main():
info = actions["command_%s"%command].__doc__.split('\n')
info = [' %s%s' % (' ' * indent, i.strip()) for i in info]
info = '\n'.join(info).strip()
print(" %s%s%s" % (command, space, info))
print((" %s%s%s" % (command, space, info)))
sys.exit(1)

View file

@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# DHT placeholder
from __future__ import division
import logging
import requests
import ed25519
import json
import settings
logger = logging.getLogger('oml.directory')
@ -27,7 +28,7 @@ def get(vk):
vk = ed25519.VerifyingKey(id, encoding='base64')
try:
vk.verify(sig, data, encoding='base64')
data = json.loads(data)
data = json.loads(data.decode('utf-8'))
except ed25519.BadSignatureError:
logger.debug('invalid signature')
@ -36,7 +37,7 @@ def get(vk):
def put(sk, data):
id = sk.get_verifying_key().to_ascii(encoding='base64')
data = json.dumps(data)
data = json.dumps(data).encode('utf-8')
sig = sk.sign(data, encoding='base64')
url ='%s/%s' % (base, id)
headers = {
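
Note: the ed25519 bindings sign and verify bytes while json.dumps/json.loads work with str in Python 3, so the payload is encoded and decoded at the boundary. A minimal sketch with the same ed25519 package and a made-up payload:

    import json
    import ed25519

    sk, vk = ed25519.create_keypair()
    payload = json.dumps({'host': '127.0.0.1', 'port': 9842}).encode('utf-8')  # bytes for signing
    sig = sk.sign(payload, encoding='base64')
    vk.verify(sig, payload, encoding='base64')      # raises ed25519.BadSignatureError on mismatch
    data = json.loads(payload.decode('utf-8'))      # back to str before parsing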

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from threading import Thread
import time

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import json
import hashlib
@ -11,8 +11,8 @@ from oxtornado import actions
from utils import cleanup_id
from websocket import trigger_event
import metaremote as meta
import models
import query
from . import models
from . import query
import settings
import state
import utils
@ -183,7 +183,7 @@ def getMetadata(data):
include_edits = data.pop('includeEdits')
else:
include_edits = False
key, value = data.iteritems().next()
key, value = next(iter(data.items()))
value = cleanup_id(key, value)
response = meta.lookup(key, value)
if include_edits:
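
Note: dict.iteritems() is gone in Python 3 and dict.items() returns a view, so the first key/value pair is pulled with next(iter(...)). Sketch with a placeholder payload:

    data = {'isbn': '9780141439600'}
    key, value = next(iter(data.items()))    # Python 2: data.iteritems().next()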

View file

@ -1,13 +1,13 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from datetime import datetime
import mimetypes
import os
import zipfile
from models import Item
from .models import Item
import db
import settings
import tornado.web
@ -46,7 +46,7 @@ def serve_static(handler, path, mimetype, include_body=True):
handler.set_header('Content-Type', mimetype)
handler.set_header('Content-Length', str(os.stat(path).st_size))
if include_body:
with open(path) as fd:
with open(path, 'rb') as fd:
handler.write(fd.read())
return
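
Note: in Python 3 the default text mode would try to decode the file as UTF-8; binary mode returns raw bytes that can be written to the response unchanged. Sketch reading this script itself as a placeholder file:

    with open(__file__, 'rb') as fd:    # 'rb' yields bytes instead of str
        body = fd.read()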

View file

@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from StringIO import StringIO
from io import BytesIO
from PIL import Image
import sqlite3
@ -32,30 +32,30 @@ class Icons(dict):
def create(self):
conn = self.connect()
c = conn.cursor()
c.execute(u'CREATE TABLE IF NOT EXISTS icon (id varchar(64) unique, data blob)')
c.execute(u'CREATE TABLE IF NOT EXISTS setting (key varchar(256) unique, value text)')
c.execute('CREATE TABLE IF NOT EXISTS icon (id varchar(64) unique, data blob)')
c.execute('CREATE TABLE IF NOT EXISTS setting (key varchar(256) unique, value text)')
if int(self.get_setting(c, 'version', 0)) < 1:
self.set_setting(c, 'version', 1)
def get_setting(self, c, key, default=None):
c.execute(u'SELECT value FROM setting WHERE key = ?', (key, ))
c.execute('SELECT value FROM setting WHERE key = ?', (key, ))
for row in c:
return row[0]
return default
def set_setting(self, c, key, value):
c.execute(u'INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value)))
c.execute('INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value)))
def black(self):
img = Image.new('RGB', (80, 128))
o = StringIO()
o = BytesIO()
img.save(o, format='jpeg')
data = o.getvalue()
o.close()
return data
def __getitem__(self, id, default=None):
sql = u'SELECT data FROM icon WHERE id=?'
sql = 'SELECT data FROM icon WHERE id=?'
conn = self.connect()
c = conn.cursor()
c.execute(sql, (id, ))
@ -68,7 +68,7 @@ class Icons(dict):
return data
def __setitem__(self, id, data):
sql = u'INSERT OR REPLACE INTO icon values (?, ?)'
sql = 'INSERT OR REPLACE INTO icon values (?, ?)'
conn = self.connect()
c = conn.cursor()
data = sqlite3.Binary(data)
@ -78,7 +78,7 @@ class Icons(dict):
conn.close()
def __delitem__(self, id):
sql = u'DELETE FROM icon WHERE id = ?'
sql = 'DELETE FROM icon WHERE id = ?'
conn = self.connect()
c = conn.cursor()
c.execute(sql, (id, ))
@ -94,7 +94,7 @@ def get_icon(id, type_, size, callback):
skey = '%s:%s:%s' % (type_, id, size)
data = icons[skey]
if data:
callback(str(data))
callback(bytes(data))
return
key = '%s:%s' % (type_, id)
data = icons[key]
@ -114,7 +114,7 @@ def get_icon(id, type_, size, callback):
size = None
if size:
data = icons[skey] = resize_image(data, size=size)
data = str(data) or ''
data = bytes(data) or ''
callback(data)
@run_async
@ -144,7 +144,7 @@ def get_icon_app(id, type_, size, callback):
size = None
if size:
data = icons[skey] = resize_image(data, size=size)
data = str(data) or ''
data = bytes(data) or ''
callback(data)
class IconHandler(tornado.web.RequestHandler):
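
Note: JPEG data is binary, so the in-memory buffer has to be io.BytesIO rather than the old StringIO. A minimal sketch of the pattern used for the placeholder icon above:

    from io import BytesIO
    from PIL import Image

    img = Image.new('RGB', (80, 128))
    o = BytesIO()                 # StringIO would reject bytes in Python 3
    img.save(o, format='jpeg')
    data = o.getvalue()           # bytes, ready for the sqlite BLOB column
    o.close()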

View file

@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime
from StringIO import StringIO
from io import StringIO
import base64
import hashlib
import json
@ -18,8 +18,9 @@ import sqlalchemy as sa
from changelog import Changelog
from db import MutableDict
from icons import icons
from person import get_sort_name
import json_pickler
from .icons import icons
from .person import get_sort_name
from settings import config
from utils import remove_empty_folders
from websocket import trigger_event
@ -46,8 +47,8 @@ class Item(db.Model):
id = sa.Column(sa.String(32), primary_key=True)
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json)))
meta = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json)))
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
meta = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
# why is this in db and not in i.e. info?
added = sa.Column(sa.DateTime()) # added to local library
@ -104,7 +105,7 @@ class Item(db.Model):
if t:
j['transferadded'] = t.added
j['transferprogress'] = t.progress
j['users'] = map(str, list(self.users))
j['users'] = list(map(str, list(self.users)))
if self.info:
j.update(self.info)
@ -115,7 +116,7 @@ class Item(db.Model):
if key not in self.meta and key in j:
del j[key]
if keys:
for k in j.keys():
for k in list(j.keys()):
if k not in keys:
del j[k]
return j
@ -140,19 +141,19 @@ class Item(db.Model):
elif sort_type == 'name':
if not isinstance(value, list):
value = [value]
value = map(get_sort_name, value)
value = ox.sort_string(u'\n'.join(value))
value = list(map(get_sort_name, value))
value = ox.sort_string('\n'.join(value))
elif sort_type == 'title':
if isinstance(value, dict):
value = value.values()
value = list(value.values())
if isinstance(value, list):
value = u''.join(value)
value = ''.join(value)
value = utils.sort_title(value).lower()
else:
if isinstance(value, list):
value = u'\n'.join(value)
value = '\n'.join(value)
if value:
value = unicode(value)
value = str(value)
value = ox.sort_string(value).lower()
elif isinstance(value, list): #empty list
value = ''
@ -178,7 +179,7 @@ class Item(db.Model):
if value:
Find.query.filter_by(item_id=self.id, key=key['id']).delete()
if isinstance(value, dict):
value = ' '.join(value.values())
value = ' '.join(list(value.values()))
if not isinstance(value, list):
value = [value]
for v in value:
@ -194,7 +195,7 @@ class Item(db.Model):
if key not in self.info:
self.info[key] = self.meta[key]
del self.meta[key]
users = map(str, list(self.users))
users = list(map(str, list(self.users)))
self.info['mediastate'] = 'available' # available, unavailable, transferring
t = Transfer.get(self.id)
if t and t.added and t.progress < 1:
@ -230,7 +231,7 @@ class Item(db.Model):
record[key] = data[key]
self.meta[key] = data[key]
update = True
for key in self.meta.keys():
for key in list(self.meta.keys()):
if key not in self.meta_keys:
del self.meta[key]
update = True
@ -446,7 +447,7 @@ class Find(db.Model):
findvalue = sa.Column(sa.Text(), index=True)
def __repr__(self):
return (u'%s=%s' % (self.key, self.findvalue)).encode('utf-8')
return ('%s=%s' % (self.key, self.findvalue)).encode('utf-8')
@classmethod
def get(cls, item, key):
@ -470,7 +471,7 @@ class File(db.Model):
sha1 = sa.Column(sa.String(32), primary_key=True)
path = sa.Column(sa.String(2048))
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json)))
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'))
item = sa.orm.relationship('Item', backref=sa.orm.backref('files', lazy='dynamic'))
@ -591,7 +592,7 @@ class Metadata(db.Model):
key = sa.Column(sa.String(256))
value = sa.Column(sa.String(256))
data = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json)))
data = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
def __repr__(self):
return '='.join([self.key, self.value])

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import unicodedata
@ -19,7 +19,7 @@ def get_sort_name(name, sortname=None):
person.save()
sortname = unicodedata.normalize('NFKD', person.sortname)
else:
sortname = u''
sortname = ''
return sortname
class Person(db.Model):

View file

@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
#does not work in sqlite
#from sqlalchemy.sql.expression import nullslast
from queryparser import Parser
import models
from . import models
import settings
import utils

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime
import os
@ -67,7 +67,7 @@ def run_scan():
prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
if not prefix[-1] == '/':
prefix += '/'
assert isinstance(prefix, unicode)
assert isinstance(prefix, str)
books = []
for root, folders, files in os.walk(prefix):
for f in files:
@ -121,7 +121,7 @@ def run_import(options=None):
listname = options.get('list')
if listname:
listitems = []
assert isinstance(prefix, unicode)
assert isinstance(prefix, str)
books = []
count = 0
for root, folders, files in os.walk(prefix):

oml/json_pickler.py (new file, 11 lines)
View file

@ -0,0 +1,11 @@
import json
def loads(*args, **kargs):
#print('loads', args, kargs)
if isinstance(args[0], bytes):
args = (args[0].decode('utf-8'),) + args[1:]
return json.loads(*args, **kargs)
def dumps(*args, **kargs):
#print('dumps', args, kargs)
return json.dumps(*args, **kargs).encode('utf-8')
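
Note: SQLAlchemy's PickleType only needs an object exposing dumps()/loads(), so this small module can stand in for the json module as pickler while storing UTF-8 encoded JSON bytes. The models wire it in like this (MutableDict is the project's helper from db, as shown in the hunks above):

    import sqlalchemy as sa
    from db import MutableDict
    import json_pickler

    meta = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))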

View file

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import json
import socket
import struct
import thread
import _thread
from threading import Thread
import time
@ -57,8 +57,8 @@ class LocalNodesBase(Thread):
'port': server['node_port'],
'cert': server['cert']
})
sig = sk.sign(message, encoding='base64')
packet = json.dumps([sig, USER_ID, message])
sig = sk.sign(message.encode('utf-8'), encoding='base64')
packet = json.dumps([sig, USER_ID, message]).encode('utf-8')
else:
packet = None
return packet
@ -89,7 +89,7 @@ class LocalNodesBase(Thread):
now = time.mktime(time.localtime())
if now - last > 60:
last = now
thread.start_new_thread(self.send, ())
_thread.start_new_thread(self.send, ())
except:
if self._active:
logger.debug('receive failed. restart later', exc_info=1)
@ -115,7 +115,7 @@ class LocalNodesBase(Thread):
#print addr
if data['id'] != USER_ID:
if data['id'] not in self._nodes:
thread.start_new_thread(self.new_node, (data, ))
_thread.start_new_thread(self.new_node, (data, ))
elif can_connect(data):
self._nodes[data['id']] = data
@ -166,7 +166,7 @@ class LocalNodes4(LocalNodesBase):
s = socket.socket (socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt (socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self._TTL)
try:
s.sendto(packet + '\0', sockaddr)
s.sendto(packet + b'\0', sockaddr)
except:
logger.debug('LocalNodes4.send failed', exc_info=1)
s.close()
@ -198,7 +198,7 @@ class LocalNodes6(LocalNodesBase):
s = socket.socket(family, socktype, proto)
s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, ttl)
try:
s.sendto(packet + '\0', sockaddr)
s.sendto(packet + b'\0', sockaddr)
except:
logger.debug('LocalNodes6.send failed', exc_info=1)
s.close()
@ -206,7 +206,7 @@ class LocalNodes6(LocalNodesBase):
def get_socket(self):
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
group_bin = socket.inet_pton(socket.AF_INET6, self._BROADCAST) + '\0'*4
group_bin = socket.inet_pton(socket.AF_INET6, self._BROADCAST) + b'\0'*4
s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, group_bin)
self._socket = s
return s
@ -229,7 +229,7 @@ class LocalNodes(object):
def cleanup(self):
if self._active:
for id in self._nodes.keys():
for id in list(self._nodes.keys()):
if not can_connect(self._nodes[id]):
del self._nodes[id]
if not self._active:
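
Note: socket.sendto() requires bytes in Python 3, so the JSON packet is encoded and the trailing NUL terminator becomes a bytes literal. Sketch with a placeholder address and payload:

    import json
    import socket

    packet = json.dumps({'id': 'node'}).encode('utf-8')
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.sendto(packet + b'\0', ('127.0.0.1', 9067))   # b'\0', since str and bytes no longer mix
    s.close()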

View file

@ -1,23 +1,24 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import base64
import hashlib
import os
import codecs
import ox
import pdf
import epub
import txt
import opf
from . import pdf
from . import epub
from . import txt
from . import opf
def get_id(f=None, data=None):
if data:
return base64.b32encode(hashlib.sha1(data).digest())
return base64.b32encode(hashlib.sha1(data).digest()).decode()
else:
return base64.b32encode(ox.sha1sum(f, cached=True).decode('hex'))
return base64.b32encode(codecs.decode(ox.sha1sum(f, cached=True), 'hex')).decode()
def metadata(f, from_=None):
@ -55,16 +56,16 @@ def metadata(f, from_=None):
if key in opf_info:
data[key] = opf_info[key]
if key in data:
if isinstance(data[key], basestring):
if isinstance(data[key], str):
data[key] = data[key].replace('\x00', '')
elif isinstance(data[key], list):
data[key] = [e.replace('\x00', '') if isinstance(e, basestring) else e for e in data[key]]
data[key] = [e.replace('\x00', '') if isinstance(e, str) else e for e in data[key]]
if 'isbn' in data:
data['primaryid'] = ['isbn', data['isbn'][0]]
elif 'asin' in data:
data['primaryid'] = ['asin', data['asin'][0]]
if 'author' in data:
if isinstance(data['author'], basestring):
if isinstance(data['author'], str):
if data['author'].strip():
data['author'] = data['author'].strip().split('; ')
else:
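
Note: Python 3 drops str.decode('hex') and base64.b32encode() now returns bytes, hence codecs.decode(..., 'hex') and the trailing .decode(). A sketch of the id derivation without the ox dependency (ox.sha1sum() returns the hex digest as str):

    import base64
    import codecs
    import hashlib

    digest_hex = hashlib.sha1(b'example').hexdigest()    # stand-in for ox.sha1sum(f, cached=True)
    raw = codecs.decode(digest_hex, 'hex')               # Python 2: digest_hex.decode('hex')
    item_id = base64.b32encode(raw).decode()             # b32encode returns bytes; .decode() gives str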

View file

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import os
import xml.etree.ElementTree as ET
import zipfile
from StringIO import StringIO
from io import StringIO
import re
from PIL import Image

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import xml.etree.ElementTree as ET

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import sys
import tempfile
@ -9,7 +9,7 @@ import os
import shutil
from glob import glob
from pyPdf import PdfFileReader
#from pyPdf import PdfFileReader
import stdnum.isbn
import settings
@ -139,9 +139,9 @@ def info(pdf):
if stdnum.isbn.is_valid(value):
data['isbn'] = [value]
del data['identifier']
for key, value in data.iteritems():
for key, value in data.items():
if isinstance(value, dict):
value = ' '.join(value.values())
value = ' '.join(list(value.values()))
data[key] = value
text = extract_text(pdf)
data['textsize'] = len(text)
@ -150,7 +150,7 @@ def info(pdf):
isbn = extract_isbn(text)
if isbn:
data['isbn'] = [isbn]
if 'isbn' in data and isinstance(data['isbn'], basestring):
if 'isbn' in data and isinstance(data['isbn'], str):
data['isbn'] = [data['isbn']]
if 'date' in data and len(data['date']) == 8 and data['date'].isdigit():
d = data['date']

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import os
from utils import find_isbns

View file

@ -1,17 +1,17 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import stdnum.isbn
import ox
import abebooks
import loc
import lookupbyisbn
import openlibrary
import worldcat
import google
import duckduckgo
from . import abebooks
from . import loc
from . import lookupbyisbn
from . import openlibrary
from . import worldcat
from . import google
from . import duckduckgo
import logging
logger = logging.getLogger('meta')
@ -51,22 +51,22 @@ def lookup(key, value):
ids.append(kv)
done = False
logger.debug('FIXME: sort ids')
ids.sort(key=lambda i: ox.sort_string(u''.join(i)))
ids.sort(key=lambda i: ox.sort_string(''.join(i)))
logger.debug('IDS %s', ids)
for k, v in ids:
for provider, id in providers:
if id == k:
if provider not in provider_data:
provider_data[provider] = {}
for k_, v_ in globals()[provider].lookup(v).iteritems():
for k_, v_ in globals()[provider].lookup(v).items():
if k_ not in provider_data[provider]:
provider_data[provider][k_] = v_
for provider in sorted(
provider_data.keys(),
list(provider_data.keys()),
key=lambda x: -len(provider_data[x])
):
logger.debug('%s %s %s', provider, len(provider_data[provider]), provider_data[provider].keys())
for k_, v_ in provider_data[provider].iteritems():
logger.debug('%s %s %s', provider, len(provider_data[provider]), list(provider_data[provider].keys()))
for k_, v_ in provider_data[provider].items():
if not k_ in data:
data[k_] = v_
for k, v in ids:

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re

View file

@ -2,10 +2,10 @@
# vi:si:et:sw=4:sts=4:ts=4
def get_classification(id):
name = u'%s' % id
name = '%s' % id
base = ''.join([s for s in id.split('/')[0].split('.')[0] if s.isdigit()])
if base in DEWEY:
name = u'%s %s' % (name, DEWEY[base].decode('utf-8'))
name = '%s %s' % (name, DEWEY[base].decode('utf-8'))
return name
DEWEY = {
@ -941,9 +941,9 @@ if __name__ == '__main__':
dewey = {}
for i in range(0, 1000):
url = 'http://dewey.info/class/%s/about.en.json' % i
print url
print(url)
data = json.loads(read_url(url))
for d in data.values():
for d in list(data.values()):
if 'http://www.w3.org/2004/02/skos/core#prefLabel' in d:
value = d['http://www.w3.org/2004/02/skos/core#prefLabel'][0]['value']
dewey[str(i)] = value

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import ox.web.duckduckgo
import stdnum.isbn

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import ox.web.google
import stdnum.isbn

View file

@ -1,15 +1,15 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from ox.cache import read_url
import ox
import re
import xml.etree.ElementTree as ET
from dewey import get_classification
from marc_countries import COUNTRIES
from utils import normalize_isbn
from .dewey import get_classification
from .marc_countries import COUNTRIES
from .utils import normalize_isbn
import logging
logger = logging.getLogger('meta.loc')
@ -86,7 +86,7 @@ def lookup(id):
toc = mods.findall(ns + 'tableOfContents')
if toc:
info['description'] = toc[0].text.strip()
for key in info.keys():
for key in list(info.keys()):
if not info[key]:
del info[key]
return info

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re
@ -8,7 +8,7 @@ from ox.cache import read_url
from ox import find_re, strip_tags, decode_html
import stdnum.isbn
from utils import find_isbns
from .utils import find_isbns
import logging
logger = logging.getLogger('meta.lookupbyisbn')
@ -78,13 +78,13 @@ def lookup(id):
r['description'] = decode_html(strip_tags(desc))
r['cover'] = find_re(data, '<img src="(.*?)" alt="Book cover').replace('._SL160_', '')
for key in r:
if isinstance(r[key], basestring):
if isinstance(r[key], str):
r[key] = decode_html(strip_tags(r[key])).strip()
if 'author' in r and isinstance(r['author'], basestring) and r['author']:
if 'author' in r and isinstance(r['author'], str) and r['author']:
r['author'] = [r['author']]
else:
r['author'] = []
if r['description'].lower() == u'Description of this item is not available at this time.'.lower():
if r['description'].lower() == 'Description of this item is not available at this time.'.lower():
r['description'] = ''
return r

View file

@ -1,16 +1,16 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from datetime import datetime
from urllib import urlencode
from urllib.parse import urlencode
import json
from ox.cache import read_url
from dewey import get_classification
from marc_countries import COUNTRIES
from utils import normalize_isbn
from .dewey import get_classification
from .marc_countries import COUNTRIES
from .utils import normalize_isbn
import logging
logger = logging.getLogger('meta.openlibrary')
@ -41,7 +41,7 @@ def find(query):
results = []
ids = [b for b in r.get('result', []) if b.startswith('/books')]
books = api.get_many(ids).get('result', [])
for olid, value in books.iteritems():
for olid, value in books.items():
olid = olid.split('/')[-1]
book = format(value)
book['olid'] = [olid]
@ -84,7 +84,7 @@ def lookup(id, return_all=False):
data['olid'] = []
if id not in data['olid']:
data['olid'] = [id]
logger.debug('lookup %s => %s', id, data.keys())
logger.debug('lookup %s => %s', id, list(data.keys()))
return data
def get_type(obj):
@ -129,7 +129,7 @@ def format(info, return_all=False):
elif key in ('isbn_10', 'isbn_13'):
if not isinstance(value, list):
value = [value]
value = map(normalize_isbn, value)
value = list(map(normalize_isbn, value))
if KEYS[key] in data:
value = data[KEYS[key]] + value
elif isinstance(value, list) and key not in ('publish_places', 'lccn', 'oclc_numbers'):
@ -149,7 +149,7 @@ def format(info, return_all=False):
def resolve_names(objects, key='name'):
r = []
data = api.get_many([k['key'] for k in objects]).get('result', {})
for k, value in data.iteritems():
for k, value in data.items():
if 'location' in value and value.get('type', {}).get('key') == '/type/redirect':
value = api.get(value['location']).get('result', {})
r.append(value[key])
@ -160,7 +160,7 @@ class API(object):
def _request(self, action, data, timeout=None):
for key in data:
if not isinstance(data[key], basestring):
if not isinstance(data[key], str):
data[key] = json.dumps(data[key])
url = self.base + '/' + action + '?' + urlencode(data)
if timeout is None:
@ -181,7 +181,7 @@ class API(object):
return data
def search(self, query):
if isinstance(query, basestring):
if isinstance(query, str):
query = {
'query': query
}

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re
import stdnum.isbn
@ -10,6 +10,8 @@ def normalize_isbn(value):
return ''.join([s for s in value if s.isdigit() or s == 'X'])
def find_isbns(text):
if isinstance(text, bytes):
text = text.decode()
matches = re.compile('\d[\d\-X\ ]+').findall(text)
matches = [normalize_isbn(value) for value in matches]
return [isbn for isbn in matches if stdnum.isbn.is_valid(isbn)

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import re
import hashlib
@ -99,7 +99,7 @@ def lookup(id):
if m:
data['date'] = m[0]
logger.debug('lookup %s => %s', id, data.keys())
logger.debug('lookup %s => %s', id, list(data.keys()))
return data
info = lookup

View file

@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import json
from urllib import urlencode
from urllib.parse import urlencode
from ox.cache import read_url

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, print_function
import os
@ -8,11 +8,11 @@ from tornado.web import Application
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
import oxtornado
from oxtornado import actions
from . import oxtornado
from .oxtornado import actions
import meta
import utils
from . import meta
from . import utils
import logging
logger = logging.getLogger('metaoml')
@ -49,7 +49,7 @@ def getMetadata(data):
include_edits = data.pop('includeEdits')
else:
include_edits = False
key, value = data.iteritems().next()
key, value = next(iter(data.items()))
if key == 'isbn':
value = utils.normalize_isbn(value)
response = meta.lookup(key, value)

View file

@ -19,9 +19,9 @@ def generate_ssl():
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
with open(settings.ssl_key_path, 'wb') as fd:
os.chmod(settings.ssl_key_path, 0600)
os.chmod(settings.ssl_key_path, 0o600)
fd.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
os.chmod(settings.ssl_key_path, 0400)
os.chmod(settings.ssl_key_path, 0o400)
ca = OpenSSL.crypto.X509()
ca.set_version(2)
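
Note: bare octal literals like 0600 are a SyntaxError in Python 3; the 0o prefix keeps the same value. Small self-contained sketch using a throwaway temp file:

    import os
    import tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    os.chmod(path, 0o600)    # Python 2: 0600
    os.chmod(path, 0o400)    # Python 2: 0400
    os.remove(path)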

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from changelog import Changelog
from user.models import User

View file

@ -11,11 +11,11 @@ from tornado.ioloop import PeriodicCallback
from oxtornado import run_async
from utils import valid, get_public_ipv6
from websocket import trigger_event
import cert
from . import cert
import db
import directory
import json
import nodeapi
from . import nodeapi
import settings
import state
import user

View file

@ -1,14 +1,14 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from Queue import Queue
from queue import Queue
from threading import Thread
import json
import socket
from StringIO import StringIO
from io import StringIO
import gzip
import urllib2
import urllib.request, urllib.error, urllib.parse
from datetime import datetime
import os
import time
@ -135,10 +135,10 @@ class Node(Thread):
'X-Ed25519-Key': settings.USER_ID,
'X-Ed25519-Signature': sig,
}
self._opener.addheaders = zip(headers.keys(), headers.values())
self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
try:
r = self._opener.open(url, data=content, timeout=self.TIMEOUT)
except urllib2.HTTPError as e:
except urllib.error.HTTPError as e:
if e.code == 403:
logger.debug('REMOTE ENDED PEERING')
with db.session():
@ -150,7 +150,7 @@ class Node(Thread):
logger.debug('urllib2.HTTPError %s %s', e, e.code)
self.online = False
return None
except urllib2.URLError as e:
except urllib.error.URLError as e:
logger.debug('urllib2.URLError %s', e)
self.online = False
return None
@ -201,7 +201,7 @@ class Node(Thread):
'X-Node-Protocol': settings.NODE_PROTOCOL,
'Accept-Encoding': 'gzip',
}
self._opener.addheaders = zip(headers.keys(), headers.values())
self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
r = self._opener.open(url, timeout=1)
version = r.headers.get('X-Node-Protocol', None)
if version != settings.NODE_PROTOCOL:
@ -297,7 +297,7 @@ class Node(Thread):
}
t1 = datetime.utcnow()
logger.debug('download %s', url)
self._opener.addheaders = zip(headers.keys(), headers.values())
self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
r = self._opener.open(url, timeout=self.TIMEOUT*2)
if r.getcode() == 200:
if r.headers.get('content-encoding', None) == 'gzip':
@ -338,7 +338,7 @@ class Node(Thread):
headers = {
'User-Agent': settings.USER_AGENT,
}
self._opener.addheaders = zip(headers.keys(), headers.values())
self._opener.addheaders = list(zip(list(headers.keys()), list(headers.values())))
r = self._opener.open(url)
if r.getcode() == 200:
with open(path, 'w') as fd:
@ -379,11 +379,11 @@ class Nodes(Thread):
def _call(self, target, action, *args):
if target == 'all':
nodes = self._nodes.values()
nodes = list(self._nodes.values())
elif target == 'peered':
nodes = [n for n in self._nodes.values() if n.user.peered]
nodes = [n for n in list(self._nodes.values()) if n.user.peered]
elif target == 'online':
nodes = [n for n in self._nodes.values() if n.online]
nodes = [n for n in list(self._nodes.values()) if n.online]
else:
nodes = [self._nodes[target]]
for node in nodes:
@ -412,7 +412,7 @@ class Nodes(Thread):
def join(self):
self._running = False
self._q.put(None)
for node in self._nodes.values():
for node in list(self._nodes.values()):
node.join()
self._local.join()
return Thread.join(self)
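
Note: urllib2 was split into urllib.request and urllib.error in Python 3; the opener API itself is unchanged. Sketch of the renamed pieces used in this file, against a placeholder URL:

    import urllib.request
    import urllib.error

    opener = urllib.request.build_opener()                  # urllib2.build_opener
    try:
        r = opener.open('http://127.0.0.1:9842/status', timeout=5)
    except urllib.error.HTTPError as e:                     # urllib2.HTTPError
        print(e.code)
    except urllib.error.URLError as e:                      # urllib2.URLError
        print(e.reason)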

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from contextlib import contextmanager
import inspect
@ -30,7 +30,7 @@ def _to_json(python_object):
tt = python_object.timetuple()
return '%d-%02d-%02dT%02d:%02d%02dZ' % tuple(list(tt)[:6])
return python_object.strftime('%Y-%m-%dT%H:%M:%SZ')
raise TypeError(u'%s %s is not JSON serializable' % (repr(python_object), type(python_object)))
raise TypeError('%s %s is not JSON serializable' % (repr(python_object), type(python_object)))
def json_dumps(obj):
indent = 2
@ -52,14 +52,14 @@ def trim(docstring):
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
indent = sys.maxint
indent = sys.maxsize
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxint:
if indent < sys.maxsize:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
@ -78,11 +78,11 @@ def defaultcontext():
def api_task(context, request, callback):
if context == None:
context = defaultcontext
action = request.arguments.get('action', [None])[0]
data = request.arguments.get('data', ['{}'])[0]
data = json.loads(data) if data else {}
action = request.arguments.get('action', [None])[0].decode('utf-8')
data = request.arguments.get('data', [b'{}'])[0]
data = json.loads(data.decode('utf-8')) if data else {}
if not action:
methods = actions.keys()
methods = list(actions.keys())
api = []
for f in sorted(methods):
api.append({'name': f,
@ -154,7 +154,7 @@ class ApiActions(dict):
data = data or {}
docs = data.get('docs', False)
code = data.get('code', False)
_actions = self.keys()
_actions = list(self.keys())
_actions.sort()
actions = {}
for a in _actions:
@ -172,16 +172,16 @@ class ApiActions(dict):
def code(self, name, version=None):
f = self[name]
if name != 'api' and hasattr(f, 'func_closure') and f.func_closure:
fc = filter(lambda c: hasattr(c.cell_contents, '__call__'), f.func_closure)
if name != 'api' and hasattr(f, 'func_closure') and f.__closure__:
fc = [c for c in f.__closure__ if hasattr(c.cell_contents, '__call__')]
f = fc[len(fc)-1].cell_contents
info = f.func_code.co_filename
info = u'%s:%s' % (info, f.func_code.co_firstlineno)
info = f.__code__.co_filename
info = '%s:%s' % (info, f.__code__.co_firstlineno)
return info, trim(inspect.getsource(f))
def register(self, method, action=None, cache=True, version=None):
if not action:
action = method.func_name
action = method.__name__
if version:
if not version in self.versions:
self.versions[version] = {}
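
Note: the function introspection attributes were renamed in Python 3 (func_name, func_code, func_closure became __name__, __code__, __closure__). Tiny sketch:

    def example():
        return 42

    print(example.__name__)                    # Python 2: example.func_name
    print(example.__code__.co_filename,
          example.__code__.co_firstlineno)     # Python 2: example.func_code
    print(example.__closure__)                 # Python 2: example.func_closure (None here)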

View file

@ -101,7 +101,7 @@ class Parser(object):
q = ~q
return q
elif key_type in ("string", "text"):
if isinstance(v, unicode):
if isinstance(v, str):
v = unicodedata.normalize('NFKD', v).lower()
else:
v = v.lower()

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, print_function
import os
import sys

View file

@ -52,15 +52,15 @@ for key in server_defaults:
release = pdict(os.path.join(config_path, 'release.json'))
if os.path.exists(key_path):
with open(key_path) as fd:
with open(key_path, 'rb') as fd:
sk = ed25519.SigningKey(fd.read())
vk = sk.get_verifying_key()
else:
sk, vk = ed25519.create_keypair()
with open(key_path, 'w') as fd:
os.chmod(key_path, 0600)
with open(key_path, 'wb') as fd:
os.chmod(key_path, 0o600)
fd.write(sk.to_bytes())
os.chmod(key_path, 0400)
os.chmod(key_path, 0o400)
USER_ID = vk.to_ascii(encoding='base64')
OML_UPDATE_KEY='K55EZpPYbP3X+3mA66cztlw1sSaUMqGwfTDKQyP2qOU'

View file

@ -1,14 +1,14 @@
import httplib
import http.client
import socket
import urllib2
import urllib.request, urllib.error, urllib.parse
import ssl
import hashlib
import logging
logger = logging.getLogger('oml.ssl_request')
class InvalidCertificateException(httplib.HTTPException, urllib2.URLError):
class InvalidCertificateException(http.client.HTTPException, urllib.error.URLError):
def __init__(self, fingerprint, cert, reason):
httplib.HTTPException.__init__(self)
http.client.HTTPException.__init__(self)
self.fingerprint = fingerprint
self.cert_fingerprint = hashlib.sha1(cert).hexdigest()
self.reason = reason
@ -17,11 +17,11 @@ class InvalidCertificateException(httplib.HTTPException, urllib2.URLError):
return ('%s (local) != %s (remote) (%s)\n' %
(self.fingerprint, self.cert_fingerprint, self.reason))
class CertValidatingHTTPSConnection(httplib.HTTPConnection):
default_port = httplib.HTTPS_PORT
class CertValidatingHTTPSConnection(http.client.HTTPConnection):
default_port = http.client.HTTPS_PORT
def __init__(self, host, port=None, fingerprint=None, strict=None, **kwargs):
httplib.HTTPConnection.__init__(self, host, port, strict, **kwargs)
http.client.HTTPConnection.__init__(self, host, port, strict, **kwargs)
self.fingerprint = fingerprint
if self.fingerprint:
self.cert_reqs = ssl.CERT_REQUIRED
@ -44,9 +44,9 @@ class CertValidatingHTTPSConnection(httplib.HTTPConnection):
'fingerprint mismatch')
#logger.debug('CIPHER %s VERSION %s', self.sock.cipher(), self.sock.ssl_version)
class VerifiedHTTPSHandler(urllib2.HTTPSHandler):
class VerifiedHTTPSHandler(urllib.request.HTTPSHandler):
def __init__(self, **kwargs):
urllib2.AbstractHTTPHandler.__init__(self)
urllib.request.AbstractHTTPHandler.__init__(self)
self._connection_args = kwargs
def https_open(self, req):
@ -57,15 +57,15 @@ class VerifiedHTTPSHandler(urllib2.HTTPSHandler):
try:
return self.do_open(http_class_wrapper, req)
except urllib2.URLError, e:
except urllib.error.URLError as e:
if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1:
raise InvalidCertificateException(self.fingerprint, '',
e.reason.args[1])
raise
https_request = urllib2.HTTPSHandler.do_request_
https_request = urllib.request.HTTPSHandler.do_request_
def get_opener(fingerprint):
handler = VerifiedHTTPSHandler(fingerprint=fingerprint)
opener = urllib2.build_opener(handler)
opener = urllib.request.build_opener(handler)
return opener
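
Note: httplib moved to http.client, and the "except X, e" form is gone in favor of "except X as e". Minimal sketch of the renamed base classes; note also that the old strict parameter was removed from http.client.HTTPConnection in Python 3.4, so passing it through positionally is likely to misbehave.

    import http.client                            # Python 2: httplib

    class CertValidatingHTTPSConnection(http.client.HTTPConnection):
        default_port = http.client.HTTPS_PORT     # Python 2: httplib.HTTPS_PORT

    conn = CertValidatingHTTPSConnection('example.com')   # nothing is connected yet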

View file

@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from Queue import Queue
from queue import Queue
from threading import Thread
from websocket import trigger_event

View file

@ -6,8 +6,8 @@ try:
GObject.threads_init()
use_Gtk = True
except:
from Tkinter import Tk
import tkFileDialog
from tkinter import Tk
import tkinter.filedialog
use_Gtk = False
class GtkUI:
@ -23,16 +23,16 @@ class GtkUI:
if response == Gtk.ResponseType.OK:
filename = dialog.get_filename()
if DEBUG:
print filename, 'selected'
print(filename, 'selected')
elif response == Gtk.ResponseType.CANCEL:
if DEBUG:
print 'Closed, no files selected'
print('Closed, no files selected')
filename = None
dialog.destroy()
while Gtk.events_pending():
Gtk.main_iteration()
if DEBUG:
print "done"
print("done")
return filename
def selectFile(self, data):
@ -47,16 +47,16 @@ class GtkUI:
if response == Gtk.ResponseType.OK:
filename = dialog.get_filename()
if DEBUG:
print filename, 'selected'
print(filename, 'selected')
elif response == Gtk.ResponseType.CANCEL:
if DEBUG:
print 'Closed, no files selected'
print('Closed, no files selected')
filename = None
dialog.destroy()
while Gtk.events_pending():
Gtk.main_iteration()
if DEBUG:
print "done"
print("done")
return filename
class TkUI:
@ -64,10 +64,10 @@ class TkUI:
self.root = Tk()
self.root.withdraw() #hiding tkinter window
def selectFolder(self, data):
return tkFileDialog.askdirectory(title=data.get("title", "Select Folder"))
return tkinter.filedialog.askdirectory(title=data.get("title", "Select Folder"))
def selectFile(self, data):
return tkFileDialog.askopenfilename(title=data.get("title", "Select File"))
return tkinter.filedialog.askopenfilename(title=data.get("title", "Select File"))
if use_Gtk:
ui = GtkUI()
@ -77,6 +77,6 @@ else:
if __name__ == '__main__':
import sys
if len(sys.argv) == 2 and sys.argv[1] == 'folder':
print ui.selectFolder({})
print(ui.selectFolder({}))
else:
print ui.selectFile({})
print(ui.selectFile({}))

View file

@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
from contextlib import closing
import json
import os
import tarfile
import urllib2
import urllib.request, urllib.error, urllib.parse
import shutil
import subprocess
@ -34,10 +34,10 @@ def verify(release):
return True
def get(url, filename=None):
request = urllib2.Request(url, headers={
request = urllib.request.Request(url, headers={
'User-Agent': settings.USER_AGENT
})
with closing(urllib2.urlopen(request)) as u:
with closing(urllib.request.urlopen(request)) as u:
if not filename:
data = u.read()
return data
@ -76,7 +76,7 @@ def download():
module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
url = RELEASE_URL.replace('release.json', release['modules'][module]['name'])
if not os.path.exists(module_tar):
print 'download', os.path.basename(module_tar)
print('download', os.path.basename(module_tar))
get(url, module_tar)
if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
os.unlink(module_tar)

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from copy import deepcopy
import json
@ -11,7 +11,7 @@ import ox
from changelog import Changelog
from oxtornado import actions
from utils import update_dict
import models
from . import models
import settings
import state

View file

@ -9,6 +9,7 @@ from changelog import Changelog
from db import MutableDict
from queryparser import Parser
import db
import json_pickler
import settings
import state
@ -22,7 +23,7 @@ class User(db.Model):
modified = sa.Column(sa.DateTime())
id = sa.Column(sa.String(43), primary_key=True)
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json)))
info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
nickname = sa.Column(sa.String(256), unique=True)
@ -140,7 +141,7 @@ class List(db.Model):
index_ = sa.Column(sa.Integer())
type = sa.Column(sa.String(64))
_query = sa.Column('query', MutableDict.as_mutable(sa.PickleType(pickler=json)))
_query = sa.Column('query', MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id'))
user = sa.orm.relationship('User', backref=sa.orm.backref('lists', lazy='dynamic'))
@ -250,7 +251,7 @@ class List(db.Model):
id = ''
if self.user_id != settings.USER_ID:
id += self.user.nickname
id = u'%s:%s' % (id, self.name)
id = '%s:%s' % (id, self.name)
return id
@property
@ -258,7 +259,7 @@ class List(db.Model):
id = ''
if self.user_id != settings.USER_ID:
id += self.user_id
id = u'%s:%s' % (id, self.id)
id = '%s:%s' % (id, self.id)
return id
def __repr__(self):

View file

@ -1,15 +1,15 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import os
import sys
from PIL import Image
from StringIO import StringIO
from io import StringIO, BytesIO
import re
import stdnum.isbn
import socket
import cStringIO
import io
import gzip
import time
from datetime import datetime
@ -49,14 +49,18 @@ def get_positions(ids, pos):
return positions
def get_by_key(objects, key, value):
obj = filter(lambda o: o.get(key) == value, objects)
obj = [o for o in objects if o.get(key) == value]
return obj and obj[0] or None
def get_by_id(objects, id):
return get_by_key(objects, 'id', id)
def resize_image(data, width=None, size=None):
source = Image.open(StringIO(data))
if isinstance(data, bytes):
data = BytesIO(data)
else:
data = StringIO(data)
source = Image.open(data)
if source.mode == 'P':
source = source.convert('RGB')
source_width = source.size[0]
@ -83,7 +87,7 @@ def resize_image(data, width=None, size=None):
else:
resize_method = Image.BICUBIC
output = source.resize((width, height), resize_method)
o = StringIO()
o = BytesIO()
output.save(o, format='jpeg')
data = o.getvalue()
o.close()
@ -91,13 +95,13 @@ def resize_image(data, width=None, size=None):
def sort_title(title):
title = title.replace(u'Æ', 'Ae')
title = title.replace('Æ', 'Ae')
if isinstance(title, str):
title = unicode(title)
title = str(title)
title = ox.sort_string(title)
#title
title = re.sub(u'[\'!¿¡,\.;\-"\:\*\[\]]', '', title)
title = re.sub('[\'!¿¡,\.;\-"\:\*\[\]]', '', title)
return title.strip()
def get_position_by_id(list, key):
@ -150,6 +154,7 @@ def get_local_ipv4():
cmd = ['/sbin/route', '-n', 'get', 'default']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
stdout = stdout.decode('utf-8')
interface = [[p.strip() for p in s.split(':', 1)]
for s in stdout.strip().split('\n') if 'interface' in s]
if interface:
@ -157,6 +162,7 @@ def get_local_ipv4():
cmd = ['ifconfig', interface]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
stdout = stdout.decode('utf-8')
ips = [l for l in stdout.split('\n') if 'inet ' in l]
if ips:
ip = ips[0].strip().split(' ')[1]
@ -164,6 +170,7 @@ def get_local_ipv4():
cmd = ['ip', 'route', 'show']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
stdout = stdout.decode('utf-8')
local = [l for l in stdout.split('\n') if 'default' in l]
if local:
dev = local[0].split(' ')[4]
@ -174,7 +181,7 @@ def get_local_ipv4():
def update_dict(root, data):
for key in data:
keys = map(lambda part: part.replace('\0', '\\.'), key.replace('\\.', '\0').split('.'))
keys = [part.replace('\0', '\\.') for part in key.replace('\\.', '\0').split('.')]
value = data[key]
p = root
while len(keys)>1:
@ -208,7 +215,7 @@ def remove_empty_tree(leaf):
else:
break
utc_0 = int(time.mktime(datetime(1970, 01, 01).timetuple()))
utc_0 = int(time.mktime(datetime(1970, 0o1, 0o1).timetuple()))
def datetime2ts(dt):
return int(time.mktime(dt.utctimetuple())) - utc_0
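
Note: Popen pipes return bytes in Python 3, hence the decode('utf-8') calls added above; sketch using echo as a stand-in for the route/ip commands. The datetime(1970, 0o1, 0o1) spelling only keeps the old octal digits valid; plain datetime(1970, 1, 1) would be the more usual form.

    import subprocess

    p = subprocess.Popen(['echo', 'default via 10.0.0.1 dev eth0'],
                         stdout=subprocess.PIPE, close_fds=True)
    stdout, stderr = p.communicate()
    stdout = stdout.decode('utf-8')        # bytes -> str before splitting into lines
    local = [l for l in stdout.split('\n') if 'default' in l]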

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from tornado.websocket import WebSocketHandler
from tornado.ioloop import IOLoop

View file

@ -1,5 +1,5 @@
Twisted
simplejson
ed25519
SQLAlchemy==0.9.4
pyopenssl>=0.13.1
SQLAlchemy==0.9.7
pyopenssl>=0.14
pillow