use python logging

This commit is contained in:
j 2014-05-17 16:26:59 +02:00
parent d9346e8328
commit 6a8a7b956d
27 changed files with 174 additions and 141 deletions

View file

@ -6,7 +6,6 @@ from __future__ import division
import os
import sys
import app
import server

View file

@ -5,6 +5,7 @@ from __future__ import division
from flask import Flask
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
import logging
import oxflask.api
@ -23,6 +24,12 @@ import user.api
import item.views
import commands
#FORMAT = '%(asctime)-15s %(clientip)s %(user)-8s %(message)s'
#logging.basicConfig(format=FORMAT)
#logger = logging.getLogger('oml.app')
#logger.warning('test')
logging.basicConfig(level=logging.DEBUG)
app = Flask('openmedialibrary', static_folder=settings.static_path)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////%s' % settings.db_path
app.register_blueprint(oxflask.api.app)

View file

@ -2,6 +2,8 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import logging
import json
from datetime import datetime
@ -12,6 +14,8 @@ from settings import db
import state
from websocket import trigger_event
logger = logging.getLogger('oml.changelog')
class Changelog(db.Model):
'''
additem itemid metadata from file (info) + OLID
@ -60,7 +64,7 @@ class Changelog(db.Model):
def apply_changes(cls, user, changes):
for change in changes:
if not Changelog.apply_change(user, change, trigger=False):
print 'FAIL', change
logger.debug('FAIL %s', change)
break
return False
if changes:
@ -84,19 +88,19 @@ class Changelog(db.Model):
c.data = data
c.sig = sig
action, args = json.loads(data)
print 'apply change', action, args
logger.debug('apply change %s %s', action, args)
if getattr(c, 'action_' + action)(user, timestamp, *args):
print 'change applied'
logger.debug('change applied')
db.session.add(c)
db.session.commit()
if trigger:
trigger_event('change', {});
return True
else:
print 'INVLAID SIGNATURE ON CHANGE', change
logger.debug('INVLAID SIGNATURE ON CHANGE %s', change)
raise Exception, 'invalid signature'
else:
print 'revsion does not match! got', revision, 'expecting', next_revision
logger.debug('revsion does not match! got %s expecting %s', revision, next_revision)
return False
def __repr__(self):
@ -154,10 +158,10 @@ class Changelog(db.Model):
return True
key = meta.keys()[0]
if not meta[key] and i.meta.get('mainid') == key:
print 'remove id mapping', key, meta[key], 'currently', i.meta[key]
logger.debug('remove id mapping %s %s currently %s', key, meta[key], i.meta[key])
i.update_mainid(key, meta[key])
elif meta[key] and (i.meta.get('mainid') != key or meta[key] != i.meta.get(key)):
print 'new mapping', key, meta[key], 'currently', i.meta.get('mainid'), i.meta.get(i.meta.get('mainid'))
logger.debug('new mapping %s %s currently %s %s', key, meta[key], i.meta.get('mainid'), i.meta.get(i.meta.get('mainid')))
i.update_mainid(key, meta[key])
return True

View file

@ -1,14 +1,17 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# DHT placeholder
from __future__ import division
# DHT placeholder
import logging
import requests
import ed25519
import json
import settings
logger = logging.getLogger('oml.directory')
base = settings.server['directory_service']
def get(vk):
@ -23,7 +26,8 @@ def get(vk):
vk.verify(sig, data, encoding='base64')
data = json.loads(data)
except ed25519.BadSignatureError:
print 'invalid signature'
logger.debug('invalid signature')
data = None
return data
@ -39,7 +43,7 @@ def put(sk, data):
r = requests.put(url, data, headers=headers, timeout=2)
except:
import traceback
print 'directory.put failed:', data
logger.info('directory.put failed: %s', data)
traceback.print_exc()
return False
return r.status_code == 200

View file

@ -4,9 +4,12 @@ from __future__ import division
from threading import Thread
import time
import logging
import state
logger = logging.getLogger('oml.downloads')
class Downloads(Thread):
def __init__(self, app):
@ -21,11 +24,11 @@ class Downloads(Thread):
for i in item.models.Item.query.filter(
item.models.Item.transferadded!=None).filter(
item.models.Item.transferprogress<1):
print 'DOWNLOAD', i, i.users
logger.debug('DOWNLOAD %s %s', i, i.users)
for p in i.users:
if state.nodes.check_online(p.id):
r = state.nodes.download(p.id, i)
print 'download ok?', r
logger.debug('download ok? %s', r)
return True
return False

View file

@ -2,6 +2,8 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import logging
import json
from oxflask.api import actions
from oxflask.shortcuts import returns_json
@ -16,6 +18,8 @@ import meta
import utils
logger = logging.getLogger('oml.item.api')
@returns_json
def find(request):
'''
@ -91,19 +95,19 @@ actions.register(get)
def edit(request):
response = {}
data = json.loads(request.form['data']) if 'data' in request.form else {}
print 'edit', data
logger.debug('edit %s', data)
item = models.Item.get(data['id'])
keys = filter(lambda k: k in models.Item.id_keys, data.keys())
print item, keys
logger.debug('%s %s', item, keys)
if item and keys and item.json()['mediastate'] == 'available':
key = keys[0]
print 'update mainid', key, data[key]
logger.debug('update mainid %s %s', key, data[key])
if key in ('isbn10', 'isbn13'):
data[key] = utils.normalize_isbn(data[key])
item.update_mainid(key, data[key])
response = item.json()
else:
print 'can only edit available items'
logger.info('can only edit available items')
response = item.json()
return response
actions.register(edit, cache=False)
@ -111,7 +115,7 @@ actions.register(edit, cache=False)
@returns_json
def remove(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
print 'remove files', data['ids']
logger.debug('remove files %s', data)
if 'ids' in data and data['ids']:
for i in models.Item.query.filter(models.Item.id.in_(data['ids'])):
i.remove_file()
@ -135,7 +139,7 @@ def findMetadata(request):
'''
response = {}
data = json.loads(request.form['data']) if 'data' in request.form else {}
print 'findMetadata', data
logger.debug('findMetadata %s', data)
response['items'] = meta.find(**data)
return response
actions.register(findMetadata)
@ -143,7 +147,7 @@ actions.register(findMetadata)
@returns_json
def getMetadata(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
print 'getMetadata', data
logger.debug('getMetadata %s', data)
key, value = data.iteritems().next()
if key in ('isbn10', 'isbn13'):
value = utils.normalize_isbn(value)
@ -191,7 +195,7 @@ actions.register(scan, cache=False)
@returns_json
def _import(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
print 'api.import', data
logger.debug('api.import %s', data)
state.main.add_callback(state.websockets[0].put, json.dumps(['import', data]))
return {}
actions.register(_import, 'import', cache=False)

View file

@ -10,6 +10,7 @@ import hashlib
from datetime import datetime
from StringIO import StringIO
import shutil
import logging
import Image
import ox
@ -33,6 +34,8 @@ from covers import covers
from changelog import Changelog
from websocket import trigger_event
logger = logging.getLogger('oml.item.model')
class Work(db.Model):
created = db.Column(db.DateTime())
@ -278,8 +281,9 @@ class Item(db.Model):
if k != key:
if k in self.meta:
del self.meta[k]
print 'mainid', 'mainid' in self.meta, self.meta.get('mainid')
print 'key', key, self.meta.get(key)
logger.debug('mainid %s %s', 'mainid' in self.meta, self.meta.get('mainid'))
logger.debug('key %s %s', key, self.meta.get(key))
# get metadata from external resources
self.scrape()
self.update()
@ -319,12 +323,12 @@ class Item(db.Model):
def scrape(self):
mainid = self.meta.get('mainid')
print 'scrape', mainid, self.meta.get(mainid)
logger.debug('scrape %s %s', mainid, self.meta.get(mainid))
if mainid:
m = meta.lookup(mainid, self.meta[mainid])
self.meta.update(m)
else:
print 'FIX UPDATE', mainid
logger.debug('FIX UPDATE %s', mainid)
self.update()
def queue_download(self):
@ -339,7 +343,7 @@ class Item(db.Model):
f = File.get(self.id)
content_id = media.get_id(data=content)
if content_id != self.id:
print 'INVALID CONTENT', self.id, 'vs', content_id
logger.debug('INVALID CONTENT %s vs %s', self.id, content_id)
return False
if not f:
path = 'Downloads/%s.%s' % (self.id, self.info['extension'])
@ -355,12 +359,13 @@ class Item(db.Model):
self.added = datetime.now()
Changelog.record(u, 'additem', self.id, self.info)
self.update()
f.move()
trigger_event('transfer', {
'id': self.id, 'progress': 1
})
return True
else:
print 'TRIED TO SAVE EXISTING FILE!!!'
logger.debug('TRIED TO SAVE EXISTING FILE!!!')
self.transferprogress = 1
self.update()
return False
@ -368,7 +373,7 @@ class Item(db.Model):
def remove_file(self):
for f in self.files.all():
path = f.fullpath()
print path
logger.debug('remove file %s', path)
if os.path.exists(path):
os.unlink(path)
db.session.delete(f)
@ -484,7 +489,6 @@ class File(db.Model):
if author.startswith('.'):
author = '_' + author[1:]
filename = '%s.%s' % (title, extension)
print self.sha1, author, filename
new_path = os.path.join(author[0].upper(), author, filename)
if self.path == new_path:
return
@ -501,7 +505,6 @@ class File(db.Model):
shutil.move(current_path, path)
self.path = new_path
self.save()
print 'move', current_path, new_path
def save(self):
db.session.add(self)

View file

@ -31,7 +31,6 @@ def remove_missing():
path = f.item.get_path()
if not os.path.exists(path):
dirty = True
print 'file gone', f, path
f.item.remove_file()
if dirty:
db.session.commit()

View file

@ -67,7 +67,6 @@ def cover(id, size=None):
if not data:
data = covers[id]
if not data:
print 'check for cover', id
data = item.update_cover()
if not data:
data = covers.black()

View file

@ -2,19 +2,21 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import socket
import thread
import json
import logging
import socket
import struct
from threading import Thread
import sys
import subprocess
import sys
import thread
from threading import Thread
from settings import preferences, server, USER_ID, sk
from node.utils import get_public_ipv6
from ed25519_utils import valid
logger = logging.getLogger('oml.localnodes')
def can_connect(data):
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
@ -122,7 +124,7 @@ class LocalNodes(Thread):
return self._nodes[user_id]
def new_node(self, data):
print 'NEW NODE', data
logger.debug('NEW NODE %s', data)
if can_connect(data):
self._nodes[data['id']] = data
with self._app.app_context():

View file

@ -2,6 +2,9 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import logging
logger = logging.getLogger('meta')
import abebooks
import loc
import lookupbyisbn
@ -41,7 +44,7 @@ def lookup(key, value):
if not kv in ids:
ids.append(kv)
done = False
print 'lookup %s=%s =>' % ids[0], ids
logger.debug('lookup %s=%s => %s', ids[0][0], ids[0][1], ids)
for k, v in ids:
for provider, id in providers:
if id == k and provider not in provider_data:
@ -50,7 +53,7 @@ def lookup(key, value):
provider_data.keys(),
key=lambda x: -len(provider_data[x])
):
print provider, len(provider_data[provider]), provider_data[provider].keys()
logger.debug('%s %s %s', provider, len(provider_data[provider]), provider_data[provider].keys())
for k_, v_ in provider_data[provider].iteritems():
if not k_ in data:
data[k_] = v_

View file

@ -1,7 +1,14 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
from ox.cache import read_url
import re
import lxml.html
import logging
logger = logging.getLogger('meta.abebooks')
def get_ids(key, value):
ids = []
if key in ('isbn10', 'isbn13'):
@ -12,12 +19,11 @@ def get_ids(key, value):
if urls:
ids.append((key, value))
if ids:
print 'abebooks.get_ids', key, value
print ids
logger.debug('get_ids %s %s => %s', key, value, ids)
return ids
def lookup(id):
print 'abebooks.lookup', id
logger.debug('lookup %s', id)
return {}
def get_data(id):

View file

@ -7,9 +7,12 @@ import stdnum.isbn
from .utils import find_isbns
import logging
logger = logging.getLogger('meta.duckduckgo')
def find(title, author=None, publisher=None, date=None):
print 'duckduckgo.find', title, author, publisher, date
logger.debug('find %s %s %s %s', title, author, publisher, date)
query = title
if author:
if isinstance(author, list):

View file

@ -7,9 +7,12 @@ import stdnum.isbn
from .utils import find_isbns
import logging
logger = logging.getLogger('meta.google')
def find(title, author=None, publisher=None, date=None):
print 'google.find', title, author, publisher, date
logger.debug('find %s %s %s %s', title, author, publisher, date)
query = title
if author:
if isinstance(author, list):
@ -19,7 +22,7 @@ def find(title, author=None, publisher=None, date=None):
isbns = []
for r in ox.web.google.find(query):
isbns += find_isbns(' '.join(r))
print isbns, 'google'
logger.debug('isbns %s', isbns)
results = []
done = set()
for isbn in isbns:

View file

@ -10,6 +10,9 @@ import xml.etree.ElementTree as ET
from utils import normalize_isbn
from marc_countries import COUNTRIES
import logging
logger = logging.getLogger('meta.loc')
def get_ids(key, value):
ids = []
if key in ['isbn10', 'isbn13']:
@ -19,12 +22,11 @@ def get_ids(key, value):
if match:
ids.append(('lccn', match.group(1)))
if ids:
print 'loc.get_ids', key, value
print ids
logger.debug('get_ids %s,%s => %s', key, value, ids)
return ids
def lookup(id):
print 'loc.lookup', id
logger.debug('lookup %s', id)
ns = '{http://www.loc.gov/mods/v3}'
url = 'http://lccn.loc.gov/%s/mods' % id
data = read_url(url)

View file

@ -3,6 +3,9 @@ from ox import find_re, strip_tags, decode_html
import re
import stdnum.isbn
import logging
logger = logging.getLogger('meta.lookupbyisbn')
base = 'http://www.lookupbyisbn.com'
def get_ids(key, value):
@ -20,12 +23,11 @@ def get_ids(key, value):
if stdnum.isbn.is_valid(value):
ids.append(('isbn10', value))
if ids:
print 'lookupbyisbn.get_ids', key, value
print ids
logger.debug('get_ids %s, %s => %s', key, value, ids)
return ids
def lookup(id):
print 'lookupbyisbn.lookup', id
logger.debug('lookup %s', id)
r = {
'asin': id
}

View file

@ -9,6 +9,9 @@ import json
from marc_countries import COUNTRIES
from utils import normalize_isbn
import logging
logger = logging.getLogger('meta.openlibrary')
KEYS = {
'authors': 'author',
'covers': 'cover',
@ -31,7 +34,7 @@ def find(*args, **kargs):
args = [a.replace(':', ' ') for a in args]
for k in ('date', 'publisher'):
if k in kargs:
print 'ignoring %s on openlibrary' % k, kargs[k]
logger.debug('ignoring %s on openlibrary %s', k, kargs[k])
del kargs[k]
for k, v in kargs.iteritems():
key = KEYS.keys()[KEYS.values().index(k)]
@ -43,7 +46,7 @@ def find(*args, **kargs):
args += v
query = ' '.join(args)
query = query.strip()
print 'openlibrary.find', query
logger.debug('find %s', query)
r = api.search(query)
results = []
ids = [b for b in r.get('result', []) if b.startswith('/books')]
@ -66,7 +69,7 @@ def get_ids(key, value):
if (id, v) not in ids:
ids.append((id, v))
elif key in ('isbn10', 'isbn13', 'oclc', 'lccn'):
print 'openlibraryid.get_ids', key, value
logger.debug('openlibraryid.get_ids %s %s', key, value)
r = api.things({'type': '/type/edition', key.replace('isbn', 'isbn_'): value})
for b in r.get('result', []):
if b.startswith('/books'):
@ -75,8 +78,7 @@ def get_ids(key, value):
if kv not in ids:
ids.append(kv)
if ids:
print 'openlibraryid.get_ids', key, value
print ids
logger.debug('get_ids %s %s => %s', key, value, ids)
return ids
def lookup(id, return_all=False):
@ -86,7 +88,7 @@ def lookup(id, return_all=False):
#info = json.loads(read_url(url))
data = format(info, return_all)
data['olid'] = id
print 'openlibrary.lookup', id, data.keys()
logger.debug('lookup %s => %s', id, data.keys())
return data
def format(info, return_all=False):
@ -133,8 +135,8 @@ class API(object):
url = self.base + '/' + action + '?' + urlencode(data)
result = json.loads(read_url(url))
if 'status' in result and result['status'] == 'error' or 'error' in result:
print 'FAILED', action, data
print 'URL', url
logger.info('FAILED %s %s', action, data)
logger.info('URL %s', url)
return result
def get(self, key):
@ -152,7 +154,7 @@ class API(object):
}
data = self._request('search', {'q': query})
if 'status' in data and data['status'] == 'error':
print 'FAILED', query
logger.info('FAILED %s', query)
return data
def things(self, query):

View file

@ -1,32 +0,0 @@
import json
from ox.cache import read_url
import ox.web.lookupbyisbn
from utils import normalize_isbn
import openlibrary as ol
def add_lookupbyisbn(item):
isbn = item.meta.get('isbn10', item.meta.get('isbn13'))
if isbn:
more = ox.web.lookupbyisbn.get_data(isbn)
if more:
for key in more:
if more[key]:
value = more[key]
if isinstance(value, basestring):
value = ox.strip_tags(ox.decode_html(value))
elif isinstance(value, list):
value = [ox.strip_tags(ox.decode_html(v)) for v in value]
item.meta[key] = value
if 'author' in item.meta and isinstance(item.meta['author'], basestring):
item.meta['author'] = [item.meta['author']]
if 'isbn' in item.meta:
del item.meta['isbn']
def update_ol(item):
info = ol.info(item.meta['olid'])
for key in info:
item.meta[key] = info[key]

View file

@ -8,6 +8,9 @@ import re
from utils import normalize_isbn
import stdnum.isbn
import logging
logger = logging.getLogger('meta.worldcat')
base_url = 'http://www.worldcat.org'
def get_ids(key, value):
@ -28,8 +31,8 @@ def get_ids(key, value):
if k in info:
ids.append((k, info[k]))
if ids:
print 'worldcat.get_ids', key, value
print ids
logger.debug('get_ids %s %s', key, value)
logger.debug('%s', ids)
return ids
def lookup(id):
@ -58,8 +61,7 @@ def lookup(id):
data['isbn%d'%len(isbn)] = isbn
if 'author' in data:
data['author'] = [data['author']]
print 'worldcat.lookup', id
print data.keys()
logger.debug('lookup %s => %s', id, data.keys())
return data
info = lookup

View file

@ -9,6 +9,9 @@ from user.models import User
import state
from websocket import trigger_event
import logging
logger = logging.getLogger('oml.node.api')
def api_pullChanges(app, remote_id, user_id=None, from_=None, to=None):
if user_id and not from_ and not to:
from_ = user_id
@ -34,7 +37,7 @@ def api_pullChanges(app, remote_id, user_id=None, from_=None, to=None):
def api_pushChanges(app, user_id, changes):
user = User.get(user_id)
if not Changelog.apply_changes(user, changes):
print 'FAILED TO APPLY CHANGE'
logger.debug('FAILED TO APPLY CHANGE')
state.nodes.queue(user_id, 'pullChanges')
return False
return True
@ -58,7 +61,7 @@ def api_requestPeering(app, user_id, username, message):
def api_acceptPeering(app, user_id, username, message):
user = User.get(user_id)
print 'incoming acceptPeering event: pending:', user.pending
logger.debug('incoming acceptPeering event: pending: %s', user.pending)
if user and user.pending == 'sent':
if not user.info:
user.info = {}

View file

@ -18,6 +18,9 @@ from ed25519_utils import valid
import api
import cert
import logging
logger = logging.getLogger('oml.node.server')
class NodeHandler(tornado.web.RequestHandler):
def initialize(self, app):
@ -44,7 +47,7 @@ class NodeHandler(tornado.web.RequestHandler):
content = {}
if valid(key, data, sig):
action, args = json.loads(data)
print key, 'action', action, args
logger.debug('%s action %s %s', key, action, args)
if action == 'ping':
content = {
'ip': request.remote_addr
@ -57,7 +60,7 @@ class NodeHandler(tornado.web.RequestHandler):
) or (u and u.peered):
content = getattr(api, 'api_' + action)(self.app, key, *args)
else:
print 'PEER', key, 'IS UNKNOWN SEND 403'
logger.debug('PEER %s IS UNKNOWN SEND 403', key)
self.set_status(403)
content = {
'status': 'not peered'
@ -91,7 +94,7 @@ class ShareHandler(tornado.web.RequestHandler):
'txt': 'text/plain',
}.get(path.split('.')[-1], None)
self.set_header('Content-Type', mimetype)
print 'GET file', id
logger.debug('GET file %s', id)
with open(path, 'rb') as f:
while 1:
data = f.read(16384)

View file

@ -22,6 +22,9 @@ from websocket import trigger_event
from localnodes import LocalNodes
from ssl_request import get_opener
import logging
logger = logging.getLogger('oml.nodes')
ENCODING='base64'
class Node(object):
@ -79,7 +82,7 @@ class Node(object):
self.resolve()
url = self.url
if not self.url:
print 'unable to find host', self.user_id
logger.debug('unable to find host %s', self.user_id)
self.online = False
return None
content = json.dumps([action, args])
@ -97,20 +100,20 @@ class Node(object):
r = self._opener.open(url, data=content)
except urllib2.HTTPError as e:
if e.code == 403:
print 'REMOTE ENDED PEERING'
logger.debug('REMOTE ENDED PEERING')
if self.user.peered:
self.user.update_peering(False)
self.online = False
return
print 'urllib2.HTTPError', e, e.code
logger.debug('urllib2.HTTPError %s %s', e, e.code)
self.online = False
return None
except urllib2.URLError as e:
print 'urllib2.URLError', e
logger.debug('urllib2.URLError %s', e)
self.online = False
return None
except:
print 'unknown url error'
logger.debug('unknown url error')
import traceback
print traceback.print_exc()
self.online = False
@ -120,7 +123,7 @@ class Node(object):
if sig and self._valid(data, sig):
response = json.loads(data)
else:
print 'invalid signature', data
logger.debug('invalid signature %s', data)
response = None
return response
@ -141,14 +144,14 @@ class Node(object):
if self.user.peered:
try:
self.online = False
print 'type to connect to', self.user_id
logger.debug('type to connect to %s', self.user_id)
self.pullChanges()
print 'connected to', self.user_id
logger.debug('connected to %s', self.user_id)
self.online = True
except:
import traceback
traceback.print_exc()
print 'failed to connect to', self.user_id
logger.debug('failed to connect to %s', self.user_id)
self.online = False
else:
self.online = False
@ -167,7 +170,7 @@ class Node(object):
return Changelog.apply_changes(self.user, changes)
def pushChanges(self, changes):
print 'pushing changes to', self.user_id, changes
logger.debug('pushing changes to %s %s', self.user_id, changes)
try:
r = self.request('pushChanges', changes)
except:
@ -177,7 +180,7 @@ class Node(object):
'status': 'offline'
})
r = False
print 'pushedChanges', r, self.user_id
logger.debug('pushedChanges %s %s', r, self.user_id)
def requestPeering(self, message):
p = self.user
@ -187,25 +190,24 @@ class Node(object):
return True
def acceptPeering(self, message):
print 'run acceptPeering', message
logger.debug('run acceptPeering %s', message)
r = self.request('acceptPeering', settings.preferences['username'], message)
print 'result', r
logger.debug('result %s', r)
p = self.user
p.update_peering(True)
self.go_online()
return True
def rejectPeering(self, message):
print 'reject peering!!!', self.user
logger.debug('rejectPeering %s', self.user)
p = self.user
p.update_peering(False)
r = self.request('rejectPeering', message)
print 'reject peering!!!', r
self.online = False
return True
def removePeering(self, message):
print 'remove peering!!!', self.user
logger.debug('removePeering %s', self.user)
p = self.user
if p.peered:
p.update_peering(False)
@ -226,7 +228,7 @@ class Node(object):
'User-Agent': settings.USER_AGENT,
}
t1 = datetime.now()
print 'GET', url
logger.debug('download %s', url)
'''
r = requests.get(url, headers=headers)
if r.status_code == 200:
@ -240,10 +242,10 @@ class Node(object):
duration = (t2-t1).total_seconds()
if duration:
self.download_speed = len(content) / duration
print 'SPEED', ox.format_bits(self.download_speed)
logger.debug('SPEED %s', ox.format_bits(self.download_speed))
return item.save_file(content)
else:
print 'FAILED', url
logger.debug('FAILED %s', url)
return False
def download_upgrade(self, release):
@ -261,7 +263,7 @@ class Node(object):
with open(path, 'w') as fd:
fd.write(r.read())
if (ox.sha1sum(path) != sha1):
print 'invalid update!'
logger.error('invalid update!')
os.unlink(path)
return False
else:

View file

@ -5,16 +5,18 @@ from __future__ import division, with_statement
import inspect
import sys
import json
import logging
from flask import request, Blueprint
from .shortcuts import render_to_json_response, json_response
logger = logging.getLogger('oxflask.api')
app = Blueprint('oxflask', __name__)
@app.route('/api/', methods=['POST', 'OPTIONS'])
def api():
if request.host not in request.headers['origin']:
print 'reject cross site attempt to access api', request
logger.debug('reject cross site attempt to access api %s', request)
return ''
if request.method == "OPTIONS":
@ -29,6 +31,7 @@ def api():
'doc': actions.doc(f).replace('\n', '<br>\n')})
return render_to_json_response(api)
action = request.form['action']
logger.debug('API %s', action)
f = actions.get(action)
if f:
response = f(request)

View file

@ -3,6 +3,8 @@ import socket
import urllib2
import ssl
import hashlib
import logging
logger = logging.getLogger('oml.ssl_request')
class InvalidCertificateException(httplib.HTTPException, urllib2.URLError):
def __init__(self, fingerprint, cert, reason):
@ -40,7 +42,7 @@ class CertValidatingHTTPSConnection(httplib.HTTPConnection):
if not self._ValidateCertificateFingerprint(cert):
raise InvalidCertificateException(self.fingerprint, cert,
'fingerprint mismatch')
print 'CIPHER', self.sock.cipher(), 'VERSION', self.sock.ssl_version
logger.debug('CIPHER %s VERSION %s', self.sock.cipher(), self.sock.ssl_version)
class VerifiedHTTPSHandler(urllib2.HTTPSHandler):
def __init__(self, **kwargs):

View file

@ -18,12 +18,14 @@ import settings
import state
from changelog import Changelog
import logging
logger = logging.getLogger('oml.user.api')
@returns_json
def init(request):
'''
this is an init request to test stuff
'''
#print 'init', request
response = {}
if os.path.exists(settings.oml_config_path):
with open(settings.oml_config_path) as fd:
@ -118,6 +120,7 @@ actions.register(removeList, cache=False)
@returns_json
def editList(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
logger.debug('editList %s', data)
l = models.List.get_or_create(data['id'])
name = l.name
if 'name' in data:
@ -127,7 +130,7 @@ def editList(request):
if l.type == 'static' and name != l.name:
Changelog.record(state.user(), 'editlist', name, {'name': l.name})
l.save()
return {}
return l.json()
actions.register(editList, cache=False)
@returns_json
@ -154,7 +157,7 @@ actions.register(removeListItems, cache=False)
def sortLists(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
n = 0
print 'sortLists', data
logger.debug('sortLists %s', data)
for id in data['ids']:
l = models.List.get(id)
l.position = n
@ -178,7 +181,7 @@ actions.register(editUser, cache=False)
def requestPeering(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
if len(data.get('id', '')) != 43:
print 'invalid user id'
logger.debug('invalid user id')
return {}
p = models.User.get_or_create(data['id'])
state.nodes.queue('add', p.id)
@ -190,9 +193,9 @@ actions.register(requestPeering, cache=False)
def acceptPeering(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
if len(data.get('id', '')) != 43:
print 'invalid user id'
logger.debug('invalid user id')
return {}
print 'acceptPeering...', data
logger.debug('acceptPeering... %s', data)
p = models.User.get_or_create(data['id'])
state.nodes.queue('add', p.id)
state.nodes.queue(p.id, 'acceptPeering', data.get('message', ''))
@ -203,7 +206,7 @@ actions.register(acceptPeering, cache=False)
def rejectPeering(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
if len(data.get('id', '')) != 43:
print 'invalid user id'
logger.debug('invalid user id')
return {}
p = models.User.get_or_create(data['id'])
state.nodes.queue('add', p.id)
@ -215,7 +218,7 @@ actions.register(rejectPeering, cache=False)
def removePeering(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
if len(data.get('id', '')) != 43:
print 'invalid user id'
logger.debug('invalid user id')
return {}
u = models.User.get_or_create(data['id'])
state.nodes.queue('add', u.id)
@ -227,7 +230,7 @@ actions.register(removePeering, cache=False)
def cancelPeering(request):
data = json.loads(request.form['data']) if 'data' in request.form else {}
if len(data.get('id', '')) != 43:
print 'invalid user id'
logger.debug('invalid user id')
return {}
p = models.User.get_or_create(data['id'])
state.nodes.queue('add', p.id)

View file

@ -12,6 +12,9 @@ from settings import db
import state
import logging
logger = logging.getLogger('oml.user.models')
class User(db.Model):
created = db.Column(db.DateTime())

View file

@ -2,6 +2,8 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import logging
from tornado.websocket import WebSocketHandler
from tornado.ioloop import IOLoop
from Queue import Queue
@ -12,6 +14,8 @@ from oxflask.shortcuts import json_dumps
import state
logger = logging.getLogger('oml.websocket')
class Background:
def __init__(self, handler):
@ -24,8 +28,6 @@ class Background:
while self.connected:
message = self.q.get()
action, data = json.loads(message)
print action
print data
import item.scan
if action == 'ping':
self.post(['pong', data])
@ -55,9 +57,8 @@ class Handler(WebSocketHandler):
background = None
def open(self):
print "New connection opened."
if self.request.host not in self.request.headers['origin']:
print 'reject cross site attempt to open websocket', self.request
logger.debug('reject cross site attempt to open websocket %s', self.request)
self.close()
self.background = Background(self)
state.websockets.append(self.background)
@ -70,19 +71,17 @@ class Handler(WebSocketHandler):
self.background.put(message)
def on_close(self):
print "Connection closed."
if self.background:
state.websockets.remove(self.background)
self.background.connected = False
def trigger_event(event, data):
if len(state.websockets):
print 'trigger event', event, data, len(state.websockets)
logger.debug('trigger event %s %s %s', event, data, len(state.websockets))
for ws in state.websockets:
try:
ws.post([event, data])
except:
import traceback
traceback.print_exc()
print 'failed to send to ws', ws, event, data
logger.debug('failed to send to ws %s %s %s', ws, event, data)