openmedialibrary/oml/nodes.py

# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
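#
# oml.nodes: Node runs one thread per remote peer and talks to it over HTTPS
# using ed25519-signed requests; Nodes manages these threads and dispatches
# queued actions to them.
#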
from __future__ import division

from Queue import Queue
from threading import Thread
import json
import socket
from StringIO import StringIO
import gzip
import urllib2
from datetime import datetime
import os
import time

import ox
import ed25519
from tornado.ioloop import PeriodicCallback

import settings
import user.models
from changelog import Changelog
import directory
from websocket import trigger_event
from localnodes import LocalNodes
from ssl_request import get_opener
import state
import db

import logging
logger = logging.getLogger('oml.nodes')

ENCODING = 'base64'

class Node(Thread):
    _running = True
    _cert = None
    online = False
    download_speed = 0
    TIMEOUT = 5

    def __init__(self, nodes, user):
        self._nodes = nodes
        self.user_id = user.id
        key = str(user.id)
        self.vk = ed25519.VerifyingKey(key, encoding=ENCODING)
        logger.debug('new Node %s online=%s', self.user_id, self.online)
        self._q = Queue()
        Thread.__init__(self)
        self.daemon = True
        self.start()
        # re-check the connection every two minutes
        self._ping = PeriodicCallback(self.ping, 120000)
        self._ping.start()
        self.ping()
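
    # All network activity happens on this Node's own thread: ping() and
    # go_online() only enqueue work, run() picks it up and connects.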

    def run(self):
        while self._running:
            action = self._q.get()
            if not self._running:
                break
            if action == 'go_online' or not self.online:
                self._go_online()
            else:
                self.online = self.can_connect()

    def join(self):
        self._running = False
        self.ping()
        #return Thread.join(self)

    def ping(self):
        self._q.put('')

    def go_online(self):
        self._q.put('go_online')
2014-05-04 17:26:43 +00:00
@property
def url(self):
if self.host:
2014-05-12 12:57:47 +00:00
if ':' in self.host:
2014-05-14 09:57:11 +00:00
url = 'https://[%s]:%s' % (self.host, self.port)
2014-05-12 12:57:47 +00:00
else:
2014-05-14 09:57:11 +00:00
url = 'https://%s:%s' % (self.host, self.port)
else:
url = None
2014-05-04 17:26:43 +00:00
return url
2014-05-13 10:36:02 +00:00

    def resolve(self):
        logger.debug('resolve node')
        r = self.get_local()
        if not r:
            try:
                r = directory.get(self.vk)
            except:
                logger.debug('directory failed', exc_info=1)
                r = None
        if r:
            self.host = r['host']
            if 'port' in r:
                self.port = r['port']
            if r['cert'] != self._cert:
                self._cert = r['cert']
                self._opener = get_opener(self._cert)
        else:
            self.host = None
            self.port = 9851

    def get_local(self):
        if self._nodes and self._nodes._local:
            local = self._nodes._local.get(self.user_id)
            if local and local['cert'] != self._cert:
                self._cert = local['cert']
                self._opener = get_opener(self._cert)
            return local
        return None
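
    # Signed node-to-node RPC: the request body is a JSON [action, args]
    # document signed with the local ed25519 key; the response is only
    # accepted if it carries the same protocol version and a valid
    # X-Ed25519-Signature from the peer.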

    def request(self, action, *args):
        url = self.url
        if not url:
            self.resolve()
            url = self.url
        if not self.url:
            logger.debug('unable to find host %s', self.user_id)
            self.online = False
            return None
        content = json.dumps([action, args])
        sig = settings.sk.sign(content, encoding=ENCODING)
        headers = {
            'User-Agent': settings.USER_AGENT,
            'X-Node-Protocol': settings.NODE_PROTOCOL,
            'Accept': 'text/plain',
            'Accept-Encoding': 'gzip',
            'Content-Type': 'application/json',
            'X-Ed25519-Key': settings.USER_ID,
            'X-Ed25519-Signature': sig,
        }
        self._opener.addheaders = headers.items()
        try:
            r = self._opener.open(url, data=content, timeout=self.TIMEOUT)
        except urllib2.HTTPError as e:
            if e.code == 403:
                # the peer rejected us: it has ended the peering
                logger.debug('REMOTE ENDED PEERING')
                with db.session():
                    u = self.user
                    if u.peered:
                        u.update_peering(False)
                self.online = False
                return
            logger.debug('urllib2.HTTPError %s %s', e, e.code)
            self.online = False
            return None
        except urllib2.URLError as e:
            logger.debug('urllib2.URLError %s', e)
            self.online = False
            return None
        except:
            logger.debug('unknown url error', exc_info=1)
            self.online = False
            return None
        data = r.read()
        if r.headers.get('content-encoding', None) == 'gzip':
            data = gzip.GzipFile(fileobj=StringIO(data)).read()
        version = r.headers.get('X-Node-Protocol', None)
        if version != settings.NODE_PROTOCOL:
            logger.debug('version does not match local: %s remote %s', settings.NODE_PROTOCOL, version)
            self.online = False
            if version > settings.NODE_PROTOCOL:
                state.update_required = True
            return None
        sig = r.headers.get('X-Ed25519-Signature')
        if sig and self._valid(data, sig):
            response = json.loads(data)
        else:
            logger.debug('invalid signature %s', data)
            response = None
        return response

    def _valid(self, data, sig):
        try:
            self.vk.verify(sig, data, encoding=ENCODING)
        #except ed25519.BadSignatureError:
        except:
            return False
        return True

    @property
    def user(self):
        with db.session():
            return user.models.User.get_or_create(self.user_id)

    def can_connect(self):
        try:
            url = self.url
            if url:
                logger.debug('try to connect to %s', url)
                headers = {
                    'User-Agent': settings.USER_AGENT,
                    'X-Node-Protocol': settings.NODE_PROTOCOL,
                    'Accept-Encoding': 'gzip',
                }
                self._opener.addheaders = headers.items()
                r = self._opener.open(url, timeout=1)
                version = r.headers.get('X-Node-Protocol', None)
                if version != settings.NODE_PROTOCOL:
                    logger.debug('version does not match local: %s remote %s', settings.NODE_PROTOCOL, version)
                    return False
                r.read()
                logger.debug('ok')
                return True
        except:
            pass
        return False
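
    # Connection state machine: resolve the peer, try to connect, replay any
    # queued peering action (request/accept/remove), then pull pending
    # changelog entries and broadcast this node's online status.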

    def _go_online(self):
        self.resolve()
        u = self.user
        logger.debug('go_online peered=%s queued=%s (%s)', u.peered, u.queued, u.id)
        if (u.peered or u.queued) and self.host:
            try:
                self.online = False
                logger.debug('try to connect to %s at [%s]:%s', self.user_id, self.host, self.port)
                if self.can_connect():
                    logger.debug('connected to [%s]:%s', self.host, self.port)
                    self.online = True
                    if u.queued:
                        logger.debug('queued peering event pending=%s peered=%s', u.pending, u.peered)
                        if u.pending == 'sent':
                            self.peering('requestPeering')
                        elif u.pending == '' and u.peered:
                            self.peering('acceptPeering')
                        else:
                            #fixme, what about cancel/reject peering here?
                            self.peering('removePeering')
                    if self.online:
                        self.pullChanges()
            except:
                logger.debug('failed to connect to %s', self.user_id, exc_info=1)
                self.online = False
        else:
            self.online = False
        self.trigger_status()

    def trigger_status(self):
        trigger_event('status', {
            'id': self.user_id,
            'online': self.online
        })
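
    # Changelog sync: pullChanges asks the peer for everything after the
    # highest revision stored locally for this user, pushChanges sends local
    # changes while the peer is online.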

    def pullChanges(self):
        last = Changelog.query.filter_by(user_id=self.user_id).order_by('-revision').first()
        from_revision = last.revision + 1 if last else 0
        logger.debug('pullChanges %s from %s', self.user.name, from_revision)
        changes = self.request('pullChanges', from_revision)
        if not changes:
            return False
        return Changelog.apply_changes(self.user, changes)

    def pushChanges(self, changes):
        logger.debug('pushing changes to %s %s', self.user_id, changes)
        if self.online:
            try:
                r = self.request('pushChanges', changes)
            except:
                self.online = False
                self.trigger_status()
                r = False
            logger.debug('pushedChanges %s %s', r, self.user_id)

    def peering(self, action):
        u = self.user
        if action in ('requestPeering', 'acceptPeering'):
            r = self.request(action, settings.preferences['username'], u.info.get('message'))
        else:
            r = self.request(action, u.info.get('message'))
        if r is not None:
            u.queued = False
            if 'message' in u.info:
                del u.info['message']
            u.save()
        else:
            logger.debug('peering failed? %s %s', action, r)
        if action in ('cancelPeering', 'rejectPeering', 'removePeering'):
            self.online = False
        else:
            self.go_online()
        trigger_event('peering.%s' % action.replace('Peering', ''), u.json())
        return True
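
    # Downloads stream the file in 16k chunks and emit a 'transfer' progress
    # event at most once per second; the measured speed is kept in
    # download_speed.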

    def download(self, item):
        from item.models import Transfer
        url = '%s/get/%s' % (self.url, item.id)
        headers = {
            'User-Agent': settings.USER_AGENT,
        }
        t1 = datetime.utcnow()
        logger.debug('download %s', url)
        self._opener.addheaders = headers.items()
        try:
            r = self._opener.open(url, timeout=self.TIMEOUT*2)
        except:
            logger.debug('openurl failed %s', url, exc_info=1)
            return False
        if r.getcode() == 200:
            try:
                if r.headers.get('content-encoding', None) == 'gzip':
                    content = gzip.GzipFile(fileobj=r).read()
                else:
                    content = ''
                    ct = datetime.utcnow()
                    for chunk in iter(lambda: r.read(16*1024), ''):
                        content += chunk
                        if (datetime.utcnow() - ct).total_seconds() > 1:
                            ct = datetime.utcnow()
                            t = Transfer.get(item.id)
                            t.progress = len(content) / item.info['size']
                            t.save()
                            trigger_event('transfer', {
                                'id': item.id, 'progress': t.progress
                            })
                    '''
                    content = r.read()
                    '''
                t2 = datetime.utcnow()
                duration = (t2-t1).total_seconds()
                if duration:
                    self.download_speed = len(content) / duration
                    logger.debug('SPEED %s', ox.format_bits(self.download_speed))
                return item.save_file(content)
            except:
                logger.debug('download failed %s', url, exc_info=1)
                return False
        else:
            logger.debug('FAILED %s', url)
            return False

    def download_upgrade(self, release):
        for module in release['modules']:
            path = os.path.join(settings.update_path, release['modules'][module]['name'])
            if not os.path.exists(path):
                url = '%s/oml/%s' % (self.url, release['modules'][module]['name'])
                sha1 = release['modules'][module]['sha1']
                headers = {
                    'User-Agent': settings.USER_AGENT,
                }
                self._opener.addheaders = headers.items()
                r = self._opener.open(url)
                if r.getcode() == 200:
                    with open(path, 'wb') as fd:
                        fd.write(r.read())
                    # verify the checksum before accepting the update
                    if ox.sha1sum(path) != sha1:
                        logger.error('invalid update!')
                        os.unlink(path)
                        return False
                else:
                    return False
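
# Nodes wraps all Node instances: a single worker thread processes queued
# 'add' and 'cleanup' commands and dispatches other actions to all, peered
# or online nodes.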

class Nodes(Thread):
    _nodes = {}
    _local = None

    def __init__(self):
        self._q = Queue()
        self._running = True
        self._local = LocalNodes()
        self._cleanup = PeriodicCallback(lambda: self.queue('cleanup'), 120000)
        self._cleanup.start()
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def cleanup(self):
        if self._running:
            self._local.cleanup()

    def queue(self, *args):
        self._q.put(list(args))

    def is_online(self, id):
        return id in self._nodes and self._nodes[id].online

    def download(self, id, item):
        return id in self._nodes and self._nodes[id].download(item)

    def _call(self, target, action, *args):
        if target == 'all':
            nodes = self._nodes.values()
        elif target == 'peered':
            nodes = [n for n in self._nodes.values() if n.user.peered]
        elif target == 'online':
            nodes = [n for n in self._nodes.values() if n.online]
        else:
            nodes = [self._nodes[target]]
        for node in nodes:
            getattr(node, action)(*args)

    def _add(self, user_id):
        if user_id not in self._nodes:
            from user.models import User
            with db.session():
                self._nodes[user_id] = Node(self, User.get_or_create(user_id))
        else:
            if not self._nodes[user_id].online:
                self._nodes[user_id].ping()

    def run(self):
        while self._running:
            args = self._q.get()
            if args:
                if args[0] == 'cleanup':
                    self.cleanup()
                elif args[0] == 'add':
                    self._add(args[1])
                else:
                    self._call(*args)

    def join(self):
        self._running = False
        self._q.put(None)
        for node in self._nodes.values():
            node.join()
        self._local.join()
        return Thread.join(self)
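
# A minimal usage sketch (illustrative only; peer_user_id and changes are
# placeholders, and the surrounding application is assumed to provide
# settings, the database and a running tornado IOLoop):
#
#   nodes = Nodes()                                 # start the manager thread
#   nodes.queue('add', peer_user_id)                # create or ping a Node
#   nodes.queue('online', 'pushChanges', changes)   # push to all online peers
#   nodes.join()                                    # shut everything down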