remove id migration and directory lookup
parent 9f396acd48
commit cf3762dd7f
5 changed files with 8 additions and 116 deletions
@@ -1,61 +0,0 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# DHT placeholder


import logging

import ed25519
import json
import tor_request

import settings

logger = logging.getLogger(__name__)

base = 'http://hpjats6xixrleoqg.onion:25519'


def get(vk):
    id = vk.to_ascii(encoding='base64').decode()
    url = '%s/%s' % (base, id)
    headers = {
        'User-Agent': settings.USER_AGENT
    }
    try:
        opener = tor_request.get_opener()
        opener.addheaders = list(zip(headers.keys(), headers.values()))
        r = opener.open(url)
    except:
        logger.info('get failed %s', url, exc_info=True)
        return None
    sig = r.headers.get('X-Ed25519-Signature')
    data = r.read()
    if sig and data:
        vk = ed25519.VerifyingKey(id, encoding='base64')
        try:
            vk.verify(sig, data, encoding='base64')
            data = json.loads(data.decode('utf-8'))
        except ed25519.BadSignatureError:
            logger.debug('invalid signature')
            data = None
    return data


def put(sk, data):
    id = sk.get_verifying_key().to_ascii(encoding='base64').decode()
    data = json.dumps(data).encode()
    sig = sk.sign(data, encoding='base64')
    url = '%s/%s' % (base, id)
    headers = {
        'User-Agent': settings.USER_AGENT,
        'X-Ed25519-Signature': sig
    }
    try:
        #r = requests.put(url, data, headers=headers, timeout=2)
        opener = tor_request.get_opener()
        opener.addheaders = list(zip(headers.keys(), headers.values()))
        r = opener.open(url, data)
    except:
        logger.info('put failed: %s', data, exc_info=True)
        return False
    return r.status == 200
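For reference, the deleted module above is the whole directory client: put() signed the JSON record with the node's Ed25519 key and sent the signature in an X-Ed25519-Signature header, and get() rebuilt the verifying key from the base64 id in the URL and refused the payload unless the signature verified. A minimal sketch of that sign/verify roundtrip with the python-ed25519 package (the key pair and record contents here are illustrative, not real node data):

import json
import ed25519

# illustrative key pair; the removed code loaded the node's signing key elsewhere
sk, vk = ed25519.create_keypair()

# what put() did: serialize the record, sign it, send the signature as a base64 header value
payload = json.dumps({'id': 'abcdefghijklmnop'}).encode()
sig = sk.sign(payload, encoding='base64')

# what get() did: rebuild the verifying key from the base64 id in the URL
# and only trust the body if the X-Ed25519-Signature value checks out
node_id = vk.to_ascii(encoding='base64').decode()   # 43 chars, the old user id format
vk2 = ed25519.VerifyingKey(node_id, encoding='base64')
try:
    vk2.verify(sig, payload, encoding='base64')
    data = json.loads(payload.decode('utf-8'))
except ed25519.BadSignatureError:
    data = None
print(data)   # {'id': 'abcdefghijklmnop'}

The 43-character base64 form of the verifying key is what served as the old user id, which is why the migration removed below keys off len(self.user_id) == 43.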
27 oml/nodes.py

@@ -105,25 +105,6 @@ class Node(Thread):
        else:
            self.local = None
            self.port = 9851
        if len(self.user_id) == 43:
            self.migrate_id()

    def migrate_id(self):
        import ed25519
        from . import directory
        key = self.user_id.encode()
        vk = ed25519.VerifyingKey(key, encoding=ENCODING)
        try:
            r = directory.get(vk)
        except:
            logger.debug('directory failed', exc_info=True)
            r = None
        if r and 'id' in r and len(r['id']) == 16:
            u = self.user
            self.user_id = r['id']
            u.migrate_id(self.user_id)
        self._opener = get_opener(self.user_id)

    def get_local(self):
        if self._nodes and self._nodes._local:

@@ -546,14 +527,6 @@ def update_online():
    if state.online:
        for node in list(state.nodes._nodes.values()):
            node.trigger_status()
        if settings.OLD_USER_ID and not settings.server.get('migrated_id', False):
            from . import directory
            r = directory.put(settings.sk, {
                'id': settings.USER_ID,
            })
            logger.debug('push id to directory %s', r)
            if r:
                settings.server['migrated_id'] = True

def check_nodes():
    if state.online:
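The removed Node.migrate_id() swapped a node's old 43-character key id for the 16-character onion-service id returned by the directory, and only then rewrote the local database through User.migrate_id(). A small, hypothetical helper (not part of the codebase) restating that decision rule:

def resolve_user_id(current_id, record):
    # adopt the directory's 16-char onion id only when the current id is still
    # the 43-char base64 key form and the record carries a well-formed replacement
    if len(current_id) != 43:
        return current_id                      # already migrated
    if record and len(record.get('id', '')) == 16:
        return record['id']
    return current_id

# illustrative values, not real ids
print(resolve_user_id('A' * 43, {'id': 'b' * 16}))   # -> the 16-char id
print(resolve_user_id('c' * 16, {'id': 'b' * 16}))   # -> unchanged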
@@ -25,7 +25,6 @@ if not os.path.exists(data_path):
db_path = os.path.join(data_path, 'data.db')
log_path = os.path.join(data_path, 'debug.log')
icons_db_path = os.path.join(data_path, 'icons.db')
key_path = os.path.join(data_path, 'node.key')
ssl_cert_path = os.path.join(data_path, 'node.ssl.crt')
ssl_key_path = os.path.join(data_path, 'tor', 'private_key')

@@ -57,17 +56,6 @@ for key in server_defaults:

release = pdict(os.path.join(data_path, 'release.json'))

if os.path.exists(key_path):
    import ed25519
    with open(key_path, 'rb') as fd:
        sk = ed25519.SigningKey(fd.read())
    vk = sk.get_verifying_key()
    OLD_USER_ID = vk.to_ascii(encoding='base64').decode()
else:
    sk = None
    vk = None
    OLD_USER_ID = None

USER_ID = get_user_id(ssl_key_path, ssl_cert_path)

OML_UPDATE_KEY='K55EZpPYbP3X+3mA66cztlw1sSaUMqGwfTDKQyP2qOU'
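The deleted settings block is where OLD_USER_ID came from: the signing key stored in node.key was loaded and its verifying key rendered as base64. A minimal sketch of that derivation, generating a throwaway key in place of reading node.key:

import ed25519

# stand-in for open(key_path, 'rb').read(); node.key held the signing key bytes
sk, _ = ed25519.create_keypair()
key_bytes = sk.to_bytes()

sk = ed25519.SigningKey(key_bytes)
vk = sk.get_verifying_key()
old_user_id = vk.to_ascii(encoding='base64').decode()
print(len(old_user_id))   # 43 -- the length the removed Node.migrate_id() keyed off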
10 oml/setup.py

@@ -340,7 +340,13 @@ def upgrade_db(old, new=None):
        old_key = os.path.join(settings.data_path, 'node.ssl.key')
        if os.path.exists(old_key):
            os.unlink(old_key)
        if settings.OLD_USER_ID:
        key_path = os.path.join(settings.data_path, 'node.key')
        if os.path.exists(key_path):
            import ed25519
            with open(key_path, 'rb') as fd:
                sk = ed25519.SigningKey(fd.read())
            vk = sk.get_verifying_key()
            OLD_USER_ID = vk.to_ascii(encoding='base64').decode()
            statements = [
                "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
                "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'",

@@ -348,7 +354,7 @@ def upgrade_db(old, new=None):
                "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
            ]
            for sql in statements:
                run_sql(sql.format(oid=settings.OLD_USER_ID, nid=settings.USER_ID))
                run_sql(sql.format(oid=OLD_USER_ID, nid=settings.USER_ID))
        if old <= '20151201-384-03c2439':
            with db.session():
                import item.models
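In upgrade_db() the change re-derives the old id directly from node.key and rewrites it to settings.USER_ID across the affected tables with plain string-formatted UPDATE statements. A self-contained sqlite3 sketch of that rewrite pattern (schema reduced to the id columns, ids illustrative):

import sqlite3

OLD_ID = 'x' * 43   # illustrative 43-char key id
NEW_ID = 'y' * 16   # illustrative 16-char onion id

con = sqlite3.connect(':memory:')
con.execute("CREATE TABLE user (id TEXT)")
con.execute("CREATE TABLE changelog (user_id TEXT)")
con.execute("INSERT INTO user VALUES (?)", (OLD_ID,))
con.execute("INSERT INTO changelog VALUES (?)", (OLD_ID,))

# same .format() rewrite the migration used, reduced to two of the tables
statements = [
    "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
    "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
]
for sql in statements:
    con.execute(sql.format(oid=OLD_ID, nid=NEW_ID))
con.commit()

print(con.execute("SELECT id FROM user").fetchall())   # [('yyyyyyyyyyyyyyyy',)]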
@@ -203,20 +203,6 @@ class User(db.Model):
            n += 1
        self.nickname = nickname

    def migrate_id(self, service_id):
        if len(service_id) == 16:
            statements = [
                "DELETE FROM user WHERE id = '{nid}'",
                "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
                "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
            ]
            with db.session() as session:
                for sql in statements:
                    session.connection().execute(sql.format(oid=self.id, nid=service_id))
                session.commit()

    def rebuild_changelog(self):
        Changelog.query.filter_by(user_id=self.id).delete()
        for item in self.library.get_items().order_by('created'):
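One detail in the removed User.migrate_id(): it deletes any row already carrying the new id before renaming the old row onto it, presumably because user.id is the primary key and a leftover row under the new id would make the UPDATE collide. A small sqlite3 illustration of that ordering (schema and ids are illustrative):

import sqlite3

con = sqlite3.connect(':memory:')
con.execute("CREATE TABLE user (id TEXT PRIMARY KEY)")
con.execute("INSERT INTO user VALUES ('old-id'), ('new-id')")   # a row for the new id already exists

# without the DELETE, the UPDATE below would hit a UNIQUE constraint error
con.execute("DELETE FROM user WHERE id = 'new-id'")
con.execute("UPDATE user SET id = 'new-id' WHERE id = 'old-id'")
print(con.execute("SELECT id FROM user").fetchall())   # [('new-id',)]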