simple changelog
parent 04f994d4b7
commit e966256fa2
15 changed files with 267 additions and 103 deletions
oml/nodes.py (87 changed lines)
@@ -30,7 +30,7 @@ import library
import logging
logger = logging.getLogger(__name__)

DEBUG_NODES=False
DEBUG_NODES = False

class Node(Thread):
    host = None

@@ -239,24 +239,81 @@ class Node(Thread):
        self.online = self.can_connect()
        if not self.online or state.shutdown:
            return
        with db.session():
            u = user.models.User.get_or_create(self.user_id)
            if not u or not self.online or not u.peered:
                return True
            self.resolve()
            peer = get_peer(self.user_id)
            from_revision = peer.info.get('revision', -1) + 1
            path = peer._logpath
            if os.path.exists(path):
                size = os.path.getsize(path)
            else:
                size = 0
            url = '%s/log' % self.url
            if DEBUG_NODES:
                logger.debug('pullChanges: %s [%s]', self.user_id, url)
            headers = self.headers.copy()
            if size:
                headers['Range'] = '%s-' % size
            self._opener.addheaders = list(zip(headers.keys(), headers.values()))
            try:
                changes = self.request('pullChanges', from_revision)
                r = self._opener.open(url, timeout=self.TIMEOUT*60)
            except urllib.error.HTTPError as e:
                if e.code == 403:
                    logger.debug('pullChanges 403: %s (%s)', url, self.user_id)
                    if state.tasks:
                        state.tasks.queue('peering', (self.user_id, False))
                        del self._nodes[self.user_id]
                        self.online = False
                else:
                    logger.debug('unknown http error %s %s (%s)', e.code, url, self.user_id)
                return False
            except socket.timeout:
                logger.debug('timeout %s', url)
                return False
            except socks.GeneralProxyError:
                logger.debug('openurl failed %s', url)
                return False
            except urllib.error.URLError as e:
                logger.debug('openurl failed urllib2.URLError %s', e.reason)
                return False
            except:
                self.online = False
                if DEBUG_NODES:
                    logger.debug('%s went offline', u.name, exc_info=True)
                logger.debug('openurl failed %s', url, exc_info=True)
                return False
            if not changes:
            if r.getcode() in (200, 206):
                changed = False
                chunk_size = 16 * 1024
                mode = 'ab' if r.getcode() == 206 else 'wb'
                content = b''

                try:
                    if r.headers.get('content-encoding', None) == 'gzip':
                        fileobj = gzip.GzipFile(fileobj=r)
                    else:
                        fileobj = r
                    for chunk in iter(lambda: fileobj.read(chunk_size), b''):
                        content += chunk
                        eol = content.rfind(b'\n') + 1
                        if eol > 0:
                            with open(path, mode) as fd:
                                fd.write(content[:eol])
                            content = content[eol:]
                            mode = 'ab'
                            changed = True
                        if state.shutdown:
                            return False
                        if state.bandwidth:
                            while not state.bandwidth.download(chunk_size) and not state.shutdown:
                                time.sleep(0.1)
                    if content:
                        with open(path, mode) as fd:
                            fd.write(content)
                        changed = True
                    if changed:
                        peer.apply_log()
                except:
                    logger.debug('download failed %s', url, exc_info=True)
                    return False
            else:
                logger.debug('FAILED %s', url)
                return False
            #with open('/tmp/changelog_%s_%s.json' % (self.user_id, from_revision), 'w') as f:
            #    json.dump(changes, f, ensure_ascii=False, indent=4)
            return peer.apply_changes(changes)

    def peering(self, action):
        with db.session():
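
The new pullChanges body above replaces the request('pullChanges', from_revision) call with a streamed download of the peer's log file: it sends a Range header so the download resumes at the byte offset already on disk, decompresses a gzip response on the fly, buffers so that writes end on a newline boundary, and calls peer.apply_log() once something new has arrived. A minimal standalone sketch of that pattern, assuming hypothetical names (fetch_log, LOG_URL, LOG_PATH) and using the standard 'bytes=' Range form rather than this repository's helpers:

import gzip
import os
import urllib.request

LOG_URL = 'http://127.0.0.1:9842/log'  # hypothetical peer log endpoint
LOG_PATH = '/tmp/peer.log'             # hypothetical local copy of the log
CHUNK_SIZE = 16 * 1024

def fetch_log(url=LOG_URL, path=LOG_PATH):
    # Resume an append-only log download, keeping only complete lines on disk.
    size = os.path.getsize(path) if os.path.exists(path) else 0
    request = urllib.request.Request(url)
    if size:
        # ask only for the bytes we do not have yet
        request.add_header('Range', 'bytes=%d-' % size)
    changed = False
    with urllib.request.urlopen(request, timeout=60) as r:
        # 206 Partial Content means the server honored the Range header, so append
        mode = 'ab' if r.getcode() == 206 else 'wb'
        if r.headers.get('Content-Encoding') == 'gzip':
            fileobj = gzip.GzipFile(fileobj=r)
        else:
            fileobj = r
        buffered = b''
        for chunk in iter(lambda: fileobj.read(CHUNK_SIZE), b''):
            buffered += chunk
            # flush up to the last newline so the file never ends mid-entry
            eol = buffered.rfind(b'\n') + 1
            if eol:
                with open(path, mode) as fd:
                    fd.write(buffered[:eol])
                buffered = buffered[eol:]
                mode = 'ab'
                changed = True
    return changed
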
@@ -417,7 +474,7 @@ class Nodes(Thread):
                    del u.info['local']
                    u.save()
                self.queue('add', u.id)
                state.peers[u.id] = library.Peer(u.id)
                get_peer(u.id)
            for u in user.models.User.query.filter_by(queued=True):
                logger.debug('adding queued node... %s', u.id)
                self.queue('add', u.id, True)
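
The second hunk above ends pullChanges by calling peer.apply_log() in place of the old return peer.apply_changes(changes); apply_log itself is not part of this diff. Purely as an illustration of how an append-only, newline-delimited changelog like the one fetched above can be replayed, here is a hedged sketch; apply_log and apply_change are hypothetical names and do not reflect the project's actual implementation:

import json

def apply_log(path, from_revision=0):
    # Hypothetical: each line of the log is one JSON change record,
    # and records before from_revision have already been applied.
    applied = 0
    with open(path, 'rb') as fd:
        for revision, line in enumerate(fd):
            if revision < from_revision or not line.strip():
                continue
            apply_change(json.loads(line))
            applied += 1
    return applied

def apply_change(change):
    # placeholder: a real implementation would update local state or the database
    print('applying', change)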