cleanup download, download from local nodes if offline, cache per download users syndb only at startup
commit 988d030f92 (parent ba4f311fe1)
4 changed files with 45 additions and 42 deletions
oml/nodes.py (64 changes)
@@ -8,7 +8,6 @@ import json
 from io import BytesIO
 import gzip
 import urllib.request, urllib.error, urllib.parse
-from datetime import datetime
 import os
 import time
 import socket
@@ -306,31 +305,30 @@ class Node(Thread):
             return False
         if r.getcode() == 200:
             try:
-                fileobj = r
                 if r.headers.get('content-encoding', None) == 'gzip':
                     fileobj = gzip.GzipFile(fileobj=r)
-                content = b''
-                ct = datetime.utcnow()
-                size = 0
+                else:
+                    fileobj = r
+                content = []
+                ct = time.time()
+                size = item.info['size']
+                received = 0
                 chunk_size = 16*1024
                 for chunk in iter(lambda: fileobj.read(chunk_size), b''):
-                    content += chunk
-                    size += len(chunk)
-                    since_ct = (datetime.utcnow() - ct).total_seconds()
-                    if since_ct > 1:
-                        ct = datetime.utcnow()
+                    content.append(chunk)
+                    received += len(chunk)
+                    if time.time() - ct > 1:
+                        ct = time.time()
                         if state.shutdown:
                             return False
                         t = state.downloads.transfers.get(item.id)
-                        if not t:
-                            # transfer was canceled
+                        if not t: # transfer was canceled
                             trigger_event('transfer', {
                                 'id': item.id, 'progress': -1
                             })
                             return False
                         else:
-                            t['progress'] = size / item.info['size']
-                            state.downloads.transfers[item.id] = t
+                            t['progress'] = received / size
                             trigger_event('transfer', {
                                 'id': item.id, 'progress': t['progress']
                             })
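Note: the rewritten loop keeps downloaded chunks in a list, tracks the received byte count separately from the expected size, and emits a progress event at most once per second using time.time() instead of datetime arithmetic. A minimal standalone sketch of the same pattern, with a hypothetical report_progress callback standing in for trigger_event and the transfers state:

    import time

    def download_stream(fileobj, expected_size, report_progress, chunk_size=16 * 1024):
        # Read fileobj in fixed-size chunks, reporting fractional progress
        # at most once per second, and join the chunks once at the end.
        chunks = []
        received = 0
        last_report = time.time()
        for chunk in iter(lambda: fileobj.read(chunk_size), b''):
            chunks.append(chunk)
            received += len(chunk)
            if time.time() - last_report > 1:
                last_report = time.time()
                report_progress(received / expected_size)
        return b''.join(chunks)

With a real network response as fileobj, report_progress receives received/expected_size roughly once per second, and the joined bytes are returned when the stream is exhausted.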
@@ -338,7 +336,7 @@ class Node(Thread):
                     if state.bandwidth:
                         while not state.bandwidth.download(chunk_size) and not state.shutdown:
                             time.sleep(0.1)
-                return item.save_file(content)
+                return item.save_file(b''.join(content))
             except:
                 logger.debug('download failed %s', url, exc_info=True)
                 return False
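Note: the saved content is now assembled with a single b''.join() instead of growing a bytes object with += inside the loop. Appending to a list is constant time per chunk, while += on immutable bytes copies everything received so far on every iteration, which degrades badly for large files. The two shapes, as an illustrative comparison:

    def accumulate_concat(fileobj, chunk_size=16 * 1024):
        content = b''
        for chunk in iter(lambda: fileobj.read(chunk_size), b''):
            content += chunk               # copies the whole buffer each time
        return content

    def accumulate_join(fileobj, chunk_size=16 * 1024):
        parts = []
        for chunk in iter(lambda: fileobj.read(chunk_size), b''):
            parts.append(chunk)            # O(1) per chunk
        return b''.join(parts)             # one final copy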
@@ -368,9 +366,10 @@ class Node(Thread):
         code = r.getcode()
         if code == 200:
             try:
-                fileobj = r
                 if r.headers.get('content-encoding', None) == 'gzip':
                     fileobj = gzip.GzipFile(fileobj=r)
+                else:
+                    fileobj = r
                 content = fileobj.read()
                 key = 'preview:' + item_id
                 icons[key] = content
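Note: both download paths now pick the file object after inspecting the response headers, wrapping the HTTP response in gzip.GzipFile only when the server actually sent Content-Encoding: gzip and reading the response directly otherwise. A minimal sketch of that selection with urllib (the function name and URL are placeholders):

    import gzip
    import urllib.request

    def read_body(url):
        r = urllib.request.urlopen(url)
        if r.headers.get('content-encoding', None) == 'gzip':
            fileobj = gzip.GzipFile(fileobj=r)   # decompress transparently while reading
        else:
            fileobj = r                          # plain body, read as-is
        return fileobj.read()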
@@ -431,12 +430,28 @@ class Nodes(Thread):
         self.daemon = True
         self.start()

+    def run(self):
+        library.sync_db()
+        self.queue('pull')
+        while not state.shutdown:
+            args = self._q.get()
+            if args:
+                if args[0] == 'cleanup':
+                    self.cleanup()
+                elif args[0] == 'add':
+                    self._add(*args[1:])
+                elif args[0] == 'pull':
+                    self._pull()
+                else:
+                    self._call(*args)
+
+
     def cleanup(self):
         if not state.shutdown and self._local:
             self._local.cleanup()

     def pull(self):
-        if state.online and not self._pulling:
+        if not self._pulling:
             self.queue('pull')

     def queue(self, *args):
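Note: Nodes.run() now performs the one-time library.sync_db() before entering its command loop, so the users' db is synced once at startup instead of on every pull, and the loop then dispatches queued commands ('cleanup', 'add', 'pull', or a generic call) until shutdown. A reduced sketch of this queue-driven worker-thread pattern, assuming a simplified Worker class with only the 'pull' command and illustrative sync_once/pull bodies:

    from queue import Queue
    from threading import Thread

    class Worker(Thread):
        def __init__(self):
            super().__init__(daemon=True)
            self._q = Queue()
            self.start()

        def run(self):
            self.sync_once()              # one-time startup work, like library.sync_db()
            self.queue('pull')            # schedule an initial pull
            while True:
                args = self._q.get()
                if args is None:          # shutdown sentinel from join()
                    break
                if args[0] == 'pull':
                    self.pull()

        def sync_once(self):
            print('sync db once at startup')

        def pull(self):
            print('pull changes from peers')

        def queue(self, *args):
            self._q.put(list(args))

        def join(self):
            self._q.put(None)             # wake the blocked get() so run() can exit
            super().join()

Commands already queued ahead of the None sentinel are still handled before the thread exits; the real class additionally checks state.shutdown on every iteration.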
@@ -482,7 +497,6 @@ class Nodes(Thread):
         if state.activity and state.activity.get('activity') == 'import':
             return
         self._pulling = True
-        library.sync_db()
         if state.shutdown:
             return
         users = []
@@ -499,20 +513,6 @@ class Nodes(Thread):
                 node.pullChanges()
         self._pulling = False

-    def run(self):
-        self.queue('pull')
-        while not state.shutdown:
-            args = self._q.get()
-            if args:
-                if args[0] == 'cleanup':
-                    self.cleanup()
-                elif args[0] == 'add':
-                    self._add(*args[1:])
-                elif args[0] == 'pull':
-                    self._pull()
-                else:
-                    self._call(*args)
-
     def join(self):
         self._q.put(None)
         for node in list(self._nodes.values()):
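Note: the surviving join() first unblocks the command loop by enqueuing the None sentinel and then walks a snapshot of the node registry; iterating over list(self._nodes.values()) rather than the live dict view lets other threads add or remove nodes during shutdown without raising "dictionary changed size during iteration". A tiny illustration of the snapshot idiom:

    nodes = {'a': object(), 'b': object()}

    # Iterating the live view while mutating the dict would raise RuntimeError;
    # copying the values into a list first makes the walk safe.
    for node in list(nodes.values()):
        nodes.pop('b', None)
        print(node)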