# openmedialibrary/oml/downloads.py
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
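
"""Background worker threads: Downloads periodically downloads queued
transfers from peers that are online and checks for software updates;
ScrapeThread works through the queue of items waiting to be scraped."""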

from threading import Thread
import time

import db
import state
import settings
import update

from websocket import trigger_event
import logging

logger = logging.getLogger(__name__)


class Downloads(Thread):

    def __init__(self):
        self._running = True
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def download_updates(self):
        # check for software updates at most once every 24 hours
        # and re-export the local library as JSON
        now = int(time.mktime(time.gmtime()))
        if now > settings.server.get('last_update_check', 0) + 24*60*60:
            settings.server['last_update_check'] = now
            update.download()
            state.user().library.export_json()

    def download_next(self):
        # request the oldest unfinished transfer from an online peer,
        # return True if a download was started
        import item.models
        self.download_updates()
        for t in item.models.Transfer.query.filter(
            item.models.Transfer.added != None,
            item.models.Transfer.progress < 1,
        ).order_by(item.models.Transfer.added):
            if not self._running:
                return False
            for u in t.item.users:
                if state.nodes.is_online(u.id):
                    logger.debug('DOWNLOAD %s %s', t.item, u)
                    r = state.nodes.download(u.id, t.item)
                    return True
        return False

    def run(self):
        self.wait(10)
        while self._running:
            self.wait_online()
            with db.session():
                self.download_next()
            self.wait(10)

    def join(self):
        self._running = False
        return Thread.join(self)

    def wait_online(self):
        while not state.online:
            self.wait(5)

    def wait(self, timeout):
        # sleep in short steps so join() can stop the thread promptly
        step = min(timeout, 1)
        while self._running and timeout > 0:
            time.sleep(step)
            timeout -= step


class ScrapeThread(Thread):

    def __init__(self):
        self._running = True
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def scrape_queue(self):
        # process pending scrape entries, oldest first; return True if
        # anything was scraped so run() polls again without sleeping
        import item.models
        scraped = False
        for s in item.models.Scrape.query.filter(
            item.models.Scrape.added != None,
        ).order_by(item.models.Scrape.added):
            if not self._running:
                return True
            logger.debug('scrape %s', s.item)
            try:
                if s.item.scrape():
                    for f in s.item.files:
                        f.move()
                    s.item.update_icons()
                    s.item.save()
                s.remove()
                trigger_event('change', {})
                scraped = True
            except:
                logger.debug('scrape failed %s', s.item, exc_info=1)
        return scraped

    def run(self):
        time.sleep(2)
        while self._running:
            wait = False
            with db.session():
                if not self.scrape_queue():
                    wait = True
            if wait:
                time.sleep(1)

    def join(self):
        self._running = False
        return Thread.join(self)
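

# Usage sketch (illustrative, not part of the original file): both classes are
# self-starting daemon threads, so a caller typically constructs them once at
# startup and calls join() on shutdown, e.g.
#
#     downloads = Downloads()
#     scraper = ScrapeThread()
#     ...
#     downloads.join()
#     scraper.join()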