openmedialibrary/oml/downloads.py

# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from threading import Thread
import time
import db
import state
import settings
import update
from websocket import trigger_event
import logging
logger = logging.getLogger('oml.downloads')

class Downloads(Thread):

    def __init__(self):
        self._running = True
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def download_updates(self):
        # check for application updates at most once every 24 hours
        now = int(time.mktime(time.gmtime()))
        if now > settings.server.get('last_update_check', 0) + 24*60*60:
            settings.server['last_update_check'] = now
            update.download()

    def download_next(self):
        import item.models
        self.download_updates()
        # pick the oldest queued transfer that has not finished yet
        for t in item.models.Transfer.query.filter(
                item.models.Transfer.added != None,
                item.models.Transfer.progress < 1).order_by(item.models.Transfer.added):
            # ask the first peer that is currently online for the item
            for u in t.item.users:
                if state.nodes.is_online(u.id):
                    logger.debug('DOWNLOAD %s %s', t.item, u)
                    r = state.nodes.download(u.id, t.item)
                    logger.debug('download ok? %s', r)
                    return True
        return False

    def run(self):
        # give the rest of the application time to start before polling
        time.sleep(2)
        with db.session():
            while self._running:
                self.download_next()
                time.sleep(0.5)

    def join(self):
        self._running = False
        return Thread.join(self)

class ScrapeThread(Thread):

    def __init__(self):
        self._running = True
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def scrape_queue(self):
        import item.models
        scraped = False
        # work through queued scrape requests, oldest first
        for s in item.models.Scrape.query.filter(
                item.models.Scrape.added != None,
                ).order_by(item.models.Scrape.added):
            if not self._running:
                return False
            logger.debug('scrape %s', s.item)
            try:
                s.item.scrape()
                # once metadata is known, move the files to their final location
                for f in s.item.files:
                    f.move()
                s.remove()
                trigger_event('change', {})
                scraped = True
            except Exception:
                logger.debug('scrape failed %s', s.item, exc_info=True)
        return scraped

    def run(self):
        # give the rest of the application time to start before polling
        time.sleep(2)
        with db.session():
            while self._running:
                if not self.scrape_queue():
                    # nothing in the queue, back off before checking again
                    time.sleep(10)

    def join(self):
        self._running = False
        return Thread.join(self)
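
if __name__ == '__main__':
    # Usage sketch: both classes start their thread from __init__, so callers
    # only construct the objects and call join() on shutdown. The wiring below
    # is illustrative; the application normally holds these instances elsewhere.
    downloads_thread = Downloads()
    scrape_thread = ScrapeThread()
    try:
        while downloads_thread.is_alive() and scrape_thread.is_alive():
            time.sleep(1)
    except KeyboardInterrupt:
        downloads_thread.join()
        scrape_thread.join()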