Save queued tasks on shutdown; add a 'getcover' task.

This commit is contained in:
j 2016-02-11 11:44:40 +05:30
parent 813e3b591b
commit acd64d3186
2 changed files with 87 additions and 37 deletions

View file

@@ -450,6 +450,8 @@ class Item(db.Model):
def update_icons(self): def update_icons(self):
if state.online: if state.online:
self.update_cover() self.update_cover()
else:
state.tasks.queue('getcover', self.id)
self.update_preview() self.update_preview()
def load_metadata(self): def load_metadata(self):
@@ -823,12 +825,28 @@ def update_sort_table():
s.commit() s.commit()
def get_cover(id):
delay = 60
if state.online:
#logger.debug('get_cover(%s)', id)
with db.session():
i = Item.get(id)
if i:
i.update_cover()
else:
state.main.call_later(delay, lambda: state.tasks.queue('getcover', id))
def get_preview(id): def get_preview(id):
#logger.debug('get_preview(%s)', id) delay = 60
with db.session(): if state.online:
i = Item.get(id) #logger.debug('get_preview(%s)', id)
if i: with db.session():
i.get_preview() i = Item.get(id)
if i:
i.get_preview()
else:
state.main.call_later(delay, lambda: state.tasks.queue('getpreview', id))
def sync_metadata(ids=None): def sync_metadata(ids=None):
#logger.debug('sync_metadata(%s)', len(ids) if len(ids) > 10 else ids) #logger.debug('sync_metadata(%s)', len(ids) if len(ids) > 10 else ids)

View file

@@ -1,65 +1,97 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import os
import json
from queue import Queue
from threading import Thread

from websocket import trigger_event
import state
import settings

import logging
logger = logging.getLogger(__name__)
class Tasks(Thread): class Tasks(Thread):
_tasks = []
def __init__(self): def __init__(self):
self._taskspath = os.path.join(settings.data_path, 'tasks.json')
self.q = Queue() self.q = Queue()
Thread.__init__(self) Thread.__init__(self)
self.daemon = True self.daemon = True
self.start() self.start()
self.queue('scan') self.queue('scan')
self.load_tasks()
def run(self): def run(self):
import item.scan import item.scan
from item.models import sync_metadata, get_preview from item.models import sync_metadata, get_preview, get_cover
from user.models import export_list, update_user_peering from user.models import export_list, update_user_peering
while not state.shutdown: shutdown = False
while not shutdown:
m = self.q.get() m = self.q.get()
if m and not state.shutdown: if m:
try: if state.shutdown:
action, data = m self._tasks.append(m)
logger.debug('%s start', action) else:
if action == 'changelibrarypath': try:
item.scan.change_path(data[0], data[1]) action, data = m
elif action == 'export': logger.debug('%s start', action)
export_list(data) if action == 'changelibrarypath':
elif action == 'getpreview': item.scan.change_path(data[0], data[1])
get_preview(data) elif action == 'export':
elif action == 'import': export_list(data)
item.scan.run_import(data) elif action == 'getcover':
elif action == 'peering': get_cover(data)
update_user_peering(*data) elif action == 'getpreview':
elif action == 'ping': get_preview(data)
trigger_event('pong', data) elif action == 'import':
elif action == 'scan': item.scan.run_import(data)
item.scan.run_scan() elif action == 'peering':
elif action == 'scanimport': update_user_peering(*data)
item.scan.import_folder() elif action == 'ping':
elif action == 'syncmetadata': trigger_event('pong', data)
sync_metadata(data) elif action == 'scan':
else: item.scan.run_scan()
trigger_event('error', {'error': 'unknown action'}) elif action == 'scanimport':
logger.debug('%s done', action) item.scan.import_folder()
except: elif action == 'syncmetadata':
logger.debug('task failed', exc_info=True) sync_metadata(data)
else:
trigger_event('error', {'error': 'unknown action'})
logger.debug('%s done', action)
except:
logger.debug('task failed', exc_info=True)
else:
shutdown = True
self.q.task_done() self.q.task_done()
def load_tasks(self):
if os.path.exists(self._taskspath):
try:
with open(self._taskspath) as f:
tasks = json.load(f)
for task in tasks:
self.q.put(task)
logger.debug('loaded %s tasks', len(tasks))
except:
logger.debug('failed to load saved tasks', exc_info=True)
os.unlink(self._taskspath)
def save_tasks(self):
if self._tasks:
logger.debug('saving %s tasks for later', len(self._tasks))
with open(self._taskspath, 'w') as f:
json.dump(self._tasks, f)
def join(self): def join(self):
self.q.put(None) self.q.put(None)
return Thread.join(self) r = Thread.join(self)
self.save_tasks()
return r
def queue(self, action, data=None): def queue(self, action, data=None):
if not state.shutdown: if not state.shutdown:
logger.debug('%s queued', action) logger.debug('%s queued', action)
self.q.put((action, data)) self.q.put((action, data))