# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division

import json
import hashlib
import logging

from sqlalchemy.orm import load_only

from oxtornado import actions

from utils import cleanup_id
from websocket import trigger_event
import metaremote as meta
import models
import query
import settings
import state
import utils

logger = logging.getLogger('oml.item.api')


def find(data):
    '''
    takes {
        query {
            conditions [{}]
            operator string
        }
        group string
        keys [string]
        sort [{}]
        range [int, int]
    }
    '''
    response = {}
    q = query.parse(data)
    if 'group' in q:
        names = {}
        groups = {}
        key = 'group:' + hashlib.sha1(json.dumps(data)).hexdigest()
        g = state.cache.get(key)
        if g is None:
            items = [i.id for i in q['qs'].options(load_only('id'))]
            qs = models.Find.query.filter_by(key=q['group'])
            if items:
                qs = qs.filter(models.Find.item_id.in_(items))
                for f in qs.values('value', 'findvalue'):
                    value = f[0]
                    findvalue = f[1]
                    if findvalue not in groups:
                        groups[findvalue] = 0
                    groups[findvalue] += 1
                    names[findvalue] = value
                g = [{'name': names[k], 'items': groups[k]} for k in groups]
            else:
                g = []
            if 'sort' in q:
                g.sort(key=lambda k: k[q['sort'][0]['key']])
                if q['sort'][0]['operator'] == '-':
                    g.reverse()
            state.cache.set(key, g)
        if 'positions' in data:
            response['positions'] = {}
            ids = [k['name'] for k in g]
            response['positions'] = utils.get_positions(ids, data['positions'])
        elif 'range' in data:
            response['items'] = g[q['range'][0]:q['range'][1]]
        else:
            response['items'] = len(g)
    elif 'position' in data:
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        response['position'] = utils.get_positions(ids, [data['qs'][0].id])[0]
    elif 'positions' in data:
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        response['positions'] = utils.get_positions(ids, data['positions'])
    elif 'keys' in data:
        response['items'] = []
        for i in q['qs'][q['range'][0]:q['range'][1]]:
            j = i.json()
            response['items'].append({k: j[k] for k in j if not data['keys'] or k in data['keys']})
    else:
        size = [i.info.get('size', 0) for i in q['qs'].join(models.Sort).options(load_only('id', 'info'))]
        response['items'] = len(size)
        response['size'] = sum(size)
    return response
actions.register(find)
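
# Illustrative request only -- the condition values below are made up and the
# exact keys accepted inside 'query' depend on query.parse():
#
#     find({
#         'query': {
#             'conditions': [{'key': 'title', 'value': 'moby', 'operator': '='}],
#             'operator': '&'
#         },
#         'keys': ['id', 'title', 'author'],
#         'sort': [{'key': 'title', 'operator': '+'}],
#         'range': [0, 100]
#     })
#
# With 'group' set, the response contains {'name': ..., 'items': count} rows
# instead of items; with neither 'keys' nor 'group', only totals are returned.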


def get(data):
    '''
    takes {
        id
        keys
    }
    '''
    response = {}
    item = models.Item.get(data['id'])
    if item:
        response = item.json(data['keys'] if 'keys' in data else None)
    return response
actions.register(get)
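
# Illustrative call; the key names are assumptions, any keys present in
# item.json() can be requested:
#
#     get({'id': item_id, 'keys': ['title', 'author', 'mediastate']})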


def edit(data):
    '''
    takes {
        id
        ...
    }
    setting the identifier or editing base metadata is possible, but not both
    at the same time
    '''
    response = {}
    item = models.Item.get(data['id'])
    if item and item.json()['mediastate'] == 'available':
        if 'primaryid' in data:
            if data['primaryid']:
                key, value = data['primaryid']
                logger.debug('update primaryid %s %s', key, value)
                value = cleanup_id(key, value)
                item.update_primaryid(key, value)
            else:
                item.update_primaryid()
            response = item.json()
        else:
            item.edit_metadata(data)
            response = item.json()
    else:
        logger.info('can only edit available items')
    return response
actions.register(edit, cache=False)
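
# Two illustrative calls (ids, identifier and metadata values are made up):
# either update the primary identifier or edit base metadata, not both:
#
#     edit({'id': item_id, 'primaryid': ['isbn13', '9781784740047']})
#     edit({'id': item_id, 'title': 'New Title', 'author': ['Some Author']})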


def remove(data):
    '''
    takes {
        ids
    }
    '''
    logger.debug('remove files %s', data)
    if 'ids' in data and data['ids']:
        for i in models.Item.query.filter(models.Item.id.in_(data['ids'])):
            i.remove_file()
    return {
        'items': []
    }
actions.register(remove, cache=False)
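
# Illustrative call; note the parameter is 'ids' (a list), matching the code:
#
#     remove({'ids': [item_id]})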


def autocomplete(data):
    return {}
actions.register(autocomplete, cache=False)


def findMetadata(data):
    '''
    takes {
        query: string,
    }
    returns {
        items: [{
            key: value
        }]
    }
    key is one of the supported identifiers: isbn10, isbn13...
    '''
    response = {}
    logger.debug('findMetadata %s', data)
    response['items'] = meta.find(data['query'])
    return response
actions.register(findMetadata)
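
# Illustrative call (the query string is made up); each returned item maps
# supported identifier keys such as isbn10/isbn13 to values:
#
#     findMetadata({'query': 'Calvino Invisible Cities'})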


def getMetadata(data):
    '''
    takes {
        key: value
        includeEdits: boolean
    }
    key can be one of the supported identifiers: isbn10, isbn13, oclc, olid,...
    '''
    logger.debug('getMetadata %s', data)
    if 'includeEdits' in data:
        include_edits = data.pop('includeEdits')
    else:
        include_edits = False
    key, value = data.iteritems().next()
    value = cleanup_id(key, value)
    response = meta.lookup(key, value)
    if include_edits:
        response.update(models.Metadata.load(key, value))
    if response:
        response['primaryid'] = [key, value]
    return response
actions.register(getMetadata)
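
# Illustrative call (the identifier value is made up); the request carries
# exactly one identifier key plus the optional includeEdits flag:
#
#     getMetadata({'isbn13': '9780156453004', 'includeEdits': True})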


def resetMetadata(data):
    item = models.Item.get(data['id'])
    if item and 'primaryid' in item.meta:
        m = models.Metadata.get(*item.meta['primaryid'])
        if m:
            m.reset()
    return {}
actions.register(resetMetadata)


def download(data):
    '''
    takes {
        id
    }
    '''
    response = {}
    item = models.Item.get(data['id'])
    if item:
        item.queue_download()
        item.update()
        response = {'status': 'queued'}
    return response
actions.register(download, cache=False)


def cancelDownloads(data):
    '''
    takes {
        ids
    }
    '''
    response = {}
    ids = data['ids']
    if ids:
        for item in models.Item.query.filter(models.Item.id.in_(ids)):
            t = models.Transfer.get(item.id)
            t.progress = None
            t.added = None
            t.save()
            p = state.user()
            if p in item.users:
                item.users.remove(p)
            for l in item.lists.filter_by(user_id=settings.USER_ID):
                l.items.remove(item)
            item.update()
        response = {'status': 'cancelled'}
    return response
actions.register(cancelDownloads, cache=False)


def scan(data):
    state.tasks.queue('scan', {})
    return {}
actions.register(scan, cache=False)


def _import(data):
    '''
    takes {
        path absolute path to import
        list listname (add new items to this list)
        mode copy|move
    }
    '''
    logger.debug('api.import %s', data)
    state.tasks.queue('import', data)
    return {}
actions.register(_import, 'import', cache=False)
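
# Illustrative call (path and list name are made up); 'mode' is copy or move:
#
#     _import({'path': '/path/to/books', 'list': 'New Books', 'mode': 'copy'})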


def cancelImport(data):
    state.activity['cancel'] = True
    trigger_event('activity', {
        'activity': 'import',
        'progress': [0, 0],
        'status': {'code': 200, 'text': 'canceled'}
    })
    return {}
actions.register(cancelImport, cache=False)