# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import json
import hashlib
import os
import unicodedata

from sqlalchemy.orm import load_only
from sqlalchemy import func

from oxtornado import actions

from utils import cleanup_id
from websocket import trigger_event
import meta
from . import models
from . import query
from .person import get_sort_name
import settings
import state
import utils

from . import person_api
from . import title_api

import logging
logger = logging.getLogger(__name__)


def find(data):
    '''
    takes {
        query {
            conditions [{}]
            operator string
        }
        group string
        keys [string]
        sort [{}]
        range [int, int]
    }
    '''
    response = {}
    q = query.parse(data)
    if 'group' in q:
        # grouped view: count items per distinct value of the group key,
        # cached per request payload (the cache is cleared again in edit())
        names = {}
        groups = {}
        key = 'group:' + hashlib.sha1(json.dumps(data).encode('utf-8')).hexdigest()
        g = state.cache.get(key)
        if g is None:
            items = q['qs'].options(load_only('id'))
            qs = models.Find.query.filter_by(key=q['group'])
            if items.first():
                qs = qs.filter(models.Find.item_id.in_(items))
                for f in qs.values('value', 'findvalue'):
                    value = f[0]
                    findvalue = f[1]
                    if findvalue not in groups:
                        groups[findvalue] = 0
                    groups[findvalue] += 1
                    names[findvalue] = value
                g = [{'name': names[k], 'items': groups[k]} for k in groups]
            else:
                g = []
            if 'sort' in q:
                # sort groups by the requested key; person groups sort by sort name
                sort_type = utils.get_by_id(settings.config['itemKeys'], q['group']).get('sortType')

                def _sort_key(k):
                    if sort_type == 'person' and q['sort'][0]['key'] == 'name':
                        v = get_sort_name(k[q['sort'][0]['key']])
                    else:
                        v = k[q['sort'][0]['key']]
                    if isinstance(v, str):
                        v = unicodedata.normalize('NFKD', v).lower()
                    return v

                g.sort(key=_sort_key, reverse=q['sort'][0]['operator'] == '-')
            state.cache.set(key, g)
        if 'positions' in data:
            response['positions'] = {}
            ids = [k['name'] for k in g]
            response['positions'] = utils.get_positions(ids, data['positions'])
        elif 'range' in data:
            response['items'] = g[q['range'][0]:q['range'][1]]
        else:
            response['items'] = len(g)
    elif 'position' in data:
        # position of the first matching item within the full result order
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        response['position'] = utils.get_positions(ids, [q['qs'][0].id])[0]
    elif 'positions' in data:
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        response['positions'] = utils.get_positions(ids, data['positions'])
    elif 'keys' in data:
        response['items'] = [
            i.json(data['keys']) for i in q['qs'][q['range'][0]:q['range'][1]]
        ]
    else:
        # no keys requested: return total count and combined size only
        size = [i.info.get('size', 0) for i in q['qs'].options(load_only('id', 'info'))]
        response['items'] = len(size)
        response['size'] = sum(size)
    return response

actions.register(find)

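# Usage sketch (illustrative, not part of the original API): a typical find()
# request. The condition shape and the 'title' key are assumptions; the keys
# actually available depend on settings.config['itemKeys'].
#
#   find({
#       'query': {
#           'conditions': [{'key': 'title', 'value': 'dubliners', 'operator': '='}],
#           'operator': '&'
#       },
#       'keys': ['id', 'title'],
#       'sort': [{'key': 'title', 'operator': '+'}],
#       'range': [0, 25]
#   })
#
# With 'group' set instead of 'keys', the call returns value counts:
# {'items': [{'name': <value>, 'items': <count>}, ...]}.
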
def get(data):
    '''
    takes {
        id
        keys
    }
    '''
    response = {}
    item = models.Item.get(data['id'])
    if item:
        response = item.json(data['keys'] if 'keys' in data else None)
    return response

actions.register(get)

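# Usage sketch (illustrative): fetch a single item, optionally limited to a
# few keys. The id and key names below are placeholders.
#
#   get({'id': '<item id>', 'keys': ['title', 'mediastate']})
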
def edit(data):
    '''
    takes {
        id
        ...
    }

    Setting an identifier or base metadata is possible, but not both at the
    same time.

    id can be a single id or a list of ids.
    '''
    response = {}
    ids = data['id']
    if isinstance(ids, str):
        ids = [ids]
    edited = []
    for id in ids:
        item = models.Item.get(id)
        if item and item.json()['mediastate'] == 'available':
            item.edit(data, reset_from=True)
            response = item.json()
            edited.append(id)
        else:
            logger.info('can only edit available items %s', id)
    if len(ids) > 1:
        response = data
        response['id'] = edited
    # edits can change grouping, so drop all cached 'group:' results
    for key in list(state.cache):
        if key.startswith('group:'):
            state.cache.delete(key)
    return response

actions.register(edit, cache=False)

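# Usage sketch (illustrative): edit base metadata on one or more available
# items. All keys besides 'id' are placeholders for the docstring's "...";
# the real keys depend on settings.config['itemKeys'].
#
#   edit({'id': ['<item id 1>', '<item id 2>'], 'title': 'New Title'})
#
# For a single id the full item JSON is returned; for multiple ids the input
# is echoed back with 'id' replaced by the list of ids actually edited.
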
def remove(data):
    '''
    takes {
        ids
    }
    '''
    if 'ids' in data and data['ids']:
        for i in models.Item.query.filter(models.Item.id.in_(data['ids'])):
            i.remove_file()
    return {
        'items': []
    }

actions.register(remove, cache=False)

def autocomplete(data):
    '''
    takes {
        key: string,
        value: string,
        operator: string // '=', '==', '^', '$'
        query: object // item query to limit results
        range: [int, int]
    }
    returns {
        items: [string, ...] // array of matching values
    }
    '''
    response = {}
    response['items'] = []
    if 'range' not in data:
        data['range'] = [0, 10]
    op = data.get('operator', '=')

    # use the key's configured autocomplete sort if set, otherwise order by frequency
    key = utils.get_by_id(settings.config['itemKeys'], data['key'])
    order_by = key.get('autocompleteSort', False)
    add_itemsort = False
    if order_by:
        for o in order_by:
            if o['operator'] != '-':
                o['operator'] = ''
        order_by = ['%(operator)ssort.%(key)s' % o for o in order_by]
        add_itemsort = True
    else:
        order_by = ['-items']

    # count distinct find values, restricted to items matching the optional query
    items = query.parse({'query': data.get('query', {})})['qs'].options(load_only('id'))
    qs = state.db.session.query(models.Find.value, func.count(models.Find.value).label('items'))
    qs = qs.filter(models.Find.item_id.in_(items))
    if data['value']:
        value = data['value'].lower()
        qs = qs.filter(models.Find.key.is_(data['key']))
        if op == '=':
            qs = qs.filter(models.Find.findvalue.contains(value))
        elif op == '==':
            qs = qs.filter(models.Find.findvalue.is_(value))
        elif op == '^':
            qs = qs.filter(models.Find.findvalue.startswith(value))
        elif op == '$':
            qs = qs.filter(models.Find.findvalue.endswith(value))
    if add_itemsort:
        qs = qs.join(models.Item).join(models.Sort)
    qs = qs.group_by(models.Find.findvalue)
    qs = qs.order_by(*order_by)
    response['items'] = [r.value for r in qs[data['range'][0]:data['range'][1]]]
    return response

actions.register(autocomplete)

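# Usage sketch (illustrative): complete values of the 'author' key that start
# with "jo", limited to the first ten matches. 'author' is a placeholder; any
# key from settings.config['itemKeys'] works.
#
#   autocomplete({
#       'key': 'author',
#       'value': 'jo',
#       'operator': '^',
#       'range': [0, 10]
#   })
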
def findMetadata(data):
    '''
    takes {
        key: value,
    }
    returns {
        items: [{
            key: value
        }]
    }

    Supported lookups: isbn, author + title, or id.
    '''
    response = {
        'items': []
    }
    key = ','.join(sorted(data))
    if key == 'isbn':
        r = meta.lookup(key, data[key])
        if r:
            response['items'].append(r)
    elif key == 'author,title':
        value = ' '.join(data.values())
        for isbn in meta.find(value):
            r = meta.lookup('isbn', isbn)
            if r:
                response['items'].append(r)
    elif key == 'id':
        import user.models
        items = {}
        for m in user.models.Metadata.query.filter_by(item_id=data[key]):
            if m.data_hash not in items:
                items[m.data_hash] = m.data
                items[m.data_hash]['users'] = [m.user_id]
                j = items[m.data_hash]
                # make sure list-type keys hold lists, even if a single value was stored
                for key in [k['id'] for k in settings.config['itemKeys'] if isinstance(k['type'], list)]:
                    if key in j and not isinstance(j[key], list):
                        j[key] = [j[key]] if j[key] else []
            else:
                items[m.data_hash]['users'].append(m.user_id)
        response['items'] = list(items.values())
    return response

actions.register(findMetadata)

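# Usage sketch (illustrative): the three lookup shapes handled above. The
# values are placeholders.
#
#   findMetadata({'isbn': '<isbn>'})
#   findMetadata({'author': '<author>', 'title': '<title>'})
#   findMetadata({'id': '<item id>'})   # metadata edits shared by other users
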
def getMetadata(data):
    '''
    takes {
        key: value
        includeEdits: boolean
    }

    key can be one of the supported identifiers: isbn10, isbn13, oclc, olid, ...
    '''
    if 'includeEdits' in data:
        include_edits = data.pop('includeEdits')
    else:
        include_edits = False
    key, value = next(iter(data.items()))
    value = cleanup_id(key, value)
    response = meta.lookup(key, value)
    if include_edits:
        response.update(models.Metadata.load(key, value))
    # make sure list-type keys hold lists
    for key in [k['id'] for k in settings.config['itemKeys'] if isinstance(k['type'], list)]:
        if key in response and not isinstance(response[key], list):
            response[key] = [response[key]] if response[key] else []
    return response

actions.register(getMetadata)

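# Usage sketch (illustrative): look up metadata for a single identifier,
# merged with locally shared edits. The identifier value is a placeholder.
#
#   getMetadata({'isbn13': '<isbn13>', 'includeEdits': True})
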
def resetMetadata(data):
    '''
    takes {
        id
    }
    '''
    item = models.Item.get(data['id'])
    if item and 'primaryid' in item.meta:
        meta = models.Metadata.get(*item.meta['primaryid'])
        if meta:
            meta.reset()
    return {}

actions.register(resetMetadata)

def download(data):
    '''
    takes {
        id
    }
    '''
    response = {}
    item = models.Item.get(data['id'])
    if item:
        item.queue_download()
        item.update()
        response = {'status': 'queued'}
    return response

actions.register(download, cache=False)

def cancelDownloads(data):
    '''
    takes {
        ids
    }
    '''
    response = {}
    ids = data['ids']
    if ids:
        for item in models.Item.query.filter(models.Item.id.in_(ids)):
            t = models.Transfer.get(item.id)
            t.progress = None
            t.added = None
            t.save()
            p = state.user()
            if p in item.users:
                item.users.remove(p)
            for l in item.lists.filter_by(user_id=settings.USER_ID):
                l.items.remove(item)
            item.update()
        response = {'status': 'cancelled'}
    return response

actions.register(cancelDownloads, cache=False)

def scan(data):
    state.tasks.queue('scan', {})
    return {}

actions.register(scan, cache=False)

def export(data):
    '''
    takes {
        path    absolute path to export to
        list    listname (export items from this list)
        mode    add|replace
    }
    '''
    import user.models
    l = user.models.List.get(':' + data['list'])
    if l:
        data['list'] = ':' + data['list']
        state.tasks.queue('export', data)
        response = {'status': 'ok'}
    else:
        response = {'status': 'invalid list'}
    return response

actions.register(export, cache=False)

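# Usage sketch (illustrative): export a named list to a folder; the export
# itself runs asynchronously via state.tasks. List name and path are
# placeholders, and the exact meaning of 'mode' follows the docstring above.
#
#   export({'path': '/path/to/folder', 'list': '<list name>', 'mode': 'add'})
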
def _import(data):
    '''
    takes {
        path    absolute path to import
        list    listname (add new items to this list)
        mode    copy|move
    }
    '''
    state.tasks.queue('import', data)
    return {}

actions.register(_import, 'import', cache=False)

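# Usage sketch (illustrative): import files from a folder into a list, copying
# rather than moving them; registered under the name 'import' because the
# plain name is a Python keyword. Path and list name are placeholders.
#
#   _import({'path': '/path/to/folder', 'list': '<list name>', 'mode': 'copy'})
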
def cancelImport(data):
    state.activity['cancel'] = True
    trigger_event('activity', {
        'activity': 'import',
        'progress': [0, 0],
        'status': {'code': 200, 'text': 'canceled'}
    })
    return {}

actions.register(cancelImport, cache=False)

def openFolder(data):
    response = {}
    item = models.Item.get(data['id'])
    if item:
        path = item.get_path()
        if path:
            utils.open_folder(path=path)
    return response

actions.register(openFolder, cache=False)