# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import json
import hashlib
import os

from sqlalchemy.orm import load_only
from sqlalchemy.sql.expression import text
from sqlalchemy import func

from oxtornado import actions
import utils  # utils must be imported before websocket to avoid loop
from websocket import trigger_event
import meta
from . import models
from . import query
import settings
import state

from . import person_api
from . import title_api

import logging
logger = logging.getLogger(__name__)


def find(data):
    '''
    takes {
        query {
            conditions [{}]
            operator string
        }
        group string
        keys [string]
        sort [{}]
        range [int, int]
    }
    '''
    response = {}
    q = query.parse(data)
    if 'group' in q:
        names = {}
        groups = {}
        sortvalues = {}
        _keydata = data.copy()
        for key in ('range', 'position', 'positions'):
            if key in _keydata:
                del _keydata[key]
        key = 'group:' + hashlib.sha1(json.dumps(_keydata, sort_keys=True).encode('utf-8')).hexdigest()
        g = state.cache.get(key)
        if g is None:
            state.cache.lock(key)
            if data.get('query', {}).get('conditions'):
                items = q['qs'].options(load_only('id'))
            else:
                items = None
            qs = models.Find.query.filter_by(key=q['group'])
            if items is None or items.first():
                if items is not None:
                    qs = qs.filter(models.Find.item_id.in_(items))
                values = list(qs.values('value', 'findvalue', 'sortvalue'))
                for f in values:
                    value = f[0]
                    findvalue = f[1]
                    sortvalue = f[2]
                    if findvalue not in groups:
                        groups[findvalue] = 0
                    groups[findvalue] += 1
                    names[findvalue] = value
                    sortvalues[value] = sortvalue
                g = [{'name': names[k], 'items': groups[k]} for k in groups]
            else:
                g = []
            if 'sort' in q:
                reverse = q['sort'][0]['operator'] == '-'

                def _sort_key(k):
                    name = sortvalues[k['name']]
                    if not name:
                        name = '\uffff' if not reverse else ''
                    items = k['items']
                    if q['sort'][0]['key'] == 'name':
                        v = (name, items)
                    else:
                        v = (-items, name)
                    return v

                if q['sort'][0]['key'] == 'items':
                    reverse = not reverse
                g.sort(key=_sort_key, reverse=reverse)
            state.cache.set(key, g)
        if 'positions' in data:
            response['positions'] = {}
            ids = [k['name'] for k in g]
            response['positions'] = utils.get_positions(ids, data['positions'])
        elif 'range' in data:
            response['items'] = g[q['range'][0]:q['range'][1]]
        else:
            response['items'] = len(g)
    elif 'position' in data:
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        # position of the requested item id within the query result order
        response['position'] = utils.get_positions(ids, [data['position']])[0]
    elif 'positions' in data:
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        response['positions'] = utils.get_positions(ids, data['positions'])
    elif 'keys' in data:
        response['items'] = [
            i.json(data['keys']) for i in q['qs'][q['range'][0]:q['range'][1]]
        ]
    else:
        size = [i.info.get('size', 0) for i in q['qs'].options(load_only('id', 'info'))]
        response['items'] = len(size)
        response['size'] = sum(size)
    return response
actions.register(find)
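
# Illustrative sketch of a find() request, assuming the client sends JSON of
# the shape described in the docstring above and that 'title' is a configured
# item key (both assumptions, not taken from this file):
#
#   find({
#       'query': {
#           'conditions': [{'key': 'title', 'operator': '=', 'value': 'ulysses'}],
#           'operator': '&'
#       },
#       'keys': ['id', 'title'],
#       'sort': [{'key': 'title', 'operator': '+'}],
#       'range': [0, 100]
#   })
#   # -> {'items': [{'id': ..., 'title': ...}, ...]}

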
def get(data):
    '''
    takes {
        id
        keys
    }
    '''
    response = {}
    item = models.Item.get(data['id'])
    if item:
        response = item.json(data['keys'] if 'keys' in data else None)
    return response
actions.register(get)
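
# Minimal usage sketch (the item id is hypothetical):
#
#   get({'id': 'abc123', 'keys': ['title']})
#
# returns the item's JSON limited to the requested keys, or {} if the id is unknown.

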
def edit(data):
    '''
    takes {
        id
        ...
    }
    setting identifier or base metadata is possible, but not both at the same time

    id can be a single id or a list of ids
    '''
    response = {}
    ids = data['id']
    if isinstance(ids, str):
        ids = [ids]
    edited = []
    for id in ids:
        item = models.Item.get(id)
        if item and item.json().get('mediastate') == 'available':
            item.edit(data)
            response = item.json()
            edited.append(id)
        else:
            logger.info('can only edit available items %s', id)
    if len(ids) > 1:
        response = data
        response['id'] = edited
    state.cache.clear('group:')
    return response
actions.register(edit, cache=False)
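
# Sketch of an edit() call on a single item; 'title' as an editable key and the
# id are assumptions for illustration:
#
#   edit({'id': 'abc123', 'title': 'New Title'})
#
# Only items whose mediastate is 'available' are edited; with a list of ids the
# response echoes the request data with 'id' set to the ids actually edited.

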
def remove(data):
    '''
    takes {
        ids
    }
    '''
    if 'ids' in data and data['ids']:
        for i in models.Item.query.filter(models.Item.id.in_(data['ids'])):
            logger.info('remove item %s', i)
            i.remove_file()
        state.cache.clear('group:')
    return {
        'items': []
    }
actions.register(remove, cache=False)
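
# Usage sketch: remove({'ids': ['abc123']}) removes the files for the listed
# items (the ids here are hypothetical) and clears cached group queries.

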
def autocomplete(data):
    '''
    takes {
        key: string,
        value: string,
        operator: string // '=', '==', '^', '$'
        query: object // item query to limit results
        range: [int, int]
    }
    returns {
        items: [string, ...] // array of matching values
    }
    '''
    response = {}
    response['items'] = []
    if 'range' not in data:
        data['range'] = [0, 10]
    op = data.get('operator', '=')

    key = utils.get_by_id(settings.config['itemKeys'], data['key'])
    order_by = key.get('autocompleteSort', False)
    add_itemsort = False
    if order_by:
        for o in order_by:
            if o['operator'] != '-':
                o['operator'] = ''
        order_by = [text('%(operator)ssort.%(key)s' % o) for o in order_by]
        add_itemsort = True
    else:
        order_by = [text('-items')]

    items = query.parse({'query': data.get('query', {})})['qs'].options(load_only('id'))
    qs = state.db.session.query(models.Find.value, func.count(models.Find.value).label('items'))
    qs = qs.filter(models.Find.item_id.in_(items))
    if data['value']:
        value = data['value'].lower()
        qs = qs.filter(models.Find.key.is_(data['key']))
        if op == '=':
            qs = qs.filter(models.Find.findvalue.contains(value))
        elif op == '==':
            qs = qs.filter(models.Find.findvalue.is_(value))
        elif op == '^':
            qs = qs.filter(models.Find.findvalue.startswith(value))
        elif op == '$':
            qs = qs.filter(models.Find.findvalue.endswith(value))
    if add_itemsort:
        qs = qs.join(models.Item).join(models.Sort)
    qs = qs.group_by(models.Find.findvalue)
    qs = qs.order_by(*order_by)
    response['items'] = [r.value for r in qs[data['range'][0]:data['range'][1]]]
    return response
actions.register(autocomplete)
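
# Usage sketch with a hypothetical 'author' item key; '^' asks for a prefix
# match on the indexed findvalue, the result values are made up:
#
#   autocomplete({'key': 'author', 'value': 'wo', 'operator': '^', 'range': [0, 10]})
#   # -> {'items': ['Woolf, Virginia', ...]}

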
def findMetadata(data):
    '''
    takes {
        key: value,
    }
    returns {
        items: [{
            key: value
        }]
    }
    key is one of the supported identifiers: isbn10, isbn13...
    '''
    response = {
        'items': []
    }
    key = ','.join(sorted(data))
    if key == 'isbn':
        r = meta.lookup_isbn(data[key])
        if r:
            response['items'].append(r)
    elif key == 'author,title':
        response['items'] = meta.find(**data)
    elif key == 'id':
        import user.models
        items = {}
        value = data[key]
        for u in user.models.User.query.filter_by(peered=True):
            peer = utils.get_peer(u.id)
            if value in peer.library:
                h = peer.get_metahash(value)
                if h not in items:
                    items[h] = peer.library[value].get('meta', {}).copy()
                    items[h]['users'] = [u.id]
                    j = items[h]
                    for key_ in [k['id'] for k in settings.config['itemKeys'] if isinstance(k['type'], list)]:
                        if key_ in j and not isinstance(j[key_], list):
                            j[key_] = [j[key_]] if j[key_] else []
                else:
                    items[h]['users'].append(u.id)
        response['items'] = list(items.values())
    return response
actions.register(findMetadata)
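
# Two usage sketches (the values are placeholders for illustration):
#
#   findMetadata({'isbn': '<isbn13>'})
#   findMetadata({'title': 'Mrs Dalloway', 'author': 'Virginia Woolf'})
#
# With {'id': ...} the metadata is collected from peered users' libraries and
# deduplicated by metadata hash.

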
def cancelDownloads(data):
    '''
    takes {
        ids
    }
    '''
    response = {}
    ids = data['ids']
    if ids:
        for item in models.Item.query.filter(models.Item.id.in_(ids)):
            t = state.downloads.transfers.get(item.id)
            if t:
                del state.downloads.transfers[item.id]
            p = state.user()
            if p in item.users:
                item.users.remove(p)
            for l in item.lists.filter_by(user_id=settings.USER_ID):
                l.items.remove(item)
            item.update()
            trigger_event('transfer', {
                'id': item.id, 'progress': -1
            })
        response = {'cancelled': ids}
    return response
actions.register(cancelDownloads, cache=False)
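
# Usage sketch: cancelDownloads({'ids': ['abc123']}) drops any active transfer
# for the (hypothetical) ids, detaches the items from the local user's lists
# and emits a 'transfer' event with progress -1.

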
def scan(data):
    state.tasks.queue('scan', {})
    return {}
actions.register(scan, cache=False)


def export(data):
    '''
    takes {
        path    absolute path to export to
        list    list name (list to export)
        mode    add|replace
    }
    '''
    import user.models
    l = user.models.List.get(':' + data['list'])
    if l:
        data['list'] = ':' + data['list']
        state.tasks.queue('export', data)
        response = {'status': 'ok'}
    else:
        response = {'status': 'invalid list'}
    return response
actions.register(export, cache=False)
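
# Usage sketch (path and list name are made up): queues an 'export' task for
# the given user list and target path.
#
#   export({'path': '/tmp/oml-export', 'list': 'Reading', 'mode': 'replace'})

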
def _import(data):
    '''
    takes {
        path    absolute path to import
        list    list name (add new items to this list)
        mode    copy|move
    }
    '''
    if os.path.exists(data['path']):
        state.activity = {
            'activity': 'import',
            'path': data['path'],
        }
        state.tasks.queue('import', data)
    return {}
actions.register(_import, 'import', cache=False)
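
# Usage sketch (path and list name are made up); the action is registered under
# the public name 'import':
#
#   _import({'path': '/home/user/books', 'list': 'Inbox', 'mode': 'copy'})

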
def cancelImport(data):
    state.activity['cancel'] = True
    trigger_event('activity', {
        'activity': 'import',
        'progress': [0, 0],
        'status': {'code': 200, 'text': 'canceled'}
    })
    return {}
actions.register(cancelImport, cache=False)


def openFile(data):
    response = {}
    item = models.Item.get(data['id'])
    if item:
        path = item.get_path()
        if path:
            utils.open_file(path)
    return response
actions.register(openFile, cache=False)


def openFolder(data):
    response = {}
    item = models.Item.get(data['id'])
    if item:
        path = item.get_path()
        if path:
            utils.open_folder(path=path)
    return response
actions.register(openFolder, cache=False)