forked from 0x2620/pandora
only decode ids for found positions
merge document.views.get_positions into item.utils.get_positions
add flag to decode id before looking it up in ids
Follow-up to 09ebbc9cc6
parent 3d95d7013a
commit c7157d5001
12 changed files with 24 additions and 47 deletions
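For orientation, the consolidated helper that the callers below switch to ends up looking roughly like this (a sketch assembled from the hunks in this diff; the page does not show file paths, so placing it in item/utils.py is an assumption based on the commit message):

import ox

def get_positions(ids, pos, decode_id=False):
    '''
    >>> get_positions([1, 2, 3, 4], [2, 4])
    {2: 1, 4: 3}
    '''
    positions = {}
    for i in pos:
        try:
            if decode_id:
                # requested ids arrive as base-26 AZ strings; decode them
                # to integers before looking them up in the id list
                positions[i] = ids.index(ox.fromAZ(i))
            else:
                positions[i] = ids.index(i)
        except:
            # bare except as in the original helper: requested ids that are
            # not present in the result set are simply skipped
            pass
    return positions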
@@ -640,7 +640,7 @@ def findMedia(request, data):
         qs = _order_query(qs, query['sort'])

         response['data']['positions'] = {}
-        ids = [j['oshash'] for j in qs.values('oshash')]
+        ids = list(qs.values_list('oshash', flat=True))
         response['data']['positions'] = utils.get_positions(ids, query['positions'])

     elif 'keys' in query:
@@ -75,8 +75,8 @@ def findChangeLogs(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [ox.toAZ(i.id) for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)
@@ -165,33 +165,6 @@ def parse_query(data, user):
     return query


-def get_positions(qs, query_positions):
-    '''
-    qs: a QuerySet
-    query_positions: a list of AZ ids
-
-    TODO: merge this with item.utils.get_positions. The win is to fetch
-    only the integer IDs and convert the (smaller) set of query_positions to
-    ints, rather than fetch all keys for everything in qs (expected to be many
-    orders of magnitude larger), ignore most of it, and convert those ids to
-    strings.
-
-    Returns:
-    {
-        i: index of i in qs
-        for i in query_positions
-    }
-    '''
-    ids = list(qs.values_list('id', flat=True))
-    ret = {}
-    for i in query_positions:
-        try:
-            ret[i] = ids.index(ox.fromAZ(i))
-        except:
-            pass
-    return ret
-
-
 def findDocuments(request, data):
     '''
     Finds documents for a given query
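The TODO in the docstring removed above is what this commit implements: callers now fetch only the integer primary keys for the whole result set and let the shared helper decode the much smaller list of requested AZ ids. A minimal sketch of the new caller pattern (qs and query stand in for each view's real queryset and parsed query):

# fetch only the integer primary keys, in the current sort order
ids = list(qs.values_list('id', flat=True))
# decode only the handful of requested AZ ids while looking them up
response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)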
@@ -225,7 +198,8 @@ def findDocuments(request, data):
         #FIXME: actually implement position requests
         response['data']['position'] = 0
     elif 'positions' in data:
-        response['data']['positions'] = get_positions(qs, query['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         r = qs.aggregate(
             Sum('size')
@@ -234,8 +234,8 @@ def findEntities(request, data):
         #FIXME: actually implement position requests
         response['data']['position'] = 0
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, query['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)
@@ -206,8 +206,8 @@ def findEvents(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()

@@ -61,7 +61,7 @@ def sort_title(title):
     title = re.sub(u'[\'!¿¡,\.;\-"\:\*\[\]]', '', title)
     return title.strip()

-def get_positions(ids, pos):
+def get_positions(ids, pos, decode_id=False):
     '''
     >>> get_positions([1,2,3,4], [2,4])
     {2: 1, 4: 3}
@@ -69,7 +69,10 @@ def get_positions(ids, pos):
     positions = {}
     for i in pos:
         try:
-            positions[i] = ids.index(i)
+            if decode_id:
+                positions[i] = ids.index(ox.fromAZ(i))
+            else:
+                positions[i] = ids.index(i)
         except:
             pass
     return positions
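Assuming ox.toAZ and ox.fromAZ round-trip an integer id to its base-26 string form and back, the new flag only changes how the requested keys are interpreted during the lookup. A usage sketch (not part of the diff):

import ox

ids = [10, 11, 12]                  # integer primary keys, already in sort order
key = ox.toAZ(11)                   # the AZ-encoded id a client would send
positions = get_positions(ids, [key], decode_id=True)
# positions == {key: 1}, i.e. the index of id 11 in the ordered id list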
@@ -204,7 +204,7 @@ def find(request, data):
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in query:
         qs = _order_query(query['qs'], query['sort'])
-        ids = [j['public_id'] for j in qs.values('public_id')]
+        ids = list(qs.values_list('public_id', flat=True))
         response['data']['positions'] = utils.get_positions(ids, query['positions'])
     elif 'keys' in query:
         response['data']['items'] = []
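Note the contrast with the hunks above: querysets keyed by string public_id values keep passing those strings straight through, while models keyed by integer ids opt into decoding. A rough sketch of the two call shapes (variable names illustrative):

# string ids (public_id): compare the requested values as-is
ids = list(qs.values_list('public_id', flat=True))
positions = utils.get_positions(ids, query['positions'])

# integer primary keys: decode the requested AZ strings during lookup
ids = list(qs.values_list('id', flat=True))
positions = utils.get_positions(ids, query['positions'], decode_id=True)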
@@ -123,8 +123,8 @@ def findErrorLogs(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [ox.toAZ(i.id) for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)
@@ -133,8 +133,8 @@ def findNames(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)
@@ -254,8 +254,8 @@ def findPlaces(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
         response['data']['area'] = qs.aggregate(
@@ -85,7 +85,7 @@ def findSequences(request, data):
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
         qs = order_query(qs, query['sort'])
-        ids = [i['public_id'] for i in qs.values('public_id')]
+        ids = list(qs.values_list('public_id', flat=True))
         response['data']['positions'] = utils.get_positions(ids, data['positions'])
     else:
         response['data']['items'] = qs.count()
@@ -132,8 +132,8 @@ def findTitles(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)