only decode ids for found positions

merge document.view.get_positions into item.utils.get_positions
add flag to decode id before looking up in ids

Followup to 09ebbc9cc6
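
In practice the change means callers stop AZ-encoding every id in the result set up front and instead hand get_positions() the raw integer ids, letting it decode only the handful of requested position ids. A minimal sketch of the before/after call pattern, assuming a queryset qs and the ox/utils modules used in the views below (the exact positions list varies per view):

    # before: encode every id in qs, then look up the few requested ones
    ids = [ox.toAZ(i.id) for i in qs]
    response['data']['positions'] = utils.get_positions(ids, data['positions'])

    # after: fetch plain integer ids; get_positions decodes only the requested ids
    ids = list(qs.values_list('id', flat=True))
    response['data']['positions'] = utils.get_positions(ids, data['positions'], decode_id=True)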
j 2016-06-30 16:18:07 +02:00
parent 3d95d7013a
commit c7157d5001
12 changed files with 24 additions and 47 deletions

View file

@@ -640,7 +640,7 @@ def findMedia(request, data):
         qs = _order_query(qs, query['sort'])
         response['data']['positions'] = {}
-        ids = [j['oshash'] for j in qs.values('oshash')]
+        ids = list(qs.values_list('oshash', flat=True))
         response['data']['positions'] = utils.get_positions(ids, query['positions'])
     elif 'keys' in query:

View file

@@ -75,8 +75,8 @@ def findChangeLogs(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [ox.toAZ(i.id) for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)

View file

@@ -165,33 +165,6 @@ def parse_query(data, user):
     return query
 
 
-def get_positions(qs, query_positions):
-    '''
-    qs: a QuerySet
-    query_positions: a list of AZ ids
-
-    TODO: merge this with item.utils.get_positions. The win is to fetch
-    only the integer IDs and convert the (smaller) set of query_positions to
-    ints, rather than fetch all keys for everything in qs (expected to be many
-    orders of magnitude larger), ignore most of it, and convert those ids to
-    strings.
-
-    Returns:
-        {
-            i: index of i in qs
-            for i in query_positions
-        }
-    '''
-    ids = list(qs.values_list('id', flat=True))
-    ret = {}
-    for i in query_positions:
-        try:
-            ret[i] = ids.index(ox.fromAZ(i))
-        except:
-            pass
-    return ret
-
-
 def findDocuments(request, data):
     '''
     Finds documents for a given query
@@ -225,7 +198,8 @@ def findDocuments(request, data):
         #FIXME: actually implement position requests
         response['data']['position'] = 0
     elif 'positions' in data:
-        response['data']['positions'] = get_positions(qs, query['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         r = qs.aggregate(
             Sum('size')

View file

@@ -234,8 +234,8 @@ def findEntities(request, data):
         #FIXME: actually implement position requests
         response['data']['position'] = 0
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, query['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)

View file

@@ -206,8 +206,8 @@ def findEvents(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()

View file

@@ -61,7 +61,7 @@ def sort_title(title):
     title = re.sub(u'[\'!¿¡,\.;\-"\:\*\[\]]', '', title)
     return title.strip()
 
-def get_positions(ids, pos):
+def get_positions(ids, pos, decode_id=False):
     '''
     >>> get_positions([1,2,3,4], [2,4])
     {2: 1, 4: 3}
@@ -69,7 +69,10 @@ def get_positions(ids, pos):
     positions = {}
     for i in pos:
         try:
-            positions[i] = ids.index(i)
+            if decode_id:
+                positions[i] = ids.index(ox.fromAZ(i))
+            else:
+                positions[i] = ids.index(i)
         except:
             pass
     return positions
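
Rough usage sketch of the merged helper: with decode_id=False it behaves exactly like the doctest above, and with decode_id=True each requested id is ox.fromAZ-decoded before the index lookup (this assumes python-ox's toAZ/fromAZ round-trip, as used elsewhere in the codebase):

    import ox

    ids = [1, 2, 3, 4]                           # integer ids, e.g. values_list('id', flat=True)
    get_positions(ids, [2, 4])                   # -> {2: 1, 4: 3}
    wanted = [ox.toAZ(2), ox.toAZ(4)]            # client-side ids arrive AZ-encoded
    get_positions(ids, wanted, decode_id=True)   # -> {wanted[0]: 1, wanted[1]: 3}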

View file

@@ -204,7 +204,7 @@ def find(request, data):
         response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in query:
         qs = _order_query(query['qs'], query['sort'])
-        ids = [j['public_id'] for j in qs.values('public_id')]
+        ids = list(qs.values_list('public_id', flat=True))
         response['data']['positions'] = utils.get_positions(ids, query['positions'])
     elif 'keys' in query:
         response['data']['items'] = []

View file

@@ -123,8 +123,8 @@ def findErrorLogs(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [ox.toAZ(i.id) for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)

View file

@@ -133,8 +133,8 @@ def findNames(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)

View file

@@ -254,8 +254,8 @@ def findPlaces(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
         response['data']['area'] = qs.aggregate(

View file

@@ -85,7 +85,7 @@ def findSequences(request, data):
         response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
         qs = order_query(qs, query['sort'])
-        ids = [i['public_id'] for i in qs.values('public_id')]
+        ids = list(qs.values_list('public_id', flat=True))
         response['data']['positions'] = utils.get_positions(ids, data['positions'])
     else:
         response['data']['items'] = qs.count()

View file

@@ -132,8 +132,8 @@ def findTitles(request, data):
         if qs.count() > 0:
             response['data']['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, data['positions'])
+        ids = list(qs.values_list('id', flat=True))
+        response['data']['positions'] = utils.get_positions(ids, query['positions'], decode_id=True)
     else:
         response['data']['items'] = qs.count()
     return render_to_json_response(response)