forked from 0x2620/pandora
Merge remote-tracking branch 'wjt/2935-findDocuments-performance'
This commit is contained in:
commit
3d95d7013a
3 changed files with 38 additions and 8 deletions
|
@ -39,14 +39,18 @@ def parseCondition(condition, user, item=None):
|
|||
|
||||
|
||||
def buildCondition(k, op, v):
|
||||
import entity.models
|
||||
if k == 'id':
|
||||
v = ox.fromAZ(v)
|
||||
return Q(**{k: v})
|
||||
if isinstance(v, bool):
|
||||
key = k
|
||||
elif k == 'entity':
|
||||
entity_key, v = entity.managers.namePredicate(op, v)
|
||||
key = 'entities__' + entity_key
|
||||
entity_key, entity_v = entity.managers.namePredicate(op, v)
|
||||
key = 'id__in'
|
||||
v = entity.models.DocumentProperties.objects.filter(**{
|
||||
'entity__' + entity_key: entity_v
|
||||
}).values_list('document_id', flat=True)
|
||||
else:
|
||||
key = k + get_operator(op, 'istr')
|
||||
key = str(key)
|
||||
|
|
|
@ -165,6 +165,33 @@ def parse_query(data, user):
|
|||
return query
|
||||
|
||||
|
||||
def get_positions(qs, query_positions):
    '''
    Map each requested AZ id to its index within qs.

    qs: a QuerySet
    query_positions: a list of AZ ids

    Ids that are not present in qs (or are not valid AZ ids) are
    silently omitted from the result.

    TODO: merge this with item.utils.get_positions. The win is to fetch
    only the integer IDs and convert the (smaller) set of query_positions to
    ints, rather than fetch all keys for everything in qs (expected to be many
    orders of magnitude larger), ignore most of it, and convert those ids to
    strings.

    Returns:
        {
            i: index of i in qs
            for i in query_positions
        }
    '''
    # Build an id -> position map once, so each requested id is an O(1)
    # dict lookup instead of list.index()'s O(n) scan per id.
    index_of = {
        id_: n for n, id_ in enumerate(qs.values_list('id', flat=True))
    }
    ret = {}
    for i in query_positions:
        try:
            # NOTE(review): assumes ox.fromAZ raises ValueError on a
            # malformed AZ id — confirm against the ox library.
            ret[i] = index_of[ox.fromAZ(i)]
        except (KeyError, ValueError):
            # Not in qs or not a valid id: skip, matching the original
            # best-effort behavior (but no longer a bare `except:` that
            # would also swallow KeyboardInterrupt/SystemExit).
            pass
    return ret
|
||||
|
||||
|
||||
def findDocuments(request, data):
|
||||
'''
|
||||
Finds documents for a given query
|
||||
|
@ -198,8 +225,7 @@ def findDocuments(request, data):
|
|||
#FIXME: actually implement position requests
|
||||
response['data']['position'] = 0
|
||||
elif 'positions' in data:
|
||||
ids = [i.get_id() for i in qs]
|
||||
response['data']['positions'] = utils.get_positions(ids, query['positions'])
|
||||
response['data']['positions'] = get_positions(qs, query['positions'])
|
||||
else:
|
||||
r = qs.aggregate(
|
||||
Sum('size')
|
||||
|
|
|
@ -89,7 +89,7 @@ pandora.chunkupload = function(options) {
|
|||
}
|
||||
}, false);
|
||||
request.addEventListener('error', function (evt) {
|
||||
that.status = 'uplaod failed';
|
||||
that.status = 'upload failed';
|
||||
that.progress = -1;
|
||||
that.responseText = evt.target.responseText;
|
||||
done();
|
||||
|
@ -176,7 +176,7 @@ pandora.chunkupload = function(options) {
|
|||
// failed to upload, try again in 5 second
|
||||
retries++;
|
||||
if (maxRetry > 0 && retries > maxRetry) {
|
||||
that.status = 'uplaod failed';
|
||||
that.status = 'upload failed';
|
||||
that.progress = -1;
|
||||
done();
|
||||
} else {
|
||||
|
@ -195,7 +195,7 @@ pandora.chunkupload = function(options) {
|
|||
// failed to upload, try again in 3 second
|
||||
retries++;
|
||||
if (maxRetry > 0 && retries > maxRetry) {
|
||||
that.status = 'uplaod failed';
|
||||
that.status = 'upload failed';
|
||||
that.progress = -1;
|
||||
done();
|
||||
} else {
|
||||
|
|
Loading…
Reference in a new issue