Merge remote-tracking branch 'wjt/2935-findDocuments-performance'

commit 3d95d7013a
3 changed files with 38 additions and 8 deletions
@@ -39,14 +39,18 @@ def parseCondition(condition, user, item=None):
 def buildCondition(k, op, v):
+    import entity.models
     if k == 'id':
         v = ox.fromAZ(v)
         return Q(**{k: v})
     if isinstance(v, bool):
         key = k
     elif k == 'entity':
-        entity_key, v = entity.managers.namePredicate(op, v)
-        key = 'entities__' + entity_key
+        entity_key, entity_v = entity.managers.namePredicate(op, v)
+        key = 'id__in'
+        v = entity.models.DocumentProperties.objects.filter(**{
+            'entity__' + entity_key: entity_v
+        }).values_list('document_id', flat=True)
     else:
         key = k + get_operator(op, 'istr')
     key = str(key)
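
The effect of this hunk: the entity predicate no longer filters through the entities__ join, but resolves matching document ids in a lazy subquery and filters on id__in. A minimal sketch of the two query shapes, using the same model and helper names as the diff (Django ORM; the wrapper function names are hypothetical):

    from django.db.models import Q
    import entity.managers
    import entity.models

    def entity_condition_old(op, v):
        # Before: one Q object that drags the document<->entity join into
        # every combined query built by parseCondition.
        entity_key, v = entity.managers.namePredicate(op, v)
        return Q(**{'entities__' + entity_key: v})

    def entity_condition_new(op, v):
        # After: resolve matching document ids in a lazy subquery,
        # then filter documents by primary key only.
        entity_key, entity_v = entity.managers.namePredicate(op, v)
        ids = entity.models.DocumentProperties.objects.filter(**{
            'entity__' + entity_key: entity_v
        }).values_list('document_id', flat=True)
        return Q(id__in=ids)

The id__in form keeps the join out of the rest of the query, and can also avoid the duplicated result rows that filtering across a multi-valued relation may introduce.
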
@@ -165,6 +165,33 @@ def parse_query(data, user):
     return query
 
 
+def get_positions(qs, query_positions):
+    '''
+    qs: a QuerySet
+    query_positions: a list of AZ ids
+
+    TODO: merge this with item.utils.get_positions. The win is to fetch
+    only the integer IDs and convert the (smaller) set of query_positions to
+    ints, rather than fetch all keys for everything in qs (expected to be many
+    orders of magnitude larger), ignore most of it, and convert those ids to
+    strings.
+
+    Returns:
+    {
+        i: index of i in qs
+        for i in query_positions
+    }
+    '''
+    ids = list(qs.values_list('id', flat=True))
+    ret = {}
+    for i in query_positions:
+        try:
+            ret[i] = ids.index(ox.fromAZ(i))
+        except:
+            pass
+    return ret
+
+
 def findDocuments(request, data):
     '''
     Finds documents for a given query
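
The docstring's TODO already describes the cheaper follow-up: decode the small list of requested AZ ids to integers, rather than converting ids the other way around. A sketch of that variant (hypothetical helper name; it assumes ox.fromAZ raises on a malformed id, which the bare except above also guards against):

    import ox

    def get_positions_by_int(qs, query_positions):
        # Decode the (small) set of requested AZ ids to ints up front;
        # malformed ids are skipped, like the bare except above.
        wanted = {}
        for i in query_positions:
            try:
                wanted[ox.fromAZ(i)] = i
            except ValueError:
                pass
        # One pass over integer primary keys; no per-row string conversion,
        # and no repeated list.index() scans.
        ret = {}
        for index, pk in enumerate(qs.values_list('id', flat=True)):
            if pk in wanted:
                ret[wanted[pk]] = index
        return ret
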
@@ -198,8 +225,7 @@ def findDocuments(request, data):
         #FIXME: actually implement position requests
         response['data']['position'] = 0
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, query['positions'])
+        response['data']['positions'] = get_positions(qs, query['positions'])
     else:
         r = qs.aggregate(
             Sum('size')
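
For context on this call site: 'positions' in the request is a list of AZ ids, and the response maps each one to its index in the ordered result set. The old path instantiated every Document in qs just to call get_id() on it; the new path fetches integer ids only. A hypothetical round trip (the ids and payload values are invented for illustration):

    # Hypothetical findDocuments request data:
    data = {
        'query': {'conditions': [], 'operator': '&'},
        'positions': ['ABC', 'XYZ'],
    }
    # Expected shape of the reply, per get_positions above:
    # response['data']['positions'] == {'ABC': 0, 'XYZ': 42}
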
@@ -89,7 +89,7 @@ pandora.chunkupload = function(options) {
             }
         }, false);
         request.addEventListener('error', function (evt) {
-            that.status = 'uplaod failed';
+            that.status = 'upload failed';
             that.progress = -1;
             that.responseText = evt.target.responseText;
             done();
@@ -100,7 +100,7 @@ pandora.chunkupload = function(options) {
             done();
         }, false);
         var formData = new FormData();
 
         Object.keys(options.data).forEach(function(key) {
             formData.append(key, options.data[key]);
         });
@@ -176,7 +176,7 @@ pandora.chunkupload = function(options) {
                 // failed to upload, try again in 5 second
                 retries++;
                 if (maxRetry > 0 && retries > maxRetry) {
-                    that.status = 'uplaod failed';
+                    that.status = 'upload failed';
                     that.progress = -1;
                     done();
                 } else {
@@ -195,7 +195,7 @@ pandora.chunkupload = function(options) {
                 // failed to upload, try again in 3 second
                 retries++;
                 if (maxRetry > 0 && retries > maxRetry) {
-                    that.status = 'uplaod failed';
+                    that.status = 'upload failed';
                     that.progress = -1;
                     done();
                 } else {