diff --git a/pandora/document/managers.py b/pandora/document/managers.py
index d0334ea2..1530bde8 100644
--- a/pandora/document/managers.py
+++ b/pandora/document/managers.py
@@ -39,14 +39,18 @@ def parseCondition(condition, user, item=None):
 
 
 def buildCondition(k, op, v):
+    import entity.models
     if k == 'id':
         v = ox.fromAZ(v)
         return Q(**{k: v})
     if isinstance(v, bool):
         key = k
     elif k == 'entity':
-        entity_key, v = entity.managers.namePredicate(op, v)
-        key = 'entities__' + entity_key
+        entity_key, entity_v = entity.managers.namePredicate(op, v)
+        key = 'id__in'
+        v = entity.models.DocumentProperties.objects.filter(**{
+            'entity__' + entity_key: entity_v
+        }).values_list('document_id', flat=True)
     else:
         key = k + get_operator(op, 'istr')
     key = str(key)
diff --git a/pandora/document/views.py b/pandora/document/views.py
index 66a01e59..ab739f62 100644
--- a/pandora/document/views.py
+++ b/pandora/document/views.py
@@ -165,6 +165,33 @@ def parse_query(data, user):
     return query
 
 
+def get_positions(qs, query_positions):
+    '''
+    qs: a QuerySet
+    query_positions: a list of AZ ids
+
+    TODO: merge this with item.utils.get_positions. The win is to fetch
+    only the integer IDs and convert the (smaller) set of query_positions to
+    ints, rather than fetch all keys for everything in qs (expected to be many
+    orders of magnitude larger), ignore most of it, and convert those ids to
+    strings.
+
+    Returns:
+    {
+        i: index of i in qs
+        for i in query_positions
+    }
+    '''
+    ids = list(qs.values_list('id', flat=True))
+    ret = {}
+    for i in query_positions:
+        try:
+            ret[i] = ids.index(ox.fromAZ(i))
+        except Exception:
+            pass
+    return ret
+
+
 def findDocuments(request, data):
     '''
     Finds documents for a given query
@@ -198,8 +225,7 @@ def findDocuments(request, data):
         #FIXME: actually implement position requests
         response['data']['position'] = 0
     elif 'positions' in data:
-        ids = [i.get_id() for i in qs]
-        response['data']['positions'] = utils.get_positions(ids, query['positions'])
+        response['data']['positions'] = get_positions(qs, query['positions'])
     else:
         r = qs.aggregate(
             Sum('size')
diff --git a/static/js/chunkupload.js b/static/js/chunkupload.js
index ba83a096..2d37d807 100644
--- a/static/js/chunkupload.js
+++ b/static/js/chunkupload.js
@@ -89,7 +89,7 @@ pandora.chunkupload = function(options) {
             }
         }, false);
         request.addEventListener('error', function (evt) {
-            that.status = 'uplaod failed';
+            that.status = 'upload failed';
             that.progress = -1;
             that.responseText = evt.target.responseText;
             done();
@@ -100,7 +100,7 @@ pandora.chunkupload = function(options) {
             done();
         }, false);
         var formData = new FormData();
-        
+
         Object.keys(options.data).forEach(function(key) {
             formData.append(key, options.data[key]);
         });
@@ -176,7 +176,7 @@ pandora.chunkupload = function(options) {
                     // failed to upload, try again in 5 second
                     retries++;
                     if (maxRetry > 0 && retries > maxRetry) {
-                        that.status = 'uplaod failed';
+                        that.status = 'upload failed';
                         that.progress = -1;
                         done();
                     } else {
@@ -195,7 +195,7 @@ pandora.chunkupload = function(options) {
                     // failed to upload, try again in 3 second
                     retries++;
                     if (maxRetry > 0 && retries > maxRetry) {
-                        that.status = 'uplaod failed';
+                        that.status = 'upload failed';
                         that.progress = -1;
                         done();
                     } else {