Cache serialized entities when fetching many annotations

For a scene with ~5600 annotations, of which ~3100 are entities, this
cuts fetching the scene from 12 seconds to 2 seconds.
Author: Will Thompson, 2015-09-14 14:08:02 +02:00 (committed by j)
commit 8759b569da
3 changed files with 42 additions and 11 deletions
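The change threads a single dict through every a.json() call in one fetch, so an entity referenced by many annotations is serialized once and then looked up, instead of being re-serialized for each referencing annotation. Below is a minimal sketch of that pattern, assuming toy Entity/Annotation classes and json() methods; only the entity_cache argument mirrors the diff, everything else is a stand-in rather than pan.do/ra's actual models:

    # Toy stand-ins for the real models; only entity_cache mirrors the diff below.
    class Entity:
        def __init__(self, id, name):
            self.id, self.name = id, name

        def json(self):
            # Stands in for expensive serialization (extra queries, nested data).
            return {'id': self.id, 'name': self.name}

    class Annotation:
        def __init__(self, id, value, entity=None):
            self.id, self.value, self.entity = id, value, entity

        def json(self, user=None, entity_cache=None):
            if entity_cache is None:
                entity_cache = {}  # private fallback, so callers that pass nothing still work
            data = {'id': self.id, 'value': self.value}
            if self.entity is not None:
                if self.entity.id not in entity_cache:
                    entity_cache[self.entity.id] = self.entity.json()  # serialize only on a miss
                data['entity'] = entity_cache[self.entity.id]
            return data

    # One cache per fetch, shared across all annotations, as in get_layers() below.
    shared = Entity(1, 'person')
    annotations = [Annotation(i, 'note', shared) for i in range(5000)]
    entity_cache = {}
    layer = [a.json(entity_cache=entity_cache) for a in annotations]  # Entity.json() runs once

With a shared cache the entity part of the payload costs one serialization per distinct entity rather than one per referencing annotation, which is the saving the commit message measures.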

@@ -32,12 +32,14 @@ def get_layers(item, interval=None, user=None):
         start, end = interval
         qs = qs.filter(start__lt=end, end__gt=start)
+    entity_cache = {}
     for a in qs.order_by('start').select_related('user'):
-        if a.layer in private:
-            if a.user == user:
-                layers[a.layer].append(a.json(user=user))
-        else:
-            layers[a.layer].append(a.json(user=user))
+        if a.layer in private and a.user != user:
+            continue
+        layers[a.layer].append(a.json(user=user, entity_cache=entity_cache))
     return layers
@@ -107,7 +109,10 @@ class MetaClip:
         annotations = self.annotations.all()
         if qs:
             annotations = annotations.filter(qs)
-        j['annotations'] = [a.json(keys=['value', 'id', 'layer']) for a in annotations]
+        entity_cache = {}
+        j['annotations'] = [
+            a.json(keys=['value', 'id', 'layer'], entity_cache=entity_cache) for a in annotations
+        ]
         if 'layers' in keys:
             j['layers'] = self.get_layers()
         if 'cuts' in keys: