forked from 0x2620/pandora
Merge changes
This commit is contained in:
commit
5d43ed0585
8 changed files with 50 additions and 40 deletions
|
@ -134,6 +134,7 @@ class Annotation(models.Model):
|
|||
|
||||
def save(self, *args, **kwargs):
|
||||
from .tasks import update_matches
|
||||
async = kwargs.pop('async', False)
|
||||
|
||||
set_public_id = not self.id or not self.public_id
|
||||
layer = self.get_layer()
|
||||
|
@ -177,14 +178,16 @@ class Annotation(models.Model):
|
|||
'id': self.clip.id,
|
||||
self.layer: False
|
||||
}).update(**{self.layer: True})
|
||||
#update clip.findvalue
|
||||
# update clip.findvalue
|
||||
self.clip.save()
|
||||
|
||||
#editAnnotations needs to be in snyc
|
||||
# editAnnotations needs to be in sync
|
||||
# load_subtitles can not be in sync
|
||||
fn = update_matches.delay if async else update_matches
|
||||
if layer.get('type') == 'place' or layer.get('hasPlaces'):
|
||||
update_matches(self.id, 'place')
|
||||
fn(self.id, 'place')
|
||||
if layer.get('type') == 'event' or layer.get('hasEvents'):
|
||||
update_matches(self.id, 'event')
|
||||
fn(self.id, 'event')
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
|
|
|
@ -18,10 +18,13 @@ def update_matches(id, type):
|
|||
elif type == 'event':
|
||||
from event.models import Event as Model
|
||||
|
||||
a = Annotation.objects.get(pk=id)
|
||||
try:
|
||||
a = Annotation.objects.get(pk=id)
|
||||
except Annotation.DoesNotExist:
|
||||
return
|
||||
a_matches = getattr(a, type == 'place' and 'places' or 'events')
|
||||
|
||||
#remove undefined matches that only have this annotation
|
||||
# remove undefined matches that only have this annotation
|
||||
for p in a_matches.filter(defined=False).exclude(name=a.value):
|
||||
if p.annotations.exclude(id=id).count() == 0:
|
||||
p.delete()
|
||||
|
@ -33,8 +36,7 @@ def update_matches(id, type):
|
|||
if a.findvalue:
|
||||
names = {}
|
||||
for n in Model.objects.all().values('id', 'name', 'alternativeNames'):
|
||||
names[n['id']] = [ox.decode_html(x)
|
||||
for x in (n['name'],) + n['alternativeNames']]
|
||||
names[n['id']] = [ox.decode_html(x) for x in (n['name'],) + n['alternativeNames']]
|
||||
value = a.findvalue.lower()
|
||||
|
||||
current = [p.id for p in a_matches.all()]
|
||||
|
@ -49,19 +51,19 @@ def update_matches(id, type):
|
|||
new = []
|
||||
for i in matches:
|
||||
p = Model.objects.get(pk=i)
|
||||
#only add places/events that did not get added as a super match
|
||||
#i.e. only add The Paris Region and not Paris
|
||||
# only add places/events that did not get added as a super match
|
||||
# i.e. only add The Paris Region and not Paris
|
||||
if not filter(lambda n: n in name_matches,
|
||||
[n.lower() for n in p.get_super_matches()]):
|
||||
new.append(i)
|
||||
removed = list(filter(lambda p: p not in new, current))
|
||||
added = list(filter(lambda p: p not in current, new))
|
||||
update = removed + added
|
||||
removed = set(filter(lambda p: p not in new, current))
|
||||
added = set(filter(lambda p: p not in current, new))
|
||||
update = list(removed | added)
|
||||
if update:
|
||||
for e in Model.objects.filter(id__in=update):
|
||||
e.update_matches(Annotation.objects.filter(pk=a.id))
|
||||
else:
|
||||
#annotation has no value, remove all exisint matches
|
||||
# annotation has no value, remove all existing matches
|
||||
for e in a_matches.all():
|
||||
e.update_matches(Annotation.objects.filter(pk=a.id))
|
||||
|
||||
|
|
|
@ -1632,7 +1632,7 @@ class Item(models.Model):
|
|||
value=value,
|
||||
user=user
|
||||
)
|
||||
annotation.save()
|
||||
annotation.save(async=True)
|
||||
# otherwise add empty 5 seconds annotation every minute
|
||||
if not subtitles_added:
|
||||
start = offset and int(offset / 60) * 60 + 60 or 0
|
||||
|
@ -1647,7 +1647,7 @@ class Item(models.Model):
|
|||
value='',
|
||||
user=user
|
||||
)
|
||||
annotation.save()
|
||||
annotation.save(async=True)
|
||||
offset += f.duration
|
||||
# remove left over clips without annotations
|
||||
Clip.objects.filter(item=self, annotations__id=None).delete()
|
||||
|
|
|
@ -9,6 +9,16 @@ import re
|
|||
from PIL import Image
|
||||
from ox.utils import json
|
||||
|
||||
__all__ = ['join_tiles', 'split_tiles']
|
||||
|
||||
def divide(num, by):
    """Split num into int(by) integer parts that sum to num.

    The remainder is distributed over the trailing parts, e.g.
    divide(100, 3) -> [33, 33, 34].
    """
    base = int(num / by)
    rem = num % by
    # bool is an int subclass, so the comparison adds 0 or 1 per slot;
    # slots past index (by - 1 - rem) each absorb one unit of remainder.
    return [base + (idx > by - 1 - rem) for idx in range(int(by))]
|
||||
|
||||
def join_tiles(source_paths, durations, target_path):
|
||||
'''
|
||||
|
@ -17,15 +27,6 @@ def join_tiles(source_paths, durations, target_path):
|
|||
be written to target_path.
|
||||
'''
|
||||
|
||||
def divide(num, by):
|
||||
# divide(100, 3) -> [33, 33, 34]
|
||||
arr = []
|
||||
div = int(num / by)
|
||||
mod = num % by
|
||||
for i in range(int(by)):
|
||||
arr.append(div + (i > by - 1 - mod))
|
||||
return arr
|
||||
|
||||
def get_file_info(file_name):
|
||||
for mode in modes:
|
||||
if re.match('^timeline' + mode + '64p\d+\.jpg', file_name):
|
||||
|
@ -86,11 +87,12 @@ def join_tiles(source_paths, durations, target_path):
|
|||
#print(image_file)
|
||||
if mode == full_tile_mode:
|
||||
# render full tile
|
||||
resized = data['target_images']['large'].resize((
|
||||
data['full_tile_widths'][0], large_tile_h
|
||||
), Image.ANTIALIAS)
|
||||
data['target_images']['full'].paste(resized, (data['full_tile_offset'], 0))
|
||||
data['full_tile_offset'] += data['full_tile_widths'][0]
|
||||
if data['full_tile_widths'][0]:
|
||||
resized = data['target_images']['large'].resize((
|
||||
data['full_tile_widths'][0], large_tile_h
|
||||
), Image.ANTIALIAS)
|
||||
data['target_images']['full'].paste(resized, (data['full_tile_offset'], 0))
|
||||
data['full_tile_offset'] += data['full_tile_widths'][0]
|
||||
data['full_tile_widths'] = data['full_tile_widths'][1:]
|
||||
large_tile_i += 1
|
||||
# open next large tile
|
||||
|
|
|
@ -107,13 +107,13 @@ class Place(models.Model):
|
|||
numberofmatches = -1
|
||||
for a in annotations.exclude(id__in=matches):
|
||||
self.annotations.remove(a)
|
||||
#annotations of type place always need a place
|
||||
# annotations of type place always need a place
|
||||
if a.get_layer().get('type') == 'place' and a.places.count() == 0:
|
||||
a.places.add(Place.get_or_create(a.value))
|
||||
for p in a.places.exclude(id=self.id):
|
||||
p.update_matches()
|
||||
for a in matches.exclude(id__in=self.annotations.all()):
|
||||
#need to check again since editEvent might have been called again
|
||||
# need to check again since editEvent might have been called again
|
||||
if self.annotations.filter(id=a.id).count() == 0:
|
||||
self.annotations.add(a)
|
||||
ids = list(set([a['item_id'] for a in self.annotations.all().values('item_id')]))
|
||||
|
|
|
@ -66,7 +66,8 @@ class Task(models.Model):
|
|||
task, created = cls.objects.get_or_create(item=item)
|
||||
if task.update(save=False) or created:
|
||||
task.user = user
|
||||
task.started = datetime.now()
|
||||
if not task.started:
|
||||
task.started = datetime.now()
|
||||
task.ended = None
|
||||
task.save()
|
||||
|
||||
|
@ -83,10 +84,10 @@ class Task(models.Model):
|
|||
status = 'pending'
|
||||
elif self.item.files.filter(uploading=True).count():
|
||||
status = 'uploading'
|
||||
elif self.item.files.filter(queued=True).count():
|
||||
status = 'queued'
|
||||
elif self.item.files.filter(encoding=True).count():
|
||||
status = 'processing'
|
||||
elif self.item.files.filter(queued=True).count():
|
||||
status = 'queued'
|
||||
elif self.item.files.filter(failed=True).count():
|
||||
status = 'failed'
|
||||
elif self.item.rendered:
|
||||
|
|
|
@ -37,7 +37,6 @@ def parseCondition(condition, user):
|
|||
else:
|
||||
key = k + get_operator(op, 'istr')
|
||||
key = str(key)
|
||||
|
||||
q = Q(**{key: v})
|
||||
if exclude:
|
||||
q = ~q
|
||||
|
@ -53,7 +52,7 @@ def parseConditions(conditions, operator, user):
|
|||
for condition in conditions:
|
||||
if 'conditions' in condition:
|
||||
q = parseConditions(condition['conditions'],
|
||||
condition.get('operator', '&'), user)
|
||||
condition.get('operator', '&'), user)
|
||||
if q:
|
||||
conn.append(q)
|
||||
pass
|
||||
|
|
|
@ -47,7 +47,7 @@ class SessionData(models.Model):
|
|||
|
||||
objects = managers.SessionDataManager()
|
||||
|
||||
groupssort = models.CharField(default=None,blank=True,null=True, max_length=255)
|
||||
groupssort = models.CharField(default=None, blank=True, null=True, max_length=255)
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s" % self.session_key
|
||||
|
@ -141,6 +141,9 @@ class SessionData(models.Model):
|
|||
|
||||
def json(self, keys=None, user=None):
|
||||
ua = ox.parse_useragent(self.useragent or '')
|
||||
if ua['robot']['name'] and self.level != -1:
|
||||
self.level = -1
|
||||
self.save()
|
||||
j = {
|
||||
'browser': ua['browser']['string'],
|
||||
'disabled': False,
|
||||
|
@ -234,6 +237,7 @@ def get_ui(user_ui, user=None):
|
|||
ui = {}
|
||||
config = copy.deepcopy(settings.CONFIG)
|
||||
ui.update(config['user']['ui'])
|
||||
|
||||
def update_ui(ui, new):
|
||||
'''
|
||||
only update set keys in dicts
|
||||
|
@ -385,8 +389,7 @@ def has_capability(user, capability):
|
|||
else:
|
||||
level = user.profile.get_level()
|
||||
return level in settings.CONFIG['capabilities'][capability] \
|
||||
and settings.CONFIG['capabilities'][capability][level]
|
||||
|
||||
and settings.CONFIG['capabilities'][capability][level]
|
||||
|
||||
def merge_users(old, new):
|
||||
old.annotations.all().update(user=new)
|
||||
|
|
Loading…
Reference in a new issue