Merge changes

j 2016-09-05 18:17:06 +02:00
commit 5d43ed0585
8 changed files with 50 additions and 40 deletions

View file

@@ -134,6 +134,7 @@ class Annotation(models.Model):
     def save(self, *args, **kwargs):
         from .tasks import update_matches
+        async = kwargs.pop('async', False)
         set_public_id = not self.id or not self.public_id
         layer = self.get_layer()
@@ -181,10 +182,12 @@ class Annotation(models.Model):
                 self.clip.save()

         # editAnnotations needs to be in snyc
+        # load_subtitles can not be in sync
+        fn = update_matches.delay if async else update_matches
         if layer.get('type') == 'place' or layer.get('hasPlaces'):
-            update_matches(self.id, 'place')
+            fn(self.id, 'place')
         if layer.get('type') == 'event' or layer.get('hasEvents'):
-            update_matches(self.id, 'event')
+            fn(self.id, 'event')

     def delete(self, *args, **kwargs):
         with transaction.atomic():
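
The change above switches Annotation.save() between calling update_matches() inline and queueing it with Celery's .delay(). A minimal sketch of that dispatch pattern, assuming a Celery shared_task; 'defer' stands in for the commit's 'async' keyword, which only works as a name on Python versions before 3.7:

    from celery import shared_task

    @shared_task
    def update_matches(annotation_id, layer_type):
        # stand-in body; the real task is imported from .tasks in the model above
        print('updating %s matches for annotation %s' % (layer_type, annotation_id))

    def dispatch_update(annotation_id, layer_type, defer=False):
        # 'defer' plays the role of the 'async' kwarg popped in save()
        fn = update_matches.delay if defer else update_matches
        fn(annotation_id, layer_type)

    dispatch_update(1, 'place')                # runs inline
    # dispatch_update(1, 'place', defer=True)  # queued; needs a running broker/worker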

View file

@@ -18,7 +18,10 @@ def update_matches(id, type):
     elif type == 'event':
         from event.models import Event as Model

-    a = Annotation.objects.get(pk=id)
+    try:
+        a = Annotation.objects.get(pk=id)
+    except Annotation.DoesNotExist:
+        return

     a_matches = getattr(a, type == 'place' and 'places' or 'events')
     # remove undefined matches that only have this annotation
@@ -33,8 +36,7 @@ def update_matches(id, type):
     if a.findvalue:
         names = {}
         for n in Model.objects.all().values('id', 'name', 'alternativeNames'):
-            names[n['id']] = [ox.decode_html(x)
-                              for x in (n['name'],) + n['alternativeNames']]
+            names[n['id']] = [ox.decode_html(x) for x in (n['name'],) + n['alternativeNames']]

         value = a.findvalue.lower()
         current = [p.id for p in a_matches.all()]
@@ -54,9 +56,9 @@ def update_matches(id, type):
                 if not filter(lambda n: n in name_matches,
                               [n.lower() for n in p.get_super_matches()]):
                     new.append(i)
-        removed = list(filter(lambda p: p not in new, current))
-        added = list(filter(lambda p: p not in current, new))
-        update = removed + added
+        removed = set(filter(lambda p: p not in new, current))
+        added = set(filter(lambda p: p not in current, new))
+        update = list(removed | added)
         if update:
             for e in Model.objects.filter(id__in=update):
                 e.update_matches(Annotation.objects.filter(pk=a.id))
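
Moving from list(filter(...)) to set(filter(...)) deduplicates the ids and lets the two groups be combined with a set union instead of list concatenation; update is turned back into a list for the id__in lookup. A small worked example with plain integers standing in for model ids:

    current = [1, 2, 3]   # ids currently attached to the annotation
    new = [2, 3, 4, 4]    # ids matched in the new annotation value (with a duplicate)

    removed = set(filter(lambda p: p not in new, current))   # {1}
    added = set(filter(lambda p: p not in current, new))     # {4}
    update = list(removed | added)

    print(sorted(update))  # [1, 4]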

View file

@@ -1632,7 +1632,7 @@ class Item(models.Model):
                         value=value,
                         user=user
                     )
-                    annotation.save()
+                    annotation.save(async=True)
         # otherwise add empty 5 seconds annotation every minute
         if not subtitles_added:
             start = offset and int(offset / 60) * 60 + 60 or 0
@@ -1647,7 +1647,7 @@ class Item(models.Model):
                         value='',
                         user=user
                     )
-                    annotation.save()
+                    annotation.save(async=True)
                 offset += f.duration
         # remove left over clips without annotations
         Clip.objects.filter(item=self, annotations__id=None).delete()

View file

@@ -9,13 +9,7 @@ import re
 from PIL import Image
 from ox.utils import json

 __all__ = ['join_tiles', 'split_tiles']

-def join_tiles(source_paths, durations, target_path):
-    '''
-    This is an implementation of a join_tiles function for new-style timelines.
-    Timelines of files will be read from source_paths, the timeline of the item will
-    be written to target_path.
-    '''

 def divide(num, by):
     # divide(100, 3) -> [33, 33, 34]
@@ -26,6 +20,13 @@ def join_tiles(source_paths, durations, target_path):
         arr.append(div + (i > by - 1 - mod))
     return arr

+
+def join_tiles(source_paths, durations, target_path):
+    '''
+    This is an implementation of a join_tiles function for new-style timelines.
+    Timelines of files will be read from source_paths, the timeline of the item will
+    be written to target_path.
+    '''
     def get_file_info(file_name):
         for mode in modes:
             if re.match('^timeline' + mode + '64p\d+\.jpg', file_name):
@@ -86,6 +87,7 @@ def join_tiles(source_paths, durations, target_path):
                 #print(image_file)
                 if mode == full_tile_mode:
                     # render full tile
-                    resized = data['target_images']['large'].resize((
-                        data['full_tile_widths'][0], large_tile_h
-                    ), Image.ANTIALIAS)
+                    if data['full_tile_widths'][0]:
+                        resized = data['target_images']['large'].resize((
+                            data['full_tile_widths'][0], large_tile_h
+                        ), Image.ANTIALIAS)
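
join_tiles() now follows the divide() helper it relies on. Only divide()'s comment and its last two lines appear in the hunks; a reconstruction of the full helper under the obvious reading of that comment, with the intermediate lines assumed:

    def divide(num, by):
        # divide(100, 3) -> [33, 33, 34]
        div = int(num / by)   # assumed: base size of each part
        mod = num % by        # assumed: remainder, pushed onto the last parts
        arr = []
        for i in range(by):
            arr.append(div + (i > by - 1 - mod))  # the boolean adds 0 or 1
        return arr

    print(divide(100, 3))  # [33, 33, 34]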

View file

@@ -66,6 +66,7 @@ class Task(models.Model):
         task, created = cls.objects.get_or_create(item=item)
         if task.update(save=False) or created:
             task.user = user
-            task.started = datetime.now()
+            if not task.started:
+                task.started = datetime.now()
             task.ended = None
             task.save()
@@ -83,10 +84,10 @@ class Task(models.Model):
             status = 'pending'
         elif self.item.files.filter(uploading=True).count():
             status = 'uploading'
-        elif self.item.files.filter(queued=True).count():
-            status = 'queued'
         elif self.item.files.filter(encoding=True).count():
             status = 'processing'
+        elif self.item.files.filter(queued=True).count():
+            status = 'queued'
         elif self.item.files.filter(failed=True).count():
             status = 'failed'
         elif self.item.rendered:
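
Swapping the queued and encoding branches changes which label wins when both flags are set: such an item now reports 'processing' instead of 'queued', since the first matching elif wins. A condensed, hypothetical version of the status chain showing the effect:

    def status_for(uploading, encoding, queued, failed):
        # hypothetical stand-in for the status computation above
        if uploading:
            return 'uploading'
        elif encoding:
            return 'processing'
        elif queued:
            return 'queued'
        elif failed:
            return 'failed'
        return 'rendered'

    print(status_for(False, True, True, False))  # 'processing' (was 'queued' before the reorder)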

View file

@@ -37,7 +37,6 @@ def parseCondition(condition, user):
     else:
         key = k + get_operator(op, 'istr')
     key = str(key)
-
     q = Q(**{key: v})
     if exclude:
         q = ~q
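
The surrounding code builds a Django Q object from a dynamically composed lookup key and negates it for exclusion conditions. A small sketch of that pattern with a hypothetical key:

    from django.db.models import Q

    key = 'title__icontains'   # hypothetical lookup; the real key comes from k + get_operator(op, 'istr')
    v = 'berlin'
    exclude = True

    q = Q(**{key: v})
    if exclude:
        q = ~q                 # negated Q, later combined into the queryset filter
    print(q)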

View file

@@ -141,6 +141,9 @@ class SessionData(models.Model):

     def json(self, keys=None, user=None):
         ua = ox.parse_useragent(self.useragent or '')
+        if ua['robot']['name'] and self.level != -1:
+            self.level = -1
+            self.save()
         j = {
             'browser': ua['browser']['string'],
             'disabled': False,
@@ -234,6 +237,7 @@ def get_ui(user_ui, user=None):
     ui = {}
     config = copy.deepcopy(settings.CONFIG)
     ui.update(config['user']['ui'])
+
     def update_ui(ui, new):
         '''
         only update set keys in dicts
@@ -387,7 +391,6 @@ def has_capability(user, capability):
     return level in settings.CONFIG['capabilities'][capability] \
         and settings.CONFIG['capabilities'][capability][level]

-
 def merge_users(old, new):
     old.annotations.all().update(user=new)
     old.edits.all().update(user=new)
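
The SessionData.json() change above downgrades sessions whose user agent ox.parse_useragent() identifies as a robot by setting level to -1. A minimal sketch of that test in isolation, using only the keys the diff itself touches:

    import ox

    def is_robot(useragent):
        # same check as in SessionData.json(): a non-empty robot name flags the session
        ua = ox.parse_useragent(useragent or '')
        return bool(ua['robot']['name'])

    print(is_robot('Mozilla/5.0 (compatible; Googlebot/2.1)'))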