diff --git a/pandora/archive/tasks.py b/pandora/archive/tasks.py
new file mode 100644
index 000000000..c6a9f6e16
--- /dev/null
+++ b/pandora/archive/tasks.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+# vi:si:et:sw=4:sts=4:ts=4
+from datetime import timedelta
+import os
+import re
+
+from celery.decorators import task, periodic_task
+
+from item.utils import oxid, parse_path
+from item.models import get_item
+import item.tasks
+
+import models
+
+@task(ignore_result=True, queue='default')
+def update_files(user, volume, files):
+    user = models.User.objects.get(username=user)
+    volume, created = models.Volume.objects.get_or_create(user=user, name=volume)
+    all_files = []
+    for f in files:
+        path = f['path']
+        folder = path.split('/')
+        name = folder.pop()
+        if folder and folder[-1] in ('Extras', 'Versions', 'DVDs'):
+            name = '/'.join([folder.pop(), name])
+        folder = '/'.join(folder)
+        #print folder
+        #print name
+        f['folder'] = folder
+        f['name'] = name
+        oshash = f['oshash']
+        all_files.append(oshash)
+
+        same_folder = models.Instance.objects.filter(folder=folder, volume=volume)
+        if same_folder.count() > 0:
+            item = same_folder[0].file.item
+        else:
+            item = None
+
+        path = os.path.join(folder, name)
+
+        instance = models.Instance.objects.filter(file__oshash=oshash, volume=volume)
+        if instance.count() > 0:
+            instance = instance[0]
+            updated = False
+            for key in ('mtime', 'name', 'folder'):
+                if f[key] != getattr(instance, key):
+                    setattr(instance, key, f[key])
+                    updated = True
+            if updated:
+                instance.save()
+        else:
+            #check if oshash is already known
+            file_objects = models.File.objects.filter(oshash=oshash)
+            if file_objects.count() > 0:
+                file_object = file_objects[0]
+            else:
+                #new oshash, add to database
+                if not item:
+                    item_info = parse_path(folder)
+                    item = get_item(item_info)
+                file_object = models.File()
+                file_object.oshash = oshash
+                file_object.name = name
+                file_object.item = item
+                file_object.save()
+            instance = models.Instance()
+            instance.volume = volume
+            instance.file = file_object
+            for key in ('mtime', 'name', 'folder'):
+                setattr(instance, key, f[key])
+            instance.save()
+
+    #remove deleted files
+    #FIXME: can this have any bad consequences? i.e. on the selection of used item files.
+    models.Instance.objects.filter(volume=volume).exclude(file__oshash__in=all_files).delete()
+
diff --git a/pandora/archive/views.py b/pandora/archive/views.py
index b476b6f33..fc466a92b 100644
--- a/pandora/archive/views.py
+++ b/pandora/archive/views.py
@@ -28,13 +28,14 @@
 from ox.django.http import HttpFileResponse
 from ox.django.views import task_status
 import ox
-import models
-
 from item.utils import oxid, parse_path
 from item.models import get_item
 import item.tasks
 
 from api.actions import actions
+import models
+import tasks
+
 @login_required_json
 def removeVolume(request):
     data = json.loads(request.POST['data'])
@@ -73,66 +74,10 @@ def update(request):
     response = json_response({'info': [], 'data': [], 'file': []})
     volume = None
     if 'files' in data:
-        volume, created = models.Volume.objects.get_or_create(user=user, name=data['volume'])
-        all_files = []
-        for f in data['files']:
-            path = f['path']
-            folder = path.split('/')
-            name = folder.pop()
-            if folder and folder[-1] in ('Extras', 'Versions', 'DVDs'):
-                name = '/'.join([folder.pop(), name])
-            folder = '/'.join(folder)
-            #print folder
-            #print name
-            f['folder'] = folder
-            f['name'] = name
-            oshash = f['oshash']
-            all_files.append(oshash)
-
-            same_folder = models.Instance.objects.filter(folder=folder, volume=volume)
-            if same_folder.count() > 0:
-                item = same_folder[0].file.item
-            else:
-                item = None
-
-            path = os.path.join(folder, name)
-
-            instance = models.Instance.objects.filter(file__oshash=oshash, volume=volume)
-            if instance.count()>0:
-                instance = instance[0]
-                updated = False
-                for key in ('mtime', 'name', 'folder'):
-                    if f[key] != getattr(instance, key):
-                        setattr(instance, key, f[key])
-                        updated=True
-                if updated:
-                    instance.save()
-            else:
-                #look if oshash is known
-                file_objects = models.File.objects.filter(oshash=oshash)
-                if file_objects.count() > 0:
-                    file_object = file_objects[0]
-                #new oshash, add to database
-                else:
-                    if not item:
-                        item_info = parse_path(folder)
-                        item = get_item(item_info)
-                    file_object = models.File()
-                    file_object.oshash = oshash
-                    file_object.name = name
-                    file_object.item = item
-                    file_object.save()
-                    response['data']['info'].append(oshash)
-                instance = models.Instance()
-                instance.volume = volume
-                instance.file = file_object
-                for key in ('mtime', 'name', 'folder'):
-                    setattr(instance, key, f[key])
-                instance.save()
-
-        #remove deleted files
-        #FIXME: can this have any bad consequences? i.e. on the selction of used item files.
-        models.Instance.objects.filter(volume=volume).exclude(file__oshash__in=all_files).delete()
+        #update files info asynchronously, this takes too long otherwise
+        #FIXME: how can client know if update is done? possibly with taskStatus?
+        t = tasks.update_files.delay(user.username, data['volume'], data['files'])
+        response['data']['taskId'] = t.task_id
 
     user_profile = user.get_profile()
     user_profile.files_updated = datetime.now()
@@ -234,8 +179,7 @@ def firefogg_upload(request):
         f.save()
         #FIXME: this fails badly if rabbitmq goes down
         try:
-            task_id = 'update_streams_' + f.item.itemId
-            t = item.tasks.update_streams.delay(f.item.itemId, task_id=task_id)
+            t = item.tasks.update_streams.delay(f.item.itemId)
             data['resultUrl'] = t.task_id
         except:
             pass
diff --git a/pandora/item/models.py b/pandora/item/models.py
index 50f853adf..34c3d40a8 100644
--- a/pandora/item/models.py
+++ b/pandora/item/models.py
@@ -572,6 +572,9 @@ class Item(models.Model):
         s.files = 0
         s.size = 0
+        s.color = int(sum(self.data.get('color', [])))
+        s.cuts = len(self.data.get('cuts', []))
+        s.cutsperminute = s.cuts / (s.duration/60) if s.duration else 0
 
         for key in ('title', 'language', 'country') + self.person_keys:
             setattr(s, '%s_desc'%key, getattr(s, key))
             if not getattr(s, key):
@@ -654,26 +657,26 @@ class Item(models.Model):
         cmd = []
         if os.path.exists(stream.video.path):
             os.unlink(stream.video.path)
-        if len(files) > 1:
+        elif not os.path.exists(os.path.dirname(stream.video.path)):
+            os.makedirs(os.path.dirname(stream.video.path))
+        if len(files.values()) > 1:
             for f in sorted(files):
                 cmd.append('+')
                 cmd.append(files[f])
-            if not os.path.exists(os.path.dirname(stream.video.path)):
-                os.makedirs(os.path.dirname(stream.video.path))
             cmd = [ 'mkvmerge', '-o', stream.video.path ] + cmd[1:]
             #print cmd
             p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             #p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
             p.wait()
         else:
-            os.symlink(files[0], stream.video.path)
+            os.symlink(files.values()[0], stream.video.path)
 
         stream.save()
         if 'video' in stream.info:
             extract.timeline(stream.video.path, self.timeline_prefix)
             self.stream_aspect = stream.info['video'][0]['width']/stream.info['video'][0]['height']
             self.data['cuts'] = extract.cuts(self.timeline_prefix)
-            self.data['average_color'] = extract.average_color(self.timeline_prefix)
+            self.data['color'] = extract.average_color(self.timeline_prefix)
             #extract.timeline_strip(self, self.data['cuts'], stream.info, self.timeline_prefix[:-8])
 
         stream.extract_derivatives()
@@ -830,6 +833,12 @@ class ItemSort(models.Model):
     filename = models.CharField(max_length=1024, blank=True, db_index=True)
     files = models.IntegerField(blank=True, db_index=True)
     size = models.BigIntegerField(blank=True, db_index=True)
+    color = models.IntegerField(blank=True, db_index=True)
+    saturation = models.IntegerField(blank=True, db_index=True)
+    brightness = models.IntegerField(blank=True, db_index=True)
+    cuts = models.IntegerField(blank=True, db_index=True)
+    cutsperminute = models.FloatField(blank=True, db_index=True)
+
     #required to move empty values to the bottom for both asc and desc sort
     title_desc = models.CharField(max_length=1000, db_index=True)
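
# -----------------------------------------------------------------------------
# Note (not part of the patch above): a standalone illustration of how
# update_files() in archive/tasks.py derives `folder` and `name` from the path
# reported by the client. The last path component becomes the file name, and an
# 'Extras', 'Versions' or 'DVDs' parent directory is folded into the name so
# those files stay grouped under the main item folder. The example paths are
# made up.

def split_path(path):
    folder = path.split('/')
    name = folder.pop()
    if folder and folder[-1] in ('Extras', 'Versions', 'DVDs'):
        name = '/'.join([folder.pop(), name])
    return '/'.join(folder), name

assert split_path('T/Title (2000)/Title.avi') == ('T/Title (2000)', 'Title.avi')
assert split_path('T/Title (2000)/Extras/Making Of.avi') == ('T/Title (2000)', 'Extras/Making Of.avi')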
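
# -----------------------------------------------------------------------------
# Note (not part of the patch above): a minimal sketch of the taskStatus idea
# raised in the FIXME in archive/views.py. The client keeps the taskId returned
# by update() in response['data']['taskId'] and polls until the celery task
# reports a final state. Only celery's AsyncResult is used; the helper name
# task_state() and the payload layout are assumptions, and a real endpoint
# would wrap this in a view (e.g. a taskStatus API action). For the state to be
# visible at all, update_files must not ignore its result: with
# ignore_result=True celery stores nothing and AsyncResult stays PENDING.

from celery.result import AsyncResult

def task_state(task_id):
    #build the payload a polling client could be given
    result = AsyncResult(task_id)
    return {
        'taskId': task_id,
        'status': result.status,   #PENDING, STARTED, SUCCESS, FAILURE, ...
        'ready': result.ready(),   #True once the task has finished
    }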
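
# -----------------------------------------------------------------------------
# Note (not part of the patch above): the new ItemSort columns are denormalized
# and indexed so item lists can be ordered by timeline-derived values in a
# single query; saturation and brightness are declared but not yet filled in by
# update_sort(). A sketch, assuming ItemSort is reachable from Item via the
# `sort` relation.

from item.models import Item

#items with the most cuts per minute first
fastest_cut = Item.objects.order_by('-sort__cutsperminute')

#darkest to brightest, using the summed average color stored by update_sort()
by_color = Item.objects.order_by('sort__color')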