pandora/pandora/archive/tasks.py


# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
import os
from glob import glob

from celery.task import task
from django.conf import settings
import ox

from item.models import Item
import models
import extract

_INSTANCE_KEYS = ('mtime', 'path')

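# Fetch the File row matching this oshash, creating it with the reported path if
# needed; when no item is given the new file stays unassigned and is matched to
# an item later via update_info.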
def get_or_create_file(volume, f, user, item=None):
    try:
        file = models.File.objects.get(oshash=f['oshash'])
    except models.File.DoesNotExist:
        file = models.File()
        file.oshash = f['oshash']
        file.path = f['path']
        if item:
            file.item = item
        else:
            file.item = None #gets populated later via update_info
        file.save()
    return file

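# Sync one reported file with this volume: refresh mtime/path if an instance with
# the same oshash exists, replace the instance (keeping the path's item mapping)
# if only the path matches, or create a new instance and file otherwise; then let
# the owning item, if any, update its wanted files.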
def update_or_create_instance(volume, f):
    #instance with oshash exists
    instance = models.Instance.objects.filter(file__oshash=f['oshash'], volume=volume)
    if instance.count()>0:
        instance = instance[0]
        updated = False
        for key in _INSTANCE_KEYS:
            if f[key] != getattr(instance, key):
                setattr(instance, key, f[key])
                updated=True
        if updated:
            instance.ignore = False
            instance.save()
            instance.file.save()
    else:
        instance = models.Instance.objects.filter(path=f['path'], volume=volume)
        if instance.count()>0:
            #same path, other oshash, keep path/item mapping, remove instance
            item = instance[0].file.item
            instance.delete()
        else: #new instance
            item = None
        instance = models.Instance()
        instance.volume = volume
        instance.file = get_or_create_file(volume, f, volume.user, item)
        for key in _INSTANCE_KEYS:
            setattr(instance, key, f[key])
        instance.save()
        instance.file.save()
    if instance.file.item:
        instance.file.item.update_wanted()
    return instance

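# Celery task: reconcile the complete file list reported for one of a user's
# volumes; instances no longer reported are removed, the rest are created or
# updated, and items that lost selected files get update_selected() re-run.
#
# Hedged usage sketch (illustration only; real call sites live elsewhere in
# pan.do/ra, and the volume name and file dict below are made up). Each entry
# needs at least the keys this task reads: oshash, path and mtime.
#
#   update_files.delay(request.user.username, 'Local Drive', [
#       {'oshash': '0123456789abcdef', 'path': 'Movies/example.avi', 'mtime': 1325376000},
#   ])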
@task(ignore_results=True, queue='default')
def update_files(user, volume, files):
    user = models.User.objects.get(username=user)
    volume, created = models.Volume.objects.get_or_create(user=user, name=volume)
    all_files = [f['oshash'] for f in files]
    #remove deleted files
    removed = models.Instance.objects.filter(volume=volume).exclude(file__oshash__in=all_files)
    ids = [i['itemId'] for i in Item.objects.filter(
        files__instances__in=removed.filter(file__selected=True)).distinct().values('itemId')]
    removed.delete()
    for f in files:
        update_or_create_instance(volume, f)
    for i in ids:
        i = Item.objects.get(itemId=i)
        i.update_selected()

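# Celery task: for a file whose source stream is already in place, build the
# timeline and derivatives (with the encoding flag set while it runs), then
# update the item's selected file and, if the item is not rendered yet, its
# timeline.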
@task(queue="encoding")
def process_stream(fileId):
    '''
    process uploaded stream
    '''
    file = models.File.objects.get(id=fileId)
    streams = file.streams.filter(source=None)
    if streams.count() > 0:
        stream = streams[0]
        models.File.objects.filter(id=fileId).update(encoding=True)
        stream.make_timeline()
        stream.extract_derivatives()
        models.File.objects.filter(id=fileId).update(encoding=False)
        file.item.update_selected()
        if not file.item.rendered:
            file.item.update_timeline()
        if file.item.rendered:
            file.item.save()
    return True

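# Celery task: encode a direct upload (file.data) into a stream at the highest
# configured resolution and first configured format, then run the same timeline
# and derivative steps as process_stream.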
@task(queue="encoding")
def extract_stream(fileId):
    '''
    extract stream from direct upload
    '''
    file = models.File.objects.get(id=fileId)
    if file.data:
        config = settings.CONFIG['video']
        stream, created = models.Stream.objects.get_or_create(
            file=file, resolution=max(config['resolutions']),
            format=config['formats'][0])
        if created:
            models.File.objects.filter(id=fileId).update(encoding=True)
            stream.video.name = stream.path(stream.name())
            stream.encode()
            if stream.available:
                stream.make_timeline()
                stream.extract_derivatives()
                file.item.update_selected()
                if not file.item.rendered:
                    file.item.update_timeline()
                if file.item.rendered:
                    file.item.save()
            models.File.objects.filter(id=fileId).update(encoding=False)

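# Celery task: (re)build derivatives for the file's source stream (source=None);
# pass rebuild=True to force regeneration.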
@task(queue="encoding")
def extract_derivatives(fileId, rebuild=False):
    file = models.File.objects.get(id=fileId)
    streams = file.streams.filter(source=None)
    if streams.count() > 0:
        streams[0].extract_derivatives(rebuild)
    return True

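# Celery task: ensure a stream has a timeline, cuts and average color, mark its
# file as selected, rebuild missing derivatives and refresh the item's timeline
# and clip values.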
@task(queue="encoding")
def update_stream(id):
    s = models.Stream.objects.get(pk=id)
    if not glob("%s*"%s.timeline_prefix):
        s.make_timeline()
    if not s.color:
        s.cuts = tuple(extract.cuts(s.timeline_prefix))
        s.color = tuple(extract.average_color(s.timeline_prefix))
        s.save()
    s.file.selected = True
    s.file.save()
    s.file.item.update_timeline()
    #make sure all derivatives exist
    s.extract_derivatives()
    s.file.item.save()
    #update clips
    for c in s.file.item.clips.all():
        c.update_calculated_values()
        c.save()