sort values, fix symlink, update files as task

Author: j, 2010-12-23 20:31:53 +05:30
parent 46d02853ce
commit 82580e8e32
3 changed files with 99 additions and 69 deletions

pandora/archive/tasks.py (new file, 77 lines)

@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from datetime import timedelta
import os
import re
from celery.decorators import task, periodic_task
from item.utils import oxid, parse_path
from item.models import get_item
import item.tasks
import models

@task(ignore_result=True, queue='default')
def update_files(user, volume, files):
    user = models.User.objects.get(username=user)
    volume, created = models.Volume.objects.get_or_create(user=user, name=volume)
    all_files = []
    for f in files:
        path = f['path']
        folder = path.split('/')
        name = folder.pop()
        # keep an Extras/Versions/DVDs subfolder as part of the file name
        if folder and folder[-1] in ('Extras', 'Versions', 'DVDs'):
            name = '/'.join([folder.pop(), name])
        folder = '/'.join(folder)
        #print folder
        #print name
        f['folder'] = folder
        f['name'] = name
        oshash = f['oshash']
        all_files.append(oshash)
        # if other files in this folder on this volume are known, reuse their item
        same_folder = models.Instance.objects.filter(folder=folder, volume=volume)
        if same_folder.count() > 0:
            item = same_folder[0].file.item
        else:
            item = None
        path = os.path.join(folder, name)
        instance = models.Instance.objects.filter(file__oshash=oshash, volume=volume)
        if instance.count() > 0:
            instance = instance[0]
            updated = False
            for key in ('mtime', 'name', 'folder'):
                if f[key] != getattr(instance, key):
                    setattr(instance, key, f[key])
                    updated = True
            if updated:
                instance.save()
        else:
            #look if oshash is known
            file_objects = models.File.objects.filter(oshash=oshash)
            if file_objects.count() > 0:
                file_object = file_objects[0]
            else:
                #new oshash, add to database
                if not item:
                    item_info = parse_path(folder)
                    item = get_item(item_info)
                file_object = models.File()
                file_object.oshash = oshash
                file_object.name = name
                file_object.item = item
                file_object.save()
            instance = models.Instance()
            instance.volume = volume
            instance.file = file_object
            for key in ('mtime', 'name', 'folder'):
                setattr(instance, key, f[key])
            instance.save()
    #remove deleted files
    #FIXME: can this have any bad consequences? i.e. on the selection of used item files.
    models.Instance.objects.filter(volume=volume).exclude(file__oshash__in=all_files).delete()
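
Note: the split at the top of update_files keeps an Extras/Versions/DVDs subfolder attached to the file name rather than the folder. A standalone sketch of just that logic; the helper name and sample path are invented, not part of the commit:

def split_path(path):
    # mirrors the splitting done in update_files above
    folder = path.split('/')
    name = folder.pop()
    if folder and folder[-1] in ('Extras', 'Versions', 'DVDs'):
        name = '/'.join([folder.pop(), name])
    return '/'.join(folder), name

split_path('Movies/Alphaville (1965)/Extras/Trailer.avi')
# -> ('Movies/Alphaville (1965)', 'Extras/Trailer.avi')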

pandora/archive/views.py

@@ -28,13 +28,14 @@ from ox.django.http import HttpFileResponse
 from ox.django.views import task_status
 import ox
 
-import models
 from item.utils import oxid, parse_path
 from item.models import get_item
 import item.tasks
 
 from api.actions import actions
+import models
+import tasks
 
 @login_required_json
 def removeVolume(request):
     data = json.loads(request.POST['data'])
@@ -73,66 +74,10 @@ def update(request):
     response = json_response({'info': [], 'data': [], 'file': []})
     volume = None
     if 'files' in data:
-        volume, created = models.Volume.objects.get_or_create(user=user, name=data['volume'])
-        all_files = []
-        for f in data['files']:
-            path = f['path']
-            folder = path.split('/')
-            name = folder.pop()
-            if folder and folder[-1] in ('Extras', 'Versions', 'DVDs'):
-                name = '/'.join([folder.pop(), name])
-            folder = '/'.join(folder)
-            #print folder
-            #print name
-            f['folder'] = folder
-            f['name'] = name
-            oshash = f['oshash']
-            all_files.append(oshash)
-            same_folder = models.Instance.objects.filter(folder=folder, volume=volume)
-            if same_folder.count() > 0:
-                item = same_folder[0].file.item
-            else:
-                item = None
-            path = os.path.join(folder, name)
-            instance = models.Instance.objects.filter(file__oshash=oshash, volume=volume)
-            if instance.count() > 0:
-                instance = instance[0]
-                updated = False
-                for key in ('mtime', 'name', 'folder'):
-                    if f[key] != getattr(instance, key):
-                        setattr(instance, key, f[key])
-                        updated = True
-                if updated:
-                    instance.save()
-            else:
-                #look if oshash is known
-                file_objects = models.File.objects.filter(oshash=oshash)
-                if file_objects.count() > 0:
-                    file_object = file_objects[0]
-                else:
-                    #new oshash, add to database
-                    if not item:
-                        item_info = parse_path(folder)
-                        item = get_item(item_info)
-                    file_object = models.File()
-                    file_object.oshash = oshash
-                    file_object.name = name
-                    file_object.item = item
-                    file_object.save()
-                    response['data']['info'].append(oshash)
-                instance = models.Instance()
-                instance.volume = volume
-                instance.file = file_object
-                for key in ('mtime', 'name', 'folder'):
-                    setattr(instance, key, f[key])
-                instance.save()
-        #remove deleted files
-        #FIXME: can this have any bad consequences? i.e. on the selection of used item files.
-        models.Instance.objects.filter(volume=volume).exclude(file__oshash__in=all_files).delete()
+        #update files info async, this takes too long otherwise
+        #FIXME: how can client know if update is done? possibly with taskStatus?
+        t = tasks.update_files.delay(user.username, data['volume'], data['files'])
+        response['data']['taskId'] = t.task_id
 
     user_profile = user.get_profile()
     user_profile.files_updated = datetime.now()
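
Note: update() now only queues the work. A minimal sketch of dispatching the task from Python; the user, volume name and file entry are invented, and since update_files is declared with ignore_result=True the returned id is only an identifier, no completion state lands in the result backend:

files = [{'path': 'Movies/Alphaville (1965)/alphaville.avi',
          'oshash': '0123456789abcdef', 'mtime': 1293107513}]
t = tasks.update_files.delay('alice', 'Media', files)   # returns immediately
print t.task_id   # the value handed back to the client as 'taskId'
# t.state will keep reporting PENDING even after the worker has finished,
# because ignore_result=True prevents Celery from storing a result
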
@@ -234,8 +179,7 @@ def firefogg_upload(request):
             f.save()
             #FIXME: this fails badly if rabbitmq goes down
             try:
-                task_id = 'update_streams_' + f.item.itemId
-                t = item.tasks.update_streams.delay(f.item.itemId, task_id=task_id)
+                t = item.tasks.update_streams.delay(f.item.itemId)
                 data['resultUrl'] = t.task_id
             except:
                 pass

pandora/item/models.py

@@ -572,6 +572,9 @@ class Item(models.Model):
         s.files = 0
         s.size = 0
+        s.color = int(sum(self.data.get('color', [])))
+        s.cuts = len(self.data.get('cuts', []))
+        s.cutsperminute = s.cuts / (s.duration/60)
         for key in ('title', 'language', 'country') + self.person_keys:
             setattr(s, '%s_desc'%key, getattr(s, key))
             if not getattr(s, key):
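
For reference, a worked example of the new cutsperminute value (numbers invented); the expression divides by duration/60, so an item whose duration is zero would raise ZeroDivisionError here:

cuts = 120
duration = 5400.0                       # seconds, i.e. 90 minutes (assumed unit)
cutsperminute = cuts / (duration / 60)  # 120 / 90.0 -> 1.33 cuts per minute
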
@@ -654,26 +657,26 @@ class Item(models.Model):
         cmd = []
         if os.path.exists(stream.video.path):
             os.unlink(stream.video.path)
-        if len(files) > 1:
+        elif not os.path.exists(os.path.dirname(stream.video.path)):
+            os.makedirs(os.path.dirname(stream.video.path))
+        if len(files.values()) > 1:
             for f in sorted(files):
                 cmd.append('+')
                 cmd.append(files[f])
-            if not os.path.exists(os.path.dirname(stream.video.path)):
-                os.makedirs(os.path.dirname(stream.video.path))
             cmd = [ 'mkvmerge', '-o', stream.video.path ] + cmd[1:]
             #print cmd
             p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             #p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
             p.wait()
         else:
-            os.symlink(files[0], stream.video.path)
+            os.symlink(files.values()[0], stream.video.path)
         stream.save()
 
         if 'video' in stream.info:
             extract.timeline(stream.video.path, self.timeline_prefix)
             self.stream_aspect = stream.info['video'][0]['width']/stream.info['video'][0]['height']
 
             self.data['cuts'] = extract.cuts(self.timeline_prefix)
-            self.data['average_color'] = extract.average_color(self.timeline_prefix)
+            self.data['color'] = extract.average_color(self.timeline_prefix)
             #extract.timeline_strip(self, self.data['cuts'], stream.info, self.timeline_prefix[:-8])
             stream.extract_derivatives()
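
Note on the symlink fix: files appears to be a dict (it is indexed as files[f] for f in sorted(files)), so files[0] was a KeyError; under Python 2, files.values()[0] returns the single path. A sketch of the merge branch with invented paths, showing the command the loop assembles:

files = {'part1.avi': '/data/a.avi', 'part2.avi': '/data/b.avi'}
cmd = []
for f in sorted(files):
    cmd.append('+')
    cmd.append(files[f])
cmd = ['mkvmerge', '-o', '/data/stream/video.webm'] + cmd[1:]
# -> ['mkvmerge', '-o', '/data/stream/video.webm', '/data/a.avi', '+', '/data/b.avi']
# the standalone '+' tells mkvmerge to append the following file to the previous one
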
@@ -830,6 +833,12 @@ class ItemSort(models.Model):
     filename = models.CharField(max_length=1024, blank=True, db_index=True)
     files = models.IntegerField(blank=True, db_index=True)
     size = models.BigIntegerField(blank=True, db_index=True)
+    color = models.IntegerField(blank=True, db_index=True)
+    saturation = models.IntegerField(blank=True, db_index=True)
+    brightness = models.IntegerField(blank=True, db_index=True)
+    cuts = models.IntegerField(blank=True, db_index=True)
+    cutsperminute = models.FloatField(blank=True, db_index=True)
+
     #required to move empty values to the bottom for both asc and desc sort
     title_desc = models.CharField(max_length=1000, db_index=True)
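
The new ItemSort columns are indexed so these values can be sorted on in the database; of the five, the hunks above only populate color, cuts and cutsperminute, while saturation and brightness are not filled yet. Two illustrative ORM queries, not part of the commit:

# e.g. items with the highest cutting rate, via the sort table defined above
ItemSort.objects.order_by('-cutsperminute')[:10]
ItemSort.objects.order_by('-color')[:10]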