# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement

import os.path
import time
import shutil

from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import pre_delete

from ox.django import fields
import ox

from item import utils
import item.models
from person.models import get_name_sort

import extract


class File(models.Model):
    AV_INFO = (
        'duration', 'video', 'audio', 'oshash', 'size',
    )

    PATH_INFO = (
        'episodes', 'extension', 'language', 'part', 'partTitle', 'version'
    )

    ITEM_INFO = (
        'title', 'director', 'year',
        'season', 'episode', 'episodeTitle',
        'seriesTitle', 'seriesYear'
    )

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    oshash = models.CharField(max_length=16, unique=True)
    item = models.ForeignKey("item.Item", related_name='files', null=True)

    path = models.CharField(max_length=2048, default="")  # canonical path/file
    sort_path = models.CharField(max_length=2048, default="")  # sort name

    type = models.CharField(default="", max_length=255)

    #editable
    extension = models.CharField(default="", max_length=255, null=True)
    language = models.CharField(default="", max_length=8, null=True)
    part = models.CharField(default="", max_length=255, null=True)
    part_title = models.CharField(default="", max_length=255, null=True)
    version = models.CharField(default="", max_length=255, null=True)

    size = models.BigIntegerField(default=0)
    duration = models.FloatField(null=True)

    info = fields.DictField(default={})

    video_codec = models.CharField(max_length=255)
    pixel_format = models.CharField(max_length=255)
    display_aspect_ratio = models.CharField(max_length=255)
    width = models.IntegerField(default=0)
    height = models.IntegerField(default=0)
    framerate = models.CharField(max_length=255)

    audio_codec = models.CharField(max_length=255)
    channels = models.IntegerField(default=0)
    samplerate = models.IntegerField(default=0)

    bits_per_pixel = models.FloatField(default=-1)
    pixels = models.BigIntegerField(default=0)

    #True if a derivative is available or subtitles were uploaded
    available = models.BooleanField(default=False)
    selected = models.BooleanField(default=False)
    uploading = models.BooleanField(default=False)
    encoding = models.BooleanField(default=False)
    wanted = models.BooleanField(default=False)

    is_audio = models.BooleanField(default=False)
    is_video = models.BooleanField(default=False)
    is_subtitle = models.BooleanField(default=False)

    #upload and data handling
    data = models.FileField(null=True, blank=True,
        upload_to=lambda f, x: f.get_path('data.bin'))

    def __unicode__(self):
        return self.path

    def parse_info(self):
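        """
        Derive the denormalized columns from self.info (the avinfo dict):
        duration/size, the video fields (codec, width, height, framerate,
        display aspect ratio), the audio fields (codec, samplerate,
        channels), the is_video/is_audio flags and the total pixel count.
        """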
        if self.info:
            for key in ('duration', 'size'):
                setattr(self, key, self.info.get(key, 0))

            if 'video' in self.info and self.info['video'] and \
                    'width' in self.info['video'][0]:
                video = self.info['video'][0]
                self.video_codec = video['codec']
                self.width = video['width']
                self.height = video['height']
                self.framerate = video['framerate']
                if 'display_aspect_ratio' in video:
                    self.display_aspect_ratio = video['display_aspect_ratio']
                else:
                    self.display_aspect_ratio = "%s:%s" % (self.width, self.height)
                self.is_video = True
                self.is_audio = False
                if self.path.endswith('.jpg') or \
                        self.path.endswith('.png') or \
                        self.path.endswith('.txt') or \
                        self.video_codec == 'ansi' or \
                        self.duration == 0.04:
                    self.is_video = False
                    self.video_codec = ''
            else:
                self.is_video = False
                self.display_aspect_ratio = "4:3"
                self.width = 0
                self.height = 0
            if 'audio' in self.info and self.info['audio'] and self.duration > 0:
                audio = self.info['audio'][0]
                self.audio_codec = audio['codec']
                self.samplerate = audio.get('samplerate', 0)
                self.channels = audio.get('channels', 0)

                if not self.is_video:
                    self.is_audio = True
            else:
                self.is_audio = False
                self.audio_codec = ''
                self.samplerate = 0
                self.channels = 0

            if self.framerate:
                self.pixels = int(self.width * self.height * float(utils.parse_decimal(self.framerate)) * self.duration)

    def get_path_info(self):
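        """
        Collect the path-related keys from self.info and the item metadata
        into one dict, flag episodes, default seriesYear to year and map the
        file extension to a type via ox.movie.EXTENSIONS. The result is what
        normalize_path() feeds to ox.movie.format_path().
        """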
        data = {}
        for key in self.PATH_INFO:
            data[key] = self.info.get(key, None)
        if self.item:
            for key in self.ITEM_INFO:
                data[key] = self.item.get(key)
            if self.item.get('series'):
                data['isEpisode'] = True
            data['directorSort'] = [get_name_sort(n) for n in self.item.get('director', [])]
        data['isEpisode'] = 'isEpisode' in data \
            or data.get('season') != None \
            or data.get('episode') != None \
            or data.get('episodes') not in ([], None) \
            or (data.get('seriesTitle') != None and data.get('episodeTitle') != None)
        if data['isEpisode'] and data['seriesYear'] == None:
            data['seriesYear'] = data['year']
        data['type'] = 'unknown'
        if 'extension' in data and data['extension']:
            data['extension'] = data['extension'].lower()
            for type in ox.movie.EXTENSIONS:
                if data['extension'] in ox.movie.EXTENSIONS[type]:
                    data['type'] = type
        return data

    def normalize_path(self):
        #FIXME: always use format_path
        if settings.CONFIG['site']['folderdepth'] == 4:
            return ox.movie.format_path(self.get_path_info())
        else:
            path = self.path or ''
            if self.instances.all().count():
                path = self.instances.all()[0].path
            return path

    def update_info(self, info, user):
        #populate name sort with director if unknown
        if info.get('director') and info.get('directorSort'):
            for name, sortname in zip(info['director'], info['directorSort']):
                get_name_sort(name, sortname)
        #add all files in one folder to same item
        if self.instances.all().count():
            folder = os.path.dirname(self.instances.all()[0].path) + '/'
            qs = item.models.Item.objects.filter(files__instances__path__startswith=folder)
            if qs.exists():
                self.item = qs[0]
        if not self.item:
            self.item = item.models.get_item(info, user)
        for key in self.AV_INFO + self.PATH_INFO:
            if key in info:
                self.info[key] = info[key]
        self.parse_info()

    def save(self, *args, **kwargs):
        if self.id and self.info:
            self.path = self.normalize_path()
            if self.item:
                data = self.get_path_info()
                self.extension = data.get('extension')
                self.language = data.get('language')
                self.part = ox.sort_string(unicode(data.get('part') or ''))
                self.part_title = ox.sort_string(unicode(data.get('partTitle') or ''))
                self.type = data.get('type') or 'unknown'
                self.version = data.get('version')

        if self.path:
            self.sort_path = utils.sort_string(self.path)
            self.is_audio = self.type == 'audio'
            self.is_video = self.type == 'video'
            self.is_subtitle = self.path.endswith('.srt')

        if self.type not in ('audio', 'video'):
            self.duration = None
        else:
            duration = sum([s.info.get('duration', 0)
                for s in self.streams.filter(source=None)])
            if duration:
                self.duration = duration

        if self.is_subtitle:
            self.available = self.data and True or False
        else:
            self.available = not self.uploading and \
                self.streams.filter(source=None, available=True).count() > 0
        super(File, self).save(*args, **kwargs)

    def get_path(self, name):
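        """
        Return the relative storage path for this file's assets: media/ plus
        the oshash split into 2-character directories, plus name.
        """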
        h = self.oshash
        return os.path.join('media', h[:2], h[2:4], h[4:6], h[6:], name)

    def contents(self):
        if self.data != None:
            self.data.seek(0)
            return self.data.read()
        return None

    def srt(self, offset=0):
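        """
        Load the subtitle file with ox.srt.load, clamp overlapping cues so
        one ends where the next begins, and optionally shift all in/out
        times by offset.
        """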
        srt = ox.srt.load(self.data.path)
        #subtitles should not overlap
        for i in range(1, len(srt)):
            if srt[i-1]['out'] > srt[i]['in']:
                srt[i-1]['out'] = srt[i]['in']

        def shift(s):
            s['in'] += offset
            s['out'] += offset
            return s
        if offset:
            srt = map(shift, srt)
        return srt

    def editable(self, user):
        p = user.get_profile()
        return p.get_level() in ('admin', 'staff') or \
            self.instances.filter(volume__user=user).count() > 0 or \
            (not self.item or self.item.user == user)

    def save_chunk(self, chunk, chunk_id=-1, done=False):
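        """
        Append an uploaded chunk to self.data, creating the file on the
        first chunk; once done is passed, refresh self.info via ox.avinfo
        and re-run parse_info. Returns True while the upload is accepted,
        False if the file is already available.
        """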
        if not self.available:
            if not self.data:
                name = 'data.%s' % self.info.get('extension', 'avi')
                self.data.name = self.get_path(name)
                ox.makedirs(os.path.dirname(self.data.path))
                with open(self.data.path, 'w') as f:
                    f.write(chunk.read())
                self.save()
            else:
                with open(self.data.path, 'a') as f:
                    f.write(chunk.read())
            if done:
                self.info.update(ox.avinfo(self.data.path))
                self.parse_info()
                self.save()
            return True
        return False

    def save_chunk_stream(self, chunk, chunk_id=-1, done=False):
        if not self.available:
            config = settings.CONFIG['video']
            stream, created = Stream.objects.get_or_create(
                file=self,
                resolution=max(config['resolutions']),
                format=config['formats'][0])
            if created:
                stream.media.name = stream.path(stream.name())
                ox.makedirs(os.path.dirname(stream.media.path))
                with open(stream.media.path, 'w') as f:
                    f.write(chunk.read())
                stream.save()
            else:
                with open(stream.media.path, 'a') as f:
                    #FIXME: should check that chunk_id/offset is right
                    f.write(chunk.read())
            if done:
                stream.available = True
                stream.info = {}
                stream.save()
            return True
        return False

    def json(self, keys=None, user=None):
        resolution = (self.width, self.height)
        if resolution == (0, 0) or self.type != 'video':
            resolution = None
        duration = self.duration
        if self.type != 'video':
            duration = None
        data = {
            'audioCodec': self.audio_codec,
            'available': self.available,
            'duration': duration,
            'encoding': self.encoding,
            'framerate': self.framerate,
            'id': self.oshash,
            'instances': [i.json() for i in self.instances.all()],
            'path': self.path,
            'resolution': resolution,
            'samplerate': self.samplerate,
            'selected': self.selected,
            'size': self.size,
            'type': self.type,
            'videoCodec': self.video_codec,
            'wanted': self.wanted,
        }
        for key in self.PATH_INFO:
            data[key] = self.info.get(key)
        data['users'] = list(set([i['user'] for i in data['instances']]))
        if keys:
            for k in data.keys():
                if k not in keys:
                    del data[k]
        return data

    def all_paths(self):
        return [self.path] + [i.path for i in self.instances.all()]

    def delete_frames(self):
        frames = os.path.join(settings.MEDIA_ROOT, self.get_path('frames'))
        if os.path.exists(frames):
            shutil.rmtree(frames)


def delete_file(sender, **kwargs):
    f = kwargs['instance']
    #FIXME: delete streams here
    if f.data:
        f.data.delete()
pre_delete.connect(delete_file, sender=File)


class Volume(models.Model):

    class Meta:
        unique_together = ("user", "name")

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    user = models.ForeignKey(User, related_name='volumes')
    name = models.CharField(max_length=1024)

    def __unicode__(self):
        return u"%s's %s" % (self.user, self.name)

    def json(self):
        return {
            'name': self.name,
            'path': 'unknown',
            'items': self.files.count()
        }


class Instance(models.Model):

    class Meta:
        unique_together = ("path", "volume")

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    atime = models.IntegerField(default=lambda: int(time.time()), editable=False)
    ctime = models.IntegerField(default=lambda: int(time.time()), editable=False)
    mtime = models.IntegerField(default=lambda: int(time.time()), editable=False)

    path = models.CharField(max_length=2048)
    ignore = models.BooleanField(default=False)

    file = models.ForeignKey(File, related_name='instances')
    volume = models.ForeignKey(Volume, related_name='files')

    def __unicode__(self):
        return u"%s's %s <%s>" % (self.volume.user, self.path, self.file.oshash)

    @property
    def itemId(self):
        return File.objects.get(oshash=self.oshash).itemId

    def json(self):
        return {
            'ignore': self.ignore,
            'path': self.path,
            'user': self.volume.user.username,
            'volume': self.volume.name,
        }


def frame_path(frame, name):
    ext = os.path.splitext(name)[-1]
    name = "%s%s" % (frame.position, ext)
    return frame.file.get_path(name)


class Frame(models.Model):

    class Meta:
        unique_together = ("file", "position")

    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    file = models.ForeignKey(File, related_name="frames")
    position = models.FloatField()
    frame = models.ImageField(default=None, null=True, upload_to=frame_path)

    '''
    def save(self, *args, **kwargs):
        name = "%d.jpg" % self.position
        if file.name != name:
            #FIXME: frame path should be renamed on save to match current position
        super(Frame, self).save(*args, **kwargs)
    '''

    def __unicode__(self):
        return u'%s/%s' % (self.file, self.position)


def delete_frame(sender, **kwargs):
    f = kwargs['instance']
    if f.frame:
        f.frame.delete()
pre_delete.connect(delete_frame, sender=Frame)


class Stream(models.Model):

    class Meta:
        unique_together = ("file", "resolution", "format")

    file = models.ForeignKey(File, related_name='streams')
    resolution = models.IntegerField(default=96)
    format = models.CharField(max_length=255, default='webm')

    media = models.FileField(default=None, blank=True, upload_to=lambda f, x: f.path(x))
    source = models.ForeignKey('Stream', related_name='derivatives', default=None, null=True)
    available = models.BooleanField(default=False)
    oshash = models.CharField(max_length=16, null=True, db_index=True)
    info = fields.DictField(default={})
    duration = models.FloatField(default=0)
    aspect_ratio = models.FloatField(default=0)

    cuts = fields.TupleField(default=[])
    color = fields.TupleField(default=[])
    volume = models.FloatField(default=0)

    @property
    def timeline_prefix(self):
        return os.path.join(settings.MEDIA_ROOT, self.path())
        return os.path.join(settings.MEDIA_ROOT, self.path(), 'timeline')

    def name(self):
        return u"%sp.%s" % (self.resolution, self.format)

    def __unicode__(self):
        return u"%s/%s" % (self.file, self.name())

    def get(self, resolution, format):
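        """
        Pick the best matching stream for the requested resolution/format:
        collect this stream and its derivatives in that format, ordered by
        descending resolution, and walk down to the smallest stream that
        still satisfies the requested resolution (falling back to the
        largest one if none does).
        """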
        streams = []
        if self.format == format:
            streams.append(self)
        for stream in self.derivatives.filter(format=format).order_by('-resolution'):
            streams.append(stream)
        stream = streams.pop(0)
        while streams and streams[0].resolution >= resolution:
            stream = streams.pop(0)
        return stream

    def path(self, name=''):
        return self.file.get_path(name)

    def extract_derivatives(self, rebuild=False):
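        """
        Make sure a Stream row exists for every configured resolution at or
        below this stream's resolution, in every configured format, and
        encode newly created or unavailable derivatives (all of them when
        rebuild=True).
        """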
        config = settings.CONFIG['video']
        for resolution in sorted(config['resolutions'], reverse=True):
            if resolution <= self.resolution:
                for f in config['formats']:
                    derivative, created = Stream.objects.get_or_create(file=self.file,
                        resolution=resolution, format=f)

                    name = derivative.name()
                    name = os.path.join(os.path.dirname(self.media.name), name)
                    if created:
                        derivative.source = self
                        derivative.save()
                        derivative.media.name = name
                        derivative.encode()
                        derivative.save()
                    elif rebuild or not derivative.available:
                        if not derivative.media:
                            derivative.media.name = name
                        derivative.encode()
        return True

    def encode(self):
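        """
        Transcode this stream with extract.stream(), reading either from the
        source stream's media (for derivatives) or from the original upload
        in self.file.data, and update self.available accordingly.
        """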
        if self.source:
            media = self.source.media.path
            if not self.media:
                self.media.name = os.path.join(os.path.dirname(self.source.media.name), self.name())
            target = self.media.path
            info = ox.avinfo(media)
            if extract.stream(media, target, self.name(), info):
                self.available = True
            else:
                self.media = None
                self.available = False
            self.save()
        elif self.file.data:
            media = self.file.data.path
            if not self.media:
                self.media.name = self.path(self.name())
            target = self.media.path
            info = ox.avinfo(media)
            if extract.stream(media, target, self.name(), info):
                self.available = True
            else:
                self.available = False
            self.save()

    def make_timeline(self):
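        """
        For available source streams, render the timeline images with
        extract.timeline() and store the derived cuts, average color and
        average volume on the model.
        """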
        if self.available and not self.source:
            extract.timeline(self.media.path, self.timeline_prefix)
            self.cuts = tuple(extract.cuts(self.timeline_prefix))
            self.color = tuple(extract.average_color(self.timeline_prefix))
            self.volume = extract.average_volume(self.timeline_prefix)
            self.save()

    def save(self, *args, **kwargs):
        if self.media and not self.info:
            self.info = ox.avinfo(self.media.path)
            self.oshash = self.info.get('oshash')
            self.duration = self.info.get('duration', 0)
            if 'video' in self.info and self.info['video']:
                if 'display_aspect_ratio' in self.info['video'][0]:
                    dar = map(int, self.info['video'][0]['display_aspect_ratio'].split(':'))
                    self.aspect_ratio = dar[0] / dar[1]
                else:
                    self.aspect_ratio = self.info['video'][0]['width'] / self.info['video'][0]['height']
            else:
                self.aspect_ratio = 128/80
        super(Stream, self).save(*args, **kwargs)
        if self.available and not self.file.available:
            self.file.save()

    def json(self):
        return {
            'duration': self.duration,
            'aspectratio': self.aspect_ratio,
        }


def delete_stream(sender, **kwargs):
    f = kwargs['instance']
    if f.media:
        f.media.delete()
pre_delete.connect(delete_stream, sender=Stream)