2009-06-08 16:08:59 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# vi:si:et:sw=4:sts=4:ts=4
|
2010-08-07 14:31:20 +00:00
|
|
|
from __future__ import division, with_statement
|
|
|
|
|
2010-02-03 12:05:38 +00:00
|
|
|
from datetime import datetime
|
2009-06-08 16:08:59 +00:00
|
|
|
import os.path
|
2010-09-03 13:28:44 +00:00
|
|
|
import subprocess
|
2010-09-10 14:09:41 +00:00
|
|
|
from glob import glob
|
2011-01-03 14:14:54 +00:00
|
|
|
import unicodedata
|
2009-10-04 22:00:08 +00:00
|
|
|
|
2009-06-08 16:08:59 +00:00
|
|
|
from django.db import models
|
2009-12-31 15:04:32 +00:00
|
|
|
from django.core.files.base import ContentFile
|
2010-01-22 23:09:07 +00:00
|
|
|
from django.utils import simplejson as json
|
2010-02-16 12:41:57 +00:00
|
|
|
from django.conf import settings
|
2010-01-22 23:09:07 +00:00
|
|
|
|
2010-07-07 22:46:41 +00:00
|
|
|
import ox
|
2011-01-01 11:44:42 +00:00
|
|
|
from ox.django import fields
|
|
|
|
from ox.normalize import canonicalTitle
|
2010-12-25 13:45:19 +00:00
|
|
|
import ox.web.imdb
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2009-08-01 14:14:54 +00:00
|
|
|
import managers
|
2010-02-03 12:05:38 +00:00
|
|
|
import utils
|
2010-11-27 12:12:53 +00:00
|
|
|
import tasks
|
2010-09-03 13:28:44 +00:00
|
|
|
from archive import extract
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2010-11-28 16:03:23 +00:00
|
|
|
from annotaion.models import Annotation, Layer
|
2011-01-01 11:44:42 +00:00
|
|
|
from person.models import get_name_sort
|
2011-01-03 14:14:54 +00:00
|
|
|
from app.models import site_config
|
2010-11-14 21:49:16 +00:00
|
|
|
|
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
def get_item(info):
    '''
    info dict with:
        imdbId, title, director, episode_title, season, series
    '''
    #Look up (or create) an Item matching `info`.
    #With an imdbId: get-or-create by itemId; a freshly created item is
    #seeded from title/director/year and an async IMDb update is queued.
    #Without an imdbId: try a title search, then fall back to the oxdbId.
    if 'imdbId' in info and info['imdbId']:
        try:
            item = Item.objects.get(itemId=info['imdbId'])
        except Item.DoesNotExist:
            item = Item(itemId=info['imdbId'])
            if 'title' in info and 'director' in info:
                item.external_data = {
                    'title': info['title'],
                    'director': info['director'],
                    'year': info.get('year', '')
                }
            #FIXME: this should be done async
            item.save()
            tasks.update_imdb.delay(item.itemId)
            #item.update_imdb()
    else:
        q = Item.objects.filter(find__key='title', find__value=info['title'])
        #NOTE(review): a single title match (count == 1) falls through to the
        #oxdbId branch below rather than reusing the match — confirm intended.
        if q.count() > 1:
            print "FIXME: check more than title here!!?"
            item = q[0]
        else:
            try:
                item = Item.objects.get(itemId=info['oxdbId'])
            except Item.DoesNotExist:
                item = Item()
                item.data = {
                    'title': info['title'],
                    'director': info['director'],
                    'year': info.get('year', '')
                }
                item.itemId = info['oxdbId']

                #carry over episode/series metadata when present
                for key in ('episode_title', 'series_title', 'season', 'episode'):
                    if key in info and info[key]:
                        item.data[key] = info[key]
                #dedupe: if an item with the same computed oxdb id exists, use it
                try:
                    existing_item = Item.objects.get(oxdbId=item.oxdb_id())
                    item = existing_item
                except Item.DoesNotExist:
                    item.save()
    return item
|
2010-01-16 20:42:11 +00:00
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-23 16:01:48 +00:00
|
|
|
class Item(models.Model):
|
2009-10-04 22:00:08 +00:00
|
|
|
    #timestamps maintained by Django on insert/update
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    published = models.DateTimeField(default=datetime.now, editable=False)

    #only items that have data from files are available,
    #this is indicated by setting available to True
    available = models.BooleanField(default=False, db_index=True)
    #public id: imdb id or computed oxdb id
    itemId = models.CharField(max_length=128, unique=True, blank=True)
    #oxdb id derived from metadata, kept in sync in save()
    oxdbId = models.CharField(max_length=42, unique=True, blank=True)
    #external_data: scraped metadata (e.g. IMDb); data: locally edited metadata;
    #get() reads data first, then external_data
    external_data = fields.DictField(default={}, editable=False)
    data = fields.DictField(default={}, editable=False)
    #cached JSON representation, rebuilt in save() via get_json()
    json = fields.DictField(default={}, editable=False)
    poster = models.ImageField(default=None, blank=True, upload_to=lambda i, x: i.path("poster.jpg"))
    poster_url = models.TextField(blank=True)
    poster_height = models.IntegerField(default=0)
    poster_width = models.IntegerField(default=0)
    poster_frame = models.FloatField(default=-1)

    icon = models.ImageField(default=None, blank=True, upload_to=lambda i, x: i.path("icon.jpg"))

    #stream related fields
    stream_aspect = models.FloatField(default=4/3)

    objects = managers.ItemManager()
|
2009-08-01 14:14:54 +00:00
|
|
|
|
2009-10-04 22:00:08 +00:00
|
|
|
def get(self, key, default=None):
|
2010-11-14 21:49:16 +00:00
|
|
|
if self.data and key in self.data:
|
|
|
|
return self.data[key]
|
|
|
|
if self.external_data and key in self.external_data:
|
|
|
|
return self.external_data[key]
|
2009-10-04 22:00:08 +00:00
|
|
|
return default
|
|
|
|
|
2010-01-30 06:56:10 +00:00
|
|
|
    def editable(self, user):
        '''
        Return whether `user` may edit this item.
        Currently always denies editing regardless of user.
        '''
        #FIXME: make permissions work
        return False
|
|
|
|
|
|
|
|
def edit(self, data):
|
|
|
|
#FIXME: how to map the keys to the right place to write them to?
|
2011-01-01 11:44:42 +00:00
|
|
|
for key in data:
|
|
|
|
if key != 'id':
|
|
|
|
setattr(self.data, key, data[key])
|
2010-01-27 06:43:17 +00:00
|
|
|
self.oxdb.save()
|
|
|
|
self.save()
|
|
|
|
|
2009-10-04 22:00:08 +00:00
|
|
|
def reviews(self):
|
2010-07-12 14:56:14 +00:00
|
|
|
reviews = self.get('reviews', [])
|
|
|
|
_reviews = {}
|
|
|
|
for r in reviews:
|
2010-12-22 07:45:37 +00:00
|
|
|
for url in settings.REVIEW_WHITELIST:
|
|
|
|
if url in r[0]:
|
|
|
|
_reviews[settings.REVIEW_WHITELIST[url]] = r[0]
|
2010-07-12 14:56:14 +00:00
|
|
|
return _reviews
|
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
def update_imdb(self):
|
2010-09-23 16:01:48 +00:00
|
|
|
if len(self.itemId) == 7:
|
2011-01-03 14:14:54 +00:00
|
|
|
data = ox.web.imdb.Imdb(self.itemId)
|
|
|
|
#FIXME: all this should be in ox.web.imdb.Imdb
|
2011-01-04 06:50:52 +00:00
|
|
|
for key in ('directors', 'writers', 'editors', 'producers',
|
|
|
|
'cinematographers', 'languages', 'genres', 'keywords',
|
|
|
|
'episode_directors'):
|
2011-01-03 14:14:54 +00:00
|
|
|
if key in data:
|
|
|
|
data[key[:-1]] = data.pop(key)
|
|
|
|
if 'countries' in data:
|
|
|
|
data['country'] = data.pop('countries')
|
|
|
|
if 'release date' in data:
|
2011-01-04 06:23:59 +00:00
|
|
|
data['releasedate'] = data.pop('release date')
|
|
|
|
if isinstance(data['releasedate'], list):
|
|
|
|
data['releasedate'] = min(data['releasedate'])
|
2011-01-03 14:14:54 +00:00
|
|
|
if 'plot' in data:
|
|
|
|
data['summary'] = data.pop('plot')
|
2011-01-03 19:45:56 +00:00
|
|
|
if isinstance(data['cast'][0], basestring):
|
|
|
|
data['actor'] = [data['cast'][0]]
|
|
|
|
else:
|
|
|
|
data['actor'] = [c[0] for c in data['cast']]
|
2011-01-03 14:14:54 +00:00
|
|
|
self.external_data = data
|
2010-07-12 14:56:14 +00:00
|
|
|
self.save()
|
|
|
|
|
2009-06-08 16:08:59 +00:00
|
|
|
def __unicode__(self):
|
2010-09-10 14:09:41 +00:00
|
|
|
year = self.get('year')
|
|
|
|
if year:
|
|
|
|
return u'%s (%s)' % (self.get('title'), self.get('year'))
|
|
|
|
return self.get('title')
|
2009-10-04 22:00:08 +00:00
|
|
|
|
2010-09-14 14:10:37 +00:00
|
|
|
def get_absolute_url(self):
|
2010-11-30 23:33:42 +00:00
|
|
|
return '/%s' % self.itemId
|
2010-09-14 14:10:37 +00:00
|
|
|
|
2009-06-08 16:08:59 +00:00
|
|
|
    def save(self, *args, **kwargs):
        '''
        Persist the item, first refreshing all derived state:
        oxdbId, poster dimensions, the find/sort/facet search tables
        and the cached JSON representation.
        '''
        self.oxdbId = self.oxdb_id()

        if self.poster:
            self.poster_height = self.poster.height
            self.poster_width = self.poster.width
        else:
            #default placeholder dimensions when no poster exists
            self.poster_height = 128
            self.poster_width = 80
        #rebuild denormalized search/sort/facet rows before writing
        self.update_find()
        self.update_sort()
        self.update_facets()
        #cache the serialized representation
        self.json = self.get_json()
        super(Item, self).save(*args, **kwargs)
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2010-09-12 14:23:23 +00:00
|
|
|
    def delete(self, *args, **kwargs):
        '''
        Delete the item and its generated artwork: poster files,
        timeline tiles and timeline strips.
        '''
        self.delete_poster()
        #remove timeline tiles (timeline_prefix is a path prefix, not a dir)
        for f in glob("%s*"%self.timeline_prefix):
            os.unlink(f)
        #remove timeline strips; [:-8] strips the trailing 'timeline'
        #from the prefix to reach the item directory
        for f in glob("%sstrip*"%self.timeline_prefix[:-8]):
            os.unlink(f)
        super(Item, self).delete(*args, **kwargs)
|
2010-09-12 14:23:23 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
def merge_with(self, other):
|
2010-09-12 14:23:23 +00:00
|
|
|
'''
|
|
|
|
move all related tables to other and delete self
|
|
|
|
'''
|
|
|
|
for stream in self.streams.all():
|
2010-09-23 16:01:48 +00:00
|
|
|
stream.item = other
|
2010-09-12 14:23:23 +00:00
|
|
|
stream.save()
|
|
|
|
for l in self.lists.all():
|
2010-09-23 16:01:48 +00:00
|
|
|
l.items.remove(self)
|
|
|
|
if l.items.filter(id=other.id) == 0:
|
|
|
|
l.items.add(other)
|
2011-01-03 20:27:40 +00:00
|
|
|
#FIXME: should this really happen for annotations?
|
|
|
|
for a in self.annotations.all():
|
|
|
|
a.item = other
|
|
|
|
|
2010-09-12 14:23:23 +00:00
|
|
|
if hasattr(self, 'files'):
|
|
|
|
for f in self.files.all():
|
2010-09-23 16:01:48 +00:00
|
|
|
f.item = other
|
2010-09-12 14:23:23 +00:00
|
|
|
f.save()
|
|
|
|
self.delete()
|
|
|
|
other.save()
|
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
'''
|
|
|
|
JSON cache related functions
|
|
|
|
'''
|
2010-12-22 07:45:37 +00:00
|
|
|
#FIXME: this should not be used
|
2009-06-08 16:08:59 +00:00
|
|
|
    #maps internal field names to the key names exposed in public JSON
    #(internal name -> public name); most map to themselves
    _public_fields = {
        'itemId': 'id',
        'title': 'title',
        'year': 'year',

        'runtime': 'runtime',
        'releasedate': 'releasedate',

        'country': 'country',
        'director': 'director',
        'writer': 'writer',
        'editor': 'editor',
        'producer': 'producer',
        'cinematographer': 'cinematographer',
        'language': 'language',
        'genre': 'genre',
        'keyword': 'keyword',
        'cast': 'cast',
        'series_title': 'series_title',
        'episode_title': 'episode_title',
        'season': 'season',
        'episode': 'episode',
        'reviews': 'reviews',
        'trivia': 'trivia',
        'rating': 'rating',
        'votes': 'votes',
        'alternative_titles': 'alternative_titles',
        'connections_json': 'connections',
    }
|
2010-11-27 02:33:31 +00:00
|
|
|
|
2010-09-07 14:05:38 +00:00
|
|
|
def get_poster(self):
|
2010-09-06 20:45:11 +00:00
|
|
|
poster = {}
|
|
|
|
poster['width'] = self.poster_width
|
|
|
|
poster['height'] = self.poster_height
|
2010-09-23 16:01:48 +00:00
|
|
|
poster['url'] = '/%s/poster.jpg' % self.itemId
|
2010-09-07 14:05:38 +00:00
|
|
|
'''
|
2010-09-06 20:45:11 +00:00
|
|
|
if self.poster:
|
|
|
|
poster['url'] = self.poster.url
|
|
|
|
else:
|
|
|
|
poster['url'] = self.poster_url
|
2010-09-07 14:05:38 +00:00
|
|
|
'''
|
2010-09-06 20:45:11 +00:00
|
|
|
return poster
|
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
    def get_posters(self):
        '''
        All known poster candidates grouped by service name:
        {service: [{'url':..,'width':..,'height':..}, ...]},
        with locally generated posters under the 'local' key.
        '''
        posters = {}
        for p in self.poster_urls.all():
            if p.service not in posters:
                posters[p.service] = []
            posters[p.service].append({'url': p.url, 'width': p.width, 'height': p.height})
        local_posters = self.local_posters().keys()
        if local_posters:
            posters['local'] = []
            for p in local_posters:
                #translate the filesystem path to its public URL
                url = p.replace(settings.MEDIA_ROOT, settings.MEDIA_URL)
                #NOTE(review): fixed dimensions assumed for local posters —
                #confirm generated frames are actually 640x1024
                width = 640
                height = 1024
                posters['local'].append({'url': url, 'width': width, 'height': height})
        return posters
|
|
|
|
|
|
|
|
    def get_stream(self):
        '''
        Client-facing stream info dict: duration, aspectRatio, baseUrl and
        the list of available vertical resolutions (profiles).
        Empty dict when the item has no streams.
        '''
        stream = {}
        if self.streams.all().count():
            s = self.streams.all()[0]
            if s.video and s.info:
                stream['duration'] = s.info['duration']
                if 'video' in s.info and s.info['video']:
                    #true division via __future__ import at module top
                    stream['aspectRatio'] = s.info['video'][0]['width'] / s.info['video'][0]['height']
                #when the webserver serves files directly, use site paths
                if settings.XSENDFILE or settings.XACCELREDIRECT:
                    stream['baseUrl'] = '/%s' % self.itemId
                else:
                    stream['baseUrl'] = os.path.dirname(s.video.url)
            #profile names like '480p.webm' -> integer height 480
            stream['profiles'] = list(set(map(lambda s: int(os.path.splitext(s['profile'])[0][:-1]), self.streams.all().values('profile'))))
        return stream
|
|
|
|
|
2010-09-18 14:44:35 +00:00
|
|
|
    def get_layers(self):
        '''
        Annotation layers for the client: cut positions from extracted
        data and subtitles from the main subtitle file, if any.
        '''
        layers = {}
        layers['cuts'] = self.data.get('cuts', {})

        layers['subtitles'] = {}
        #FIXME: subtitles should be stored in Annotation
        qs = self.files.filter(is_subtitle=True, is_main=True, available=True)
        if qs.count()>0:
            layers['subtitles'] = qs[0].srt()
        return layers
|
|
|
|
|
2009-12-31 15:04:32 +00:00
|
|
|
    def get_json(self, fields=None):
        '''
        Build the serializable dict for this item: id plus merged
        external_data/data, filled in from the sort table where possible.
        When `fields` is falsy, stream/poster info is included too.
        '''
        item = {
            'id': self.itemId
        }
        #data (local edits) wins over external_data on key collisions
        item.update(self.external_data)
        item.update(self.data)
        for key in site_config['keys'].keys():
            if key not in item:
                value = self.get(key)
                #also get values from sort table, i.e. numberof values
                if not value and self.sort and hasattr(self.sort, key):
                    if hasattr(self.sort, '%s_desc'%key):
                        #only trust the sort value if asc and desc agree
                        #(they differ when a placeholder was substituted)
                        if getattr(self.sort, key) == getattr(self.sort, '%s_desc'%key):
                            value = getattr(self.sort, key)
                    else:
                        value = getattr(self.sort, key)
                if value:
                    item[key] = value

        #format datetime values
        for key in item:
            if isinstance(item[key], datetime):
                item[key] = item[key].strftime('%Y-%m-%dT%H:%M:%SZ')

        if not fields:
            item['stream'] = self.get_stream()
            item['poster'] = self.get_poster()
            item['posters'] = self.get_posters()
        return item
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2009-08-16 12:23:29 +00:00
|
|
|
|
2010-11-28 16:03:23 +00:00
|
|
|
    def oxdb_id(self):
        '''
        Compute the deterministic oxdb id for this item from its
        title/director/year and episode metadata via utils.oxdb_id.
        '''
        return utils.oxdb_id(self.get('title', ''), self.get('director', []), str(self.get('year', '')),
                             self.get('season', ''), self.get('episode', ''),
                             self.get('episode_title', ''), self.get('episode_director', []), self.get('episode_year', ''))
|
2010-02-16 12:41:57 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
'''
|
|
|
|
Search related functions
|
|
|
|
'''
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
    def update_find(self):
        '''
        Rebuild the per-key fulltext rows (ItemFind) for this item:
        title, facet values, summary, trivia, locations, dialog and a
        combined 'all' row. List values are stored '|'-delimited.
        '''

        def save(key, value):
            #upsert one find row; empty values delete the row instead
            f, created = ItemFind.objects.get_or_create(item=self, key=key)
            if value not in ('', '||'):
                f.value = value
                f.save()
            else:
                f.delete()

        #FIXME: use site_config
        #NOTE(review): self.get('title') may be None, which would make
        #'\n'.join fail — confirm title is always set before save()
        save('title', '\n'.join([self.get('title'), self.get('original_title', '')]))

        for key in self.facet_keys:
            if key == 'character':
                #character names come from the cast list: either a plain
                #string entry or (actor, character) pairs
                values = self.get('cast', '')
                if values:
                    if isinstance(values[0], basestring):
                        values = [values[0], ]
                    else:
                        values = [i[1] for i in values]
            else:
                values = self.get(key, '')
            if isinstance(values, list):
                save(key, '|%s|'%'|'.join(values))
            else:
                save(key, values)

        save('summary', self.get('summary', '') + self.get('plot', '') + self.get('plot_outline', ''))
        save('trivia', ' '.join(self.get('trivia', [])))
        save('location', '|%s|'%'|'.join(self.get('filming_locations', [])))

        #FIXME:
        #f.dialog = 'fixme'
        save('dialog', '\n'.join([l.value for l in Annotation.objects.filter(layer__type='subtitle', item=self).order_by('start')]))

        #FIXME: collate filenames
        #f.filename = self.filename
        #aggregate every other find row into one searchable 'all' row
        all_find = ' '.join([f.value for f in ItemFind.objects.filter(item=self).exclude(key='all')])
        save('all', all_find)
|
2009-08-16 12:23:29 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
    def update_sort(self):
        '''
        Rebuild the denormalized ItemSort row for this item. For each key
        in site_config['sortKeys'] the value is coerced by type; for each
        name both ascending (`name`) and descending (`name_desc`) columns
        are written, substituting sentinel values ('zzz...', max_int,
        '9999', 9999-12-12) so missing values sort last in both directions.
        '''
        try:
            s = self.sort
        except ItemSort.DoesNotExist:
            s = ItemSort(item=self)

        def sortNames(values):
            #join person names as 'Last, First; Last, First'
            sort_value = u''
            if values:
                sort_value = u'; '.join([get_name_sort(name) for name in values])
            if not sort_value:
                sort_value = u''
            return sort_value

        #keys computed from database/file state below, not from metadata
        base_keys = (
            'id',
            'aspectratio',
            'duration',
            'color',
            'saturation',
            'brightness',
            'volume',
            'clips',
            'cuts',
            'cutsperminute',
            'words',
            'wordsperminute',
            'resolution',
            'pixels',
            'size',
            'bitrate',
            'files',
            'filename',
            'published',
            'modified',
            'popularity'
        )
        for key in site_config['sortKeys']:
            name = key['id']
            #the metadata key to read; defaults to the sort column name
            source = key.get('key', name)
            field_type = key['type']
            #largest signed 64-bit value, used as missing-value sentinel
            max_int = 9223372036854775807L

            if name not in base_keys:
                if field_type == 'title':
                    value = utils.sort_title(canonicalTitle(self.get(source)))
                    value = utils.sort_string(value)
                    setattr(s, '%s_desc'%name, value)
                    if not value:
                        value = 'zzzzzzzzzzzzzzzzzzzzzzzzz'
                    setattr(s, name, value)
                elif field_type == 'person':
                    value = sortNames(self.get(source, []))
                    #truncate to fit the sort column
                    value = utils.sort_string(value)[:955]
                    setattr(s, '%s_desc'%name, value)
                    if not value:
                        value = 'zzzzzzzzzzzzzzzzzzzzzzzzz'
                    setattr(s, name, value)
                elif field_type == 'string':
                    value = self.get(source, u'')
                    if isinstance(value, list):
                        value = u','.join(value)
                    value = utils.sort_string(value)[:955]
                    setattr(s, '%s_desc'%name, value)
                    if not value:
                        value = 'zzzzzzzzzzzzzzzzzzzzzzzzz'
                    setattr(s, name, value)
                elif field_type == 'length':
                    #can be length of strings or length of arrays, i.e. keywords
                    value = self.get(source, None)
                    if not value:
                        value = -max_int
                    else:
                        value = len(value)
                    setattr(s, '%s_desc'%name, value)
                    if value == -max_int:
                        value = max_int
                    setattr(s, name, value)
                elif field_type == 'integer':
                    value = self.get(source, -max_int)
                    if isinstance(value, list):
                        value = len(value)
                    setattr(s, '%s_desc'%name, value)
                    if value == -max_int:
                        value = max_int
                    setattr(s, name, value)
                elif field_type == 'float':
                    max_float = 9223372036854775807L
                    value = self.get(source, -max_float)
                    if isinstance(value, list):
                        value = sum(value)
                    #NOTE(review): asc column is set before the sentinel swap
                    #here, the reverse of the integer branch — confirm intended
                    setattr(s, name, value)
                    if value == -max_float:
                        value = max_float
                    setattr(s, '%s_desc'%name, value)
                elif field_type == 'words':
                    value = self.get(source, '')
                    if isinstance(value, list):
                        value = '\n'.join(value)
                    if value:
                        #rough word count: split on single spaces
                        value = len(value.split(' '))
                    else:
                        value = 0
                    #NOTE(review): no '%s_desc' column is written for 'words'
                    setattr(s, name, value)
                elif field_type == 'year':
                    value = self.get(source, '')
                    setattr(s, '%s_desc'%name, value)
                    if not value:
                        value = '9999'
                    setattr(s, name, value)
                elif field_type == 'date':
                    value = self.get(source, None)
                    if isinstance(value, basestring):
                        value = datetime.strptime(value, '%Y-%m-%d')
                    setattr(s, name, value)
                    if not value:
                        value = datetime.strptime('9999-12-12', '%Y-%m-%d')
                    setattr(s, '%s_desc'%name, value)

        #sort keys based on database, these will always be available
        #'0x' prefix mapped to 'xx' so oxdb ids sort after imdb ids
        s.itemId = self.itemId.replace('0x', 'xx')
        s.modified = self.modified
        s.modified_desc = self.modified
        s.published = self.published
        s.published_desc = self.published

        # sort values based on data from videos
        s.words = 0 #FIXME: get words from all layers or something
        s.wordsperminute = 0
        s.clips = 0 #FIXME: get clips from all layers or something
        s.popularity = 0 #FIXME: get popularity from somewhere
        videos = self.main_videos()
        if len(videos) > 0:
            s.duration = sum([v.duration for v in videos])
            s.resolution = videos[0].width * videos[0].height
            #NOTE(review): `v` here is the variable leaked from the list
            #comprehension above (Python 2), i.e. the LAST video —
            #presumably videos[0] was intended; confirm before changing
            s.aspectratio = int(1000 * utils.parse_decimal(v.display_aspect_ratio))
            #FIXME: should be average over all files
            if 'bitrate' in videos[0].info:
                s.bitrate = videos[0].info['bitrate']
            s.pixels = sum([v.pixels for v in videos])
            s.filename = ' '.join([v.name for v in videos])
            s.filename_desc = ' '.join([v.name for v in videos])
            s.files = self.files.all().count()
            s.size = sum([v.size for v in videos]) #FIXME: only size of movies?
            s.volume = 0
        else:
            s.duration = 0
            s.resolution = 0
            s.aspectratio = 0
            s.bitrate = 0
            s.pixels = 0
            s.filename = 0
            s.files = 0
            s.size = 0
            s.volume = 0

        s.color = int(sum(self.data.get('color', [])))
        s.saturation = 0 #FIXME
        s.brightness = 0 #FIXME

        s.cuts = len(self.data.get('cuts', []))
        if s.duration:
            s.cutsperminute = s.cuts / (s.duration/60)
        else:
            s.cutsperminute = 0
        s.save()
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2011-01-05 13:06:09 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
    def update_facets(self):
        '''
        Sync the Facet rows for this item with its current metadata:
        remove stale values, add new ones (with person-name sort values
        for person keys), and maintain the special 'year' facet.
        '''
        #FIXME: what to do with Unkown Director, Year, Country etc.
        for key in self.facet_keys:
            current_values = self.get(key, [])
            if not isinstance(current_values, list):
                current_values = [current_values]
            saved_values = [i.value for i in Facet.objects.filter(item=self, key=key)]
            removed_values = filter(lambda x: x not in current_values, saved_values)
            if removed_values:
                Facet.objects.filter(item=self, key=key, value__in=removed_values).delete()
            for value in current_values:
                if value not in saved_values:
                    value_sort = value
                    if key in self.person_keys:
                        #person facets sort by 'Last, First'
                        value_sort = get_name_sort(value)
                    f = Facet(key=key, value=value, value_sort=value_sort)
                    f.item = self
                    f.save()
        year = self.get('year', None)
        if year:
            f, created = Facet.objects.get_or_create(key='year', value=year, value_sort=year, item=self)
        else:
            Facet.objects.filter(item=self, key='year').delete()
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-12-04 01:26:49 +00:00
|
|
|
def path(self, name=''):
|
2010-12-05 17:51:40 +00:00
|
|
|
h = self.itemId
|
2010-12-07 19:05:59 +00:00
|
|
|
return os.path.join('items', h[:2], h[2:4], h[4:6], h[6:], name)
|
2010-12-04 01:26:49 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
'''
|
|
|
|
Video related functions
|
|
|
|
'''
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
    def frame(self, position, width=128):
        '''
        Return the filesystem path of a JPEG frame at `position` scaled to
        `width` pixels, extracting and caching it on first request.
        '''
        stream = self.streams.filter(profile=settings.VIDEO_PROFILE+'.webm')[0]
        path = os.path.join(settings.MEDIA_ROOT, self.path(), 'frames', "%d"%width, "%s.jpg"%position)
        if not os.path.exists(path):
            extract.frame(stream.video.path, path, position, width)
        return path
|
|
|
|
|
|
|
|
    @property
    def timeline_prefix(self):
        '''Filesystem path prefix for this item's generated timeline images.'''
        return os.path.join(settings.MEDIA_ROOT, self.path(), 'timeline')
|
2010-09-10 15:12:22 +00:00
|
|
|
|
2010-10-16 11:49:45 +00:00
|
|
|
    def main_videos(self):
        '''
        Main, available video files of this item, restricted to files
        owned by the first file's user and matching its resolution.
        Returns a (possibly empty) list.
        '''
        #FIXME: needs to check if more than one user has main files and only take from "higher" user
        videos = self.files.filter(is_main=True, is_video=True, available=True)
        if videos.count()>0:
            first = videos[0]
            user = first.instances.all()[0].volume.user
            #only take videos from same user and with same width/height
            def check(v):
                if v.instances.filter(volume__user=user).count()>0 and \
                   first.width == v.width and first.height == v.height:
                    return True
                return False
            videos = filter(check, videos)
        return videos
|
2010-10-16 11:49:45 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
    def update_streams(self):
        '''
        (Re)build the item's stream from its main video files: merge
        multiple parts with mkvmerge (or symlink a single file), then
        extract timeline/cuts/color data, derivatives and posters, and
        mark the item available.
        '''
        files = {}
        #keyed by sort title so parts merge in the right order
        for f in self.main_videos():
            files[utils.sort_title(f.name)] = f.video.path

        #FIXME: how to detect if something changed?
        if files:
            stream, created = Stream.objects.get_or_create(item=self, profile='%s.webm' % settings.VIDEO_PROFILE)
            stream.video.name = stream.path()
            cmd = []
            #start from a clean target file/directory
            if os.path.exists(stream.video.path):
                os.unlink(stream.video.path)
            elif not os.path.exists(os.path.dirname(stream.video.path)):
                os.makedirs(os.path.dirname(stream.video.path))
            if len(files.values()) > 1:
                #build 'fileA + fileB + ...' arguments for mkvmerge;
                #cmd[1:] drops the leading '+'
                for f in sorted(files):
                    cmd.append('+')
                    cmd.append(files[f])
                cmd = [ 'mkvmerge', '-o', stream.video.path ] + cmd[1:]
                #print cmd
                p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                #p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
                p.wait()
            else:
                #single source file: just link it into place
                os.symlink(files.values()[0], stream.video.path)
            stream.save()

            if 'video' in stream.info:
                extract.timeline(stream.video.path, self.timeline_prefix)
                self.stream_aspect = stream.info['video'][0]['width']/stream.info['video'][0]['height']
                self.data['cuts'] = extract.cuts(self.timeline_prefix)
                self.data['color'] = extract.average_color(self.timeline_prefix)
                #extract.timeline_strip(self, self.data['cuts'], stream.info, self.timeline_prefix[:-8])

            stream.extract_derivatives()
            self.make_local_posters()
            self.make_poster()
            self.available = True
            self.save()
|
|
|
|
|
|
|
|
'''
|
|
|
|
Poster related functions
|
|
|
|
'''
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
    def update_poster_urls(self):
        '''
        Query each configured poster service for this item and record any
        new poster URLs (with dimensions) as PosterUrl rows.
        Unreachable or invalid services are skipped silently.
        '''
        _current = {}
        for s in settings.POSTER_SERVICES:
            url = '%s?itemId=%s'%(s, self.itemId)
            #best effort: a failing service must not break the update
            try:
                data = json.loads(ox.net.readUrlUnicode(url))
            except:
                continue
            for service in data:
                if service not in _current:
                    _current[service] = []
                for poster in data[service]:
                    _current[service].append(poster)
        #FIXME: remove urls that are no longer listed
        for service in _current:
            for poster in _current[service]:
                p, created = PosterUrl.objects.get_or_create(item=self, url=poster['url'], service=service)
                if created:
                    p.width = poster['width']
                    p.height = poster['height']
                    p.save()
|
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
def delete_poster(self):
    '''
    Delete the stored poster file along with every derived size
    that lives next to it (poster*.jpg siblings).
    '''
    if not self.poster:
        return
    poster_path = self.poster.path
    self.poster.delete()
    pattern = poster_path.replace('.jpg', '*.jpg')
    for derived in glob(pattern):
        os.unlink(derived)
|
2010-09-10 14:09:41 +00:00
|
|
|
|
2010-09-17 20:23:03 +00:00
|
|
|
def prefered_poster_url(self):
    '''
    Return the manually chosen poster url if set, otherwise refresh
    the known urls and return the tallest poster from the first
    service in settings.POSTER_PRECEDENCE that has one, or None.
    '''
    if self.poster_url:
        return self.poster_url
    self.update_poster_urls()
    for service in settings.POSTER_PRECEDENCE:
        best = self.poster_urls.filter(service=service).order_by('-height')[:1]
        if best:
            return best[0].url
    return None
|
|
|
|
|
2010-12-01 00:00:33 +00:00
|
|
|
def make_poster(self, force=False):
    '''
    Create poster.jpg for this item. Prefers a remote poster url; if
    none is available, falls back to a locally rendered poster.

    force -- re-create the poster even if one already exists,
             deleting the old file first.
    '''
    if not self.poster or force:
        url = self.prefered_poster_url()
        if url:
            data = ox.net.readUrl(url)
            if force:
                self.delete_poster()
            self.poster.save('poster.jpg', ContentFile(data))
            self.save()
        else:
            if force:
                self.delete_poster()
            local_posters = self.make_local_posters()
            if local_posters:
                # 'rb' is required: the file is jpeg data, text mode
                # would corrupt it via newline translation on some
                # platforms
                with open(local_posters[0], 'rb') as f:
                    self.poster.save('poster.jpg', ContentFile(f.read()))
|
|
|
|
|
|
|
|
def local_posters(self):
    '''
    Return a dict mapping the absolute path each locally rendered
    poster should be written to -> the extracted frame image to
    render it from, one entry per frame of every main video.
    '''
    posters = {}
    part = 1
    for video in self.main_videos():
        for frame in video.frames.all():
            name = 'poster.pandora.%s.%s.jpg'%(part, frame.position)
            target = os.path.abspath(os.path.join(settings.MEDIA_ROOT, self.path(name)))
            posters[target] = frame.frame.path
        part += 1
    return posters
|
|
|
|
|
|
|
|
def make_local_posters(self):
    '''
    Render a poster for every frame returned by local_posters() by
    invoking the external settings.ITEM_POSTER script. Rendering is
    skipped entirely when the small timeline image does not exist yet.
    Returns the list of target poster paths.
    '''
    posters = self.local_posters()
    # the timeline path is the same for every poster; the original
    # recomputed it (and re-stat'ed the file) once per loop iteration
    timeline = self.path('timeline.64.png')
    timeline = os.path.abspath(os.path.join(settings.MEDIA_ROOT, timeline))
    if os.path.exists(timeline):
        for poster, frame in posters.items():
            cmd = [settings.ITEM_POSTER,
                '-t', self.get('title'),
                '-d', ', '.join(self.get('director', ['Unknown Director'])),
                '-y', str(self.get('year', '')),
                '-f', frame,
                '-l', timeline,
                '-p', poster
            ]
            if len(self.itemId) == 7:
                # a 7-character id looks like an imdb id, pass it on
                cmd += ['-i', self.itemId]
            cmd += ['-o', self.oxdbId]
            p = subprocess.Popen(cmd)
            p.wait()
    return posters.keys()
|
2010-09-03 13:28:44 +00:00
|
|
|
|
2011-01-04 07:32:32 +00:00
|
|
|
def make_icon(self):
    '''
    Render icon.jpg for this item from one of its extracted frames
    plus the small timeline, via the external settings.ITEM_ICON
    script. Returns the icon path, or None if no frames exist.
    '''
    #FIXME: should take middle or selected frame or som
    frames = []
    for f in self.main_videos():
        for ff in f.frames.all():
            frames.append(ff.frame.path)
    if frames:
        icon = self.path('icon.jpg')
        self.icon.name = icon
        # middle frame is used as a rough representative choice
        frame = frames[int(len(frames)/2)]
        timeline = self.path('timeline.64.png')
        timeline = os.path.abspath(os.path.join(settings.MEDIA_ROOT, timeline))
        if os.path.exists(timeline):
            cmd = [settings.ITEM_ICON,
                '-f', frame,
                '-l', timeline,
                '-i', self.icon.path
            ]
            p = subprocess.Popen(cmd)
            p.wait()
        self.save()
        return icon
    return None
|
|
|
|
|
2011-01-03 14:14:54 +00:00
|
|
|
# derive, at import time, which find keys get facet values and which
# of those hold person names, based on the site configuration
Item.facet_keys = []
Item.person_keys = []
keys_config = site_config['keys']
for find_key in site_config['findKeys']:
    name = find_key['id']
    key_type = keys_config.get(name, {'type': None})['type']
    if find_key.get('autocomplete', False) and key_type != 'title':
        Item.facet_keys.append(name)
    if name in keys_config and keys_config[name]['type'] == 'person':
        Item.person_keys.append(name)
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-23 16:01:48 +00:00
|
|
|
class ItemFind(models.Model):
    """
    Inverted search index: one row per (item, key) holding the
    searchable text value. item.update_find populates this table,
    and it's queried from managers.ItemManager.
    """

    class Meta:
        unique_together = ("item", "key")

    item = models.ForeignKey('Item', related_name='find', db_index=True)
    key = models.CharField(max_length=200, db_index=True)
    value = models.TextField(blank=True)
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2011-01-05 13:06:09 +00:00
|
|
|
'''
ItemSort
table constructed based on info in site_config['sortKeys']
'''
attrs = {
    '__module__': 'item.models',
    'item': models.OneToOneField('Item', related_name='sort', primary_key=True),
}
for key in site_config['sortKeys']:
    name = key['id']
    # 'id' would clash with the model's implicit primary key column,
    # so the sort column for it is named itemId instead
    name = {'id': 'itemId'}.get(name, name)
    field_type = key['type']
    # every sort key gets two columns: ascending and *_desc for
    # descending order (presumably pre-sorted values; see
    # ItemSort.descending_fields below)
    if field_type in ('string', 'title', 'person'):
        attrs[name] = models.CharField(max_length=1000, db_index=True)
        attrs['%s_desc'%name] = models.CharField(max_length=1000, db_index=True)
    elif field_type == 'year':
        attrs[name] = models.CharField(max_length=4, db_index=True)
        attrs['%s_desc'%name] = models.CharField(max_length=4, db_index=True)
    elif field_type in ('integer', 'words', 'length'):
        attrs[name] = models.BigIntegerField(blank=True, db_index=True)
        attrs['%s_desc'%name] = models.BigIntegerField(blank=True, db_index=True)
    elif field_type == 'float':
        attrs[name] = models.FloatField(blank=True, db_index=True)
        attrs['%s_desc'%name] = models.FloatField(blank=True, db_index=True)
    elif field_type == 'date':
        attrs[name] = models.DateTimeField(blank=True, db_index=True)
        attrs['%s_desc'%name] = models.DateTimeField(blank=True, db_index=True)
    else:
        # unknown sort key type: make it visible at import time
        print field_type
        print key

# build the model class dynamically from the collected field attrs
ItemSort = type('ItemSort', (models.Model,), attrs)
ItemSort.fields = filter(lambda x: not x.endswith('_desc'), [f.name for f in ItemSort._meta.fields])
ItemSort.descending_fields = filter(lambda x: x.endswith('_desc'), [f.name for f in ItemSort._meta.fields])
|
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-07-12 14:56:14 +00:00
|
|
|
class Facet(models.Model):
    '''
    used for keys that can have multiple values like people, languages etc.
    does not perform too well if total number of items goes above 10k
    this happens for keywords in 0xdb right now
    '''

    class Meta:
        unique_together = ("item", "key", "value")

    item = models.ForeignKey('Item', related_name='facets')
    key = models.CharField(max_length=200, db_index=True)
    value = models.CharField(max_length=200, db_index=True)
    # normalized form of value used for ordering facet lists
    value_sort = models.CharField(max_length=200, db_index=True)

    def save(self, *args, **kwargs):
        # default the sort value from the display value if not set
        if not self.value_sort:
            self.value_sort = utils.sort_string(self.value)
        super(Facet, self).save(*args, **kwargs)
|
2009-08-16 12:23:29 +00:00
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-03 13:28:44 +00:00
|
|
|
class Stream(models.Model):
    '''
    One encoded video file of an Item in a given profile
    (e.g. 96p.webm). Derivative streams point back at their source
    via the `source` foreign key.
    '''

    class Meta:
        unique_together = ("item", "profile")

    item = models.ForeignKey(Item, related_name='streams')
    profile = models.CharField(max_length=255, default='96p.webm')
    video = models.FileField(default=None, blank=True, upload_to=lambda f, x: f.path())
    source = models.ForeignKey('Stream', related_name='derivatives', default=None, null=True)
    available = models.BooleanField(default=False)
    info = fields.DictField(default={})

    def __unicode__(self):
        return u"%s/%s" % (self.item, self.profile)

    def path(self):
        # streams live at the item's path for their profile name
        return self.item.path(self.profile)

    def _extract_derivative(self, profile):
        # create (once) and encode the derivative stream for `profile`;
        # factored out of extract_derivatives, which repeated this
        # block three times
        derivative, created = Stream.objects.get_or_create(profile=profile, item=self.item)
        if created:
            derivative.source = self
            derivative.video.name = self.video.name.replace(self.profile, profile)
            derivative.encode()
            derivative.save()

    def extract_derivatives(self):
        '''
        Encode every configured derivative of this stream: an mp4
        twin of this profile (if VIDEO_H264) plus each profile in
        VIDEO_DERIVATIVES as webm and, optionally, mp4.
        '''
        if settings.VIDEO_H264:
            self._extract_derivative(self.profile.replace('.webm', '.mp4'))
        for p in settings.VIDEO_DERIVATIVES:
            self._extract_derivative(p + '.webm')
            if settings.VIDEO_H264:
                self._extract_derivative(p + '.mp4')
        return True

    def encode(self):
        # re-encode this stream's video from its source stream; marks
        # the stream available on success
        if self.source:
            video = self.source.video.path
            target = self.video.path
            profile = self.profile
            info = ox.avinfo(video)
            if extract.stream(video, target, profile, info):
                self.available = True
                self.save()

    def save(self, *args, **kwargs):
        # probe the video once so stream metadata is always populated
        if self.video and not self.info:
            self.info = ox.avinfo(self.video.path)
        super(Stream, self).save(*args, **kwargs)
|
2010-09-10 14:09:41 +00:00
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-10 14:09:41 +00:00
|
|
|
class PosterUrl(models.Model):
    '''
    Remote poster candidate for an item, collected from the poster
    services by Item.update_poster_urls.
    '''

    class Meta:
        unique_together = ("item", "service", "url")
        # tallest posters first by default
        ordering = ('-height', )

    item = models.ForeignKey(Item, related_name='poster_urls')
    url = models.CharField(max_length=1024)
    service = models.CharField(max_length=1024)
    width = models.IntegerField(default=80)
    height = models.IntegerField(default=128)

    def __unicode__(self):
        return u'%s %s %dx%d' % (unicode(self.item), self.service, self.width, self.height)
</unicode>
|