cablegates/pandora/item/models.py

# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement

from datetime import datetime
import os.path
import subprocess
from glob import glob
import uuid
import unicodedata

from django.db import models
from django.db.models import Sum
from django.core.files.base import ContentFile
from django.utils import simplejson as json
from django.conf import settings
from django.contrib.auth.models import User, Group

import ox
from ox.django import fields
from ox.normalize import canonicalTitle
import ox.web.imdb

import managers
import utils
import tasks
from archive import extract

from annotation.models import Annotation, Layer
from person.models import get_name_sort
from app.models import site_config

def get_item(info, user=None):
'''
info dict with:
imdbId, title, director, episode_title, season, series
'''
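    # Illustrative call (the values below are made up, not taken from this code):
    #   get_item({'imdbId': '0133093', 'title': 'The Matrix',
    #             'director': ['Wachowski'], 'year': '1999'}, user=some_user)
    # With settings.USE_IMDB enabled this returns the existing Item with that
    # itemId, or creates it and queues tasks.update_external to fetch IMDb data.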
if settings.USE_IMDB:
if 'imdbId' in info and info['imdbId']:
try:
item = Item.objects.get(itemId=info['imdbId'])
except Item.DoesNotExist:
item = Item(itemId=info['imdbId'])
if 'title' in info and 'director' in info:
item.external_data = {
'title': info['title'],
'director': info['director'],
'year': info.get('year', '')
}
item.user = user
item.save()
tasks.update_external.delay(item.itemId)
else:
q = Item.objects.filter(find__key='title', find__value=info['title'])
if q.count() > 1:
print "FIXME: check more than title here!!?"
item = q[0]
else:
try:
item = Item.objects.get(itemId=info['oxdbId'])
except Item.DoesNotExist:
item = Item()
item.data = {
'title': info['title'],
'director': info['director'],
'year': info.get('year', '')
}
item.itemId = info['oxdbId']
for key in ('episode_title', 'series_title', 'season', 'episode'):
if key in info and info[key]:
item.data[key] = info[key]
try:
existing_item = Item.objects.get(oxdbId=item.oxdb_id())
item = existing_item
except Item.DoesNotExist:
item.save()
else:
qs = Item.objects.filter(find__key='title', find__value=info['title'])
if qs.count() == 1:
item = qs[0]
else:
item = Item()
item.data = {
'title': info['title']
}
item.user = user
item.save()
return item
class Item(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
published = models.DateTimeField(default=datetime.now, editable=False)
user = models.ForeignKey(User, null=True, related_name='items')
groups = models.ManyToManyField(Group, blank=True, related_name='items')
#only items that have data from files are available,
#this is indicated by setting available to True
available = models.BooleanField(default=False, db_index=True)
public = models.BooleanField(default=False, db_index=True)
itemId = models.CharField(max_length=128, unique=True, blank=True)
oxdbId = models.CharField(max_length=42, unique=True, blank=True)
external_data = fields.DictField(default={}, editable=False)
data = fields.DictField(default={}, editable=False)
json = fields.DictField(default={}, editable=False)
poster = models.ImageField(default=None, blank=True,
upload_to=lambda i, x: i.path("poster.jpg"))
poster_url = models.TextField(blank=True)
poster_height = models.IntegerField(default=0)
poster_width = models.IntegerField(default=0)
poster_frame = models.FloatField(default=-1)
icon = models.ImageField(default=None, blank=True,
upload_to=lambda i, x: i.path("icon.jpg"))
#stream related fields
stream_aspect = models.FloatField(default=4/3)
objects = managers.ItemManager()
def get(self, key, default=None):
if self.data and key in self.data:
return self.data[key]
if self.external_data and key in self.external_data:
return self.external_data[key]
return default
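    # get() resolves a metadata key with manual data taking precedence over
    # scraped data: item.get('director') first checks self.data, then
    # self.external_data, and finally returns the given default.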
def access(self, user):
if self.public and self.available:
return True
elif user.is_authenticated() and \
(user.is_staff or self.user == user or \
self.groups.filter(id__in=user.groups.all()).count() > 0):
return True
return False
def editable(self, user):
if user.is_staff or \
self.user == user or \
self.groups.filter(id__in=user.groups.all()).count() > 0:
return True
return False
def edit(self, data):
#FIXME: how to map the keys to the right place to write them to?
        if 'id' in data:
            #FIXME: check if id is valid and exists and move/merge items accordingly
            del data['id']
        if 'groups' in data:
            groups = data.pop('groups')
            self.groups.exclude(name__in=groups).delete()
            for g in groups:
                group, created = Group.objects.get_or_create(name=g)
                self.groups.add(group)
        for key in data:
            if key != 'id':
                self.data[key] = data[key]
        self.save()
def reviews(self):
reviews = self.get('reviews', [])
_reviews = {}
for r in reviews:
for url in settings.REVIEW_WHITELIST:
if url in r[0]:
_reviews[settings.REVIEW_WHITELIST[url]] = r[0]
return _reviews
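    # reviews() keeps only whitelisted sources; assuming, for illustration, that
    # settings.REVIEW_WHITELIST maps 'rottentomatoes.com' to 'Rotten Tomatoes',
    # any stored review URL containing that domain is returned under that label.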
def update_external(self):
if len(self.itemId) == 7:
data = ox.web.imdb.Imdb(self.itemId)
#FIXME: all this should be in ox.web.imdb.Imdb
for key in ('directors', 'writers', 'editors', 'producers',
'cinematographers', 'languages', 'genres', 'keywords',
'episode_directors'):
if key in data:
data[key[:-1]] = data.pop(key)
if 'countries' in data:
data['country'] = data.pop('countries')
if 'release date' in data:
data['releasedate'] = data.pop('release date')
if isinstance(data['releasedate'], list):
data['releasedate'] = min(data['releasedate'])
if 'plot' in data:
data['summary'] = data.pop('plot')
if 'cast' in data:
if isinstance(data['cast'][0], basestring):
data['actor'] = [data['cast'][0]]
else:
data['actor'] = [c[0] for c in data['cast']]
self.external_data = data
self.save()
def __unicode__(self):
year = self.get('year')
if year:
return u'%s (%s)' % (self.get('title', 'Untitled'), self.get('year'))
return self.get('title', u'Untitled')
def get_absolute_url(self):
return '/%s' % self.itemId
def save(self, *args, **kwargs):
if not self.id:
if not self.itemId:
self.itemId = str(uuid.uuid1())
super(Item, self).save(*args, **kwargs)
if not settings.USE_IMDB:
self.itemId = ox.to32(self.id)
self.oxdbId = self.oxdb_id()
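            # Note: when USE_IMDB is off, itemId is first set to a uuid placeholder
            # (unique for the initial save) and then replaced with the base32
            # encoding of the primary key (ox.to32) once the row exists.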
if self.poster:
self.poster_height = self.poster.height
self.poster_width = self.poster.width
else:
self.poster_height = 128
self.poster_width = 80
self.update_find()
self.update_sort()
self.update_facets()
self.json = self.get_json()
super(Item, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
self.delete_poster()
for f in glob("%s*"%self.timeline_prefix):
os.unlink(f)
for f in glob("%sstrip*"%self.timeline_prefix[:-8]):
os.unlink(f)
super(Item, self).delete(*args, **kwargs)
def merge_with(self, other):
'''
move all related tables to other and delete self
'''
for stream in self.streams.all():
stream.item = other
stream.save()
for l in self.lists.all():
l.items.remove(self)
            if l.items.filter(id=other.id).count() == 0:
l.items.add(other)
#FIXME: should this really happen for annotations?
        for a in self.annotations.all():
            a.item = other
            a.save()
if hasattr(self, 'files'):
for f in self.files.all():
f.item = other
f.save()
self.delete()
other.save()
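        # merge_with() is destructive: streams, list memberships, annotations and
        # files are re-pointed at `other`, then this item is deleted.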
'''
JSON cache related functions
'''
#FIXME: this should not be used
_public_fields = {
'itemId': 'id',
'title': 'title',
'year': 'year',
'runtime': 'runtime',
'releasedate': 'releasedate',
'country': 'country',
'director': 'director',
'writer': 'writer',
'editor': 'editor',
'producer': 'producer',
'cinematographer': 'cinematographer',
'language': 'language',
'genre': 'genre',
'keyword': 'keyword',
'cast': 'cast',
'series_title': 'series_title',
'episode_title': 'episode_title',
'season': 'season',
'episode': 'episode',
'reviews': 'reviews',
'trivia': 'trivia',
'rating': 'rating',
'votes': 'votes',
'alternative_titles': 'alternative_titles',
'connections_json': 'connections',
}
def get_poster(self):
poster = {}
poster['width'] = self.poster_width
poster['height'] = self.poster_height
poster['url'] = '/%s/poster.jpg' % self.itemId
'''
if self.poster:
poster['url'] = self.poster.url
else:
poster['url'] = self.poster_url
'''
return poster
def get_posters(self):
posters = {}
for p in self.poster_urls.all():
if p.service not in posters:
posters[p.service] = []
posters[p.service].append({'url': p.url, 'width': p.width, 'height': p.height})
local_posters = self.local_posters().keys()
if local_posters:
posters['local'] = []
for p in local_posters:
#FIXME: media_url is no longer public
url = p.replace(settings.MEDIA_ROOT, settings.MEDIA_URL)
width = 640
height = 1024
posters['local'].append({'url': url, 'width': width, 'height': height})
return posters
def get_stream(self):
stream = {}
if self.streams.all().count():
s = self.streams.all()[0]
if s.video and s.info:
stream['duration'] = s.info['duration']
if 'video' in s.info and s.info['video']:
stream['aspectRatio'] = s.info['video'][0]['width'] / s.info['video'][0]['height']
if settings.XSENDFILE or settings.XACCELREDIRECT:
stream['baseUrl'] = '/%s' % self.itemId
else:
stream['baseUrl'] = os.path.dirname(s.video.url)
stream['profiles'] = list(set(map(lambda s: int(os.path.splitext(s['profile'])[0][:-1]), self.streams.all().values('profile'))))
stream['formats'] = list(set(map(lambda s: os.path.splitext(s['profile'])[1][1:], self.streams.all().values('profile'))))
return stream
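    # Profile names encode resolution and container, e.g. the default '96p.webm'
    # yields 96 in stream['profiles'] and 'webm' in stream['formats']; a
    # hypothetical '480p.mp4' derivative would add 480 and 'mp4'.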
def get_layers(self, user=None):
layers = {}
layers['subtitles'] = []
#FIXME: should subtitles be stored in Annotation?
qs = self.files.filter(is_subtitle=True, is_main=True, available=True)
if qs.count()>0:
layers['subtitles'] = qs[0].srt()
for l in Layer.objects.all():
ll = layers.setdefault(l.name, [])
qs = Annotation.objects.filter(layer=l, item=self)
if l.private:
                if user is not None and user.is_anonymous():
user = None
qs = qs.filter(user=user)
for a in qs.order_by('start'):
ll.append(a.json())
return layers
def get_json(self, fields=None):
i = {
'id': self.itemId
}
i.update(self.external_data)
i.update(self.data)
for k in site_config()['itemKeys']:
key = k['id']
if key not in i:
value = self.get(key)
                #also get values from the sort table, e.g. numberoffiles
if not value and self.sort and hasattr(self.sort, key):
value = getattr(self.sort, key)
if value:
i[key] = value
#format datetime values
for key in i:
if isinstance(i[key], datetime):
i[key] = i[key].strftime('%Y-%m-%dT%H:%M:%SZ')
i['poster'] = self.get_poster()
i['posters'] = self.get_posters()
return i
def oxdb_id(self):
if not settings.USE_IMDB:
return self.itemId
if not self.get('title', None) and not self.get('director', None):
return None
return utils.oxdb_id(self.get('title', ''), self.get('director', []), str(self.get('year', '')),
self.get('season', ''), self.get('episode', ''),
self.get('episode_title', ''), self.get('episode_director', []), self.get('episode_year', ''))
'''
Search related functions
'''
def update_find(self):
def save(key, value):
f, created = ItemFind.objects.get_or_create(item=self, key=key)
if value not in ('', ):
if isinstance(value, basestring):
value = value.strip()
f.value = value
f.save()
else:
f.delete()
#FIXME: use site_config
save('title', u'\n'.join([self.get('title', 'Untitled'),
self.get('original_title', '')]))
for key in self.facet_keys:
if key == 'character':
values = self.get('cast', '')
if values:
if isinstance(values[0], basestring):
values = [values[0], ]
else:
values = [i[1] for i in values]
else:
values = self.get(key, '')
if isinstance(values, list):
save(key, '\n'.join(values))
else:
save(key, values)
save('summary', self.get('summary', ''))
save('trivia', ' '.join(self.get('trivia', [])))
#FIXME:
qs = Annotation.objects.filter(layer__type='subtitle', item=self).order_by('start')
save('dialog', '\n'.join([l.value for l in qs]))
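    # After update_find() runs, ItemFind holds one row per searchable key for this
    # item: title (plus original_title), each facet key (with 'character' derived
    # from the cast list), summary, trivia and the concatenated subtitle dialog;
    # managers.ItemManager queries these rows.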
def update_sort(self):
try:
s = self.sort
except ItemSort.DoesNotExist:
s = ItemSort(item=self)
def sortNames(values):
sort_value = u''
if values:
sort_value = u'; '.join([get_name_sort(name) for name in values])
if not sort_value:
sort_value = u''
return sort_value
def set_value(s, name, value):
if not value:
value = None
setattr(s, name, value)
base_keys = (
'id',
'aspectratio',
'duration',
'hue',
'saturation',
'lightness',
'volume',
'clips',
'cuts',
'cutsperminute',
'words',
'wordsperminute',
'resolution',
'pixels',
'size',
'bitrate',
'numberoffiles',
'published',
'modified',
'popularity',
)
for key in filter(lambda k: 'columnWidth' in k, config['itemKeys']):
name = key['id']
source = name
sort_type = key.get('sort', key['type'])
if 'value' in key:
if 'layer' in key['value']:
continue
source = key['value']['key']
sort_type = key['value'].get('type', sort_type)
if name not in base_keys:
if sort_type == 'title':
value = utils.sort_title(canonicalTitle(self.get(source, u'Untitled')))
value = utils.sort_string(value)
set_value(s, name, value)
elif sort_type == 'person':
value = sortNames(self.get(source, []))
value = utils.sort_string(value)[:955]
set_value(s, name, value)
elif sort_type == 'string':
value = self.get(source, u'')
if isinstance(value, list):
value = u','.join(value)
value = utils.sort_string(value)[:955]
set_value(s, name, value)
elif sort_type in ('length', 'integer', 'float'):
#can be length of strings or length of arrays, i.e. keywords
value = self.get(source, None)
if isinstance(value, list):
value = len(value)
set_value(s, name, value)
elif sort_type == 'words':
value = self.get(source, None)
if isinstance(value, list):
value = '\n'.join(value)
if value:
value = len(value.split(' '))
set_value(s, name, value)
elif sort_type == 'year':
value = self.get(source, None)
set_value(s, name, value)
elif sort_type == 'date':
value = self.get(source, None)
if isinstance(value, basestring):
value = datetime.strptime(value, '%Y-%m-%d')
set_value(s, name, value)
#sort keys based on database, these will always be available
s.itemId = self.itemId.replace('0x', 'xx')
s.modified = self.modified
s.published = self.published
# sort values based on data from videos
s.words = 0 #FIXME: get words from all layers or something
s.wordsperminute = 0
s.clips = 0 #FIXME: get clips from all layers or something
s.popularity = 0 #FIXME: get popularity from somewhere
videos = self.main_videos()
if len(videos) > 0:
s.duration = sum([v.duration for v in videos])
s.resolution = videos[0].width * videos[0].height
            s.aspectratio = int(1000 * utils.parse_decimal(videos[0].display_aspect_ratio))
#FIXME: should be average over all files
if 'bitrate' in videos[0].info:
s.bitrate = videos[0].info['bitrate']
s.pixels = sum([v.pixels for v in videos])
s.numberoffiles = self.files.all().count()
s.size = sum([v.size for v in videos]) #FIXME: only size of movies?
s.volume = 0
else:
s.duration = None
s.resolution = None
s.aspectratio = None
s.bitrate = None
s.pixels = None
s.filename = None
s.files = None
s.size = None
s.volume = None
if 'color' in self.data:
s.hue, s.saturation, s.brightness = self.data['color']
else:
s.hue = None
s.saturation = None
            s.brightness = None
s.cuts = len(self.data.get('cuts', []))
if s.duration:
s.cutsperminute = s.cuts / (s.duration/60)
else:
s.cutsperminute = None
s.popularity = self.accessed.aggregate(Sum('accessed'))['accessed__sum']
s.save()
def update_facets(self):
        #FIXME: what to do with Unknown Director, Year, Country etc.
for key in self.facet_keys:
current_values = self.get(key, [])
if not isinstance(current_values, list):
current_values = [current_values]
saved_values = [i.value for i in Facet.objects.filter(item=self, key=key)]
removed_values = filter(lambda i: i not in current_values, saved_values)
if removed_values:
Facet.objects.filter(item=self, key=key, value__in=removed_values).delete()
for value in current_values:
if value not in saved_values:
value_sort = value
if key in self.person_keys:
value_sort = get_name_sort(value)
f = Facet(key=key, value=value, value_sort=value_sort)
f.item = self
f.save()
def path(self, name=''):
h = self.itemId
return os.path.join('items', h[:2], h[2:4], h[4:6], h[6:], name)
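    # Example: for an itemId of '0123456' (illustrative), path('poster.jpg') returns
    # 'items/01/23/45/6/poster.jpg'; two-character shards keep any single directory
    # from growing too large.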
'''
Video related functions
'''
def frame(self, position, width=128):
stream = self.streams.filter(profile=settings.VIDEO_PROFILE)
if stream.count()>0:
stream = stream[0]
else:
return None
path = os.path.join(settings.MEDIA_ROOT, self.path(),
'frames', "%d"%width, "%s.jpg"%position)
if not os.path.exists(path):
extract.frame(stream.video.path, path, position, width)
return path
@property
def timeline_prefix(self):
return os.path.join(settings.MEDIA_ROOT, self.path(), 'timeline')
def main_videos(self):
#FIXME: needs to check if more than one user has main files and only
# take from "higher" user
videos = self.files.filter(is_main=True, is_video=True, available=True)
if videos.count()>0:
first = videos[0]
user = first.instances.all()[0].volume.user
#only take videos from same user and with same width/height
def check(v):
if v.instances.filter(volume__user=user).count()>0 and \
first.width == v.width and first.height == v.height:
return True
return False
videos = filter(check, videos)
else:
audio = self.files.filter(is_main=True, is_audio=True, available=True)
if audio.count()>0:
first = audio[0]
user = first.instances.all()[0].volume.user
#only take videos from same user and with same width/height
def check(v):
if v.instances.filter(volume__user=user).count()>0:
return True
return False
videos = filter(check, audio)
return videos
def update_streams(self):
files = {}
for f in self.main_videos():
files[utils.sort_title(f.name)] = f.video.path
#FIXME: how to detect if something changed?
if files:
stream, created = Stream.objects.get_or_create(item=self,
profile=settings.VIDEO_PROFILE)
stream.video.name = stream.path()
cmd = []
if os.path.exists(stream.video.path):
os.unlink(stream.video.path)
elif not os.path.exists(os.path.dirname(stream.video.path)):
os.makedirs(os.path.dirname(stream.video.path))
if len(files.values()) > 1:
for f in sorted(files):
cmd.append('+')
cmd.append(files[f])
cmd = [ 'mkvmerge', '-o', stream.video.path ] + cmd[1:]
#print cmd
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
p.wait()
else:
os.symlink(files.values()[0], stream.video.path)
stream.save()
extract.timeline(stream.video.path, self.timeline_prefix)
if 'video' in stream.info and stream.info['video']:
v = stream.info['video'][0]
self.stream_aspect = v['width']/v['height']
self.data['cuts'] = extract.cuts(self.timeline_prefix)
self.data['color'] = extract.average_color(self.timeline_prefix)
#extract.timeline_strip(self, self.data['cuts'], stream.info, self.timeline_prefix[:-8])
2010-09-10 15:12:22 +00:00
stream.extract_derivatives()
self.make_local_posters()
self.make_poster()
self.make_icon()
self.available = True
self.save()
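    # update_streams() is the main ingest step: main videos are merged via mkvmerge
    # (or symlinked if there is only one file) into one stream, the timeline, cuts
    # and average color are extracted, derivative encodings are generated, and
    # posters and icon are rebuilt before the item is marked available.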
'''
Poster related functions
'''
def update_poster_urls(self):
_current = {}
for s in settings.POSTER_SERVICES:
url = '%s?id=%s'%(s, self.itemId)
try:
data = json.loads(ox.net.readUrlUnicode(url))
except:
continue
for service in data:
if service not in _current:
_current[service] = []
for poster in data[service]:
_current[service].append(poster)
#FIXME: remove urls that are no longer listed
for service in _current:
for poster in _current[service]:
p, created = PosterUrl.objects.get_or_create(item=self, url=poster['url'], service=service)
if created:
p.width = poster['width']
p.height = poster['height']
p.save()
2010-09-10 15:12:22 +00:00
def delete_poster(self):
if self.poster:
path = self.poster.path
self.poster.delete()
for f in glob(path.replace('.jpg', '*.jpg')):
os.unlink(f)
def prefered_poster_url(self):
if self.poster_url:
return self.poster_url
self.update_poster_urls()
for service in settings.POSTER_PRECEDENCE:
for u in self.poster_urls.filter(service=service).order_by('-height'):
return u.url
return None
def make_poster(self, force=False):
if not self.poster or force:
url = self.prefered_poster_url()
if url:
data = ox.net.readUrl(url)
if force:
self.delete_poster()
self.poster.save('poster.jpg', ContentFile(data))
self.save()
else:
if force:
self.delete_poster()
local_posters = self.make_local_posters()
if local_posters:
with open(local_posters[0]) as f:
self.poster.save('poster.jpg', ContentFile(f.read()))
def local_posters(self):
part = 1
posters = {}
timeline = self.path('timeline.64.png')
timeline = os.path.abspath(os.path.join(settings.MEDIA_ROOT, timeline))
if not os.path.exists(timeline):
return posters
if self.poster_frame >= 0:
frame = self.get_poster_frame_path()
path = self.path('poster.pandora.%s.%s.jpg'%(part, self.poster_frame))
path = os.path.abspath(os.path.join(settings.MEDIA_ROOT, path))
posters[path] = frame
else:
for f in self.main_videos():
for frame in f.frames.all():
path = self.path('poster.pandora.%s.%s.jpg'%(part, frame.position))
path = os.path.abspath(os.path.join(settings.MEDIA_ROOT, path))
posters[path] = frame.frame.path
part += 1
return posters
def make_local_posters(self):
posters = self.local_posters()
timeline = self.path('timeline.64.png')
timeline = os.path.abspath(os.path.join(settings.MEDIA_ROOT, timeline))
if os.path.exists(timeline):
for poster in posters:
frame = posters[poster]
cmd = [settings.ITEM_POSTER,
'-t', self.get('title'),
'-d', ', '.join(self.get('director', ['Unknown Director'])),
'-y', str(self.get('year', '')),
'-f', frame,
'-l', timeline,
'-p', poster
]
if settings.USE_IMDB:
if len(self.itemId) == 7:
cmd += ['-i', self.itemId]
cmd += ['-o', self.oxdbId]
else:
cmd += ['-i', self.itemId]
p = subprocess.Popen(cmd)
p.wait()
return posters.keys()
def get_poster_frame_path(self):
if self.poster_frame >= 0:
size = int(settings.VIDEO_PROFILE.split('.')[0][:-1])
return self.frame(self.poster_frame, size)
frames = []
for f in self.main_videos():
for ff in f.frames.all():
frames.append(ff.frame.path)
if frames:
return frames[int(len(frames)/2)]
def make_icon(self):
frame = self.get_poster_frame_path()
if frame:
icon = self.path('icon.jpg')
self.icon.name = icon
timeline = self.path('timeline.64.png')
timeline = os.path.abspath(os.path.join(settings.MEDIA_ROOT, timeline))
if os.path.exists(timeline):
cmd = [settings.ITEM_ICON,
'-f', frame,
'-l', timeline,
'-i', self.icon.path
]
p = subprocess.Popen(cmd)
p.wait()
self.save()
return icon
return None
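# Item.facet_keys / Item.person_keys are derived from the site configuration:
# every itemKey with autocompletion (and no custom autocompleteSortKey) becomes a
# facet key, and keys whose sort type is 'person' are also listed in person_keys
# so their facet values get name-sorted in update_facets().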
config = site_config()
Item.facet_keys = []
for key in config['itemKeys']:
if 'autocomplete' in key and not 'autocompleteSortKey' in key:
Item.facet_keys.append(key['id'])
Item.person_keys = []
for key in config['itemKeys']:
if 'sort' in key and key['sort'] == 'person':
Item.person_keys.append(key['id'])
class ItemFind(models.Model):
"""
    used to find items;
    Item.update_find populates this table,
    and it is used in managers.ItemManager
2009-08-16 12:23:29 +00:00
"""
class Meta:
unique_together = ("item", "key")
item = models.ForeignKey('Item', related_name='find', db_index=True)
key = models.CharField(max_length=200, db_index=True)
value = models.TextField(blank=True)
def __unicode__(self):
return u"%s=%s" % (self.key, self.value)
'''
ItemSort
table constructed based on info in site_config['itemKeys']
'''
attrs = {
'__module__': 'item.models',
'item': models.OneToOneField('Item', related_name='sort', primary_key=True),
}
for key in filter(lambda k: 'columnWidth' in k, config['itemKeys']):
name = key['id']
name = {'id': 'itemId'}.get(name, name)
sort_type = key.get('sort', key['type'])
if isinstance(sort_type, list):
sort_type = sort_type[0]
model = {
'char': (models.CharField, dict(null=True, max_length=1000, db_index=True)),
'year': (models.CharField, dict(null=True, max_length=4, db_index=True)),
'integer': (models.BigIntegerField, dict(null=True, blank=True, db_index=True)),
'float': (models.FloatField, dict(null=True, blank=True, db_index=True)),
'date': (models.DateTimeField, dict(null=True, blank=True, db_index=True))
}[{
'string': 'char',
'title': 'char',
'person': 'char',
'year': 'year',
'words': 'integer',
'length': 'integer',
'date': 'date',
'hue': 'float',
}.get(sort_type, sort_type)]
attrs[name] = model[0](**model[1])
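# Sketch of the resulting mapping (assuming a typical itemKeys entry): a key like
# {'id': 'director', 'type': 'string', 'sort': 'person', 'columnWidth': 180}
# becomes attrs['director'] = models.CharField(null=True, max_length=1000, db_index=True).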
ItemSort = type('ItemSort', (models.Model,), attrs)
ItemSort.fields = [f.name for f in ItemSort._meta.fields]
class Access(models.Model):
class Meta:
unique_together = ("item", "user")
access = models.DateTimeField(auto_now=True)
item = models.ForeignKey(Item, related_name='accessed')
user = models.ForeignKey(User, null=True, related_name='accessed_items')
accessed = models.IntegerField(default=0)
def save(self, *args, **kwargs):
if not self.accessed:
self.accessed = 0
self.accessed += 1
super(Access, self).save(*args, **kwargs)
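    # One Access row exists per (item, user); every save() bumps `accessed`, and
    # Item.update_sort() aggregates Sum('accessed') into the popularity sort key.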
def __unicode__(self):
if self.user:
return u"%s/%s/%s" % (self.user, self.item, self.access)
return u"%s/%s" % (self.item, self.access)
class Facet(models.Model):
'''
used for keys that can have multiple values like people, languages etc.
    does not perform too well if the total number of items goes above 10k
this happens for keywords in 0xdb right now
'''
class Meta:
unique_together = ("item", "key", "value")
item = models.ForeignKey('Item', related_name='facets')
key = models.CharField(max_length=200, db_index=True)
value = models.CharField(max_length=200, db_index=True)
value_sort = models.CharField(max_length=200, db_index=True)
def save(self, *args, **kwargs):
if not self.value_sort:
2011-01-05 13:06:09 +00:00
self.value_sort = utils.sort_string(self.value)
super(Facet, self).save(*args, **kwargs)
class Stream(models.Model):
class Meta:
unique_together = ("item", "profile")
item = models.ForeignKey(Item, related_name='streams')
profile = models.CharField(max_length=255, default='96p.webm')
video = models.FileField(default=None, blank=True, upload_to=lambda f, x: f.path())
source = models.ForeignKey('Stream', related_name='derivatives', default=None, null=True)
available = models.BooleanField(default=False)
info = fields.DictField(default={})
def __unicode__(self):
return u"%s/%s" % (self.item, self.profile)
def path(self):
return self.item.path(self.profile)
def extract_derivatives(self):
for profile in settings.VIDEO_DERIVATIVES:
derivative, created = Stream.objects.get_or_create(profile=profile, item=self.item)
if created:
derivative.source = self
derivative.video.name = self.video.name.replace(self.profile, profile)
derivative.encode()
derivative.save()
return True
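    # Each derivative profile in settings.VIDEO_DERIVATIVES gets its own Stream row
    # pointing back at this source stream; encode() then transcodes the source file
    # into the derivative's path via archive.extract.stream().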
def encode(self):
if self.source:
video = self.source.video.path
target = self.video.path
profile = self.profile
info = ox.avinfo(video)
if extract.stream(video, target, profile, info):
self.available=True
self.save()
def save(self, *args, **kwargs):
if self.video and not self.info:
self.info = ox.avinfo(self.video.path)
super(Stream, self).save(*args, **kwargs)
class PosterUrl(models.Model):
class Meta:
unique_together = ("item", "service", "url")
ordering = ('-height', )
item = models.ForeignKey(Item, related_name='poster_urls')
url = models.CharField(max_length=1024)
service = models.CharField(max_length=1024)
width = models.IntegerField(default=80)
height = models.IntegerField(default=128)
def __unicode__(self):
return u'%s %s %dx%d' % (unicode(self.item), self.service, self.width, self.height)