2009-06-08 16:08:59 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# vi:si:et:sw=4:sts=4:ts=4
|
2010-08-07 14:31:20 +00:00
|
|
|
from __future__ import division, with_statement
|
|
|
|
|
2010-02-03 12:05:38 +00:00
|
|
|
from datetime import datetime
|
2009-06-08 16:08:59 +00:00
|
|
|
import os.path
|
2011-08-05 15:50:18 +00:00
|
|
|
import re
|
2010-09-03 13:28:44 +00:00
|
|
|
import subprocess
|
2010-09-10 14:09:41 +00:00
|
|
|
from glob import glob
|
2011-04-18 18:50:31 +00:00
|
|
|
import shutil
|
2011-01-28 08:48:38 +00:00
|
|
|
import uuid
|
|
|
|
import unicodedata
|
2011-07-03 16:21:27 +00:00
|
|
|
from urllib import quote
|
2009-10-04 22:00:08 +00:00
|
|
|
|
2009-06-08 16:08:59 +00:00
|
|
|
from django.db import models
|
2011-08-20 10:06:18 +00:00
|
|
|
from django.db.models import Count, Q, Sum
|
2009-12-31 15:04:32 +00:00
|
|
|
from django.core.files.base import ContentFile
|
2010-01-22 23:09:07 +00:00
|
|
|
from django.utils import simplejson as json
|
2010-02-16 12:41:57 +00:00
|
|
|
from django.conf import settings
|
2011-01-21 09:31:49 +00:00
|
|
|
from django.contrib.auth.models import User, Group
|
2011-04-18 18:50:31 +00:00
|
|
|
from django.db.models.signals import pre_delete
|
2011-07-03 16:21:27 +00:00
|
|
|
from django.contrib.sites.models import Site
|
2010-01-22 23:09:07 +00:00
|
|
|
|
2010-07-07 22:46:41 +00:00
|
|
|
import ox
|
2011-01-01 11:44:42 +00:00
|
|
|
from ox.django import fields
|
|
|
|
from ox.normalize import canonicalTitle
|
2010-12-25 13:45:19 +00:00
|
|
|
import ox.web.imdb
|
2011-08-16 15:06:40 +00:00
|
|
|
import ox.image
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2009-08-01 14:14:54 +00:00
|
|
|
import managers
|
2010-02-03 12:05:38 +00:00
|
|
|
import utils
|
2010-11-27 12:12:53 +00:00
|
|
|
import tasks
|
2011-08-20 17:53:26 +00:00
|
|
|
from .timelines import join_timelines
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2011-08-20 17:53:26 +00:00
|
|
|
from archive import extract
|
2011-01-26 13:25:26 +00:00
|
|
|
from annotation.models import Annotation, Layer
|
2011-08-23 17:39:34 +00:00
|
|
|
import archive.models
|
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
from person.models import get_name_sort
|
2010-11-14 21:49:16 +00:00
|
|
|
|
|
|
|
|
2011-08-20 09:04:19 +00:00
|
|
|
def get_item(info, user=None, async=False):
    '''
    Look up -- or create -- an Item matching the given metadata.

    info   dict that may contain:
           imdbId, oxdbId, title, director, year,
           episode_title, series_title, season, episode
    user   User that becomes the owner of a newly created item.
    async  if True, external metadata is fetched via a celery task
           instead of synchronously (only relevant on the imdbId path).

    Returns the (possibly newly created) Item.
    '''
    if settings.USE_IMDB:
        # --- site is configured to use IMDb ids as item ids ---
        if 'imdbId' in info and info['imdbId']:
            # direct lookup by imdb id; create a stub on miss
            try:
                item = Item.objects.get(itemId=info['imdbId'])
            except Item.DoesNotExist:
                item = Item(itemId=info['imdbId'])
                # seed external_data so the item has a title/director
                # before the real external update runs
                if 'title' in info and 'director' in info:
                    item.external_data = {
                        'title': info['title'],
                        'director': info['director'],
                        'year': info.get('year', '')
                    }
                item.user = user
                item.oxdbId = item.itemId
                item.save()
                # fetch full metadata from IMDb, async or inline
                if async:
                    tasks.update_external.delay(item.itemId)
                else:
                    item.update_external()
        else:
            # no imdb id: match on the find-index by title/director/year
            q = Item.objects.all()
            for key in ('title', 'director', 'year'):
                if key in info and info[key]:
                    # list values (e.g. multiple directors) are stored
                    # newline-joined in the find table
                    if isinstance(info[key], list):
                        q = q.filter(find__key=key, find__value='\n'.join(info[key]))
                    else:
                        q = q.filter(find__key=key, find__value=info[key])
            if q.count() >= 1:
                # at least one match: take the first
                item = q[0]
            elif not 'oxdbId' in info:
                # no match and no oxdb id supplied: create a fresh item
                item = Item()
                item.data = {
                    'title': info['title'],
                    'director': info['director'],
                    'year': info.get('year', '')
                }
                for key in ('episode_title', 'series_title', 'season', 'episode'):
                    if key in info and info[key]:
                        item.data[key] = info[key]
                item.oxdbId = item.oxdb_id()
                item.save()
            else:
                # an oxdb id was supplied: look it up, create on miss
                try:
                    item = Item.objects.get(itemId=info['oxdbId'])
                except Item.DoesNotExist:
                    item = Item()
                    item.data = {
                        'title': info['title'],
                        'director': info['director'],
                        'year': info.get('year', '')
                    }
                    item.itemId = info['oxdbId']

                    for key in ('episode_title', 'series_title', 'season', 'episode'):
                        if key in info and info[key]:
                            item.data[key] = info[key]
                    # the metadata may hash to an already existing item;
                    # in that case reuse it instead of saving a duplicate
                    try:
                        existing_item = Item.objects.get(oxdbId=item.oxdb_id())
                        item = existing_item
                    except Item.DoesNotExist:
                        item.save()
    else:
        # --- local-only site: match by title alone ---
        qs = Item.objects.filter(find__key='title', find__value=info['title'])
        if qs.count() == 1:
            item = qs[0]
        else:
            # zero or ambiguous matches: create a new item
            item = Item()
            item.data = {
                'title': info['title']
            }
            item.user = user
            item.save()
    return item
|
2010-01-16 20:42:11 +00:00
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-23 16:01:48 +00:00
|
|
|
class Item(models.Model):
    # timestamps; `published` is settable (defaults to creation time)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    published = models.DateTimeField(default=datetime.now, editable=False)

    # ownership / group-based access control
    user = models.ForeignKey(User, null=True, related_name='items')
    groups = models.ManyToManyField(Group, blank=True, related_name='items')

    #while metadata is updated, files are set to rendered=False
    rendered = models.BooleanField(default=False, db_index=True)
    #should be set based on user
    level = models.IntegerField(default=4, db_index=True)

    # public id (imdb id, oxdb hash or uuid depending on configuration)
    itemId = models.CharField(max_length=128, unique=True, blank=True)
    # hash id derived from title/director/year metadata, see oxdb_id()
    oxdbId = models.CharField(max_length=42, unique=True, blank=True, null=True)
    # metadata fetched from external sources (e.g. IMDb)
    external_data = fields.DictField(default={}, editable=False)
    # locally edited metadata; takes precedence over external_data in get()
    data = fields.DictField(default={}, editable=False)
    # cached output of get_json(), refreshed on save()
    json = fields.DictField(default={}, editable=False)
    poster = models.ImageField(default=None, blank=True,
        upload_to=lambda i, x: i.path("poster.jpg"))
    poster_source = models.TextField(blank=True)
    poster_height = models.IntegerField(default=0)
    poster_width = models.IntegerField(default=0)
    # timeline position used for the poster frame; -1 means unset
    poster_frame = models.FloatField(default=-1)

    icon = models.ImageField(default=None, blank=True,
        upload_to=lambda i, x: i.path("icon.jpg"))

    torrent = models.FileField(default=None, blank=True,
        upload_to=lambda i, x: i.path('torrent.torrent'))
    stream_info = fields.DictField(default={}, editable=False)

    #stream related fields
    stream_aspect = models.FloatField(default=4/3)

    objects = managers.ItemManager()
|
2009-08-01 14:14:54 +00:00
|
|
|
|
2009-10-04 22:00:08 +00:00
|
|
|
def get(self, key, default=None):
|
2010-11-14 21:49:16 +00:00
|
|
|
if self.data and key in self.data:
|
|
|
|
return self.data[key]
|
|
|
|
if self.external_data and key in self.external_data:
|
|
|
|
return self.external_data[key]
|
2009-10-04 22:00:08 +00:00
|
|
|
return default
|
|
|
|
|
2011-01-24 13:44:38 +00:00
|
|
|
def access(self, user):
|
2011-09-16 17:17:49 +00:00
|
|
|
if user.is_anonymous():
|
|
|
|
level = 'guest'
|
|
|
|
else:
|
|
|
|
level = user.get_profile().get_level()
|
|
|
|
allowed_level = settings.CONFIG['capabilities']['canSeeItem'][level]
|
2011-09-28 12:47:13 +00:00
|
|
|
if self.level <= allowed_level:
|
2011-01-24 13:44:38 +00:00
|
|
|
return True
|
2011-01-25 14:39:03 +00:00
|
|
|
elif user.is_authenticated() and \
|
2011-09-06 12:06:59 +00:00
|
|
|
(self.user == user or \
|
2011-01-25 14:39:03 +00:00
|
|
|
self.groups.filter(id__in=user.groups.all()).count() > 0):
|
|
|
|
return True
|
2011-01-24 13:44:38 +00:00
|
|
|
return False
|
|
|
|
|
2010-01-30 06:56:10 +00:00
|
|
|
def editable(self, user):
|
2011-06-06 18:38:16 +00:00
|
|
|
if user.is_anonymous():
|
|
|
|
return False
|
2011-01-21 09:31:49 +00:00
|
|
|
if user.is_staff or \
|
|
|
|
self.user == user or \
|
|
|
|
self.groups.filter(id__in=user.groups.all()).count() > 0:
|
|
|
|
return True
|
2010-01-27 06:43:17 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
    def edit(self, data):
        """Apply a dict of edits to the item's local data and save.

        'id' is stripped, 'groups' replaces the item's group set,
        everything else is written into self.data verbatim.
        """
        #FIXME: how to map the keys to the right place to write them to?
        if 'id' in data:
            #FIXME: check if id is valid and exists and move/merge items accordingly
            del data['id']
        if 'groups' in data:
            groups = data.pop('groups')
            # NOTE(review): .delete() on the related queryset deletes the
            # Group rows themselves, not just the m2m relation -- confirm
            # this is intended (remove() would only unlink).
            self.groups.exclude(name__in=groups).delete()
            for g in groups:
                group, created = Group.objects.get_or_create(name=g)
                self.groups.add(group)
        # remaining keys go straight into the editable data dict
        for key in data:
            if key != 'id':
                self.data[key] = data[key]
        self.save()
|
|
|
|
|
2009-10-04 22:00:08 +00:00
|
|
|
def reviews(self):
|
2010-07-12 14:56:14 +00:00
|
|
|
reviews = self.get('reviews', [])
|
2011-08-05 15:50:18 +00:00
|
|
|
_reviews = []
|
2010-07-12 14:56:14 +00:00
|
|
|
for r in reviews:
|
2010-12-22 07:45:37 +00:00
|
|
|
for url in settings.REVIEW_WHITELIST:
|
|
|
|
if url in r[0]:
|
2011-08-05 15:50:18 +00:00
|
|
|
_reviews.append({
|
|
|
|
'source': settings.REVIEW_WHITELIST[url],
|
|
|
|
'url': r[0]
|
|
|
|
})
|
2010-07-12 14:56:14 +00:00
|
|
|
return _reviews
|
|
|
|
|
2011-01-16 13:28:57 +00:00
|
|
|
    def update_external(self):
        """Fetch metadata from IMDb and normalize it into external_data.

        Only runs when itemId looks like an imdb id (7 characters).
        Renames/reshapes several keys from the scraper's format to the
        site's format, then saves the item.
        """
        if len(self.itemId) == 7:
            data = ox.web.imdb.Imdb(self.itemId)
            #FIXME: all this should be in ox.web.imdb.Imdb
            # singularize plural keys: 'directors' -> 'director', etc.
            for key in ('directors', 'writers', 'editors', 'producers',
                        'cinematographers', 'languages', 'genres', 'keywords',
                        'episode_directors'):
                if key in data:
                    data[key[:-1]] = data.pop(key)
            if 'countries' in data:
                data['country'] = data.pop('countries')
            if 'release date' in data:
                data['releasedate'] = data.pop('release date')
                # multiple release dates: keep the earliest
                if isinstance(data['releasedate'], list):
                    data['releasedate'] = min(data['releasedate'])
            if 'plot' in data:
                data['summary'] = data.pop('plot')
            if 'cast' in data:
                # a single [actor, character] pair comes in flat; wrap it
                if isinstance(data['cast'][0], basestring):
                    data['cast'] = [data['cast']]
                data['actor'] = [c[0] for c in data['cast']]
                # normalize pairs into dicts
                data['cast'] = map(lambda x: {'actor': x[0], 'character': x[1]}, data['cast'])
            if 'trivia' in data:
                # rewrite imdb-internal /name/ and /title/ links into
                # local search links
                def fix_links(t):
                    def fix_names(m):
                        return '<a href="/?find=name:%s">%s</a>' % (
                            quote(m.group(2).encode('utf-8')), m.group(2)
                        )
                    t = re.sub('<a href="(/name/.*?/)">(.*?)</a>', fix_names, t)
                    def fix_titles(m):
                        return '<a href="/?find=title:%s">%s</a>' % (
                            quote(m.group(2).encode('utf-8')), m.group(2)
                        )
                    t = re.sub('<a href="(/title/.*?/)">(.*?)</a>', fix_titles, t)
                    return t
                data['trivia'] = [fix_links(t) for t in data['trivia']]
            if 'aspectratio' in data:
                data['aspectRatio'] = data.pop('aspectratio')
            #filter reviews
            self.external_data = data
            self.save()
|
|
|
|
|
2011-09-30 17:37:41 +00:00
|
|
|
def expand_connections(self):
|
|
|
|
c = self.get('connections')
|
|
|
|
connections = {}
|
|
|
|
if c:
|
|
|
|
for t in c:
|
|
|
|
connections[t] = [{'item': l.itemId, 'title': l.get('title')}
|
|
|
|
for l in Item.objects.filter(itemId__in=c[t])]
|
|
|
|
connections[t].sort(key=lambda a: c[t].index(a['item']))
|
|
|
|
if not connections[t]:
|
|
|
|
del connections[t]
|
|
|
|
return connections
|
|
|
|
|
2009-06-08 16:08:59 +00:00
|
|
|
def __unicode__(self):
|
2010-09-10 14:09:41 +00:00
|
|
|
year = self.get('year')
|
|
|
|
if year:
|
2011-01-28 08:48:38 +00:00
|
|
|
return u'%s (%s)' % (self.get('title', 'Untitled'), self.get('year'))
|
|
|
|
return self.get('title', u'Untitled')
|
2009-10-04 22:00:08 +00:00
|
|
|
|
2010-09-14 14:10:37 +00:00
|
|
|
def get_absolute_url(self):
|
2010-11-30 23:33:42 +00:00
|
|
|
return '/%s' % self.itemId
|
2010-09-14 14:10:37 +00:00
|
|
|
|
2009-06-08 16:08:59 +00:00
|
|
|
    def save(self, *args, **kwargs):
        """Persist the item, maintaining derived state.

        On first save an itemId is assigned (uuid, or base32 of the
        database id on non-IMDb sites). Also refreshes oxdbId, poster
        dimensions, the find/sort/facet index tables and the cached
        json. Note the model is written twice on first save: once to
        obtain self.id, once at the end with all derived fields.
        """
        if not self.id:
            if not self.itemId:
                # temporary unique id so the first INSERT succeeds
                self.itemId = str(uuid.uuid1())
            super(Item, self).save(*args, **kwargs)
            if not settings.USE_IMDB:
                # local sites use the base32-encoded database id
                self.itemId = ox.to32(self.id)

        oxdbId = self.oxdb_id()
        if oxdbId:
            self.oxdbId = oxdbId

        #id changed, what about existing item with new id?
        # on IMDb sites, items without an imdb id (len != 7) use the
        # metadata hash as their public id
        if settings.USE_IMDB and len(self.itemId) != 7 and self.oxdbId != self.itemId:
            self.itemId = self.oxdbId
            #FIXME: move files to new id here

        # cache poster dimensions; fall back to the placeholder size
        if self.poster:
            self.poster_height = self.poster.height
            self.poster_width = self.poster.width
        else:
            self.poster_height = 128
            self.poster_width = 80
        self.update_find()
        self.update_sort()
        self.update_facets()
        update_poster = False
        if not settings.USE_IMDB:
            # default the poster frame to the middle of the video
            if self.poster_frame == -1 and self.sort.duration:
                self.poster_frame = self.sort.duration/2
                update_poster = True
            # derive runtime from the actual video duration if unset
            if not self.get('runtime') and self.sort.duration:
                self.data['runtime'] = self.sort.duration
                self.update_sort()
        self.json = self.get_json()
        super(Item, self).save(*args, **kwargs)
        if update_poster:
            # regenerate the poster asynchronously (frame changed)
            tasks.update_poster.delay(self.itemId)
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2011-04-18 18:50:31 +00:00
|
|
|
def delete_files(self):
|
2011-05-25 19:11:08 +00:00
|
|
|
path = os.path.join(settings.MEDIA_ROOT, self.path())
|
|
|
|
if os.path.exists(path):
|
|
|
|
shutil.rmtree(path)
|
2011-04-18 18:50:31 +00:00
|
|
|
|
2010-09-12 14:23:23 +00:00
|
|
|
    def delete(self, *args, **kwargs):
        """Delete the item, removing its media files from disk first."""
        self.delete_files()
        super(Item, self).delete(*args, **kwargs)
|
2010-09-12 14:23:23 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
def merge_with(self, other):
|
2010-09-12 14:23:23 +00:00
|
|
|
'''
|
|
|
|
move all related tables to other and delete self
|
|
|
|
'''
|
|
|
|
for l in self.lists.all():
|
2011-05-25 19:11:08 +00:00
|
|
|
l.remove(self)
|
2010-09-23 16:01:48 +00:00
|
|
|
if l.items.filter(id=other.id) == 0:
|
2011-05-25 19:11:08 +00:00
|
|
|
l.add(other)
|
2011-01-03 20:27:40 +00:00
|
|
|
#FIXME: should this really happen for annotations?
|
|
|
|
for a in self.annotations.all():
|
|
|
|
a.item = other
|
|
|
|
|
2010-09-12 14:23:23 +00:00
|
|
|
if hasattr(self, 'files'):
|
|
|
|
for f in self.files.all():
|
2010-09-23 16:01:48 +00:00
|
|
|
f.item = other
|
2010-09-12 14:23:23 +00:00
|
|
|
f.save()
|
|
|
|
self.delete()
|
|
|
|
other.save()
|
2011-05-25 19:11:08 +00:00
|
|
|
#FIXME: update poster, stills and streams after this
|
2010-09-12 14:23:23 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
def get_posters(self):
|
2011-07-30 10:51:23 +00:00
|
|
|
url = self.prefered_poster_url()
|
2011-06-06 18:38:16 +00:00
|
|
|
index = []
|
2011-07-30 13:51:14 +00:00
|
|
|
services = [p['service']
|
|
|
|
for p in self.poster_urls.values("service")
|
|
|
|
.annotate(Count("id")).order_by()]
|
|
|
|
for service in settings.POSTER_PRECEDENCE:
|
|
|
|
if service in services:
|
2011-06-06 18:38:16 +00:00
|
|
|
index.append(service)
|
2011-07-30 13:51:14 +00:00
|
|
|
for service in services:
|
2011-06-06 18:38:16 +00:00
|
|
|
if service not in index:
|
|
|
|
index.append(service)
|
|
|
|
if settings.URL not in index:
|
|
|
|
index.append(settings.URL)
|
2011-07-30 13:51:14 +00:00
|
|
|
|
2011-06-06 18:58:51 +00:00
|
|
|
posters = []
|
|
|
|
|
2011-08-23 17:39:34 +00:00
|
|
|
poster = self.path('siteposter.jpg')
|
2011-06-06 18:58:51 +00:00
|
|
|
poster = os.path.abspath(os.path.join(settings.MEDIA_ROOT, poster))
|
|
|
|
if os.path.exists(poster):
|
|
|
|
posters.append({
|
2011-08-23 17:39:34 +00:00
|
|
|
'url': '/%s/siteposter.jpg' % self.itemId,
|
2011-07-26 17:22:23 +00:00
|
|
|
'width': 640,
|
|
|
|
'height': 1024,
|
2011-07-30 11:04:30 +00:00
|
|
|
'source': settings.URL,
|
2011-07-30 13:51:14 +00:00
|
|
|
'selected': url == None,
|
2011-06-06 18:38:16 +00:00
|
|
|
'index': index.index(settings.URL)
|
2011-06-06 18:58:51 +00:00
|
|
|
})
|
|
|
|
|
2011-07-30 10:51:23 +00:00
|
|
|
got = {}
|
|
|
|
for p in self.poster_urls.all().order_by('-height'):
|
|
|
|
if p.service not in got:
|
|
|
|
got[p.service] = 1
|
|
|
|
posters.append({
|
|
|
|
'url': p.url,
|
|
|
|
'width': p.width,
|
|
|
|
'height': p.height,
|
2011-07-30 11:04:30 +00:00
|
|
|
'source': p.service,
|
2011-07-30 13:51:14 +00:00
|
|
|
'selected': p.url == url,
|
2011-06-06 18:38:16 +00:00
|
|
|
'index': index.index(p.service)
|
2011-07-30 10:51:23 +00:00
|
|
|
})
|
2011-06-06 18:38:16 +00:00
|
|
|
posters.sort(key=lambda a: a['index'])
|
2010-09-10 15:12:22 +00:00
|
|
|
return posters
|
|
|
|
|
2011-06-06 18:58:51 +00:00
|
|
|
def get_frames(self):
|
|
|
|
frames = []
|
2011-08-04 17:53:43 +00:00
|
|
|
pframes = self.poster_frames()
|
|
|
|
if pframes:
|
2011-06-06 18:58:51 +00:00
|
|
|
pos = self.poster_frame
|
|
|
|
if pos < 0:
|
2011-08-04 17:55:41 +00:00
|
|
|
pos = int(len(pframes) / 2)
|
2011-06-06 18:58:51 +00:00
|
|
|
p = 0
|
2011-08-04 17:53:43 +00:00
|
|
|
for f in pframes:
|
2011-06-06 18:58:51 +00:00
|
|
|
frames.append({
|
|
|
|
'index': p,
|
|
|
|
'position': f['position'],
|
|
|
|
'selected': p == pos,
|
2011-08-06 18:00:15 +00:00
|
|
|
'url': '/%s/frameposter%d.jpg' %(self.itemId, p),
|
2011-06-06 18:58:51 +00:00
|
|
|
'height': f['height'],
|
|
|
|
'width': f['width']
|
|
|
|
})
|
|
|
|
p += 1
|
|
|
|
return frames
|
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
def get_stream(self):
|
2011-08-18 19:37:12 +00:00
|
|
|
for s in self.streams():
|
|
|
|
return s.json()
|
2010-09-10 15:12:22 +00:00
|
|
|
|
2011-02-11 10:21:25 +00:00
|
|
|
def get_layers(self, user=None):
|
2010-09-18 14:44:35 +00:00
|
|
|
layers = {}
|
2011-01-24 13:44:38 +00:00
|
|
|
for l in Layer.objects.all():
|
|
|
|
ll = layers.setdefault(l.name, [])
|
2011-02-11 10:21:25 +00:00
|
|
|
qs = Annotation.objects.filter(layer=l, item=self)
|
|
|
|
if l.private:
|
2011-08-08 15:59:11 +00:00
|
|
|
if user and user.is_anonymous():
|
2011-02-11 10:21:25 +00:00
|
|
|
user = None
|
|
|
|
qs = qs.filter(user=user)
|
|
|
|
for a in qs.order_by('start'):
|
2011-01-24 13:44:38 +00:00
|
|
|
ll.append(a.json())
|
2010-09-18 14:44:35 +00:00
|
|
|
return layers
|
|
|
|
|
2011-07-30 12:23:06 +00:00
|
|
|
    def get_json(self, keys=None):
        """Build the item's json representation.

        Merges external and edited data, fills configured item keys
        from get()/the sort table, normalizes reviews/cast/connections,
        and adds poster and stream derived values. If `keys` is given,
        only those keys are returned.
        """
        i = {
            'id': self.itemId,
            'rendered': self.rendered
        }
        # edited data takes precedence over external data
        i.update(self.external_data)
        i.update(self.data)
        for k in settings.CONFIG['itemKeys']:
            key = k['id']
            if not keys or key in keys:
                if key not in i:
                    value = self.get(key)
                    #also get values from sort table, i.e. numberof values
                    if not value and self.sort and hasattr(self.sort, key):
                        value = getattr(self.sort, key)
                    if value:
                        i[key] = value

        # replace raw reviews with the whitelisted subset
        if 'reviews' in i:
            i['reviews'] = self.reviews()
            if not i['reviews']:
                del i['reviews']
        # normalize cast into [{'actor': ..., 'character': ...}]
        # NOTE(review): assumes a present 'cast' is non-empty -- an
        # empty list would raise IndexError here
        if 'cast' in i and isinstance(i['cast'][0], basestring):
            i['cast'] = [i['cast']]
        if 'cast' in i and isinstance(i['cast'][0], list):
            i['cast'] = map(lambda x: {'actor': x[0], 'character': x[1]}, i['cast'])

        if 'connections' in i:
            i['connections'] = self.expand_connections()

        # true division via `from __future__ import division`
        if not keys or 'posterRatio' in keys:
            i['posterRatio'] = self.poster_width / self.poster_height

        # stream-derived values
        streams = self.streams()
        i['durations'] = [s.duration for s in streams]
        i['duration'] = sum(i['durations'])
        i['parts'] = len(i['durations'])
        if i['parts']:
            i['videoRatio'] = streams[0].aspect_ratio

        #only needed by admins
        if keys and 'posters' in keys:
            i['posters'] = self.get_posters()

        frames = self.get_frames()
        if keys and 'frames' in keys:
            i['frames'] = frames

        # poster frame position: selected frame wins, otherwise the
        # explicitly stored poster_frame (if set)
        selected_frame = filter(lambda f: f['selected'], frames)
        if selected_frame:
            i['posterFrame'] = selected_frame[0]['position']
        elif self.poster_frame != -1.0:
            i['posterFrame'] = self.poster_frame

        # restrict the result to the requested keys, if any
        if keys:
            info = {}
            for key in keys:
                if key in i:
                    info[key] = i[key]
            return info
        return i
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2009-08-16 12:23:29 +00:00
|
|
|
|
2010-11-28 16:03:23 +00:00
|
|
|
def oxdb_id(self):
|
2011-01-28 08:48:38 +00:00
|
|
|
if not settings.USE_IMDB:
|
|
|
|
return self.itemId
|
2011-04-22 23:34:01 +00:00
|
|
|
if not self.get('title') and not self.get('director'):
|
2011-02-21 10:25:13 +00:00
|
|
|
return None
|
2011-01-03 16:02:33 +00:00
|
|
|
return utils.oxdb_id(self.get('title', ''), self.get('director', []), str(self.get('year', '')),
|
2010-11-28 16:03:23 +00:00
|
|
|
self.get('season', ''), self.get('episode', ''),
|
2011-01-04 06:50:52 +00:00
|
|
|
self.get('episode_title', ''), self.get('episode_director', []), self.get('episode_year', ''))
|
2010-02-16 12:41:57 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
'''
|
|
|
|
Search related functions
|
|
|
|
'''
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
    def update_find(self):
        """Rebuild this item's rows in the find (full-text) table.

        Every configured findable key gets one ItemFind row whose value
        is the (newline-joined) searchable text; empty values delete
        the row. Facet keys are handled in a second pass.
        """

        def save(key, value):
            # upsert-or-delete a single find row
            f, created = ItemFind.objects.get_or_create(item=self, key=key)
            if value not in ('', None):
                if isinstance(value, basestring):
                    value = value.strip()
                f.value = value
                f.save()
            else:
                f.delete()

        for key in settings.CONFIG['itemKeys']:
            if key.get('find'):
                i = key['id']
                if i == 'title':
                    # index both the display and the original title
                    save(i, u'\n'.join([self.get('title', 'Untitled'),
                                        self.get('original_title', '')]))
                elif i == 'filename':
                    # index the paths of all attached files
                    save(i,
                        '\n'.join([os.path.join(f.folder, f.name) for f in self.files.all()]))
                elif key['type'] == 'layer':
                    # index all annotation values of that layer, in order
                    qs = Annotation.objects.filter(layer__name=i, item=self).order_by('start')
                    save(i, '\n'.join([l.value for l in qs]))
                elif i != '*' and i not in self.facet_keys:
                    # plain metadata key; facet keys are handled below
                    value = self.get(i)
                    if isinstance(value, list):
                        value = u'\n'.join(value)
                    save(i, value)

        for key in self.facet_keys:
            if key == 'character':
                # characters come from the cast entries
                values = self.get('cast', '')
                if values:
                    # normalize flat pair / list-of-pairs forms into dicts
                    if isinstance(values[0], basestring):
                        values = [values]
                    if isinstance(values[0], list):
                        values = map(lambda x: {'actor': x[0], 'character': x[1]}, values)
                    values = [i['character'] for i in values]
            elif key == 'name':
                # 'name' aggregates every person-sorted key's values
                values = []
                for k in map(lambda x: x['id'],
                             filter(lambda x: x.get('sort') == 'person',
                                    settings.CONFIG['itemKeys'])):
                    values += self.get(k, [])
            else:
                values = self.get(key, '')
            if isinstance(values, list):
                save(key, '\n'.join(values))
            else:
                save(key, values)
|
2011-01-05 13:06:09 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
def update_sort(self):
|
2009-10-04 22:00:08 +00:00
|
|
|
try:
|
|
|
|
s = self.sort
|
2010-09-23 16:01:48 +00:00
|
|
|
except ItemSort.DoesNotExist:
|
|
|
|
s = ItemSort(item=self)
|
2009-08-16 12:23:29 +00:00
|
|
|
|
2010-07-12 14:56:14 +00:00
|
|
|
def sortNames(values):
|
2011-01-03 08:45:31 +00:00
|
|
|
sort_value = u''
|
2010-07-12 14:56:14 +00:00
|
|
|
if values:
|
2011-01-03 08:45:31 +00:00
|
|
|
sort_value = u'; '.join([get_name_sort(name) for name in values])
|
2009-08-16 12:23:29 +00:00
|
|
|
if not sort_value:
|
2011-01-03 08:45:31 +00:00
|
|
|
sort_value = u''
|
2009-08-16 12:23:29 +00:00
|
|
|
return sort_value
|
|
|
|
|
2011-01-25 14:39:03 +00:00
|
|
|
def set_value(s, name, value):
|
|
|
|
if not value:
|
|
|
|
value = None
|
|
|
|
setattr(s, name, value)
|
|
|
|
|
2011-01-03 14:14:54 +00:00
|
|
|
base_keys = (
|
|
|
|
'id',
|
|
|
|
'aspectratio',
|
|
|
|
'duration',
|
2011-01-25 14:39:03 +00:00
|
|
|
'hue',
|
2011-01-03 14:14:54 +00:00
|
|
|
'saturation',
|
2011-01-25 14:39:03 +00:00
|
|
|
'lightness',
|
2011-01-03 14:14:54 +00:00
|
|
|
'volume',
|
|
|
|
'clips',
|
|
|
|
'cuts',
|
|
|
|
'cutsperminute',
|
|
|
|
'words',
|
|
|
|
'wordsperminute',
|
|
|
|
'resolution',
|
|
|
|
'pixels',
|
|
|
|
'size',
|
|
|
|
'bitrate',
|
2011-01-25 14:39:03 +00:00
|
|
|
'numberoffiles',
|
2011-08-18 12:01:37 +00:00
|
|
|
'parts',
|
2011-01-03 14:14:54 +00:00
|
|
|
'published',
|
|
|
|
'modified',
|
2011-01-25 14:39:03 +00:00
|
|
|
'popularity',
|
2011-01-03 14:14:54 +00:00
|
|
|
)
|
2011-01-24 09:38:46 +00:00
|
|
|
|
2011-09-06 12:06:59 +00:00
|
|
|
for key in filter(lambda k: 'columnWidth' in k, settings.CONFIG['itemKeys']):
|
2011-01-03 14:14:54 +00:00
|
|
|
name = key['id']
|
2011-01-24 09:38:46 +00:00
|
|
|
source = name
|
2011-01-25 14:39:03 +00:00
|
|
|
sort_type = key.get('sort', key['type'])
|
2011-01-24 09:38:46 +00:00
|
|
|
if 'value' in key:
|
2011-01-25 14:39:03 +00:00
|
|
|
if 'layer' in key['value']:
|
|
|
|
continue
|
2011-01-24 09:38:46 +00:00
|
|
|
source = key['value']['key']
|
|
|
|
sort_type = key['value'].get('type', sort_type)
|
2011-01-05 13:06:09 +00:00
|
|
|
|
2011-01-03 14:14:54 +00:00
|
|
|
if name not in base_keys:
|
2011-01-24 09:38:46 +00:00
|
|
|
if sort_type == 'title':
|
2011-01-28 08:48:38 +00:00
|
|
|
value = utils.sort_title(canonicalTitle(self.get(source, u'Untitled')))
|
2011-01-03 19:45:56 +00:00
|
|
|
value = utils.sort_string(value)
|
2011-01-24 22:46:16 +00:00
|
|
|
set_value(s, name, value)
|
2011-01-24 09:38:46 +00:00
|
|
|
elif sort_type == 'person':
|
2011-01-05 13:06:09 +00:00
|
|
|
value = sortNames(self.get(source, []))
|
2011-01-03 19:45:56 +00:00
|
|
|
value = utils.sort_string(value)[:955]
|
2011-01-24 22:46:16 +00:00
|
|
|
set_value(s, name, value)
|
2011-01-24 09:38:46 +00:00
|
|
|
elif sort_type == 'string':
|
2011-01-05 13:06:09 +00:00
|
|
|
value = self.get(source, u'')
|
2011-01-03 14:14:54 +00:00
|
|
|
if isinstance(value, list):
|
2011-01-03 08:45:31 +00:00
|
|
|
value = u','.join(value)
|
2011-01-03 19:45:56 +00:00
|
|
|
value = utils.sort_string(value)[:955]
|
2011-01-24 22:46:16 +00:00
|
|
|
set_value(s, name, value)
|
|
|
|
elif sort_type in ('length', 'integer', 'float'):
|
2011-01-05 13:06:09 +00:00
|
|
|
#can be length of strings or length of arrays, i.e. keywords
|
2011-04-22 23:34:01 +00:00
|
|
|
value = self.get(source)
|
2011-01-03 14:14:54 +00:00
|
|
|
if isinstance(value, list):
|
|
|
|
value = len(value)
|
2011-01-24 22:46:16 +00:00
|
|
|
set_value(s, name, value)
|
2011-01-24 09:38:46 +00:00
|
|
|
elif sort_type == 'words':
|
2011-04-22 23:34:01 +00:00
|
|
|
value = self.get(source)
|
2011-01-03 14:14:54 +00:00
|
|
|
if isinstance(value, list):
|
|
|
|
value = '\n'.join(value)
|
|
|
|
if value:
|
|
|
|
value = len(value.split(' '))
|
2011-01-24 22:46:16 +00:00
|
|
|
set_value(s, name, value)
|
2011-01-24 09:38:46 +00:00
|
|
|
elif sort_type == 'year':
|
2011-04-22 23:34:01 +00:00
|
|
|
value = self.get(source)
|
2011-01-24 22:46:16 +00:00
|
|
|
set_value(s, name, value)
|
2011-01-24 09:38:46 +00:00
|
|
|
elif sort_type == 'date':
|
2011-04-22 23:34:01 +00:00
|
|
|
value = self.get(source)
|
2011-01-03 14:14:54 +00:00
|
|
|
if isinstance(value, basestring):
|
|
|
|
value = datetime.strptime(value, '%Y-%m-%d')
|
2011-01-24 22:46:16 +00:00
|
|
|
set_value(s, name, value)
|
2011-01-03 14:14:54 +00:00
|
|
|
|
|
|
|
#sort keys based on database, these will always be available
|
2011-01-03 20:30:50 +00:00
|
|
|
s.itemId = self.itemId.replace('0x', 'xx')
|
2011-01-03 14:14:54 +00:00
|
|
|
s.modified = self.modified
|
|
|
|
s.published = self.published
|
|
|
|
|
|
|
|
# sort values based on data from videos
|
2011-09-06 12:06:59 +00:00
|
|
|
s.words = sum([len(a.value.split()) for a in self.annotations.all()])
|
|
|
|
|
2011-01-03 14:14:54 +00:00
|
|
|
s.clips = 0 #FIXME: get clips from all layers or something
|
2011-08-23 17:39:34 +00:00
|
|
|
videos = self.files.filter(active=True, is_video=True)
|
|
|
|
if videos.count() > 0:
|
2010-12-22 18:42:35 +00:00
|
|
|
s.duration = sum([v.duration for v in videos])
|
2011-08-23 17:39:34 +00:00
|
|
|
v = videos[0]
|
|
|
|
s.resolution = v.width * v.height
|
2011-08-18 12:01:37 +00:00
|
|
|
s.aspectratio = float(utils.parse_decimal(v.display_aspect_ratio))
|
2010-12-22 18:42:35 +00:00
|
|
|
s.pixels = sum([v.pixels for v in videos])
|
2011-01-25 14:39:03 +00:00
|
|
|
s.numberoffiles = self.files.all().count()
|
2011-08-23 17:39:34 +00:00
|
|
|
s.parts = videos.count()
|
2010-12-22 18:42:35 +00:00
|
|
|
s.size = sum([v.size for v in videos]) #FIXME: only size of movies?
|
2011-09-30 13:46:26 +00:00
|
|
|
if s.duration:
|
|
|
|
s.bitrate = s.size * 8 / s.duration
|
|
|
|
else:
|
|
|
|
s.bitrate = 0
|
2011-01-03 14:14:54 +00:00
|
|
|
s.volume = 0
|
2010-12-22 18:45:41 +00:00
|
|
|
else:
|
2011-01-24 22:46:16 +00:00
|
|
|
s.duration = None
|
|
|
|
s.resolution = None
|
|
|
|
s.aspectratio = None
|
|
|
|
s.bitrate = None
|
|
|
|
s.pixels = None
|
|
|
|
s.filename = None
|
|
|
|
s.files = None
|
|
|
|
s.size = None
|
|
|
|
s.volume = None
|
2011-08-18 12:01:37 +00:00
|
|
|
s.parts = 0
|
2010-07-12 14:56:14 +00:00
|
|
|
|
2011-01-25 14:39:03 +00:00
|
|
|
if 'color' in self.data:
|
2011-06-27 13:39:35 +00:00
|
|
|
s.hue, s.saturation, s.lightness = self.data['color']
|
2011-01-25 14:39:03 +00:00
|
|
|
else:
|
|
|
|
s.hue = None
|
|
|
|
s.saturation = None
|
|
|
|
s.brighness = None
|
2010-12-23 15:01:53 +00:00
|
|
|
s.cuts = len(self.data.get('cuts', []))
|
2010-12-25 13:45:19 +00:00
|
|
|
if s.duration:
|
|
|
|
s.cutsperminute = s.cuts / (s.duration/60)
|
2011-09-06 12:06:59 +00:00
|
|
|
s.wordsperminute = s.words / (s.duration / 60)
|
2010-12-25 14:00:48 +00:00
|
|
|
else:
|
2011-01-24 22:46:16 +00:00
|
|
|
s.cutsperminute = None
|
2011-09-06 12:06:59 +00:00
|
|
|
s.wordsperminute = None
|
2011-01-24 02:35:52 +00:00
|
|
|
s.popularity = self.accessed.aggregate(Sum('accessed'))['accessed__sum']
|
2009-08-16 12:23:29 +00:00
|
|
|
s.save()
|
2009-06-08 16:08:59 +00:00
|
|
|
|
2011-01-05 13:06:09 +00:00
|
|
|
|
2010-11-27 12:12:53 +00:00
|
|
|
def update_facets(self):
    """Synchronize this item's Facet rows with its current metadata.

    For every facet key (plus 'title'), compute the current values, delete
    Facet rows whose value is no longer present, and create rows for new
    values. Person-type keys get a sortable name via get_name_sort().
    """
    #FIXME: what to do with Unkown Director, Year, Country etc.
    for key in self.facet_keys + ['title']:
        current_values = self.get(key, [])
        if key == 'title':
            # title is a scalar; wrap it in a list and add the original
            # title so both spellings are findable as facets
            if current_values:
                current_values = [current_values]
            else:
                current_values = []
            ot = self.get('original_title')
            if ot:
                current_values.append(ot)
        #FIXME: is there a better way to build name collection?
        if key == 'name':
            # 'name' aggregates the values of every person-sorted item key
            # declared in the site config
            current_values = []
            for k in map(lambda x: x['id'],
                         filter(lambda x: x.get('sort') == 'person',
                                settings.CONFIG['itemKeys'])):
                current_values += self.get(k, [])
        if not isinstance(current_values, list):
            current_values = [unicode(current_values)]
        # de-duplicate before diffing against the stored rows
        current_values = list(set(current_values))
        saved_values = [i.value for i in Facet.objects.filter(item=self, key=key)]
        removed_values = filter(lambda i: i not in current_values, saved_values)
        if removed_values:
            Facet.objects.filter(item=self, key=key, value__in=removed_values).delete()
        for value in current_values:
            if value not in saved_values:
                value_sort = value
                if key in self.person_keys:
                    value_sort = get_name_sort(value)
                f = Facet(key=key, value=value, value_sort=value_sort)
                f.item = self
                f.save()
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-12-04 01:26:49 +00:00
|
|
|
def path(self, name=''):
    """Return the media-relative path for this item.

    The itemId is fanned out into three two-character directory levels
    (to keep directory sizes manageable), followed by the id remainder
    and the optional file *name*.
    """
    item_id = self.itemId
    prefix_dirs = [item_id[i:i + 2] for i in (0, 2, 4)]
    return os.path.join('items', prefix_dirs[0], prefix_dirs[1],
                        prefix_dirs[2], item_id[6:], name)
|
2010-12-04 01:26:49 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
'''
|
|
|
|
Video related functions
|
|
|
|
'''
|
2011-08-06 18:17:22 +00:00
|
|
|
def frame(self, position, height=128):
    """Return the filesystem path of a frame image at *position* seconds.

    Walks the item's video streams in order, accumulating durations until
    the stream containing *position* is found, then extracts (and caches)
    a JPEG at the requested height. Returns None if extraction fails or
    *position* lies beyond the total duration (the loop then falls
    through without returning a path).
    """
    offset = 0
    streams = self.streams()
    for stream in streams:
        if stream.duration + offset < position:
            offset += stream.duration
        else:
            # position relative to the start of this stream
            position = position - offset
            # never upscale beyond the stream's own resolution
            height = min(height, stream.resolution)
            path = os.path.join(settings.MEDIA_ROOT, stream.path(),
                'frames', "%dp"%height, "%s.jpg"%position)
            # extract lazily; the file acts as a cache
            if not os.path.exists(path) and stream.video:
                extract.frame(stream.video.path, path, position, height)
            if not os.path.exists(path):
                return None
            return path
|
2010-09-10 15:12:22 +00:00
|
|
|
|
|
|
|
@property
def timeline_prefix(self):
    """Path prefix for this item's timeline images.

    A single-stream item reuses the stream's own timeline directly;
    items with multiple streams get a joined timeline under the item's
    own media path (built by make_timeline()).
    """
    videos = self.streams()
    if len(videos) == 1:
        return os.path.join(settings.MEDIA_ROOT, videos[0].path('timeline'))
    return os.path.join(settings.MEDIA_ROOT, self.path(), 'timeline')
|
2010-09-10 15:12:22 +00:00
|
|
|
|
2011-08-23 17:39:34 +00:00
|
|
|
def get_files(self, user):
    """Return JSON-serializable dicts for all files of this item.

    The *user* argument is currently unused (see FIXME below).
    """
    #FIXME: limit by user
    return map(lambda f: f.json(), self.files.all())
|
|
|
|
|
|
|
|
def users_with_files(self):
    """Return the distinct Users whose volumes contain files of this item."""
    return User.objects.filter(volumes__files__file__item=self).distinct()
|
|
|
|
|
|
|
|
def update_wanted(self):
    """Recompute the `wanted` flag on this item's files.

    Prefers files held by superusers, then staff, then anyone; within
    that set only video files with non-extra instances are considered,
    restricted to a single folder if the files span several. Everything
    else is marked not wanted.
    """
    users = self.users_with_files()
    if users.filter(is_superuser=True).count()>0:
        files = self.files.filter(instances__volume__user__is_superuser=True)
        users = User.objects.filter(volumes__files__file__in=files,
                                    is_superuser=True).distinct()
    elif users.filter(is_staff=True).count()>0:
        files = self.files.filter(instances__volume__user__is_staff=True)
        users = User.objects.filter(volumes__files__file__in=files,
                                    is_staff=True).distinct()
    else:
        files = self.files.all()
    # NOTE(review): `instances__gt=0` filters on the relation itself, not a
    # count — presumably meant "has at least one instance"; verify intent.
    files = files.filter(is_video=True, instances__extra=False, instances__gt=0).order_by('part')
    # if files come from more than one folder, keep only one of them
    folders = list(set([f.folder for f in files]))
    if len(folders) > 1:
        files = files.filter(folder=folders[0])
    files.update(wanted=True)
    self.files.exclude(id__in=files).update(wanted=False)
|
2011-08-23 17:39:34 +00:00
|
|
|
|
|
|
|
def update_selected(self):
    """Select which files are active based on the 'level' of their owners.

    Level 0 = superuser, 1 = staff, 2 = regular user. If files from a
    better (lower) level than the currently active set are available,
    switch to them, re-render, and rebuild the timeline; otherwise just
    (re)activate the current set if any of it was inactive.
    """
    files = archive.models.File.objects.filter(item=self,
        streams__available=True,
        streams__source=None)
    if files.count() == 0:
        return

    def get_level(users):
        # map a user queryset to the best privilege level it contains
        if users.filter(is_superuser=True).count() > 0: level = 0
        elif users.filter(is_staff=True).count() > 0: level = 1
        else: level = 2
        return level

    current_users = User.objects.filter(volumes__files__file__in=self.files.filter(active=True)).distinct()
    current_level = get_level(current_users)

    users = User.objects.filter(volumes__files__file__in=files).distinct()
    possible_level = get_level(users)

    if possible_level < current_level:
        # better files became available: switch the active set
        files = self.files.filter(instances__volume__user__in=users).order_by('part')
        #FIXME: this should be instance folders
        folders = list(set([f.folder
                            for f in files.filter(is_video=True, instances__extra=False)]))
        files = files.filter(folder__startswith=folders[0])
        files.update(active=True)
        self.rendered = False
        self.save()
        self.update_timeline()
    else:
        files = self.files.filter(instances__volume__user__in=current_users).order_by('part')
        #FIXME: this should be instance folders
        folders = list(set([f.folder
                            for f in files.filter(is_video=True, instances__extra=False)]))
        files = files.filter(folder__startswith=folders[0])
        # only re-render if something in the set was not active yet
        if files.filter(active=False).count() > 0:
            files.update(active=True)
            self.rendered = False
            self.save()
            self.update_timeline()
|
|
|
|
|
2010-10-16 11:49:45 +00:00
|
|
|
|
2011-07-03 16:21:27 +00:00
|
|
|
def make_torrent(self):
    """(Re)create the BitTorrent file for this item's video streams.

    Builds a fresh `torrent/` directory under the item's media path,
    symlinks the stream webm file(s) into it (single file for one
    stream, one file per part otherwise), then writes the .torrent with
    a web-seed url-list and stores its name on self.torrent.
    """
    base = self.path('torrent')
    base = os.path.abspath(os.path.join(settings.MEDIA_ROOT, base))
    # start from a clean torrent directory
    if os.path.exists(base):
        shutil.rmtree(base)
    os.makedirs(base)

    base = self.path('torrent/%s' % self.get('title'))
    base = os.path.abspath(os.path.join(settings.MEDIA_ROOT, base))
    size = 0
    duration = 0.0
    streams = self.streams()
    if streams.count() == 1:
        url = "%s/torrent/%s.webm" % (self.get_absolute_url(),
                                      quote(self.get('title').encode('utf-8')))
        video = "%s.webm" % base
        v = streams[0]
        os.symlink(v.video.path, video)
        size = v.video.size
        duration = v.duration
    else:
        # multi-part: one symlink per stream inside a directory torrent
        url = "%s/torrent/" % self.get_absolute_url()
        part = 1
        os.makedirs(base)
        for v in streams:
            video = "%s/%s.Part %d.webm" % (base, self.get('title'), part)
            part += 1
            os.symlink(v.video.path, video)
            size += v.video.size
            duration += v.duration
        video = base

    torrent = '%s.torrent' % base
    url = "http://%s%s" % (Site.objects.get_current().domain, url)
    meta = {
        'target': torrent,
        'url-list': url,
    }
    if duration:
        meta['playtime'] = ox.formatDuration(duration*1000)[:-4]

    #slightly bigger torrent file but better for streaming
    piece_size_pow2 = 15 #1 mbps -> 32KB pieces
    # NOTE(review): divides by duration unguarded — a zero-duration stream
    # would raise ZeroDivisionError here; confirm streams always have duration.
    if size / duration >= 1000000:
        piece_size_pow2 = 16 #2 mbps -> 64KB pieces
    meta['piece_size_pow2'] = piece_size_pow2

    ox.torrent.createTorrent(video, settings.TRACKER_URL, meta)
    self.torrent.name = self.path('torrent/%s.torrent' % self.get('title'))
    self.save()
|
|
|
|
|
2011-08-18 19:37:12 +00:00
|
|
|
def streams(self):
    """Return the available source Streams of this item's active video
    files, ordered by part number (source=None selects original streams,
    not derived/transcoded ones)."""
    return archive.models.Stream.objects.filter(source=None, available=True,
        file__item=self, file__is_video=True, file__active=True).order_by('file__part')
|
2011-08-18 19:37:12 +00:00
|
|
|
|
2011-08-19 16:54:42 +00:00
|
|
|
def update_timeline(self, force=False):
    """Rebuild timeline-derived data for this item.

    Joins per-stream timelines, extracts cuts and average color, picks a
    poster frame, regenerates posters/icon, optionally rebuilds the
    torrent (when downloads are enabled), reloads subtitles, and records
    whether the item is rendered.

    The *force* parameter is currently unused; it is kept for interface
    compatibility with callers.
    """
    streams = self.streams()
    self.make_timeline()
    self.data['cuts'] = extract.cuts(self.timeline_prefix)
    self.data['color'] = extract.average_color(self.timeline_prefix)
    #extract.timeline_strip(self, self.data['cuts'], stream.info, self.timeline_prefix[:-8])
    self.select_frame()
    self.make_local_poster()
    self.make_poster()
    self.make_icon()
    if settings.CONFIG['video']['download']:
        self.make_torrent()
    self.load_subtitles()
    # FIX: was `streams != []` — comparing a QuerySet to a list is always
    # True (QuerySet does not define equality against lists), so rendered
    # was set even when no streams exist. Use an explicit emptiness check.
    self.rendered = streams.count() > 0
    self.save()
|
2010-09-10 15:12:22 +00:00
|
|
|
|
|
|
|
'''
|
|
|
|
Poster related functions
|
|
|
|
'''
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
def update_poster_urls(self):
    """Fetch poster listings from the configured poster services and
    record each (service, url, width, height) as a PosterUrl row.

    Services that fail to respond or return invalid JSON are skipped.
    """
    _current = {}
    for s in settings.POSTER_SERVICES:
        url = '%s?id=%s'%(s, self.itemId)
        try:
            data = json.loads(ox.net.readUrlUnicode(url))
        except:
            # best effort: an unreachable/broken service must not abort the update
            continue
        for service in data:
            if service not in _current:
                _current[service] = []
            for poster in data[service]:
                _current[service].append(poster)
    #FIXME: remove urls that are no longer listed
    for service in _current:
        for poster in _current[service]:
            p, created = PosterUrl.objects.get_or_create(item=self, url=poster['url'], service=service)
            if created:
                p.width = poster['width']
                p.height = poster['height']
                p.save()
|
|
|
|
|
2010-09-10 15:12:22 +00:00
|
|
|
def delete_poster(self):
    """Delete the item's poster and any derived poster files.

    Uses the stored poster's path if one is set, otherwise the default
    poster.jpg location; then removes every file matching the poster
    name pattern (e.g. resized variants like poster*.jpg).
    """
    if self.poster:
        path = self.poster.path
        self.poster.delete()
    else:
        poster= self.path('poster.jpg')
        path = os.path.abspath(os.path.join(settings.MEDIA_ROOT, poster))
    # remove resized/derived variants alongside the main poster
    for f in glob(path.replace('.jpg', '*.jpg')):
        os.unlink(f)
|
2010-09-10 14:09:41 +00:00
|
|
|
|
2010-09-17 20:23:03 +00:00
|
|
|
def prefered_poster_url(self):
    """Return the URL of the best poster for this item, or None.

    If a poster_source service is set (and is not this site), its
    tallest poster wins; otherwise the services in
    settings.POSTER_PRECEDENCE are tried in order.
    (Name keeps the historical spelling; callers depend on it.)
    """
    self.update_poster_urls()
    service = self.poster_source
    if service and service != settings.URL:
        # order by -height and return the first match, i.e. the tallest
        for u in self.poster_urls.filter(service=service).order_by('-height'):
            return u.url
    if not service:
        for service in settings.POSTER_PRECEDENCE:
            for u in self.poster_urls.filter(service=service).order_by('-height'):
                return u.url
    return None
|
|
|
|
|
2011-08-11 14:15:56 +00:00
|
|
|
def make_timeline(self):
    """Build the item-level timeline.

    Only needed when the item has more than one video stream: the
    per-stream timelines are joined into one under timeline_prefix.
    Single-stream items use the stream's own timeline directly (see
    timeline_prefix).
    """
    streams = self.streams()
    if len(streams) > 1:
        # reuse the already-fetched queryset instead of issuing a
        # second identical self.streams() database query
        timelines = [s.timeline_prefix for s in streams]
        join_timelines(timelines, self.timeline_prefix)
|
2011-08-11 14:15:56 +00:00
|
|
|
|
2010-12-01 00:00:33 +00:00
|
|
|
def make_poster(self, force=False):
    """Ensure the item has a poster image.

    If no poster exists (or *force* is set), download the preferred
    remote poster when one is available; otherwise fall back to the
    locally generated site poster.
    """
    if not self.poster or force:
        url = self.prefered_poster_url()
        if url:
            data = ox.net.readUrl(url)
            # remove old poster files before saving the new one
            self.delete_poster()
            self.poster.save('poster.jpg', ContentFile(data))
            self.save()
        else:
            self.delete_poster()
            poster = self.make_local_poster()
            with open(poster) as f:
                self.poster.save('poster.jpg', ContentFile(f.read()))
|
2010-09-10 14:09:41 +00:00
|
|
|
|
2011-07-26 17:22:23 +00:00
|
|
|
def make_local_poster(self):
    """Generate siteposter.jpg via the external ITEM_POSTER script.

    Assembles a command line with title/director/year, the selected
    poster frame and the 64px timeline (when present), plus item ids,
    runs it, removes stale derived variants, and returns the poster path.
    """
    poster = self.path('siteposter.jpg')
    poster = os.path.abspath(os.path.join(settings.MEDIA_ROOT, poster))

    frame = self.get_poster_frame_path()
    timeline = '%s.64.png' % self.timeline_prefix

    director = u', '.join(self.get('director', ['Unknown Director']))
    cmd = [settings.ITEM_POSTER,
           '-t', self.get('title').encode('utf-8'),
           '-d', director.encode('utf-8'),
           '-y', str(self.get('year', '')),
           '-p', poster
    ]
    if frame:
        cmd += [
            '-f', frame,
        ]
    if os.path.exists(timeline):
        cmd += [
            '-l', timeline,
        ]
    if settings.USE_IMDB:
        # a 7-character itemId is presumably an IMDb id — TODO confirm
        if len(self.itemId) == 7:
            cmd += ['-i', self.itemId]
        cmd += ['-o', self.oxdbId]
    else:
        cmd += ['-i', self.itemId]
    ox.makedirs(os.path.join(settings.MEDIA_ROOT,self.path()))
    p = subprocess.Popen(cmd)
    p.wait()
    # drop derived variants (siteposter*.jpg) but keep the fresh poster
    for f in glob(poster.replace('.jpg', '*.jpg')):
        if f != poster:
            os.unlink(f)
    return poster
|
|
|
|
|
|
|
|
def poster_frames(self):
    """Collect candidate poster frames from all active video files.

    Returns a list of dicts with absolute 'position' (file offsets are
    accumulated so positions are item-relative), 'path', 'width' and
    'height' for each stored frame.
    """
    frames = []
    offset = 0
    for f in self.files.filter(active=True, is_video=True):
        for ff in f.frames.all():
            frames.append({
                'position': offset + ff.position,
                'path': ff.frame.path,
                'width': ff.frame.width,
                'height': ff.frame.height
            })
        # shift subsequent files' frame positions by this file's duration
        offset += f.duration
    return frames
|
2010-09-03 13:28:44 +00:00
|
|
|
|
2011-08-16 15:06:40 +00:00
|
|
|
def select_frame(self):
    """Pick the poster frame with the highest image 'heat' and store
    its index in self.poster_frame (no-op when there are no frames)."""
    frames = self.poster_frames()
    if frames:
        heat = [ox.image.getImageHeat(f['path']) for f in frames]
        # index of the hottest frame within poster_frames()
        self.poster_frame = heat.index(max(heat))
        self.save()
|
|
|
|
|
2011-01-21 10:49:24 +00:00
|
|
|
def get_poster_frame_path(self):
    """Return the image path for the selected poster frame.

    A non-negative poster_frame indexes into poster_frames(); if it is
    out of range it is treated as a time position and a frame is
    extracted at the first configured resolution. With no selection,
    fall back to the middle candidate frame; returns None if there are
    no frames at all.
    """
    frames = self.poster_frames()
    if self.poster_frame >= 0:
        if frames and len(frames) > int(self.poster_frame):
            return frames[int(self.poster_frame)]['path']
        else:
            size = settings.CONFIG['video']['resolutions'][0]
            return self.frame(self.poster_frame, size)

    if frames:
        # no explicit selection: use the middle frame
        return frames[int(len(frames)/2)]['path']
|
2011-01-21 10:49:24 +00:00
|
|
|
|
|
|
|
def make_icon(self):
    """Generate icon.jpg via the external ITEM_ICON script and clean up
    derived icon variants; returns the icon's media-relative path."""
    frame = self.get_poster_frame_path()
    icon = self.path('icon.jpg')
    self.icon.name = icon
    timeline = '%s.64.png' % self.timeline_prefix
    cmd = [settings.ITEM_ICON,
           '-i', self.icon.path
    ]
    if os.path.exists(timeline):
        cmd += ['-l', timeline]
    if frame:
        cmd += ['-f', frame]
    p = subprocess.Popen(cmd)
    p.wait()
    self.save()
    # remove derived variants (icon*.jpg) but keep the main icon.jpg
    # (the lambda's `p` shadows the Popen object above, harmlessly)
    icons = os.path.abspath(os.path.join(settings.MEDIA_ROOT, icon))
    icons = glob(icons.replace('.jpg', '*.jpg'))
    for f in filter(lambda p: not p.endswith('/icon.jpg'), icons):
        os.unlink(f)
    return icon
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2011-08-20 10:06:18 +00:00
|
|
|
def load_subtitles(self):
    """Rebuild subtitle annotations from the item's subtitle files.

    Picks one language (preferring 'en', then the unnamed language,
    then the first available), parses each part's srt with a running
    time offset, and creates Annotation rows on the 'subtitles' layer.
    If a part has no matching audio/video duration, all subtitle
    annotations are discarded (partial subtitles would be misaligned).
    """
    layer = Layer.objects.get(name='subtitles')
    # start from a clean slate for this item's subtitles
    Annotation.objects.filter(layer=layer,item=self).delete()
    offset = 0
    language = ''
    languages = [f.language for f in self.files.filter(active=True, is_subtitle=True,
                                                       available=True)]
    if languages:
        if 'en' in languages:
            language = 'en'
        elif '' in languages:
            language = ''
        else:
            language = languages[0]
    for f in self.files.filter(active=True, is_subtitle=True,
                               available=True, language=language).order_by('part'):
        # attribute annotations to the first instance's volume owner
        user = f.instances.all()[0].volume.user
        for data in f.srt(offset):
            annotation = Annotation(
                item=f.item,
                layer=layer,
                start=data['in'],
                end=data['out'],
                value=data['value'],
                user=user
            )
            annotation.save()
        # advance the offset by the duration of this part's a/v file
        duration = self.files.filter(Q(is_audio=True)|Q(is_video=True)) \
                       .filter(active=True, available=True, part=f.part)
        if duration:
            duration = duration[0].duration
        else:
            # cannot compute the next offset: drop everything rather than
            # keep misaligned subtitles
            Annotation.objects.filter(layer=layer,item=self).delete()
            break
        offset += duration
    self.update_find()
|
|
|
|
|
2011-04-18 18:50:31 +00:00
|
|
|
def delete_item(sender, **kwargs):
    """pre_delete handler: remove an Item's media files from disk
    before the database row is deleted."""
    i = kwargs['instance']
    i.delete_files()
pre_delete.connect(delete_item, sender=Item)
|
2011-01-24 09:38:46 +00:00
|
|
|
|
2011-01-03 14:14:54 +00:00
|
|
|
# Keys that are tracked as facets: every configured item key that offers
# autocompletion and does not delegate sorting to another key.
Item.facet_keys = [
    k['id'] for k in settings.CONFIG['itemKeys']
    if 'autocomplete' in k and 'autocompleteSortKey' not in k
]
|
|
|
|
|
2011-01-03 14:14:54 +00:00
|
|
|
# Keys whose values are person names (sorted with get_name_sort()).
Item.person_keys = [
    k['id'] for k in settings.CONFIG['itemKeys']
    if k.get('sort') == 'person'
]
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-23 16:01:48 +00:00
|
|
|
class ItemFind(models.Model):
    """
    Full-text lookup table used to find items:
    Item.update_find populates this table,
    and it is queried from managers.ItemManager.
    """

    class Meta:
        unique_together = ("item", "key")

    # the item this searchable value belongs to
    item = models.ForeignKey('Item', related_name='find', db_index=True)
    # metadata key, e.g. 'title' or 'director'
    key = models.CharField(max_length=200, db_index=True)
    # searchable text value for that key
    value = models.TextField(blank=True)

    def __unicode__(self):
        return u"%s=%s" % (self.key, self.value)
|
2011-01-05 13:06:09 +00:00
|
|
|
'''
ItemSort
table constructed based on info in settings.CONFIG['itemKeys'];
one database column per sortable key, so items can be ordered with
plain SQL ORDER BY.
'''
attrs = {
    '__module__': 'item.models',
    'item': models.OneToOneField('Item', related_name='sort', primary_key=True),
    'duration': models.FloatField(null=True, blank=True, db_index=True),
}
# only keys shown as columns in the UI get a sort column
for key in filter(lambda k: 'columnWidth' in k, settings.CONFIG['itemKeys']):
    name = key['id']
    # 'id' would clash with the model pk, store it as itemId
    name = {'id': 'itemId'}.get(name, name)
    sort_type = key.get('sort', key['type'])
    if isinstance(sort_type, list):
        sort_type = sort_type[0]
    # map the config sort type to a Django field class + kwargs;
    # the inner dict normalizes aliases (string/title/person -> char, ...)
    model = {
        'char': (models.CharField, dict(null=True, max_length=1000, db_index=True)),
        'year': (models.CharField, dict(null=True, max_length=4, db_index=True)),
        'integer': (models.BigIntegerField, dict(null=True, blank=True, db_index=True)),
        'float': (models.FloatField, dict(null=True, blank=True, db_index=True)),
        'date': (models.DateTimeField, dict(null=True, blank=True, db_index=True))
    }[{
        'string': 'char',
        'title': 'char',
        'person': 'char',
        'year': 'year',
        'words': 'integer',
        'length': 'integer',
        'date': 'date',
        'hue': 'float',
    }.get(sort_type, sort_type)]
    attrs[name] = model[0](**model[1])

# build the model class dynamically from the collected field attrs
ItemSort = type('ItemSort', (models.Model,), attrs)
ItemSort.fields = [f.name for f in ItemSort._meta.fields]
|
2011-01-03 14:14:54 +00:00
|
|
|
|
2011-01-24 13:44:38 +00:00
|
|
|
class Access(models.Model):
    """Tracks per-user access to an item: last access time and a
    running access counter (used e.g. for the popularity sort value)."""

    class Meta:
        unique_together = ("item", "user")

    # auto_now: updated on every save, i.e. every recorded access
    access = models.DateTimeField(auto_now=True)
    item = models.ForeignKey(Item, related_name='accessed')
    # null user = anonymous access
    user = models.ForeignKey(User, null=True, related_name='accessed_items')
    accessed = models.IntegerField(default=0)

    def save(self, *args, **kwargs):
        # every save counts as one access
        if not self.accessed:
            self.accessed = 0
        self.accessed += 1
        super(Access, self).save(*args, **kwargs)

    def __unicode__(self):
        if self.user:
            return u"%s/%s/%s" % (self.user, self.item, self.access)
        return u"%s/%s" % (self.item, self.access)
|
|
|
|
|
2010-07-12 14:56:14 +00:00
|
|
|
class Facet(models.Model):
    '''
    used for keys that can have multiple values like people, languages etc.;
    does not perform too well if the total number of items goes above 10k —
    this happens for keywords in 0xdb right now
    '''

    class Meta:
        unique_together = ("item", "key", "value")

    item = models.ForeignKey('Item', related_name='facets')
    # metadata key this facet value belongs to
    key = models.CharField(max_length=200, db_index=True)
    value = models.CharField(max_length=1000, db_index=True)
    # sortable form of value (e.g. "Lastname, Firstname" for people)
    value_sort = models.CharField(max_length=1000, db_index=True)

    def __unicode__(self):
        return u"%s=%s" % (self.key, self.value)

    def save(self, *args, **kwargs):
        # default the sort value to the normalized value if not set
        if not self.value_sort:
            self.value_sort = utils.sort_string(self.value)
        super(Facet, self).save(*args, **kwargs)
|
2009-08-16 12:23:29 +00:00
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-09-10 14:09:41 +00:00
|
|
|
class PosterUrl(models.Model):
    """A poster image offered by an external poster service for an item,
    with its dimensions; tallest posters sort first (see Meta.ordering)."""

    class Meta:
        unique_together = ("item", "service", "url")
        ordering = ('-height', )

    item = models.ForeignKey(Item, related_name='poster_urls')
    url = models.CharField(max_length=1024)
    # identifier of the poster service that provided this url
    service = models.CharField(max_length=1024)
    width = models.IntegerField(default=80)
    height = models.IntegerField(default=128)

    def __unicode__(self):
        return u'%s %s %dx%d' % (unicode(self.item), self.service, self.width, self.height)
|