more moving around
parent 75f6e3b2dc, commit bdb0365fb1
36 changed files with 200 additions and 120 deletions
0 pandora/item/__init__.py (Normal file)
175 pandora/item/load.py (Normal file)
@@ -0,0 +1,175 @@
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
import random
|
||||
import os.path
|
||||
|
||||
from django.db import models
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
from ox import stripTags, findRe
|
||||
import ox.web.imdb
|
||||
|
||||
import models
|
||||
|
||||
|
||||
def debug(*msgs):
|
||||
for m in msgs:
|
||||
print m,
|
||||
print
|
||||
|
||||
'''Import data from imdb into database,
|
||||
param: imdb id
|
||||
return: Item Object, None if failed
|
||||
'''
|
||||
|
||||
def loadIMDb(imdbId):
|
||||
if len(imdbId) != 7:
|
||||
debug("IMDb ID not valid")
|
||||
return None
|
||||
try:
|
||||
item = models.Item.byImdbId(imdbId)
|
||||
except models.Item.DoesNotExist:
|
||||
#this should not happen, just in case previous imports failed
|
||||
try:
|
||||
imdb = models.ItemImdb.objects.get(imdbId=imdbId)
|
||||
except models.ItemImdb.DoesNotExist:
|
||||
imdb = models.ItemImdb()
|
||||
imdb.imdbId = imdbId
|
||||
imdb.save()
|
||||
item = models.Item()
|
||||
item.imdb = imdb
|
||||
|
||||
info = ox.web.imdb.getMovieInfo(imdbId)
|
||||
for key in ('title',
|
||||
'tagline',
|
||||
'year',
|
||||
'release_date',
|
||||
'rating',
|
||||
'votes',
|
||||
'series_imdb',
|
||||
'season',
|
||||
'episode'):
|
||||
if key in info:
|
||||
setattr(item.imdb, key, info[key])
|
||||
debug(key, info[key])
|
||||
_info_map = {
|
||||
'episode title': 'episode_title',
|
||||
'series title': 'series_title',
|
||||
}
|
||||
for key in _info_map.keys():
|
||||
if key in info:
|
||||
setattr(item.imdb, _info_map.get(key, key), info[key])
|
||||
|
||||
item.imdb.plot = ox.web.imdb.getMoviePlot(imdbId)
|
||||
debug("plot", item.imdb.plot)
|
||||
|
||||
item.imdb.runtime = ox.web.imdb.getMovieRuntimeSeconds(imdbId)
|
||||
business = ox.web.imdb.getMovieBusinessSum(imdbId)
|
||||
for key in ('gross', 'profit', 'budget'):
|
||||
setattr(item.imdb, key, business[key])
|
||||
|
||||
item.imdb.save()
|
||||
item.oxdbId = "__init__%s" % random.randint(0, 100000)
|
||||
item.save()
|
||||
models.AlternativeTitle.objects.filter(item=item, manual=False).delete()
|
||||
for i in ox.web.imdb.getMovieAKATitles(imdbId):
|
||||
t = models.AlternativeTitle()
|
||||
t.item = item
|
||||
t.title = i[0]
|
||||
t.type = i[1]
|
||||
t.save()
|
||||
|
||||
#FIXME: related tables should be cleaned to not accumulate cruft
|
||||
#Country
|
||||
models.ItemCountry.objects.filter(item=item, manual=False).delete()
|
||||
position = 0
|
||||
if 'country' in info:
|
||||
for i in info['country']:
|
||||
debug("add country", i)
|
||||
country, created = models.Country.objects.get_or_create(name=i)
|
||||
models.ItemCountry.link(item, country, position)
|
||||
position += 1
|
||||
|
||||
#Language
|
||||
models.ItemLanguage.objects.filter(item=item, manual=False).delete()
|
||||
position = 0
|
||||
if 'language' in info:
|
||||
for i in info['language']:
|
||||
debug("add language", i)
|
||||
language, created = models.Language.objects.get_or_create(name=i)
|
||||
models.ItemLanguage.link(item, language, position)
|
||||
position += 1
|
||||
|
||||
#Location
|
||||
item.locations_all.filter(manual=False).delete()
|
||||
locations = ox.web.imdb.getMovieLocations(imdbId)
|
||||
for i in locations:
|
||||
debug("add location", i)
|
||||
location, created = models.Location.objects.get_or_create(name=i)
|
||||
location.items.add(item)
|
||||
|
||||
#Genre
|
||||
item.genres_all.filter(manual=False).delete()
|
||||
if 'genre' in info:
|
||||
for i in info['genre']:
|
||||
debug("add genre", i)
|
||||
genre, created = models.Genre.objects.get_or_create(name=i)
|
||||
genre.items.add(item)
|
||||
|
||||
#Keyword
|
||||
item.keywords_all.filter(manual=False).delete()
|
||||
keywords = ox.web.imdb.getMovieKeywords(imdbId)
|
||||
for g in keywords:
|
||||
debug("add keyword", g)
|
||||
keyword, created = models.Keyword.objects.get_or_create(name=g)
|
||||
keyword.items.add(item)
|
||||
|
||||
item.trivia_all.filter(manual=False).delete()
|
||||
position = 0
|
||||
trivia = ox.web.imdb.getMovieTrivia(imdbId)
|
||||
for i in trivia:
|
||||
debug("add trivia", i)
|
||||
t = models.Trivia()
|
||||
t.item = item
|
||||
t.trivia = i
|
||||
t.position = position
|
||||
t.save()
|
||||
position += 1
|
||||
|
||||
position = 0
|
||||
models.Cast.objects.filter(item=item).filter(manual=False).delete()
|
||||
credits = ox.web.imdb.getMovieCredits(imdbId)
|
||||
for role in credits:
|
||||
for p in credits[role]:
|
||||
name = stripTags(p[0])
|
||||
imdb_id = findRe(p[0], 'nm(\d{7})')
|
||||
debug("add cast", name)
|
||||
#FIXME: we could save character information here
|
||||
character = stripTags(p[1])
|
||||
person = models.Person.get_or_create(name, imdb_id)
|
||||
models.Cast.link(item, person, role, character, position)
|
||||
position += 1
|
||||
|
||||
item.connections_all.filter(manual=False).delete()
|
||||
connections = ox.web.imdb.getMovieConnections(imdbId)
|
||||
for relation in connections:
|
||||
for otherId in connections[relation]:
|
||||
try:
|
||||
object = models.Item.objects.get(imdb__imdbId=otherId)
|
||||
debug("add connection", relation, object)
|
||||
models.Connection.get_or_create(item, relation, object)
|
||||
except models.Item.DoesNotExist:
|
||||
pass
|
||||
|
||||
reviews = ox.web.imdb.getMovieExternalReviews(imdbId)
|
||||
item.reviews_all.filter(manual=False).delete()
|
||||
for r in reviews:
|
||||
debug("add review", r)
|
||||
review = models.Review.get_or_create(item, r)
|
||||
review.title = reviews[r]
|
||||
review.save()
|
||||
|
||||
item.oxdbId = item.oxid()
|
||||
item.save()
|
||||
return item
|
||||
|
||||
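A minimal usage sketch for loadIMDb above (the calling code and the id value are illustrative, not part of this commit; it assumes the snippet runs inside the item app so that 'import load' resolves):

import load

item = load.loadIMDb('0120382')  #any 7-digit IMDb id; returns None if the id is not 7 characters
if item is None:
    print "import failed"
else:
    print item.oxdbId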
216 pandora/item/managers.py (Normal file)
@@ -0,0 +1,216 @@
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
import re
|
||||
from datetime import datetime
|
||||
from urllib2 import unquote
|
||||
import json
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models import Q, Manager
|
||||
|
||||
import models
|
||||
|
||||
|
||||
def keyType(key):
|
||||
if key in ('released', ):
|
||||
return "date"
|
||||
if key in ('year', 'cast.length'):
|
||||
return "int"
|
||||
if key in ('rating', 'votes'):
|
||||
return "float"
|
||||
return "string"
|
||||
|
||||
def parseCondition(condition):
|
||||
'''
|
||||
condition: {
|
||||
value: "war"
|
||||
}
|
||||
or
|
||||
condition: {
|
||||
key: "year",
|
||||
value: "1970-1980,
|
||||
operator: "!="
|
||||
}
|
||||
...
|
||||
'''
|
||||
k = condition.get('key', 'all')
|
||||
k = {'id': 'itemId'}.get(k, k)
|
||||
if not k: k = 'all'
|
||||
v = condition['value']
|
||||
op = condition.get('operator', None)
|
||||
if not op: op = '~'
|
||||
if op.startswith('!'):
|
||||
op = op[1:]
|
||||
exclude = True
|
||||
else:
|
||||
exclude = False
|
||||
if keyType(k) == "string":
|
||||
in_find=True
|
||||
value_key = 'find__value'
|
||||
if op == '=':
|
||||
if k in models.Item.facet_keys:
|
||||
in_find=False
|
||||
v = models.Item.objects.filter(facets__key=k, facets__value=v)
|
||||
k = 'id__in'
|
||||
else:
|
||||
value_key = 'find__value__iexact'
|
||||
elif op == '^':
|
||||
v = v[1:]
|
||||
value_key = 'find__value__istartswith'
|
||||
elif op == '$':
|
||||
v = v[:-1]
|
||||
value_key = 'find__value__iendswith'
|
||||
else: # elif op == '~':
|
||||
value_key = 'find__value__icontains'
|
||||
k = str(k)
|
||||
if exclude:
|
||||
if in_find and not k.startswith('itemId'):
|
||||
q = ~Q(**{'find__key':k, value_key:v})
|
||||
else:
|
||||
q = ~Q(**{k:v})
|
||||
else:
|
||||
if in_find and not k.startswith('itemId'):
|
||||
q = Q(**{'find__key':k, value_key:v})
|
||||
else:
|
||||
q = Q(**{k:v})
|
||||
return q
|
||||
else: #number or date
|
||||
#FIXME: this part needs to be moved to use key/value find db
|
||||
def parseDate(d):
|
||||
while len(d) < 3:
|
||||
d.append(1)
|
||||
return datetime(*[int(i) for i in d])
|
||||
if op == '-':
|
||||
v1 = v[1]
|
||||
v2 = v[2]
|
||||
if keyType(k) == "date":
|
||||
v1 = parseDate(v1.split('.'))
|
||||
v2 = parseDate(v2.split('.'))
|
||||
|
||||
k = 'find__%s' % k
|
||||
if exclude: #!1960-1970
|
||||
k1 = str('%s__lt' % k)
|
||||
k2 = str('%s__gte' % k)
|
||||
return Q(**{k1:v1})|Q(**{k2:v2})
|
||||
else: #1960-1970
|
||||
k1 = str('%s__gte' % k)
|
||||
k2 = str('%s__lt' % k)
|
||||
return Q(**{k1:v1})&Q(**{k2:v2})
|
||||
else:
|
||||
if keyType(k) == "date":
|
||||
v = parseDate(v.split('.'))
|
||||
if op == '=':
|
||||
k = '%s__exact' % k
|
||||
elif op == '>':
|
||||
k = '%s__gt' % k
|
||||
elif op == '>=':
|
||||
k = '%s__gte' % k
|
||||
elif op == '<':
|
||||
k = '%s__lt' % k
|
||||
elif op == '<=':
|
||||
k = '%s__lte' % k
|
||||
|
||||
k = 'find__%s' % k
|
||||
k = str(k)
|
||||
if exclude: #!1960
|
||||
return ~Q(**{k:v})
|
||||
else: #1960
|
||||
return Q(**{k:v})
|
||||
|
||||
def parseConditions(conditions, operator):
|
||||
'''
|
||||
conditions: [
|
||||
{
|
||||
value: "war"
|
||||
}
|
||||
{
|
||||
key: "year",
|
||||
value: "1970-1980,
|
||||
operator: "!="
|
||||
},
|
||||
{
|
||||
key: "country",
|
||||
value: "f",
|
||||
operator: "^"
|
||||
}
|
||||
],
|
||||
operator: "&"
|
||||
'''
|
||||
conn = []
|
||||
for condition in conditions:
|
||||
if 'conditions' in condition:
|
||||
q = parseConditions(condition['conditions'],
|
||||
condition.get('operator', '&'))
|
||||
if q: conn.append(q)
|
||||
pass
|
||||
else:
|
||||
if condition.get('value', '') != '' or condition.get('operator', '') == '=':
|
||||
conn.append(parseCondition(condition))
|
||||
if conn:
|
||||
q = conn[0]
|
||||
for c in conn[1:]:
|
||||
if operator == '|':
|
||||
q = q | c
|
||||
else:
|
||||
q = q & c
|
||||
return q
|
||||
return None
|
||||
|
||||
class ItemManager(Manager):
|
||||
def get_query_set(self):
|
||||
return super(ItemManager, self).get_query_set()
|
||||
|
||||
def filter_list(self, qs, l, user):
|
||||
if l != "all":
|
||||
l = l.split(":")
|
||||
only_public = True
|
||||
if not user.is_anonymous():
|
||||
if len(l) == 1: l = [user.username] + l
|
||||
if user.username == l[0]:
|
||||
only_public = False
|
||||
if len(l) == 2:
|
||||
lqs = models.List.objects.filter(name=l[1], user__username=l[0])
|
||||
if only_public:
|
||||
lqs = lqs.filter(public=True)
|
||||
if lqs.count() == 1:
|
||||
qs = qs.filter(listitem__list__id=lqs[0].id)
|
||||
return qs
|
||||
|
||||
def find(self, data, user):
|
||||
'''
|
||||
query: {
|
||||
conditions: [
|
||||
{
|
||||
value: "war"
|
||||
}
|
||||
{
|
||||
key: "year",
|
||||
value: "1970-1980,
|
||||
operator: "!="
|
||||
},
|
||||
{
|
||||
key: "country",
|
||||
value: "f",
|
||||
operator: "^"
|
||||
}
|
||||
],
|
||||
operator: "&"
|
||||
}
|
||||
'''
|
||||
|
||||
#join query with operator
|
||||
qs = self.get_query_set()
|
||||
#only include items that have hard metadata
|
||||
qs = qs.filter(available=True)
|
||||
conditions = parseConditions(data['query']['conditions'],
|
||||
data['query'].get('operator', '&'))
|
||||
if conditions:
|
||||
qs = qs.filter(conditions)
|
||||
|
||||
#FIXME: lists are part of query now
|
||||
# filter list, works for own or public lists
|
||||
l = data.get('list', 'all')
|
||||
qs = self.filter_list(qs, l, user)
|
||||
return qs
|
||||
|
||||
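To make the query format described in the docstrings above concrete, here is an illustrative call into ItemManager.find; the key names and values are examples only, and user stands for the requesting django User (assumed to be in scope):

import models

data = {
    'query': {
        'conditions': [
            {'key': 'title', 'value': 'war', 'operator': '~'},
            {'key': 'genre', 'value': 'War', 'operator': '='}
        ],
        'operator': '&'
    }
}
qs = models.Item.objects.find(data, user)
#parseCondition turns these two conditions into, roughly,
#Q(find__key='title', find__value__icontains='war') and
#Q(id__in=<Items having a 'genre' facet with value 'War'>)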
904 pandora/item/models.py (Normal file)
@@ -0,0 +1,904 @@
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
from __future__ import division, with_statement
|
||||
|
||||
from datetime import datetime
|
||||
import os.path
|
||||
import math
|
||||
import random
|
||||
import re
|
||||
import subprocess
|
||||
from glob import glob
|
||||
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.files.base import ContentFile
|
||||
from django.utils import simplejson as json
|
||||
from django.conf import settings
|
||||
|
||||
from oxdjango import fields
|
||||
import ox
|
||||
from ox import stripTags
|
||||
from ox.normalize import canonicalTitle, canonicalName
|
||||
from firefogg import Firefogg
|
||||
|
||||
import managers
|
||||
import load
|
||||
import utils
|
||||
from archive import extract
|
||||
|
||||
|
||||
def getItem(info):
|
||||
'''
|
||||
info dict with:
|
||||
imdbId, title, director, episode_title, season, series
|
||||
'''
|
||||
if 'imdbId' in info and info['imdbId']:
|
||||
try:
|
||||
item = Item.objects.get(itemId=info['imdbId'])
|
||||
except Item.DoesNotExist:
|
||||
item = Item(itemId=info['imdbId'])
|
||||
if 'title' in info and 'directors' in info:
|
||||
item.imdb = {
|
||||
'title': info['title'],
|
||||
'directors': info['directors'],
|
||||
'year': info.get('year', '')
|
||||
}
|
||||
#FIXME: this should be done async
|
||||
#item.save()
|
||||
#tasks.updateImdb.delay(item.itemId)
|
||||
item.updateImdb()
|
||||
else:
|
||||
q = Item.objects.filter(find__title=info['title'])
|
||||
if q.count() > 1:
|
||||
print "FIXME: check more than title here!!?"
|
||||
item = q[0]
|
||||
else:
|
||||
try:
|
||||
item = Item.objects.get(itemId=info['oxdbId'])
|
||||
except Item.DoesNotExist:
|
||||
item = Item()
|
||||
item.metadata = {
|
||||
'title': info['title'],
|
||||
'directors': info['directors'],
|
||||
'year': info.get('year', '')
|
||||
}
|
||||
item.itemId = info['oxdbId']
|
||||
|
||||
for key in ('episode_title', 'series_title', 'season', 'episode'):
|
||||
if key in info and info[key]:
|
||||
item.metadata[key] = info[key]
|
||||
item.save()
|
||||
return item
|
||||
|
||||
class Item(models.Model):
|
||||
person_keys = ('director', 'writer', 'producer', 'editor', 'cinematographer', 'actor', 'character')
|
||||
facet_keys = person_keys + ('country', 'language', 'genre', 'keyword')
|
||||
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
published = models.DateTimeField(default=datetime.now, editable=False)
|
||||
|
||||
#only items that have metadata from files are available,
|
||||
#this is indicated by setting available to True
|
||||
available = models.BooleanField(default=False, db_index=True)
|
||||
|
||||
itemId = models.CharField(max_length=128, unique=True, blank=True)
|
||||
oxdbId = models.CharField(max_length=42, unique=True, blank=True)
|
||||
|
||||
objects = managers.ItemManager()
|
||||
|
||||
def get(self, key, default=None):
|
||||
if self.metadata and key in self.metadata:
|
||||
return self.metadata[key]
|
||||
if self.imdb and key in self.imdb:
|
||||
return self.imdb[key]
|
||||
return default
|
||||
|
||||
def editable(self, user):
|
||||
#FIXME: make permissions work
|
||||
return False
|
||||
|
||||
def edit(self, data):
|
||||
#FIXME: how to map the keys to the right place to write them to?
|
||||
for key in data:
|
||||
if key != 'id':
|
||||
setattr(self.metadata, key, data[key])
|
||||
self.oxdb.save()
|
||||
self.save()
|
||||
|
||||
def reviews(self):
|
||||
reviews = self.get('reviews', [])
|
||||
whitelist = [w for w in ReviewWhitelist.objects.all()]
|
||||
_reviews = {}
|
||||
for r in reviews:
|
||||
for w in whitelist:
|
||||
if w.url in r[0]:
|
||||
_reviews[w.title] = r[0]
|
||||
return _reviews
|
||||
|
||||
imdb = fields.DictField(default={}, editable=False)
|
||||
metadata = fields.DictField(default={}, editable=False)
|
||||
|
||||
json = fields.DictField(default={}, editable=False)
|
||||
|
||||
def updateImdb(self):
|
||||
if len(self.itemId) == 7:
|
||||
self.imdb = ox.web.imdb.Imdb(self.itemId)
|
||||
self.save()
|
||||
|
||||
poster = models.ImageField(default=None, blank=True, upload_to=lambda m, x: os.path.join(itemid_path(m.itemId), "poster.jpg"))
|
||||
poster_url = models.TextField(blank=True)
|
||||
poster_height = models.IntegerField(default=0)
|
||||
poster_width = models.IntegerField(default=0)
|
||||
|
||||
poster_frame = models.FloatField(default=-1)
|
||||
|
||||
#stream related fields
|
||||
stream_aspect = models.FloatField(default=4/3)
|
||||
|
||||
def __unicode__(self):
|
||||
year = self.get('year')
|
||||
if year:
|
||||
return u'%s (%s)' % (self.get('title'), self.get('year'))
|
||||
return self.get('title')
|
||||
|
||||
def get_absolute_url(self):
|
||||
return '/timeline#%s' % self.itemId
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.json = self.get_json()
|
||||
if not self.oxdbId:
|
||||
self.oxdbId = self.oxid()
|
||||
|
||||
if self.poster:
|
||||
self.poster_height = self.poster.height
|
||||
self.poster_width = self.poster.width
|
||||
else:
|
||||
self.poster_height = 128
|
||||
self.poster_width = 80
|
||||
super(Item, self).save(*args, **kwargs)
|
||||
self.updateFind()
|
||||
self.updateSort()
|
||||
self.updateFacets()
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
self.delete_poster()
|
||||
for f in glob("%s*"%self.timeline_prefix):
|
||||
os.unlink(f)
|
||||
for f in glob("%sstrip*"%self.timeline_prefix[:-8]):
|
||||
os.unlink(f)
|
||||
super(Item, self).delete(*args, **kwargs)
|
||||
|
||||
def mergeWith(self, other):
|
||||
'''
|
||||
move all related tables to other and delete self
|
||||
'''
|
||||
for stream in self.streams.all():
|
||||
stream.item = other
|
||||
stream.save()
|
||||
for l in self.lists.all():
|
||||
l.items.remove(self)
|
||||
if l.items.filter(id=other.id).count() == 0:
|
||||
l.items.add(other)
|
||||
#FIXME: should this really happen for layers?
|
||||
for l in self.layer.all():
|
||||
l.items.remove(self)
|
||||
if l.items.filter(id=other.id).count() == 0:
|
||||
l.items.add(other)
|
||||
if hasattr(self, 'files'):
|
||||
for f in self.files.all():
|
||||
f.item = other
|
||||
f.save()
|
||||
self.delete()
|
||||
other.save()
|
||||
|
||||
'''
|
||||
JSON cache related functions
|
||||
'''
|
||||
_public_fields = {
|
||||
'itemId': 'id',
|
||||
'title': 'title',
|
||||
'year': 'year',
|
||||
|
||||
'runtime': 'runtime',
|
||||
'release_date': 'release_date',
|
||||
|
||||
'countries': 'country',
|
||||
'directors': 'director',
|
||||
'writers': 'writer',
|
||||
'editors': 'editor',
|
||||
'producers': 'producer',
|
||||
'cinematographer': 'cinematographer',
|
||||
'languages': 'language',
|
||||
'genres': 'genre',
|
||||
'keywords': 'keyword',
|
||||
'cast': 'cast',
|
||||
'series_title': 'series_title',
|
||||
'episode_title': 'episode_title',
|
||||
'season': 'season',
|
||||
'episode': 'episode',
|
||||
'reviews': 'reviews',
|
||||
'trivia': 'trivia',
|
||||
'rating': 'rating',
|
||||
'votes': 'votes',
|
||||
'alternative_titles': 'alternative_titles',
|
||||
'connections_json': 'connections',
|
||||
}
|
||||
def get_poster(self):
|
||||
poster = {}
|
||||
poster['width'] = self.poster_width
|
||||
poster['height'] = self.poster_height
|
||||
poster['url'] = '/%s/poster.jpg' % self.itemId
|
||||
'''
|
||||
if self.poster:
|
||||
poster['url'] = self.poster.url
|
||||
else:
|
||||
poster['url'] = self.poster_url
|
||||
'''
|
||||
return poster
|
||||
|
||||
def get_posters(self):
|
||||
posters = {}
|
||||
for p in self.poster_urls.all():
|
||||
if p.service not in posters:
|
||||
posters[p.service] = []
|
||||
posters[p.service].append({'url': p.url, 'width': p.width, 'height': p.height})
|
||||
local_posters = self.local_posters().keys()
|
||||
if local_posters:
|
||||
posters['local'] = []
|
||||
for p in local_posters:
|
||||
url = p.replace(settings.MEDIA_ROOT, settings.MEDIA_URL)
|
||||
width = 640
|
||||
height = 1024
|
||||
posters['local'].append({'url': url, 'width': width, 'height': height})
|
||||
return posters
|
||||
|
||||
def get_stream(self):
|
||||
stream = {}
|
||||
if self.streams.all().count():
|
||||
s = self.streams.all()[0]
|
||||
if s.video and s.info:
|
||||
stream['duration'] = s.info['duration']
|
||||
if 'video' in s.info and s.info['video']:
|
||||
stream['aspectRatio'] = s.info['video'][0]['width'] / s.info['video'][0]['height']
|
||||
if settings.XSENDFILE or settings.XACCELREDIRECT:
|
||||
stream['baseUrl'] = '/%s' % self.itemId
|
||||
else:
|
||||
stream['baseUrl'] = os.path.dirname(s.video.url)
|
||||
stream['profiles'] = list(set(map(lambda s: int(os.path.splitext(s['profile'])[0][:-1]), self.streams.all().values('profile'))))
|
||||
return stream
|
||||
|
||||
def get_layers(self):
|
||||
layers = {}
|
||||
layers['cuts'] = self.metadata.get('cuts', {})
|
||||
|
||||
layers['subtitles'] = {}
|
||||
#FIXME: subtitles should be stored in Layer
|
||||
qs = self.files.filter(is_subtitle=True, is_main=True, available=True)
|
||||
if qs.count()>0:
|
||||
layers['subtitles'] = qs[0].srt()
|
||||
return layers
|
||||
|
||||
def get_json(self, fields=None):
|
||||
item = {}
|
||||
for key in self._public_fields:
|
||||
pub_key = self._public_fields.get(key, key)
|
||||
if not fields or pub_key in fields:
|
||||
if hasattr(self, key):
|
||||
value = getattr(self, key)
|
||||
else:
|
||||
value = self.get(key)
|
||||
if callable(value):
|
||||
item[pub_key] = value()
|
||||
else:
|
||||
item[pub_key] = value
|
||||
if not fields:
|
||||
item['stream'] = self.get_stream()
|
||||
item['poster'] = self.get_poster()
|
||||
item['posters'] = self.get_posters()
|
||||
if fields:
|
||||
for f in fields:
|
||||
if f.endswith('.length') and f[:-7] in ('cast', 'genre', 'trivia'):
|
||||
item[f] = getattr(self.sort, f[:-7])
|
||||
return item
|
||||
|
||||
def fields(self):
|
||||
fields = {}
|
||||
for f in self._meta.fields:
|
||||
if f.name in self._public_fields:
|
||||
fields[f.name] = {}
|
||||
fields[f.name]['order'] = 'desc'
|
||||
fields[f.name]['type'] = type(f)
|
||||
return fields
|
||||
fields = classmethod(fields)
|
||||
|
||||
def oxid(self):
|
||||
return utils.oxid(self.get('title', ''), self.get('directors', []), str(self.get('year', '')),
|
||||
self.get('series title', ''), self.get('episode title', ''),
|
||||
self.get('season', ''), self.get('episode', ''))
|
||||
|
||||
|
||||
'''
|
||||
Search related functions
|
||||
'''
|
||||
def updateFind(self):
|
||||
def save(key, value):
|
||||
f, created = ItemFind.objects.get_or_create(item=self, key=key)
|
||||
if value not in ('', '||'):
|
||||
f.value = value
|
||||
f.save()
|
||||
else:
|
||||
f.delete()
|
||||
|
||||
save('title', '\n'.join([self.get('title'), self.get('original_title', '')]))
|
||||
|
||||
#FIXME: filter us/int title
|
||||
#f.title += ' '.join([t.title for t in self.alternative_titles()])
|
||||
|
||||
save('year', self.get('year', ''))
|
||||
|
||||
for key in self.facet_keys:
|
||||
if key == 'actor':
|
||||
values = [i[0] for i in self.get('actor', [])]
|
||||
elif key == 'character':
|
||||
values = [i[1] for i in self.get('actor', [])]
|
||||
else:
|
||||
values = self.get(utils.plural_key(key), [])
|
||||
save(key, '|%s|'%'|'.join(values))
|
||||
save('summary', self.get('plot', '') + self.get('plot_outline', ''))
|
||||
save('trivia', ' '.join(self.get('trivia', [])))
|
||||
save('location', '|%s|'%'|'.join(self.get('filming_locations', [])))
|
||||
|
||||
#FIXME:
|
||||
#f.dialog = 'fixme'
|
||||
save('dialog', '\n'.join([l.value for l in Layer.objects.filter(type='subtitle', item=self).order_by('start')]))
|
||||
|
||||
#FIXME: collate filenames
|
||||
#f.filename = self.filename
|
||||
all_find = ' '.join([f.value for f in ItemFind.objects.filter(item=self).exclude(key='all')])
|
||||
save('all', all_find)
|
||||
|
||||
def updateSort(self):
|
||||
try:
|
||||
s = self.sort
|
||||
except ItemSort.DoesNotExist:
|
||||
s = ItemSort(item=self)
|
||||
|
||||
def sortNames(values):
|
||||
sort_value = ''
|
||||
if values:
|
||||
sort_value = '; '.join([getPersonSort(name) for name in values])
|
||||
if not sort_value:
|
||||
sort_value = ''
|
||||
return sort_value
|
||||
|
||||
#title
|
||||
title = canonicalTitle(self.get('title'))
|
||||
s.title = utils.sort_title(title)
|
||||
|
||||
s.country = ','.join(self.get('countries', []))
|
||||
s.year = self.get('year', '')
|
||||
|
||||
for key in self.person_keys:
|
||||
setattr(s, key, sortNames(self.get(utils.plural_key(key), [])))
|
||||
|
||||
for key in ('language', 'country'):
|
||||
setattr(s, key, ','.join(self.get(utils.plural_key(key), [])))
|
||||
|
||||
s.runtime = self.get('runtime', 0)
|
||||
|
||||
for key in ('keywords', 'genres', 'cast', 'summary', 'trivia', 'connections'):
|
||||
setattr(s, key, len(self.get(key, '')))
|
||||
|
||||
s.itemId = self.itemId.replace('0x', 'xx')
|
||||
s.rating = self.get('rating', -1)
|
||||
s.votes = self.get('votes', -1)
|
||||
|
||||
# data from related subtitles
|
||||
s.scenes = 0 #FIXME
|
||||
s.dialog = 0 #FIXME
|
||||
s.words = 0 #FIXME
|
||||
s.wpm = 0 #FIXME
|
||||
s.risk = 0 #FIXME
|
||||
# data from related files
|
||||
s.duration = 0 #FIXME
|
||||
s.resolution = 0 #FIXME
|
||||
s.aspectratio = 0 #FIXME
|
||||
s.bitrate = 0 #FIXME
|
||||
s.pixels = 0 #FIXME
|
||||
s.filename = 0 #FIXME
|
||||
s.files = 0 #FIXME
|
||||
s.size = 0 #FIXME
|
||||
|
||||
for key in ('title', 'language', 'country') + self.person_keys:
|
||||
setattr(s, '%s_desc'%key, getattr(s, key))
|
||||
if not getattr(s, key):
|
||||
setattr(s, key, u'zzzzzzzzzzzzzzzzzzzzzzzzz')
|
||||
if not s.year:
|
||||
s.year_desc = ''
|
||||
s.year = '9999'
|
||||
#FIXME: also deal with number based rows like genre, keywords etc
|
||||
s.save()
|
||||
|
||||
def updateFacets(self):
|
||||
#FIXME: what to do with Unknown Director, Year, Country etc.
|
||||
for key in self.facet_keys:
|
||||
if key == 'actor':
|
||||
current_values = [i[0] for i in self.get('actor', [])]
|
||||
elif key == 'character':
|
||||
current_values = [i[1] for i in self.get('actor', [])]
|
||||
else:
|
||||
current_values = self.get(utils.plural_key(key), [])
|
||||
saved_values = [i.value for i in Facet.objects.filter(item=self, key=key)]
|
||||
removed_values = filter(lambda x: x not in current_values, saved_values)
|
||||
if removed_values:
|
||||
Facet.objects.filter(item=self, key=key, value__in=removed_values).delete()
|
||||
for value in current_values:
|
||||
if value not in saved_values:
|
||||
value_sort = value
|
||||
if key in self.person_keys:
|
||||
value_sort = getPersonSort(value)
|
||||
f = Facet(key=key, value=value, value_sort=value_sort)
|
||||
f.item = self
|
||||
f.save()
|
||||
year = self.get('year', None)
|
||||
if year:
|
||||
f, created = Facet.objects.get_or_create(key='year', value=year, value_sort=year, item=self)
|
||||
else:
|
||||
Facet.objects.filter(item=self, key='year').delete()
|
||||
|
||||
'''
|
||||
Video related functions
|
||||
'''
|
||||
def frame(self, position, width=128):
|
||||
stream = self.streams.filter(profile=settings.VIDEO_PROFILE+'.webm')[0]
|
||||
path = os.path.join(settings.MEDIA_ROOT, itemid_path(self.itemId), 'frames', "%d"%width, "%s.jpg"%position)
|
||||
if not os.path.exists(path):
|
||||
extract.frame(stream.video.path, path, position, width)
|
||||
return path
|
||||
|
||||
@property
|
||||
def timeline_prefix(self):
|
||||
return os.path.join(settings.MEDIA_ROOT, itemid_path(self.itemId), 'timeline')
|
||||
|
||||
def main_videos(self):
|
||||
#FIXME: needs to check if more than one user has main files and only take from "higher" user
|
||||
return self.files.filter(is_main=True, is_video=True, available=True)
|
||||
|
||||
def updateStreams(self):
|
||||
files = {}
|
||||
for f in self.main_videos():
|
||||
files[utils.sort_title(f.name)] = f.video.path
|
||||
|
||||
#FIXME: how to detect if something changed?
|
||||
if files:
|
||||
stream, created = Stream.objects.get_or_create(item=self, profile='%s.webm' % settings.VIDEO_PROFILE)
|
||||
stream.video.name = stream_path(stream)
|
||||
cmd = []
|
||||
|
||||
for f in sorted(files):
|
||||
cmd.append('+')
|
||||
cmd.append(files[f])
|
||||
if not os.path.exists(os.path.dirname(stream.video.path)):
|
||||
os.makedirs(os.path.dirname(stream.video.path))
|
||||
cmd = [ 'mkvmerge', '-o', stream.video.path ] + cmd[1:]
|
||||
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
p.wait()
|
||||
stream.save()
|
||||
|
||||
if 'video' in stream.info:
|
||||
extract.timeline(stream.video.path, self.timeline_prefix)
|
||||
self.stream_aspect = stream.info['video'][0]['width']/stream.info['video'][0]['height']
|
||||
self.metadata['cuts'] = extract.cuts(self.timeline_prefix)
|
||||
self.metadata['average_color'] = extract.average_color(self.timeline_prefix)
|
||||
#extract.timeline_strip(self, self.metadata['cuts'], stream.info, self.timeline_prefix[:-8])
|
||||
|
||||
stream.extract_derivatives()
|
||||
#something with poster
|
||||
self.available = True
|
||||
self.save()
|
||||
|
||||
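#For reference (not part of this commit): with two main video parts, the
#subprocess call assembled in updateStreams above receives, in order,
#  ['mkvmerge', '-o', stream.video.path, first_part, '+', second_part]
#i.e. mkvmerge's '+' append syntax concatenates the sorted parts into a single stream.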
'''
|
||||
Poster related functions
|
||||
'''
|
||||
def update_poster_urls(self):
|
||||
_current = {}
|
||||
for s in settings.POSTER_SERVICES:
|
||||
url = '%s?itemId=%s'%(s, self.itemId)
|
||||
try:
|
||||
data = json.loads(ox.net.readUrlUnicode(url))
|
||||
except:
|
||||
continue
|
||||
for service in data:
|
||||
if service not in _current:
|
||||
_current[service] = []
|
||||
for poster in data[service]:
|
||||
_current[service].append(poster)
|
||||
#FIXME: remove urls that are no longer listed
|
||||
for service in _current:
|
||||
for poster in _current[service]:
|
||||
p, created = PosterUrl.objects.get_or_create(item=self, url=poster['url'], service=service)
|
||||
if created:
|
||||
p.width = poster['width']
|
||||
p.height = poster['height']
|
||||
p.save()
|
||||
|
||||
def delete_poster(self):
|
||||
if self.poster:
|
||||
path = self.poster.path
|
||||
self.poster.delete()
|
||||
for f in glob(path.replace('.jpg', '*.jpg')):
|
||||
os.unlink(f)
|
||||
|
||||
def prefered_poster_url(self):
|
||||
if self.poster_url:
|
||||
return self.poster_url
|
||||
self.update_poster_urls()
|
||||
for service in settings.POSTER_PRECEDENCE:
|
||||
for u in self.poster_urls.filter(service=service).order_by('-height'):
|
||||
return u.url
|
||||
return None
|
||||
|
||||
def download_poster(self, force=False):
|
||||
if not self.poster or force:
|
||||
url = self.prefered_poster_url()
|
||||
if url:
|
||||
data = ox.net.readUrl(url)
|
||||
if force:
|
||||
self.delete_poster()
|
||||
self.poster.save('poster.jpg', ContentFile(data))
|
||||
self.save()
|
||||
else:
|
||||
if force:
|
||||
self.delete_poster()
|
||||
local_posters = self.make_local_posters()
|
||||
if local_posters:
|
||||
with open(local_posters[0]) as f:
|
||||
self.poster.save('poster.jpg', ContentFile(f.read()))
|
||||
|
||||
def local_posters(self):
|
||||
part = 1
|
||||
posters = {}
|
||||
for f in self.main_videos():
|
||||
for frame in f.frames.all():
|
||||
path = os.path.join(itemid_path(self.itemId), 'poster.pandora.%s.%s.jpg'%(part, frame.position))
|
||||
path = os.path.abspath(os.path.join(settings.MEDIA_ROOT, path))
|
||||
posters[path] = frame.frame.path
|
||||
part += 1
|
||||
return posters
|
||||
|
||||
def make_local_posters(self):
|
||||
posters = self.local_posters()
|
||||
for poster in posters:
|
||||
frame = posters[poster]
|
||||
cmd = [settings.ITEM_POSTER,
|
||||
'-t', self.get('title'),
|
||||
'-d', ', '.join(self.get('directors', ['Unknown Director'])),
|
||||
'-f', frame,
|
||||
'-p', poster
|
||||
]
|
||||
if len(self.itemId) == 7:
|
||||
cmd += ['-i', self.itemId]
|
||||
cmd += ['-o', self.oxdbId]
|
||||
p = subprocess.Popen(cmd)
|
||||
p.wait()
|
||||
return posters.keys()
|
||||
|
||||
class ItemFind(models.Model):
|
||||
"""
|
||||
used to find items,
|
||||
item.updateFind populates this table
|
||||
it's used in managers.ItemManager
|
||||
"""
|
||||
class Meta:
|
||||
unique_together = ("item", "key")
|
||||
|
||||
item = models.ForeignKey('Item', related_name='find', db_index=True)
|
||||
key = models.CharField(max_length=200, db_index=True)
|
||||
value = models.TextField(blank=True)
|
||||
|
||||
class ItemSort(models.Model):
|
||||
"""
|
||||
used to sort items, all sort values are in here
|
||||
"""
|
||||
item = models.OneToOneField('Item', related_name='sort', primary_key=True)
|
||||
|
||||
title = models.CharField(max_length=1000, db_index=True)
|
||||
director = models.TextField(blank=True, db_index=True)
|
||||
country = models.TextField(blank=True, db_index=True)
|
||||
year = models.CharField(max_length=4, db_index=True)
|
||||
|
||||
producer = models.TextField(blank=True, db_index=True)
|
||||
writer = models.TextField(blank=True, db_index=True)
|
||||
editor = models.TextField(blank=True, db_index=True)
|
||||
cinematographer = models.TextField(blank=True, db_index=True)
|
||||
|
||||
language = models.TextField(blank=True, db_index=True)
|
||||
runtime = models.IntegerField(blank=True, null=True, db_index=True)
|
||||
|
||||
keywords = models.IntegerField(blank=True, db_index=True)
|
||||
genre = models.TextField(blank=True, db_index=True)
|
||||
cast = models.IntegerField(blank=True, db_index=True)
|
||||
summary = models.IntegerField(blank=True, db_index=True)
|
||||
trivia = models.IntegerField(blank=True, db_index=True)
|
||||
connections = models.IntegerField(blank=True, db_index=True)
|
||||
|
||||
rating = models.FloatField(blank=True, db_index=True)
|
||||
votes = models.IntegerField(blank=True, db_index=True)
|
||||
scenes = models.IntegerField(blank=True, db_index=True)
|
||||
dialog = models.IntegerField(null=True, blank=True, db_index=True)
|
||||
words = models.IntegerField(null=True, blank=True, db_index=True)
|
||||
wpm = models.IntegerField('Words per Minute', null=True, blank=True, db_index=True)
|
||||
risk = models.IntegerField(null=True, blank=True, db_index=True)
|
||||
|
||||
itemId = models.CharField('ID', max_length=128, blank=True, db_index=True)
|
||||
|
||||
duration = models.FloatField(default=-1, db_index=True)
|
||||
resolution = models.IntegerField(blank=True, db_index=True)
|
||||
aspectratio = models.IntegerField('Aspect Ratio', blank=True, db_index=True)
|
||||
bitrate = models.IntegerField(blank=True, db_index=True)
|
||||
pixels = models.BigIntegerField(blank=True, db_index=True)
|
||||
filename = models.IntegerField(blank=True, db_index=True)
|
||||
files = models.IntegerField(blank=True, db_index=True)
|
||||
size = models.BigIntegerField(blank=True, db_index=True)
|
||||
|
||||
#required to move empty values to the bottom for both asc and desc sort
|
||||
title_desc = models.CharField(max_length=1000, db_index=True)
|
||||
director_desc = models.TextField(blank=True, db_index=True)
|
||||
country_desc = models.TextField(blank=True, db_index=True)
|
||||
year_desc = models.CharField(max_length=4, db_index=True)
|
||||
|
||||
producer_desc = models.TextField(blank=True, db_index=True)
|
||||
writer_desc = models.TextField(blank=True, db_index=True)
|
||||
editor_desc = models.TextField(blank=True, db_index=True)
|
||||
cinematographer_desc = models.TextField(blank=True, db_index=True)
|
||||
|
||||
language_desc = models.TextField(blank=True, db_index=True)
|
||||
|
||||
_private_fields = ('id', 'item')
|
||||
#return available sort fields
|
||||
#FIXME: should return mapping name -> verbose_name
|
||||
def fields(self):
|
||||
fields = []
|
||||
for f in self._meta.fields:
|
||||
if f.name not in self._private_fields:
|
||||
name = f.verbose_name
|
||||
name = name[0].capitalize() + name[1:]
|
||||
fields.append(name)
|
||||
return tuple(fields)
|
||||
fields = classmethod(fields)
|
||||
|
||||
class Facet(models.Model):
|
||||
item = models.ForeignKey('Item', related_name='facets')
|
||||
key = models.CharField(max_length=200, db_index=True)
|
||||
value = models.CharField(max_length=200)
|
||||
value_sort = models.CharField(max_length=200)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.value_sort:
|
||||
self.value_sort = self.value
|
||||
super(Facet, self).save(*args, **kwargs)
|
||||
|
||||
def getPersonSort(name):
|
||||
person, created = Person.objects.get_or_create(name=name)
|
||||
name_sort = person.name_sort.replace(u'\xc5k', 'A')
|
||||
return name_sort
|
||||
|
||||
class Person(models.Model):
|
||||
name = models.CharField(max_length=200)
|
||||
imdbId = models.CharField(max_length=7, blank=True)
|
||||
name_sort = models.CharField(max_length=200)
|
||||
|
||||
class Meta:
|
||||
ordering = ('name_sort', )
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.name_sort:
|
||||
self.name_sort = ox.normalize.canonicalName(self.name)
|
||||
super(Person, self).save(*args, **kwargs)
|
||||
|
||||
def get_or_create(model, name, imdbId=None):
|
||||
if imdbId:
|
||||
q = model.objects.filter(name=name, imdbId=imdbId)
|
||||
else:
|
||||
q = model.objects.all().filter(name=name)
|
||||
if q.count() > 0:
|
||||
o = q[0]
|
||||
else:
|
||||
o = model.objects.create(name=name)
|
||||
if imdbId:
|
||||
o.imdbId = imdbId
|
||||
o.save()
|
||||
return o
|
||||
get_or_create = classmethod(get_or_create)
|
||||
|
||||
def json(self):
|
||||
return self.name
|
||||
|
||||
class Location(models.Model):
|
||||
name = models.CharField(max_length=200, unique=True)
|
||||
manual = models.BooleanField(default=False)
|
||||
items = models.ManyToManyField(Item, related_name='locations_all')
|
||||
#fixme: geo data
|
||||
|
||||
lat_sw = models.FloatField(default=0)
|
||||
lng_sw = models.FloatField(default=0)
|
||||
lat_ne = models.FloatField(default=0)
|
||||
lng_ne = models.FloatField(default=0)
|
||||
lat_center = models.FloatField(default=0)
|
||||
lng_center = models.FloatField(default=0)
|
||||
area = models.FloatField(default=-1)
|
||||
|
||||
class Meta:
|
||||
ordering = ('name', )
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
def json(self):
|
||||
return self.name
|
||||
|
||||
class ReviewWhitelist(models.Model):
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
url = models.CharField(max_length=255, unique=True)
|
||||
|
||||
class List(models.Model):
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
user = models.ForeignKey(User)
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
public = models.BooleanField(default=False)
|
||||
items = models.ManyToManyField(Item, related_name='lists', through='ListItem')
|
||||
|
||||
def add(self, item):
|
||||
q = self.items.filter(id=item.id)
|
||||
if q.count() == 0:
|
||||
l = ListItem()
|
||||
l.list = self
|
||||
l.item = item
|
||||
l.save()
|
||||
|
||||
def remove(self, item):
|
||||
ListItem.objects.filter(item=item, list=self).delete()
|
||||
|
||||
def __unicode__(self):
|
||||
return u'%s (%s)' % (self.name, unicode(self.user))
|
||||
|
||||
def editable(self, user):
|
||||
#FIXME: make permissions work
|
||||
if self.user == user or user.has_perm('Ox.admin'):
|
||||
return True
|
||||
return False
|
||||
|
||||
class ListItem(models.Model):
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
list = models.ForeignKey(List)
|
||||
item = models.ForeignKey(Item)
|
||||
|
||||
def __unicode__(self):
|
||||
return u'%s in %s' % (unicode(self.item), unicode(self.list))
|
||||
|
||||
class Layer(models.Model):
|
||||
#FIXME: here having a item,start index would be good
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
user = models.ForeignKey(User)
|
||||
item = models.ForeignKey(Item)
|
||||
|
||||
#seconds
|
||||
start = models.FloatField(default=-1)
|
||||
stop = models.FloatField(default=-1)
|
||||
|
||||
type = models.CharField(blank=True, max_length=255)
|
||||
value = models.TextField()
|
||||
|
||||
#FIXME: relational layers, Locations, clips etc
|
||||
#location = models.ForeignKey('Location', default=None)
|
||||
|
||||
def editable(self, user):
|
||||
if user.is_authenticated():
|
||||
if self.user == user or user.has_perm('0x.admin'):
|
||||
return True
|
||||
if user.groups.filter(id__in=self.groups.all()).count() > 0:
|
||||
return True
|
||||
return False
|
||||
|
||||
class Collection(models.Model):
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
modified = models.DateTimeField(auto_now=True)
|
||||
users = models.ManyToManyField(User, related_name='collections')
|
||||
name = models.CharField(blank=True, max_length=2048)
|
||||
subdomain = models.CharField(unique=True, max_length=2048)
|
||||
items = models.ForeignKey(Item)
|
||||
|
||||
def editable(self, user):
|
||||
return self.users.filter(id=user.id).count() > 0
|
||||
|
||||
def itemid_path(h):
|
||||
return os.path.join('items', h[:2], h[2:4], h[4:6], h[6:])
|
||||
|
||||
def stream_path(stream):
|
||||
return os.path.join(itemid_path(stream.item.itemId), stream.profile)
|
||||
|
||||
class Stream(models.Model):
|
||||
class Meta:
|
||||
unique_together = ("item", "profile")
|
||||
|
||||
item = models.ForeignKey(Item, related_name='streams')
|
||||
profile = models.CharField(max_length=255, default='96p.webm')
|
||||
video = models.FileField(default=None, blank=True, upload_to=lambda f, x: stream_path(f))
|
||||
source = models.ForeignKey('Stream', related_name='derivatives', default=None, null=True)
|
||||
available = models.BooleanField(default=False)
|
||||
info = fields.DictField(default={})
|
||||
|
||||
#def __unicode__(self):
|
||||
# return self.video
|
||||
|
||||
def extract_derivatives(self):
|
||||
if settings.VIDEO_H264:
|
||||
profile = self.profile.replace('.webm', '.mp4')
|
||||
derivative, created = Stream.objects.get_or_create(profile=profile, item=self.item)
|
||||
if created:
|
||||
derivative.source = self
|
||||
derivative.video.name = self.video.name.replace(self.profile, profile)
|
||||
derivative.encode()
|
||||
derivative.save()
|
||||
|
||||
for p in settings.VIDEO_DERIVATIVES:
|
||||
profile = p + '.webm'
|
||||
target = self.video.path.replace(self.profile, profile)
|
||||
derivative, created = Stream.objects.get_or_create(profile=profile, item=self.item)
|
||||
if created:
|
||||
derivative.source = self
|
||||
derivative.video.name = self.video.name.replace(self.profile, profile)
|
||||
derivative.encode()
|
||||
derivative.save()
|
||||
|
||||
if settings.VIDEO_H264:
|
||||
profile = p + '.mp4'
|
||||
derivative, created = Stream.objects.get_or_create(profile=profile, item=self.item)
|
||||
if created:
|
||||
derivative.source = self
|
||||
derivative.video.name = self.video.name.replace(self.profile, profile)
|
||||
derivative.encode()
|
||||
derivative.save()
|
||||
return True
|
||||
|
||||
def encode(self):
|
||||
if self.source:
|
||||
video = self.source.video.path
|
||||
target = self.video.path
|
||||
profile = self.profile
|
||||
info = ox.avinfo(video)
|
||||
if extract.stream(video, target, profile, info):
|
||||
self.available=True
|
||||
self.save()
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s (%s)" % (self.profile, self.item)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self.video and not self.info:
|
||||
self.info = ox.avinfo(self.video.path)
|
||||
super(Stream, self).save(*args, **kwargs)
|
||||
|
||||
class PosterUrl(models.Model):
|
||||
class Meta:
|
||||
unique_together = ("item", "service", "url")
|
||||
ordering = ('-height', )
|
||||
|
||||
item = models.ForeignKey(Item, related_name='poster_urls')
|
||||
url = models.CharField(max_length=1024)
|
||||
service = models.CharField(max_length=1024)
|
||||
width = models.IntegerField(default=80)
|
||||
height = models.IntegerField(default=128)
|
||||
|
||||
def __unicode__(self):
|
||||
return u'%s %s %dx%d' % (unicode(self.item), self.service, self.width, self.height)
|
||||
|
||||
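A quick sketch of the directory layout produced by the two path helpers near the end of this file (the id is an example value, not from this commit):

itemid_path('0133093')   #-> 'items/01/33/09/3'  (id split into two-character directories)
stream_path(stream)      #-> 'items/01/33/09/3/96p.webm' for the default '96p.webm' profile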
38 pandora/item/tasks.py (Normal file)
@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
from datetime import timedelta
|
||||
|
||||
from celery.decorators import task, periodic_task
|
||||
|
||||
import load
|
||||
import models
|
||||
|
||||
|
||||
@periodic_task(run_every=timedelta(days=1))
|
||||
def cronjob(**kwargs):
|
||||
print "do some cleanup stuff once a day"
|
||||
|
||||
@task(ignore_result=True, queue='default')
|
||||
def updatePoster(itemId):
|
||||
item = models.Item.objects.get(itemId=itemId)
|
||||
item.download_poster(True)
|
||||
|
||||
@task(ignore_result=True, queue='default')
|
||||
def updateImdb(imdbId):
|
||||
item = models.Item.objects.get(itemId=imdbId)
|
||||
item.updateImdb()
|
||||
|
||||
@task(ignore_result=True)
|
||||
def findItem(fileId):
|
||||
f = models.File.objects.get(pk=fileId)
|
||||
f.findItem()
|
||||
|
||||
@task(ignore_resulsts=True, queue="encoding")
|
||||
def updateStreams(itemId):
|
||||
'''
|
||||
create stream, extract timeline and create derivatives
|
||||
'''
|
||||
item = models.Item.objects.get(itemId=itemId)
|
||||
if item.files.filter(is_main=True, is_video=True, available=False).count() == 0:
|
||||
item.updateStreams()
|
||||
|
||||
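A hedged sketch of how these tasks would be queued from other code, mirroring the commented-out call in models.getItem; item stands for a models.Item instance and is not part of this commit:

import tasks

tasks.updateImdb.delay(item.itemId)     #queued asynchronously on the 'default' queue
tasks.updateStreams.delay(item.itemId)  #'encoding' queue: creates stream, timeline and derivatives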
23 pandora/item/tests.py (Normal file)
@@ -0,0 +1,23 @@
"""
|
||||
This file demonstrates two different styles of tests (one doctest and one
|
||||
unittest). These will both pass when you run "manage.py test".
|
||||
|
||||
Replace these with more appropriate tests for your application.
|
||||
"""
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
class SimpleTest(TestCase):
|
||||
def test_basic_addition(self):
|
||||
"""
|
||||
Tests that 1 + 1 always equals 2.
|
||||
"""
|
||||
self.failUnlessEqual(1 + 1, 2)
|
||||
|
||||
__test__ = {"doctest": """
|
||||
Another way to test that 1 + 1 is equal to 2.
|
||||
|
||||
>>> 1 + 1 == 2
|
||||
True
|
||||
"""}
|
||||
|
||||
16 pandora/item/urls.py (Normal file)
@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
|
||||
from django.conf.urls.defaults import *
|
||||
|
||||
|
||||
urlpatterns = patterns("item.views",
|
||||
(r'^(?P<id>.*)/frame/(?P<size>\d+)/(?P<position>[0-9\.,]+).jpg$', 'frame'),
|
||||
(r'^(?P<id>.*)/(?P<profile>.*.webm)$', 'video'),
|
||||
(r'^(?P<id>.*)/(?P<profile>.*.mp4)$', 'video'),
|
||||
(r'^(?P<id>.*)/poster\.(?P<size>\d+)\.jpg$', 'poster'),
|
||||
(r'^(?P<id>.*)/poster\.(?P<size>large)\.jpg$', 'poster'),
|
||||
(r'^(?P<id>.*)/poster\.jpg$', 'poster'),
|
||||
(r'^(?P<id>.*)/timelines/(?P<timeline>.+)\.(?P<size>\d+)\.(?P<position>\d+)\.png$', 'timeline'),
|
||||
)
|
||||
|
||||
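For illustration, assuming these patterns are mounted at the site root, example request paths resolve as follows (the id is an example value):

#/0133093/96p.webm            -> item.views.video   (id='0133093', profile='96p.webm')
#/0133093/poster.jpg          -> item.views.poster
#/0133093/frame/128/10.5.jpg  -> item.views.frame   (size='128', position='10.5')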
171 pandora/item/utils.py (Normal file)
@@ -0,0 +1,171 @@
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
#
|
||||
import errno
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import hashlib
|
||||
|
||||
import ox
|
||||
import ox.iso
|
||||
from ox.normalize import normalizeName, normalizeTitle, canonicalTitle
|
||||
|
||||
|
||||
def plural_key(term):
|
||||
return {
|
||||
'country': 'countries',
|
||||
}.get(term, term + 's')
|
||||
|
||||
def oxid(title, directors, year='', seriesTitle='', episodeTitle='', season=0, episode=0):
|
||||
director = ', '.join(directors)
|
||||
oxid_value = u"\n".join([title, director, year])
|
||||
oxid = hashlib.sha1(oxid_value.encode('utf-8')).hexdigest()
|
||||
if seriesTitle:
|
||||
oxid_value = u"\n".join([seriesTitle, "%02d" % season])
|
||||
oxid = hashlib.sha1(oxid_value.encode('utf-8')).hexdigest()[:20]
|
||||
oxid_value = u"\n".join(["%02d" % episode, episodeTitle, director, year])
|
||||
oxid += hashlib.sha1(oxid_value.encode('utf-8')).hexdigest()[:20]
|
||||
return u"0x" + oxid
|
||||
|
||||
def oxdb_id(title, directors=[], year='', season='', episode='', episode_title='', episode_director='', episode_year=''):
|
||||
# new id function, will replace oxid()
|
||||
def get_hash(string):
|
||||
return hashlib.sha1(string.encode('utf-8')).hexdigest().upper()
|
||||
director = ', '.join(directors)
|
||||
if not episode:
|
||||
oxdb_id = get_hash(director)[:8] + get_hash('\n'.join([title, year]))[:8]
|
||||
else:
|
||||
oxdb_id = get_hash('\n'.join([director, title, year, season]))[:8] + \
|
||||
get_hash('\n'.join([episode, episode_director, episode_title, episode_year]))[:8]
|
||||
return u'0x' + oxdb_id
|
||||
|
||||
def oxdb_directors(director):
|
||||
director = os.path.basename(os.path.dirname(director))
|
||||
if director.endswith('_'):
|
||||
director = "%s." % director[:-1]
|
||||
directors = [normalizeName(d) for d in director.split('; ')]
|
||||
def cleanup(director):
|
||||
director = director.strip()
|
||||
director = director.replace('Series', '')
|
||||
director = director.replace('Unknown Director', '')
|
||||
director = director.replace('Various Directors', '')
|
||||
return director
|
||||
directors = filter(None, [cleanup(d) for d in directors])
|
||||
return directors
|
||||
|
||||
def oxdb_title(_title, searchTitle = False):
|
||||
'''
|
||||
normalize filename to get item title
|
||||
'''
|
||||
_title = os.path.basename(_title)
|
||||
_title = _title.replace('... ', '_dot_dot_dot_')
|
||||
_title = _title.replace('. ', '_dot__space_')
|
||||
_title = _title.replace(' .', '_space__dot_')
|
||||
title = _title.split('.')[0]
|
||||
title = re.sub('([a-z0-9])_ ', '\\1: ', title)
|
||||
se = re.compile('Season (\d+).Episode (\d+)').findall(_title)
|
||||
if se:
|
||||
se = "S%02dE%02d" % (int(se[0][0]), int(se[0][1]))
|
||||
if 'Part' in _title.split('.')[-2] and 'Episode' not in _title.split('.')[-3]:
|
||||
stitle = _title.split('.')[-3]
|
||||
else:
|
||||
stitle = _title.split('.')[-2]
|
||||
if stitle.startswith('Episode '):
|
||||
stitle = ''
|
||||
if searchTitle:
|
||||
title = '"%s" %s' % (title, stitle)
|
||||
else:
|
||||
title = '%s (%s) %s' % (title, se, stitle)
|
||||
title = title.strip()
|
||||
title = title.replace('_dot_dot_dot_', '... ')
|
||||
title = title.replace('_dot__space_', '. ')
|
||||
title = title.replace('_space__dot_', ' .')
|
||||
year = ox.findRe(title, '(\(\d{4}\))')
|
||||
if title.endswith(year):
|
||||
title = title[:-len(year)].strip()
|
||||
title = normalizeTitle(title)
|
||||
return title
|
||||
|
||||
def oxdb_year(data):
|
||||
return ox.findRe(data, '\.(\d{4})\.')
|
||||
|
||||
def oxdb_series_title(path):
|
||||
seriesTitle = u''
|
||||
if path.startswith('Series'):
|
||||
seriesTitle = os.path.basename(os.path.dirname(path))
|
||||
else:
|
||||
t = oxdb_title(path)
|
||||
if " (S" in t:
|
||||
seriesTitle = t.split(" (S")[0]
|
||||
return seriesTitle
|
||||
|
||||
def oxdb_episode_title(path):
|
||||
episodeTitle = u''
|
||||
ep = re.compile('.Episode \d+?\.(.*?)\.[a-zA-Z]').findall(path)
|
||||
if ep:
|
||||
episodeTitle = ep[0]
|
||||
return episodeTitle
|
||||
|
||||
def oxdb_season_episode(path):
|
||||
season = 0
|
||||
episode = 0
|
||||
path = os.path.basename(path)
|
||||
se = re.compile('Season (\d+).Episode (\d+)').findall(path)
|
||||
if se:
|
||||
season = int(se[0][0])
|
||||
episode = int(se[0][1])
|
||||
else:
|
||||
ep = re.compile('.Episode (\d+?)').findall(path)
|
||||
if ep:
|
||||
episode = int(ep[0])
|
||||
if season == 0 and episode == 0:
|
||||
se = re.compile('S(\d\d)E(\d\d)').findall(path)
|
||||
if se:
|
||||
season = int(se[0][0])
|
||||
episode = int(se[0][1])
|
||||
return (season, episode)
|
||||
|
||||
def oxdb_part(path):
|
||||
part = 1
|
||||
path = path.lower()
|
||||
p = re.compile('part\s*?(\d+)\.').findall(path)
|
||||
if p:
|
||||
part = p[0]
|
||||
else:
|
||||
p = re.compile('cd\s*?(\d+)\.').findall(path)
|
||||
if p:
|
||||
part = p[0]
|
||||
return part
|
||||
|
||||
def parse_path(path):
|
||||
import ox.web.imdb
|
||||
search_title = oxdb_title(path, True)
|
||||
r = {}
|
||||
r['title'] = oxdb_title(path)
|
||||
r['directors'] = oxdb_directors(path)
|
||||
year = ox.findRe(path, '\((\d{4})\)')
|
||||
if year:
|
||||
r['year'] = year
|
||||
|
||||
#FIXME: only include if it's actually a series
|
||||
r['episode_title'] = oxdb_episode_title(path)
|
||||
r['season'], r['episode'] = oxdb_season_episode(path)
|
||||
r['series_title'] = oxdb_series_title(path)
|
||||
|
||||
r['imdbId'] = ox.web.imdb.guess(search_title, ', '.join(r['directors']), timeout=-1)
|
||||
r['oxdbId'] = oxid(r['title'], r['directors'],
|
||||
seriesTitle=r['series_title'],
|
||||
episodeTitle=r['episode_title'],
|
||||
season=r['season'], episode=r['episode'])
|
||||
return r
|
||||
|
||||
def sort_title(title):
|
||||
#title
|
||||
title = re.sub(u'[\'!¿¡,\.;\-"\:\*\[\]]', '', title)
|
||||
title = title.replace(u'Æ', 'Ae')
|
||||
#pad numbered titles
|
||||
title = re.sub('(\d+)', lambda x: '%010d' % int(x.group(0)), title)
|
||||
return title.strip()
|
||||
|
||||
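A small sketch of what oxid above returns (title, director and year are example values, not from this commit):

oxid('Barry Lyndon', ['Stanley Kubrick'], '1975')
#-> u'0x' followed by the 40-character sha1 hex digest of u'title\ndirector\nyear';
#for episodes the id is instead built from two 20-character halves, one hashed
#from the series title and season, one from the episode fields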
483 pandora/item/views.py (Normal file)
@@ -0,0 +1,483 @@
# -*- coding: utf-8 -*-
|
||||
# vi:si:et:sw=4:sts=4:ts=4
|
||||
from __future__ import division
|
||||
import os.path
|
||||
import re
|
||||
from datetime import datetime
|
||||
from urllib2 import unquote
|
||||
import mimetypes
|
||||
|
||||
from django import forms
|
||||
from django.core.paginator import Paginator
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.contrib.auth.models import User
|
||||
from django.db.models import Q, Avg, Count, Sum
|
||||
from django.http import HttpResponse, Http404
|
||||
from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404, redirect
|
||||
from django.template import RequestContext
|
||||
from django.conf import settings
|
||||
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
from django.utils import simplejson as json
|
||||
|
||||
from oxdjango.decorators import login_required_json
|
||||
from oxdjango.shortcuts import render_to_json_response, get_object_or_404_json, json_response
|
||||
from oxdjango.http import HttpFileResponse
|
||||
import ox
|
||||
|
||||
import models
|
||||
import utils
|
||||
import tasks
|
||||
|
||||
from user.models import getUserJSON
|
||||
|
||||
from archive.models import File
|
||||
from archive import extract
|
||||
|
||||
def _order_query(qs, sort, prefix='sort__'):
|
||||
order_by = []
|
||||
if len(sort) == 1:
|
||||
if sort[0]['key'] in ('title', 'director'):
|
||||
sort.append({'operator': '-', 'key': 'year'})
|
||||
if sort[0]['key'] in ('year', ):
|
||||
sort.append({'operator': '+', 'key': 'director'})
|
||||
for e in sort:
|
||||
operator = e['operator']
|
||||
if operator != '-': operator = ''
|
||||
key = {'id': 'itemId'}.get(e['key'], e['key'])
|
||||
#FIXME: this should be a property of models.ItemSort!!!
|
||||
if operator=='-' and key in ('title', 'director', 'writer', 'producer', 'editor', 'cinematographer', 'language', 'country', 'year'):
|
||||
key = '%s_desc' % key
|
||||
order = '%s%s%s' % (operator, prefix, key)
|
||||
order_by.append(order)
|
||||
if order_by:
|
||||
qs = qs.order_by(*order_by)
|
||||
return qs
|
||||
|
||||
def _parse_query(data, user):
|
||||
query = {}
|
||||
query['range'] = [0, 100]
|
||||
query['sort'] = [{'key':'title', 'operator':'+'}]
|
||||
for key in ('sort', 'keys', 'group', 'list', 'range', 'ids'):
|
||||
if key in data:
|
||||
query[key] = data[key]
|
||||
query['qs'] = models.Item.objects.find(data, user)
|
||||
#group by only allows sorting by name or number of items
|
||||
return query
|
||||
|
||||
def _get_positions(ids, get_ids):
|
||||
positions = {}
|
||||
for i in get_ids:
|
||||
try:
|
||||
positions[i] = ids.index(i)
|
||||
except:
|
||||
pass
|
||||
return positions
|
||||
|
||||
def api_find(request):
    '''
    param data
        {'query': query, 'sort': array, 'range': array}

        query: query object, more on query syntax at
            https://wiki.0x2620.org/wiki/pandora/QuerySyntax
        sort: array of key/operator dicts
            [
                {
                    key: "year",
                    operator: "-"
                },
                {
                    key: "director",
                    operator: ""
                }
            ]
        range: result range, array [from, to]
        keys: array of keys to return
        group: group elements by country, genre, director...

    with keys, items is a list of dicts with the requested properties:
    return {'status': {'code': int, 'text': string},
            'data': {items: array}}

    Groups
    param data
        {'query': query, 'key': string, 'group': string, 'range': array}

        query: query object, more on query syntax at
            https://wiki.0x2620.org/wiki/pandora/QuerySyntax
        range: result range, array [from, to]
        keys: array of keys to return
        group: group elements by country, genre, director...

        possible values for keys: name, items

    with keys, items contains a list of {'name': string, 'items': int}:
    return {'status': {'code': int, 'text': string},
            'data': {items: array}}

    without keys: return number of items in the given query
    return {'status': {'code': int, 'text': string},
            'data': {items: int}}

    Positions
    param data
        {'query': query, 'ids': []}

        query: query object, more on query syntax at
            https://wiki.0x2620.org/wiki/pandora/QuerySyntax
        ids: ids of items for which positions are required
    '''
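    # Example request payload (illustrative only -- field values are made up
    # and the exact condition syntax is documented at the wiki URL above):
    #   {"query": {"conditions": [{"key": "director", "value": "Godard",
    #                              "operator": ""}],
    #              "operator": "&"},
    #    "sort": [{"key": "year", "operator": "-"}],
    #    "range": [0, 100],
    #    "keys": ["title", "director", "year"]}
    # would return the first 100 matches with only the requested keys.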
    data = json.loads(request.POST['data'])
    if settings.JSON_DEBUG:
        print json.dumps(data, indent=2)
    query = _parse_query(data, request.user)

    response = json_response({})
    if 'group' in query:
        if 'sort' in query:
            if len(query['sort']) == 1 and query['sort'][0]['key'] == 'items':
                if query['group'] == "year":
                    query['sort'].append({'key': 'name', 'operator': '-'})
                else:
                    query['sort'].append({'key': 'name', 'operator': '+'})
        else:
            query['sort'] = [{'key': 'name', 'operator': '+'}]
        response['data']['items'] = []
        items = 'items'
        item_qs = query['qs']
        qs = models.Facet.objects.filter(key=query['group']).filter(item__id__in=item_qs)
        qs = qs.values('value').annotate(items=Count('id')).order_by()
        name = 'value'
        name_sort = 'value_sort'

        #replace normalized items/name sort with actual db value
        for i in range(0, len(query['sort'])):
            if query['sort'][i]['key'] == 'name':
                query['sort'][i]['key'] = name_sort
            elif query['sort'][i]['key'] == 'items':
                query['sort'][i]['key'] = items
        qs = _order_query(qs, query['sort'], prefix='')
        if 'ids' in query:
            #FIXME: this does not scale for larger results
            response['data']['positions'] = {}
            ids = [j[name] for j in qs]
            response['data']['positions'] = _get_positions(ids, query['ids'])
        elif 'range' in data:
            qs = qs[query['range'][0]:query['range'][1]]
            response['data']['items'] = [{'name': i[name], 'items': i[items]} for i in qs]
        else:
            response['data']['items'] = qs.count()
    elif 'ids' in query:
        #FIXME: this does not scale for larger results
        qs = _order_query(query['qs'], query['sort'])

        response['data']['positions'] = {}
        ids = [j['itemId'] for j in qs.values('itemId')]
        response['data']['positions'] = _get_positions(ids, query['ids'])
    elif 'keys' in query:
        response['data']['items'] = []
        qs = _order_query(query['qs'], query['sort'])
        _p = query['keys']

        def only_p(m):
            r = {}
            if m:
                m = json.loads(m)
                for p in _p:
                    r[p] = m.get(p, '')
            return r
        qs = qs[query['range'][0]:query['range'][1]]
        response['data']['items'] = [only_p(m['json']) for m in qs.values('json')]
    else:  #otherwise return stats about the matching items
        items = query['qs']
        files = File.objects.all().filter(item__in=items)
        r = files.aggregate(
            Sum('duration'),
            Sum('pixels'),
            Sum('size')
        )
        response['data']['duration'] = r['duration__sum']
        response['data']['files'] = files.count()
        response['data']['items'] = items.count()
        response['data']['pixels'] = r['pixels__sum']
        response['data']['runtime'] = items.aggregate(Sum('sort__runtime'))['sort__runtime__sum']
        if response['data']['runtime'] is None:
            #FIXME: placeholder value used while no runtime information is available
            response['data']['runtime'] = 1337
        response['data']['size'] = r['size__sum']
    return render_to_json_response(response)

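# For reference, the response shapes produced by the branches above
# (values are illustrative):
#   grouped:    {'data': {'items': [{'name': 'France', 'items': 12}, ...]}}
#   positions:  {'data': {'positions': {'someItemId': 42, ...}}}
#   with keys:  {'data': {'items': [{'title': ..., 'year': ...}, ...]}}
#   stats:      {'data': {'items': 42, 'files': 123, 'duration': ...,
#                         'pixels': ..., 'runtime': ..., 'size': ...}}
# each wrapped together with the usual {'status': {'code': ..., 'text': ...}} block.
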
def api_getItem(request):
    '''
    param data
        string id

    return {'status': {'code': int, 'text': string},
            'data': {item: dict}}
    '''
    response = json_response({})
    itemId = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=itemId)
    #FIXME: check permissions
    info = item.get_json()
    info['stream'] = item.get_stream()
    info['layers'] = item.get_layers()
    response['data'] = {'item': info}
    return render_to_json_response(response)

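# Example api_getItem response (illustrative; the item fields depend on
# item.get_json() and vary per item):
#   {'status': {'code': 200, 'text': '...'},
#    'data': {'item': {'title': '...', 'stream': {...}, 'layers': {...}}}}
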
@login_required_json
def api_editItem(request):
    '''
    param data
        {id: string, key: value, ...}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    if item.editable(request.user):
        response = json_response(status=501, text='not implemented')
        item.edit(data)
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)

@login_required_json
def api_removeItem(request):
    '''
    param data
        string id

    return {'status': {'code': int, 'text': string}}
    '''
    response = json_response({})
    itemId = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=itemId)
    if item.editable(request.user):
        response = json_response(status=501, text='not implemented')
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)

@login_required_json
def api_addLayer(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = {'status': {'code': 501, 'text': 'not implemented'}}
    return render_to_json_response(response)

@login_required_json
def api_removeLayer(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = {'status': {'code': 501, 'text': 'not implemented'}}
    return render_to_json_response(response)

@login_required_json
def api_editLayer(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = json_response({})
    data = json.loads(request.POST['data'])
    layer = get_object_or_404_json(models.Layer, pk=data['id'])
    if layer.editable(request.user):
        response = json_response(status=501, text='not implemented')
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)

'''
List API
'''

@login_required_json
def api_addListItem(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = json_response(status=501, text='not implemented')
    return render_to_json_response(response)

@login_required_json
def api_removeListItem(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = json_response(status=501, text='not implemented')
    return render_to_json_response(response)

@login_required_json
def api_addList(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = json_response(status=501, text='not implemented')
    return render_to_json_response(response)

@login_required_json
def api_editList(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = json_response(status=501, text='not implemented')
    return render_to_json_response(response)

def api_removeList(request):
    '''
    param data
        {key: value}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    response = json_response(status=501, text='not implemented')
    return render_to_json_response(response)

'''
Poster API
'''

def api_parse(request):  #parse path and return info
    '''
    param data
        {path: string}
    return {'status': {'code': int, 'text': string},
            'data': {imdb: string}}
    '''
    path = json.loads(request.POST['data'])['path']
    response = json_response(utils.parse_path(path))
    return render_to_json_response(response)


def api_setPosterFrame(request):
    '''
    param data
        {id: itemId, position: float}
    return {'status': {'code': int, 'text': string},
            'data': {}}
    '''
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    if item.editable(request.user):
        #FIXME: some things need to be updated after changing this
        item.poster_frame = data['position']
        item.save()
        response = json_response(status=200, text='ok')
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)

def api_setPoster(request):
    '''
    param data
        {id: itemId, url: string}
    return {'status': {'code': int, 'text': string},
            'data': {poster: {url, width, height}}}
    '''
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    if item.editable(request.user):
        valid_urls = [p.url for p in item.poster_urls.all()]
        if data['url'] in valid_urls:
            item.poster_url = data['url']
            if item.poster:
                item.poster.delete()
            item.save()
            tasks.updatePoster.delay(item.itemId)
            response = json_response(status=200, text='ok')
            response['data']['poster'] = item.get_poster()
        else:
            response = json_response(status=403, text='invalid poster url')
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)

def api_getImdbId(request):
    '''
    param data
        {title: string, director: string, year: string}
    return {'status': {'code': int, 'text': string},
            'data': {imdbId: string}}
    '''
    data = json.loads(request.POST['data'])
    imdbId = ox.web.imdb.guess(data['title'], data['director'], timeout=-1)
    if imdbId:
        response = json_response({'imdbId': imdbId})
    else:
        response = json_response(status=404, text='not found')
    return render_to_json_response(response)


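# Example for api_getImdbId above: posting
#   data={"title": "Alphaville", "director": "Jean-Luc Godard", "year": "1965"}
# looks the title/director pair up via ox.web.imdb.guess and, on success,
# returns {'status': {...}, 'data': {'imdbId': '0123456'}} (id value illustrative).
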
'''
media delivery
'''

def frame(request, id, position, size):
    item = get_object_or_404(models.Item, itemId=id)
    position = float(position.replace(',', '.'))
    frame = item.frame(position, int(size))
    if not frame:
        raise Http404
    return HttpFileResponse(frame, content_type='image/jpeg')

def poster(request, id, size=None):
    item = get_object_or_404(models.Item, itemId=id)
    if size == 'large':
        size = None
    if item.poster:
        if size:
            size = int(size)
            poster_path = item.poster.path.replace('.jpg', '.%d.jpg' % size)
            if not os.path.exists(poster_path):
                poster_size = max(item.poster.width, item.poster.height)
                if size > poster_size:
                    return redirect('/%s/poster.jpg' % item.itemId)
                extract.resize_image(item.poster.path, poster_path, size=size)
        else:
            poster_path = item.poster.path
    else:
        if not size:
            size = 'large'
        return redirect('http://0xdb.org/%s/poster.%s.jpg' % (item.itemId, size))
        #FIXME: unreachable while the redirect above is in place, kept as the local fallback
        poster_path = os.path.join(settings.STATIC_ROOT, 'png/posterDark.48.png')
    return HttpFileResponse(poster_path, content_type='image/jpeg')

def timeline(request, id, timeline, size, position):
    item = get_object_or_404(models.Item, itemId=id)
    if timeline == 'strip':
        timeline = '%s.%s.%04d.png' % (item.timeline_prefix[:-8] + 'strip', size, int(position))
    else:
        timeline = '%s.%s.%04d.png' % (item.timeline_prefix, size, int(position))
    return HttpFileResponse(timeline, content_type='image/png')

def video(request, id, profile):
    item = get_object_or_404(models.Item, itemId=id)
    stream = get_object_or_404(item.streams, profile=profile)
    path = stream.video.path
    content_type = path.endswith('.mp4') and 'video/mp4' or 'video/webm'
    return HttpFileResponse(path, content_type=content_type)

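# The media delivery views above are mapped to URLs in the project's urls.py;
# a hypothetical mapping (regexes and module path are illustrative, not the
# project's actual urlconf) could look like:
#   (r'^(?P<id>[A-Z0-9]+)/frame/(?P<position>[\d,.]+)/(?P<size>\d+)\.jpg$', 'item.views.frame'),
#   (r'^(?P<id>[A-Z0-9]+)/poster(\.(?P<size>\d+))?\.jpg$', 'item.views.poster'),
#   (r'^(?P<id>[A-Z0-9]+)/(?P<profile>.+)\.(mp4|webm)$', 'item.views.video'),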