2009-08-01 14:14:54 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# vi:si:et:sw=4:sts=4:ts=4
|
2010-02-16 12:41:57 +00:00
|
|
|
from __future__ import division
|
2009-08-01 14:14:54 +00:00
|
|
|
import os.path
|
2011-08-19 18:10:28 +00:00
|
|
|
from datetime import datetime, timedelta
|
2011-06-27 19:40:44 +00:00
|
|
|
import mimetypes
|
2011-11-12 08:57:51 +00:00
|
|
|
import random
|
2012-01-10 16:00:41 +00:00
|
|
|
from urlparse import urlparse
|
2012-01-11 07:42:32 +00:00
|
|
|
import time
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2011-08-23 17:39:34 +00:00
|
|
|
import Image
|
2011-01-25 14:39:03 +00:00
|
|
|
from django.db.models import Count, Sum, Max
|
2012-01-09 09:06:35 +00:00
|
|
|
from django.template import RequestContext
|
2011-09-28 12:47:13 +00:00
|
|
|
from django.http import HttpResponse, HttpResponseForbidden, Http404
|
2012-01-09 09:06:35 +00:00
|
|
|
from django.shortcuts import get_object_or_404, redirect, render_to_response
|
2010-02-08 10:26:25 +00:00
|
|
|
from django.conf import settings
|
2010-02-03 12:05:38 +00:00
|
|
|
|
2012-01-10 16:00:41 +00:00
|
|
|
from ox.utils import json, ET
|
2010-02-03 12:05:38 +00:00
|
|
|
|
2010-11-23 09:53:12 +00:00
|
|
|
from ox.django.decorators import login_required_json
|
|
|
|
from ox.django.shortcuts import render_to_json_response, get_object_or_404_json, json_response
|
|
|
|
from ox.django.http import HttpFileResponse
|
2011-06-01 11:03:43 +00:00
|
|
|
from django.db.models import Q
|
2010-07-07 22:46:41 +00:00
|
|
|
import ox
|
2009-12-31 15:04:32 +00:00
|
|
|
|
2009-08-01 14:14:54 +00:00
|
|
|
import models
|
2009-10-04 22:00:08 +00:00
|
|
|
import utils
|
2010-06-25 11:53:57 +00:00
|
|
|
import tasks
|
2010-01-25 09:20:21 +00:00
|
|
|
|
2011-08-19 18:10:28 +00:00
|
|
|
from archive.models import File, Stream
|
2010-09-07 14:05:38 +00:00
|
|
|
from archive import extract
|
2011-10-19 15:55:29 +00:00
|
|
|
from clip.models import Clip
|
2010-09-07 14:05:38 +00:00
|
|
|
|
2012-01-03 20:18:47 +00:00
|
|
|
from ox.django.api import actions
|
2010-11-25 15:21:23 +00:00
|
|
|
|
2012-01-20 17:53:21 +00:00
|
|
|
import utils
|
|
|
|
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-02-22 09:25:29 +00:00
|
|
|
def _order_query(qs, sort, prefix='sort__'):
|
2009-08-16 12:23:29 +00:00
|
|
|
order_by = []
|
2010-07-12 14:56:14 +00:00
|
|
|
if len(sort) == 1:
|
2012-01-20 17:53:21 +00:00
|
|
|
key = utils.get_by_id(settings.CONFIG['itemKeys'], sort[0]['key'])
|
|
|
|
for s in key.get('additionalSort', settings.CONFIG.get('additionalSort', [])):
|
|
|
|
sort.append(s)
|
2010-02-22 09:25:29 +00:00
|
|
|
for e in sort:
|
2010-06-30 08:40:48 +00:00
|
|
|
operator = e['operator']
|
2011-01-01 11:44:42 +00:00
|
|
|
if operator != '-':
|
|
|
|
operator = ''
|
2011-01-25 14:39:03 +00:00
|
|
|
key = {
|
|
|
|
'id': 'itemId',
|
|
|
|
}.get(e['key'], e['key'])
|
2011-11-11 17:45:46 +00:00
|
|
|
if key not in ('itemId', ):
|
2011-01-25 14:39:03 +00:00
|
|
|
key = "%s%s" % (prefix, key)
|
|
|
|
order = '%s%s' % (operator, key)
|
2010-02-22 09:25:29 +00:00
|
|
|
order_by.append(order)
|
|
|
|
if order_by:
|
2011-01-24 22:46:16 +00:00
|
|
|
qs = qs.order_by(*order_by, nulls_last=True)
|
2010-02-22 09:25:29 +00:00
|
|
|
return qs
|
|
|
|
|
2011-01-24 13:44:38 +00:00
|
|
|
def _order_by_group(query):
|
|
|
|
if 'sort' in query:
|
|
|
|
if len(query['sort']) == 1 and query['sort'][0]['key'] == 'items':
|
2011-09-18 13:48:52 +00:00
|
|
|
order_by = query['sort'][0]['operator'] == '-' and '-items' or 'items'
|
2011-01-24 13:44:38 +00:00
|
|
|
if query['group'] == "year":
|
2011-10-29 23:32:11 +00:00
|
|
|
secondary = query['sort'][0]['operator'] == '-' and '-sortvalue' or 'sortvalue'
|
2011-09-18 13:48:52 +00:00
|
|
|
order_by = (order_by, secondary)
|
|
|
|
elif query['group'] != "keyword":
|
2011-10-29 23:32:11 +00:00
|
|
|
order_by = (order_by, 'sortvalue')
|
2011-01-24 13:44:38 +00:00
|
|
|
else:
|
2012-01-31 00:24:18 +00:00
|
|
|
order_by = (order_by, 'value')
|
2011-01-24 13:44:38 +00:00
|
|
|
else:
|
2011-10-29 23:32:11 +00:00
|
|
|
order_by = query['sort'][0]['operator'] == '-' and '-sortvalue' or 'sortvalue'
|
2011-01-24 13:44:38 +00:00
|
|
|
order_by = (order_by, 'items')
|
|
|
|
else:
|
2011-10-29 23:32:11 +00:00
|
|
|
order_by = ('-sortvalue', 'items')
|
2011-01-24 13:44:38 +00:00
|
|
|
return order_by
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2011-02-22 15:26:06 +00:00
|
|
|
def parse_query(data, user):
    '''
    Build the internal query dict for a find request.

    Returns a dict with defaults for 'range' ([0, 100]) and 'sort'
    (title ascending), any of sort/keys/group/range/position/positions
    copied from data, the item queryset under 'qs', and — if data
    contains 'clips' — the clip queryset and clip display options.
    '''
    query = {
        'range': [0, 100],
        'sort': [{'key': 'title', 'operator': '+'}],
    }
    for k in ('sort', 'keys', 'group', 'range', 'position', 'positions'):
        if k in data:
            query[k] = data[k]
    query['qs'] = models.Item.objects.find(data, user)
    if 'clips' in data:
        clips = data['clips']
        conditions = {'query': clips['query']}
        query['clip_qs'] = Clip.objects.find(conditions, user).order_by('start')
        # NOTE(review): models.Clip vs the Clip imported from clip.models —
        # confirm both names resolve to the same model.
        query['clip_filter'] = models.Clip.objects.filter_annotations(conditions, user)
        query['clip_items'] = clips.get('items', 5)
        query['clip_keys'] = clips.get('keys') or ['id', 'in', 'out', 'annotations']
    # group by only allows sorting by name or number of items
    return query
|
2009-08-01 14:14:54 +00:00
|
|
|
|
2010-12-22 07:45:37 +00:00
|
|
|
def find(request):
|
2010-01-25 09:20:21 +00:00
|
|
|
'''
|
2010-12-24 09:23:34 +00:00
|
|
|
param data {
|
|
|
|
'query': query,
|
|
|
|
'sort': array,
|
|
|
|
'range': array
|
2011-10-19 16:20:12 +00:00
|
|
|
clipsQuery: ...
|
2010-12-24 09:23:34 +00:00
|
|
|
}
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-02-22 09:25:29 +00:00
|
|
|
query: query object, more on query syntax at
|
2010-06-30 14:21:04 +00:00
|
|
|
https://wiki.0x2620.org/wiki/pandora/QuerySyntax
|
2010-06-27 12:41:38 +00:00
|
|
|
sort: array of key, operator dics
|
|
|
|
[
|
|
|
|
{
|
|
|
|
key: "year",
|
|
|
|
operator: "-"
|
|
|
|
},
|
|
|
|
{
|
|
|
|
key: "director",
|
|
|
|
operator: ""
|
|
|
|
}
|
|
|
|
]
|
2010-02-22 09:25:29 +00:00
|
|
|
range: result range, array [from, to]
|
|
|
|
keys: array of keys to return
|
|
|
|
group: group elements by, country, genre, director...
|
2010-01-26 13:11:57 +00:00
|
|
|
|
2010-02-22 09:25:29 +00:00
|
|
|
with keys, items is list of dicts with requested properties:
|
2010-01-26 13:11:57 +00:00
|
|
|
return {'status': {'code': int, 'text': string},
|
2010-01-29 11:03:45 +00:00
|
|
|
'data': {items: array}}
|
2010-01-26 13:11:57 +00:00
|
|
|
|
2010-06-30 11:18:04 +00:00
|
|
|
Groups
|
2010-12-24 09:23:34 +00:00
|
|
|
param data {
|
|
|
|
'query': query,
|
|
|
|
'key': string,
|
|
|
|
'group': string,
|
|
|
|
'range': array
|
2011-10-19 16:32:11 +00:00
|
|
|
clips: {}
|
2010-12-24 09:23:34 +00:00
|
|
|
}
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-06-30 11:18:04 +00:00
|
|
|
query: query object, more on query syntax at
|
2010-06-30 14:21:04 +00:00
|
|
|
https://wiki.0x2620.org/wiki/pandora/QuerySyntax
|
2010-06-30 11:18:04 +00:00
|
|
|
range: result range, array [from, to]
|
|
|
|
keys: array of keys to return
|
|
|
|
group: group elements by, country, genre, director...
|
|
|
|
|
|
|
|
possible values for keys: name, items
|
|
|
|
|
|
|
|
with keys
|
|
|
|
items contains list of {'name': string, 'items': int}:
|
|
|
|
return {'status': {'code': int, 'text': string},
|
|
|
|
'data': {items: array}}
|
2010-01-26 13:11:57 +00:00
|
|
|
|
2010-06-30 11:18:04 +00:00
|
|
|
without keys: return number of items in given query
|
2010-01-26 13:11:57 +00:00
|
|
|
return {'status': {'code': int, 'text': string},
|
|
|
|
'data': {items: int}}
|
|
|
|
|
2010-06-30 11:18:04 +00:00
|
|
|
Positions
|
2010-12-24 09:23:34 +00:00
|
|
|
param data {
|
|
|
|
'query': query,
|
2011-06-01 11:03:43 +00:00
|
|
|
'positions': [],
|
|
|
|
'sort': array
|
2010-12-24 09:23:34 +00:00
|
|
|
}
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-06-30 11:18:04 +00:00
|
|
|
query: query object, more on query syntax at
|
2010-06-30 14:21:04 +00:00
|
|
|
https://wiki.0x2620.org/wiki/pandora/QuerySyntax
|
2011-06-01 11:03:43 +00:00
|
|
|
positions: ids of items for which positions are required
|
2011-01-13 12:06:05 +00:00
|
|
|
return {
|
|
|
|
status: {...},
|
|
|
|
data: {
|
|
|
|
positions: {
|
|
|
|
id: position
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2010-01-25 09:20:21 +00:00
|
|
|
'''
|
2010-02-22 09:25:29 +00:00
|
|
|
data = json.loads(request.POST['data'])
|
2010-06-26 14:32:08 +00:00
|
|
|
if settings.JSON_DEBUG:
|
|
|
|
print json.dumps(data, indent=2)
|
2011-02-22 15:26:06 +00:00
|
|
|
query = parse_query(data, request.user)
|
2011-01-01 11:44:42 +00:00
|
|
|
|
2010-02-10 13:10:28 +00:00
|
|
|
response = json_response({})
|
2010-07-01 08:49:08 +00:00
|
|
|
if 'group' in query:
|
2010-06-30 11:18:04 +00:00
|
|
|
response['data']['items'] = []
|
2010-09-23 16:01:48 +00:00
|
|
|
items = 'items'
|
|
|
|
item_qs = query['qs']
|
2011-01-24 13:44:38 +00:00
|
|
|
order_by = _order_by_group(query)
|
2010-09-23 16:01:48 +00:00
|
|
|
qs = models.Facet.objects.filter(key=query['group']).filter(item__id__in=item_qs)
|
2011-01-03 06:44:38 +00:00
|
|
|
qs = qs.values('value').annotate(items=Count('id')).order_by(*order_by)
|
2011-01-03 14:14:54 +00:00
|
|
|
|
2011-06-01 11:03:43 +00:00
|
|
|
if 'positions' in query:
|
2010-07-01 08:49:08 +00:00
|
|
|
response['data']['positions'] = {}
|
2011-01-03 14:14:54 +00:00
|
|
|
ids = [j['value'] for j in qs]
|
2011-06-01 11:03:43 +00:00
|
|
|
response['data']['positions'] = utils.get_positions(ids, query['positions'])
|
2010-07-01 08:49:08 +00:00
|
|
|
elif 'range' in data:
|
2010-06-30 11:18:04 +00:00
|
|
|
qs = qs[query['range'][0]:query['range'][1]]
|
2011-01-03 14:14:54 +00:00
|
|
|
response['data']['items'] = [{'name': i['value'], 'items': i[items]} for i in qs]
|
2010-06-30 11:18:04 +00:00
|
|
|
else:
|
|
|
|
response['data']['items'] = qs.count()
|
2011-06-01 11:03:43 +00:00
|
|
|
elif 'position' in query:
|
2010-06-30 11:18:04 +00:00
|
|
|
qs = _order_query(query['qs'], query['sort'])
|
2010-09-23 16:01:48 +00:00
|
|
|
ids = [j['itemId'] for j in qs.values('itemId')]
|
2011-06-01 11:03:43 +00:00
|
|
|
data['conditions'] = data['conditions'] + {
|
|
|
|
'value': query['position'],
|
|
|
|
'key': query['sort'][0]['key'],
|
|
|
|
'operator': '^'
|
|
|
|
}
|
|
|
|
query = parse_query(data, request.user)
|
|
|
|
qs = _order_query(query['qs'], query['sort'])
|
|
|
|
if qs.count() > 0:
|
|
|
|
response['data']['position'] = utils.get_positions(ids, [qs[0].itemId])[0]
|
|
|
|
elif 'positions' in query:
|
|
|
|
qs = _order_query(query['qs'], query['sort'])
|
|
|
|
ids = [j['itemId'] for j in qs.values('itemId')]
|
|
|
|
response['data']['positions'] = utils.get_positions(ids, query['positions'])
|
2010-07-01 08:49:08 +00:00
|
|
|
elif 'keys' in query:
|
|
|
|
response['data']['items'] = []
|
|
|
|
qs = _order_query(query['qs'], query['sort'])
|
|
|
|
_p = query['keys']
|
2011-10-19 16:54:33 +00:00
|
|
|
|
|
|
|
def get_clips(qs):
|
|
|
|
n = qs.count()
|
|
|
|
if n > query['clip_items']:
|
2011-10-19 21:20:47 +00:00
|
|
|
num = query['clip_items']
|
2011-10-19 16:54:33 +00:00
|
|
|
clips = []
|
2011-10-19 21:20:47 +00:00
|
|
|
step = int(n / (num + 1))
|
|
|
|
i = step
|
|
|
|
while i <= (n - step) and i < n and len(clips) < num:
|
2011-10-19 16:54:33 +00:00
|
|
|
clips.append(qs[i])
|
2011-10-19 21:20:47 +00:00
|
|
|
i += step
|
2011-10-19 16:54:33 +00:00
|
|
|
else:
|
|
|
|
clips = qs
|
2012-01-05 12:31:56 +00:00
|
|
|
return [c.json(query['clip_keys'], query['clip_filter']) for c in clips]
|
2011-10-19 16:54:33 +00:00
|
|
|
|
2011-01-25 14:39:03 +00:00
|
|
|
def only_p_sums(m):
|
|
|
|
r = {}
|
|
|
|
for p in _p:
|
2011-11-11 10:34:57 +00:00
|
|
|
if p == 'accessed':
|
2011-11-11 18:00:09 +00:00
|
|
|
r[p] = m.sort.accessed or ''
|
2011-11-10 19:52:26 +00:00
|
|
|
elif p == 'timesaccessed':
|
2011-11-11 17:45:46 +00:00
|
|
|
r[p] = m.sort.timesaccessed
|
2011-01-25 14:39:03 +00:00
|
|
|
else:
|
|
|
|
r[p] = m.json.get(p, '')
|
2011-10-19 15:55:29 +00:00
|
|
|
if 'clip_qs' in query:
|
2011-10-19 16:54:33 +00:00
|
|
|
r['clips'] = get_clips(query['clip_qs'].filter(item=m))
|
2011-01-25 14:39:03 +00:00
|
|
|
return r
|
2010-07-01 08:49:08 +00:00
|
|
|
def only_p(m):
|
|
|
|
r = {}
|
|
|
|
if m:
|
2011-08-16 15:36:35 +00:00
|
|
|
m = json.loads(m, object_hook=ox.django.fields.from_json)
|
2010-07-01 08:49:08 +00:00
|
|
|
for p in _p:
|
|
|
|
r[p] = m.get(p, '')
|
2011-10-19 15:55:29 +00:00
|
|
|
if 'clip_qs' in query:
|
2011-10-19 16:54:33 +00:00
|
|
|
r['clips'] = get_clips(query['clip_qs'].filter(item__itemId=m['id']))
|
2010-07-01 08:49:08 +00:00
|
|
|
return r
|
|
|
|
qs = qs[query['range'][0]:query['range'][1]]
|
2011-01-24 13:44:38 +00:00
|
|
|
#response['data']['items'] = [m.get_json(_p) for m in qs]
|
2011-11-10 19:52:26 +00:00
|
|
|
if 'viewed' in _p or 'timesaccessed' in _p or 'accessed' in _p:
|
2011-10-09 13:49:21 +00:00
|
|
|
qs = qs.select_related()
|
2011-01-25 14:39:03 +00:00
|
|
|
response['data']['items'] = [only_p_sums(m) for m in qs]
|
|
|
|
else:
|
|
|
|
response['data']['items'] = [only_p(m['json']) for m in qs.values('json')]
|
2011-06-01 11:03:43 +00:00
|
|
|
|
2010-07-01 08:49:08 +00:00
|
|
|
else: # otherwise stats
|
2010-09-23 16:01:48 +00:00
|
|
|
items = query['qs']
|
2011-04-08 08:20:59 +00:00
|
|
|
files = File.objects.filter(item__in=items).filter(size__gt=0)
|
2010-07-01 08:49:08 +00:00
|
|
|
r = files.aggregate(
|
2010-07-12 14:56:14 +00:00
|
|
|
Sum('duration'),
|
|
|
|
Sum('pixels'),
|
|
|
|
Sum('size')
|
2010-07-01 08:49:08 +00:00
|
|
|
)
|
2012-01-20 17:53:21 +00:00
|
|
|
totals = [i['id'] for i in settings.CONFIG['totals']]
|
|
|
|
if 'duration' in totals:
|
|
|
|
response['data']['duration'] = r['duration__sum']
|
|
|
|
if 'files' in totals:
|
|
|
|
response['data']['files'] = files.count()
|
|
|
|
if 'items' in totals:
|
|
|
|
response['data']['items'] = items.count()
|
|
|
|
if 'pixels' in totals:
|
|
|
|
response['data']['pixels'] = r['pixels__sum']
|
|
|
|
if 'runtime' in totals:
|
|
|
|
response['data']['runtime'] = items.aggregate(Sum('sort__runtime'))['sort__runtime__sum'] or 0
|
|
|
|
if 'size' in totals:
|
|
|
|
response['data']['size'] = r['size__sum']
|
2011-01-24 13:44:38 +00:00
|
|
|
for key in ('runtime', 'duration', 'pixels', 'size'):
|
2012-01-20 17:53:21 +00:00
|
|
|
if key in totals and response['data'][key] == None:
|
2011-01-24 13:44:38 +00:00
|
|
|
response['data'][key] = 0
|
2009-08-01 14:14:54 +00:00
|
|
|
return render_to_json_response(response)
|
2010-12-22 07:45:37 +00:00
|
|
|
actions.register(find)
|
|
|
|
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2011-01-03 14:14:54 +00:00
|
|
|
def autocomplete(request):
    '''
    param data
        key
        value
        operator '=', '==', '^', '$'
        query
        range
    return

    query can be an item query to limit results
    '''
    data = json.loads(request.POST['data'])
    if not 'range' in data:
        data['range'] = [0, 10]
    # operator selects the match mode: contains / exact / prefix / suffix
    op = data.get('operator', '=')

    key = settings.CONFIG['keys'][data['key']]
    order_by = key.get('autocompleteSortKey', False)
    if order_by:
        order_by = '-sort__%s' % order_by
    else:
        # default: most frequent values first
        order_by = '-items'
    sort_type = key.get('sort', key.get('type', 'string'))
    if sort_type == 'title':
        # title-like keys: match against the item find index
        qs = parse_query({'query': data.get('query', {})}, request.user)['qs']
        if data['value']:
            if op == '=':
                qs = qs.filter(find__key=data['key'], find__value__icontains=data['value'])
            elif op == '==':
                qs = qs.filter(find__key=data['key'], find__value__iexact=data['value'])
            elif op == '^':
                qs = qs.filter(find__key=data['key'], find__value__istartswith=data['value'])
            elif op == '$':
                qs = qs.filter(find__key=data['key'], find__value__iendswith=data['value'])
        qs = qs.order_by(order_by, nulls_last=True)
        qs = qs[data['range'][0]:data['range'][1]]
        response = json_response({})
        # NOTE(review): set() deduplicates but loses the sort order above
        response['data']['items'] = list(set([i.get(data['key']) for i in qs]))
    else:
        # all other keys: match against the facet table
        qs = models.Facet.objects.filter(key=data['key'])
        if data['value']:
            if op == '=':
                qs = qs.filter(value__icontains=data['value'])
            elif op == '==':
                qs = qs.filter(value__iexact=data['value'])
            elif op == '^':
                qs = qs.filter(value__istartswith=data['value'])
            elif op == '$':
                qs = qs.filter(value__iendswith=data['value'])
        if 'query' in data:
            # restrict suggestions to facets of items matching the query
            item_query = parse_query({'query': data.get('query', {})}, request.user)['qs']
            qs = qs.filter(item__in=item_query)
        qs = qs.values('value').annotate(items=Count('id'))
        qs = qs.order_by(order_by)
        qs = qs[data['range'][0]:data['range'][1]]
        response = json_response({})
        response['data']['items'] = [i['value'] for i in qs]
    return render_to_json_response(response)
actions.register(autocomplete)
|
2010-12-22 07:45:37 +00:00
|
|
|
|
2011-10-18 20:06:01 +00:00
|
|
|
def findId(request):
    '''
    param data {
        'query': query,
        'sort': array,
        'range': array
    }

    '''
    data = json.loads(request.POST['data'])
    response = json_response({})
    response['data']['items'] = []
    '''
    FIXME: can not handle query for director []
    query = parse_query(data, request.user)
    qs = _order_query(query['qs'], query['sort'])
    if qs.count() == 1:
        response['data']['items'] = [i.get_json(data['keys']) for i in qs]
    elif settings.DATA_SERVICE:
    '''
    # local lookup is disabled above; always ask the external data service
    if settings.DATA_SERVICE:
        '''
        info = {}
        for c in data['query']['conditions']:
            info[c['key']] = c['value']
        r = models.external_data('getId', info)
        '''
        r = models.external_data('getId', data)
        if r['status']['code'] == 200:
            response['data']['items'] = [r['data']]
    return render_to_json_response(response)
actions.register(findId)
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2011-07-30 10:51:23 +00:00
|
|
|
def get(request):
    '''
    param data {
        id: string
        keys: array
    }
    return item array
    '''
    response = json_response({})
    data = json.loads(request.POST['data'])
    data['keys'] = data.get('keys', [])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    if item.access(request.user):
        info = item.get_json(data['keys'])
        if not data['keys'] or 'stream' in data['keys']:
            info['stream'] = item.get_stream()
        if data['keys'] and 'layers' in data['keys']:
            info['layers'] = item.get_layers(request.user)
        if data['keys'] and 'files' in data['keys']:
            info['files'] = item.get_files(request.user)
        # FIX: parenthesize the 'or' — previously this parsed as
        # `A or (B and C)`, leaking notes/groups to users without the
        # canEditMetadata capability whenever keys was empty.
        if (not data['keys'] or 'notes' in data['keys']) \
                and request.user.get_profile().capability('canEditMetadata'):
            info['notes'] = item.notes
        if (not data['keys'] or 'groups' in data['keys']) \
                and request.user.get_profile().capability('canEditMetadata'):
            info['groups'] = [g.name for g in item.groups.all()]

        info['editable'] = item.editable(request.user)
        response['data'] = info
    else:
        # a 404 avoids revealing that the restricted item exists
        #response = json_response(status=403, text='permission denied')
        response = json_response(status=404, text='not found')
    return render_to_json_response(response)
actions.register(get)
|
|
|
|
|
2010-01-26 13:11:57 +00:00
|
|
|
@login_required_json
def edit(request):
    '''
    param data {
        id: string,
        key: value,..
    }
    return {
        status: {'code': int, 'text': string},
        data: {}
    }
    '''
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    if item.editable(request.user):
        # snapshot current state before applying the edit
        item.log()
        response = json_response(status=200, text='ok')
        if 'notes' in data:
            if request.user.get_profile().capability('canEditMetadata'):
                item.notes = data['notes']
            # strip 'notes' either way so item.edit never sees it
            del data['notes']
        if 'rightslevel' in data:
            item.level = data['rightslevel']
            del data['rightslevel']
        r = item.edit(data)
        if r:
            # edit may run async; block until done so get_json is current
            r.wait()
        response['data'] = item.get_json()
    else:
        # FIX: corrected misspelled error message ('permissino' -> 'permission')
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)
actions.register(edit, cache=False)
|
2010-01-26 02:24:05 +00:00
|
|
|
|
2010-01-26 13:11:57 +00:00
|
|
|
@login_required_json
def remove(request):
    '''
    param data {
        id: string
    }

    return {'status': {'code': int, 'text': string}}
    '''
    response = json_response({})
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    if not item.editable(request.user):
        response = json_response(status=403, text='permission denied')
        return render_to_json_response(response)
    # snapshot current state before deleting
    item.log()
    #FIXME: is this cascading enough or do we end up with orphan files etc.
    item.delete()
    response = json_response(status=200, text='removed')
    return render_to_json_response(response)
actions.register(remove, cache=False)
|
2010-01-26 02:24:05 +00:00
|
|
|
|
2010-09-13 13:46:49 +00:00
|
|
|
'''
|
|
|
|
Poster API
|
|
|
|
'''
|
2010-08-07 14:31:20 +00:00
|
|
|
|
|
|
|
|
2010-12-22 07:45:37 +00:00
|
|
|
def setPosterFrame(request): #parse path and return info
    '''
    param data {
        id: itemId,
        position: float
    }
    return {
        status: {'code': int, 'text': string},
        data: {
        }
    }
    '''
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    if item.editable(request.user):
        item.poster_frame = data['position']
        item.save()
        # regenerate the poster with the newly chosen frame
        tasks.update_poster(item.itemId)
        response = json_response()
    else:
        # FIX: corrected misspelled error message ('permissino' -> 'permission')
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)
actions.register(setPosterFrame, cache=False)
|
2010-08-07 14:31:20 +00:00
|
|
|
|
2010-12-22 07:45:37 +00:00
|
|
|
def setPoster(request): #parse path and return info
    '''
    param data {
        id: itemId,
        source: string
    }
    return {
        status: {'code': int, 'text': string},
        data: {
            poster: {url,width,height}
        }
    }
    '''
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    response = json_response()
    if item.editable(request.user):
        known_sources = [p['source'] for p in item.get_posters()]
        if data['source'] in known_sources:
            item.poster_source = data['source']
            if item.poster:
                # drop the cached poster so it gets regenerated
                item.poster.delete()
            item.save()
            tasks.update_poster(item.itemId)
            response = json_response()
            response['data']['posterAspect'] = item.poster_width/item.poster_height
        else:
            response = json_response(status=403, text='invalid poster url')
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)
actions.register(setPoster, cache=False)
|
2010-01-25 09:20:21 +00:00
|
|
|
|
2011-09-29 15:16:48 +00:00
|
|
|
def updateExternalData(request):
    '''
    Refresh the item's metadata from external sources.

    param data {
        id: itemId,
    }
    return {
        status: {'code': int, 'text': string},
        data: {}
    }
    '''
    data = json.loads(request.POST['data'])
    item = get_object_or_404_json(models.Item, itemId=data['id'])
    response = json_response()
    if not item.editable(request.user):
        response = json_response(status=403, text='permission denied')
    else:
        item.update_external()
    return render_to_json_response(response)
actions.register(updateExternalData, cache=False)
|
|
|
|
|
2011-06-04 16:15:38 +00:00
|
|
|
def lookup(request):
    '''
    param data {
        title: string,
        director: [string],
        year: string,
        id: string
    }
    return {
        status: {'code': int, 'text': string},
        data: {
            title: string,
            director: [string],
            year: string,
            id: string
        }
    }
    '''
    data = json.loads(request.POST['data'])
    if 'id' not in data:
        # only lookup by id is currently implemented
        return render_to_json_response(json_response(status=404, text='not found'))
    item = models.Item.objects.get(itemId=data['id'])
    result = {'id': item.itemId}
    for field in ('title', 'director', 'year'):
        result[field] = item.get(field)
    return render_to_json_response(json_response(result))
actions.register(lookup)
|
|
|
|
|
2010-12-22 07:45:37 +00:00
|
|
|
def getImdbId(request):
    '''
    param data {
        title: string,
        director: string,
        year: string
    }
    return {
        status: {'code': int, 'text': string},
        data: {
            imdbId:string
        }
    }
    '''
    data = json.loads(request.POST['data'])
    # timeout=-1: use ox's cached result if available, never expire
    imdbId = ox.web.imdb.getImdbId(data['title'], data['director'], timeout=-1)
    if not imdbId:
        response = json_response(status=404, text='not found')
    else:
        response = json_response({'imdbId': imdbId})
    return render_to_json_response(response)
actions.register(getImdbId)
|
2010-01-25 09:20:21 +00:00
|
|
|
|
2010-09-13 13:46:49 +00:00
|
|
|
'''
|
2010-09-18 15:16:45 +00:00
|
|
|
media delivery
|
2010-09-13 13:46:49 +00:00
|
|
|
'''
|
2011-08-16 10:11:07 +00:00
|
|
|
def frame(request, id, size, position=None):
    # Serve a single frame of the item as a JPEG of the given size.
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    frame = None
    if not position:
        # no explicit position requested: fall back to the poster frame
        frames = item.poster_frames()
        if frames:
            position = item.poster_frame
            if position == -1 or position > len(frames):
                # unset or out of range: use the middle poster frame
                position = int(len(frames)/2)
            position = frames[int(position)]['position']
        elif item.poster_frame == -1 and item.sort.duration:
            # no pre-extracted frames: default to the middle of the video
            position = item.sort.duration/2
        else:
            position = item.poster_frame
    else:
        # position comes from the URL; accept ',' as decimal separator
        position = float(position.replace(',', '.'))

    if not frame:
        frame = item.frame(position, int(size))

    if not frame:
        # extraction failed: serve the generic placeholder instead of 404
        frame = os.path.join(settings.STATIC_ROOT, 'jpg/list256.jpg')
        #raise Http404
    return HttpFileResponse(frame, content_type='image/jpeg')
|
|
|
|
|
2011-07-26 17:22:23 +00:00
|
|
|
def poster_frame(request, id, position):
    '''Serve the pre-extracted poster frame at index ``position`` for item ``id``.'''
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    index = int(position)
    frames = item.poster_frames()
    if frames and len(frames) > index:
        return HttpFileResponse(frames[index]['path'], content_type='image/jpeg')
    raise Http404
|
|
|
|
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2011-08-23 17:39:34 +00:00
|
|
|
def image_to_response(image, size=None):
    '''Serve ``image`` as JPEG, optionally downscaled to ``size`` (cached on disk).'''
    if not size:
        return HttpFileResponse(image.path, content_type='image/jpeg')
    size = int(size)
    resized = image.path.replace('.jpg', '.%d.jpg' % size)
    if os.path.exists(resized):
        # already rendered at this size
        path = resized
    else:
        largest_side = max(image.width, image.height)
        if size > largest_side:
            # never upscale; serve the original instead
            path = image.path
        else:
            extract.resize_image(image.path, resized, size=size)
            path = resized
    return HttpFileResponse(path, content_type='image/jpeg')
|
|
|
|
|
2011-08-23 17:39:34 +00:00
|
|
|
def siteposter(request, id, size=None):
    """Serve the item's generated site poster, optionally downscaled.

    Previously every sized request re-opened the poster with PIL and
    re-ran the resize; now resized copies are cached on disk and reused,
    matching the caching behavior of image_to_response().
    """
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    poster = item.path('siteposter.jpg')
    poster = os.path.abspath(os.path.join(settings.MEDIA_ROOT, poster))
    if size:
        size = int(size)
        path = poster.replace('.jpg', '.%d.jpg' % size)
        if os.path.exists(path):
            # cached resized copy from an earlier request
            poster = path
        else:
            image = Image.open(poster)
            # only downscale; serve the original if size >= largest edge
            if size < max(image.size):
                extract.resize_image(poster, path, size=size)
                poster = path
    return HttpFileResponse(poster, content_type='image/jpeg')
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2010-09-13 15:19:38 +00:00
|
|
|
def poster(request, id, size=None):
    """Serve the item's poster (optionally resized via image_to_response);
    fall back to the static placeholder when no poster exists yet."""
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if item.poster:
        return image_to_response(item.poster, size)
    fallback = os.path.join(settings.STATIC_ROOT, 'jpg/poster.jpg')
    response = HttpFileResponse(fallback, content_type='image/jpeg')
    # the placeholder must not be cached: a real poster may appear later
    response['Cache-Control'] = 'no-cache'
    return response
|
2010-09-08 11:56:58 +00:00
|
|
|
|
2011-01-04 07:32:32 +00:00
|
|
|
def icon(request, id, size=None):
    """Serve the item's icon (optionally resized via image_to_response);
    fall back to the static poster placeholder when no icon exists yet."""
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if item.icon:
        return image_to_response(item.icon, size)
    fallback = os.path.join(settings.STATIC_ROOT, 'jpg/poster.jpg')
    response = HttpFileResponse(fallback, content_type='image/jpeg')
    # the placeholder must not be cached: an icon may be rendered later
    response['Cache-Control'] = 'no-cache'
    return response
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2011-08-06 18:00:15 +00:00
|
|
|
def timeline(request, id, size, position):
    """Serve one pre-rendered PNG tile of the item's timeline strip.

    Tiles live at '<timeline_prefix><size>p<position 0-padded to 4>.png'.
    """
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    tile = '%s%sp%04d.png' % (item.timeline_prefix, size, int(position))
    return HttpFileResponse(tile, content_type='image/png')
|
|
|
|
|
2011-01-04 07:32:32 +00:00
|
|
|
|
2011-01-16 13:28:57 +00:00
|
|
|
def timeline_overview(request, id, size):
    """Serve the pre-rendered whole-item timeline overview PNG
    ('<timeline_prefix><size>p.png')."""
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    overview = '%s%sp.png' % (item.timeline_prefix, size)
    return HttpFileResponse(overview, content_type='image/png')
|
|
|
|
|
2011-07-03 16:21:27 +00:00
|
|
|
def torrent(request, id, filename=None):
    """Serve an item's torrent metadata or a payload file from its torrent dir.

    With no filename, or a '*.torrent' filename, the generated torrent
    metadata is returned as an attachment. Any other filename is resolved
    inside the item's 'torrent/' directory.

    `filename` comes straight from the URL, and the old sanitization
    (strip leading '/', single-pass replace of '/../') could be bypassed
    (e.g. '/../../' collapses to '/../'), so the resolved absolute path is
    now additionally verified to stay inside the torrent directory.
    """
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if not item.torrent:
        raise Http404
    if not filename or filename.endswith('.torrent'):
        response = HttpResponse(item.get_torrent(request),
            content_type='application/x-bittorrent')
        filename = utils.safe_filename("%s.torrent" % item.get('title'))
        response['Content-Disposition'] = 'attachment; filename="%s"' % filename.encode('utf-8')
        return response
    while filename.startswith('/'):
        filename = filename[1:]
    filename = filename.replace('/../', '/')
    torrent_dir = os.path.abspath(os.path.join(settings.MEDIA_ROOT, item.path('torrent')))
    path = os.path.abspath(os.path.join(settings.MEDIA_ROOT, item.path('torrent/%s' % filename)))
    # reject anything that escapes the item's torrent directory
    if not path.startswith(torrent_dir + os.sep):
        raise Http404
    response = HttpFileResponse(path)
    response['Content-Disposition'] = 'attachment; filename="%s"' % \
        os.path.basename(path.encode('utf-8'))
    return response
|
2011-07-03 16:21:27 +00:00
|
|
|
|
2011-08-18 12:01:37 +00:00
|
|
|
def video(request, id, resolution, format, index=None):
    """Serve a transcoded video stream of an item, or a cut of it.

    `index` is the 1-based part number in the URL (defaults to part 1).
    If a 't=start,end' GET parameter is present, the matching range is cut
    out server-side and sent as a download; otherwise the whole stream file
    is served (redirecting to the media URL unless X-Sendfile /
    X-Accel-Redirect delivery is configured).
    """
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    # convert 1-based URL part number to a 0-based index
    if index:
        index = int(index) - 1
    else:
        index = 0
    #streams = Stream.object.filter(file__item__itemId=item.itemId,
    #                               file__selected=True, file__part=index,
    #                               resolution=resolution, format=format)
    #if streams.count() != 1:
    #    raise Http404
    streams = Stream.objects.filter(file__item__itemId=item.itemId,
                                    resolution=resolution, format=format).order_by('file__part')
    if index + 1 > streams.count():
        raise Http404
    stream = streams[index]
    if not stream.available or not stream.video:
        raise Http404
    path = stream.video.path

    #server side cutting
    #FIXME: this needs to join segments if needed
    t = request.GET.get('t')
    if t:
        # t is 'start,end' in seconds (floats)
        t = map(float, t.split(','))
        ext = '.%s' % format
        content_type = mimetypes.guess_type(path)[0]
        # only cut when the range is valid and inside this stream's duration
        if len(t) == 2 and t[1] > t[0] and stream.info['duration']>=t[1]:
            response = HttpResponse(extract.chop(path, t[0], t[1]), content_type=content_type)
            # e.g. 'Clip of Title - 00.01.02-00.01.10 - Site ID.webm'
            filename = u"Clip of %s - %s-%s - %s %s%s" % (
                item.get('title'),
                ox.formatDuration(t[0] * 1000).replace(':', '.')[:-4],
                ox.formatDuration(t[1] * 1000).replace(':', '.')[:-4],
                settings.SITENAME,
                item.itemId,
                ext
            )
            filename = filename.encode('utf8')
            response['Content-Disposition'] = 'attachment; filename="%s"' % filename
            return response
        else:
            # invalid range: fall back to downloading the whole stream file
            filename = "%s - %s %s%s" % (
                item.get('title'),
                settings.SITENAME,
                item.itemId,
                ext
            )
            response = HttpFileResponse(path, content_type=content_type)
            filename = filename.encode('utf8')
            response['Content-Disposition'] = 'attachment; filename="%s"' % filename
            return response
    # plain playback: without sendfile support, let the web server / storage
    # serve the file directly via its public URL
    if not settings.XSENDFILE and not settings.XACCELREDIRECT:
        return redirect(stream.video.url)
    response = HttpFileResponse(path)
    response['Cache-Control'] = 'public'
    return response
|
2011-11-12 08:57:51 +00:00
|
|
|
|
2012-01-02 17:08:19 +00:00
|
|
|
def srt(request, id, layer, index=None):
    """Download one annotation layer of an item as an SRT subtitle file."""
    item = get_object_or_404(models.Item, itemId=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    filename = "%s.srt" % item.get('title')
    response = HttpResponse()
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename
    response['Content-Type'] = 'text/x-srt'
    response.write(item.srt(layer))
    return response
|
|
|
|
|
2011-11-12 08:57:51 +00:00
|
|
|
def random_annotation(request):
    """Redirect to a random annotation of a random item.

    Fixes an off-by-one crash: random.randint(0, n) is inclusive of n, so
    pos == n raised IndexError; an empty item/annotation set crashed the
    same way. randrange(n) picks from [0, n), and empty sets now 404.
    """
    items = models.Item.objects.all()
    n = items.count()
    if n == 0:
        raise Http404
    item = items[random.randrange(n)]
    annotations = item.annotations.all()
    n = annotations.count()
    if n == 0:
        # this item happens to have no annotations
        raise Http404
    clip = annotations[random.randrange(n)]
    return redirect('/%s'% clip.public_id)
|
2012-01-02 17:08:19 +00:00
|
|
|
|
2012-01-16 08:18:39 +00:00
|
|
|
def atom_xml(request):
    """Render an Atom feed (with media-RSS thumbnails) of the 7 most
    recently created, rendered items visible at the requester's level."""
    feed = ET.Element("feed")
    feed.attrib['xmlns'] = 'http://www.w3.org/2005/Atom'
    feed.attrib['xmlns:media'] = 'http://search.yahoo.com/mrss'
    feed.attrib['xml:lang'] = 'en'
    title = ET.SubElement(feed, "title")
    title.text = settings.SITENAME
    title.attrib['type'] = 'text'
    link = ET.SubElement(feed, "link")
    link.attrib['rel'] = 'self'
    link.attrib['type'] = 'application/atom+xml'
    atom_link = request.build_absolute_uri('/atom.xml')
    link.attrib['href'] = atom_link
    '''
    rights = ET.SubElement(feed, 'rights')
    rights.attrib['type'] = 'text'
    rights.text = "PGL"
    '''
    el = ET.SubElement(feed, 'id')
    el.text = atom_link

    # anonymous users see items up to the configured guest level,
    # logged-in users up to their profile level
    level = settings.CONFIG['capabilities']['canSeeItem']['guest']
    if not request.user.is_anonymous():
        level = request.user.get_profile().level
    for item in models.Item.objects.filter(level__lte=level, rendered=True).order_by('-created')[:7]:
        page_link = request.build_absolute_uri('/%s' % item.itemId)

        entry = ET.Element("entry")
        title = ET.SubElement(entry, "title")
        title.text = item.get('title')
        link = ET.SubElement(entry, "link")
        link.attrib['rel'] = 'alternate'
        link.attrib['href'] = "%s/info" % page_link
        updated = ET.SubElement(entry, "updated")
        updated.text = item.modified.strftime("%Y-%m-%dT%H:%M:%SZ")
        published = ET.SubElement(entry, "published")
        published.text = item.created.strftime("%Y-%m-%dT%H:%M:%SZ")
        el = ET.SubElement(entry, "id")
        el.text = page_link

        if item.get('director'):
            el = ET.SubElement(entry, "author")
            name = ET.SubElement(el, "name")
            name.text = u', '.join(item.get('director'))
        for topic in item.get('topics', []):
            el = ET.SubElement(entry, "category")
            el.attrib['term'] = topic

        '''
        el = ET.SubElement(entry, "rights")
        el.text = "PGL"
        el = ET.SubElement(entry, "link")
        el.attrib['rel'] = "license"
        el.attrib['type'] = "text/html"
        el.attrib['href'] = item.licenseUrl
        '''
        '''
        el = ET.SubElement(entry, "contributor")
        name = ET.SubElement(el, "name")
        name.text = item.user.username
        '''

        description = item.get('description', item.get('summary'))
        if description:
            content = ET.SubElement(entry, "content")
            content.attrib['type'] = 'html'
            content.text = description

        # technical metadata of the newest source-less (i.e. original) stream
        format = ET.SubElement(entry, "format")
        format.attrib['xmlns'] = 'http://transmission.cc/FileFormat'
        stream = item.streams().filter(source=None).order_by('-id')[0]
        for key in ('size', 'duration', 'video_codec',
                    'framerate', 'width', 'height',
                    'audio_codec', 'samplerate', 'channels'):
            # top-level info first, then fall back to the first video/audio
            # track (where '*_codec' is stored under the plain 'codec' key)
            value = stream.info.get(key)
            if not value and stream.info.get('video'):
                value = stream.info['video'][0].get({
                    'video_codec': 'codec'
                }.get(key, key))
            if not value and stream.info.get('audio'):
                value = stream.info['audio'][0].get({
                    'audio_codec': 'codec'
                }.get(key, key))
            if value and value != -1:
                el = ET.SubElement(format, key)
                el.text = unicode(value)
        el = ET.SubElement(format, 'pixel_aspect_ratio')
        el.text = u"1:1"

        if settings.CONFIG['video'].get('download'):
            if item.torrent:
                el = ET.SubElement(entry, "link")
                el.attrib['rel'] = 'enclosure'
                el.attrib['type'] = 'application/x-bittorrent'
                el.attrib['href'] = '%s/torrent/' % page_link
                el.attrib['length'] = '%s' % ox.getTorrentSize(item.torrent.path)
            #FIXME: loop over streams
            #for s in item.streams().filter(resolution=max(settings.CONFIG['video']['resolutions'])):
            for s in item.streams().filter(source=None):
                el = ET.SubElement(entry, "link")
                el.attrib['rel'] = 'enclosure'
                el.attrib['type'] = 'video/%s' % s.format
                el.attrib['href'] = '%s/%sp.%s' % (page_link, s.resolution, s.format)
                el.attrib['length'] = '%s'%s.video.size

        # 96px-high thumbnail; width follows the stream aspect, rounded
        # down to an even number of pixels
        el = ET.SubElement(entry, "media:thumbnail")
        thumbheight = 96
        thumbwidth = int(thumbheight * item.stream_aspect)
        thumbwidth -= thumbwidth % 2
        el.attrib['url'] = '%s/%sp.jpg' % (page_link, thumbheight)
        el.attrib['width'] = str(thumbwidth)
        el.attrib['height'] = str(thumbheight)
        feed.append(entry)
    return HttpResponse(
        '<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n' + ET.tostring(feed),
        'application/atom+xml'
    )
|
|
|
|
|
2012-01-10 16:00:41 +00:00
|
|
|
def oembed(request):
    """oEmbed endpoint: return embed information for an item URL.

    GET parameters: url (required, an item page URL on this site),
    format ('json' or 'xml', default 'json'), maxwidth / maxheight
    (pixel limits for the returned iframe, defaults 640x480).

    Fixes two defects: maxwidth/maxheight arrive as strings from the
    query string and were compared against ints, and the iframe html
    interpolated (height, width) into width="%s" height="%s",
    transposing the embed's dimensions.
    """
    format = request.GET.get('format', 'json')
    # coerce so the size comparisons below compare numbers, not str vs int
    maxwidth = int(request.GET.get('maxwidth', 640))
    maxheight = int(request.GET.get('maxheight', 480))

    url = request.GET['url']
    parts = urlparse(url).path.split('/')
    itemId = parts[1]
    #fixme: embed should reflect actuall url
    item = get_object_or_404_json(models.Item, itemId=itemId)
    embed_url = request.build_absolute_uri('/%s/embed' % item.itemId)
    oembed = {}
    oembed['version'] = '1.0'
    oembed['type'] = 'video'
    oembed['provider_name'] = settings.SITENAME
    oembed['provider_url'] = request.build_absolute_uri('/')
    oembed['title'] = item.get('title')
    #oembed['author_name'] = item.get('director')
    #oembed['author_url'] = ??
    height = 96
    width = 128
    # adopt the caller's requested dimensions when they differ
    if maxheight > height or height > maxheight:
        height = maxheight
    if maxwidth > width or width > maxwidth:
        width = maxwidth
    oembed['html'] = '<iframe width="%s" height="%s" src="%s" frameborder="0" allowfullscreen></iframe>' % (width, height, embed_url)
    oembed['width'] = width
    oembed['height'] = height
    # thumbnail: 96px high, width from stream aspect, rounded down to even
    thumbheight = 96
    thumbwidth = int(thumbheight * item.stream_aspect)
    thumbwidth -= thumbwidth % 2
    oembed['thumbnail_height'] = thumbheight
    oembed['thumbnail_width'] = thumbwidth
    oembed['thumbnail_url'] = request.build_absolute_uri('/%s/%sp.jpg' % (item.itemId, thumbheight))
    if format == 'xml':
        oxml = ET.Element('oembed')
        for key in oembed:
            e = ET.SubElement(oxml, key)
            e.text = unicode(oembed[key])
        return HttpResponse(
            '<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n' + ET.tostring(oxml),
            'application/xml'
        )
    return HttpResponse(json.dumps(oembed, indent=2), 'application/json')
|
|
|
|
|
2012-01-11 07:51:26 +00:00
|
|
|
def sitemap_xml(request):
    """Serve sitemap.xml, building it synchronously on first request and
    refreshing it in the background once it is older than one day."""
    path = os.path.abspath(os.path.join(settings.MEDIA_ROOT, 'sitemap.xml'))
    base_url = request.build_absolute_uri('/')
    if not os.path.exists(path):
        # first hit: generate synchronously so there is something to serve
        tasks.update_sitemap(base_url)
    else:
        age = time.mktime(time.localtime()) - os.stat(path).st_ctime
        if age > 24*60*60:
            # stale: kick off an async refresh, serve the old copy for now
            tasks.update_sitemap.delay(base_url)
    response = HttpFileResponse(path)
    response['Content-Type'] = 'application/xml'
    return response
|
|
|
|
|
2012-01-09 09:06:35 +00:00
|
|
|
def item(request, id):
    """Render the public HTML page for an item.

    Serves 'item.html' with metadata and public annotations when the item
    exists at the requester's access level, otherwise the generic
    'index.html'. NOTE(review): indentation below is reconstructed from a
    blame view; the clip-grouping nesting follows the apparent intent.
    """
    # URL may carry extra path segments after the id; keep only the id
    id = id.split('/')[0]
    template = 'index.html'
    # anonymous users see items up to the configured guest level,
    # logged-in users up to their profile level
    level = settings.CONFIG['capabilities']['canSeeItem']['guest']
    if not request.user.is_anonymous():
        level = request.user.get_profile().level
    qs = models.Item.objects.filter(itemId=id, level__lte=level)
    if qs.count() == 0:
        # unknown or inaccessible item: render the generic index page
        context = RequestContext(request, {
            'base_url': request.build_absolute_uri('/'),
            'settings': settings
        })
    else:
        item = qs[0]
        template = 'item.html'
        # metadata keys shown in the page's key/value table
        keys = [
            'year',
            'director',
            'topic',
            'description'
        ]
        data = []
        for key in keys:
            value = item.get(key)
            if value:
                if isinstance(value, list):
                    value = value = u', '.join([unicode(v) for v in value])
                data.append({'key': key.capitalize(), 'value': value})
        # group public annotations into clips by start position
        clips = []
        clip = {'in': 0, 'annotations': []}
        for a in item.annotations.filter(
            layer__in=models.Annotation.public_layers()).order_by('start', 'end', 'sortvalue'):
            if clip['in'] < a.start:
                # a new start position begins: flush the previous clip
                # (only if it collected any annotations)
                if clip['annotations']:
                    clip['annotations'] = '<br />\n'.join(clip['annotations'])
                    clips.append(clip)
                clip = {'in': a.start, 'annotations': []}
            clip['annotations'].append(a.value)
        ctx = {
            'current_url': request.build_absolute_uri(request.get_full_path()),
            'base_url': request.build_absolute_uri('/'),
            'url': request.build_absolute_uri('/%s' % id),
            'id': id,
            'settings': settings,
            'data': data,
            'clips': clips,
            'icon': settings.CONFIG['user']['ui']['icons'] == 'frames' and 'icon' or 'poster',
        }
        # meta tags: map description/keywords onto whichever source key
        # this site's configuration uses
        for key in ('title', 'description', 'keywords'):
            value = item.get({
                'description': 'summary' in keys and 'summary' or 'description',
                'keywords': 'topic' in keys and 'topic' or 'keywords'
            }.get(key, key))
            if isinstance(value, list):
                value = value = ', '.join(value)
            if value:
                ctx[key] = ox.stripTags(value)

        context = RequestContext(request, ctx)
    return render_to_response(template, context)
|
|
|
|
|