2009-08-01 16:14:54 +02:00
|
|
|
|
# -*- coding: utf-8 -*-
|
2016-08-23 12:27:06 +02:00
|
|
|
|
from __future__ import division, print_function, absolute_import
|
|
|
|
|
|
2009-08-01 16:14:54 +02:00
|
|
|
|
import os.path
|
2011-06-27 21:40:44 +02:00
|
|
|
|
import mimetypes
|
2011-11-12 09:57:51 +01:00
|
|
|
|
import random
|
2012-01-11 13:12:32 +05:30
|
|
|
|
import time
|
2017-08-04 17:32:36 +02:00
|
|
|
|
from datetime import datetime, timedelta
|
2011-01-01 17:14:42 +05:30
|
|
|
|
|
2016-08-23 12:27:06 +02:00
|
|
|
|
from six import PY2
|
|
|
|
|
from six.moves.urllib.parse import quote, urlparse
|
2016-06-25 20:36:20 +02:00
|
|
|
|
from PIL import Image
|
2013-02-21 13:28:36 +00:00
|
|
|
|
from django.db.models import Count, Sum
|
2011-09-28 14:47:13 +02:00
|
|
|
|
from django.http import HttpResponse, HttpResponseForbidden, Http404
|
2016-02-20 00:25:48 +05:30
|
|
|
|
from django.shortcuts import get_object_or_404, redirect, render
|
2014-02-13 18:48:47 +00:00
|
|
|
|
from django.core.files.temp import NamedTemporaryFile
|
2016-02-19 22:02:12 +05:30
|
|
|
|
from wsgiref.util import FileWrapper
|
2010-02-08 15:56:25 +05:30
|
|
|
|
from django.conf import settings
|
2010-02-03 17:35:38 +05:30
|
|
|
|
|
2012-01-10 21:30:41 +05:30
|
|
|
|
from ox.utils import json, ET
|
2010-02-03 17:35:38 +05:30
|
|
|
|
|
2016-02-20 09:06:41 +00:00
|
|
|
|
from oxdjango.decorators import login_required_json
|
|
|
|
|
from oxdjango.shortcuts import render_to_json_response, get_object_or_404_json, json_response
|
|
|
|
|
from oxdjango.http import HttpFileResponse
|
2010-07-08 00:46:41 +02:00
|
|
|
|
import ox
|
2009-12-31 16:04:32 +01:00
|
|
|
|
|
2016-08-23 12:27:06 +02:00
|
|
|
|
from . import models
|
|
|
|
|
from . import utils
|
|
|
|
|
from . import tasks
|
2010-01-25 14:50:21 +05:30
|
|
|
|
|
2011-08-19 20:10:28 +02:00
|
|
|
|
from archive.models import File, Stream
|
2010-09-07 16:05:38 +02:00
|
|
|
|
from archive import extract
|
2011-10-19 17:55:29 +02:00
|
|
|
|
from clip.models import Clip
|
2013-07-06 11:15:10 +00:00
|
|
|
|
from user.models import has_capability
|
2014-12-17 13:45:46 +00:00
|
|
|
|
from changelog.models import add_changelog
|
2010-09-07 16:05:38 +02:00
|
|
|
|
|
2016-02-20 09:06:41 +00:00
|
|
|
|
from oxdjango.api import actions
|
2010-11-25 16:21:23 +01:00
|
|
|
|
|
2016-08-23 12:27:06 +02:00
|
|
|
|
if not PY2:
    # Python 3: alias `unicode` to str so py2-era code in this module
    # (which still references `unicode`) keeps working unchanged.
    unicode = str
|
2011-01-01 17:14:42 +05:30
|
|
|
|
|
2010-02-22 14:55:29 +05:30
|
|
|
|
def _order_query(qs, sort, prefix='sort__'):
    """Apply the sort specification `sort` to the item queryset `qs`.

    `sort` is a list of {'key': ..., 'operator': '+'|'-'} dicts. When only a
    single sort key is given, the key's configured `additionalSort` (or the
    site-wide `listSort` default) is appended as tie-breakers.

    NOTE: the caller's `sort` list is mutated in place when tie-breakers are
    added — existing callers appear to rely on passing a fresh list.
    """
    # Expand a single-key sort with its configured secondary sort keys.
    if len(sort) == 1:
        default_extra = settings.CONFIG['user']['ui']['listSort']
        key_config = utils.get_by_id(settings.CONFIG['itemKeys'], sort[0]['key'])
        for extra in key_config.get('additionalSort', default_extra):
            sort.append(extra)
    order_by = []
    for entry in sort:
        direction = '-' if entry['operator'] == '-' else ''
        # Map API-level keys to ORM field names; anything else sorts on the
        # related sort model (hence the 'sort__' prefix).
        field = {
            'id': 'public_id',
            'index': 'listitem__index'
        }.get(entry['key'], entry['key'])
        if field != 'listitem__index':
            field = '%s%s' % (prefix, field)
        order_by.append('%s%s' % (direction, field))
    if order_by:
        qs = qs.order_by(*order_by, nulls_last=True)
    return qs
|
|
|
|
|
|
2011-01-24 19:14:38 +05:30
|
|
|
|
def _order_by_group(query):
|
|
|
|
|
if 'sort' in query:
|
|
|
|
|
if len(query['sort']) == 1 and query['sort'][0]['key'] == 'items':
|
2011-09-18 09:48:52 -04:00
|
|
|
|
order_by = query['sort'][0]['operator'] == '-' and '-items' or 'items'
|
2011-01-24 19:14:38 +05:30
|
|
|
|
if query['group'] == "year":
|
2011-10-30 01:32:11 +02:00
|
|
|
|
secondary = query['sort'][0]['operator'] == '-' and '-sortvalue' or 'sortvalue'
|
2011-09-18 09:48:52 -04:00
|
|
|
|
order_by = (order_by, secondary)
|
|
|
|
|
elif query['group'] != "keyword":
|
2011-10-30 01:32:11 +02:00
|
|
|
|
order_by = (order_by, 'sortvalue')
|
2011-01-24 19:14:38 +05:30
|
|
|
|
else:
|
2012-01-31 05:54:18 +05:30
|
|
|
|
order_by = (order_by, 'value')
|
2011-01-24 19:14:38 +05:30
|
|
|
|
else:
|
2011-10-30 01:32:11 +02:00
|
|
|
|
order_by = query['sort'][0]['operator'] == '-' and '-sortvalue' or 'sortvalue'
|
2011-01-24 19:14:38 +05:30
|
|
|
|
order_by = (order_by, 'items')
|
|
|
|
|
else:
|
2011-10-30 01:32:11 +02:00
|
|
|
|
order_by = ('-sortvalue', 'items')
|
2011-01-24 19:14:38 +05:30
|
|
|
|
return order_by
|
2011-01-04 13:02:32 +05:30
|
|
|
|
|
2011-02-22 16:26:06 +01:00
|
|
|
|
def parse_query(data, user):
    """Normalize an API request `data` dict into an internal query dict.

    Fills in defaults (range [0, 100], sort by title ascending), copies the
    recognized keys over, resolves the item queryset via the custom manager,
    and — when a clips sub-query is present — prepares the clip queryset,
    annotation filter, per-item clip limit and clip keys.
    """
    query = {}
    query['range'] = [0, 100]
    query['sort'] = [{'key': 'title', 'operator': '+'}]
    for key in ('sort', 'keys', 'group', 'range', 'position', 'positions'):
        if key in data:
            query[key] = data[key]
    # Reject non-integer range bounds (untrusted client input).
    if [r for r in query['range'] if not isinstance(r, int)]:
        query['range'] = [0, 0]
    query['qs'] = models.Item.objects.find(data, user)
    if 'clips' in data:
        conditions = {'query': data['clips']['query']}
        query['clip_qs'] = Clip.objects.find(conditions, user).order_by('start')
        query['clip_filter'] = models.Clip.objects.filter_annotations(conditions, user)
        # At most this many clips are returned per item (default 5, floor 0).
        query['clip_items'] = max(data['clips'].get('items', 5), 0)
        query['clip_keys'] = data['clips'].get('keys')
        if not query['clip_keys']:
            query['clip_keys'] = ['id', 'in', 'out', 'annotations']

    # group by only allows sorting by name or number of items
    return query
|
2009-08-01 16:14:54 +02:00
|
|
|
|
|
2017-07-25 20:25:30 +02:00
|
|
|
|
def get_group(request, query, data):
    """Return grouped (filter) results for query['group'].

    Aggregates facet values for the matching items and, depending on the
    request, returns either the positions of given values, a ranged list of
    {name, items} objects, or just the total number of groups.
    """
    group = {
        'items': []
    }
    items = 'items'
    item_qs = query['qs']
    order_by = _order_by_group(query)
    # Facet rows for the grouping key, restricted to the matched items,
    # aggregated to one row per distinct value with its item count.
    qs = models.Facet.objects.filter(key=query['group']).filter(item__id__in=item_qs)
    qs = qs.values('value').annotate(items=Count('id')).order_by(*order_by)

    if 'positions' in query:
        group['positions'] = {}
        ids = [j['value'] for j in qs]
        group['positions'] = utils.get_positions(ids, query['positions'])
    elif 'range' in data:
        qs = qs[query['range'][0]:query['range'][1]]
        group['items'] = [{'name': i['value'], 'items': i[items]} for i in qs]
    else:
        # no range requested: only return the number of distinct groups
        group['items'] = qs.count()
    return group
|
|
|
|
|
|
|
|
|
|
def get_position(request, query, data):
    """Return the position (index in the current sort order) of the first
    item whose primary sort key starts with query['position'].

    Builds the full ordered id list, then narrows the query with an extra
    starts-with condition and maps the first match back to its index.
    """
    response = {}
    qs = _order_query(query['qs'], query['sort'])
    ids = [j['public_id'] for j in qs.values('public_id')]
    # Fix: `data['conditions']` is a list of condition objects; the original
    # code concatenated a bare dict to it, which raises
    # "TypeError: can only concatenate list (not "dict") to list".
    # The extra condition must be wrapped in a list.
    data['conditions'] = data['conditions'] + [{
        'value': query['position'],
        'key': query['sort'][0]['key'],
        'operator': '^'
    }]
    query = parse_query(data, request.user)
    qs = _order_query(query['qs'], query['sort'])
    if qs.count() > 0:
        response['position'] = utils.get_positions(ids, [qs[0].public_id])[0]
    return response
|
|
|
|
|
|
|
|
|
|
def get_positions(request, query):
    """Map the item ids in query['positions'] to their indexes within the
    current sort order of the matched items."""
    ordered = _order_query(query['qs'], query['sort'])
    all_ids = [public_id for public_id in ordered.values_list('public_id', flat=True)]
    return utils.get_positions(all_ids, query['positions'])
|
|
|
|
|
|
|
|
|
|
def is_editable(request, item):
    """Return True if the requesting user may edit `item` (a cached item
    dict with optional 'user' and 'groups' entries)."""
    user = request.user
    if user.is_anonymous():
        return False
    # Compute the user's group names once per request and cache them on the
    # request object, since this runs once per returned item.
    if not hasattr(request, 'user_group_names'):
        request.user_group_names = {g.name for g in user.groups.all()}
    if user.profile.capability('canEditMetadata'):
        return True
    if user.is_staff:
        return True
    if item.get('user') == user.username:
        return True
    # Editable when the user shares at least one group with the item.
    if set(item.get('groups', [])) & request.user_group_names:
        return True
    return False
|
|
|
|
|
|
|
|
|
|
def get_clips(query, qs):
|
|
|
|
|
n = qs.count()
|
|
|
|
|
if n > query['clip_items']:
|
|
|
|
|
num = query['clip_items']
|
|
|
|
|
clips = []
|
|
|
|
|
step = int(n / (num + 1))
|
|
|
|
|
i = step
|
|
|
|
|
while i <= (n - step) and i < n and len(clips) < num:
|
|
|
|
|
clips.append(qs[i])
|
|
|
|
|
i += step
|
|
|
|
|
else:
|
|
|
|
|
clips = qs
|
|
|
|
|
return [c.json(query['clip_keys'], query['clip_filter']) for c in clips]
|
|
|
|
|
|
|
|
|
|
def only_p_sums(request, query, m):
    """Build a result dict for item `m` (a models.Item instance) containing
    only the keys requested in query['keys'].

    Values come from the related sort model (m.sort) for sort keys, from
    the cached item dict (m.cache) otherwise; 'editable' and 'accessed'
    are special-cased. Clips are attached when a clips sub-query is active.
    """
    r = {}
    for p in query['keys']:
        if p == 'accessed':
            # normalize a missing timestamp to '' for the API
            r[p] = m.sort.accessed or ''
        elif p == 'editable':
            r[p] = is_editable(request, m.cache)
        elif p in item_sort_keys:
            # these live on the related ItemSort row, not in the cache
            r[p] = getattr(m.sort, p)
        else:
            r[p] = m.cache.get(p)
    if 'clip_qs' in query:
        r['clips'] = get_clips(query, query['clip_qs'].filter(item=m))
    return r
|
|
|
|
|
|
|
|
|
|
def only_p(request, query, m):
    """Build a result dict with only the requested keys from a cached item
    dict `m` (the value of Item.cache).

    Counterpart of only_p_sums() for the fast path that fetches only the
    cache column instead of full model instances.
    """
    r = {}
    if m:
        if not isinstance(m, dict):
            # NOTE(review): `oxdjango` is not imported as a module in the
            # visible import block, so this fallback would raise NameError
            # if ever reached — confirm whether cache values can actually
            # arrive as JSON strings here.
            m = json.loads(m, object_hook=oxdjango.fields.from_json)
        for p in query['keys']:
            if p == 'editable':
                r[p] = is_editable(request, m)
            else:
                r[p] = m.get(p)
    if 'clip_qs' in query:
        # Fix: get_clips() takes (query, qs); the `query` argument was
        # missing, so requesting clips on this path raised TypeError
        # (cf. the call in only_p_sums()).
        r['clips'] = get_clips(query, query['clip_qs'].filter(item__public_id=m['id']))
    return r
|
|
|
|
|
|
|
|
|
|
# Item keys whose values live on the related ItemSort model (item.sort)
# rather than in the cached item dict; see only_p_sums() and get_items().
item_sort_keys = {
    'accessed', 'modified', 'timesaccessed',
    'numberofannotations', 'numberoffiles', 'numberofdocuments'
}
|
|
|
|
|
|
|
|
|
|
def get_items(request, query):
    """Return the serialized items for a find query, ordered and sliced to
    the requested range.

    Uses the fast cache-only path unless one of the requested keys has to
    be read from the related sort model.
    """
    ordered = _order_query(query['qs'], query['sort'])
    ordered = ordered[query['range'][0]:query['range'][1]]
    needs_sort_model = any(p in item_sort_keys for p in query['keys'])
    if needs_sort_model:
        # Sort-model values are required, so fetch full model instances
        # (with related rows) instead of just the cache column.
        ordered = ordered.select_related()
        return [only_p_sums(request, query, m) for m in ordered]
    # Fast path: only the cached dicts are needed.
    return [only_p(request, query, m['cache']) for m in ordered.values('cache')]
|
|
|
|
|
|
|
|
|
|
def get_stats(request, query):
    """Return aggregate totals (duration, files, items, pixels, runtime,
    size) for the matched items.

    Only totals enabled in settings.CONFIG['totals'] — and permitted by the
    user's capabilities — are computed and returned.
    """
    stats = {}
    items = query['qs']
    # Only selected, non-empty files count towards the totals.
    files = File.objects.filter(item__in=items).filter(selected=True).filter(size__gt=0)
    r = files.aggregate(
        Sum('duration'),
        Sum('pixels'),
        Sum('size')
    )
    # Totals the current user is allowed to see.
    totals = [
        i['id']
        for i in settings.CONFIG['totals']
        if 'capability' not in i or has_capability(request.user, i['capability'])
    ]
    if 'duration' in totals:
        stats['duration'] = r['duration__sum']
    if 'files' in totals:
        stats['files'] = files.count()
    if 'items' in totals:
        stats['items'] = items.count()
    if 'pixels' in totals:
        stats['pixels'] = r['pixels__sum']
    if 'runtime' in totals:
        stats['runtime'] = items.aggregate(Sum('sort__runtime'))['sort__runtime__sum'] or 0
    if 'size' in totals:
        stats['size'] = r['size__sum']
    # Sum() aggregates return None over empty querysets; report 0 instead.
    for key in ('runtime', 'duration', 'pixels', 'size'):
        if key in totals and stats[key] is None:
            stats[key] = 0
    return stats
|
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def find(request, data):
    '''
    Finds items for a given query

    takes {
        clipsQuery: object, // clips query object (optional)
        group: string, // item key to group results by (optional)
        keys: [string], // list of keys to return, [] for all (optional)
        positions: [string], // list of item ids (optional)
        query: { // query object
            conditions: [{ // list of condition objects...
                key: string, // item key
                operator: string, // comparison operator, see below
                value: string // value
            }, { // ... and/or query objects (nested subconditions)
                conditions: [object, ...], // list of condition objects
                operator: string // logical operator, '&' or '|'
            }],
            operator: string // logical operator, '&' or '|'
        },
        range: [int, int] // items to return, per current sort order
        sort: [{ // list of sort objects, applied in the given ordering
            key: string, // item key
            operator: string // sort operator, '+' or '-'
        }]
    }
    returns { // if `keys` is present
        items: [ // returns list of matching items
            {
                id: string, // item id
                ... // more item properties
            },
            ... // more items
        ]
    } or { // if `clipsQuery` is present
        clips: [ // returns list of matching clips
            {
                id: string, // clip id
                ... // more clip properties
            },
            ... // more clips
        ]
    } or { // if `group` is present
        items: [ // returns results for filters
            {
                name: string, // value for item key specified as group
                items: int // number of matches
            },
            ... // more group objects
        ]
    } or { // if `keys` is missing
        items: int // returns total number of items
    } or { // if `positions` is present
        positions: { // returns positions of given items
            id: position, // position of the item, per current sort order
            ... // more id/position pairs
        }
    }
    notes: Comparison operators are '=' (contains) '==' (is), '^' (starts with),
    '$' (ends with), '<', '<=', '>', or '>=', each optionally prefixed with '!'
    (not).
    To make a query for "all videos in groups that the user is in", pass
    "groups" as key and "$my" as value.
    Leaving out `keys` or passing `positions` can be useful when building a
    responsive UI: First leave out `keys` to get totals as fast as possible,
    then pass `positions` to get the positions of previously selected items,
    finally make the query with the `keys` you need and an appropriate `range`.
    For more examples, see https://wiki.0x2620.org/wiki/pandora/QuerySyntax.
    see: add, edit, get, lookup, remove, upload
    '''
    if settings.JSON_DEBUG:
        print(json.dumps(data, indent=2))
    query = parse_query(data, request.user)

    response = json_response({})
    # Dispatch on what the client asked for; see the docstring above for the
    # shape of each response variant.
    if 'group' in query:
        response['data'] = get_group(request, query, data)
    elif 'position' in query:
        response['data'] = get_position(request, query, data)
    elif 'positions' in query:
        response['data']['positions'] = get_positions(request, query)
    elif 'keys' in query:
        response['data']['items'] = get_items(request, query)
    else:  # otherwise stats
        response['data'] = get_stats(request, query)
    return render_to_json_response(response)
actions.register(find)
|
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def autocomplete(request, data):
    '''
    Returns autocomplete strings for a given item key and search string

    takes {
        key: string, // item key
        value: string, // search string
        operator: string, // '=', '==', '^', '$'
        query: object, // item query to limit results, see `find`
        range: [int, int] // range of results to return
    }
    returns {
        items: [string, ...] // list of matching strings
    }
    see: autocompleteDocuments, autocompleteEntities
    '''
    if 'range' not in data:
        data['range'] = [0, 10]
    op = data.get('operator', '=')

    key = utils.get_by_id(settings.CONFIG['itemKeys'], data['key'])
    # Per-key autocomplete ordering from the config; falls back to
    # most-frequent-first.
    order_by = key.get('autocompleteSort', False)
    if order_by:
        for o in order_by:
            if o['operator'] != '-':
                o['operator'] = ''
        order_by = ['%(operator)ssort__%(key)s' % o for o in order_by]
    else:
        order_by = ['-items']
    sort_type = key.get('sortType', key.get('type', 'string'))
    if sort_type == 'title':
        # Title-like keys: search the per-item find index so the user's
        # item query can restrict matches, then de-duplicate the values.
        qs = parse_query({'query': data.get('query', {})}, request.user)['qs']
        if data['value']:
            if op == '=':
                qs = qs.filter(find__key=data['key'], find__value__icontains=data['value'])
            elif op == '==':
                qs = qs.filter(find__key=data['key'], find__value__iexact=data['value'])
            elif op == '^':
                qs = qs.filter(find__key=data['key'], find__value__istartswith=data['value'])
            elif op == '$':
                qs = qs.filter(find__key=data['key'], find__value__iendswith=data['value'])
        qs = qs.order_by(*order_by, nulls_last=True)
        qs = qs[data['range'][0]:data['range'][1]]
        response = json_response({})
        response['data']['items'] = list(set([i.get(data['key']) for i in qs]))
    else:
        # Facet-backed keys: match distinct facet values directly and count
        # the items per value.
        qs = models.Facet.objects.filter(key=data['key'])
        if data['value']:
            if op == '=':
                qs = qs.filter(value__icontains=data['value'])
            elif op == '==':
                qs = qs.filter(value__iexact=data['value'])
            elif op == '^':
                qs = qs.filter(value__istartswith=data['value'])
            elif op == '$':
                qs = qs.filter(value__iendswith=data['value'])
        if 'query' in data:
            item_query = parse_query({'query': data.get('query', {})}, request.user)['qs']
            qs = qs.filter(item__in=item_query)
        qs = qs.values('value').annotate(items=Count('id'))
        qs = qs.order_by(*order_by)
        qs = qs[data['range'][0]:data['range'][1]]
        response = json_response({})
        response['data']['items'] = [i['value'] for i in qs]
    return render_to_json_response(response)
actions.register(autocomplete)
|
2010-12-22 13:15:37 +05:30
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def findId(request, data):
    '''
    Undocumented

    Looks up an item by exact public id, falling back to the configured
    external data service (e.g. IMDb) when no local match exists.

    takes {
        id: string
        title: string
        director: [string]
        year: int
    }
    '''
    response = json_response({})
    response['data']['items'] = []
    if 'id' in data:
        qs = models.Item.objects.filter(public_id=data['id'])
        if qs.count() == 1:
            response['data']['items'] = [
                i.json(['title', 'director', 'year', 'id']) for i in qs
            ]

    # No local match: try the external data service if one is configured.
    if not response['data']['items'] \
            and settings.USE_IMDB \
            and settings.DATA_SERVICE:
        r = models.external_data('getId', data)
        if r['status']['code'] == 200:
            response['data']['items'] = [r['data']]
    return render_to_json_response(response)
actions.register(findId)
|
2011-01-04 13:02:32 +05:30
|
|
|
|
|
2013-02-24 08:26:59 +00:00
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def getMetadata(request, data):
    '''
    Gets metadata from an external service

    takes {
        id: string, // item id
        keys: [string] // list of item keys to return
    }
    returns {
        key: value // item key and value
        ... // more key/value pairs
    }
    notes: This can be used to populate metadata from a remote source, like
    IMDb.
    see: getIds, updateExternalData
    '''
    response = json_response({})
    if settings.DATA_SERVICE:
        # Disabled alternative lookup kept for reference (string expression,
        # intentionally not executed):
        '''
        info = {}
        for c in data['query']['conditions']:
            info[c['key']] = c['value']
        r = models.external_data('getId', info)
        '''
        r = models.external_data('getData', {'id': data['id']})
        if r['status']['code'] == 200:
            if 'keys' in data and data['keys']:
                # Only copy over the requested keys.
                for key in data['keys']:
                    if key in r['data']:
                        response['data'][key] = r['data'][key]
            else:
                response['data'] = r['data']
    return render_to_json_response(response)
actions.register(getMetadata)
|
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def getIds(request, data):
    '''
    Gets ids from an external service

    takes {
        title: string, // title
        director: [string], // list of directors
        year: int // year
    }
    returns {
        items: [{
            title: string, // title
            director: [string], // list of directors
            year: int, // year
            originalTitle: string // original title
        }]
    }
    notes: This can be used to populate metadata from a remote source, like
    IMDb.
    see: getMetadata, updateExternalData
    '''
    response = json_response({})
    if settings.DATA_SERVICE:
        r = models.external_data('getIds', data)
        if r['status']['code'] == 200:
            response['data']['items'] = r['data']['items']
        else:
            # Fix: this branch was a bare no-op expression
            # (`response['data']['items']`); it now explicitly returns an
            # empty list when the external lookup fails.
            response['data']['items'] = []
    return render_to_json_response(response)
actions.register(getIds)
|
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def get(request, data):
    '''
    Gets an item by id

    takes {
        id: string, // item id
        keys: [string] // item properties to return
    }
    returns {
        key: value, // item key and value
        ... // more key/value pairs
    }
    see: add, edit, find, lookup, remove, upload
    '''
    response = json_response({})
    data['keys'] = data.get('keys', [])
    item = get_object_or_404_json(models.Item, public_id=data['id'])
    if item.access(request.user):
        info = item.json(data['keys'])
        # stream/streams are included by default (empty `keys`) or on request;
        # layers/documents/files only when explicitly requested.
        if not data['keys'] or 'stream' in data['keys']:
            info['stream'] = item.get_stream()
        if not data['keys'] or 'streams' in data['keys']:
            info['streams'] = [s.file.oshash for s in item.streams()]
        if data['keys'] and 'layers' in data['keys']:
            info['layers'] = item.get_layers(request.user)
        if data['keys'] and 'documents' in data['keys']:
            info['documents'] = item.get_documents(request.user)
        if data['keys'] and 'files' in data['keys']:
            info['files'] = item.get_files(request.user)
        # NOTE(review): due to operator precedence this parses as
        # `not keys or (groups-requested and editable)`, so groups are always
        # included when `keys` is empty, regardless of editability — confirm
        # whether `(not keys or groups-requested) and editable` was intended.
        if not data['keys'] or 'groups' in data['keys'] \
                and item.editable(request.user):
            info['groups'] = [g.name for g in item.groups.all()]
        # Strip capability-gated keys the user may not see (duration-related
        # keys are always kept).
        for k in settings.CONFIG['itemKeys']:
            if 'capability' in k \
                    and not (item.editable(request.user) or has_capability(request.user, k['capability'])) \
                    and k['id'] in info \
                    and k['id'] not in ('parts', 'durations', 'duration'):
                del info[k['id']]
        info['editable'] = item.editable(request.user)
        response['data'] = info
    else:
        # response = json_response(status=403, text='permission denied')
        # 404 (not 403) so inaccessible items are indistinguishable from
        # nonexistent ones.
        response = json_response(status=404, text='not found')
    return render_to_json_response(response)
actions.register(get)
|
|
|
|
|
|
2017-03-03 18:42:54 +01:00
|
|
|
|
def edit_item(request, item, data):
    """Apply edit `data` to `item`, enforcing per-field permissions.

    Handles the privileged fields 'rightslevel', 'user' and 'groups'
    specially (removing them from `data` once processed), then delegates
    the remaining keys to item.edit(). Returns a json_response dict;
    status 403 if a privileged change was denied.
    """
    update_clips = False
    response = json_response(status=200, text='ok')
    if 'rightslevel' in data:
        if request.user.profile.capability('canEditRightsLevel'):
            item.level = int(data['rightslevel'])
        else:
            response = json_response(status=403, text='permission denied')
        del data['rightslevel']
    if 'user' in data:
        # Only admin/staff may reassign ownership, and only to an existing user.
        if request.user.profile.get_level() in ('admin', 'staff') and \
           models.User.objects.filter(username=data['user']).exists():
            new_user = models.User.objects.get(username=data['user'])
            if new_user != item.user:
                item.user = new_user
                # ownership changes must be propagated to the item's clips
                update_clips = True
        del data['user']
    if 'groups' in data:
        if not request.user.profile.capability('canManageUsers'):
            # Users without canManageUsers can only add/remove groups they
            # are in themselves; the item's membership in other groups is
            # preserved unchanged.
            groups = set([g.name for g in item.groups.all()])
            user_groups = set([g.name for g in request.user.groups.all()])
            other_groups = list(groups - user_groups)
            data['groups'] = [g for g in data['groups'] if g in user_groups] + other_groups
    r = item.edit(data)
    if r:
        # item.edit() may return an async task handle; wait for it so the
        # response reflects the saved state.
        r.wait()
    if update_clips:
        tasks.update_clips.delay(item.public_id)
    return response
|
|
|
|
|
|
2013-02-25 10:30:29 +00:00
|
|
|
|
@login_required_json
def add(request, data):
    '''
    Adds a new item (without video)

    takes {
        title: string, // title (optional)
        ... // more key/value pairs (like edit, can be passed to add)
    }
    returns {
        id: string, // item id
        title: string, // title
        ... // more item properties
    }
    notes: To allow for this, set config option `itemRequiresVideo` to false.
    see: edit, find, get, lookup, remove, upload
    '''
    if not request.user.profile.capability('canAddItems'):
        response = json_response(status=403, text='permission denied')
    else:
        response = json_response(status=200, text='created')
        data['title'] = data.get('title', 'Untitled')
        # keep an unmodified copy for the changelog before edit_item()
        # strips privileged keys from `data`
        request_data = data.copy()
        item = models.Item()
        item.data['title'] = data['title']
        item.user = request.user
        p = item.save()
        if p:
            # save() returned an async task handle; wait for it
            p.wait()
        else:
            # Fix: this called `i.make_poster()` where `i` was never
            # defined (NameError); the newly created item is `item`.
            item.make_poster()
        del data['title']
        if data:
            # apply any remaining keys like a regular edit
            response = edit_item(request, item, data)
        response['data'] = item.json()
        add_changelog(request, request_data, item.public_id)
    return render_to_json_response(response)
actions.register(add, cache=False)
|
|
|
|
|
|
2010-01-26 18:41:57 +05:30
|
|
|
|
@login_required_json
def edit(request, data):
    '''
    Edits metadata of an item

    takes {
        id: string, // item id
        key: value, // item key and new value
        ... // more key/value pairs
    }
    returns {
        key: value // item key and new value
        ... // more key/value pairs
    }
    see: add, find, get, lookup, remove, upload
    '''
    item = get_object_or_404_json(models.Item, public_id=data['id'])
    if item.editable(request.user):
        # copy before edit_item() mutates/strips keys, so the changelog
        # records the request as received
        request_data = data.copy()
        response = edit_item(request, item, data)
        response['data'] = item.json()
        add_changelog(request, request_data)
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)
actions.register(edit, cache=False)
|
2010-01-26 07:54:05 +05:30
|
|
|
|
|
2010-01-26 18:41:57 +05:30
|
|
|
|
@login_required_json
def remove(request, data):
    '''
    Removes an item
    takes {
        id: string // item id
    }
    returns {}
    notes: The return status code is 200 for success or 403 for permission denied.
    see: add, edit, find, get, lookup, upload
    '''
    response = json_response({})
    item = get_object_or_404_json(models.Item, public_id=data['id'])
    user = request.user
    # staff, owners, users with the capability, or members of one of the
    # item's groups may remove an item (short-circuits left to right)
    can_remove = (
        user.profile.capability('canRemoveItems')
        or user.is_staff
        or item.user == user
        or item.groups.filter(id__in=user.groups.all()).count() > 0
    )
    if can_remove:
        add_changelog(request, data)
        item.delete()
        response = json_response(status=200, text='removed')
    else:
        response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)
actions.register(remove, cache=False)
|
2010-01-26 07:54:05 +05:30
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def setPosterFrame(request, data):
    '''
    Sets the poster frame for an item
    takes {
        id: string, // item id
        position: float // position in seconds
    }
    returns {}
    see: setPoster
    '''
    item = get_object_or_404_json(models.Item, public_id=data['id'])
    if not item.editable(request.user):
        response = json_response(status=403, text='permission denied')
    else:
        item.poster_frame = float(data['position'])
        item.save()
        # regenerate the poster asynchronously from the new frame
        tasks.update_poster(item.public_id)
        response = json_response()
        add_changelog(request, data)
    return render_to_json_response(response)
actions.register(setPosterFrame, cache=False)
|
2010-08-07 16:31:20 +02:00
|
|
|
|
|
2013-03-04 19:35:06 +00:00
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def setPoster(request, data):
    '''
    Sets the poster for an item
    takes {
        id: string, // item id
        source: string // poster url
    }
    returns {
        poster: {
            height: int, // height in px
            url: string, // poster url
            width: int // width in px
        }
    }
    see: setPosterFrame
    '''
    item = get_object_or_404_json(models.Item, public_id=data['id'])
    response = json_response()
    if not item.editable(request.user):
        response = json_response(status=403, text='permission denied')
    elif data['source'] not in [p['source'] for p in item.get_posters()]:
        # only sources the item itself offers are accepted
        response = json_response(status=403, text='invalid poster url')
    else:
        item.poster_source = data['source']
        item.remove_poster()
        item.save()
        # regenerate the poster asynchronously from the new source
        tasks.update_poster(item.public_id)
        response = json_response()
        response['data']['posterAspect'] = item.poster_width/item.poster_height
        add_changelog(request, data)
    return render_to_json_response(response)
actions.register(setPoster, cache=False)
|
2010-01-25 14:50:21 +05:30
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def updateExternalData(request, data):
    '''
    Updates metadata from an external service
    takes {
        id: string // item id
    }
    returns {
        taskId: string, // taskId
    }
    notes: This can be used to populate metadata from a remote source, like
        IMDb.
    see: getIds, getMetadata
    '''
    item = get_object_or_404_json(models.Item, public_id=data['id'])
    response = json_response()
    if not item.editable(request.user):
        response = json_response(status=403, text='permission denied')
    else:
        # kick off the update asynchronously and hand the task id back
        task = tasks.update_external.delay(item.public_id)
        response['data']['taskId'] = task.task_id
    return render_to_json_response(response)
actions.register(updateExternalData, cache=False)
|
|
|
|
|
|
2014-10-06 08:26:43 +00:00
|
|
|
|
def lookup(request, data):
    '''
    Looks up an item given partial metadata
    takes {
        director: [string], // directors (optional)
        id: string, // item id (optional)
        title: string, // title (optional)
        year: string // year (optional)
    }
    returns {
        director: [string], // director
        id: string, // item id
        title: string, // title
        year: string // year
    }
    see: add, edit, find, get, remove, upload
    '''
    match = None
    if 'id' in data:
        match = models.Item.objects.get(public_id=data['id'])
    elif all(key in ('title', 'year', 'director') for key in data):
        # all supplied keys are metadata fields: AND them into one query
        candidates = models.Item.objects.find({'query': {
            'conditions': [
                {'key': key, 'value': data[key], 'operator': '=='}
                for key in data
            ],
            'operator': '&'
        }}, request.user)
        # only an unambiguous single hit counts as a match
        if candidates.count() == 1:
            match = candidates[0]
    if not match:
        response = json_response(status=404, text='not found')
    else:
        result = {'id': match.public_id}
        for key in ('title', 'director', 'year'):
            value = match.get(key)
            if value is not None:
                result[key] = value
        response = json_response(result)
    return render_to_json_response(response)
actions.register(lookup)
|
|
|
|
|
|
2013-02-24 08:26:59 +00:00
|
|
|
|
|
2011-08-16 12:11:07 +02:00
|
|
|
|
def frame(request, id, size, position=None):
    """Serve a single frame of an item's video as a JPEG of the given size.

    position is an optional timestamp string (seconds; ',' is accepted as
    decimal separator). When omitted, a representative poster position is
    derived from the item's poster_frame / pre-extracted frames.
    """
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    frame = None
    if not position:
        # no explicit timestamp: choose a poster position for the item
        if settings.CONFIG['media']['importFrames'] or item.poster_frame == -1:
            frames = item.poster_frames()
            if frames:
                position = item.poster_frame
                # -1 (unset) or out of range: fall back to the middle frame
                if position == -1 or position > len(frames):
                    position = int(len(frames)/2)
                position = frames[int(position)]['position']
            elif item.poster_frame == -1 and item.sort.duration:
                # no extracted frames: use the midpoint of the video
                position = item.sort.duration/2
            else:
                position = 0
        else:
            position = item.poster_frame
    else:
        # tolerate ',' as decimal separator in the URL
        position = float(position.replace(',', '.'))

    if not frame:
        frame = item.frame(position, int(size))

    if not frame:
        # serve a generic placeholder rather than 404ing
        frame = os.path.join(settings.STATIC_ROOT, 'jpg/list256.jpg')
        # raise Http404
    response = HttpFileResponse(frame, content_type='image/jpeg')
    if request.method == 'OPTIONS':
        # CORS preflight: mark the frame as accessible cross-origin
        response.allow_access()
    return response
|
2010-09-08 13:56:58 +02:00
|
|
|
|
|
2011-07-26 19:22:23 +02:00
|
|
|
|
def poster_frame(request, id, position):
    """Serve the pre-extracted poster frame at index `position` for an item."""
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    position = int(position)
    frames = item.poster_frames()
    if not frames or position >= len(frames):
        raise Http404
    return HttpFileResponse(frames[position]['path'], content_type='image/jpeg')
|
|
|
|
|
|
2011-01-04 13:02:32 +05:30
|
|
|
|
|
2011-08-23 19:39:34 +02:00
|
|
|
|
def image_to_response(image, size=None):
    """Serve an image field as JPEG, optionally downscaled to `size`.

    Scaled variants are cached next to the original as `<name>.<size>.jpg`;
    a requested size larger than the source falls back to the original.
    """
    if not size:
        return HttpFileResponse(image.path, content_type='image/jpeg')
    size = int(size)
    scaled_path = image.path.replace('.jpg', '.%d.jpg' % size)
    if not os.path.exists(scaled_path):
        if size > max(image.width, image.height):
            # upscaling would not help; serve the original
            scaled_path = image.path
        else:
            extract.resize_image(image.path, scaled_path, size=size)
    return HttpFileResponse(scaled_path, content_type='image/jpeg')
|
|
|
|
|
|
2011-08-23 19:39:34 +02:00
|
|
|
|
def siteposter(request, id, size=None):
    """Serve the generated site poster for an item, optionally scaled down."""
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    poster = item.path('siteposter.jpg')
    poster = os.path.abspath(os.path.join(settings.MEDIA_ROOT, poster))
    if size:
        size = int(size)
        image = Image.open(poster)
        # only downscale; requests at or above the source size get the original
        if size < max(image.size):
            scaled = poster.replace('.jpg', '.%d.jpg' % size)
            extract.resize_image(poster, scaled, size=size)
            poster = scaled
    return HttpFileResponse(poster, content_type='image/jpeg')
|
2011-01-04 13:02:32 +05:30
|
|
|
|
|
2018-01-14 20:55:50 +01:00
|
|
|
|
|
|
|
|
|
def temp_poster():
    """Serve the generic placeholder poster, marked as non-cacheable.

    A short Expires header keeps clients re-checking, so the real poster
    shows up as soon as it has been generated.
    """
    placeholder = os.path.join(settings.STATIC_ROOT, 'jpg/poster.jpg')
    with open(placeholder, 'rb') as fd:
        response = HttpResponse(fd.read(), content_type='image/jpeg')
    expires = datetime.utcnow() + timedelta(seconds=10)
    response['Cache-Control'] = 'no-store'
    response['Expires'] = datetime.strftime(expires, "%a, %d-%b-%Y %H:%M:%S GMT")
    return response
|
|
|
|
|
|
2010-09-13 17:19:38 +02:00
|
|
|
|
def poster(request, id, size=None):
    """Serve the item's poster image (optionally resized), or a placeholder.

    If a poster file exists on disk but the model row does not record it
    (e.g. after an import), the row is synced from the file first.
    """
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if not item.poster:
        poster_path = os.path.join(settings.MEDIA_ROOT, item.path('poster.jpg'))
        if os.path.exists(poster_path):
            # file exists but DB is stale: adopt the file and persist its
            # dimensions without touching item.save() (avoids side effects)
            item.poster.name = item.path('poster.jpg')
            item.poster_height = item.poster.height
            item.poster_width = item.poster.width
            models.Item.objects.filter(pk=item.id).update(
                poster=item.poster.name,
                poster_height=item.poster_height,
                poster_width=item.poster_width,
                icon=item.icon.name,
                json=item.json()
            )
    if item.poster and os.path.exists(item.poster.path):
        return image_to_response(item.poster, size)
    else:
        return temp_poster()
|
2010-09-08 13:56:58 +02:00
|
|
|
|
|
2011-01-04 13:02:32 +05:30
|
|
|
|
def icon(request, id, size=None):
    """Serve the item's icon image (optionally resized), or a placeholder."""
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if not (item.icon and os.path.exists(item.icon.path)):
        return temp_poster()
    return image_to_response(item.icon, size)
|
2011-01-04 13:02:32 +05:30
|
|
|
|
|
2012-05-17 11:38:59 +02:00
|
|
|
|
def timeline(request, id, size, position=-1, format='jpg', mode=None):
    """Serve a rendered timeline image for an item.

    Tries the requested mode first; if that file has not been rendered yet,
    falls back through the remaining configured timeline modes.
    """
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()

    if not mode:
        mode = 'antialias'
    modes = [t['id'] for t in settings.CONFIG['timelines']]
    if mode not in modes:
        raise Http404
    modes.remove(mode)

    prefix = os.path.join(item.timeline_prefix, 'timeline')
    position = int(position)

    def tile_path(m):
        # file layout: <prefix><mode><size>p[<position>].jpg
        name = '%s%s%sp' % (prefix, m, size)
        if position > -1:
            name += '%d' % int(position)
        return name + '.jpg'

    path = tile_path(mode)
    while modes and not os.path.exists(path):
        mode = modes.pop(0)
        path = tile_path(mode)
    response = HttpFileResponse(path, content_type='image/jpeg')
    if request.method == 'OPTIONS':
        # CORS preflight: mark the timeline as accessible cross-origin
        response.allow_access()
    return response
|
2011-01-16 18:58:57 +05:30
|
|
|
|
|
2016-10-10 11:28:21 +02:00
|
|
|
|
def download_source(request, id, part=None):
    """Download the original source file of an item (or one of its parts).

    part is 1-based in the URL; it defaults to the first part. Responds
    404 when the part does not exist or its source data is unavailable,
    403 when the user may not access the item.
    """
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if part:
        part = int(part) - 1  # URL parts are 1-based
    else:
        part = 0
    streams = item.streams()
    # FIX: was `part > len(streams)` — that let part == len(streams) slip
    # through and raise IndexError on streams[part]; 404 instead.
    if part >= len(streams):
        raise Http404
    f = streams[part].file
    if not f.data:
        raise Http404

    # "<title> - <site> <public_id>.<ext>"
    parts = ['%s - %s ' % (item.get('title'), settings.SITENAME), item.public_id]
    parts.append('.')
    parts.append(f.extension)
    filename = ''.join(parts)

    path = f.data.path
    content_type = mimetypes.guess_type(path)[0]
    response = HttpFileResponse(path, content_type=content_type)
    response['Content-Disposition'] = "attachment; filename*=UTF-8''%s" % quote(filename.encode('utf-8'))
    return response
|
|
|
|
|
|
2014-03-08 11:18:12 +00:00
|
|
|
|
def download(request, id, resolution=None, format='webm'):
    """Download an item's full video at the given resolution and format.

    Invalid or missing resolutions fall back to the highest configured one.
    Multi-part items are merged into a temporary file before serving.
    """
    item = get_object_or_404(models.Item, public_id=id)
    if not resolution or int(resolution) not in settings.CONFIG['video']['resolutions']:
        resolution = max(settings.CONFIG['video']['resolutions'])
    else:
        resolution = int(resolution)
    if not item.access(request.user) or not item.rendered:
        return HttpResponseForbidden()
    ext = '.%s' % format
    # "<title> - <site> <public_id>[.<res>p].<ext>"; resolution suffix only
    # when it differs from the default (highest) resolution
    parts = ['%s - %s ' % (item.get('title'), settings.SITENAME), item.public_id]
    if resolution != max(settings.CONFIG['video']['resolutions']):
        parts.append('.%dp' % resolution)
    parts.append(ext)
    filename = ''.join(parts)
    video = NamedTemporaryFile(suffix=ext)
    content_type = mimetypes.guess_type(video.name)[0]
    # merge_streams returns True when it wrote into `video`, a path when a
    # single existing file can be served directly, or falsy on failure
    r = item.merge_streams(video.name, resolution, format)
    if not r:
        return HttpResponseForbidden()
    elif r is True:
        response = HttpResponse(FileWrapper(video), content_type=content_type)
        response['Content-Length'] = os.path.getsize(video.name)
    else:
        response = HttpFileResponse(r, content_type=content_type)
    response['Content-Disposition'] = "attachment; filename*=UTF-8''%s" % quote(filename.encode('utf-8'))
    return response
|
|
|
|
|
|
2011-07-03 18:21:27 +02:00
|
|
|
|
def torrent(request, id, filename=None):
    """Serve an item's .torrent file, or a file from its torrent folder.

    Without a filename (or with a .torrent name) the torrent metafile is
    generated and served; otherwise the named file inside the item's
    torrent directory is served as an attachment.
    """
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if not item.torrent:
        raise Http404
    if not filename or filename.endswith('.torrent'):
        response = HttpResponse(item.get_torrent(request),
            content_type='application/x-bittorrent')
        filename = utils.safe_filename("%s.torrent" % item.get('title'))
        response['Content-Disposition'] = "attachment; filename*=UTF-8''%s" % quote(filename.encode('utf-8'))
        return response
    while filename.startswith('/'):
        filename = filename[1:]
    filename = filename.replace('/../', '/')
    filename = item.path('torrent/%s' % filename)
    filename = os.path.abspath(os.path.join(settings.MEDIA_ROOT, filename))
    # SECURITY FIX: the single-pass '/../' replace above is bypassable
    # (e.g. '/../../x' still escapes after one replacement); refuse any
    # resolved path that leaves the item's torrent directory.
    torrent_root = os.path.abspath(os.path.join(settings.MEDIA_ROOT, item.path('torrent')))
    if not filename.startswith(torrent_root + os.sep):
        raise Http404
    response = HttpFileResponse(filename)
    response['Content-Disposition'] = "attachment; filename*=UTF-8''%s" % \
        quote(os.path.basename(filename.encode('utf-8')))
    return response
|
2011-07-03 18:21:27 +02:00
|
|
|
|
|
2014-07-23 17:27:27 +02:00
|
|
|
|
def video(request, id, resolution, format, index=None, track=None):
    """Serve an item's video stream, with optional server-side cutting.

    index is the 1-based part number (defaults to the first part); the
    `t` query parameter ("start,end" in seconds or HH:MM:SS) chops a clip
    out of the stream and serves it as an attachment.
    """
    resolution = int(resolution)
    resolutions = sorted(settings.CONFIG['video']['resolutions'])
    if resolution not in resolutions:
        raise Http404
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    if index:
        index = int(index) - 1  # URL parts are 1-based
    else:
        index = 0
    streams = item.streams(track)
    if index + 1 > streams.count():
        raise Http404
    stream = streams[index].get(resolution, format)
    if not stream.available or not stream.media:
        raise Http404
    path = stream.media.path

    # server side cutting
    t = request.GET.get('t')
    if t:
        def parse_timestamp(s):
            # accepts plain seconds or HH:MM:SS[.ms] style timestamps
            if ':' in s:
                s = ox.time2ms(s) / 1000
            return float(s)
        t = list(map(parse_timestamp, t.split(',')))
        ext = '.%s' % format
        duration = stream.info['duration']

        # multipart request beyond first part, merge parts and chop that
        if not index and streams.count() > 1 and stream.info['duration'] < t[1]:
            video = NamedTemporaryFile(suffix=ext)
            r = item.merge_streams(video.name, resolution, format)
            if not r:
                return HttpResponseForbidden()
            path = video.name
            duration = sum(item.cache['durations'])

        content_type = mimetypes.guess_type(path)[0]
        if len(t) == 2 and t[1] > t[0] and duration >= t[1]:
            # valid in-range interval: chop and serve the clip
            response = HttpResponse(extract.chop(path, t[0], t[1]), content_type=content_type)
            filename = u"Clip of %s - %s-%s - %s %s%s" % (
                item.get('title'),
                # HH.MM.SS (drop the .ms suffix) so the name is filesystem-safe
                ox.format_duration(t[0] * 1000).replace(':', '.')[:-4],
                ox.format_duration(t[1] * 1000).replace(':', '.')[:-4],
                settings.SITENAME,
                item.public_id,
                ext
            )
            response['Content-Disposition'] = "attachment; filename*=UTF-8''%s" % quote(filename.encode('utf-8'))
            return response
        else:
            # invalid interval: fall back to serving the whole file
            filename = "%s - %s %s%s" % (
                item.get('title'),
                settings.SITENAME,
                item.public_id,
                ext
            )
            response = HttpFileResponse(path, content_type=content_type)
            response['Content-Disposition'] = "attachment; filename*=UTF-8''%s" % quote(filename.encode('utf-8'))
            return response
    if not settings.XSENDFILE and not settings.XACCELREDIRECT:
        # no accelerated file serving configured: let the webserver handle it
        return redirect(stream.media.url)
    response = HttpFileResponse(path)
    response['Cache-Control'] = 'public'
    return response
|
2011-11-12 09:57:51 +01:00
|
|
|
|
|
2016-03-11 11:48:13 +00:00
|
|
|
|
|
|
|
|
|
# Maps subtitle file extension to (HTTP content type, ox encoder module);
# used by srt() below to serialize annotation layers.
_subtitle_formats = {
    'srt': ('text/x-srt', ox.srt),
    'vtt': ('text/vtt', ox.vtt),
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def srt(request, id, layer, language=None, index=None, ext='srt'):
    """Download an item's annotation layer as a subtitle file (srt/vtt)."""
    item = get_object_or_404(models.Item, public_id=id)
    if not item.access(request.user):
        return HttpResponseForbidden()
    content_type, encoder = _subtitle_formats[ext]
    response = HttpResponse()
    # include the language in the filename when one was requested
    if language:
        filename = u"%s.%s.%s" % (item.get('title'), language, ext)
    else:
        filename = u"%s.%s" % (item.get('title'), ext)
    response['Content-Disposition'] = "attachment; filename*=UTF-8''%s" % quote(filename.encode('utf-8'))
    response['Content-Type'] = content_type
    response.write(item.srt(layer, language, encoder=encoder))
    return response
|
|
|
|
|
|
2011-11-12 09:57:51 +01:00
|
|
|
|
def random_annotation(request):
    """Redirect to a random annotation on a random item.

    Picks a uniformly random item, then a uniformly random annotation on it.
    """
    n = models.Item.objects.all().count()
    # FIX: random.randint(0, n) is inclusive of n, so qs[n] could raise
    # IndexError; randrange(n) yields 0..n-1.
    pos = random.randrange(n)
    item = models.Item.objects.all()[pos]
    n = item.annotations.all().count()
    pos = random.randrange(n)
    clip = item.annotations.all()[pos]
    return redirect('/%s' % clip.public_id)
|
2012-01-02 22:38:19 +05:30
|
|
|
|
|
2012-01-16 13:48:39 +05:30
|
|
|
|
def atom_xml(request):
    """Render an Atom feed of the 7 most recent rendered items.

    Entries include metadata, a media thumbnail, and (for users with the
    canDownloadVideo capability) torrent/video enclosure links.
    """
    add_updated = True
    feed = ET.Element("feed")
    feed.attrib['xmlns'] = 'http://www.w3.org/2005/Atom'
    feed.attrib['xmlns:media'] = 'http://search.yahoo.com/mrss/'
    feed.attrib['xml:lang'] = 'en'
    title = ET.SubElement(feed, "title")
    title.text = settings.SITENAME
    title.attrib['type'] = 'text'
    link = ET.SubElement(feed, "link")
    link.attrib['rel'] = 'self'
    link.attrib['type'] = 'application/atom+xml'
    atom_link = request.build_absolute_uri('/atom.xml')
    link.attrib['href'] = atom_link
    '''
    rights = ET.SubElement(feed, 'rights')
    rights.attrib['type'] = 'text'
    rights.text = "PGL"
    '''
    el = ET.SubElement(feed, 'id')
    el.text = atom_link

    # only include items the requesting user is allowed to see
    level = settings.CONFIG['capabilities']['canSeeItem']['guest']
    if not request.user.is_anonymous():
        level = request.user.profile.level
    for item in models.Item.objects.filter(level__lte=level, rendered=True).order_by('-created')[:7]:
        if add_updated:
            # feed-level <updated> mirrors the newest item's timestamp
            updated = ET.SubElement(feed, "updated")
            updated.text = item.modified.strftime("%Y-%m-%dT%H:%M:%SZ")
            add_updated = False

        page_link = request.build_absolute_uri('/%s' % item.public_id)

        entry = ET.Element("entry")
        title = ET.SubElement(entry, "title")
        title.text = ox.decode_html(item.get('title'))
        link = ET.SubElement(entry, "link")
        link.attrib['rel'] = 'alternate'
        link.attrib['href'] = "%s/info" % page_link
        updated = ET.SubElement(entry, "updated")
        updated.text = item.modified.strftime("%Y-%m-%dT%H:%M:%SZ")
        published = ET.SubElement(entry, "published")
        published.text = item.created.strftime("%Y-%m-%dT%H:%M:%SZ")
        el = ET.SubElement(entry, "id")
        el.text = page_link

        # prefer the director(s) as author, fall back to the uploading user
        if item.get('director'):
            el = ET.SubElement(entry, "author")
            name = ET.SubElement(el, "name")
            name.text = ox.decode_html(u', '.join(item.get('director')))
        elif item.user:
            el = ET.SubElement(entry, "author")
            name = ET.SubElement(el, "name")
            name.text = item.user.username

        for topic in item.get('topics', []):
            el = ET.SubElement(entry, "category")
            el.attrib['term'] = topic

        '''
        el = ET.SubElement(entry, "rights")
        el.text = "PGL"
        el = ET.SubElement(entry, "link")
        el.attrib['rel'] = "license"
        el.attrib['type'] = "text/html"
        el.attrib['href'] = item.licenseUrl
        '''
        '''
        el = ET.SubElement(entry, "contributor")
        name = ET.SubElement(el, "name")
        name.text = item.user.username
        '''

        description = item.get('description', item.get('summary'))
        if description:
            content = ET.SubElement(entry, "content")
            content.attrib['type'] = 'html'
            content.text = description

        format = ET.SubElement(entry, "format")
        format.attrib['xmlns'] = 'http://transmission.cc/FileFormat'
        streams = item.streams().filter(source=None).order_by('-id')
        if streams.exists():
            stream = streams[0]
            for key in ('size', 'duration', 'video_codec',
                        'framerate', 'width', 'height',
                        'audio_codec', 'samplerate', 'channels'):
                value = stream.info.get(key)
                # fall back to per-track info; codec keys map to 'codec'
                if not value and stream.info.get('video'):
                    value = stream.info['video'][0].get({
                        'video_codec': 'codec'
                    }.get(key, key))
                if not value and stream.info.get('audio'):
                    value = stream.info['audio'][0].get({
                        'audio_codec': 'codec'
                    }.get(key, key))
                if value and value != -1:
                    el = ET.SubElement(format, key)
                    # FIX: was `unicode(value)` — a NameError on Python 3;
                    # u'%s' % value is text on both Python 2 and 3
                    el.text = u'%s' % value
        el = ET.SubElement(format, 'pixel_aspect_ratio')
        el.text = u"1:1"

        if has_capability(request.user, 'canDownloadVideo'):
            if item.torrent:
                el = ET.SubElement(entry, "link")
                el.attrib['rel'] = 'enclosure'
                el.attrib['type'] = 'application/x-bittorrent'
                el.attrib['href'] = '%s/torrent/' % page_link
                el.attrib['length'] = '%s' % ox.get_torrent_size(item.torrent.path)
            # FIXME: loop over streams
            # for s in item.streams().filter(resolution=max(settings.CONFIG['video']['resolutions'])):
            for s in item.streams().filter(source=None):
                el = ET.SubElement(entry, "link")
                el.attrib['rel'] = 'enclosure'
                el.attrib['type'] = 'video/%s' % s.format
                el.attrib['href'] = '%s/%sp.%s' % (page_link, s.resolution, s.format)
                el.attrib['length'] = '%s' % s.media.size

        el = ET.SubElement(entry, "media:thumbnail")
        thumbheight = 96
        thumbwidth = int(thumbheight * item.stream_aspect)
        thumbwidth -= thumbwidth % 2  # keep the width even
        el.attrib['url'] = '%s/%sp.jpg' % (page_link, thumbheight)
        el.attrib['width'] = str(thumbwidth)
        el.attrib['height'] = str(thumbheight)
        feed.append(entry)

    return HttpResponse(
        '<?xml version="1.0" encoding="utf-8" ?>\n' + ET.tostring(feed).decode(),
        'application/atom+xml'
    )
|
|
|
|
|
|
2012-01-10 21:30:41 +05:30
|
|
|
|
def oembed(request):
    """
    oEmbed endpoint (https://oembed.com/).

    Reads ?url= pointing at an item page on this site and returns embed
    metadata as JSON (default) or XML (?format=xml). The embed size is
    clamped by optional ?maxwidth= / ?maxheight= (defaults 640x480).

    Raises Http404 if the url has no item path component; returns a JSON
    404 if no item with that public id exists.
    """
    format = request.GET.get('format', 'json')
    maxwidth = int(request.GET.get('maxwidth', 640))
    maxheight = int(request.GET.get('maxheight', 480))

    url = request.GET['url']
    parts = urlparse(url).path.split('/')
    if len(parts) < 2:
        raise Http404
    public_id = parts[1]
    item = get_object_or_404_json(models.Item, public_id=public_id)
    embed_url = request.build_absolute_uri('/%s' % public_id)
    if url.startswith(embed_url):
        # keep any extra path/fragment the caller already included
        embed_url = url
    if '#embed' not in embed_url:
        embed_url = '%s#embed' % embed_url

    oembed = {}
    oembed['version'] = '1.0'
    oembed['type'] = 'video'
    oembed['provider_name'] = settings.SITENAME
    oembed['provider_url'] = request.build_absolute_uri('/')
    oembed['title'] = item.get('title')
    # oembed['author_name'] = item.get('director')
    # oembed['author_url'] = ??
    # start from the largest configured resolution, then shrink to fit the
    # caller's maxwidth/maxheight while preserving the stream aspect ratio
    height = max(settings.CONFIG['video']['resolutions'])
    height = min(height, maxheight)
    width = int(round(height * item.stream_aspect))
    if width > maxwidth:
        width = maxwidth
        height = min(maxheight, int(width / item.stream_aspect))
    oembed['html'] = '<iframe width="%s" height="%s" src="%s" frameborder="0" allowfullscreen></iframe>' % (width, height, embed_url)
    oembed['width'] = width
    oembed['height'] = height
    thumbheight = 96
    thumbwidth = int(thumbheight * item.stream_aspect)
    thumbwidth -= thumbwidth % 2
    oembed['thumbnail_height'] = thumbheight
    oembed['thumbnail_width'] = thumbwidth
    oembed['thumbnail_url'] = request.build_absolute_uri('/%s/%sp.jpg' % (item.public_id, thumbheight))
    if format == 'xml':
        oxml = ET.Element('oembed')
        for key in oembed:
            e = ET.SubElement(oxml, key)
            # FIX: unicode() only exists on Python 2; u'%s' % is equivalent
            # there and works on Python 3 as well.
            e.text = u'%s' % oembed[key]
        return HttpResponse(
            '<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n' + ET.tostring(oxml).decode(),
            'application/xml'
        )
    return HttpResponse(json.dumps(oembed, indent=2), 'application/json')
|
2012-01-11 13:21:26 +05:30
|
|
|
|
def sitemap_xml(request):
    """
    Serve the cached sitemap.xml from MEDIA_ROOT.

    If the file is missing it is generated synchronously (so this request
    can serve it); if it is older than one day, a background refresh is
    queued and the current copy is served as-is.
    """
    path = os.path.abspath(os.path.join(settings.MEDIA_ROOT, 'sitemap.xml'))
    site_url = request.build_absolute_uri('/')
    one_day = 24 * 60 * 60
    if not os.path.exists(path):
        # first request ever: build inline so there is something to serve
        tasks.update_sitemap(site_url)
    elif time.mktime(time.localtime()) - os.stat(path).st_ctime > one_day:
        # stale: refresh asynchronously, keep serving the old file for now
        tasks.update_sitemap.delay(site_url)
    response = HttpFileResponse(path)
    response['Content-Type'] = 'application/xml'
    return response
|
2012-02-15 13:15:00 +05:30
|
|
|
|
def item_json(request, id):
    """
    JSON representation of a single item, including its annotation layers,
    restricted to items at or below the viewer's access level.

    Returns a JSON 404 response when no matching visible item exists.
    """
    level = settings.CONFIG['capabilities']['canSeeItem']['guest']
    if not request.user.is_anonymous():
        level = request.user.profile.level
    # first() issues a single LIMIT-1 query; the previous count()+qs[0]
    # pattern hit the database twice.
    item = models.Item.objects.filter(public_id=id, level__lte=level).first()
    if item is None:
        response = json_response(status=404, text='not found')
    else:
        response = item.json()
        response['layers'] = item.get_layers(request.user)
    return render_to_json_response(response)
|
|
|
|
def item_xml(request, id):
    """
    XML representation of a single item, including its annotation layers,
    restricted to items at or below the viewer's access level.

    Lists become repeated elements, dicts become child elements (falsy
    values are dropped), scalars become text nodes. 'resolution' is
    expanded from a [width, height] pair into named fields. Returns a
    JSON 404 response when no matching visible item exists.
    """
    level = settings.CONFIG['capabilities']['canSeeItem']['guest']
    if not request.user.is_anonymous():
        level = request.user.profile.level
    # first() issues a single LIMIT-1 query; the previous count()+qs[0]
    # pattern hit the database twice.
    item = models.Item.objects.filter(public_id=id, level__lte=level).first()
    if item is None:
        response = json_response(status=404, text='not found')
        response = render_to_json_response(response)
    else:
        j = item.json()
        j['layers'] = item.get_layers(request.user)
        if 'resolution' in j:
            j['resolution'] = {'width': j['resolution'][0], 'height': j['resolution'][1]}

        def xmltree(root, key, data):
            # Recursively mirror a json-style structure as ElementTree nodes.
            if isinstance(data, (list, tuple)):
                e = ET.SubElement(root, key)
                for value in data:
                    xmltree(e, key, value)
            elif isinstance(data, dict):
                for k in data:
                    if data[k]:
                        xmltree(root, k, data[k])
            else:
                e = ET.SubElement(root, key)
                # FIX: unicode() only exists on Python 2; u'%s' % is
                # equivalent there and works on Python 3 as well.
                e.text = u'%s' % data

        oxml = ET.Element('item')
        xmltree(oxml, 'item', j)
        response = HttpResponse(
            '<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n' + ET.tostring(oxml).decode(),
            'application/xml'
        )
    return response
|
2012-01-09 14:36:35 +05:30
|
|
|
|
def item(request, id):
    """
    Public HTML page for a single item.

    Renders 'item.html' with selected metadata and, for anonymous users on
    non-IMDb sites, pre-rendered annotation clips (logged-in users get them
    via javascript instead, which keeps this page fast). Falls back to
    'index.html' with a minimal context when the item does not exist or is
    above the viewer's access level.
    """
    id = id.split('/')[0]
    view = None
    template = 'index.html'
    level = settings.CONFIG['capabilities']['canSeeItem']['guest']
    if not request.user.is_anonymous():
        level = request.user.profile.level
    qs = models.Item.objects.filter(public_id=id, level__lte=level)
    if qs.count() == 0:
        ctx = {
            'base_url': request.build_absolute_uri('/'),
            'settings': settings
        }
    else:
        item = qs[0]
        template = 'item.html'
        keys = [
            'year',
            'director',
            'writer',
            'producer',
            'cinematographer',
            'editor',
            'actor',
            'topic',
        ]
        if not settings.USE_IMDB:
            keys += [
                'summary'
            ]
        # FIX: the original list was missing commas after 'duration' and
        # 'aspectratio', so implicit string concatenation collapsed three
        # entries into one bogus key 'durationaspectratiohue'.
        keys += [
            'duration',
            'aspectratio',
            'hue',
            'saturation',
            'lightness',
            'volume',
            'numberofcuts',
        ]

        data = []
        for k in keys:
            value = item.get(k)
            key = utils.get_by_id(settings.CONFIG['itemKeys'], k)
            if value:
                if k == 'actor':
                    title = 'Cast'
                else:
                    title = key['title'] if key else k.capitalize()
                if isinstance(value, list):
                    # FIX: unicode() only exists on Python 2; u'%s' % is
                    # equivalent there and works on Python 3 as well.
                    value = u', '.join([u'%s' % v for v in value])
                elif key and key.get('type') == 'float':
                    value = '%0.3f' % value
                elif key and key.get('type') == 'time':
                    value = ox.format_duration(value * 1000)
                data.append({'key': k, 'title': title, 'value': value})
        clips = []
        clip = {'in': 0, 'annotations': []}
        # logged in users should have javascript. not adding annotations makes load faster
        if not settings.USE_IMDB and request.user.is_anonymous():
            # group consecutive annotations that share a start point into clips
            for a in item.annotations.exclude(
                layer='subtitles'
            ).exclude(
                value=''
            ).filter(
                layer__in=models.Annotation.public_layers()
            ).order_by('start', 'end', 'sortvalue'):
                if clip['in'] < a.start:
                    if clip['annotations']:
                        clip['annotations'] = '<br />\n'.join(clip['annotations'])
                        clips.append(clip)
                    clip = {'in': a.start, 'annotations': []}
                clip['annotations'].append(a.value)
            # flush the trailing clip
            if clip['annotations']:
                clip['annotations'] = '<br />\n'.join(clip['annotations'])
                clips.append(clip)
        head_title = item.get('title', '')
        title = item.get('title', '')
        if item.get('director'):
            head_title += u' (%s)' % u', '.join(item.get('director', []))
        if item.get('year'):
            head_title += u' %s' % item.get('year')
            title += u' (%s)' % item.get('year')
        if view:
            head_title += u' – %s' % view
        head_title += u' – %s' % settings.SITENAME
        head_title = ox.decode_html(head_title)
        title = ox.decode_html(title)
        ctx = {
            'current_url': request.build_absolute_uri(request.get_full_path()),
            'base_url': request.build_absolute_uri('/'),
            'url': request.build_absolute_uri('/%s' % id),
            'id': id,
            'settings': settings,
            'data': data,
            'clips': clips,
            'icon': settings.CONFIG['user']['ui']['icons'] == 'frames' and 'icon' or 'poster',
            'title': title,
            'head_title': head_title,
            'description': item.get_item_description(),
            'description_html': item.get_item_description_html()
        }
        if not settings.USE_IMDB:
            value = item.get('topic' in keys and 'topic' or 'keywords')
            if isinstance(value, list):
                value = ', '.join(value)
            if value:
                ctx['keywords'] = ox.strip_tags(value)

    return render(request, template, ctx)