pandora/pandora/archive/views.py

434 lines
15 KiB
Python
Raw Normal View History

# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division
import os.path
from datetime import datetime
from django import forms
2011-01-01 11:44:42 +00:00
from django.shortcuts import get_object_or_404, redirect
from django.conf import settings
2011-01-19 12:06:03 +00:00
from django.db.models import Count, Sum
2010-12-25 10:14:52 +00:00
from ox.utils import json
from ox.django.decorators import login_required_json
from ox.django.shortcuts import render_to_json_response, get_object_or_404_json, json_response
from ox.django.views import task_status
2011-01-29 11:19:11 +00:00
from item import utils
from item.models import get_item
2011-01-19 12:06:03 +00:00
from item.views import _parse_query
2010-11-08 16:34:25 +00:00
import item.tasks
from api.actions import actions
import models
import tasks
2010-08-10 22:01:41 +00:00
@login_required_json
def removeVolume(request):
    '''
    Remove one of the current user's volumes and the files it tracks.

    param data {
        volume: string, name of the volume to remove
    }
    return {
        status: {'code': int, 'text': string}
    }
    '''
    data = json.loads(request.POST['data'])
    user = request.user
    try:
        volume = models.Volume.objects.get(user=user, name=data['volume'])
        # Django managers do not expose delete(); calling it on the related
        # manager raised AttributeError. Delete via a QuerySet instead.
        volume.files.all().delete()
        volume.delete()
        response = json_response()
    except models.Volume.DoesNotExist:
        response = json_response(status=404, text='volume not found')
    return render_to_json_response(response)
actions.register(removeVolume, cache=False)
2010-08-10 22:01:41 +00:00
2011-01-01 11:44:42 +00:00
2010-08-10 22:01:41 +00:00
@login_required_json
def update(request):
    '''
    Update file/instance information for one of the user's volumes.

    2 calls possible:
        volume/files
        info
    call volume/files first and fill in requested info after that

    param data {
        volume: '',
        files: [
            {oshash:, path:, mtime:, },
            ...
        ],
        info: {oshash: object}
    }
    return {
        status: {'code': int, 'text': string},
        data: {
            info: list,
            data: list,
            file: list
        }
    }
    '''
    data = json.loads(request.POST['data'])
    user = request.user
    response = json_response({'info': [], 'data': [], 'file': []})
    # restrict the "still missing" queries below to the volume being
    # updated; the volume may not exist yet on the first call (it is
    # created by the update_files task), in that case fall back to all
    # of the user's volumes
    volume = None
    if 'volume' in data:
        try:
            volume = models.Volume.objects.get(user=user, name=data['volume'])
        except models.Volume.DoesNotExist:
            volume = None
    if 'files' in data:
        #update files info async, this takes to long otherwise
        #FIXME: how can client know if update is done? possibly with taskStatus?
        t = tasks.update_files.delay(user.username, data['volume'], data['files'])
        response['data']['taskId'] = t.task_id
        user_profile = user.get_profile()
        user_profile.files_updated = datetime.now()
        user_profile.save()
    if 'info' in data:
        for oshash in data['info']:
            info = data['info'][oshash]
            instance = models.Instance.objects.filter(file__oshash=oshash, volume__user=user)
            if instance.count() > 0:
                instance = instance[0]
                if not instance.file.info:
                    # timestamps belong to the instance, not the file
                    for key in ('atime', 'mtime', 'ctime'):
                        if key in info:
                            del info[key]
                    instance.file.info = info
                    instance.file.save()
    # report back which files the client still needs to deliver data for
    files = models.Instance.objects.filter(volume__user=user, file__available=False)
    if volume:
        files = files.filter(volume=volume)
    response['data']['info'] = [f.file.oshash for f in files.filter(file__info='{}')]
    #needs some flag to find those that are actually used main is to generic
    response['data']['data'] = [f.file.oshash for f in files.filter(file__is_video=True, file__is_main=True)]
    response['data']['file'] = [f.file.oshash for f in files.filter(file__is_subtitle=True)]
    return render_to_json_response(response)
actions.register(update, cache=False)
2010-08-07 14:31:20 +00:00
2011-01-01 11:44:42 +00:00
@login_required_json
def encodingProfile(request):
    '''
    Return the server-side video encoding profile name.

    return {
        status: {'code': int, 'text': string},
        data: {profile: string}
    }
    '''
    payload = json_response({'profile': settings.VIDEO_PROFILE})
    return render_to_json_response(payload)
actions.register(encodingProfile)
2011-01-01 11:44:42 +00:00
2010-08-10 22:01:41 +00:00
@login_required_json
def upload(request):
    '''
    Receive frame images and/or file data for a known file.

    oshash: string
    frame: [] //multipart frames
    file: [] //multipart file
    return {
        status: {'code': int, 'text': string},
        data: {
            info: object,
            rename: object
        }
    }
    '''
    user = request.user
    f = get_object_or_404_json(models.File, oshash=request.POST['oshash'])
    # default response: requests without any uploaded part are invalid
    # (previously `response` was unbound in that case -> NameError)
    response = json_response(status=400, text='no frame or file uploaded')
    if 'frame' in request.FILES:
        # frames may only be uploaded once per file
        if f.frames.count() == 0:
            for frame in request.FILES.getlist('frame'):
                name = frame.name
                #float required?
                position = float(os.path.splitext(name)[0])
                fr = models.Frame(file=f, position=position)
                fr.save()
                fr.frame.save(name, frame)
            response = json_response({})
        else:
            response = json_response(status=403, text='permission denied')
    if 'file' in request.FILES:
        # raw file data may only be uploaded while the file is unavailable
        if not f.available:
            f.data.save('data.raw', request.FILES['file'])
            f.available = True
            f.save()
            response = json_response(text='file saved')
        else:
            response = json_response(status=403, text='permission denied')
    return render_to_json_response(response)
actions.register(upload, cache=False)
2010-08-07 14:31:20 +00:00
2011-01-01 11:44:42 +00:00
2010-08-07 14:31:20 +00:00
class VideoChunkForm(forms.Form):
    # Form used by firefogg_upload to validate one chunk of a chunked
    # video upload.
    chunk = forms.FileField()  # the current chunk of encoded video data
    chunkId = forms.IntegerField(required=False)  # sequence number of this chunk
    done = forms.IntegerField(required=False)  # non-zero when this is the last chunk
2011-01-01 11:44:42 +00:00
2010-08-07 14:31:20 +00:00
@login_required_json
def firefogg_upload(request):
    '''
    Handle Firefogg chunked video uploads.

    A GET with ?oshash=...&profile=... initializes the upload and returns
    the uploadUrl; POSTs to that url then deliver the encoded chunks.
    '''
    profile = request.GET['profile']
    # Firefogg appends the container extension to the profile name
    if profile.endswith('.webm'):
        profile = os.path.splitext(profile)[0]
    oshash = request.GET['oshash']
    #handle video upload
    if request.method == 'POST':
        #post next chunk
        if 'chunk' in request.FILES and oshash:
            f = get_object_or_404(models.File, oshash=oshash)
            form = VideoChunkForm(request.POST, request.FILES)
            # NOTE(review): if the form is invalid (or profile/permissions
            # don't match), `response` below is never assigned and the
            # return raises UnboundLocalError — confirm and handle
            if form.is_valid() and profile == settings.VIDEO_PROFILE and f.editable(request.user):
                c = form.cleaned_data['chunk']
                chunk_id = form.cleaned_data['chunkId']
                response = {
                    'result': 1,
                    'resultUrl': request.build_absolute_uri('/')
                }
                if not f.save_chunk(c, chunk_id):
                    response['result'] = -1
                elif form.cleaned_data['done']:
                    # last chunk received: mark the file available and
                    # kick off stream generation for its item
                    f.available = True
                    f.save()
                    #FIXME: this fails badly if rabbitmq goes down
                    try:
                        t = item.tasks.update_streams.delay((f.item.itemId))
                        response['resultUrl'] = t.task_id
                    except:
                        pass
                    response['result'] = 1
                    response['done'] = 1
            return render_to_json_response(response)
    #init upload (GET)
    elif oshash and profile == settings.VIDEO_PROFILE:
        #404 if oshash is not known, files must be registered via update api first
        f = get_object_or_404(models.File, oshash=oshash)
        if f.editable(request.user):
            # drop any previously uploaded video and restart the upload
            if f.video:
                f.video.delete()
            f.available = False
            f.save()
            response = {
                #is it possible to not hardcode url here?
                'uploadUrl': request.build_absolute_uri('/api/upload/?oshash=%s&profile=%s' % (f.oshash, profile)),
                'result': 1
            }
            return render_to_json_response(response)
    response = json_response(status=400, text='this request requires POST')
    return render_to_json_response(response)
2011-01-01 11:44:42 +00:00
@login_required_json
def taskStatus(request):
    '''
    Return the status of a background task (e.g. one started by update).

    param data {
        task_id: string
    }
    return {
        status: {'code': int, 'text': string},
        data: task status information as returned by ox.django.views.task_status
    }
    '''
    #FIXME: should check if user has permissions to get status
    data = json.loads(request.POST['data'])
    task_id = data['task_id']
    response = task_status(request, task_id)
    return render_to_json_response(response)
actions.register(taskStatus, cache=False)
2011-01-01 11:44:42 +00:00
@login_required_json
def editFile(request):
    '''
    change file / item link

    param data {
        oshash: hash of file
        itemId: new itemId
    }
    return {
        status: {'code': int, 'text': string},
        data: {
            imdbId:string
        }
    }
    '''
    #FIXME: permissions, need to be checked
    data = json.loads(request.POST['data'])
    f = get_object_or_404_json(models.File, oshash=data['oshash'])
    response = json_response()
    # NOTE(review): this compares the database pk (f.item.id) with the
    # public itemId string from the client — looks like it should be
    # f.item.itemId; as written the branch is almost always taken. Confirm
    # against the Item model.
    if f.item.id != data['itemId']:
        if len(data['itemId']) != 7:
            # not a 7-char imdb id: derive item info from the folder name
            # of one of the file's instances instead
            folder = f.instances.all()[0].folder
            item_info = utils.parse_path(folder)
            item = get_item(item_info)
        else:
            item = get_item({'imdbId': data['itemId']})
        f.item = item
        f.save()
        #FIXME: other things might need updating here
        response = json_response(text='updated')
    return render_to_json_response(response)
actions.register(editFile, cache=False)
2010-08-07 14:31:20 +00:00
2011-01-01 11:44:42 +00:00
2010-09-14 14:10:37 +00:00
def lookup_file(request, oshash):
    '''Redirect to the page of the item this file belongs to (404 if the
    oshash is unknown).'''
    matched = get_object_or_404(models.File, oshash=oshash)
    target = matched.item.get_absolute_url()
    return redirect(target)
2010-09-14 14:10:37 +00:00
2011-01-19 12:06:03 +00:00
def _order_query(qs, sort, prefix=''):
order_by = []
if len(sort) == 1:
sort.append({'operator': '+', 'key': 'name'})
sort.append({'operator': '-', 'key': 'created'})
'''
2011-01-19 12:06:03 +00:00
if sort[0]['key'] == 'title':
sort.append({'operator': '-', 'key': 'year'})
sort.append({'operator': '+', 'key': 'director'})
elif sort[0]['key'] == 'director':
sort.append({'operator': '-', 'key': 'year'})
sort.append({'operator': '+', 'key': 'title'})
elif sort[0]['key'] == 'year':
sort.append({'operator': '+', 'key': 'director'})
sort.append({'operator': '+', 'key': 'title'})
elif not sort[0]['key'] in ('value', 'value_sort'):
sort.append({'operator': '+', 'key': 'director'})
sort.append({'operator': '-', 'key': 'year'})
sort.append({'operator': '+', 'key': 'title'})
'''
2011-01-19 12:06:03 +00:00
for e in sort:
operator = e['operator']
if operator != '-':
operator = ''
key = {'id': 'item__itemId', 'name': 'sort_name'}.get(e['key'], e['key'])
#if operator=='-' and '%s_desc'%key in models.ItemSort.descending_fields:
# key = '%s_desc' % key
order = '%s%s%s' % (operator, prefix, key)
order_by.append(order)
if order_by:
qs = qs.order_by(*order_by)
return qs
def findFiles(request):
    '''
    param data {
        'query': query,
        'sort': array,
        'range': array
    }
    query: query object, more on query syntax at
           https://wiki.0x2620.org/wiki/pandora/QuerySyntax
    sort: array of key, operator dicts
        [
            {
                key: "year",
                operator: "-"
            },
            {
                key: "director",
                operator: ""
            }
        ]
    range: result range, array [from, to]
    keys: array of keys to return
    group: group elements by, country, genre, director...

    with keys, items is list of dicts with requested properties:
    return {'status': {'code': int, 'text': string},
            'data': {items: array}}

    Groups
    param data {
        'query': query,
        'key': string,
        'group': string,
        'range': array
    }
    query: query object, more on query syntax at
           https://wiki.0x2620.org/wiki/pandora/QuerySyntax
    range: result range, array [from, to]
    keys: array of keys to return
    group: group elements by, country, genre, director...

    possible values for keys: name, items

    with keys
    items contains list of {'name': string, 'items': int}:
    return {'status': {'code': int, 'text': string},
            'data': {items: array}}

    without keys: return number of items in given query
    return {'status': {'code': int, 'text': string},
            'data': {items: int}}

    Positions
    param data {
        'query': query,
        'ids': []
    }
    query: query object, more on query syntax at
           https://wiki.0x2620.org/wiki/pandora/QuerySyntax
    ids: ids of items for which positions are required
    return {
        status: {...},
        data: {
            positions: {
                id: position
            }
        }
    }
    '''
    data = json.loads(request.POST['data'])
    if settings.JSON_DEBUG:
        print json.dumps(data, indent=2)
    query = _parse_query(data, request.user)
    response = json_response({})
    if 'group' in query:
        # grouped request: return facet values with item counts
        if 'sort' in query:
            if len(query['sort']) == 1 and query['sort'][0]['key'] == 'items':
                # sorting by count; the direction is inverted for years
                if query['group'] == "year":
                    order_by = query['sort'][0]['operator'] == '-' and 'items' or '-items'
                else:
                    order_by = query['sort'][0]['operator'] == '-' and '-items' or 'items'
                if query['group'] != "keyword":
                    order_by = (order_by, 'value_sort')
                else:
                    order_by = (order_by,)
            else:
                # sorting by facet value, counts break ties
                order_by = query['sort'][0]['operator'] == '-' and '-value_sort' or 'value_sort'
                order_by = (order_by, 'items')
        else:
            order_by = ('-value_sort', 'items')
        response['data']['items'] = []
        items = 'items'
        item_qs = query['qs']
        qs = models.Facet.objects.filter(key=query['group']).filter(item__id__in=item_qs)
        qs = qs.values('value').annotate(items=Count('id')).order_by(*order_by)
        if 'ids' in query:
            # positions of the given facet values in the grouped result
            #FIXME: this does not scale for larger results
            response['data']['positions'] = {}
            ids = [j['value'] for j in qs]
            response['data']['positions'] = utils.get_positions(ids, query['ids'])
        elif 'range' in data:
            # a slice of the grouped result
            qs = qs[query['range'][0]:query['range'][1]]
            response['data']['items'] = [{'name': i['value'], 'items': i[items]} for i in qs]
        else:
            # no range: just the number of groups
            response['data']['items'] = qs.count()
    elif 'ids' in query:
        # positions request: map item ids to their position in the result
        #FIXME: this does not scale for larger results
        qs = _order_query(query['qs'], query['sort'])
        response['data']['positions'] = {}
        ids = [j['itemId'] for j in qs.values('itemId')]
        response['data']['positions'] = utils.get_positions(ids, query['ids'])
    elif 'keys' in query:
        # item request: return the requested keys for the files in range
        response['data']['items'] = []
        qs = models.File.objects.filter(item__in=query['qs'])
        #qs = _order_query(qs, query['sort'])
        keys = query['keys']
        qs = qs[query['range'][0]:query['range'][1]]
        response['data']['items'] = [f.json(keys) for f in qs]
    else: # otherwise stats
        items = query['qs']  # NOTE(review): unused — overwritten result, looks like leftover code
        files = models.File.objects.filter(item__in=query['qs'])
        response['data']['items'] = files.count()
    return render_to_json_response(response)
actions.register(findFiles)