merge
commit d30c2ba78e
19 changed files with 163 additions and 27 deletions

README (2 changes)

@@ -54,7 +54,7 @@ Running developer environment:
 in one terminal:
 ./manage.py runserver
 and in another one:
-./manage.py celeryd -Q default,encoding
+./manage.py celeryd -Q default,encoding -B

 Updating database:
 right now database updates are not managed, each time you update to current bzr

@@ -17,6 +17,7 @@ test -e /var/log/pandora || (mkdir -p /var/log/pandora && chown $USER:$USER /var/log/pandora)
 test -e /var/run/pandora || (mkdir -p /var/run/pandora && chown $USER:$USER /var/run/pandora)
 cd $VENV/pandora
 exec /usr/bin/sudo -u $USER $VENV/bin/python $VENV/pandora/manage.py celeryd \
+    -B -s /var/run/pandora/celerybeat-schedule \
     -Q encoding \
     -n pandora-encoding \
     -p /var/run/pandora/pandora-encoding.pid \
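
Note: the -B flag added to the README command and to the encoding worker above embeds a celerybeat scheduler in the worker process, and -s points its schedule state file into /var/run/pandora; without a running beat scheduler, the @periodic_task jobs introduced later in this commit (session cleanup, event and place match updates) would never be dispatched.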

@@ -16,6 +16,7 @@ script
 test -e /var/log/pandora || (mkdir -p /var/log/pandora && chown $USER:$USER /var/log/pandora)
 test -e /var/run/pandora || (mkdir -p /var/run/pandora && chown $USER:$USER /var/run/pandora)
 cd $VENV/pandora
+./manage.py compile_pyc
 exec /usr/bin/sudo -u $USER $VENV/bin/gunicorn_django \
     --bind 127.0.0.1:2620 \
     --timeout 90 \

@@ -557,7 +557,8 @@
 // fixme: there should be no magic applied to this file
     "id": "{{settings.SITEID}}",
     "name": "{{settings.SITENAME}}",
-    "url": "{{settings.URL}}"
+    "url": "{{settings.URL}}",
+    "videoprefix": ""
 },
 "sitePages": [
     {"id": "about", "title": "About"},

@@ -9,8 +9,11 @@ import ox
 from archive import extract
 from clip.models import Clip

-import utils
 import managers
+import utils
+from tasks import update_matching_events, update_matching_places


 def load_layers(layers):

@@ -125,6 +128,9 @@ class Annotation(models.Model):
         super(Annotation, self).save(*args, **kwargs)
         if set_public_id:
             self.set_public_id()
+        #how expensive is this?
+        #update_matching_events.delay(self.value)
+        #update_matching_places.delay(self.value)

     def json(self, layer=False, keys=None):
         j = {

pandora/annotation/tasks.py (new file, 27 lines)

@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# vi:si:et:sw=4:sts=4:ts=4
+from celery.decorators import task
+
+
+
+@task(ignore_resulsts=True, queue='default')
+def update_matching_events(value):
+    from event.models import Event
+    ids = [e['id'] for e in Event.objects.all().values('id')]
+    for i in ids:
+        e = Event.objects.get(pk=i)
+        for name in [e.name] + list(e.alternativeNames):
+            if name in value:
+                e.update_matches()
+                break
+
+@task(ignore_resulsts=True, queue='default')
+def update_matching_places(value):
+    from place.models import Place
+    ids = [e['id'] for e in Place.objects.all().values('id')]
+    for i in ids:
+        e = Place.objects.get(pk=i)
+        for name in [e.name] + list(e.alternativeNames):
+            if name in value:
+                e.update_matches()
+                break
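
Two things worth noting: Celery's decorator option is spelled ignore_result, so the ignore_resulsts keyword above most likely has no effect and task results are still stored; and the commented-out .delay() calls added to Annotation.save() earlier in this diff show how these tasks are meant to be queued. A minimal sketch of that wiring (not part of the commit, helper name is hypothetical):

    # sketch only: queue the matching tasks when an annotation is saved,
    # as the commented-out calls in Annotation.save() suggest
    from annotation.tasks import update_matching_events, update_matching_places

    def queue_matching(annotation):
        # each task rescans all events/places on the 'default' queue and calls
        # update_matches() on any whose name or alternative name occurs in the
        # annotation text
        update_matching_events.delay(annotation.value)
        update_matching_places.delay(annotation.value)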

@@ -1,4 +1,3 @@
-
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
 from __future__ import division, with_statement

@@ -28,6 +27,7 @@ def load_config():
     config['site']['name'] = settings.SITENAME
     config['site']['sectionName'] = settings.SITENAME
     config['site']['url'] = settings.URL
+    config['site']['videoprefix'] = settings.VIDEO_PREFIX

     config['keys'] = {}
     for key in config['itemKeys']:

pandora/app/log.py (new file, 63 lines)

@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+# vi:si:et:sw=4:sts=4:ts=4
+from __future__ import division, with_statement
+
+import logging
+import sys
+
+
+class ErrorHandler(logging.Handler):
+    def __init__(self):
+        logging.Handler.__init__(self)
+
+    """An exception log handler that logs entries into the log database.
+
+    If the request is passed as the first argument to the log record,
+    request data will be provided in the
+    """
+    def emit(self, record):
+        import traceback
+        from django.views.debug import ExceptionReporter
+        from django.conf import settings
+        import models
+        user = None
+        line = 0
+        text = ''
+        url = ''
+        try:
+            if sys.version_info < (2,5):
+                # A nasty workaround required because Python 2.4's logging
+                # module doesn't support passing in extra context.
+                # For this handler, the only extra data we need is the
+                # request, and that's in the top stack frame.
+                request = record.exc_info[2].tb_frame.f_locals['request']
+            else:
+                request = record.request
+
+            request_repr = repr(request)
+            if request.user.is_authenticated():
+                user = request.user
+            url = request.META.get('PATH_INFO', '')
+        except:
+            request = None
+            request_repr = "%s %s\n\nRequest repr() unavailable" % (record.levelname, record.msg)
+
+        if record.exc_info:
+            stack_trace = '\n'.join(traceback.format_exception(*record.exc_info))
+            stack_info = traceback.extract_tb(record.exc_info[2])
+            if stack_info:
+                url = stack_info[-1][0]
+                line = stack_info[-1][1]
+        else:
+            stack_trace = 'No stack trace available'
+
+        text = "%s\n\n%s" % (stack_trace, request_repr)
+        if text:
+            l = models.Log(
+                text=text,
+                line=line,
+                url=url
+            )
+            if user:
+                l.user = user
+            l.save()
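
ErrorHandler plays the role of Django's AdminEmailHandler but writes into the Log model instead of mailing the admins; on Python 2.5+ it expects the request to arrive on the log record, which Django's django.request logger provides by passing extra={'request': request}. A rough usage sketch (view and helper are hypothetical, not part of the commit):

    import logging

    logger = logging.getLogger('django.request')

    def my_view(request):            # hypothetical view
        try:
            do_something_risky()     # hypothetical helper
        except Exception:
            # Django attaches the request the same way when logging a 500;
            # ErrorHandler.emit() reads it back as record.request
            logger.error('error in %s' % request.path, exc_info=True,
                         extra={'request': request})
            raise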

@@ -24,7 +24,7 @@ class Log(models.Model):
     text = models.TextField(blank=True)

     def __unicode__(self):
-        return self.id
+        return u"%s" % self.id

     def json(self):
         return {

pandora/app/tasks.py (new file, 14 lines)

@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# vi:si:et:sw=4:sts=4:ts=4
+import datetime
+
+from celery.decorators import task, periodic_task
+from celery.task.schedules import crontab
+
+
+@periodic_task(run_every=crontab(hour=6, minute=0), queue='encoding')
+def cron(**kwargs):
+    from django.db import transaction
+    from django.contrib.sessions.models import Session
+    Session.objects.filter(expire_date__lt=datetime.datetime.now()).delete()
+    transaction.commit_unless_managed()
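
With the embedded beat scheduler enabled above, this cron task fires daily at 06:00 on the encoding queue and purges expired sessions; the update_all_matches tasks added below run on the same queue at 07:30 (events) and 06:30 (places).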

@@ -108,17 +108,18 @@ def log(request):
         }
     '''
     data = json.loads(request.POST['data'])
-    if not request.user.is_authenticated:
+    if request.user.is_authenticated():
         user = request.user
     else:
         user = None
    if 'text' in data:
        l = models.Log(
-            user=user,
            text=data['text'],
            line=int(data.get('line', 0)),
            url=data.get('url', '')
        )
+        if user:
+            l.user = user
        l.save()
    response = json_response()
    return render_to_json_response(response)

@@ -1,11 +1,9 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
 from __future__ import division, with_statement
-import unicodedata
-import string

 from django.db import models
-from django.contrib.auth.models import User, Group
+from django.contrib.auth.models import User
 from django.db.models import Q

 import ox

@@ -16,6 +14,7 @@ from item.models import Item
 from item import utils
 from person.models import get_name_sort
 from title.models import get_title_sort

 import managers
+

@@ -3,12 +3,21 @@
 from datetime import timedelta

 from celery.decorators import task, periodic_task
+from celery.task.schedules import crontab

-import models
+from models import Event
+
+
+@periodic_task(run_every=crontab(hour=7, minute=30), queue='encoding')
+def update_all_matches(**kwargs):
+    ids = [e['id'] for e in Event.objects.all().values('id')]
+    for i in ids:
+        e = Event.objects.get(pk=i)
+        e.update_matches()

 @task(ignore_resulsts=True, queue='default')
 def update_matches(eventId):
-    event = models.Event.objects.get(pk=eventId)
+    event = Event.objects.get(pk=eventId)
     event.update_matches()

@@ -3,6 +3,7 @@
 from __future__ import division, with_statement

 from datetime import datetime
+import math
 import os.path
 import re
 import subprocess

@@ -1083,8 +1084,9 @@ class Item(models.Model):
                     annotation.save()
             #otherwise add empty 5 seconds annotation every minute
             if not subtitles_added:
-                i = offset
-                while i < offset + f.duration - 5:
+                for i in range(int (offset / 60) * 60 + 60,
+                               int(offset + f.duration) - 5,
+                               60):
                     annotation = Annotation(
                         item=self,
                         layer=layer,

@@ -1094,7 +1096,6 @@ class Item(models.Model):
                         user=user
                     )
                     annotation.save()
-                    i += 60
             offset += f.duration
         self.update_find()
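
The loop change above replaces the manual while counter with a range() that starts at the first full minute after the part's offset and steps in 60 second increments until 5 seconds before the part ends. A worked example with illustrative numbers (not from the commit):

    # part starts 130s into the item and is 200s long
    offset, duration = 130, 200
    list(range(int(offset / 60) * 60 + 60, int(offset + duration) - 5, 60))
    # -> [180, 240, 300]: placeholder annotations now land on full minutes,
    # where the old while loop produced 130, 190, 250, 310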

@@ -3,11 +3,20 @@
 from datetime import timedelta

 from celery.decorators import task, periodic_task
+from celery.task.schedules import crontab

 import models

+
+@periodic_task(run_every=crontab(hour=6, minute=30), queue='encoding')
+def update_all_matches(**kwargs):
+    ids = [p['id'] for p in models.Place.objects.all().values('id')]
+    for i in ids:
+        p = models.Place.objects.get(pk=i)
+        p.update_matches()

 @task(ignore_resulsts=True, queue='default')
 def update_matches(id):
     place = models.Place.objects.get(pk=id)
     place.update_matches()

@@ -75,9 +75,13 @@ MEDIA_ROOT = normpath(join(PROJECT_ROOT, '..', 'data'))
 STATIC_ROOT = normpath(join(PROJECT_ROOT, '..', 'static'))
 TESTS_ROOT = join(PROJECT_ROOT, 'tests')

+#if videos are served from another subdomain
+VIDEO_PREFIX = ''
+
 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
 # trailing slash if there is a path component (optional in other cases).
 # Examples: "http://media.lawrence.com", "http://example.com/media/"

 MEDIA_URL = '/data/'

 STATIC_URL = '/static/'

@@ -140,23 +144,19 @@ INSTALLED_APPS = (
     'urlalias',
 )

-# A sample logging configuration. The only tangible logging
-# performed by this configuration is to send an email to
-# the site admins on every HTTP 500 error.
-# See http://docs.djangoproject.com/en/dev/topics/logging for
-# more details on how to customize your logging configuration.
+# Log errors into db
 LOGGING = {
     'version': 1,
     'disable_existing_loggers': False,
     'handlers': {
-        'mail_admins': {
+        'errors': {
             'level': 'ERROR',
-            'class': 'django.utils.log.AdminEmailHandler'
+            'class': 'pandora.app.log.ErrorHandler'
         }
     },
     'loggers': {
         'django.request': {
-            'handlers': ['mail_admins'],
+            'handlers': ['errors'],
             'level': 'ERROR',
             'propagate': True,
         },
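
The LOGGING change swaps Django's AdminEmailHandler for the new pandora.app.log.ErrorHandler, so unhandled django.request errors are stored as Log rows instead of being mailed to the site admins. The VIDEO_PREFIX setting added above is exposed to clients via config['site']['videoprefix'] in app/config.py and the videoprefix key in the site template.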

@@ -417,7 +417,11 @@ pandora.ui.filesView = function(options, self) {
         }, function(result) {
             ['title', 'director', 'year'].forEach(function(key) {
                 if (result.data[key]) {
-                    self['$' + key + 'Input'].options({value: result.data[key]});
+                    self['$' + key + 'Input'].options({
+                        value: key == 'director'
+                            ? result.data[key].join(', ')
+                            : result.data[key]
+                    });
                 }
             });
             updateForm();
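
Since the backend returns director as a list of names, the input field now joins it with ', ' for display, while title and year are passed through unchanged.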

@@ -64,7 +64,7 @@ pandora.ui.item = function() {
         });
         pandora.site.video.resolutions.forEach(function(resolution) {
             video[resolution] = Ox.range(result.data.parts).map(function(i) {
-                return '/' + pandora.user.ui.item + '/'
+                return pandora.site.site.videoprefix + '/' + pandora.user.ui.item + '/'
                     + resolution + 'p' + (i + 1) + '.' + pandora.user.videoFormat;
             });
         });
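
With pandora.site.site.videoprefix filled from settings.VIDEO_PREFIX, video part URLs gain an optional host prefix: a hypothetical VIDEO_PREFIX of http://media.example.com turns /ITEM/480p1.webm into http://media.example.com/ITEM/480p1.webm, and with the default empty prefix the URLs stay exactly as before.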

@@ -5,4 +5,4 @@ cd static/oxjs
 bzr pull http://code.0x2620.org/oxjs/
 test -e src/python-ox && cd src/python-ox && bzr pull http://code.0x2620.org/python-ox/
 cd $base
-cd pandora && ./manage.py update_static
+cd pandora && ./manage.py update_static && ./manage.py compile_pyc