Compare commits
No commits in common. "0b44b3b66b5d52c6d506b00b650520b21860c1f4" and "de14061b6896f1a106eca5d7eab926135156094d" have entirely different histories.
0b44b3b66b ... de14061b68
15 changed files with 28 additions and 96 deletions
@@ -24,7 +24,7 @@ DATABASES = {
         'PORT': 5432,
     }
 }
-CELERY_BROKER_URL = "amqp://{0}:{1}@rabbitmq:5672//".format(os.environ.get('RABBITMQ_DEFAULT_USER'), os.environ.get('RABBITMQ_DEFAULT_PASS'))
+BROKER_URL = "amqp://{0}:{1}@rabbitmq:5672//".format(os.environ.get('RABBITMQ_DEFAULT_USER'), os.environ.get('RABBITMQ_DEFAULT_PASS'))
 XACCELREDIRECT = True
 
 DEBUG = False
@@ -9,9 +9,8 @@ User=pandora
 Group=pandora
 PIDFile=/run/pandora/cron.pid
 WorkingDirectory=/srv/pandora/pandora
-ExecStart=/srv/pandora/bin/celery \
-    -A app beat \
-    -s /run/pandora/celerybeat-schedule \
+ExecStart=/srv/pandora/bin/python /srv/pandora/pandora/manage.py \
+    celerybeat -s /run/pandora/celerybeat-schedule \
     --pidfile /run/pandora/cron.pid \
     -l INFO
 ExecReload=/bin/kill -HUP $MAINPID
@@ -9,8 +9,8 @@ User=pandora
 Group=pandora
 PIDFile=/run/pandora/encoding.pid
 WorkingDirectory=/srv/pandora/pandora
-ExecStart=/srv/pandora/bin/celery \
-    -A app worker \
+ExecStart=/srv/pandora/bin/python /srv/pandora/pandora/manage.py \
+    celery worker \
     -Q encoding -n pandora-encoding \
     --pidfile /run/pandora/encoding.pid \
     --maxtasksperchild 500 \
@@ -9,8 +9,8 @@ User=pandora
 Group=pandora
 PIDFile=/run/pandora/tasks.pid
 WorkingDirectory=/srv/pandora/pandora
-ExecStart=/srv/pandora/bin/celery \
-    -A app worker \
+ExecStart=/srv/pandora/bin/python /srv/pandora/pandora/manage.py \
+    celery worker \
     -Q default,celery -n pandora-default \
     --pidfile /run/pandora/tasks.pid \
     --maxtasksperchild 1000 \
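Across all three service units the change is the same: one side starts the daemons through Celery's own command line (/srv/pandora/bin/celery with -A app worker / -A app beat, which loads the Celery app module deleted below), the other goes through the Django management commands provided by django-celery (manage.py celery worker and manage.py celerybeat). Queue names, pid files and --maxtasksperchild limits stay the same on both sides.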
@@ -1,4 +0,0 @@
-from .celery import app as celery_app
-
-__all__ = ('celery_app',)
-
@@ -1,21 +0,0 @@
-import os
-
-from celery import Celery
-
-root_dir = os.path.normpath(os.path.abspath(os.path.dirname(__file__)))
-root_dir = os.path.dirname(root_dir)
-os.chdir(root_dir)
-
-# set the default Django settings module for the 'celery' program.
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
-
-app = Celery('pandora')
-
-# Using a string here means the worker doesn't have to serialize
-# the configuration object to child processes.
-# - namespace='CELERY' means all celery-related configuration keys
-#   should have a `CELERY_` prefix.
-app.config_from_object('django.conf:settings', namespace='CELERY')
-
-# Load task modules from all registered Django app configs.
-app.autodiscover_tasks()
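The two deleted modules above are the Celery 4 entry point: they create the app with Celery('pandora'), read any CELERY_-prefixed Django setting via config_from_object(..., namespace='CELERY'), and autodiscover tasks from the installed apps. Task code itself is largely unaffected by switching between this layout and djcelery's loader, as long as tasks are declared with shared_task; a minimal sketch, not taken from this repository:

from celery import shared_task

@shared_task
def touch(item_id):
    # hypothetical task body, for illustration only; shared_task binds to
    # whichever Celery app is configured, so it works with both setups
    return item_id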
@@ -32,19 +32,4 @@ def monkey_patch_username():
         if isinstance(v, MaxLengthValidator):
             v.limit_value = 255
 
-def apply_patch():
-    from django.db import connection, transaction
-    cursor = connection.cursor()
-    table = connection.introspection.get_table_description(cursor, User._meta.db_table)
-    sql = []
-    for row in table:
-        if row.name in NEW_LENGTH and row.internal_size != NEW_LENGTH[row.name]:
-            sql.append('ALTER TABLE "%s" ALTER "%s" TYPE varchar(%d)' % (User._meta.db_table, row.name, NEW_LENGTH[row.name]))
-
-    for q in sql:
-        cursor.execute(q)
-    if sql:
-        transaction.commit()
-
-
 monkey_patch_username()
@@ -10,7 +10,7 @@ from django.db.models import Count, Q
 
 from six import string_types
 from celery.utils import get_full_cls_name
-from celery._state import current_app
+from celery.backends import default_backend
 import ox
 from oxdjango.decorators import login_required_json
 from oxdjango.shortcuts import render_to_json_response, get_object_or_404_json, json_response
@@ -390,11 +390,8 @@ def getTaskStatus(request, data):
     else:
         task_id = data['task_id']
     response = json_response(status=200, text='ok')
-
-    backend = current_app.backend
-    status = backend.get_status(task_id)
-    res = backend.get_result(task_id)
-
+    status = default_backend.get_status(task_id)
+    res = default_backend.get_result(task_id)
     response['data'] = {
         'id': task_id,
         'status': status
@@ -403,8 +400,8 @@ def getTaskStatus(request, data):
             response['data'].update(res)
         else:
             response['data']['result'] = res
-    if status in backend.EXCEPTION_STATES:
-        traceback = backend.get_traceback(task_id)
+    if status in default_backend.EXCEPTION_STATES:
+        traceback = default_backend.get_traceback(task_id)
         response['data'].update({
             'result': str(res),
             'exc': get_full_cls_name(res.__class__),
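These two hunks swap how the view reads task state: one side asks the backend of the configured app (celery._state.current_app), the other uses the module-level celery.backends.default_backend that Celery 3.1 still exposes. On Celery 4 the same information is more commonly read through AsyncResult; a hedged sketch, not part of this diff:

from celery.result import AsyncResult

def task_status(task_id):
    # illustrative helper (hypothetical name), mirroring the fields the
    # view returns: task id, state, and result or exception when finished
    result = AsyncResult(task_id)
    return {
        'id': task_id,
        'status': result.state,
        'result': result.result,
    }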
@@ -31,8 +31,6 @@ class Command(BaseCommand):
                 print(sql)
                 cursor.execute(sql)
 
-        app.monkey_patch.apply_patch()
-
         if settings.DB_GIN_TRGM:
             import entity.models
             import document.models
@@ -6,6 +6,8 @@ from __future__ import absolute_import
 
 import os
 from os.path import join, normpath, dirname
+import djcelery
+djcelery.setup_loader()
 
 BASE_DIR = PROJECT_ROOT = normpath(dirname(__file__))
 BIN_DIR = normpath(join(PROJECT_ROOT, '..', 'bin'))
@@ -120,7 +122,7 @@ INSTALLED_APPS = (
     'django.contrib.humanize',
 
     'django_extensions',
-    'django_celery_results',
+    'djcelery',
     'app',
     'log',
     'annotation',
@@ -195,12 +197,12 @@ DATABASES = {
 }
 
 #rabbitmq connection settings
-CELERY_RESULT_BACKEND = 'django-db'
+CELERY_RESULT_BACKEND = 'database'
 CELERY_TASK_SERIALIZER = 'json'
 CELERY_RESULT_SERIALIZER = 'json'
 CELERY_ACCEPT_CONTENT = ['json']
 
-CELERY_BROKER_URL = 'amqp://pandora:box@localhost:5672//pandora'
+BROKER_URL = 'amqp://pandora:box@localhost:5672//pandora'
 
 SEND_CELERY_ERROR_EMAILS = False
 
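The renames in this hunk (and in the local_settings templates elsewhere in this diff) follow from how each stack reads its configuration: the deleted Celery app module calls config_from_object('django.conf:settings', namespace='CELERY'), so Celery 4 looks for CELERY_BROKER_URL and stores results with the 'django-db' backend from django-celery-results, while djcelery/Celery 3.1 reads the un-prefixed BROKER_URL and uses the 'database' result backend provided by django-celery. Side by side, with the values taken from the hunk above:

# Celery 4 + django-celery-results (CELERY_ namespace)
CELERY_BROKER_URL = 'amqp://pandora:box@localhost:5672//pandora'
CELERY_RESULT_BACKEND = 'django-db'

# Celery 3.1 + django-celery (djcelery)
BROKER_URL = 'amqp://pandora:box@localhost:5672//pandora'
CELERY_RESULT_BACKEND = 'database'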
@@ -262,10 +264,6 @@ COLLECTION_ICON = join(SCRIPT_ROOT, 'list_icon.py')
 
 DB_GIN_TRGM = False
 
-ALLOWED_HOSTS = ['*']
-SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
-
-DATA_UPLOAD_MAX_MEMORY_SIZE = 32 * 1024 * 1024
 
 RELOADER_RUNNING = False
 #you can ignore things below this line
@@ -297,4 +295,7 @@ except NameError:
 
 INSTALLED_APPS = tuple(list(INSTALLED_APPS) + LOCAL_APPS)
 
+ALLOWED_HOSTS = ['*']
+SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
 
+DATA_UPLOAD_MAX_MEMORY_SIZE = 32 * 1024 * 1024
@@ -4,6 +4,7 @@ from __future__ import division, print_function, absolute_import
 from datetime import datetime, timedelta
 from time import time
 
+from celery.backends import default_backend
 from celery.utils import get_full_cls_name
 from django.contrib.auth import get_user_model
 from django.conf import settings
@@ -27,7 +27,7 @@ class Worker(ConsumerMixin):
         message.ack()
 
 def run():
-    with Connection(settings.CELERY_BROKER_URL) as conn:
+    with Connection(settings.BROKER_URL) as conn:
         try:
             worker = Worker(conn)
             worker.run()
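This consumer only needs a kombu Connection to the same broker the workers use, so the only change is which settings name it reads. A hedged sketch of a variant that tolerates either name, purely illustrative:

from django.conf import settings
from kombu import Connection

# prefer the Celery 4 style setting, fall back to the Celery 3 one
broker_url = getattr(settings, 'CELERY_BROKER_URL', None) or getattr(settings, 'BROKER_URL', None)
with Connection(broker_url) as conn:
    conn.ensure_connection(max_retries=3)  # fail fast if the broker is unreachable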
@@ -1,8 +1,8 @@
 Django==1.11.22
 simplejson
 chardet
-celery>4
-django-celery-results
+celery==3.1.26.post2
+django-celery==3.2.2
 django-extensions==2.0.7
 gunicorn==19.8.1
 html5lib
26 update.py
@@ -261,17 +261,6 @@ if __name__ == "__main__":
         run('./pandora/manage.py', 'createcachetable')
     if old <= 6108:
         run('./bin/pip', 'install', '-r', 'requirements.txt')
-    if old <= 6160:
-        run('./bin/pip', 'install', '-r', 'requirements.txt')
-        with open('pandora/local_settings.py', 'r') as f:
-            local_settings = f.read()
-        if 'BROKER_URL' in local_settings and 'CELERY_BROKER_URL' not in local_settings:
-            local_settings = [
-                'CELERY_' + l if l.startswith('BROKER_URL') else l
-                for l in local_settings.split('\n')
-            ]
-            with open('pandora/local_settings.py', 'w') as f:
-                f.write('\n'.join(local_settings))
 else:
     if len(sys.argv) == 1:
         branch = get_branch()
@@ -323,22 +312,9 @@ if __name__ == "__main__":
             run('./manage.py', 'compile_pyc', '-p', '.')
         os.chdir(join(base, 'pandora'))
         diff = get('./manage.py', 'sqldiff', '-a').strip()
-        for row in [
-            '-- Model missing for table: djcelery_periodictasks\n',
-            '-- Model missing for table: celery_taskmeta\n',
-            '-- Model missing for table: celery_tasksetmeta\n',
-            '-- Model missing for table: djcelery_crontabschedule\n',
-            '-- Model missing for table: djcelery_periodictask\n',
-            '-- Model missing for table: djcelery_intervalschedule\n',
-            '-- Model missing for table: djcelery_workerstate\n',
-            '-- Model missing for table: djcelery_taskstate\n',
-            '-- Model missing for table: cache\n',
-        ]:
-            if row in diff:
-                diff = diff.replace(row, '')
         if diff not in [
             '-- No differences',
-            'BEGIN;\nCOMMIT;'
+            'BEGIN;\n-- Model missing for table: cache\nCOMMIT;'
         ]:
             print('Database has changed, please make a backup and run %s db' % sys.argv[0])
         elif branch != 'master':
@@ -117,9 +117,9 @@ if [ "$RABBITMQ" == "local" ]; then
     rabbitmqctl add_user pandora $RABBITPWD
     rabbitmqctl add_vhost /pandora
     rabbitmqctl set_permissions -p /pandora pandora ".*" ".*" ".*"
-    CELERY_BROKER_URL="amqp://pandora:$RABBITPWD@localhost:5672//pandora"
+    BROKER_URL="amqp://pandora:$RABBITPWD@localhost:5672//pandora"
 else
-    CELERY_BROKER_URL="$RABBITMQ"
+    BROKER_URL="$RABBITMQ"
 fi
 
 # checkout pandora from git
@@ -145,7 +145,7 @@ DATABASES = {
         'PASSWORD': '',
     }
 }
-CELERY_BROKER_URL = '$CELERY_BROKER_URL'
+BROKER_URL = '$BROKER_URL'
 XACCELREDIRECT = True
 
 DEBUG = False