New example: Distributed task queue

Adds an example that uses Kubernetes to build a distributed task queue
with Celery, a RabbitMQ broker, and a Flower frontend.

Resolves: #1788
Karl Beecher
2015-03-06 09:05:26 +01:00
parent 8b627f516f
commit 43ab8188c8
11 changed files with 441 additions and 0 deletions

Dockerfile

@@ -0,0 +1,9 @@
FROM dockerfile/celery
ADD celery_conf.py /data/celery_conf.py
ADD run_tasks.py /data/run_tasks.py
ADD run.sh /usr/local/bin/run.sh
ENV C_FORCE_ROOT 1
CMD ["/bin/bash", "/usr/local/bin/run.sh"]

celery_conf.py

@@ -0,0 +1,13 @@
import os

from celery import Celery

# Get the Kubernetes-provided address of the broker service
broker_service_host = os.environ.get('RABBITMQ_SERVICE_SERVICE_HOST')

app = Celery('tasks', broker='amqp://guest@%s//' % broker_service_host, backend='amqp')


@app.task
def add(x, y):
    return x + y
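
A task registered this way can be submitted from any process that can reach the broker. As a minimal, hypothetical usage sketch (assuming the worker container is running and RABBITMQ_SERVICE_SERVICE_HOST is set in the calling environment):

from celery_conf import add

# Queue the task on the broker; a worker picks it up and stores the result
result = add.delay(2, 3)
# Block until the result arrives (or give up after 10 seconds)
print(result.get(timeout=10))  # prints 5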

run.sh

@@ -0,0 +1,21 @@
#!/bin/bash
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Run the celery worker in the background
/usr/local/bin/celery -A celery_conf worker -f /data/celery.log &
# Start firing periodic tasks automatically
python /data/run_tasks.py

run_tasks.py

@@ -0,0 +1,13 @@
import random
import syslog
import time

from celery_conf import add

while True:
    x = random.randint(1, 10)
    y = random.randint(1, 10)
    # Queue an addition task on the broker for a worker to pick up
    res = add.delay(x, y)
    time.sleep(5)
    # Fetch the result if the task has already completed
    if res.ready():
        res.get()
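
Note that this hunk imports syslog without using it in the lines shown. If the results were meant to be recorded, one hedged sketch of how the loop body could do so (not taken from the commit itself) would be:

if res.ready() and res.successful():
    # Hypothetical addition: write the completed calculation to the system log
    syslog.syslog(syslog.LOG_INFO, '%d + %d = %d' % (x, y, res.get()))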