Spark: Don't daemonize, don't pretend to tail logs we don't use

Along the way: Fixup port mismatch in worker container
Zach Loafman 2015-10-30 11:37:29 -07:00
parent 887b9dd78a
commit 436839f196
9 changed files with 12 additions and 33 deletions


@@ -4,7 +4,7 @@ all: push
 # this tag and reset to v1. You should also double check the native
 # Hadoop libs at that point (we grab the 2.6.1 libs, which are
 # appropriate for 1.5.1-with-2.6).
-TAG = 1.5.1_v1
+TAG = 1.5.1_v2
 containers:
 	docker build -t gcr.io/google_containers/spark-base base
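
The tag bump only takes effect once the images are rebuilt and pushed under it; the controllers further down reference :1.5.1_v2 explicitly. A rough sketch of the equivalent build-and-push commands, assuming each image lives in a sibling directory of base (the master, worker, and driver directories and the gcloud push step are assumptions, not shown in this hunk):

    # Rebuild and push under the bumped tag so the controllers can pull :1.5.1_v2.
    # Only the "base" directory is visible above; the other paths are assumed.
    TAG=1.5.1_v2
    docker build -t gcr.io/google_containers/spark-base base
    for img in master worker driver; do
      docker build -t "gcr.io/google_containers/spark-${img}:${TAG}" "${img}"
      gcloud docker push "gcr.io/google_containers/spark-${img}:${TAG}"
    done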


@@ -17,8 +17,6 @@
 . /start-common.sh
 echo "$(hostname -i) spark-master" >> /etc/hosts
 export SPARK_LOCAL_HOSTNAME=spark-master
 export SPARK_MASTER_IP=spark-master
-/opt/spark/sbin/start-master.sh
-tail -F /opt/spark/logs/*
+# Run spark-class directly so that when it exits (or crashes), the pod restarts.
+/opt/spark/bin/spark-class org.apache.spark.deploy.master.Master --ip spark-master --port 7077 --webui-port 8080
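
Previously start-master.sh daemonized the JVM and tail -F kept the container alive, so a dead master never caused a pod restart. With spark-class in the foreground, the container exits when the master does, and kubelet restarts it. A quick way to confirm the new behavior (the pod name and the presence of pkill in the image are assumptions):

    # Kill the master JVM inside the pod; the container should exit and the
    # pod's RESTARTS count should increment shortly afterwards.
    kubectl exec spark-master -- pkill -f org.apache.spark.deploy.master.Master
    kubectl get pod spark-master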


@@ -2,6 +2,6 @@ FROM gcr.io/google_containers/spark-base:latest
 ADD start.sh /
 ADD log4j.properties /opt/spark/conf/log4j.properties
-EXPOSE 8080
+EXPOSE 8081
 ENTRYPOINT ["/start.sh"]
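
EXPOSE 8080 was the mismatch called out in the commit message: the worker's web UI listens on 8081 (see the start.sh hunk just below), while 8080 is the master UI. One way to sanity-check the rebuilt image (the image tag here is an assumption):

    # The exposed port set should now include the worker UI port, 8081/tcp.
    docker inspect --format '{{json .Config.ExposedPorts}}' \
      gcr.io/google_containers/spark-worker:1.5.1_v2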


@@ -16,6 +16,5 @@
 . /start-common.sh
-/opt/spark/sbin/start-slave.sh spark://spark-master:7077
-tail -F /opt/spark/logs/*
+# Run spark-class directly so that when it exits (or crashes), the pod restarts.
+/opt/spark/bin/spark-class org.apache.spark.deploy.worker.Worker spark://spark-master:7077 --webui-port 8081
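
After the change, the worker script reduces to sourcing the shared setup and running the Worker class in the foreground. A sketch of the resulting script (the shebang and any surrounding lines are assumptions):

    #!/bin/bash
    . /start-common.sh
    # Run spark-class directly so that when it exits (or crashes), the pod restarts.
    /opt/spark/bin/spark-class org.apache.spark.deploy.worker.Worker \
      spark://spark-master:7077 --webui-port 8081

Since nothing daemonizes anymore, there is no separate log file to tail; output goes to the container's stdout/stderr (and so to kubectl logs), presumably routed there by the log4j.properties the Dockerfile installs.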


@@ -13,7 +13,7 @@ spec:
     spec:
       containers:
         - name: spark-driver
-          image: gcr.io/google_containers/spark-driver:1.5.1_v1
+          image: gcr.io/google_containers/spark-driver:1.5.1_v2
           resources:
             requests:
               cpu: 100m


@@ -15,7 +15,7 @@ spec:
     spec:
       containers:
         - name: spark-master
-          image: gcr.io/google_containers/spark-master:1.5.1_v1
+          image: gcr.io/google_containers/spark-master:1.5.1_v2
           ports:
             - containerPort: 7077
           volumeMounts:


@@ -16,7 +16,7 @@ spec:
     spec:
      containers:
        - name: spark-worker
-         image: gcr.io/google_containers/spark-worker:1.5.1_v1
+         image: gcr.io/google_containers/spark-worker:1.5.1_v2
          ports:
            - containerPort: 8888
          volumeMounts:


@@ -13,19 +13,10 @@ spec:
     spec:
       containers:
         - name: spark-master
-          image: gcr.io/google_containers/spark-master:1.5.1_v1
+          image: gcr.io/google_containers/spark-master:1.5.1_v2
           ports:
             - containerPort: 7077
             - containerPort: 8080
-          livenessProbe:
-            exec:
-              command:
-                - /opt/spark/sbin/spark-daemon.sh
-                - status
-                - org.apache.spark.deploy.master.Master
-                - '1'
-            initialDelaySeconds: 30
-            timeoutSeconds: 1
           resources:
             requests:
               cpu: 100m
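
The exec probe goes away along with the daemonized start: spark-daemon.sh status checks a pid file that only the sbin start scripts write, so with spark-class running in the foreground it would always report the master as not running and the liveness check would kill healthy pods. Restart-on-crash is instead handled by letting the container exit, as in the start.sh changes above. Roughly what the removed probe executed (illustrative; exact output and exit codes vary by Spark version):

    # The old probe command, run inside the container. Without a pid file from
    # start-master.sh it reports the master as not running and exits nonzero,
    # which would now fail the liveness check even while the master is healthy.
    /opt/spark/sbin/spark-daemon.sh status org.apache.spark.deploy.master.Master 1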


@@ -13,18 +13,9 @@ spec:
     spec:
       containers:
         - name: spark-worker
-          image: gcr.io/google_containers/spark-worker:1.5.1_v1
+          image: gcr.io/google_containers/spark-worker:1.5.1_v2
           ports:
             - containerPort: 8888
-          livenessProbe:
-            exec:
-              command:
-                - /opt/spark/sbin/spark-daemon.sh
-                - status
-                - org.apache.spark.deploy.worker.Worker
-                - '1'
-            initialDelaySeconds: 30
-            timeoutSeconds: 1
+            - containerPort: 8081
           resources:
             requests:
               cpu: 100m