Spark: Bump images to 1.5.1, add Makefile

Zach Loafman 2015-10-24 06:48:41 -07:00
parent e05819f36a
commit c0fba82da4
10 changed files with 44 additions and 14 deletions


@@ -0,0 +1,28 @@
+all: push
+
+# To bump the Spark version, bump the version in base/Dockerfile, bump
+# this tag, and bump the uses of this tag in
+# worker/master/driver.
+TAG = 1.5.1_v1
+
+containers:
+	docker build -t gcr.io/google_containers/spark-base base
+	docker tag gcr.io/google_containers/spark-base gcr.io/google_containers/spark-base:$(TAG)
+	docker build -t gcr.io/google_containers/spark-worker worker
+	docker tag gcr.io/google_containers/spark-worker gcr.io/google_containers/spark-worker:$(TAG)
+	docker build -t gcr.io/google_containers/spark-master master
+	docker tag gcr.io/google_containers/spark-master gcr.io/google_containers/spark-master:$(TAG)
+	docker build -t gcr.io/google_containers/spark-driver driver
+	docker tag gcr.io/google_containers/spark-driver gcr.io/google_containers/spark-driver:$(TAG)
+
+push: containers
+	gcloud docker push gcr.io/google_containers/spark-base
+	gcloud docker push gcr.io/google_containers/spark-base:$(TAG)
+	gcloud docker push gcr.io/google_containers/spark-worker
+	gcloud docker push gcr.io/google_containers/spark-worker:$(TAG)
+	gcloud docker push gcr.io/google_containers/spark-master
+	gcloud docker push gcr.io/google_containers/spark-master:$(TAG)
+	gcloud docker push gcr.io/google_containers/spark-driver
+	gcloud docker push gcr.io/google_containers/spark-driver:$(TAG)
+
+clean:
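The Makefile drives the whole image release. A minimal usage sketch, assuming a local Docker daemon and gcloud authenticated against the google_containers registry; the TAG override on the last line is only an illustration of what a future bump would look like:

make containers              # build spark-base, spark-worker, spark-master, spark-driver and tag each with $(TAG)
make push                    # push both the untagged and the :$(TAG) images via gcloud docker push
make push TAG=1.5.1_v2       # hypothetical: TAG can be overridden on the command line for the next revision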


@@ -3,14 +3,16 @@ FROM java:latest
 RUN apt-get update -y
 RUN apt-get install -y scala
+ENV version 1.5.1
 # Get Spark from some apache mirror.
 RUN mkdir -p /opt && \
     cd /opt && \
-    wget http://apache.mirrors.pair.com/spark/spark-1.4.0/spark-1.4.0-bin-hadoop2.6.tgz && \
-    tar -zvxf spark-1.4.0-bin-hadoop2.6.tgz && \
-    rm spark-1.4.0-bin-hadoop2.6.tgz && \
-    ln -s spark-1.4.0-bin-hadoop2.6 spark && \
-    echo Spark installed in /opt
+    wget http://apache.mirrors.pair.com/spark/spark-${version}/spark-${version}-bin-hadoop2.6.tgz && \
+    tar -zvxf spark-${version}-bin-hadoop2.6.tgz && \
+    rm spark-${version}-bin-hadoop2.6.tgz && \
+    ln -s spark-${version}-bin-hadoop2.6 spark && \
+    echo Spark ${version} installed in /opt
 ADD log4j.properties /opt/spark/conf/log4j.properties
 ADD setup_client.sh /
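With the download URL now derived from the version ENV, a future bump only touches that one line plus the tags handled by the Makefile. One way to sanity-check that the new tarball actually exists on the mirror before building (a sketch, assuming curl is available; older releases eventually move off mirrors to archive.apache.org):

version=1.5.1
curl -fIL "http://apache.mirrors.pair.com/spark/spark-${version}/spark-${version}-bin-hadoop2.6.tgz"   # HEAD request, fails on HTTP errors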


@@ -1,3 +1,3 @@
-FROM gcr.io/google_containers/spark-base:1.4.0_v1
+FROM gcr.io/google_containers/spark-base:1.5.1_v1
 ADD start.sh /start.sh
 CMD ["/start.sh"]


@@ -1,4 +1,4 @@
-FROM gcr.io/google_containers/spark-base:1.4.0_v1
+FROM gcr.io/google_containers/spark-base:1.5.1_v1
 ADD start.sh /
 ADD log4j.properties /opt/spark/conf/log4j.properties


@@ -1,7 +1,7 @@
-FROM gcr.io/google_containers/spark-base:1.4.0_v1
+FROM gcr.io/google_containers/spark-base:1.5.1_v1
 ADD start.sh /
 ADD log4j.properties /opt/spark/conf/log4j.properties
 EXPOSE 8080
 ENTRYPOINT ["/start.sh"]
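Port 8080 here is the Spark standalone master's web UI. Once a master pod is up, one way to reach that UI without exposing a service is kubectl port-forward; a sketch, assuming the master pod is simply named spark-master (the pod name isn't visible in these hunks):

kubectl port-forward spark-master 8080:8080   # then browse http://localhost:8080 for the master UI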


@@ -11,7 +11,7 @@
 "containers": [
   {
     "name": "spark-driver",
-    "image": "gcr.io/google_containers/spark-driver:1.4.0_v1",
+    "image": "gcr.io/google_containers/spark-driver:1.5.1_v1",
     "resources": {
       "limits": {
         "cpu": "100m"


@@ -15,7 +15,7 @@ spec:
 spec:
   containers:
     - name: spark-master
-      image: gcr.io/google_containers/spark-master
+      image: gcr.io/google_containers/spark-master:1.5.1_v1
       ports:
         - containerPort: 7077
       volumeMounts:


@@ -16,7 +16,7 @@ spec:
 spec:
   containers:
     - name: spark-worker
-      image: gcr.io/google_containers/spark-worker
+      image: gcr.io/google_containers/spark-worker:1.5.1_v1
       ports:
         - containerPort: 8888
       volumeMounts:
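Worker pods already running under the replication controller keep the old image after this template change; recreating them (for example by scaling down and back up) pulls 1.5.1_v1. A sketch, assuming the controller is named spark-worker-controller and three workers are desired (both assumptions, neither shown in this hunk):

kubectl scale rc spark-worker-controller --replicas=0   # tear down the old-image workers
kubectl scale rc spark-worker-controller --replicas=3   # recreate them from the updated template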


@@ -11,7 +11,7 @@
 "containers": [
   {
     "name": "spark-master",
-    "image": "gcr.io/google_containers/spark-master:1.4.0_v1",
+    "image": "gcr.io/google_containers/spark-master:1.5.1_v1",
     "ports": [
       {
         "containerPort": 7077


@@ -23,7 +23,7 @@
 "containers": [
   {
     "name": "spark-worker",
-    "image": "gcr.io/google_containers/spark-worker:1.4.0_v1",
+    "image": "gcr.io/google_containers/spark-worker:1.5.1_v1",
     "ports": [
       {
         "hostPort": 8888,