From 86ee08b112ce837ae656cb1953259086ed1113f2 Mon Sep 17 00:00:00 2001
From: Michael Kamprath <michael@kamprath.net>
Date: Sun, 9 Feb 2020 11:01:33 -0800
Subject: [PATCH] Fix README build instructions and worker-node QFS jar classpath

---
 spark-qfs-swarm/README.md              | 9 ++++++---
 spark-qfs-swarm/worker-node/Dockerfile | 2 +-
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/spark-qfs-swarm/README.md b/spark-qfs-swarm/README.md
index de0d491..a3540ff 100644
--- a/spark-qfs-swarm/README.md
+++ b/spark-qfs-swarm/README.md
@@ -18,7 +18,11 @@ This setup depends on having a GlusterFS volume mounted at `/mnt/gfs` and a norma
 * `/mnt/data/qfs/checkpoint` - Where the QFS metaserver will store the filesystem checkpoints. This actually only needs to exist on the master node.
 * `/mnt/data/spark` - The local working directory for Spark.
 
-You can adjust these as you see fit, but be sure to update the mounts specified in `deploy-spark-qfs-swarm.yml`. 
+You can adjust these as you see fit, but be sure to update the mounts specified in `deploy-spark-qfs-swarm.yml`. Then build the Docker images from within this project's directory:
+
+```
+./build-images.sh
+```
 
 Before the first time you run this cluster, you will need to initialize the QFS file system. Do so by launching a qfs-master container on the master node:
 ```
@@ -31,9 +35,8 @@ qfs -mkdir /history/spark-event
 exit
 ```
 
-Finally, to start up the Spark cluster in your Docker swarm, `cd` into this project's directory and:
+Finally, to start up the Spark cluster in your Docker swarm:
 ```
-./build-images.sh
 docker stack deploy -c deploy-spark-qfs-swarm.yml spark
 ```
 
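For reference, the `build-images.sh` script invoked above is expected to build this project's Docker images in dependency order. A minimal sketch of such a script follows, assuming the build contexts match the directories referenced in this patch (only `worker-node` and `qfs-master` appear in the diff; the tags and build ordering are assumptions, not taken from the repository):

```
#!/usr/bin/env bash
# Hypothetical sketch of build-images.sh; image names, tags, and build
# contexts are assumptions based on directories referenced in this patch.
set -euo pipefail

# Build the shared worker image first, on the assumption that the
# master image layers on top of it.
docker build -t worker-node:latest ./worker-node
docker build -t qfs-master:latest ./qfs-master
```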
diff --git a/spark-qfs-swarm/worker-node/Dockerfile b/spark-qfs-swarm/worker-node/Dockerfile
index fe31275..4721c66 100644
--- a/spark-qfs-swarm/worker-node/Dockerfile
+++ b/spark-qfs-swarm/worker-node/Dockerfile
@@ -82,7 +82,7 @@ RUN mkdir -p /data/qfs/ \
 # SPARK
 ENV SPARK_PACKAGE spark-${SPARK_VERSION}-bin-hadoop${HADOOP_MINOR_VERSION}
 ENV SPARK_HOME /usr/spark-${SPARK_VERSION}
-ENV SPARK_DIST_CLASSPATH="$QFS_HOME/lib/hadoop-$HADOOP_VERSION-qfs-$QFS_VERSION.jar:$QFS_HOME/lib/qfs-access-$QFS_VERSION"
+ENV SPARK_DIST_CLASSPATH="$QFS_HOME/lib/hadoop-$HADOOP_VERSION-qfs-$QFS_VERSION.jar:$QFS_HOME/lib/qfs-access-$QFS_VERSION.jar"
 ENV HADOOP_CONF_DIR=${SPARK_HOME}/conf/
 ENV PATH $PATH:${SPARK_HOME}/bin
 ARG SPARK_DOWNLOAD_URL="https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_PACKAGE}.tgz"
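
The Dockerfile hunk above appends the missing `.jar` extension to the `qfs-access` entry, so `SPARK_DIST_CLASSPATH` now points at a file that actually exists and Spark can load the QFS access bindings. One way to sanity-check the fix is to confirm that every classpath entry resolves inside a built image; this is a hedged sketch, assuming the image is tagged `worker-node:latest`:

```
# Hypothetical smoke test: split SPARK_DIST_CLASSPATH on ':' and report
# whether each referenced jar exists inside the image.
docker run --rm worker-node:latest sh -c '
  for jar in $(echo "$SPARK_DIST_CLASSPATH" | tr ":" " "); do
    if [ -f "$jar" ]; then echo "OK      $jar"; else echo "MISSING $jar"; fi
  done'
```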