update for Spark 3.3 and a few other tweaks
michaelkamprath committed Jul 6, 2022
1 parent 3b891b8 commit 0df7dcb
Showing 4 changed files with 9 additions and 9 deletions.
spark-qfs-swarm/deploy-spark-qfs-swarm.yml (6 changes: 3 additions & 3 deletions)
@@ -41,7 +41,7 @@ services:
       resources:
         limits:
           cpus: "2.0"
-          memory: 9g
+          memory: 10g
   jupyter-server:
     image: master:5000/jupyter-server:latest
     hostname: jupyter-server
@@ -67,7 +67,7 @@ services:
       resources:
         limits:
           cpus: "2.0"
-          memory: 9g
+          memory: 10g
   worker-node:
     image: master:5000/worker-node:latest
     hostname: worker
@@ -91,7 +91,7 @@
       mode: global
       resources:
         limits:
-          memory: 52g
+          memory: 50g
 networks:
   cluster_network:
     attachable: true
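The net effect in this file is a modest rebalance: two service-level memory limits go from 9g to 10g, while the worker-node limit drops from 52g to 50g. As a hedged sketch, re-applying the updated limits to a running swarm could look like the following; the stack name "spark" is a placeholder and is not defined anywhere in this diff.

# Redeploy the stack so Swarm picks up the new resource limits
# (the stack name "spark" is a placeholder).
docker stack deploy --compose-file deploy-spark-qfs-swarm.yml spark

# Spot-check the effective per-container memory limits afterward.
docker stats --no-stream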
spark-qfs-swarm/worker-node/Dockerfile (6 changes: 3 additions & 3 deletions)
@@ -16,8 +16,8 @@ MAINTAINER Michael Kamprath "https://github.com/michaelkamprath"
 #
 
 ARG QFS_VERSION=2.2.5
-ARG SPARK_VERSION=3.2.1
-ARG HADOOP_MINOR_VERSION=2.7
+ARG SPARK_VERSION=3.3.0
+ARG HADOOP_MAJOR_VERSION=2
 ARG HADOOP_VERSION=2.7.2
 ARG SCALA_VERSION=2.12.15
 
@@ -77,7 +77,7 @@ RUN mkdir -p /data/qfs/ \
     && chown spark -R /data/qfs
 
 # SPARK
-ENV SPARK_PACKAGE spark-${SPARK_VERSION}-bin-hadoop${HADOOP_MINOR_VERSION}
+ENV SPARK_PACKAGE spark-${SPARK_VERSION}-bin-hadoop${HADOOP_MAJOR_VERSION}
 ENV SPARK_HOME /usr/spark-${SPARK_VERSION}
 ENV SPARK_DIST_CLASSPATH="$QFS_HOME/lib/hadoop-$HADOOP_VERSION-qfs-$QFS_VERSION.jar:$QFS_HOME/lib/qfs-access-$QFS_VERSION.jar"
 ENV HADOOP_CONF_DIR=${SPARK_HOME}/conf/
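The build-argument rename tracks a change in Spark's release artifacts: the 3.2.x line shipped packages such as spark-3.2.1-bin-hadoop2.7.tgz, while 3.3.0 ships spark-3.3.0-bin-hadoop2.tgz and spark-3.3.0-bin-hadoop3.tgz, so the package suffix is now a Hadoop major version rather than a minor version. The download step that consumes SPARK_PACKAGE is not shown in this diff, so the following is only a hypothetical sketch of what such a step might look like; the mirror URL and install path are assumptions, not taken from the Dockerfile.

# Hypothetical download step using the updated package name; the actual
# RUN command in the Dockerfile is not part of this diff.
curl -fSL "https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_PACKAGE}.tgz" \
    -o /tmp/${SPARK_PACKAGE}.tgz \
 && tar -xzf /tmp/${SPARK_PACKAGE}.tgz -C /usr/ \
 && mv /usr/${SPARK_PACKAGE} ${SPARK_HOME} \
 && rm /tmp/${SPARK_PACKAGE}.tgz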
spark-qfs-swarm/worker-node/spark-conf/spark-defaults.conf (4 changes: 2 additions & 2 deletions)
@@ -10,8 +10,8 @@ spark.executor.cores 12
 spark.executor.extraJavaOptions -XX:+UseG1GC
 
 # driver configurations
-spark.driver.memory 6g
-spark.driver.memoryOverhead 2g
+spark.driver.memory 10g
+spark.driver.memoryOverhead 3g
 spark.driver.cores 2
 spark.driver.extraJavaOptions -XX:+UseG1GC
 
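The driver defaults grow from 6g of heap with 2g of overhead to 10g and 3g. These remain defaults that individual jobs can override at submit time; a hedged example is below, where the master URL and application jar are placeholders rather than values taken from this repository.

# Per-job override of the driver sizing defaults set in spark-defaults.conf.
# The master URL and jar path are placeholders.
spark-submit \
    --master spark://spark-master:7077 \
    --driver-memory 10g \
    --conf spark.driver.memoryOverhead=3g \
    --conf spark.driver.cores=2 \
    my-analysis-job.jar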
spark-qfs-swarm/worker-node/spark-conf/spark-env.sh (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 # the total amount of memory a worker (node) can use
-SPARK_WORKER_MEMORY=55g
+SPARK_WORKER_MEMORY=50g
 
 # the total amount of cores a worker (node) can use
 SPARK_WORKER_CORES=12
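With this change SPARK_WORKER_MEMORY lines up with the 50g limit placed on the worker-node service in deploy-spark-qfs-swarm.yml, so a worker no longer advertises more memory to executors than its container is allowed to use. A quick, hedged way to sanity-check the container side on a swarm node is shown below; the name filter "worker" is an assumption about how the running container ends up being named.

# Print the memory limit, in bytes, of the running worker container;
# the name filter is an assumption about the deployed container name.
docker inspect --format '{{.HostConfig.Memory}}' $(docker ps -q --filter name=worker)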
