From 87337a3face556af26b7e6ec1a72fdb4245700b6 Mon Sep 17 00:00:00 2001 From: Michael Kamprath Date: Sun, 5 Sep 2021 23:35:51 +0000 Subject: [PATCH] bumped version and tuned memory usage --- spark-qfs-swarm/worker-node/Dockerfile | 4 ++-- spark-qfs-swarm/worker-node/spark-conf/spark-defaults.conf | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/spark-qfs-swarm/worker-node/Dockerfile b/spark-qfs-swarm/worker-node/Dockerfile index 04dfbcd..2ba9a17 100644 --- a/spark-qfs-swarm/worker-node/Dockerfile +++ b/spark-qfs-swarm/worker-node/Dockerfile @@ -15,8 +15,8 @@ MAINTAINER Michael Kamprath "https://github.com/michaelkamprath" # spark-master - the service where the spark master runs # -ARG QFS_VERSION=2.2.3 -ARG SPARK_VERSION=3.1.1 +ARG QFS_VERSION=2.2.4 +ARG SPARK_VERSION=3.1.2 ARG HADOOP_MINOR_VERSION=2.7 ARG HADOOP_VERSION=2.7.2 ARG SCALA_VERSION=2.12.13 diff --git a/spark-qfs-swarm/worker-node/spark-conf/spark-defaults.conf b/spark-qfs-swarm/worker-node/spark-conf/spark-defaults.conf index 141926a..70b65d1 100644 --- a/spark-qfs-swarm/worker-node/spark-conf/spark-defaults.conf +++ b/spark-qfs-swarm/worker-node/spark-conf/spark-defaults.conf @@ -5,13 +5,14 @@ spark.sql.shuffle.partitions 200 # worker node / executor set up # expecting a worker with 12 cores and 56g of memory -spark.executor.memory 51g +spark.executor.memory 50g spark.executor.cores 12 spark.executor.extraJavaOptions -XX:+UseG1GC # driver configurations -spark.driver.memory 5g +spark.driver.memory 8g spark.driver.cores 2 +spark.driver.extraJavaOptions -XX:+UseG1GC # operational configurations spark.logConf true