From b088f42df0d1932a75bedf40eabc83c9a9081c52 Mon Sep 17 00:00:00 2001
From: Michael Kamprath
Date: Thu, 2 Jan 2020 01:08:32 -0800
Subject: [PATCH] fixed qfs webui usage

---
 spark-qfs-swarm/qfs-master/qfs-conf/webUI.cfg | 15 ++++++++++++++-
 spark-qfs-swarm/worker-node/Dockerfile        |  7 +++++--
 2 files changed, 19 insertions(+), 3 deletions(-)

diff --git a/spark-qfs-swarm/qfs-master/qfs-conf/webUI.cfg b/spark-qfs-swarm/qfs-master/qfs-conf/webUI.cfg
index 6538585..9ef4ba8 100644
--- a/spark-qfs-swarm/qfs-master/qfs-conf/webUI.cfg
+++ b/spark-qfs-swarm/qfs-master/qfs-conf/webUI.cfg
@@ -2,6 +2,19 @@
 webServer.metaserverHost = qfs-master
 webServer.metaserverPort = 20000
 webServer.port = 20050
-webServer.docRoot = $QFS_HOME/webui/files/
+webServer.docRoot = /usr/local/qfs/webui/files/
 webServer.host = 0.0.0.0
 webserver.allmachinesfn = /dev/null
+
+[chunk]
+refreshInterval = 5
+currentSize = 30
+currentSpan =10
+hourlySize = 30
+hourlySpan =120
+daylySize = 24
+daylySpan = 3600
+monthlySize = 30
+monthlySpan = 86400
+predefinedHeaders = D-Timer-overrun-count&D-Timer-overrun-sec&XMeta-location&Client-active&Buffer-usec-wait-avg&D-CPU-sys&D-CPU-user&D-Disk-read-bytes&D-Disk-read-count&D-Disk-write-bytes&D-Disk-write-count&Write-appenders&D-Disk-read-errors&D-Disk-write-errors&Num-wr-drives&Num-writable-chunks
+predefinedChunkDirHeaders = Chunks&Dev-id&Read-bytes&D-Read-bytes&Read-err&D-Read-err&Read-io&D-Read-io&D-Read-time-microsec&Read-timeout&Space-avail&Space-util-pct&Started-ago&Stopped-ago&Write-bytes&D-Write-bytes&Write-err&D-Write-err&Write-io&D-Write-io&D-Write-time-microsec&Write-timeout&Chunk-server&Chunk-dir
diff --git a/spark-qfs-swarm/worker-node/Dockerfile b/spark-qfs-swarm/worker-node/Dockerfile
index c97114b..d23d7bb 100644
--- a/spark-qfs-swarm/worker-node/Dockerfile
+++ b/spark-qfs-swarm/worker-node/Dockerfile
@@ -62,7 +62,8 @@ RUN curl -sL --retry 3 \
     | gunzip \
     | tar x -C /usr/ \
     && mv /usr/$QFS_PACKAGE $QFS_HOME \
-    && chown -R root:root $QFS_HOME
+    && chown -R root:root $QFS_HOME \
+    && ln -s $QFS_HOME /usr/local/qfs
 ENV PATH $PATH:${QFS_HOME}/bin:${QFS_HOME}/bin/tools
 RUN mkdir -p /data/qfs/ \
     && chown spark -R /data/qfs
@@ -79,9 +80,11 @@ RUN curl -sL --retry 3 \
     | gunzip \
     | tar x -C /usr/ \
     && mv /usr/$SPARK_PACKAGE $SPARK_HOME \
-    && chown -R root:root $SPARK_HOME
+    && chown -R root:root $SPARK_HOME \
+    && ln -s $SPARK_HOME /usr/local/spark
 RUN mkdir -p /data/spark \
     && chown spark -R /data/spark
+
 # add python libraries useful in PySpark
 RUN python3 -mpip install matplotlib \