@@ -9,23 +9,23 @@ ARG NB_GID="100"
 
 USER root
 
-ENV HADOOP_HOME=/usr/hdp/current/hadoop \
-    ANACONDA_HOME=/opt/conda
-
-ENV SHELL=/bin/bash \
-    NB_USER=$NB_USER \
-    NB_UID=$NB_UID \
-    NB_GID=$NB_GID \
-    LC_ALL=en_US.UTF-8 \
-    LANG=en_US.UTF-8 \
-    LANGUAGE=en_US.UTF-8 \
-    JAVA_HOME=/usr/lib/jvm/java \
-    SPARK_HOME=/usr/hdp/current/spark2-client \
-    PYSPARK_PYTHON=$ANACONDA_HOME/bin/python \
-    HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
-
-ENV HOME=/home/$NB_USER \
-    PATH=$JAVA_HOME/bin:$ANACONDA_HOME/bin:$HADOOP_HOME/bin:$SPARK_HOME/bin:$PATH
+ENV HADOOP_HOME /usr/hdp/current/hadoop
+ENV ANACONDA_HOME /opt/conda
+
+ENV SHELL /bin/bash \
+    NB_USER $NB_USER \
+    NB_UID $NB_UID \
+    NB_GID $NB_GID \
+    LC_ALL en_US.UTF-8 \
+    LANG en_US.UTF-8 \
+    LANGUAGE en_US.UTF-8 \
+    JAVA_HOME /usr/lib/jvm/java \
+    SPARK_HOME /usr/hdp/current/spark2-client \
+    PYSPARK_PYTHON $ANACONDA_HOME/bin/python \
+    HADOOP_CONF_DIR $HADOOP_HOME/etc/hadoop
+
+ENV HOME /home/$NB_USER
+ENV PATH $JAVA_HOME/bin:$ANACONDA_HOME/bin:$HADOOP_HOME/bin:$SPARK_HOME/bin:$PATH
 
 ENV SPARK_VER $SPARK_VERSION
 ENV HADOOP_VER 3.3.1
@@ -56,7 +56,7 @@ RUN dpkg --purge --force-depends ca-certificates-java \
     && apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security main' \
     && apt-add-repository 'deb http://deb.debian.org/debian/ sid main' \
     && apt-get update && apt-get install -yq --no-install-recommends \
-    openjdk-8-jre-headless \
+    openjdk-8-jdk-headless \
     ca-certificates-java \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
@@ -71,14 +71,14 @@ ADD fix-permissions /usr/local/bin/fix-permissions
 # and make sure these dirs are writable by the `users` group.
 RUN groupadd wheel -g 11 && \
     echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
-    useradd -m -s /bin/bash -N -u $NB_UID $NB_USER && \
+    useradd -m -s /bin/bash -N -u "$NB_UID" "$NB_USER" && \
     mkdir -p /usr/hdp/current && \
     mkdir -p /usr/local/share/jupyter && \
-    chown $NB_USER:$NB_GID $ANACONDA_HOME && \
+    chown "$NB_USER":"$NB_GID" "$ANACONDA_HOME" && \
     chmod g+w /etc/passwd && \
     chmod +x /usr/local/bin/fix-permissions && \
-    fix-permissions $HOME && \
-    fix-permissions $ANACONDA_HOME && \
+    fix-permissions "$HOME" && \
+    fix-permissions "$ANACONDA_HOME" && \
     fix-permissions /usr/hdp/current && \
     fix-permissions /usr/local/share/jupyter
 
@@ -90,8 +90,8 @@ RUN useradd -m -s /bin/bash -N -u 1111 elyra && \
 USER $NB_UID
 
 # Setup work directory for backward-compatibility
-RUN mkdir /home/$NB_USER/work && \
-    fix-permissions /home/$NB_USER
+RUN mkdir "/home/$NB_USER/work" && \
+    fix-permissions "/home/$NB_USER"
 
 # DOWNLOAD HADOOP AND SPARK
 RUN curl -sL https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VER/hadoop-$HADOOP_VER.tar.gz | tar -xz -C /usr/hdp/current
@@ -109,8 +109,8 @@ RUN conda install mamba -n base -c conda-forge && \
     'r-argparse' \
     'python=3.10' && \
     mamba clean -y --all &&\
-    fix-permissions $ANACONDA_HOME && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "$ANACONDA_HOME" && \
+    fix-permissions "/home/$NB_USER"
 
 USER $NB_UID
 
@@ -142,8 +142,8 @@ RUN cd /tmp && \
     pip install /tmp/toree-0.5.0.tar.gz && \
     jupyter toree install --spark_home=$SPARK_HOME --kernel_name="Spark $SPARK_VER" --interpreters=Scala && \
     rm -f /tmp/toree-0.5.0.tar.gz && \
-    fix-permissions $ANACONDA_HOME && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "$ANACONDA_HOME" && \
+    fix-permissions "/home/$NB_USER"
 
 # SETUP PASSWORDLESS SSH FOR $NB_USER
 RUN ssh-keygen -q -N "" -t rsa -f /home/$NB_USER/.ssh/id_rsa && \