@@ -9,23 +9,23 @@ ARG NB_GID="100"

USER root

- ENV HADOOP_HOME=/usr/hdp/current/hadoop \
- ANACONDA_HOME=/opt/conda
-
- ENV SHELL=/bin/bash \
- NB_USER=$NB_USER \
- NB_UID=$NB_UID \
- NB_GID=$NB_GID \
- LC_ALL=en_US.UTF-8 \
- LANG=en_US.UTF-8 \
- LANGUAGE=en_US.UTF-8 \
- JAVA_HOME=/usr/lib/jvm/java \
- SPARK_HOME=/usr/hdp/current/spark2-client \
- PYSPARK_PYTHON=$ANACONDA_HOME/bin/python \
- HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
-
- ENV HOME=/home/$NB_USER \
- PATH=$JAVA_HOME/bin:$ANACONDA_HOME/bin:$HADOOP_HOME/bin:$SPARK_HOME/bin:$PATH
+ ENV HADOOP_HOME /usr/hdp/current/hadoop
+ ENV ANACONDA_HOME /opt/conda
+
+ ENV SHELL /bin/bash \
+ NB_USER $NB_USER \
+ NB_UID $NB_UID \
+ NB_GID $NB_GID \
+ LC_ALL en_US.UTF-8 \
+ LANG en_US.UTF-8 \
+ LANGUAGE en_US.UTF-8 \
+ JAVA_HOME /usr/lib/jvm/java \
+ SPARK_HOME /usr/hdp/current/spark2-client \
+ PYSPARK_PYTHON $ANACONDA_HOME/bin/python \
+ HADOOP_CONF_DIR $HADOOP_HOME/etc/hadoop
+
+ ENV HOME /home/$NB_USER
+ ENV PATH $JAVA_HOME/bin:$ANACONDA_HOME/bin:$HADOOP_HOME/bin:$SPARK_HOME/bin:$PATH

ENV SPARK_VER $SPARK_VERSION
ENV HADOOP_VER 3.3.1
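
Note on the ENV change above: Dockerfiles accept two forms of ENV. The "ENV key=value" form (the removed lines) can set several variables in a single instruction, while the whitespace-separated legacy form (the added lines) treats everything after the first space as the value and sets exactly one variable per instruction, which is why HOME and PATH become two separate ENV lines. A minimal sketch of the difference, with illustrative variable names:

    # The = form may set several variables in one instruction.
    ENV A=1 B=2
    # The legacy space form sets exactly one variable; everything after
    # the first whitespace becomes the value, so here A="1 B 2".
    ENV A 1 B 2
    # Either way, a value may reference variables from earlier
    # instructions, but not keys defined in the same instruction.
    ENV GREETING=hello
    ENV MESSAGE=$GREETING
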
@@ -56,7 +56,7 @@ RUN dpkg --purge --force-depends ca-certificates-java \
&& apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security main' \
&& apt-add-repository 'deb http://deb.debian.org/debian/ sid main' \
&& apt-get update && apt-get install -yq --no-install-recommends \
- openjdk-8-jre-headless \
+ openjdk-8-jdk-headless \
ca-certificates-java \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
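
The package swap above moves from the Java 8 runtime to the full JDK: openjdk-8-jdk-headless additionally ships developer tools such as javac and jar that the JRE package lacks. The diff does not state the motivation; assuming later build steps need those tools, a quick check against the built image (the image name here is a placeholder) would be:

    docker run --rm my-kernel-image javac -version
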
@@ -71,14 +71,14 @@ ADD fix-permissions /usr/local/bin/fix-permissions
# and make sure these dirs are writable by the `users` group.
RUN groupadd wheel -g 11 && \
echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
- useradd -m -s /bin/bash -N -u $NB_UID $NB_USER && \
+ useradd -m -s /bin/bash -N -u "$NB_UID" "$NB_USER" && \
mkdir -p /usr/hdp/current && \
mkdir -p /usr/local/share/jupyter && \
- chown $NB_USER:$NB_GID $ANACONDA_HOME && \
+ chown "$NB_USER":"$NB_GID" "$ANACONDA_HOME" && \
chmod g+w /etc/passwd && \
chmod +x /usr/local/bin/fix-permissions && \
- fix-permissions $HOME && \
- fix-permissions $ANACONDA_HOME && \
+ fix-permissions "$HOME" && \
+ fix-permissions "$ANACONDA_HOME" && \
fix-permissions /usr/hdp/current && \
fix-permissions /usr/local/share/jupyter

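The quoting introduced in this hunk (and repeated in the hunks below) is standard shell hardening: an unquoted $VAR undergoes word splitting and glob expansion, so a value containing whitespace would be passed as several arguments. A minimal illustration, with a hypothetical variable:

    dir='my dir'
    printf '<%s>\n' $dir     # word-split: prints <my> then <dir>
    printf '<%s>\n' "$dir"   # one argument: prints <my dir>
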
@@ -90,8 +90,8 @@ RUN useradd -m -s /bin/bash -N -u 1111 elyra && \
USER $NB_UID

# Setup work directory for backward-compatibility
- RUN mkdir /home/$NB_USER/work && \
- fix-permissions /home/$NB_USER
+ RUN mkdir "/home/$NB_USER/work" && \
+ fix-permissions "/home/$NB_USER"

# DOWNLOAD HADOOP AND SPARK
RUN curl -sL https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VER/hadoop-$HADOOP_VER.tar.gz | tar -xz -C /usr/hdp/current
@@ -109,8 +109,8 @@ RUN conda install mamba -n base -c conda-forge && \
'r-argparse' \
'python=3.10' && \
mamba clean -y --all &&\
- fix-permissions $ANACONDA_HOME && \
- fix-permissions /home/$NB_USER
+ fix-permissions "$ANACONDA_HOME" && \
+ fix-permissions "/home/$NB_USER"

USER $NB_UID
@@ -142,8 +142,8 @@ RUN cd /tmp && \
pip install /tmp/toree-0.5.0.tar.gz && \
jupyter toree install --spark_home=$SPARK_HOME --kernel_name="Spark $SPARK_VER" --interpreters=Scala && \
rm -f /tmp/toree-0.5.0.tar.gz && \
- fix-permissions $ANACONDA_HOME && \
- fix-permissions /home/$NB_USER
+ fix-permissions "$ANACONDA_HOME" && \
+ fix-permissions "/home/$NB_USER"

# SETUP PASSWORDLESS SSH FOR $NB_USER
RUN ssh-keygen -q -N "" -t rsa -f /home/$NB_USER/.ssh/id_rsa && \
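
The ssh-keygen line above generates an RSA key pair non-interactively: -q suppresses output, -N "" sets an empty passphrase, and -f names the key file. The rest of this RUN is not visible in the hunk; for reference, the conventional passwordless-SSH pattern it presumably continues into looks like this (a sketch, not the commit's actual continuation):

    ssh-keygen -q -N "" -t rsa -f "$HOME/.ssh/id_rsa"
    cat "$HOME/.ssh/id_rsa.pub" >> "$HOME/.ssh/authorized_keys"
    chmod 0600 "$HOME/.ssh/authorized_keys"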