From da9e8a3f0bc32bf8e06880826f051e56ff50884b Mon Sep 17 00:00:00 2001
From: Yikun Jiang
Date: Wed, 26 Apr 2023 09:10:25 +0800
Subject: [PATCH] Switch 3.4.0 default Java to Java17

Signed-off-by: Yikun Jiang
---
 .github/workflows/build_3.4.0.yaml            |   2 +-
 .github/workflows/publish.yml                 |  10 +-
 .../Dockerfile                                |  86 +++++++++++++
 .../entrypoint.sh                             | 114 ++++++++++++++++++
 .../Dockerfile                                |  83 +++++++++++++
 .../entrypoint.sh                             | 114 ++++++++++++++++++
 3.4.0/scala2.12-java17-r-ubuntu/Dockerfile    |  82 +++++++++++++
 3.4.0/scala2.12-java17-r-ubuntu/entrypoint.sh | 107 ++++++++++++++++
 3.4.0/scala2.12-java17-ubuntu/Dockerfile      |  79 ++++++++++++
 3.4.0/scala2.12-java17-ubuntu/entrypoint.sh   | 107 ++++++++++++++++
 add-dockerfiles.sh                            |   8 ++
 versions.json                                 |  19 +-
 12 files changed, 794 insertions(+), 17 deletions(-)
 create mode 100644 3.4.0/scala2.12-java17-python3-r-ubuntu/Dockerfile
 create mode 100644 3.4.0/scala2.12-java17-python3-r-ubuntu/entrypoint.sh
 create mode 100644 3.4.0/scala2.12-java17-python3-ubuntu/Dockerfile
 create mode 100644 3.4.0/scala2.12-java17-python3-ubuntu/entrypoint.sh
 create mode 100644 3.4.0/scala2.12-java17-r-ubuntu/Dockerfile
 create mode 100644 3.4.0/scala2.12-java17-r-ubuntu/entrypoint.sh
 create mode 100644 3.4.0/scala2.12-java17-ubuntu/Dockerfile
 create mode 100644 3.4.0/scala2.12-java17-ubuntu/entrypoint.sh

diff --git a/.github/workflows/build_3.4.0.yaml b/.github/workflows/build_3.4.0.yaml
index 8dd4e1e..522b770 100644
--- a/.github/workflows/build_3.4.0.yaml
+++ b/.github/workflows/build_3.4.0.yaml
@@ -39,5 +39,5 @@ jobs:
     with:
       spark: 3.4.0
       scala: 2.12
-      java: 11
+      java: 17
       image-type: ${{ matrix.image-type }}
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 70b88b8..ed4b537 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -27,11 +27,9 @@ on:
         required: true
         default: '3.4.0'
         type: choice
+        # GA Docker image publishing is supported since Apache Spark 3.4
         options:
         - 3.4.0
-        - 3.3.2
-        - 3.3.1
-        - 3.3.0
       publish:
         description: 'Publish the image or not.'
         default: false
@@ -50,11 +48,11 @@ on:
 
 jobs:
   run-build:
-    # if: startsWith(inputs.spark, '3.3')
+    # if: startsWith(inputs.spark, '3.4')
     strategy:
       matrix:
         scala: [2.12]
-        java: [11]
+        java: [17]
         image-type: ["all", "python", "scala", "r"]
     permissions:
       packages: write
@@ -67,4 +65,4 @@ jobs:
       java: ${{ matrix.java }}
       publish: ${{ inputs.publish }}
       repository: ${{ inputs.repository }}
-      image-type: ${{ matrix.image-type }}
+      image-type: ${{ matrix.image-type }}
\ No newline at end of file
diff --git a/3.4.0/scala2.12-java17-python3-r-ubuntu/Dockerfile b/3.4.0/scala2.12-java17-python3-r-ubuntu/Dockerfile
new file mode 100644
index 0000000..47bbfba
--- /dev/null
+++ b/3.4.0/scala2.12-java17-python3-r-ubuntu/Dockerfile
@@ -0,0 +1,86 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+FROM eclipse-temurin:17-jre
+
+ARG spark_uid=185
+
+RUN groupadd --system --gid=${spark_uid} spark && \
+    useradd --system --uid=${spark_uid} --gid=spark spark
+
+RUN set -ex && \
+    apt-get update && \
+    ln -s /lib /lib64 && \
+    apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
+    apt install -y python3 python3-pip && \
+    apt install -y r-base r-base-dev && \
+    mkdir -p /opt/spark && \
+    mkdir /opt/spark/python && \
+    mkdir -p /opt/spark/examples && \
+    mkdir -p /opt/spark/work-dir && \
+    touch /opt/spark/RELEASE && \
+    chown -R spark:spark /opt/spark && \
+    rm /bin/sh && \
+    ln -sv /bin/bash /bin/sh && \
+    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
+    chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
+    rm -rf /var/cache/apt/* && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install Apache Spark
+# https://downloads.apache.org/spark/KEYS
+ENV SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz \
+    SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz.asc \
+    GPG_KEY=CC68B3D16FE33A766705160BA7E57908C7A4E1B1
+
+RUN set -ex; \
+    export SPARK_TMP="$(mktemp -d)"; \
+    cd $SPARK_TMP; \
+    wget -nv -O spark.tgz "$SPARK_TGZ_URL"; \
+    wget -nv -O spark.tgz.asc "$SPARK_TGZ_ASC_URL"; \
+    export GNUPGHOME="$(mktemp -d)"; \
+    gpg --keyserver hkps://keys.openpgp.org --recv-key "$GPG_KEY" || \
+    gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys "$GPG_KEY"; \
+    gpg --batch --verify spark.tgz.asc spark.tgz; \
+    gpgconf --kill all; \
+    rm -rf "$GNUPGHOME" spark.tgz.asc; \
+    \
+    tar -xf spark.tgz --strip-components=1; \
+    chown -R spark:spark .; \
+    mv jars /opt/spark/; \
+    mv bin /opt/spark/; \
+    mv sbin /opt/spark/; \
+    mv kubernetes/dockerfiles/spark/decom.sh /opt/; \
+    mv examples /opt/spark/; \
+    mv kubernetes/tests /opt/spark/; \
+    mv data /opt/spark/; \
+    mv python/pyspark /opt/spark/python/pyspark/; \
+    mv python/lib /opt/spark/python/lib/; \
+    mv R /opt/spark/; \
+    cd ..; \
+    rm -rf "$SPARK_TMP";
+
+COPY entrypoint.sh /opt/
+
+ENV SPARK_HOME /opt/spark
+ENV R_HOME /usr/lib/R
+
+WORKDIR /opt/spark/work-dir
+RUN chmod g+w /opt/spark/work-dir
+RUN chmod a+x /opt/decom.sh
+RUN chmod a+x /opt/entrypoint.sh
+
+ENTRYPOINT [ "/opt/entrypoint.sh" ]
diff --git a/3.4.0/scala2.12-java17-python3-r-ubuntu/entrypoint.sh b/3.4.0/scala2.12-java17-python3-r-ubuntu/entrypoint.sh
new file mode 100644
index 0000000..4bb1557
--- /dev/null
+++ b/3.4.0/scala2.12-java17-python3-r-ubuntu/entrypoint.sh
@@ -0,0 +1,114 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Check whether there is a passwd entry for the container UID
+myuid=$(id -u)
+mygid=$(id -g)
+# turn off -e for getent because it will return error code in anonymous uid case
+set +e
+uidentry=$(getent passwd $myuid)
+set -e
+
+# If there is no passwd entry for the container UID, attempt to create one
+if [ -z "$uidentry" ] ; then
+    if [ -w /etc/passwd ] ; then
+        echo "$myuid:x:$myuid:$mygid:${SPARK_USER_NAME:-anonymous uid}:$SPARK_HOME:/bin/false" >> /etc/passwd
+    else
+        echo "Container ENTRYPOINT failed to add passwd entry for anonymous UID"
+    fi
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  JAVA_HOME=$(java -XshowSettings:properties -version 2>&1 > /dev/null | grep 'java.home' | awk '{print $3}')
+fi
+
+SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*"
+env | grep SPARK_JAVA_OPT_ | sort -t_ -k4 -n | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt
+readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt
+
+if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then
+  SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH"
+fi
+
+if ! [ -z ${PYSPARK_PYTHON+x} ]; then
+    export PYSPARK_PYTHON
+fi
+if ! [ -z ${PYSPARK_DRIVER_PYTHON+x} ]; then
+    export PYSPARK_DRIVER_PYTHON
+fi
+
+# If HADOOP_HOME is set and SPARK_DIST_CLASSPATH is not set, set it here so Hadoop jars are available to the executor.
+# It does not set SPARK_DIST_CLASSPATH if already set, to avoid overriding customizations of this value from elsewhere e.g. Docker/K8s.
+if [ -n "${HADOOP_HOME}" ] && [ -z "${SPARK_DIST_CLASSPATH}" ]; then
+  export SPARK_DIST_CLASSPATH="$($HADOOP_HOME/bin/hadoop classpath)"
+fi
+
+if ! [ -z ${HADOOP_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$HADOOP_CONF_DIR:$SPARK_CLASSPATH";
+fi
+
+if ! [ -z ${SPARK_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$SPARK_CONF_DIR:$SPARK_CLASSPATH";
+elif ! [ -z ${SPARK_HOME+x} ]; then
+  SPARK_CLASSPATH="$SPARK_HOME/conf:$SPARK_CLASSPATH";
+fi
+
+case "$1" in
+  driver)
+    shift 1
+    CMD=(
+      "$SPARK_HOME/bin/spark-submit"
+      --conf "spark.driver.bindAddress=$SPARK_DRIVER_BIND_ADDRESS"
+      --deploy-mode client
+      "$@"
+    )
+    ;;
+  executor)
+    shift 1
+    CMD=(
+      ${JAVA_HOME}/bin/java
+      "${SPARK_EXECUTOR_JAVA_OPTS[@]}"
+      -Xms$SPARK_EXECUTOR_MEMORY
+      -Xmx$SPARK_EXECUTOR_MEMORY
+      -cp "$SPARK_CLASSPATH:$SPARK_DIST_CLASSPATH"
+      org.apache.spark.scheduler.cluster.k8s.KubernetesExecutorBackend
+      --driver-url $SPARK_DRIVER_URL
+      --executor-id $SPARK_EXECUTOR_ID
+      --cores $SPARK_EXECUTOR_CORES
+      --app-id $SPARK_APPLICATION_ID
+      --hostname $SPARK_EXECUTOR_POD_IP
+      --resourceProfileId $SPARK_RESOURCE_PROFILE_ID
+      --podName $SPARK_EXECUTOR_POD_NAME
+    )
+    ;;
+
+  *)
+    # Non-spark-on-k8s command provided, proceeding in pass-through mode...
+    CMD=("$@")
+    ;;
+esac
+
+# Switch to spark if no USER specified (root by default) otherwise use USER directly
+switch_spark_if_root() {
+  if [ $(id -u) -eq 0 ]; then
+    echo gosu spark
+  fi
+}
+
+# Execute the container CMD under tini for better hygiene
+exec $(switch_spark_if_root) /usr/bin/tini -s -- "${CMD[@]}"
diff --git a/3.4.0/scala2.12-java17-python3-ubuntu/Dockerfile b/3.4.0/scala2.12-java17-python3-ubuntu/Dockerfile
new file mode 100644
index 0000000..da7ffa0
--- /dev/null
+++ b/3.4.0/scala2.12-java17-python3-ubuntu/Dockerfile
@@ -0,0 +1,83 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+FROM eclipse-temurin:17-jre
+
+ARG spark_uid=185
+
+RUN groupadd --system --gid=${spark_uid} spark && \
+    useradd --system --uid=${spark_uid} --gid=spark spark
+
+RUN set -ex && \
+    apt-get update && \
+    ln -s /lib /lib64 && \
+    apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
+    apt install -y python3 python3-pip && \
+    mkdir -p /opt/spark && \
+    mkdir /opt/spark/python && \
+    mkdir -p /opt/spark/examples && \
+    mkdir -p /opt/spark/work-dir && \
+    touch /opt/spark/RELEASE && \
+    chown -R spark:spark /opt/spark && \
+    rm /bin/sh && \
+    ln -sv /bin/bash /bin/sh && \
+    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
+    chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
+    rm -rf /var/cache/apt/* && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install Apache Spark
+# https://downloads.apache.org/spark/KEYS
+ENV SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz \
+    SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz.asc \
+    GPG_KEY=CC68B3D16FE33A766705160BA7E57908C7A4E1B1
+
+RUN set -ex; \
+    export SPARK_TMP="$(mktemp -d)"; \
+    cd $SPARK_TMP; \
+    wget -nv -O spark.tgz "$SPARK_TGZ_URL"; \
+    wget -nv -O spark.tgz.asc "$SPARK_TGZ_ASC_URL"; \
+    export GNUPGHOME="$(mktemp -d)"; \
+    gpg --keyserver hkps://keys.openpgp.org --recv-key "$GPG_KEY" || \
+    gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys "$GPG_KEY"; \
+    gpg --batch --verify spark.tgz.asc spark.tgz; \
+    gpgconf --kill all; \
+    rm -rf "$GNUPGHOME" spark.tgz.asc; \
+    \
+    tar -xf spark.tgz --strip-components=1; \
+    chown -R spark:spark .; \
+    mv jars /opt/spark/; \
+    mv bin /opt/spark/; \
+    mv sbin /opt/spark/; \
+    mv kubernetes/dockerfiles/spark/decom.sh /opt/; \
+    mv examples /opt/spark/; \
+    mv kubernetes/tests /opt/spark/; \
+    mv data /opt/spark/; \
+    mv python/pyspark /opt/spark/python/pyspark/; \
+    mv python/lib /opt/spark/python/lib/; \
+    cd ..; \
+    rm -rf "$SPARK_TMP";
+
+COPY entrypoint.sh /opt/
+
+ENV SPARK_HOME /opt/spark
+
+WORKDIR /opt/spark/work-dir
+RUN chmod g+w /opt/spark/work-dir
+RUN chmod a+x /opt/decom.sh
+RUN chmod a+x /opt/entrypoint.sh
+
+ENTRYPOINT [ "/opt/entrypoint.sh" ]
diff --git a/3.4.0/scala2.12-java17-python3-ubuntu/entrypoint.sh b/3.4.0/scala2.12-java17-python3-ubuntu/entrypoint.sh
new file mode 100644
index 0000000..4bb1557
--- /dev/null
+++ b/3.4.0/scala2.12-java17-python3-ubuntu/entrypoint.sh
@@ -0,0 +1,114 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Check whether there is a passwd entry for the container UID
+myuid=$(id -u)
+mygid=$(id -g)
+# turn off -e for getent because it will return error code in anonymous uid case
+set +e
+uidentry=$(getent passwd $myuid)
+set -e
+
+# If there is no passwd entry for the container UID, attempt to create one
+if [ -z "$uidentry" ] ; then
+    if [ -w /etc/passwd ] ; then
+        echo "$myuid:x:$myuid:$mygid:${SPARK_USER_NAME:-anonymous uid}:$SPARK_HOME:/bin/false" >> /etc/passwd
+    else
+        echo "Container ENTRYPOINT failed to add passwd entry for anonymous UID"
+    fi
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  JAVA_HOME=$(java -XshowSettings:properties -version 2>&1 > /dev/null | grep 'java.home' | awk '{print $3}')
+fi
+
+SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*"
+env | grep SPARK_JAVA_OPT_ | sort -t_ -k4 -n | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt
+readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt
+
+if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then
+  SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH"
+fi
+
+if ! [ -z ${PYSPARK_PYTHON+x} ]; then
+    export PYSPARK_PYTHON
+fi
+if ! [ -z ${PYSPARK_DRIVER_PYTHON+x} ]; then
+    export PYSPARK_DRIVER_PYTHON
+fi
+
+# If HADOOP_HOME is set and SPARK_DIST_CLASSPATH is not set, set it here so Hadoop jars are available to the executor.
+# It does not set SPARK_DIST_CLASSPATH if already set, to avoid overriding customizations of this value from elsewhere e.g. Docker/K8s.
+if [ -n "${HADOOP_HOME}" ] && [ -z "${SPARK_DIST_CLASSPATH}" ]; then
+  export SPARK_DIST_CLASSPATH="$($HADOOP_HOME/bin/hadoop classpath)"
+fi
+
+if ! [ -z ${HADOOP_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$HADOOP_CONF_DIR:$SPARK_CLASSPATH";
+fi
+
+if ! [ -z ${SPARK_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$SPARK_CONF_DIR:$SPARK_CLASSPATH";
+elif ! [ -z ${SPARK_HOME+x} ]; then
+  SPARK_CLASSPATH="$SPARK_HOME/conf:$SPARK_CLASSPATH";
+fi
+
+case "$1" in
+  driver)
+    shift 1
+    CMD=(
+      "$SPARK_HOME/bin/spark-submit"
+      --conf "spark.driver.bindAddress=$SPARK_DRIVER_BIND_ADDRESS"
+      --deploy-mode client
+      "$@"
+    )
+    ;;
+  executor)
+    shift 1
+    CMD=(
+      ${JAVA_HOME}/bin/java
+      "${SPARK_EXECUTOR_JAVA_OPTS[@]}"
+      -Xms$SPARK_EXECUTOR_MEMORY
+      -Xmx$SPARK_EXECUTOR_MEMORY
+      -cp "$SPARK_CLASSPATH:$SPARK_DIST_CLASSPATH"
+      org.apache.spark.scheduler.cluster.k8s.KubernetesExecutorBackend
+      --driver-url $SPARK_DRIVER_URL
+      --executor-id $SPARK_EXECUTOR_ID
+      --cores $SPARK_EXECUTOR_CORES
+      --app-id $SPARK_APPLICATION_ID
+      --hostname $SPARK_EXECUTOR_POD_IP
+      --resourceProfileId $SPARK_RESOURCE_PROFILE_ID
+      --podName $SPARK_EXECUTOR_POD_NAME
+    )
+    ;;
+
+  *)
+    # Non-spark-on-k8s command provided, proceeding in pass-through mode...
+    CMD=("$@")
+    ;;
+esac
+
+# Switch to spark if no USER specified (root by default) otherwise use USER directly
+switch_spark_if_root() {
+  if [ $(id -u) -eq 0 ]; then
+    echo gosu spark
+  fi
+}
+
+# Execute the container CMD under tini for better hygiene
+exec $(switch_spark_if_root) /usr/bin/tini -s -- "${CMD[@]}"
diff --git a/3.4.0/scala2.12-java17-r-ubuntu/Dockerfile b/3.4.0/scala2.12-java17-r-ubuntu/Dockerfile
new file mode 100644
index 0000000..4c9b617
--- /dev/null
+++ b/3.4.0/scala2.12-java17-r-ubuntu/Dockerfile
@@ -0,0 +1,82 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+FROM eclipse-temurin:17-jre
+
+ARG spark_uid=185
+
+RUN groupadd --system --gid=${spark_uid} spark && \
+    useradd --system --uid=${spark_uid} --gid=spark spark
+
+RUN set -ex && \
+    apt-get update && \
+    ln -s /lib /lib64 && \
+    apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
+    apt install -y r-base r-base-dev && \
+    mkdir -p /opt/spark && \
+    mkdir -p /opt/spark/examples && \
+    mkdir -p /opt/spark/work-dir && \
+    touch /opt/spark/RELEASE && \
+    chown -R spark:spark /opt/spark && \
+    rm /bin/sh && \
+    ln -sv /bin/bash /bin/sh && \
+    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
+    chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
+    rm -rf /var/cache/apt/* && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install Apache Spark
+# https://downloads.apache.org/spark/KEYS
+ENV SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz \
+    SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz.asc \
+    GPG_KEY=CC68B3D16FE33A766705160BA7E57908C7A4E1B1
+
+RUN set -ex; \
+    export SPARK_TMP="$(mktemp -d)"; \
+    cd $SPARK_TMP; \
+    wget -nv -O spark.tgz "$SPARK_TGZ_URL"; \
+    wget -nv -O spark.tgz.asc "$SPARK_TGZ_ASC_URL"; \
+    export GNUPGHOME="$(mktemp -d)"; \
+    gpg --keyserver hkps://keys.openpgp.org --recv-key "$GPG_KEY" || \
+    gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys "$GPG_KEY"; \
+    gpg --batch --verify spark.tgz.asc spark.tgz; \
+    gpgconf --kill all; \
+    rm -rf "$GNUPGHOME" spark.tgz.asc; \
+    \
+    tar -xf spark.tgz --strip-components=1; \
+    chown -R spark:spark .; \
+    mv jars /opt/spark/; \
+    mv bin /opt/spark/; \
+    mv sbin /opt/spark/; \
+    mv kubernetes/dockerfiles/spark/decom.sh /opt/; \
+    mv examples /opt/spark/; \
+    mv kubernetes/tests /opt/spark/; \
+    mv data /opt/spark/; \
+    mv R /opt/spark/; \
+    cd ..; \
+    rm -rf "$SPARK_TMP";
+
+COPY entrypoint.sh /opt/
+
+ENV SPARK_HOME /opt/spark
+ENV R_HOME /usr/lib/R
+
+WORKDIR /opt/spark/work-dir
+RUN chmod g+w /opt/spark/work-dir
+RUN chmod a+x /opt/decom.sh
+RUN chmod a+x /opt/entrypoint.sh
+
+ENTRYPOINT [ "/opt/entrypoint.sh" ]
diff --git a/3.4.0/scala2.12-java17-r-ubuntu/entrypoint.sh b/3.4.0/scala2.12-java17-r-ubuntu/entrypoint.sh
new file mode 100644
index 0000000..159d539
--- /dev/null
+++ b/3.4.0/scala2.12-java17-r-ubuntu/entrypoint.sh
@@ -0,0 +1,107 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Check whether there is a passwd entry for the container UID
+myuid=$(id -u)
+mygid=$(id -g)
+# turn off -e for getent because it will return error code in anonymous uid case
+set +e
+uidentry=$(getent passwd $myuid)
+set -e
+
+# If there is no passwd entry for the container UID, attempt to create one
+if [ -z "$uidentry" ] ; then
+    if [ -w /etc/passwd ] ; then
+        echo "$myuid:x:$myuid:$mygid:${SPARK_USER_NAME:-anonymous uid}:$SPARK_HOME:/bin/false" >> /etc/passwd
+    else
+        echo "Container ENTRYPOINT failed to add passwd entry for anonymous UID"
+    fi
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  JAVA_HOME=$(java -XshowSettings:properties -version 2>&1 > /dev/null | grep 'java.home' | awk '{print $3}')
+fi
+
+SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*"
+env | grep SPARK_JAVA_OPT_ | sort -t_ -k4 -n | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt
+readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt
+
+if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then
+  SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH"
+fi
+
+# If HADOOP_HOME is set and SPARK_DIST_CLASSPATH is not set, set it here so Hadoop jars are available to the executor.
+# It does not set SPARK_DIST_CLASSPATH if already set, to avoid overriding customizations of this value from elsewhere e.g. Docker/K8s.
+if [ -n "${HADOOP_HOME}" ] && [ -z "${SPARK_DIST_CLASSPATH}" ]; then
+  export SPARK_DIST_CLASSPATH="$($HADOOP_HOME/bin/hadoop classpath)"
+fi
+
+if ! [ -z ${HADOOP_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$HADOOP_CONF_DIR:$SPARK_CLASSPATH";
+fi
+
+if ! [ -z ${SPARK_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$SPARK_CONF_DIR:$SPARK_CLASSPATH";
+elif ! [ -z ${SPARK_HOME+x} ]; then
+  SPARK_CLASSPATH="$SPARK_HOME/conf:$SPARK_CLASSPATH";
+fi
+
+case "$1" in
+  driver)
+    shift 1
+    CMD=(
+      "$SPARK_HOME/bin/spark-submit"
+      --conf "spark.driver.bindAddress=$SPARK_DRIVER_BIND_ADDRESS"
+      --deploy-mode client
+      "$@"
+    )
+    ;;
+  executor)
+    shift 1
+    CMD=(
+      ${JAVA_HOME}/bin/java
+      "${SPARK_EXECUTOR_JAVA_OPTS[@]}"
+      -Xms$SPARK_EXECUTOR_MEMORY
+      -Xmx$SPARK_EXECUTOR_MEMORY
+      -cp "$SPARK_CLASSPATH:$SPARK_DIST_CLASSPATH"
+      org.apache.spark.scheduler.cluster.k8s.KubernetesExecutorBackend
+      --driver-url $SPARK_DRIVER_URL
+      --executor-id $SPARK_EXECUTOR_ID
+      --cores $SPARK_EXECUTOR_CORES
+      --app-id $SPARK_APPLICATION_ID
+      --hostname $SPARK_EXECUTOR_POD_IP
+      --resourceProfileId $SPARK_RESOURCE_PROFILE_ID
+      --podName $SPARK_EXECUTOR_POD_NAME
+    )
+    ;;
+
+  *)
+    # Non-spark-on-k8s command provided, proceeding in pass-through mode...
+    CMD=("$@")
+    ;;
+esac
+
+# Switch to spark if no USER specified (root by default) otherwise use USER directly
+switch_spark_if_root() {
+  if [ $(id -u) -eq 0 ]; then
+    echo gosu spark
+  fi
+}
+
+# Execute the container CMD under tini for better hygiene
+exec $(switch_spark_if_root) /usr/bin/tini -s -- "${CMD[@]}"
diff --git a/3.4.0/scala2.12-java17-ubuntu/Dockerfile b/3.4.0/scala2.12-java17-ubuntu/Dockerfile
new file mode 100644
index 0000000..c8aae28
--- /dev/null
+++ b/3.4.0/scala2.12-java17-ubuntu/Dockerfile
@@ -0,0 +1,79 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+FROM eclipse-temurin:17-jre
+
+ARG spark_uid=185
+
+RUN groupadd --system --gid=${spark_uid} spark && \
+    useradd --system --uid=${spark_uid} --gid=spark spark
+
+RUN set -ex && \
+    apt-get update && \
+    ln -s /lib /lib64 && \
+    apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
+    mkdir -p /opt/spark && \
+    mkdir -p /opt/spark/examples && \
+    mkdir -p /opt/spark/work-dir && \
+    touch /opt/spark/RELEASE && \
+    chown -R spark:spark /opt/spark && \
+    rm /bin/sh && \
+    ln -sv /bin/bash /bin/sh && \
+    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
+    chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
+    rm -rf /var/cache/apt/* && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install Apache Spark
+# https://downloads.apache.org/spark/KEYS
+ENV SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz \
+    SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz.asc \
+    GPG_KEY=CC68B3D16FE33A766705160BA7E57908C7A4E1B1
+
+RUN set -ex; \
+    export SPARK_TMP="$(mktemp -d)"; \
+    cd $SPARK_TMP; \
+    wget -nv -O spark.tgz "$SPARK_TGZ_URL"; \
+    wget -nv -O spark.tgz.asc "$SPARK_TGZ_ASC_URL"; \
+    export GNUPGHOME="$(mktemp -d)"; \
+    gpg --keyserver hkps://keys.openpgp.org --recv-key "$GPG_KEY" || \
+    gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys "$GPG_KEY"; \
+    gpg --batch --verify spark.tgz.asc spark.tgz; \
+    gpgconf --kill all; \
+    rm -rf "$GNUPGHOME" spark.tgz.asc; \
+    \
+    tar -xf spark.tgz --strip-components=1; \
+    chown -R spark:spark .; \
+    mv jars /opt/spark/; \
+    mv bin /opt/spark/; \
+    mv sbin /opt/spark/; \
+    mv kubernetes/dockerfiles/spark/decom.sh /opt/; \
+    mv examples /opt/spark/; \
+    mv kubernetes/tests /opt/spark/; \
+    mv data /opt/spark/; \
+    cd ..; \
+    rm -rf "$SPARK_TMP";
+
+COPY entrypoint.sh /opt/
+
+ENV SPARK_HOME /opt/spark
+
+WORKDIR /opt/spark/work-dir
+RUN chmod g+w /opt/spark/work-dir
+RUN chmod a+x /opt/decom.sh
+RUN chmod a+x /opt/entrypoint.sh
+
+ENTRYPOINT [ "/opt/entrypoint.sh" ]
diff --git a/3.4.0/scala2.12-java17-ubuntu/entrypoint.sh b/3.4.0/scala2.12-java17-ubuntu/entrypoint.sh
new file mode 100644
index 0000000..159d539
--- /dev/null
+++ b/3.4.0/scala2.12-java17-ubuntu/entrypoint.sh
@@ -0,0 +1,107 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Check whether there is a passwd entry for the container UID
+myuid=$(id -u)
+mygid=$(id -g)
+# turn off -e for getent because it will return error code in anonymous uid case
+set +e
+uidentry=$(getent passwd $myuid)
+set -e
+
+# If there is no passwd entry for the container UID, attempt to create one
+if [ -z "$uidentry" ] ; then
+    if [ -w /etc/passwd ] ; then
+        echo "$myuid:x:$myuid:$mygid:${SPARK_USER_NAME:-anonymous uid}:$SPARK_HOME:/bin/false" >> /etc/passwd
+    else
+        echo "Container ENTRYPOINT failed to add passwd entry for anonymous UID"
+    fi
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  JAVA_HOME=$(java -XshowSettings:properties -version 2>&1 > /dev/null | grep 'java.home' | awk '{print $3}')
+fi
+
+SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*"
+env | grep SPARK_JAVA_OPT_ | sort -t_ -k4 -n | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt
+readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt
+
+if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then
+  SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH"
+fi
+
+# If HADOOP_HOME is set and SPARK_DIST_CLASSPATH is not set, set it here so Hadoop jars are available to the executor.
+# It does not set SPARK_DIST_CLASSPATH if already set, to avoid overriding customizations of this value from elsewhere e.g. Docker/K8s.
+if [ -n "${HADOOP_HOME}" ] && [ -z "${SPARK_DIST_CLASSPATH}" ]; then
+  export SPARK_DIST_CLASSPATH="$($HADOOP_HOME/bin/hadoop classpath)"
+fi
+
+if ! [ -z ${HADOOP_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$HADOOP_CONF_DIR:$SPARK_CLASSPATH";
+fi
+
+if ! [ -z ${SPARK_CONF_DIR+x} ]; then
+  SPARK_CLASSPATH="$SPARK_CONF_DIR:$SPARK_CLASSPATH";
+elif ! [ -z ${SPARK_HOME+x} ]; then
+  SPARK_CLASSPATH="$SPARK_HOME/conf:$SPARK_CLASSPATH";
+fi
+
+case "$1" in
+  driver)
+    shift 1
+    CMD=(
+      "$SPARK_HOME/bin/spark-submit"
+      --conf "spark.driver.bindAddress=$SPARK_DRIVER_BIND_ADDRESS"
+      --deploy-mode client
+      "$@"
+    )
+    ;;
+  executor)
+    shift 1
+    CMD=(
+      ${JAVA_HOME}/bin/java
+      "${SPARK_EXECUTOR_JAVA_OPTS[@]}"
+      -Xms$SPARK_EXECUTOR_MEMORY
+      -Xmx$SPARK_EXECUTOR_MEMORY
+      -cp "$SPARK_CLASSPATH:$SPARK_DIST_CLASSPATH"
+      org.apache.spark.scheduler.cluster.k8s.KubernetesExecutorBackend
+      --driver-url $SPARK_DRIVER_URL
+      --executor-id $SPARK_EXECUTOR_ID
+      --cores $SPARK_EXECUTOR_CORES
+      --app-id $SPARK_APPLICATION_ID
+      --hostname $SPARK_EXECUTOR_POD_IP
+      --resourceProfileId $SPARK_RESOURCE_PROFILE_ID
+      --podName $SPARK_EXECUTOR_POD_NAME
+    )
+    ;;
+
+  *)
+    # Non-spark-on-k8s command provided, proceeding in pass-through mode...
+    CMD=("$@")
+    ;;
+esac
+
+# Switch to spark if no USER specified (root by default) otherwise use USER directly
+switch_spark_if_root() {
+  if [ $(id -u) -eq 0 ]; then
+    echo gosu spark
+  fi
+}
+
+# Execute the container CMD under tini for better hygiene
+exec $(switch_spark_if_root) /usr/bin/tini -s -- "${CMD[@]}"
diff --git a/add-dockerfiles.sh b/add-dockerfiles.sh
index 1683f33..a82f2ba 100755
--- a/add-dockerfiles.sh
+++ b/add-dockerfiles.sh
@@ -33,6 +33,10 @@ scala2.12-java11-python3-r-ubuntu
 scala2.12-java11-python3-ubuntu
 scala2.12-java11-r-ubuntu
 scala2.12-java11-ubuntu
+scala2.12-java17-python3-r-ubuntu
+scala2.12-java17-python3-ubuntu
+scala2.12-java17-r-ubuntu
+scala2.12-java17-ubuntu
 "
 
 for TAG in $TAGS; do
@@ -45,6 +49,10 @@ for TAG in $TAGS; do
         OPTS+=" --sparkr"
     fi
 
+    if echo $TAG | grep -q "java17"; then
+        OPTS+=" --image eclipse-temurin:17-jre"
+    fi
+
     OPTS+=" --spark-version $VERSION"
 
     mkdir -p $VERSION/$TAG
diff --git a/versions.json b/versions.json
index edc20b6..30a4bf3 100644
--- a/versions.json
+++ b/versions.json
@@ -1,35 +1,34 @@
 {
   "versions": [
     {
-      "path": "3.4.0/scala2.12-java11-python3-ubuntu",
+      "path": "3.4.0/scala2.12-java17-python3-ubuntu",
       "tags": [
-        "3.4.0-scala2.12-java11-python3-ubuntu",
+        "3.4.0-scala2.12-java17-python3-ubuntu",
         "3.4.0-python3",
         "3.4.0",
-        "python3",
-        "latest"
+        "python3"
       ]
     },
     {
-      "path": "3.4.0/scala2.12-java11-r-ubuntu",
+      "path": "3.4.0/scala2.12-java17-r-ubuntu",
       "tags": [
-        "3.4.0-scala2.12-java11-r-ubuntu",
+        "3.4.0-scala2.12-java17-r-ubuntu",
         "3.4.0-r",
         "r"
       ]
     },
     {
-      "path": "3.4.0/scala2.12-java11-ubuntu",
+      "path": "3.4.0/scala2.12-java17-ubuntu",
       "tags": [
-        "3.4.0-scala2.12-java11-ubuntu",
+        "3.4.0-scala2.12-java17-ubuntu",
         "3.4.0-scala",
         "scala"
       ]
     },
     {
-      "path": "3.4.0/scala2.12-java11-python3-r-ubuntu",
+      "path": "3.4.0/scala2.12-java17-python3-r-ubuntu",
       "tags": [
-        "3.4.0-scala2.12-java11-python3-r-ubuntu"
+        "3.4.0-scala2.12-java17-python3-r-ubuntu"
       ]
     },
     {