From 3785f4a883749372670b40008f7a099411912fc1 Mon Sep 17 00:00:00 2001 From: Wang Zhuo Date: Tue, 22 Apr 2025 19:42:43 +0800 Subject: [PATCH 1/7] adding spark 3.5.4 --- APPLICATION/spark/3.5.4/Dockerfile | 80 ++++++++++++++++ APPLICATION/spark/3.5.4/entrypoint.sh | 130 ++++++++++++++++++++++++++ APPLICATION/spark/buildspec.yml | 12 +-- 3 files changed, 216 insertions(+), 6 deletions(-) create mode 100644 APPLICATION/spark/3.5.4/Dockerfile create mode 100644 APPLICATION/spark/3.5.4/entrypoint.sh diff --git a/APPLICATION/spark/3.5.4/Dockerfile b/APPLICATION/spark/3.5.4/Dockerfile new file mode 100644 index 0000000..70cae70 --- /dev/null +++ b/APPLICATION/spark/3.5.4/Dockerfile @@ -0,0 +1,80 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +FROM dragonwell-registry.cn-hangzhou.cr.aliyuncs.com/dragonwell/dragonwell:8-ubuntu +ARG spark_uid=185 + +RUN groupadd --system --gid=${spark_uid} spark && \ + useradd --system --uid=${spark_uid} --gid=spark spark + +RUN set -ex; \ + apt-get update; \ + apt-get install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu libnss-wrapper; \ + mkdir -p /opt/spark; \ + mkdir /opt/spark/python; \ + mkdir -p /opt/spark/examples; \ + mkdir -p /opt/spark/work-dir; \ + chmod g+w /opt/spark/work-dir; \ + touch /opt/spark/RELEASE; \ + chown -R spark:spark /opt/spark; \ + echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su; \ + rm -rf /var/lib/apt/lists/* + +# Install Apache Spark +# https://downloads.apache.org/spark/KEYS +ENV SPARK_TGZ_URL=https://archive.apache.org/dist/spark/spark-3.5.4/spark-3.5.4-bin-hadoop3.tgz \ + SPARK_TGZ_ASC_URL=https://archive.apache.org/dist/spark/spark-3.5.4/spark-3.5.4-bin-hadoop3.tgz.asc \ + GPG_KEY=19F745C40A0E550420BB2C522541488DA93FE4B4 + +RUN set -ex; \ + export SPARK_TMP="$(mktemp -d)"; \ + cd $SPARK_TMP; \ + wget -nv -O spark.tgz "$SPARK_TGZ_URL"; \ + wget -nv -O spark.tgz.asc "$SPARK_TGZ_ASC_URL"; \ + export GNUPGHOME="$(mktemp -d)"; \ + gpg --batch --keyserver hkps://keys.openpgp.org --recv-key "$GPG_KEY" || \ + gpg --batch --keyserver hkps://keyserver.ubuntu.com --recv-keys "$GPG_KEY"; \ + gpg --batch --verify spark.tgz.asc spark.tgz; \ + gpgconf --kill all; \ + rm -rf "$GNUPGHOME" spark.tgz.asc; \ + \ + tar -xf spark.tgz --strip-components=1; \ + chown -R spark:spark .; \ + mv jars /opt/spark/; \ + mv RELEASE /opt/spark/; \ + mv bin /opt/spark/; \ + mv sbin /opt/spark/; \ + mv kubernetes/dockerfiles/spark/decom.sh /opt/; \ + mv examples /opt/spark/; \ + ln -s "$(basename /opt/spark/examples/jars/spark-examples_*.jar)" /opt/spark/examples/jars/spark-examples.jar; \ + mv kubernetes/tests /opt/spark/; \ + mv data /opt/spark/; \ + mv python/pyspark /opt/spark/python/pyspark/; \ + mv python/lib 
/opt/spark/python/lib/; \ + mv R /opt/spark/; \ + chmod a+x /opt/decom.sh; \ + cd ..; \ + rm -rf "$SPARK_TMP"; + +COPY entrypoint.sh /opt/ + +ENV SPARK_HOME /opt/spark + +WORKDIR /opt/spark/work-dir + +USER spark + +ENTRYPOINT [ "/opt/entrypoint.sh" ] diff --git a/APPLICATION/spark/3.5.4/entrypoint.sh b/APPLICATION/spark/3.5.4/entrypoint.sh new file mode 100644 index 0000000..c576d8f --- /dev/null +++ b/APPLICATION/spark/3.5.4/entrypoint.sh @@ -0,0 +1,130 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Prevent any errors from being silently ignored +set -eo pipefail + +attempt_setup_fake_passwd_entry() { + # Check whether there is a passwd entry for the container UID + local myuid; myuid="$(id -u)" + # If there is no passwd entry for the container UID, attempt to fake one + # You can also refer to the https://github.com/docker-library/official-images/pull/13089#issuecomment-1534706523 + # It's to resolve OpenShift random UID case. + # See also: https://github.com/docker-library/postgres/pull/448 + if ! 
getent passwd "$myuid" &> /dev/null; then + local wrapper + for wrapper in {/usr,}/lib{/*,}/libnss_wrapper.so; do + if [ -s "$wrapper" ]; then + NSS_WRAPPER_PASSWD="$(mktemp)" + NSS_WRAPPER_GROUP="$(mktemp)" + export LD_PRELOAD="$wrapper" NSS_WRAPPER_PASSWD NSS_WRAPPER_GROUP + local mygid; mygid="$(id -g)" + printf 'spark:x:%s:%s:%s:%s:/bin/false\n' "$myuid" "$mygid" "${SPARK_USER_NAME:-anonymous uid}" "$SPARK_HOME" > "$NSS_WRAPPER_PASSWD" + printf 'spark:x:%s:\n' "$mygid" > "$NSS_WRAPPER_GROUP" + break + fi + done + fi +} + +if [ -z "$JAVA_HOME" ]; then + JAVA_HOME=$(java -XshowSettings:properties -version 2>&1 > /dev/null | grep 'java.home' | awk '{print $3}') +fi + +SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*" +for v in "${!SPARK_JAVA_OPT_@}"; do + SPARK_EXECUTOR_JAVA_OPTS+=( "${!v}" ) +done + +if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then + SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH" +fi + +if ! [ -z "${PYSPARK_PYTHON+x}" ]; then + export PYSPARK_PYTHON +fi +if ! [ -z "${PYSPARK_DRIVER_PYTHON+x}" ]; then + export PYSPARK_DRIVER_PYTHON +fi + +# If HADOOP_HOME is set and SPARK_DIST_CLASSPATH is not set, set it here so Hadoop jars are available to the executor. +# It does not set SPARK_DIST_CLASSPATH if already set, to avoid overriding customizations of this value from elsewhere e.g. Docker/K8s. +if [ -n "${HADOOP_HOME}" ] && [ -z "${SPARK_DIST_CLASSPATH}" ]; then + export SPARK_DIST_CLASSPATH="$($HADOOP_HOME/bin/hadoop classpath)" +fi + +if ! [ -z "${HADOOP_CONF_DIR+x}" ]; then + SPARK_CLASSPATH="$HADOOP_CONF_DIR:$SPARK_CLASSPATH"; +fi + +if ! [ -z "${SPARK_CONF_DIR+x}" ]; then + SPARK_CLASSPATH="$SPARK_CONF_DIR:$SPARK_CLASSPATH"; +elif ! 
[ -z "${SPARK_HOME+x}" ]; then + SPARK_CLASSPATH="$SPARK_HOME/conf:$SPARK_CLASSPATH"; +fi + +# SPARK-43540: add current working directory into executor classpath +SPARK_CLASSPATH="$SPARK_CLASSPATH:$PWD" + +# Switch to spark if no USER specified (root by default) otherwise use USER directly +switch_spark_if_root() { + if [ $(id -u) -eq 0 ]; then + echo gosu spark + fi +} + +case "$1" in + driver) + shift 1 + CMD=( + "$SPARK_HOME/bin/spark-submit" + --conf "spark.driver.bindAddress=$SPARK_DRIVER_BIND_ADDRESS" + --conf "spark.executorEnv.SPARK_DRIVER_POD_IP=$SPARK_DRIVER_BIND_ADDRESS" + --deploy-mode client + "$@" + ) + attempt_setup_fake_passwd_entry + # Execute the container CMD under tini for better hygiene + exec $(switch_spark_if_root) /usr/bin/tini -s -- "${CMD[@]}" + ;; + executor) + shift 1 + CMD=( + ${JAVA_HOME}/bin/java + "${SPARK_EXECUTOR_JAVA_OPTS[@]}" + -Xms"$SPARK_EXECUTOR_MEMORY" + -Xmx"$SPARK_EXECUTOR_MEMORY" + -cp "$SPARK_CLASSPATH:$SPARK_DIST_CLASSPATH" + org.apache.spark.scheduler.cluster.k8s.KubernetesExecutorBackend + --driver-url "$SPARK_DRIVER_URL" + --executor-id "$SPARK_EXECUTOR_ID" + --cores "$SPARK_EXECUTOR_CORES" + --app-id "$SPARK_APPLICATION_ID" + --hostname "$SPARK_EXECUTOR_POD_IP" + --resourceProfileId "$SPARK_RESOURCE_PROFILE_ID" + --podName "$SPARK_EXECUTOR_POD_NAME" + ) + attempt_setup_fake_passwd_entry + # Execute the container CMD under tini for better hygiene + exec $(switch_spark_if_root) /usr/bin/tini -s -- "${CMD[@]}" + ;; + + *) + # Non-spark-on-k8s command provided, proceeding in pass-through mode... 
+ exec "$@" + ;; + esac diff --git a/APPLICATION/spark/buildspec.yml b/APPLICATION/spark/buildspec.yml index 2a5d879..47e288a 100644 --- a/APPLICATION/spark/buildspec.yml +++ b/APPLICATION/spark/buildspec.yml @@ -20,9 +20,9 @@ images: BuildSparkECSApplicationDockerImage: build: true docker_file: - path: APPLICATION/spark/general/Dockerfile + path: APPLICATION/spark/3.5.4/Dockerfile variable: - # - &SPARK35_JAVA8_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.6")] # - &SPARK33_JAVA11_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.6")] # - &SPARK33_JAVA8_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.8")] # - &SPARK33_JAVA11_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.8")] @@ -41,10 +41,10 @@ images: # - ["3.3.3-java8-8.6", "3.3-java8-8.6", "3-java8-8.6"] # - ["3.3.3-java11-8.6", "3.3-java11-8.6", "3-java11-8.6", "3.3.3-8.6", "3.3-8.6", "3-8.6"] # - ["3.3.3-java8-8.8", "3.3-java8-8.8", "3-java8-8.8"] # - ["3.3.3-java11-8.8", "3.3-java11-8.8", "3-java11-8.8", "3.3.3-8.8", "3.3-8.8", "3-8.8"] - - ["3.3.3-java8-3", "3.3-java8-3", "3-java8-3"] - - ["3.3.3-java11-3", "3.3-java11-3", "3-java11-3", "3.3.3-3", "3.3-3", "3-3"] - - ["3.3.3-java8-23", "3.3-java8-23", "3-java8-23", "3.3.3-java8", "3.3-java8", "3-java8"] - - ["3.3.3-java11-23", "3.3-java11-23", "3-java11-23", "3.3.3-23", "3.3-23", "3-23", "3.3.3", "3.3", "3", "latest"] + - ["3.5.4-java8-3", "3.5-java8-3", "3-java8-3"] + - ["3.5.4-java11-3", "3.5-java11-3", "3-java11-3", "3.5.4-3", "3.5-3", "3-3"] + - ["3.5.4-java8-23", "3.5-java8-23", "3-java8-23", "3.5.4-java8", "3.5-java8", "3-java8"] + - 
["3.5.4-java11-23", "3.5-java11-23", "3-java11-23", "3.5.4-23", "3.5-23", "3-23", "3.5.4", "3.5", "3", "latest"] registry: [*ACR_ALINUX, *ACR_ALINUX, *ACR_ANOLIS, *ACR_ANOLIS] test_config: # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] -- Gitee From e0b2ea536052487147e406aed8dfca522c4ce43a Mon Sep 17 00:00:00 2001 From: Wang Zhuo Date: Wed, 23 Apr 2025 14:32:57 +0800 Subject: [PATCH 2/7] adding execution permit for entrypoint.sh --- APPLICATION/spark/3.5.4/entrypoint.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 APPLICATION/spark/3.5.4/entrypoint.sh diff --git a/APPLICATION/spark/3.5.4/entrypoint.sh b/APPLICATION/spark/3.5.4/entrypoint.sh old mode 100644 new mode 100755 -- Gitee From e256bd59a069fc788a081616231fe602493f50ba Mon Sep 17 00:00:00 2001 From: Wang Zhuo Date: Thu, 24 Apr 2025 11:03:25 +0800 Subject: [PATCH 3/7] switch to jdk11 --- APPLICATION/spark/3.5.4/Dockerfile | 2 +- APPLICATION/spark/3.5.4/entrypoint.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/APPLICATION/spark/3.5.4/Dockerfile b/APPLICATION/spark/3.5.4/Dockerfile index 70cae70..a422066 100644 --- a/APPLICATION/spark/3.5.4/Dockerfile +++ b/APPLICATION/spark/3.5.4/Dockerfile @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -FROM dragonwell-registry.cn-hangzhou.cr.aliyuncs.com/dragonwell/dragonwell:8-ubuntu +FROM dragonwell-registry.cn-hangzhou.cr.aliyuncs.com/dragonwell/dragonwell:11-ubuntu ARG spark_uid=185 RUN groupadd --system --gid=${spark_uid} spark && \ diff --git a/APPLICATION/spark/3.5.4/entrypoint.sh b/APPLICATION/spark/3.5.4/entrypoint.sh index c576d8f..c008a4b 100755 --- a/APPLICATION/spark/3.5.4/entrypoint.sh +++ b/APPLICATION/spark/3.5.4/entrypoint.sh @@ -108,6 +108,7 @@ case "$1" in "${SPARK_EXECUTOR_JAVA_OPTS[@]}" -Xms"$SPARK_EXECUTOR_MEMORY" -Xmx"$SPARK_EXECUTOR_MEMORY" + -XX:+UseParallelGC -cp "$SPARK_CLASSPATH:$SPARK_DIST_CLASSPATH" org.apache.spark.scheduler.cluster.k8s.KubernetesExecutorBackend --driver-url "$SPARK_DRIVER_URL" -- Gitee From 5e4847fd7df59cdef9ea42810bc666cbc35a7c65 Mon Sep 17 00:00:00 2001 From: Wang Zhuo Date: Tue, 29 Apr 2025 19:21:35 +0800 Subject: [PATCH 4/7] adding 3.3.1 files --- APPLICATION/spark/3.3.1/Dockerfile | 7 +++++++ APPLICATION/spark/buildspec.yml | 18 ++++-------------- 2 files changed, 11 insertions(+), 14 deletions(-) create mode 100644 APPLICATION/spark/3.3.1/Dockerfile diff --git a/APPLICATION/spark/3.3.1/Dockerfile b/APPLICATION/spark/3.3.1/Dockerfile new file mode 100644 index 0000000..a90d5b4 --- /dev/null +++ b/APPLICATION/spark/3.3.1/Dockerfile @@ -0,0 +1,7 @@ +FROM apache/spark:3.3.1 +USER root +RUN rm -rf /usr/local/openjdk-11 +ADD https://github.com/dragonwell-project/dragonwell11/releases/download/dragonwell-extended-11.0.26.23_jdk-11.0.26-ga/Alibaba_Dragonwell_Extended_11.0.26.23.4_x64_linux.tar.gz ./dragonwell11.tar.gz +RUN tar xvf dragonwell11.tar.gz +RUN mv dragonwell-11.0.26.23+4-GA /usr/local/openjdk-11 +RUN rm -rf dragonwell11.tar.gz diff --git a/APPLICATION/spark/buildspec.yml b/APPLICATION/spark/buildspec.yml index 47e288a..42dfdfb 100644 --- a/APPLICATION/spark/buildspec.yml +++ b/APPLICATION/spark/buildspec.yml @@ -20,31 +20,24 @@ images: BuildSparkECSApplicationDockerImage: build: true docker_file: - 
path: APPLICATION/spark/3.5.4/Dockerfile + path: APPLICATION/spark/3.3.1/Dockerfile variable: # - &SPARK35_JAVA8_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.6")] # - &SPARK33_JAVA11_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.6")] # - &SPARK33_JAVA8_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.8")] # - &SPARK33_JAVA11_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.8")] - - &SPARK33_JAVA8_A3 [(BASE_OS, alibaba-cloud-linux-3-registry.cn-hangzhou.cr.aliyuncs.com/alinux3/alinux3), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "3")] - - &SPARK33_JAVA11_A3 [(BASE_OS, alibaba-cloud-linux-3-registry.cn-hangzhou.cr.aliyuncs.com/alinux3/alinux3), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "3")] - - &SPARK33_JAVA8_A23 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:23), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "23")] - - &SPARK33_JAVA11_A23 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:23), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "23")] + - &SPARK33_JAVA11_A3 [(BASE_OS, alibaba-cloud-linux-3-registry.cn-hangzhou.cr.aliyuncs.com/alinux3/alinux3), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "11"), (OS_SUFFIX, "3")] scene: args: [ # *SPARK33_JAVA8_A86, *SPARK33_JAVA11_A86, # *SPARK33_JAVA8_A88, *SPARK33_JAVA11_A88, - *SPARK33_JAVA8_A3, *SPARK33_JAVA11_A3, - *SPARK33_JAVA8_A23, *SPARK33_JAVA11_A23] + *SPARK33_JAVA11_A3] tags: # - ["3.3.3-java8-8.6", "3.3-java8-8.6", "3-java8-8.6"] # - ["3.3.3-java11-8.6", "3.3-java11-8.6", "3-java11-8.6", "3.3.3-8.6", "3.3-8.6", "3-8.6"] # - ["3.3.3-java8-8.8", "3.3-java8-8.8", "3-java8-8.8"] # - ["3.3.3-java11-8.8", "3.3-java11-8.8", 
"3-java11-8.8", "3.3.3-8.8", "3.3-8.8", "3-8.8"] - - ["3.5.4-java8-3", "3.5-java8-3", "3-java8-3"] - - ["3.5.4-java11-3", "3.5-java11-3", "3-java11-3", "3.5.4-3", "3.5-4", "3-5"] - - ["3.5.4-java8-23", "3.5-java8-23", "3-java8-23", "3.5.4-java8", "3.5-java8", "3-java8"] - - ["3.5.4-java11-23", "3.5-java11-23", "3-java11-23", "3.5.4-23", "3.3-23", "3-23", "3.5.4", "3.5", "3", "latest"] + - ["3.3.1-java11-3", "3.3-java11-3", "3-java11-3", "3.3.1-3", "3.3-1", "3-3"] registry: [*ACR_ALINUX, *ACR_ALINUX, *ACR_ANOLIS, *ACR_ANOLIS] test_config: # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] @@ -52,6 +45,3 @@ images: # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] - - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] - - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] - - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] -- Gitee From 8f00e291497f3dec4531f79632500165e5174484 Mon Sep 17 00:00:00 2001 From: Wang Zhuo Date: Tue, 29 Apr 2025 19:33:34 +0800 Subject: [PATCH 5/7] change array length --- 
APPLICATION/spark/buildspec.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/APPLICATION/spark/buildspec.yml b/APPLICATION/spark/buildspec.yml index 42dfdfb..21c3a64 100644 --- a/APPLICATION/spark/buildspec.yml +++ b/APPLICATION/spark/buildspec.yml @@ -37,7 +37,7 @@ images: # - ["3.3.3-java11-8.6", "3.3-java11-8.6", "3-java11-8.6", "3.3.3-8.6", "3.3-8.6", "3-8.6"] # - ["3.3.3-java8-8.8", "3.3-java8-8.8", "3-java8-8.8"] # - ["3.3.3-java11-8.8", "3.3-java11-8.8", "3-java11-8.8", "3.3.3-8.8", "3.3-8.8", "3-8.8"] - - ["3.3.1-java11-3", "3.3-java11-3", "3-java11-3", "3.3.1-3", "3.3-1", "3-3"] + - ["3.3.1-java11-3", "3.3-java11-3", "3-java11-3", "3.3.1-3"] registry: [*ACR_ALINUX, *ACR_ALINUX, *ACR_ANOLIS, *ACR_ANOLIS] test_config: # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] -- Gitee From 6aaf3952883c45dcbd9eeb01acbff14c603aea33 Mon Sep 17 00:00:00 2001 From: Wang Zhuo Date: Tue, 29 Apr 2025 19:36:06 +0800 Subject: [PATCH 6/7] change array --- APPLICATION/spark/buildspec.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/APPLICATION/spark/buildspec.yml b/APPLICATION/spark/buildspec.yml index 21c3a64..b004241 100644 --- a/APPLICATION/spark/buildspec.yml +++ b/APPLICATION/spark/buildspec.yml @@ -37,7 +37,7 @@ images: # - ["3.3.3-java11-8.6", "3.3-java11-8.6", "3-java11-8.6", "3.3.3-8.6", "3.3-8.6", "3-8.6"] # - ["3.3.3-java8-8.8", "3.3-java8-8.8", "3-java8-8.8"] # - ["3.3.3-java11-8.8", "3.3-java11-8.8", "3-java11-8.8", "3.3.3-8.8", "3.3-8.8", "3-8.8"] - - ["3.3.1-java11-3", "3.3-java11-3", "3-java11-3", "3.3.1-3"] + - ["3.3.1-dragonwell11-3"] registry: [*ACR_ALINUX, *ACR_ALINUX, *ACR_ANOLIS, *ACR_ANOLIS] test_config: # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] -- Gitee 
From cf50f5acc699ef644222d99d8dfb16b3509d1d12 Mon Sep 17 00:00:00 2001 From: Wang Zhuo Date: Tue, 29 Apr 2025 19:40:51 +0800 Subject: [PATCH 7/7] change tags --- APPLICATION/spark/buildspec.yml | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/APPLICATION/spark/buildspec.yml b/APPLICATION/spark/buildspec.yml index b004241..a68d85d 100644 --- a/APPLICATION/spark/buildspec.yml +++ b/APPLICATION/spark/buildspec.yml @@ -22,22 +22,29 @@ images: docker_file: path: APPLICATION/spark/3.3.1/Dockerfile variable: - # - &SPARK35_JAVA8_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.6")] - # - &SPARK33_JAVA11_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.6")] - # - &SPARK33_JAVA8_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.8")] - # - &SPARK33_JAVA11_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.3"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.8")] + # - &SPARK35_JAVA8_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.6")] + # - &SPARK33_JAVA11_A86 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.6), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.6")] + # - &SPARK33_JAVA8_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "8"), (OS_SUFFIX, "8.8")] + # - &SPARK33_JAVA11_A88 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:8.8), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "11"), (OS_SUFFIX, "8.8")] + - &SPARK33_JAVA8_A3 [(BASE_OS, alibaba-cloud-linux-3-registry.cn-hangzhou.cr.aliyuncs.com/alinux3/alinux3), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "8"), (OS_SUFFIX, "3")] - &SPARK33_JAVA11_A3 [(BASE_OS, 
alibaba-cloud-linux-3-registry.cn-hangzhou.cr.aliyuncs.com/alinux3/alinux3), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "11"), (OS_SUFFIX, "3")] + - &SPARK33_JAVA8_A23 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:23), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "8"), (OS_SUFFIX, "23")] + - &SPARK33_JAVA11_A23 [(BASE_OS, registry.openanolis.cn/openanolis/anolisos:23), (SPARK_VERSION, "3.3.1"), (JAVA_VERSION, "11"), (OS_SUFFIX, "23")] scene: args: [ # *SPARK33_JAVA8_A86, *SPARK33_JAVA11_A86, # *SPARK33_JAVA8_A88, *SPARK33_JAVA11_A88, - *SPARK33_JAVA11_A3] + *SPARK33_JAVA8_A3, *SPARK33_JAVA11_A3, + *SPARK33_JAVA8_A23, *SPARK33_JAVA11_A23] tags: # - ["3.3.3-java8-8.6", "3.3-java8-8.6", "3-java8-8.6"] # - ["3.3.3-java11-8.6", "3.3-java11-8.6", "3-java11-8.6", "3.3.3-8.6", "3.3-8.6", "3-8.6"] # - ["3.3.3-java8-8.8", "3.3-java8-8.8", "3-java8-8.8"] # - ["3.3.3-java11-8.8", "3.3-java11-8.8", "3-java11-8.8", "3.3.3-8.8", "3.3-8.8", "3-8.8"] - ["3.3.1-dragonwell11-3"] + - ["3.3.1-java8-3", "3.3-java8-3", "3-java8-3"] + - ["3.3.1-java11-3", "3.3-java11-3", "3-java11-3", "3.3.1-3", "3.3-3", "3-3"] + - ["3.3.1-java8-23", "3.3-java8-23", "3-java8-23", "3.3.1-java8", "3.3-java8", "3-java8"] + - ["3.3.1-java11-23", "3.3-java11-23", "3-java11-23", "3.3.1-23", "3.3-23", "3-23", "3.3.1", "3.3", "3", "latest"] registry: [*ACR_ALINUX, *ACR_ALINUX, *ACR_ANOLIS, *ACR_ANOLIS] test_config: # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] @@ -45,3 +52,6 @@ images: # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] # - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] - [(*WORKSPACE, *PROJECT, *TEST_SUITE, 
*TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] + - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] + - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] + - [(*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''), (*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[1], '')] -- Gitee