From 7c6cef4aa69b88b8c49eee08dcf8fcd670dd9028 Mon Sep 17 00:00:00 2001
From: Tim Liu
Date: Wed, 8 Jan 2025 10:08:56 +0800
Subject: [PATCH] Deploy the sources and Javadoc JARs in the nightly CICD
 [skip ci] (#11915)

* Deploy the sources and Javadoc JARs in the nightly CICD

Deploy the sources and Javadoc JARs to make sure the nightly CICD includes
all of the jars required by the Sonatype release.

Deploy the dist jars in the final step to ensure that the POM files are not
overwritten.

Signed-off-by: Tim Liu

* Share the deploy scripts for the nightly and release CI

Support deploying both Scala 2.12 and Scala 2.13 artifacts in jenkins/deploy.sh.

Signed-off-by: Tim Liu

* Set default environments to deploy artifacts to the internal maven repo

Signed-off-by: Tim Liu

---------

Signed-off-by: Tim Liu
---
 jenkins/deploy.sh              | 15 +++++++++++----
 jenkins/spark-nightly-build.sh | 28 ++++++++--------------------
 2 files changed, 19 insertions(+), 24 deletions(-)

diff --git a/jenkins/deploy.sh b/jenkins/deploy.sh
index 8eb336678fa..091b5e48671 100755
--- a/jenkins/deploy.sh
+++ b/jenkins/deploy.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright (c) 2020-2024, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2025, NVIDIA CORPORATION. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -42,6 +42,9 @@ DIST_PL=${DIST_PL:-"dist"}
 ###### Build the path of jar(s) to be deployed ######
 MVN_SETTINGS=${MVN_SETTINGS:-"jenkins/settings.xml"}
 MVN="mvn -B -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3 -s $MVN_SETTINGS"
+SCALA_BINARY_VER=${SCALA_BINARY_VER:-"2.12"}
+[ $SCALA_BINARY_VER == "2.13" ] && MVN="$MVN -f scala2.13/"
+
 function mvnEval {
     $MVN help:evaluate -q -DforceStdout -pl $1 -Dexpression=$2
 }
@@ -73,9 +76,13 @@ DEPLOY_FILES=$(echo $CLASSIFIERS | sed -e "s;\([^,]*\);${FPATH}-\1.jar;g")
 SQL_ART_ID=$(mvnEval $SQL_PL project.artifactId)
 SQL_ART_VER=$(mvnEval $SQL_PL project.version)
 JS_FPATH="$(echo -n ${SQL_PL}/target/spark*)/${SQL_ART_ID}-${SQL_ART_VER}"
-cp $JS_FPATH-sources.jar $FPATH-sources.jar
-cp $JS_FPATH-javadoc.jar $FPATH-javadoc.jar
-
+if [ $SCALA_BINARY_VER == "2.13" ]; then
+    cp scala2.13/$JS_FPATH-sources.jar scala2.13/$FPATH-sources.jar
+    cp scala2.13/$JS_FPATH-javadoc.jar scala2.13/$FPATH-javadoc.jar
+else
+    cp $JS_FPATH-sources.jar $FPATH-sources.jar
+    cp $JS_FPATH-javadoc.jar $FPATH-javadoc.jar
+fi
 echo "Plan to deploy ${FPATH}.jar to $SERVER_URL (ID:$SERVER_ID)"
 
 GPG_PLUGIN="org.apache.maven.plugins:maven-gpg-plugin:3.1.0:sign-and-deploy-file"
diff --git a/jenkins/spark-nightly-build.sh b/jenkins/spark-nightly-build.sh
index 00735e02c84..d8f50141e88 100755
--- a/jenkins/spark-nightly-build.sh
+++ b/jenkins/spark-nightly-build.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright (c) 2020-2024, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2025, NVIDIA CORPORATION. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -61,9 +61,6 @@ if [[ "$DIST_INCLUDES_DATABRICKS" == "true" ]] && [[ -n ${SPARK_SHIM_VERSIONS_DA
     DIST_PROFILE_OPT="$DIST_PROFILE_OPT,"$(IFS=,; echo "${SPARK_SHIM_VERSIONS_DATABRICKS[*]}")
 fi
 
-DEPLOY_TYPES='jar'
-DEPLOY_FILES="${DIST_FPATH}.jar"
-DEPLOY_CLASSIFIERS="$DEFAULT_CUDA_CLASSIFIER"
 # Make sure that the local m2 repo on the build machine has the same pom
 # installed as the one being pushed to the remote repo. This to prevent
 # discrepancies between the build machines regardless of how the local repo was populated.
@@ -76,16 +73,6 @@ function distWithReducedPom {
             mvnCmd="install:install-file"
             mvnExtraFlags="-Dpackaging=jar"
             ;;
-
-        deploy)
-            mvnCmd="deploy:deploy-file"
-            if (( ${#CLASSIFIERS_ARR[@]} > 1 )); then
-                # try move tmp artifacts back to target folder for simplifying separate release process
-                mv ${TMP_PATH}/${ART_ID}-${ART_VER}-*.jar ${DIST_PATH}/target/
-            fi
-            mvnExtraFlags="-Durl=${URM_URL}-local -DrepositoryId=snapshots -Dtypes=${DEPLOY_TYPES} -Dfiles=${DEPLOY_FILES} -Dclassifiers=${DEPLOY_CLASSIFIERS}"
-            ;;
-
         *)
             echo "Unknown command: $cmd"
             ;;
@@ -168,10 +155,6 @@ if (( ${#CLASSIFIERS_ARR[@]} > 1 )); then
         # move artifacts to temp for deployment later
         artifactFile="${ART_ID}-${ART_VER}-${classifier}.jar"
         mv ${DIST_PATH}/target/${artifactFile} ${TMP_PATH}/
-        # update deployment properties
-        DEPLOY_TYPES="${DEPLOY_TYPES},jar"
-        DEPLOY_FILES="${DEPLOY_FILES},${DIST_PL}/target/${artifactFile}"
-        DEPLOY_CLASSIFIERS="${DEPLOY_CLASSIFIERS},${classifier}"
     done
 fi
 # build dist w/ default cuda classifier
@@ -180,8 +163,6 @@ installDistArtifact ${DEFAULT_CUDA_CLASSIFIER}
 distWithReducedPom "install"
 
 if [[ $SKIP_DEPLOY != 'true' ]]; then
-    distWithReducedPom "deploy"
-
     # this deploys selected submodules that is unconditionally built with Spark 3.2.0
     $MVN -B deploy -pl "!${DIST_PL}" \
         -Dbuildver=$SPARK_BASE_SHIM_VERSION \
@@ -189,6 +170,13 @@ if [[ $SKIP_DEPLOY != 'true' ]]; then
         -Dmaven.scaladoc.skip -Dmaven.scalastyle.skip=true \
         $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR \
         -Dcuda.version=$DEFAULT_CUDA_CLASSIFIER
+
+    # try move tmp artifacts back to target folder for simplifying separate release process
+    if (( ${#CLASSIFIERS_ARR[@]} > 1 )); then
+        mv ${TMP_PATH}/${ART_ID}-${ART_VER}-*.jar ${DIST_PATH}/target/
+    fi
+    # Deploy dist jars in the final step to ensure that the POM files are not overwritten
+    SERVER_URL=${SERVER_URL:-"$URM_URL"} SERVER_ID=${SERVER_ID:-"snapshots"} jenkins/deploy.sh
 fi
 
 # Parse Spark files from local mvn repo
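
The snippet below is an illustrative sketch, not part of the patch: it shows how the now-shared
jenkins/deploy.sh could be invoked for each Scala binary version after the dist jars are built,
using the environment variables the patch introduces (SCALA_BINARY_VER, SERVER_URL, SERVER_ID)
and the same URM snapshot defaults that spark-nightly-build.sh falls back to. Running both
invocations back to back is an assumption for illustration only; the actual nightly pipelines
may run each Scala build separately, and URM_URL is assumed to be provided by the CI environment.

#!/bin/bash
# Hypothetical usage sketch of the shared deploy script (not taken from the repo).
set -ex

# Scala 2.12 (the default): deploy.sh evaluates the root POM and copies the
# sources/javadoc jars from the standard build directories.
SCALA_BINARY_VER=2.12 \
SERVER_URL=${SERVER_URL:-"$URM_URL"} SERVER_ID=${SERVER_ID:-"snapshots"} \
    jenkins/deploy.sh

# Scala 2.13: deploy.sh appends '-f scala2.13/' to its mvn command and copies the
# sources/javadoc jars from the scala2.13/ build directories instead.
SCALA_BINARY_VER=2.13 \
SERVER_URL=${SERVER_URL:-"$URM_URL"} SERVER_ID=${SERVER_ID:-"snapshots"} \
    jenkins/deploy.sh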