Skip to content

Commit

Permalink
Deploy the sources and Javadoc JARs in the nightly CICD [skip ci] (#11915)
Browse files Browse the repository at this point in the history

* Deploy the sources and Javadoc JARs in the nightly CICD

Deploy the sources and Javadoc JARs to make sure the nightly CICD includes all the JARs required by the Sonatype release

Deploy dist jars in the final step to ensure that the POM files are not overwritten

Signed-off-by: Tim Liu <[email protected]>

* Share the deploy scripts for nightly and release CI

Support deploying both Scala 2.12 and Scala 2.13 artifacts in jenkins/deploy.sh

Signed-off-by: Tim Liu <[email protected]>

* Set default environments to deploy artifacts to the internal maven repo

Signed-off-by: Tim Liu <[email protected]>

---------

Signed-off-by: Tim Liu <[email protected]>
  • Loading branch information
NvTimLiu authored Jan 8, 2025
1 parent 50b14de commit 7c6cef4
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 24 deletions.
15 changes: 11 additions & 4 deletions jenkins/deploy.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/bash
#
# Copyright (c) 2020-2024, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2020-2025, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -42,6 +42,9 @@ DIST_PL=${DIST_PL:-"dist"}
###### Build the path of jar(s) to be deployed ######
# Maven settings file used by every mvn invocation (overridable via env).
MVN_SETTINGS=${MVN_SETTINGS:-"jenkins/settings.xml"}
# Base mvn command: batch mode, with HTTP and deployment retries for flaky networks.
MVN="mvn -B -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3 -s $MVN_SETTINGS"
# Scala binary version of the artifacts to deploy; 2.13 builds live under scala2.13/.
SCALA_BINARY_VER=${SCALA_BINARY_VER:-"2.12"}
# [[ ]] keeps the comparison safe if the value is empty or contains spaces.
[[ "$SCALA_BINARY_VER" == "2.13" ]] && MVN="$MVN -f scala2.13/"

# mvnEval <module> <expression>
# Evaluate a Maven project property for a module and print it to stdout.
#   $1 - module path passed to -pl (e.g. "dist")
#   $2 - property expression (e.g. "project.version")
function mvnEval {
    # $MVN is intentionally unquoted: it is a full command line with flags.
    # $1/$2 are quoted so module paths or expressions with spaces cannot split.
    $MVN help:evaluate -q -DforceStdout -pl "$1" -Dexpression="$2"
}
Expand Down Expand Up @@ -73,9 +76,13 @@ DEPLOY_FILES=$(echo $CLASSIFIERS | sed -e "s;\([^,]*\);${FPATH}-\1.jar;g")
SQL_ART_ID=$(mvnEval $SQL_PL project.artifactId)
SQL_ART_VER=$(mvnEval $SQL_PL project.version)
JS_FPATH="$(echo -n ${SQL_PL}/target/spark*)/${SQL_ART_ID}-${SQL_ART_VER}"
cp $JS_FPATH-sources.jar $FPATH-sources.jar
cp $JS_FPATH-javadoc.jar $FPATH-javadoc.jar

# Copy the aggregated sources/javadoc jars next to the dist jar so they get
# deployed together; Scala 2.13 artifacts live under the scala2.13/ tree.
# All expansions are quoted so paths with spaces cannot word-split (SC2086).
if [[ "$SCALA_BINARY_VER" == "2.13" ]]; then
    jar_prefix="scala2.13/"
else
    jar_prefix=""
fi
cp "${jar_prefix}${JS_FPATH}-sources.jar" "${jar_prefix}${FPATH}-sources.jar"
cp "${jar_prefix}${JS_FPATH}-javadoc.jar" "${jar_prefix}${FPATH}-javadoc.jar"
echo "Plan to deploy ${FPATH}.jar to $SERVER_URL (ID:$SERVER_ID)"

GPG_PLUGIN="org.apache.maven.plugins:maven-gpg-plugin:3.1.0:sign-and-deploy-file"
Expand Down
28 changes: 8 additions & 20 deletions jenkins/spark-nightly-build.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/bash
#
# Copyright (c) 2020-2024, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2020-2025, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -61,9 +61,6 @@ if [[ "$DIST_INCLUDES_DATABRICKS" == "true" ]] && [[ -n ${SPARK_SHIM_VERSIONS_DA
DIST_PROFILE_OPT="$DIST_PROFILE_OPT,"$(IFS=,; echo "${SPARK_SHIM_VERSIONS_DATABRICKS[*]}")
fi

DEPLOY_TYPES='jar'
DEPLOY_FILES="${DIST_FPATH}.jar"
DEPLOY_CLASSIFIERS="$DEFAULT_CUDA_CLASSIFIER"
# Make sure that the local m2 repo on the build machine has the same pom
# installed as the one being pushed to the remote repo. This to prevent
# discrepancies between the build machines regardless of how the local repo was populated.
Expand All @@ -76,16 +73,6 @@ function distWithReducedPom {
mvnCmd="install:install-file"
mvnExtraFlags="-Dpackaging=jar"
;;

deploy)
mvnCmd="deploy:deploy-file"
if (( ${#CLASSIFIERS_ARR[@]} > 1 )); then
# try move tmp artifacts back to target folder for simplifying separate release process
mv ${TMP_PATH}/${ART_ID}-${ART_VER}-*.jar ${DIST_PATH}/target/
fi
mvnExtraFlags="-Durl=${URM_URL}-local -DrepositoryId=snapshots -Dtypes=${DEPLOY_TYPES} -Dfiles=${DEPLOY_FILES} -Dclassifiers=${DEPLOY_CLASSIFIERS}"
;;

*)
echo "Unknown command: $cmd"
;;
Expand Down Expand Up @@ -168,10 +155,6 @@ if (( ${#CLASSIFIERS_ARR[@]} > 1 )); then
# move artifacts to temp for deployment later
artifactFile="${ART_ID}-${ART_VER}-${classifier}.jar"
mv ${DIST_PATH}/target/${artifactFile} ${TMP_PATH}/
# update deployment properties
DEPLOY_TYPES="${DEPLOY_TYPES},jar"
DEPLOY_FILES="${DEPLOY_FILES},${DIST_PL}/target/${artifactFile}"
DEPLOY_CLASSIFIERS="${DEPLOY_CLASSIFIERS},${classifier}"
done
fi
# build dist w/ default cuda classifier
Expand All @@ -180,15 +163,20 @@ installDistArtifact ${DEFAULT_CUDA_CLASSIFIER}
distWithReducedPom "install"

# Deploy the nightly artifacts unless the caller opted out via SKIP_DEPLOY=true.
if [[ $SKIP_DEPLOY != 'true' ]]; then
distWithReducedPom "deploy"

# This deploys the selected submodules that are unconditionally built with Spark 3.2.0.
$MVN -B deploy -pl "!${DIST_PL}" \
-Dbuildver=$SPARK_BASE_SHIM_VERSION \
-DskipTests \
-Dmaven.scaladoc.skip -Dmaven.scalastyle.skip=true \
$MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR \
-Dcuda.version=$DEFAULT_CUDA_CLASSIFIER

# Move the per-classifier artifacts parked in $TMP_PATH back into the target
# folder, to simplify the separate release process that picks them up there.
if (( ${#CLASSIFIERS_ARR[@]} > 1 )); then
mv ${TMP_PATH}/${ART_ID}-${ART_VER}-*.jar ${DIST_PATH}/target/
fi
# Deploy dist jars in the final step to ensure that the POM files are not overwritten.
# SERVER_URL/SERVER_ID default to the internal maven repo (snapshots) when unset.
SERVER_URL=${SERVER_URL:-"$URM_URL"} SERVER_ID=${SERVER_ID:-"snapshots"} jenkins/deploy.sh
fi

# Parse Spark files from local mvn repo
Expand Down

0 comments on commit 7c6cef4

Please sign in to comment.