name: Alma9 build v5 nightly release
on:
  schedule:
    - cron: "0 3 * * *"
  workflow_dispatch:
    inputs:
      tag-prefix:
        description: 'nightly tag prefix (also used for output container tag)'
        default: ''
      feature-branch:
        description: 'feature branch to be used across all DAQ repos wherever possible.'
        default: develop
      cvmfs-deployment:
        description: 'whether to deploy the release to cvmfs'
        default: 'no'
      send-slack-message:
        description: 'whether to send a message to #daq-release-notifications on completion'
        default: 'no'
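# Runs nightly at 03:00 UTC via the cron trigger (all inputs are empty on scheduled runs, so the
# fallbacks in the run scripts apply: feature branch "develop", cvmfs deployment "yes") or on
# demand via workflow_dispatch with the inputs above.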
jobs:
  make_nightly_tag:
    name: create nightly tag
    runs-on: ubuntu-latest
    outputs:
      tag: ${{ steps.create_nightly_tag.outputs.nightly_tag }}
    defaults:
      run:
        shell: bash
    steps:
      - id: create_nightly_tag
        run: |
          date_tag=$(date +%y%m%d)
          echo "nightly_tag=${{ github.event.inputs.tag-prefix }}_DEV_${date_tag}_A9" >> "$GITHUB_OUTPUT"
          cat "$GITHUB_OUTPUT"
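          # The tag has the form <tag-prefix>_DEV_<yymmdd>_A9; e.g. a scheduled run
          # (empty tag-prefix) on 2025-01-31 sets nightly_tag=_DEV_250131_A9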
  build_the_develop_release_spack:
    name: build_dev_release_spack
    runs-on: daq
    needs: make_nightly_tag
    container:
      image: ghcr.io/dune-daq/alma9-slim-externals:v2.1
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout daq-release
        uses: actions/checkout@v4
        with:
          path: daq-release
          ref: amogan/issue422_scan_for_fetcherrors
      - name: Set environment variables
        run: |
          echo "NIGHTLY_TAG=${{ needs.make_nightly_tag.outputs.tag }}" >> $GITHUB_ENV
          echo "BASE_RELEASE_DIR=/cvmfs/dunedaq-development.opensciencegrid.org/nightly/NB${{ needs.make_nightly_tag.outputs.tag }}" >> $GITHUB_ENV
          echo "DET_RELEASE_DIR=/cvmfs/dunedaq-development.opensciencegrid.org/nightly/NFD${{ needs.make_nightly_tag.outputs.tag }}" >> $GITHUB_ENV
          echo "OS=almalinux9" >> $GITHUB_ENV
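      # NB<tag> is the core (base) DAQ release and NFD<tag> is the far-detector release built on
      # top of it; both paths sit under the /cvmfs nightly tree as seen inside the build container.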
      - name: setup directories and install spack for the coredaq release
        run: |
          source daq-release/.github/workflows/wf-setup-tools.sh
          input_feature_branch=${{ github.event.inputs.feature-branch }}
          # For automatically triggered workflows where feature-branch is not set, fall back to the default branch
          branch=${input_feature_branch:-"develop"}
          daq-release/scripts/checkout-daq-package.py -i daq-release/configs/coredaq/coredaq-develop/release.yaml -a -o $DET_RELEASE_DIR/sourcecode -b $branch
          # build-release.sh exits 111 in case of a transient connection (fetch) error; capture the
          # exit code with "|| ..." so that bash's errexit does not kill the step before we can retry
          build_release_exit_code=0
          daq-release/scripts/spack/build-release.sh $BASE_RELEASE_DIR $DET_RELEASE_DIR core $OS ${{ github.event.inputs.feature-branch }} || build_release_exit_code=$?
          if [[ $build_release_exit_code -eq 111 ]]; then
            max_attempts=3
            attempt=1
            echo "First build attempt failed due to a FetchError. Will retry up to $max_attempts times."
            while (( attempt <= max_attempts )); do
              echo " --- Attempt number $attempt of $max_attempts --- "
              build_release_exit_code=0
              daq-release/scripts/spack/build-release.sh $BASE_RELEASE_DIR $DET_RELEASE_DIR core $OS ${{ github.event.inputs.feature-branch }} || build_release_exit_code=$?
              echo "Build release exit code: $build_release_exit_code"
              if [[ $build_release_exit_code -ne 111 ]]; then
                break
              fi
              echo "Retry attempt $attempt/$max_attempts failed due to a FetchError."
              attempt=$((attempt+1))
            done
            if (( attempt > max_attempts )); then
              echo "build-release.sh failed after $max_attempts attempts. Exiting..."
              exit 111
            fi
          fi
          if [[ $build_release_exit_code -ne 0 ]]; then
            echo "build-release.sh failed with exit code $build_release_exit_code. Exiting..."
            exit $build_release_exit_code
          fi
          #daq-release/scripts/spack/build-release.sh $BASE_RELEASE_DIR $DET_RELEASE_DIR fd $OS ${{ github.event.inputs.feature-branch }}
          cd $DET_RELEASE_DIR/../
          tar_and_stage_release ${DET_RELEASE_TAG}
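      # tar_and_stage_release comes from wf-setup-tools.sh; it is assumed to stage the release
      # tarball(s) under $GITHUB_WORKSPACE/tarballs_for_upload/, which is where the upload steps
      # below look for them.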
      - name: upload spack nightly tarball for base release
        uses: actions/upload-artifact@v4
        with:
          name: nightly_coredaq
          path: ${{ github.workspace }}/tarballs_for_upload/NB${{ needs.make_nightly_tag.outputs.tag }}.tar.gz
      - name: setup directories and install spack for the fddaq release
        run: |
          source daq-release/.github/workflows/wf-setup-tools.sh
          input_feature_branch=${{ github.event.inputs.feature-branch }}
          # For automatically triggered workflows where feature-branch is not set, fall back to the default branch
          branch=${input_feature_branch:-"develop"}
          daq-release/scripts/checkout-daq-package.py -i daq-release/configs/fddaq/fddaq-develop/release.yaml -a -o $DET_RELEASE_DIR/sourcecode -b $branch
          # capture the exit code explicitly; checking "$?" afterwards would never run, since
          # errexit aborts the step as soon as build-release.sh fails
          build_release_exit_code=0
          daq-release/scripts/spack/build-release.sh $BASE_RELEASE_DIR $DET_RELEASE_DIR fd $OS ${{ github.event.inputs.feature-branch }} || build_release_exit_code=$?
          if [[ $build_release_exit_code -ne 0 ]]; then
            exit $build_release_exit_code
          fi
          cd $DET_RELEASE_DIR/../
          tar_and_stage_release ${DET_RELEASE_TAG}
      - name: upload spack nightly tarball for far detector release
        uses: actions/upload-artifact@v4
        with:
          name: nightly_fddaq
          path: ${{ github.workspace }}/tarballs_for_upload/NFD${{ needs.make_nightly_tag.outputs.tag }}.tar.gz
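  # update_image unpacks the two tarballs produced above into a /cvmfs-style directory tree and
  # layers them (plus the pypi mirror) on top of the externals image to produce the nightly
  # release container.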
  update_image:
    name: update_spack_image_nightly
    runs-on: daq
    strategy:
      matrix:
        include:
          - input_image: "ghcr.io/dune-daq/alma9-slim-externals:v2.1"
            output_image: "ghcr.io/dune-daq/nightly-release-alma9"
            tag: "development_v5${{ github.event.inputs.tag-prefix }}"
    needs: [build_the_develop_release_spack, make_nightly_tag]
    environment: dockerhub
    permissions:
      packages: write
      contents: read
    steps:
      - name: clean docker-build
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/docker-build
          rm -rf ${GITHUB_WORKSPACE}/docker-build/*
      - name: Download spack nightly release tarball artifact for base release
        uses: actions/download-artifact@v4
        with:
          name: nightly_coredaq
          path: ${{ github.workspace }}/docker-build
      - name: Download spack nightly release tarball artifact for far detector release
        uses: actions/download-artifact@v4
        with:
          name: nightly_fddaq
          path: ${{ github.workspace }}/docker-build
      - name: prepare cvmfs mirror spack-nightly
        env:
          NIGHTLY_TAG: ${{ needs.make_nightly_tag.outputs.tag }}
        run: |
          cd ${{ github.workspace }}/docker-build
          mkdir -p nightly
          cd nightly
          base_tag="NB${NIGHTLY_TAG}"
          tar xf ../${base_tag}.tar.gz
          rm -rf ../${base_tag}.tar.gz
          fddaq_tag="NFD${NIGHTLY_TAG}"
          tar xf ../${fddaq_tag}.tar.gz
          rm -rf ../${fddaq_tag}.tar.gz
          test "${NIGHTLY_TAG:0:1}" == "_" && ln -s ${fddaq_tag} last_fddaq
          cd ..
          echo "FROM "${{ matrix.input_image }} > Dockerfile
          echo 'MAINTAINER John Freeman "[email protected]"' >> Dockerfile
          echo "ENV REFRESHED_AT ${NIGHTLY_TAG}" >> Dockerfile
          echo "COPY --from=ghcr.io/dune-daq/pypi-repo:latest /cvmfs/dunedaq.opensciencegrid.org/pypi-repo /cvmfs/dunedaq.opensciencegrid.org/pypi-repo" >> Dockerfile
          echo "ADD nightly /cvmfs/dunedaq-development.opensciencegrid.org/nightly" >> Dockerfile
          echo 'ENTRYPOINT ["/bin/bash"]' >> Dockerfile
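      # The generated Dockerfile copies the pypi mirror from ghcr.io/dune-daq/pypi-repo into
      # /cvmfs/dunedaq.opensciencegrid.org/pypi-repo and adds the unpacked NB<tag>/NFD<tag>
      # releases under /cvmfs/dunedaq-development.opensciencegrid.org/nightly; the last_fddaq
      # symlink is only created for standard nightlies (empty tag-prefix, so the tag starts
      # with "_").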
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/dune-daq/nightly-release-alma9
          tags: |
            type=raw,value=development_v5${{ github.event.inputs.tag-prefix }}
      - name: Build and push Docker images
        uses: docker/build-push-action@v6
        with:
          context: ${{ github.workspace }}/docker-build
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
  generate_dbt_setup_release_env:
    name: generate_dbt_setup_release_env
    runs-on: daq
    needs: [make_nightly_tag, update_image]
    container:
      image: ghcr.io/dune-daq/nightly-release-alma9:development_v5${{ github.event.inputs.tag-prefix }}
    defaults:
      run:
        shell: bash
    steps:
      - name: create dbt-setup-release-env.sh and daq_app_rte.sh for fddaq
        env:
          NIGHTLY_TAG: ${{ needs.make_nightly_tag.outputs.tag }}
        run: |
          export DET=fd
          source /cvmfs/dunedaq.opensciencegrid.org/setup_dunedaq.sh
          setup_dbt latest_v5
          dbt-setup-release -n NFD${NIGHTLY_TAG}
          declare -x > ${GITHUB_WORKSPACE}/${DET}daq-dbt-setup-release-env.sh
          declare -f >> ${GITHUB_WORKSPACE}/${DET}daq-dbt-setup-release-env.sh
          egrep "declare -x (PATH|.*_SHARE|CET_PLUGIN_PATH|DUNEDAQ_SHARE_PATH|LD_LIBRARY_PATH|LIBRARY_PATH|PYTHONPATH)=" ${GITHUB_WORKSPACE}/${DET}daq-dbt-setup-release-env.sh > ${GITHUB_WORKSPACE}/${DET}daq_app_rte.sh
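      # fddaq-dbt-setup-release-env.sh captures the full shell environment (exported variables
      # plus function definitions) after setting up the NFD<tag> release; fddaq_app_rte.sh is the
      # filtered subset of path-like variables, presumably used as the runtime environment for
      # DAQ applications.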
      - name: upload fddaq-dbt-setup-release-env.sh
        uses: actions/upload-artifact@v4
        with:
          name: fddaq-dbt_setup_release_env
          path: ${{ github.workspace }}/fddaq-dbt-setup-release-env.sh
      - name: upload fddaq_app_rte.sh
        uses: actions/upload-artifact@v4
        with:
          name: fddaq_app_rte
          path: ${{ github.workspace }}/fddaq_app_rte.sh
  publish_to_cvmfs:
    name: publish to cvmfs
    needs: generate_dbt_setup_release_env
    runs-on: daq
    steps:
      - name: check_cvmfs_deployment
        id: do_deployment
        run: |
          cvmfs_deployment_input=${{ github.event.inputs.cvmfs-deployment }}
          cvmfs_deployment_flag=${cvmfs_deployment_input:-"yes"}
          echo "Do cvmfs deployment: ${cvmfs_deployment_flag}"
          [[ ${cvmfs_deployment_flag} == 'yes' ]] && exit 0 || exit 1
        continue-on-error: true
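      # Gating pattern: the check step exits nonzero to signal "skip deployment",
      # continue-on-error keeps the job green, and every subsequent step only runs when
      # steps.do_deployment.outcome == 'success'. Scheduled runs have an empty cvmfs-deployment
      # input, so they default to deploying.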
      - name: download fddaq-dbt-setup-release-env.sh
        if: steps.do_deployment.outcome == 'success'
        uses: actions/download-artifact@v4
        with:
          name: fddaq-dbt_setup_release_env
          path: ${{ github.workspace }}/fddaq-release
      - name: download fddaq_app_rte.sh
        if: steps.do_deployment.outcome == 'success'
        uses: actions/download-artifact@v4
        with:
          name: fddaq_app_rte
          path: ${{ github.workspace }}/fddaq-rte
      - name: start docker container, and then rsync...
        if: steps.do_deployment.outcome == 'success'
        run: |
          [[ ${DO_DEPLOYMENT} == 'no' ]] && exit 0
          cd $GITHUB_WORKSPACE
          IMAGE="ghcr.io/dune-daq/nightly-release-alma9:development_v5${{ github.event.inputs.tag-prefix }}"
          unique_name=$( date | tr " :" _ )
          docker pull $IMAGE
          files_to_copy=$( docker run --rm --entrypoint ls $IMAGE /cvmfs/dunedaq-development.opensciencegrid.org/nightly )
          docker run --name $unique_name $IMAGE
          mkdir $unique_name
          for file in $files_to_copy; do
            docker cp $unique_name:/cvmfs/dunedaq-development.opensciencegrid.org/nightly/$file $unique_name
          done
          docker rm $unique_name
          docker system prune -f
          dir_for_env_scripts=$( find $unique_name -mindepth 1 -maxdepth 1 -type d -name "NFD*" )
          cp ./fddaq-release/fddaq-dbt-setup-release-env.sh $dir_for_env_scripts/dbt-setup-release-env.sh
          cp ./fddaq-rte/fddaq_app_rte.sh $dir_for_env_scripts/daq_app_rte.sh
          kinit -k -t $HOME/daq-nightly.keytab nightly-build/dune/[email protected]
          ssh -o StrictHostKeyChecking=no -l cvmfsdunedaqdev oasiscfs05.fnal.gov "cvmfs_server transaction dunedaq-development.opensciencegrid.org"
          rsync -e "ssh -o StrictHostKeyChecking=no" -rlpt --stats $unique_name/* cvmfsdunedaqdev@oasiscfs05.fnal.gov:/cvmfs/dunedaq-development.opensciencegrid.org/nightly
          ssh -o StrictHostKeyChecking=no -l cvmfsdunedaqdev oasiscfs05.fnal.gov "cvmfs_server publish dunedaq-development.opensciencegrid.org"
          rm -rf $unique_name
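  # Scheduled runs notify #daq-release-notifications only on failure; manually dispatched runs
  # notify only when send-slack-message is set to 'yes'.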
  send_slack_message:
    if: (github.event_name != 'workflow_dispatch' && failure()) || (github.event_name == 'workflow_dispatch' && github.event.inputs.send-slack-message == 'yes')
    needs: publish_to_cvmfs
    uses: ./.github/workflows/slack-notification.yml
    with:
      # publish_to_cvmfs still reports success when cvmfs deployment is skipped (the deployment
      # check uses continue-on-error), so its result is a reliable proxy for overall workflow success
      workflow_success: ${{ needs.publish_to_cvmfs.result == 'success' }}
    secrets:
      slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}