name: Alma9 build v4 production nightly release

on:
  schedule:
    - cron: "0 0 * * 0"
  workflow_dispatch:
    inputs:
      tag-prefix:
        description: 'nightly tag prefix'
        default: ''
      cvmfs-deployment:
        description: 'whether to deploy the release to cvmfs'
        default: 'no'
      send-slack-message:
        description: 'whether to send a message to #daq-release-notifications on completion'
        default: 'no'
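
# The cron expression "0 0 * * 0" fires at 00:00 UTC on Sundays, so scheduled
# builds run weekly; manual workflow_dispatch runs can set a tag prefix and opt
# in to cvmfs deployment and Slack notification via the inputs above.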
jobs:
  make_nightly_tag:
    name: create nightly tag
    runs-on: ubuntu-latest
    outputs:
      tag: ${{ steps.create_nightly_tag.outputs.nightly_tag }}
    defaults:
      run:
        shell: bash
    steps:
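      # The tag is pinned to the most recent Sunday, so every run within a week
      # produces the same "<tag-prefix>_PROD4_<YYMMDD>_A9" name.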
      - id: create_nightly_tag
        run: |
          date_tag=$(date +%y%m%d)
          if [ "$(date +%a)" != "Sun" ]; then
            date_tag=$(date +%y%m%d -d 'last sunday')
          fi
          echo "nightly_tag=${{ github.event.inputs.tag-prefix }}_PROD4_${date_tag}_A9" >> "$GITHUB_OUTPUT"
          cat "$GITHUB_OUTPUT"
  build_the_develop_release_spack:
    name: build_dev_release_spack
    runs-on: daq
    needs: make_nightly_tag
    container:
      image: ghcr.io/dune-daq/alma9-slim-externals:v2.0
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout daq-release
        uses: actions/checkout@v4
        with:
          path: daq-release
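      # NB<tag> holds the base (coredaq) release and NFD<tag> the far-detector
      # release; the tar_and_stage_release helper and the *_RELEASE_TAG
      # variables are expected to be provided by the sourced wf-setup-tools-v4.sh.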
      - name: setup directories and install spack for the coredaq release
        env:
          NIGHTLY_TAG: ${{ needs.make_nightly_tag.outputs.tag }}
        run: |
          export BASE_RELEASE_DIR=/cvmfs/dunedaq-development.opensciencegrid.org/nightly/NB${NIGHTLY_TAG}
          export DET_RELEASE_DIR=/cvmfs/dunedaq-development.opensciencegrid.org/nightly/NFD${NIGHTLY_TAG}
          export OS=almalinux9
          source daq-release/.github/workflows/wf-setup-tools-v4.sh
          daq-release/scripts/checkout-daq-package.py -i daq-release/configs/coredaq/coredaq-production_v4/release.yaml -a -b production/v4 -o $DET_RELEASE_DIR/sourcecode
          daq-release/scripts/spack/build-release-v4.sh $BASE_RELEASE_DIR $DET_RELEASE_DIR core $OS production/v4
          cd $BASE_RELEASE_DIR/../
          tar_and_stage_release ${BASE_RELEASE_TAG}
      - name: upload spack nightly tarball for base release
        uses: actions/upload-artifact@v4
        with:
          name: nightly_coredaq
          path: ${{ github.workspace }}/tarballs_for_upload/NB${{ needs.make_nightly_tag.outputs.tag }}.tar.gz
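      # Same pattern as the coredaq build above, now for the far-detector
      # (fddaq) release; the staged NFD tarball is uploaded as a separate artifact.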
      - name: setup directories and install spack for the fddaq release
        env:
          NIGHTLY_TAG: ${{ needs.make_nightly_tag.outputs.tag }}
        run: |
          export BASE_RELEASE_DIR=/cvmfs/dunedaq-development.opensciencegrid.org/nightly/NB${NIGHTLY_TAG}
          export DET_RELEASE_DIR=/cvmfs/dunedaq-development.opensciencegrid.org/nightly/NFD${NIGHTLY_TAG}
          export OS=almalinux9
          source daq-release/.github/workflows/wf-setup-tools-v4.sh
          daq-release/scripts/checkout-daq-package.py -i daq-release/configs/fddaq/fddaq-production_v4/release.yaml -a -b production/v4 -o $DET_RELEASE_DIR/sourcecode
          daq-release/scripts/spack/build-release-v4.sh $BASE_RELEASE_DIR $DET_RELEASE_DIR fd $OS production/v4
          cd $DET_RELEASE_DIR/../
          tar_and_stage_release ${DET_RELEASE_TAG}
      - name: upload spack nightly tarball for far detector release
        uses: actions/upload-artifact@v4
        with:
          name: nightly_fddaq
          path: ${{ github.workspace }}/tarballs_for_upload/NFD${{ needs.make_nightly_tag.outputs.tag }}.tar.gz
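
  # Unpacks both release tarballs into a cvmfs-style "nightly" tree, wraps it
  # in a Docker image layered on the externals image, and pushes the result to
  # ghcr.io/dune-daq/nightly-release-alma9:production_v4.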
  update_image:
    name: update_spack_image_nightly
    runs-on: daq
    strategy:
      matrix:
        include:
          - input_image: "ghcr.io/dune-daq/alma9-slim-externals:v2.0"
            output_image: "ghcr.io/dune-daq/nightly-release-alma9"
            tag: "production_v4"
    needs: [build_the_develop_release_spack, make_nightly_tag]
    environment: dockerhub
    permissions:
      packages: write
      contents: read
    steps:
      - name: clean docker-build
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/docker-build
          rm -rf ${GITHUB_WORKSPACE}/docker-build/*
      - name: Download spack nightly release tarball artifact for base release
        uses: actions/download-artifact@v4
        with:
          name: nightly_coredaq
          path: ${{ github.workspace }}/docker-build
      - name: Download spack nightly release tarball artifact for far detector release
        uses: actions/download-artifact@v4
        with:
          name: nightly_fddaq
          path: ${{ github.workspace }}/docker-build
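      # The extracted tree mirrors /cvmfs/dunedaq-development.opensciencegrid.org/nightly,
      # with a "last_fddaq" symlink pointing at the new NFD<tag> directory; the
      # generated Dockerfile ADDs that tree on top of the externals image.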
      - name: prepare cvmfs mirror spack-nightly
        env:
          NIGHTLY_TAG: ${{ needs.make_nightly_tag.outputs.tag }}
        run: |
          cd ${{ github.workspace }}/docker-build
          mkdir -p nightly
          cd nightly
          base_tag="NB${NIGHTLY_TAG}"
          tar xf ../${base_tag}.tar.gz
          rm -rf ../${base_tag}.tar.gz
          fddaq_tag="NFD${NIGHTLY_TAG}"
          tar xf ../${fddaq_tag}.tar.gz
          rm -rf ../${fddaq_tag}.tar.gz
          ln -s ${fddaq_tag} last_fddaq
          cd ..
          echo "FROM "${{ matrix.input_image }} > Dockerfile
          echo 'MAINTAINER John Freeman "[email protected]"' >> Dockerfile
          echo "ENV REFRESHED_AT ${NIGHTLY_TAG}" >> Dockerfile
          echo "COPY --from=ghcr.io/dune-daq/pypi-repo:latest /cvmfs/dunedaq.opensciencegrid.org/pypi-repo /cvmfs/dunedaq.opensciencegrid.org/pypi-repo" >> Dockerfile
          echo "ADD nightly /cvmfs/dunedaq-development.opensciencegrid.org/nightly" >> Dockerfile
          echo 'ENTRYPOINT ["/bin/bash"]' >> Dockerfile
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/dune-daq/nightly-release-alma9
          tags: |
            type=raw,value=production_v4
      - name: Build and push Docker images
        uses: docker/build-push-action@v6
        with:
          context: ${{ github.workspace }}/docker-build
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
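
  # Runs inside the freshly pushed nightly image, sets up the far-detector
  # release with dbt, and captures the resulting shell environment into
  # dbt-setup-release-env.sh and daq_app_rte.sh for the deployment job below.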
  generate_dbt_setup_release_env:
    name: generate_dbt_setup_release_env
    runs-on: ubuntu-latest
    needs: update_image
    container:
      image: ghcr.io/dune-daq/nightly-release-alma9:production_v4
    defaults:
      run:
        shell: bash
    steps:
      - name: create dbt-setup-release-env.sh and daq_app_rte.sh for fddaq
        run: |
          export DET=fd
          source /cvmfs/dunedaq.opensciencegrid.org/setup_dunedaq.sh
          setup_dbt latest_v4
          dbt-setup-release -n last_${DET}daq
          rm -f /cvmfs/dunedaq-development.opensciencegrid.org/nightly/last_${DET}daq
          declare -x > ${GITHUB_WORKSPACE}/${DET}daq-dbt-setup-release-env.sh
          declare -f >> ${GITHUB_WORKSPACE}/${DET}daq-dbt-setup-release-env.sh
          egrep "declare -x (PATH|.*_SHARE|CET_PLUGIN_PATH|DUNEDAQ_SHARE_PATH|LD_LIBRARY_PATH|LIBRARY_PATH|PYTHONPATH)=" ${GITHUB_WORKSPACE}/${DET}daq-dbt-setup-release-env.sh > ${GITHUB_WORKSPACE}/${DET}daq_app_rte.sh
      - name: upload fddaq-dbt-setup-release-env.sh
        uses: actions/upload-artifact@v4
        with:
          name: fddaq-dbt_setup_release_env
          path: ${{ github.workspace }}/fddaq-dbt-setup-release-env.sh
      - name: upload fddaq_app_rte.sh
        uses: actions/upload-artifact@v4
        with:
          name: fddaq_app_rte
          path: ${{ github.workspace }}/fddaq_app_rte.sh
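
  # Deployment is gated by the check_cvmfs_deployment step: it exits non-zero
  # (with continue-on-error) when deployment is disabled, and the later steps
  # only run if it succeeded. Scheduled runs leave the input empty, which
  # defaults the flag to "yes".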
  publish_to_cvmfs:
    name: publish to cvmfs
    needs: generate_dbt_setup_release_env
    runs-on: daq
    steps:
      - name: check_cvmfs_deployment
        id: do_deployment
        run: |
          cvmfs_deployment_input=${{ github.event.inputs.cvmfs-deployment }}
          cvmfs_deployment_flag=${cvmfs_deployment_input:-"yes"}
          echo "Do cvmfs deployment: ${cvmfs_deployment_flag}"
          [[ ${cvmfs_deployment_flag} == 'yes' ]] && exit 0 || exit 1
        continue-on-error: true
      - name: download fddaq-dbt-setup-release-env.sh
        if: steps.do_deployment.outcome == 'success'
        uses: actions/download-artifact@v4
        with:
          name: fddaq-dbt_setup_release_env
          path: ${{ github.workspace }}/fddaq-release
      - name: download fddaq_app_rte.sh
        if: steps.do_deployment.outcome == 'success'
        uses: actions/download-artifact@v4
        with:
          name: fddaq_app_rte
          path: ${{ github.workspace }}/fddaq-rte
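      # Copies the nightly tree out of the published image with docker cp,
      # drops the generated env scripts into the NFD* release directory, then
      # opens a cvmfs_server transaction on the repository server
      # (oasiscfs05.fnal.gov), rsyncs the tree across, and publishes it.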
      - name: start docker container, and then rsync...
        if: steps.do_deployment.outcome == 'success'
        run: |
          [[ ${DO_DEPLOYMENT} == 'no' ]] && exit 0
          cd $GITHUB_WORKSPACE
          IMAGE="ghcr.io/dune-daq/nightly-release-alma9:production_v4"
          unique_name=$( date | tr " :" _ )
          docker pull $IMAGE
          files_to_copy=$( docker run --rm --entrypoint ls $IMAGE /cvmfs/dunedaq-development.opensciencegrid.org/nightly )
          docker run --name $unique_name $IMAGE
          mkdir $unique_name
          for file in $files_to_copy; do
            docker cp $unique_name:/cvmfs/dunedaq-development.opensciencegrid.org/nightly/$file $unique_name
          done
          docker rm $unique_name
          docker system prune -f
          dir_for_env_scripts=$( find $unique_name -mindepth 1 -maxdepth 1 -type d -name "NFD*" )
          cp ./fddaq-release/fddaq-dbt-setup-release-env.sh $dir_for_env_scripts/dbt-setup-release-env.sh
          cp ./fddaq-rte/fddaq_app_rte.sh $dir_for_env_scripts/daq_app_rte.sh
          kinit -k -t $HOME/daq-nightly.keytab nightly-build/dune/[email protected]
          ssh -o StrictHostKeyChecking=no -l cvmfsdunedaqdev oasiscfs05.fnal.gov "cvmfs_server transaction dunedaq-development.opensciencegrid.org"
          rsync -e "ssh -o StrictHostKeyChecking=no" -rlpt --stats $unique_name/* [email protected]:/cvmfs/dunedaq-development.opensciencegrid.org/nightly
          ssh -o StrictHostKeyChecking=no -l cvmfsdunedaqdev oasiscfs05.fnal.gov "cvmfs_server publish dunedaq-development.opensciencegrid.org"
          rm -rf $unique_name
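
  # Notifies #daq-release-notifications via the reusable slack-notification
  # workflow: automatically when a scheduled run fails, or on manual runs that
  # set send-slack-message to "yes".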
  send_slack_message:
    if: (github.event_name != 'workflow_dispatch' && failure()) || (github.event_name == 'workflow_dispatch' && github.event.inputs.send-slack-message == 'yes')
    needs: publish_to_cvmfs
    uses: ./.github/workflows/slack-notification.yml
    with:
      # the publish_to_cvmfs steps are optional and may be skipped, so workflow_success is derived from the job result rather than failure() alone
      workflow_success: ${{ needs.publish_to_cvmfs.result == 'success' }}
    secrets:
      slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}