diff --git a/.github/workflows/run_workflow.yml b/.github/workflows/run_workflow.yml
new file mode 100644
index 00000000000..eb5a4ce5341
--- /dev/null
+++ b/.github/workflows/run_workflow.yml
@@ -0,0 +1,163 @@
+name: 'Azure e2e - Run Workflow'
+on:
+  workflow_dispatch:
+    inputs:
+      target-branch:
+        description: 'Branch of Cromwell to run tests on'
+        required: true
+        default: 'develop'
+        type: string
+      # Replace user data with seeded data provided by DevOps (once available)
+      owner-subject:
+        description: 'Owner of billing project'
+        required: true
+        default: 'hermione.owner@quality.firecloud.org'
+        type: string
+      service-account:
+        description: 'Email address or unique identifier of the Google Cloud service account for which to generate credentials'
+        required: true
+        default: 'firecloud-qa@broad-dsde-qa.iam.gserviceaccount.com'
+        type: string
+
+env:
+  BEE_NAME: '${{ github.event.repository.name }}-${{ github.run_id }}-${{ github.run_attempt }}-dev'
+  BROADBOT_TOKEN: '${{ secrets.BROADBOT_GITHUB_TOKEN }}' # GitHub token used to kick off jobs in the private repo
+  RUN_NAME_SUFFIX: '${{ github.event.repository.name }}-${{ github.run_id }}-${{ github.run_attempt }}'
+
+jobs:
+  init-github-context:
+    runs-on: ubuntu-latest
+    outputs:
+      branch: ${{ steps.extract-inputs.outputs.branch }}
+    steps:
+      # NOTE: this step was included in the rawls template but seems redundant given the defaults defined above.
+      # Remove it if it turns out to be unnecessary.
+      - name: Get inputs or use defaults
+        id: extract-inputs
+        run: |
+          echo "branch=${{ inputs.target-branch || 'develop' }}" >> "$GITHUB_OUTPUT"
+
+  # This job provisions useful parameters for e2e tests, including access tokens.
+  # Please note: access tokens are only usable within this workflow; they cannot be dispatched to remote workflows.
+  params-gen:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: 'read'
+      id-token: 'write'
+    outputs:
+      project-name: ${{ steps.gen.outputs.project_name }}
+    steps:
+      - name: Generate a random billing project name
+        id: 'gen'
+        run: |
+          project_name=$(echo "tmp-billing-project-$(uuidgen)" | cut -c -30)
+          echo "project_name=${project_name}" >> "$GITHUB_OUTPUT"
+
+  create-bee-workflow:
+    runs-on: ubuntu-latest
+    needs: [init-github-context, params-gen]
+    permissions:
+      contents: 'read'
+      id-token: 'write'
+    steps:
+      - name: Dispatch to terra-github-workflows
+        uses: broadinstitute/workflow-dispatch@v3
+        with:
+          workflow: bee-create
+          repo: broadinstitute/terra-github-workflows
+          ref: refs/heads/main
+          token: ${{ env.BROADBOT_TOKEN }}
+          # NOTE: Eventually this should use "prod" as the version template so we test against the current prod state;
+          # NOTE: using "dev" for now while this workflow is being tested.
+          inputs: '{ "bee-name": "${{ env.BEE_NAME }}", "version-template": "dev" }'
+
+  create-and-attach-billing-project-to-landing-zone-workflow:
+    runs-on: ubuntu-latest
+    needs: [create-bee-workflow, params-gen]
+    steps:
+      - name: Dispatch to terra-github-workflows
+        uses: broadinstitute/workflow-dispatch@v3
+        with:
+          workflow: attach-billing-project-to-landing-zone.yaml
+          repo: broadinstitute/terra-github-workflows
+          ref: refs/heads/main
+          token: ${{ env.BROADBOT_TOKEN }}
+          inputs: '{
+            "run-name": "attach-billing-project-to-landing-zone-${{ env.RUN_NAME_SUFFIX }}",
+            "bee-name": "${{ env.BEE_NAME }}",
+            "billing-project": "${{ needs.params-gen.outputs.project-name }}",
+            "billing-project-creator": "${{ inputs.owner-subject }}",
+            "service-account": "${{ inputs.service-account }}" }'
+
+  run-cromwell-az-e2e:
+    runs-on: ubuntu-latest
+    needs: [params-gen, create-and-attach-billing-project-to-landing-zone-workflow]
+    permissions:
+      contents: 'read'
+      id-token: 'write'
+    steps:
+      - uses: 'actions/checkout@v3'
+      - name: Generate OAuth 2.0 access token for owner
+        id: 'owner_auth'
+        uses: google-github-actions/auth@v1
+        with:
+          token_format: 'access_token'
+          workload_identity_provider: 'projects/1038484894585/locations/global/workloadIdentityPools/github-wi-pool/providers/github-wi-provider'
+          service_account: ${{ inputs.service-account }}
+          access_token_scopes: 'profile, email, openid'
+          access_token_subject: ${{ inputs.owner-subject }}
+          export_environment_variables: false
+          create_credentials_file: false
+      - uses: actions/checkout@v3
+        with:
+          # NOTE: replace with ${{ inputs.target-branch }} before merge
+          ref: refs/heads/WX-983
+      - name: Install poetry
+        uses: snok/install-poetry@v1
+      - name: Run e2e test
+        env:
+          BEE_NAME: ${{ env.BEE_NAME }}
+          BILLING_PROJECT_NAME: ${{ needs.params-gen.outputs.project-name }}
+          BEARER_TOKEN: ${{ steps.owner_auth.outputs.access_token }}
+        run: |
+          poetry run python server/src/test/python/cromwell-az-e2e/tests/az-e2e.py
+
+  delete-billing-project-v2-from-bee-workflow:
+    runs-on: ubuntu-latest
+    needs: [run-cromwell-az-e2e, create-and-attach-billing-project-to-landing-zone-workflow, params-gen]
+    if: always()
+    steps:
+      - name: Dispatch to terra-github-workflows
+        uses: broadinstitute/workflow-dispatch@v3
+        with:
+          workflow: .github/workflows/delete-billing-project-v2-from-bee.yaml
+          repo: broadinstitute/terra-github-workflows
+          ref: refs/heads/main
+          token: ${{ env.BROADBOT_TOKEN }}
+          inputs: '{
+            "run-name": "delete-billing-project-v2-from-bee-${{ env.RUN_NAME_SUFFIX }}",
+            "bee-name": "${{ env.BEE_NAME }}",
+            "billing-project": "${{ needs.params-gen.outputs.project-name }}",
"billing-project-owner": "${{ inputs.owner-subject }}", + "service-account": "${{ inputs.service-account }}", + "silent-on-failure": "false" }' + + destroy-bee-workflow: + runs-on: ubuntu-latest + # QUESTION: Can a billing project be deleted if the bee is deleted? + # QUESTION: Can a billing project be deleted if the bee is still tied to it? + needs: [create-bee-workflow, delete-billing-project-v2-from-bee-workflow] + if: always() + permissions: + contents: 'read' + id-token: 'write' + steps: + - name: dispatch to terra-github-workflows + uses: broadinstitute/workflow-dispatch@v3 + with: + workflow: bee-destroy.yaml + repo: broadinstitute/terra-github-workflows + ref: refs/heads/main + token: ${{ env.BROADBOT_TOKEN }} + inputs: '{ "bee-name": "${{ env.BEE_NAME }}" }' + wait-for-completion: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index a5b72f6b263..250b6aa3c16 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,9 @@ console_output.txt expected.json run_mode_metadata.json +#bloop files +/.bloop + # custom config cromwell-executions cromwell-test-executions @@ -55,3 +58,6 @@ tesk_application.conf **/venv/ exome_germline_single_sample_v1.3/ **/*.pyc + +# GHA credentials +gha-creds-*.json diff --git a/server/src/test/python/cromwell-az-e2e/README.md b/server/src/test/python/cromwell-az-e2e/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/server/src/test/python/cromwell-az-e2e/poetry.lock b/server/src/test/python/cromwell-az-e2e/poetry.lock new file mode 100644 index 00000000000..c10503cb1c6 --- /dev/null +++ b/server/src/test/python/cromwell-az-e2e/poetry.lock @@ -0,0 +1,245 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for 
Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "2.0.4" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "06b3ef1674f1968da42c4bb56fcdf8a16816beaf5878c93a8d135aa5f961c39f" diff --git a/server/src/test/python/cromwell-az-e2e/pyproject.toml b/server/src/test/python/cromwell-az-e2e/pyproject.toml new file mode 100644 index 00000000000..017a583268e --- /dev/null +++ b/server/src/test/python/cromwell-az-e2e/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "cromwell-az-e2e" +version = "0.1.0" +description = "" +authors = ["JVThomas "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.10" +requests = "^2.31.0" + +[tool.ruff] +line-length = 120 + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/server/src/test/python/cromwell-az-e2e/tests/az-e2e.py b/server/src/test/python/cromwell-az-e2e/tests/az-e2e.py new file mode 100644 index 00000000000..838c769948b --- /dev/null +++ b/server/src/test/python/cromwell-az-e2e/tests/az-e2e.py @@ -0,0 +1,155 @@ +import requests +import os +import json +import random +import string +import uuid +import time + +bearer_token = os.environ['BEARER_TOKEN'] +bee_name = os.environ['BEE_NAME'] +billing_project_name = os.environ['BILLING_PROJECT_NAME'] +number_of_workspaces = 1 +wds_upload=False +cbas_submit_workflow=False +number_of_workflows_to_kick_off = 1 + +rawls_url = f"https://rawls.{bee_name}.bee.envs-terra.bio" +leo_url = f"https://leonardo.{bee_name}.bee.envs-terra.bio" + +def handle_failed_request(response, msg, status_code=200): + if(response.status_code != status_code): + raise Exception(msg) + +def create_workspace(): + rawls_api_call = f"{rawls_url}/api/workspaces" + request_body= { + "namespace": billing_project_name, # Billing project name + "name": f"api-workspace-{''.join(random.choices(string.ascii_lowercase, k=5))}", # workspace name + "attributes": {}} + + create_workspace_response = 
requests.post(url=rawls_api_call,
+                                                 json=request_body,
+                                                 headers={"Authorization": f"Bearer {bearer_token}"}
+                                                 ).json()
+
+    create_workspace_data = json.loads(json.dumps(create_workspace_response))
+    workspaceId = create_workspace_data['workspaceId']
+
+    print(f"Enabling CBAS for workspace {workspaceId}")
+    activate_cbas_request = f"{leo_url}/api/apps/v2/{workspaceId}/terra-app-{str(uuid.uuid4())}"
+    cbas_request_body = {
+        "appType": "CROMWELL"
+    }
+
+    response = requests.post(url=activate_cbas_request, json=cbas_request_body,
+                             headers={"Authorization": f"Bearer {bearer_token}"})
+    # will return 202 or error
+    handle_failed_request(response, "Error activating CBAS", 202)
+
+    print(response)
+    return workspaceId
+
+# GET WDS OR CROMWELL ENDPOINT URL FROM LEO
+def get_app_url(workspaceId, app):
+    """Get the proxy URL for the WDS or Cromwell (CBAS) app in a workspace."""
+    uri = f"{leo_url}/api/apps/v2/{workspaceId}?includeDeleted=false"
+
+    headers = {"Authorization": f"Bearer {bearer_token}",
+               "accept": "application/json"}
+
+    response = requests.get(uri, headers=headers)
+    status_code = response.status_code
+
+    if status_code != 200:
+        return response.text
+    print("Successfully retrieved details.")
+    response = response.json()
+
+    app_url = ""
+    app_type = "CROMWELL" if app != 'wds' else app.upper()
+    print(f"App type: {app_type}")
+    for entries in response:
+        if entries['appType'] == app_type and entries['proxyUrls'][app] is not None:
+            print(entries['status'])
+            if entries['status'] == "PROVISIONING":
+                print(f"{app} is still provisioning")
+                break
+            print(f"App status: {entries['status']}")
+            app_url = entries['proxyUrls'][app]
+            break
+
+    if not app_url:
+        print(f"{app} is missing in current workspace")
+    else:
+        print(f"{app} url: {app_url}")
+
+    return app_url
+
+def submit_workflow_to_cromwell(app_url, workflow_test_name):
+    workflow_endpoint = f'{app_url}/cromwell/api/workflows/v1'
+    # Resolve workflow_files relative to this script so the test works regardless of the caller's working directory
+    file_source = os.path.join(os.path.dirname(__file__), 'workflow_files')
+    # NOTE: do not set Content-Type here; requests generates the multipart boundary itself
+    headers = {"Authorization": f"Bearer {bearer_token}",
+               "accept": "application/json"}
+    files = {'workflowSource': open(f'{file_source}/hello.wdl', 'rb'),
+             'workflowInputs': ('hello.inputs',
+                                open(f'{file_source}/hello.inputs', 'rb'),
+                                'application/json'),
+             'workflowType': 'WDL',
+             'workflowTypeVersion': '1.0',
+             }
+    response = requests.post(workflow_endpoint, headers=headers, files=files)
+    handle_failed_request(response, f"Error submitting workflow to Cromwell for {workflow_test_name}")
+    print(response.json())  # NOTE: remove after testing
+    return response.json()
+
+def get_workflow_information(app_url, workflow_id):
+    workflow_endpoint = f'{app_url}/cromwell/api/workflows/v1/{workflow_id}/metadata'
+    headers = {"Authorization": f"Bearer {bearer_token}",
+               "accept": "application/json"}
+    response = requests.get(workflow_endpoint, headers=headers)
+    handle_failed_request(response, f"Error fetching workflow metadata for {workflow_id}")
+    print(response.json())  # NOTE: remove after testing
+    return response.json()
+
+def get_completed_workflow(app_url, workflow_ids, max_retries=4):
+    target_statuses = ['Succeeded', 'Failed']
+    current_running_workflow_count = 0
+    while workflow_ids:
+        if max_retries == 0:
+            raise Exception("Workflow(s) did not finish running within the retry window")
+        workflow_id = workflow_ids.pop()
+        workflow_metadata = get_workflow_information(app_url, workflow_id)
+        if workflow_metadata['status'] in target_statuses:
+            print(f"{workflow_id} finished running. Status: {workflow_metadata['status']}")
Status: {workflow_metadata['status']}") + else: + workflow_ids.append(workflow_id) + current_running_workflow_count += 1 + if current_running_workflow_count == workflow_ids.len(): + if current_running_workflow_count == 0: + print("Workflow(s) finished running") + else: + # Reset current count to 0 for next retry + # Decrement max_retries by 1 + # Wait 5 minutes before checking workflow statuses again + print(f"These workflows have yet to return a completed status: [{workflow_ids.join(', ')}]") + max_retries -= 1 + current_running_workflow_count = 0 + time.sleep(60 * 5) + +# This chunk of code only executes one workflow +# Would like to modify this down the road to execute and store references for multiple workflows +workspace_id = create_workspace() +time.sleep(60 * 20) # Added an sleep here to give the workspace time to provision +app_url = get_app_url(workspace_id, 'cromwell') +workflow_response = submit_workflow_to_cromwell(app_url, "Run Workflow Test") +#Giving workflow 10 minutes to complete +#Will need to update this when swapping out hello wdl with fetch_sra_to_bam (20 min?) +time.sleep(60 * 10) + +# This chunk of code supports checking one or more workflows +# Probably won't require too much modification if we want to run additional submission tests +workflow_ids = [workflow_response['id']] +get_completed_workflow(app_url, workflow_ids) +print("Workflow submission and completion successful") \ No newline at end of file diff --git a/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.inputs b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.inputs new file mode 100644 index 00000000000..10310ef283b --- /dev/null +++ b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.inputs @@ -0,0 +1,4 @@ +{ + "wf_hello.hello.addressee": "m'Lord" +} + diff --git a/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.options b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.options new file mode 100644 index 00000000000..68c25807cfb --- /dev/null +++ b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.options @@ -0,0 +1,3 @@ +{ + "google_legacy_machine_selection": true +} diff --git a/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.wdl b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.wdl new file mode 100644 index 00000000000..8da447e4eb0 --- /dev/null +++ b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello.wdl @@ -0,0 +1,19 @@ +task hello { + String addressee + command { + echo "Hello ${addressee}!" + } + output { + String salutation = read_string(stdout()) + } + runtime { + docker: "ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950" + } +} + +workflow wf_hello { + call hello + output { + hello.salutation + } +} diff --git a/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello_yaml.inputs b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello_yaml.inputs new file mode 100644 index 00000000000..4b88ebc3222 --- /dev/null +++ b/server/src/test/python/cromwell-az-e2e/tests/workflow_files/hello_yaml.inputs @@ -0,0 +1 @@ +"wf_hello.hello.addressee": m'Lord