diff --git a/.github/workflows/buildContainer.yaml b/.github/workflows/buildContainer.yaml new file mode 100644 index 0000000..737c167 --- /dev/null +++ b/.github/workflows/buildContainer.yaml @@ -0,0 +1,297 @@ +# todo: "make" files once and push around through artifacts! + +name: docker + +on: + pull_request: + branches: + - tinqiita + push: + branches: + - tinqiita + +jobs: + make_certificates: + runs-on: ubuntu-latest + steps: + - name: Checkout tinqiita repo + uses: actions/checkout@v4 + + - name: Create certificate + # second copy of "qiita_server_certificates" is necessary to match path for docker build, first copy for mounting into container + run: | + make ./references/qiita_server_certificates ./environments/qiita_db.env ./environments/qiita.env config + cp -r ./references/qiita_server_certificates ./qiita_server_certificates + + - name: Store certifactes for follow up jobs + uses: actions/upload-artifact@v4 + with: + name: certificates + path: | + ./qiita_server_certificates + ./references/qiita_server_certificates + + build_main: + needs: make_certificates + strategy: + matrix: + container: ["nginx", "qiita", "plugin_collector"] + runs-on: ubuntu-latest + steps: + - name: Checkout tinqiita repo + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Read image version from dockerfile + id: vars + run: | + VERSION=$(head -n 1 Images/${{ matrix.container }}/${{ matrix.container }}.dockerfile | cut -d ":" -f 2- | tr -d " ") + echo "IMAGE_TAG=$VERSION" >> $GITHUB_ENV + cp Images/${{ matrix.container }}/start_${{ matrix.container }}.sh Images/test_plugin.sh Images/nginx/nginx_qiita.conf Images/${{ matrix.container }}/requirements.txt . + cp Images/qiita/config_portal.cfg Images/qiita/config_qiita_oidc.cfg Images/qiita/drop_workflows.py Images/qiita/start_plugin.py Images/qiita/start_qiita-initDB.sh Images/qiita/start_qiita.sh . 
+ cp Images/plugin_collector/collect_configs.py Images/plugin_collector/fix_test_db.py Images/plugin_collector/stefan_cert.conf Images/plugin_collector/stefan_csr.conf . + + - name: Download certificates from job build_main + uses: actions/download-artifact@v4 + with: + name: certificates + path: ./ + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build main qiita images and push to github's own registry + uses: docker/build-push-action@v6 + with: + context: . + push: true + file: Images/${{ matrix.container }}/${{ matrix.container }}.dockerfile + tags: ghcr.io/${{ github.repository }}/${{ matrix.container }}:testcandidate + cache-from: type=gha,scope=tinqiita-${{ github.ref_name }} + cache-to: type=gha,scope=tinqiita-${{ github.ref_name }},mode=max + + make_references: + needs: build_main + runs-on: ubuntu-latest + steps: + - name: Checkout tinqiita repo + uses: actions/checkout@v4 + + - name: Create partially fake reference databases + run: | + mkdir -p ./references/qp-deblur/ ./references/qp-target-gene + make ./environments/qiita_db.env ./environments/qiita.env config ./references/qp-deblur/reference-gg-raxml-bl.tre + for f in `echo "references/qp-target-gene/97_otus.fasta references/qp-target-gene/97_otus.tree references/qp-target-gene/97_otu_taxonomy.txt"`; do echo "fake" > $f; done + + - name: Store fake references for follow up jobs + uses: actions/upload-artifact@v4 + with: + name: fake_references + path: | + ./references/qp-deblur + ./references/qp-target-gene + ./environments + + build_plugins: + needs: + - build_main + - make_references + strategy: + matrix: + plugin: ["qp-deblur", "qtp-biom", "qtp-sequencing", "qtp-visualization", "qtp-diversity", "qp-target-gene", "qtp-job-output-folder"] + runs-on: ubuntu-latest + steps: + - name: Checkout tinqiita repo + uses: actions/checkout@v4 + + - name: Set up Docker 
Buildx + uses: docker/setup-buildx-action@v3 + + - name: Read image version from dockerfile + id: vars + run: | + VERSION=$(head -n 1 Images/${{ matrix.plugin }}/${{ matrix.plugin }}.dockerfile | cut -d ":" -f 2- | tr -d " ") + echo "IMAGE_TAG=$VERSION" >> $GITHUB_ENV + cp Images/${{ matrix.plugin }}/start_${{ matrix.plugin }}.sh Images/test_plugin.sh Images/${{ matrix.plugin }}/requirements.txt Images/trigger_noconda.py Images/trigger.py . + + - name: Download certificates from job build_main + uses: actions/download-artifact@v4 + with: + name: certificates + path: ./ + - name: Download fake references + uses: actions/download-artifact@v4 + with: + name: fake_references + path: ./ + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build plugin images + uses: docker/build-push-action@v6 + with: + context: . + load: true + file: Images/${{ matrix.plugin }}/${{ matrix.plugin }}.dockerfile + tags: ghcr.io/${{ github.repository }}/${{ matrix.plugin }}:testcandidate + cache-from: type=gha,scope=tinqiita-${{ github.ref_name }} + cache-to: type=gha,scope=tinqiita-${{ github.ref_name }},mode=max + + - name: adapt compose file to select specific plugin + run: | + sed -i "s/MATRIXPLUGIN/${{ matrix.plugin }}/g" compose_github.yaml + + - name: Run docker compose + uses: hoverkraft-tech/compose-action@v2.0.1 + with: + compose-file: "compose_github.yaml" + services: | + nginx + + - name: Execute tests in the running services + run: | + sleep 5 + docker compose exec ${{ matrix.plugin }} /bin/bash -c "bash /test_plugin.sh" + + - name: Push image to ghcr (only if tests passed) + if: success() + uses: docker/build-push-action@v6 + with: + context: . 
+ push: true + file: Images/${{ matrix.plugin }}/${{ matrix.plugin }}.dockerfile + tags: ghcr.io/${{ github.repository }}/${{ matrix.plugin }}:testcandidate + + # the qp-qiime2 plugin cannot be tested in isolation, it also needs qtp-diversity and qtp-visualization to be active in qiita + build_mulit_plugins: + needs: + - build_plugins + strategy: + matrix: + multiplugin: ["qp-qiime2"] + runs-on: ubuntu-latest + steps: + - name: Checkout tinqiita repo + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Read image version from dockerfile + id: vars + run: | + VERSION=$(head -n 1 Images/${{ matrix.multiplugin }}/${{ matrix.multiplugin }}.dockerfile | cut -d ":" -f 2- | tr -d " ") + echo "IMAGE_TAG=$VERSION" >> $GITHUB_ENV + cp Images/${{ matrix.multiplugin }}/start_${{ matrix.multiplugin }}.sh Images/test_plugin.sh Images/${{ matrix.multiplugin }}/requirements.txt Images/trigger_noconda.py Images/trigger.py . + + - name: Download certificates from job build_main + uses: actions/download-artifact@v4 + with: + name: certificates + path: ./ + - name: Download fake references + uses: actions/download-artifact@v4 + with: + name: fake_references + path: ./ + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build plugin images + uses: docker/build-push-action@v6 + with: + context: . 
+ load: true + file: Images/${{ matrix.multiplugin }}/${{ matrix.multiplugin }}.dockerfile + tags: ghcr.io/${{ github.repository }}/${{ matrix.multiplugin }}:testcandidate + cache-from: type=gha,scope=tinqiita-${{ github.ref_name }} + cache-to: type=gha,scope=tinqiita-${{ github.ref_name }},mode=max + + - name: adapt compose file to select specific plugin + run: | + if [[ "${{ matrix.multiplugin }}" == "qp-qiime2" ]]; then sed -i 's|- QIITA_PLUGINS="MATRIXPLUGIN:"|- QIITA_PLUGINS="${{ matrix.multiplugin }}:qtp-diversity:qtp-visualization:"|' compose_github.yaml; sed -i 's|MATRIXPLUGIN:|${{ matrix.multiplugin }}:\n condition: service_started\n qtp-diversity:\n condition: service_started\n qtp-visualization:|' compose_github.yaml; fi; + + - name: Run docker compose + uses: hoverkraft-tech/compose-action@v2.0.1 + with: + compose-file: "compose_github.yaml" + services: | + nginx + + - name: Execute tests in the running services + run: | + sleep 5 + docker compose exec ${{ matrix.multiplugin }} /bin/bash -c "bash /test_plugin.sh" + + - name: Push image to ghcr (only if tests passed) + if: success() + uses: docker/build-push-action@v6 + with: + context: . 
+ push: true + file: Images/${{ matrix.multiplugin }}/${{ matrix.multiplugin }}.dockerfile + tags: ghcr.io/${{ github.repository }}/${{ matrix.multiplugin }}:testcandidate + + publish_images: + needs: + - build_plugins + - build_main + - build_mulit_plugins + strategy: + matrix: + image: ["qp-deblur", "qtp-biom", "qtp-sequencing", "qtp-visualization", "qtp-diversity", "qp-target-gene", "qtp-job-output-folder", + "nginx", "qiita", "plugin_collector", + "qp-qiime2"] + runs-on: ubuntu-latest + steps: + - name: Checkout tinqiita repo + uses: actions/checkout@v4 + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Login to computational.bio registry + uses: docker/login-action@v3 + with: + registry: harbor.computational.bio.uni-giessen.de + username: ${{ vars.HARBOR_CB_USERNAME }} + password: ${{ secrets.HARBOR_CB_SECRET }} + + - name: Pull image from GHCR + run: docker pull ghcr.io/${{ github.repository }}/${{ matrix.image }}:testcandidate + + - name: Read image version from dockerfile + id: vars + run: | + VERSION=$(head -n 1 Images/${{ matrix.image }}/${{ matrix.image }}.dockerfile | cut -d ":" -f 2- | tr -d " ") + echo "IMAGE_TAG=$VERSION" >> $GITHUB_ENV + + - name: Retag image for Docker Hub + run: | + docker tag ghcr.io/${{ github.repository }}/${{ matrix.image }}:testcandidate harbor.computational.bio.uni-giessen.de/tinqiita/${{ matrix.image }}:${{ env.IMAGE_TAG }} + docker tag ghcr.io/${{ github.repository }}/${{ matrix.image }}:testcandidate harbor.computational.bio.uni-giessen.de/tinqiita/${{ matrix.image }}:latest + + - name: Push image to Docker Hub + run: | + docker push harbor.computational.bio.uni-giessen.de/tinqiita/${{ matrix.image }}:${{ env.IMAGE_TAG }} + docker push harbor.computational.bio.uni-giessen.de/tinqiita/${{ matrix.image }}:latest diff --git a/.gitignore b/.gitignore index d967199..7af62fb 100644 --- 
a/.gitignore +++ b/.gitignore @@ -1,2 +1,5 @@ -environments/db.env -environments/keycloak.env +environments/*.env +logs/* +.built_image_* +Certificates/* +references/* \ No newline at end of file diff --git a/DemoData/metadata.txt b/DemoData/metadata.txt new file mode 100644 index 0000000..0fcf059 --- /dev/null +++ b/DemoData/metadata.txt @@ -0,0 +1,43 @@ +sample_name cage collection_date collection_timestamp comments_2022_05_31 country description dna_extracted elevation empo_1 empo_2 empo_3 empo_4 env_biome env_feature env_material env_package geo_loc_name host_autopsy host_body_habitat host_body_product host_body_site host_common_name host_genotype host_scientific_name host_subject_id host_taxid host_weight host_weight_2022_05_11 host_weight_2022_05_11_units host_weight_2022_05_18 host_weight_2022_05_18_units host_weight_2022_05_24 host_weight_2022_05_24_units host_weight_2022_05_31 host_weight_2022_05_31_units host_weight_2022_06_01 host_weight_2022_06_01_units host_weight_2022_06_02 host_weight_2022_06_02_units host_weight_2022_06_07 host_weight_2022_06_07_units host_weight_2022_06_14 host_weight_2022_06_14_units host_weight_2022_06_21 host_weight_2022_06_21_units host_weight_units latitude longitude mouse_label physical_specimen_location physical_specimen_remaining qiita_sample_type sample_type scientific_name taxon_id timestamp_flashtreatment timestamp_treatment timestamp_tumorinjection treatment tumor_cellline tumor_volume_2022_05_26 tumor_volume_2022_05_26_units tumor_volume_2022_05_31 tumor_volume_2022_05_31_units tumor_volume_2022_06_02 tumor_volume_2022_06_02_units tumor_volume_2022_06_07 tumor_volume_2022_06_07_units tumor_volume_2022_06_09 tumor_volume_2022_06_09_units tumor_volume_2022_06_13 tumor_volume_2022_06_13_units tumor_volume_2022_06_15 tumor_volume_2022_06_15_units tumor_volume_2022_06_17 tumor_volume_2022_06_17_units tumor_volume_2022_06_20 tumor_volume_2022_06_20_units tumor_volume_2022_06_24 tumor_volume_2022_06_24_units zymo_custom_label 
+6.15299.zr5156.10V3V4 R1C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt 2 Metastasis in Ling UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 37 10090 18.8 18.8 grams 19.9 grams 19.7 grams 21 grams not collected grams not collected grams 21.7 grams 23.4 grams 23.1 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control osteosarcoma cells LM8 108.8 cubic_millimeters 202.14 cubic_millimeters 122.9 cubic_millimeters 355.87 cubic_millimeters 193.21 cubic_millimeters 404.43 cubic_millimeters 225.94 cubic_millimeters 307.72 cubic_millimeters 598.69 cubic_millimeters 758.31 cubic_millimeters 12 +6.15299.zr5156.11V3V4 R1C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt No Metastasis UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 38 10090 19.6 19.6 grams 21.2 grams 21.6 grams 20.7 grams not collected grams not collected grams 21.2 grams 23.4 grams 22.8 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control +CD133 osteosarcoma cells LM8 155.74 cubic_millimeters 205.15 cubic_millimeters 192.87 cubic_millimeters 260.46 cubic_millimeters 231.28 cubic_millimeters 363.82 cubic_millimeters 435.2 cubic_millimeters 334.93 cubic_millimeters 460.53 cubic_millimeters not collected cubic_millimeters 13 +6.15299.zr5156.12V3V4 R1C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut 
(non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 39 10090 18.8 18.8 grams 20.8 grams 20.5 grams 22.1 grams not collected grams not collected grams 21.7 grams 21.7 grams 21.6 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control +CD133 osteosarcoma cells LM8 176.87 cubic_millimeters 102.76 cubic_millimeters 219.75 cubic_millimeters 191.59 cubic_millimeters 190.12 cubic_millimeters 340.03 cubic_millimeters 376.11 cubic_millimeters 230.79 cubic_millimeters 376.8 cubic_millimeters not collected cubic_millimeters 14 +6.15299.zr5156.13V3V4 R1C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt Big Tumour intestine UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 40 10090 18.8 18.8 grams 20.6 grams 20.5 grams 22.3 grams not collected grams not collected grams 22.1 grams 23.1 grams 23 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control +CD133 osteosarcoma cells LM8 156.06 cubic_millimeters 205.73 cubic_millimeters 281.13 cubic_millimeters 281.13 cubic_millimeters 326.98 cubic_millimeters 553.1 cubic_millimeters 546.34 cubic_millimeters 551.07 cubic_millimeters 706.5 cubic_millimeters not collected cubic_millimeters 15 +6.15299.zr5156.14V3V4 R1C4 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt Tumour more bony UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 
42 10090 17.6 17.6 grams 20 grams 19.7 grams 19.6 grams not collected grams not collected grams 21.4 grams 21.2 grams 22.9 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 194.26 cubic_millimeters 160.06 cubic_millimeters 131.35 cubic_millimeters 163.2 cubic_millimeters 142.87 cubic_millimeters 127.55 cubic_millimeters 164.85 cubic_millimeters 109.9 cubic_millimeters 131.88 cubic_millimeters 189.81 cubic_millimeters 16 +6.15299.zr5156.15V3V4 R1C4 2022 2022 booster_diet Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt 3 Metastasis UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 43 10090 20 20 grams 21.2 grams 21.8 grams 18.9 grams 19.2 grams 21.5 grams 22.4 grams 24 grams 24.3 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 146.53 cubic_millimeters 164.6 cubic_millimeters 162.81 cubic_millimeters 131.08 cubic_millimeters 105.69 cubic_millimeters 156.06 cubic_millimeters 78.5 cubic_millimeters 78.5 cubic_millimeters 131.88 cubic_millimeters 143.05 cubic_millimeters 17 +6.15299.zr5156.16V3V4 R1C4 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt No Visible Tumour UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 44 10090 17.3 17.3 grams 18.9 grams 21 grams 20 grams not collected grams not collected grams 22.6 grams 21.8 grams 22.1 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 
410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 137.85 cubic_millimeters 169.56 cubic_millimeters 165.96 cubic_millimeters 65.42 cubic_millimeters 80.07 cubic_millimeters 65.42 cubic_millimeters 86.35 cubic_millimeters 65.42 cubic_millimeters 78.5 cubic_millimeters not collected cubic_millimeters 18 +6.15299.zr5156.17V3V4 R1C4 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt - UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 45 10090 19.7 19.7 grams 21.1 grams 21.5 grams 20.9 grams not collected grams not collected grams 23.8 grams 22.9 grams 23.3 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 116.52 cubic_millimeters 173.64 cubic_millimeters 188.12 cubic_millimeters 153.73 cubic_millimeters 134.59 cubic_millimeters 257.17 cubic_millimeters 249.11 cubic_millimeters 175.84 cubic_millimeters 175.84 cubic_millimeters 142.5 cubic_millimeters 19 +6.15299.zr5156.18V3V4 R1C5 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 46 10090 20.8 20.8 grams 22.8 grams 21.7 grams 21.7 grams not collected grams not collected grams 23.2 grams 23.5 grams 24.5 grams grams 8.677660635 49.93039264 avg Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 105.35 cubic_millimeters 99.44 cubic_millimeters 158.93 cubic_millimeters 113.33 cubic_millimeters 114.13 cubic_millimeters 148.73 cubic_millimeters 
156.65 cubic_millimeters 109.9 cubic_millimeters 109.9 cubic_millimeters 111.03 cubic_millimeters 20 +6.15299.zr5156.19V3V4 R1C5 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 47 10090 17.6 17.6 grams 20.2 grams 21.1 grams 19.6 grams not collected grams not collected grams 22.5 grams 21.8 grams 23.2 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 85.11 cubic_millimeters 135.61 cubic_millimeters 118.88 cubic_millimeters 124.66 cubic_millimeters 86.35 cubic_millimeters 173.89 cubic_millimeters 157.47 cubic_millimeters 109.9 cubic_millimeters 87.92 cubic_millimeters 154.13 cubic_millimeters 21 +6.15299.zr5156.1V3V4 R1C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt - UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 26 10090 18.5 18.5 grams 20.6 grams 21 grams 21.5 grams not collected grams not collected grams 23.5 grams 23.1 grams 23.7 grams grams 8.677660635 49.93039264 avg Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control no Tumor osteosarcoma cells LM8 not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters 1 +6.15299.zr5156.20V3V4 R1C5 2022 2022 Germany 
FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 48 10090 18 18 grams 19.5 grams 19.8 grams 20 grams not collected grams not collected grams 21.5 grams 22.8 grams 22.4 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 127.69 cubic_millimeters 74.87 cubic_millimeters 96.57 cubic_millimeters 149.13 cubic_millimeters 66.06 cubic_millimeters 82.9 cubic_millimeters 117.44 cubic_millimeters 78.5 cubic_millimeters 91.58 cubic_millimeters no visible tumour cubic_millimeters 22 +6.15299.zr5156.21V3V4 R1C5 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 49 10090 17.4 17.4 grams 19.1 grams 19.7 grams 21.1 grams not collected grams not collected grams 20.9 grams 22.4 grams 23.7 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 92.32 cubic_millimeters 112.66 cubic_millimeters 93.4 cubic_millimeters 95.77 cubic_millimeters 95.77 cubic_millimeters 94.81 cubic_millimeters 246.35 cubic_millimeters 146.53 cubic_millimeters 109.9 cubic_millimeters 65.42 cubic_millimeters 23 +6.15299.zr5156.22V3V4 R1C5 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt 
UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 50 10090 19.2 19.2 grams 20.5 grams 21.4 grams 20.8 grams not collected grams not collected grams 23.3 grams 23 grams 23.1 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 109.24 cubic_millimeters 159.4 cubic_millimeters 180.07 cubic_millimeters 100.89 cubic_millimeters 165.32 cubic_millimeters 182.87 cubic_millimeters 169.06 cubic_millimeters 94.2 cubic_millimeters 91.58 cubic_millimeters 101.72 cubic_millimeters 24 +6.15299.zr5156.23V3V4 R1C6 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 51 10090 17.1 17.1 grams 19.7 grams 20.1 grams 20.1 grams not collected grams not collected grams 22.4 grams 22.5 grams 22.9 grams grams 8.677660635 49.93039264 avg Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 127.48 cubic_millimeters 114.19 cubic_millimeters 117.56 cubic_millimeters 104.22 cubic_millimeters 127.37 cubic_millimeters 131.84 cubic_millimeters 105.35 cubic_millimeters 113.04 cubic_millimeters 78.5 cubic_millimeters 129.31 cubic_millimeters 25 +6.15299.zr5156.24V3V4 R1C6 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 52 10090 17.9 17.9 grams 19.5 grams 20.2 grams 20.2 grams not collected grams not collected grams 22.6 grams 21.7 grams 22.7 grams grams 8.677660635 49.93039264 1 
Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 56.52 cubic_millimeters 151.6 cubic_millimeters 126.35 cubic_millimeters 63.85 cubic_millimeters 71.83 cubic_millimeters 199.98 cubic_millimeters 160.14 cubic_millimeters 94.2 cubic_millimeters 109.9 cubic_millimeters 153.86 cubic_millimeters 26 +6.15299.zr5156.25V3V4 R1C6 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 53 10090 15.6 15.6 grams 18.4 grams 21.3 grams 20.5 grams not collected grams not collected grams 22.6 grams 22.1 grams 23.2 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 65.42 cubic_millimeters 32.5 cubic_millimeters 50.24 cubic_millimeters 34.54 cubic_millimeters 71.24 cubic_millimeters 76.62 cubic_millimeters 99.76 cubic_millimeters 65.42 cubic_millimeters 104.67 cubic_millimeters 64.37 cubic_millimeters 27 +6.15299.zr5156.26V3V4 R1C6 2022 2022 booster_diet Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 54 10090 18 18 grams 20.1 grams 21.5 grams 18.3 grams 20.5 grams 20.8 grams 21.9 grams 23.1 grams 23.3 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 118.44 cubic_millimeters 165.8 cubic_millimeters 175.49 cubic_millimeters 130.62 cubic_millimeters 112.26 cubic_millimeters 
167.21 cubic_millimeters 166.68 cubic_millimeters 109.9 cubic_millimeters 128.22 cubic_millimeters 368.43 cubic_millimeters 28 +6.15299.zr5156.27V3V4 R1C6 2022 2022 booster_diet Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 55 10090 16.5 16.5 grams 18.2 grams 20.2 grams 17.9 grams 19.8 grams 19.9 grams 22.5 grams 22.3 grams 23.8 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 FLASH C12 osteosarcoma cells LM8 153.86 cubic_millimeters 195.84 cubic_millimeters 204.65 cubic_millimeters 163.2 cubic_millimeters 125.6 cubic_millimeters 206.86 cubic_millimeters 160.41 cubic_millimeters 128.22 cubic_millimeters 91.58 cubic_millimeters 129.85 cubic_millimeters 29 +6.15299.zr5156.28V3V4 R2C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt No Tumour in Tissue visible UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 56 10090 18.6 18.6 grams 19.3 grams 19.7 grams 20.5 grams not collected grams not collected grams 21.5 grams 21.9 grams 22.2 grams grams 8.677660635 49.93039264 avg Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 65.42 cubic_millimeters 23.55 cubic_millimeters 30.38 cubic_millimeters 39.25 cubic_millimeters 65.42 cubic_millimeters 65.42 cubic_millimeters 112.26 cubic_millimeters 128.22 cubic_millimeters 113.04 cubic_millimeters no visible tumour cubic_millimeters 30 +6.15299.zr5156.29V3V4 R2C1 2022 2022 Germany FLASH vs 
conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt Metastasis in Kidney UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 57 10090 19.8 19.8 grams 20.5 grams 20.6 grams 19.1 grams not collected grams not collected grams 18.7 grams 22.6 grams 22.8 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 78.63 cubic_millimeters 171.15 cubic_millimeters 175.49 cubic_millimeters 194.52 cubic_millimeters 168.2 cubic_millimeters 272.01 cubic_millimeters 146.53 cubic_millimeters 234.45 cubic_millimeters 179.5 cubic_millimeters 149.67 cubic_millimeters 31 +6.15299.zr5156.2V3V4 R1C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt - UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 27 10090 19.4 19.4 grams 21.9 grams 21.9 grams 21.7 grams not collected grams not collected grams 22.2 grams 23.9 grams 24.6 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control no Tumor osteosarcoma cells LM8 not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters 2 +6.15299.zr5156.30V3V4 R2C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal 
distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 58 10090 18.4 18.4 grams 20 grams 20.3 grams 20 grams not collected grams not collected grams 22.1 grams 23.3 grams 23 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 143.34 cubic_millimeters 233.78 cubic_millimeters 153.88 cubic_millimeters 126.93 cubic_millimeters 170.75 cubic_millimeters 204.66 cubic_millimeters 194.99 cubic_millimeters 200.96 cubic_millimeters 109.9 cubic_millimeters 172.39 cubic_millimeters 32 +6.15299.zr5156.31V3V4 R2C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt 1 Metastasis in Liver UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 59 10090 17.4 17.4 grams 20.2 grams 19.9 grams 19.1 grams not collected grams not collected grams 22.2 grams 21.6 grams 20.9 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 149.28 cubic_millimeters 149.18 cubic_millimeters 215.36 cubic_millimeters 141.3 cubic_millimeters 152.56 cubic_millimeters 228.69 cubic_millimeters 282.39 cubic_millimeters 263.76 cubic_millimeters 282.6 cubic_millimeters 131.88 cubic_millimeters 33 +6.15299.zr5156.32V3V4 R2C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 60 10090 17.7 17.7 grams 
20.3 grams 21.1 grams 20.5 grams not collected grams not collected grams 22.8 grams 23.3 grams 23.5 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 154.74 cubic_millimeters 206.65 cubic_millimeters 211.92 cubic_millimeters 117.44 cubic_millimeters 126.07 cubic_millimeters 229 cubic_millimeters 231.31 cubic_millimeters 179.5 cubic_millimeters 200.96 cubic_millimeters 78.5 cubic_millimeters 34 +6.15299.zr5156.33V3V4 R1C3 2022 2022 booster_diet Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt No metastasis UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 61 10090 16.8 16.8 grams 18.3 grams 20.6 grams 21.1 grams not collected grams not collected grams 20.8 grams 21.4 grams 22.5 grams grams 8.677660635 49.93039264 avg Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control osteosarcoma cells LM8 31.15 cubic_millimeters 45.51 cubic_millimeters 77.39 cubic_millimeters 135.65 cubic_millimeters 253.02 cubic_millimeters 78.5 cubic_millimeters 93.55 cubic_millimeters 65.42 cubic_millimeters 65.42 cubic_millimeters 571.24 cubic_millimeters 35 +6.15299.zr5156.34V3V4 R2C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 62 10090 18.8 18.8 grams 20.8 grams 20.3 grams 19.2 grams not collected grams not collected grams 22.2 grams 23.4 grams 23.5 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 
2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 138.55 cubic_millimeters 136.17 cubic_millimeters 271.24 cubic_millimeters 151.85 cubic_millimeters 120.42 cubic_millimeters 188.38 cubic_millimeters 190.76 cubic_millimeters 131.88 cubic_millimeters 131.88 cubic_millimeters 176.87 cubic_millimeters 36 +6.15299.zr5156.35V3V4 R2C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 63 10090 16 16 grams 19.9 grams 20 grams 19.4 grams not collected grams not collected grams 20 grams 22 grams 22.9 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 103.62 cubic_millimeters 178.72 cubic_millimeters 178.59 cubic_millimeters 230.7 cubic_millimeters 217.08 cubic_millimeters 187.27 cubic_millimeters 297.67 cubic_millimeters 251.2 cubic_millimeters 251.2 cubic_millimeters 402.97 cubic_millimeters 37 +6.15299.zr5156.36V3V4 R2C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 64 10090 18.4 18.4 grams 21.3 grams 20.8 grams 19.6 grams not collected grams not collected grams 22.4 grams 22.7 grams 23.3 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 180.32 cubic_millimeters 210.68 cubic_millimeters 233.35 cubic_millimeters 113.01 cubic_millimeters 165.79 cubic_millimeters 341.08 cubic_millimeters 
242.88 cubic_millimeters 226.08 cubic_millimeters 167.47 cubic_millimeters 199.86 cubic_millimeters 38 +6.15299.zr5156.37V3V4 R2C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 65 10090 16.5 16.5 grams 19.5 grams 19.7 grams 19.1 grams not collected grams not collected grams 21.1 grams 22.1 grams 22.6 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 165.99 cubic_millimeters 121.47 cubic_millimeters 226.21 cubic_millimeters 128.58 cubic_millimeters 146.91 cubic_millimeters 169.3 cubic_millimeters 187.2 cubic_millimeters 113.04 cubic_millimeters 131.88 cubic_millimeters 381.51 cubic_millimeters 39 +6.15299.zr5156.38V3V4 R2C3 2022 2022 booster_diet Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 66 10090 16.6 16.6 grams 20.2 grams 22 grams 19.8 grams 20 grams 21.6 grams 23.1 grams 22.7 grams 21.9 grams grams 8.677660635 49.93039264 avg Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 140.17 cubic_millimeters 116.58 cubic_millimeters 158.66 cubic_millimeters 147.8 cubic_millimeters 132.84 cubic_millimeters 244.21 cubic_millimeters 171.44 cubic_millimeters 175.84 cubic_millimeters 113.04 cubic_millimeters 376.8 cubic_millimeters 40 +6.15299.zr5156.39V3V4 R2C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated 
Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 67 10090 16.3 16.3 grams 19.5 grams 19.8 grams 19.3 grams not collected grams not collected grams 21.8 grams 22 grams 24.4 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 65.42 cubic_millimeters 153.15 cubic_millimeters 234.45 cubic_millimeters 138.16 cubic_millimeters 114.13 cubic_millimeters 188.11 cubic_millimeters 178.59 cubic_millimeters 175.84 cubic_millimeters 109.9 cubic_millimeters 137.01 cubic_millimeters 41 +6.15299.zr5156.3V3V4 R1C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt - UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 28 10090 17.6 17.6 grams 19.8 grams 20.4 grams 20.2 grams not collected grams not collected grams 20.6 grams 21.9 grams 24.4 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control no Tumor osteosarcoma cells LM8 not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters 3 +6.15299.zr5156.40V3V4 R2C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces 
host-associated Germany:Hesse:Darmstadt Metastasis Intestine UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 68 10090 18.9 18.9 grams 20.5 grams 22.2 grams 22 grams not collected grams not collected grams 23.5 grams 23.1 grams 23.9 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 109.9 cubic_millimeters 164.85 cubic_millimeters 210.05 cubic_millimeters 137.01 cubic_millimeters 142.43 cubic_millimeters 177.77 cubic_millimeters 254.34 cubic_millimeters 251.2 cubic_millimeters 175.84 cubic_millimeters 339.12 cubic_millimeters 42 +6.15299.zr5156.41V3V4 R2C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 69 10090 17.8 17.8 grams 19.9 grams 20.7 grams 19.5 grams not collected grams not collected grams 22.1 grams 22.4 grams 22.6 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 109.9 cubic_millimeters 165.2 cubic_millimeters 155.43 cubic_millimeters 165.53 cubic_millimeters 126.42 cubic_millimeters 138.79 cubic_millimeters 217.92 cubic_millimeters 263.76 cubic_millimeters 109.9 cubic_millimeters 171.88 cubic_millimeters 43 +6.15299.zr5156.42V3V4 R2C3 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 70 10090 17.2 17.2 grams 20.1 grams 21.3 grams 21.2 grams not collected grams not collected 
grams 23.2 grams 22.9 grams 24 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 133.71 cubic_millimeters 138.87 cubic_millimeters 124.66 cubic_millimeters 159.75 cubic_millimeters 135.05 cubic_millimeters 260.46 cubic_millimeters 190.49 cubic_millimeters 293.07 cubic_millimeters 263.76 cubic_millimeters 117.75 cubic_millimeters 44 +6.15299.zr5156.4V3V4 R1C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt - UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 29 10090 17.3 17.3 grams 18.3 grams 22.3 grams 22.1 grams not collected grams not collected grams 23.6 grams 26.2 grams 26.1 grams grams 8.677660635 49.93039264 2 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control no Tumor osteosarcoma cells LM8 not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters 4 +6.15299.zr5156.5V3V4 R1C1 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt - UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 30 10090 17.3 17.3 grams 18.5 grams 19.1 grams 20.1 grams not collected grams not collected grams 22.4 grams 22.7 grams 21.7 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut 
metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control no Tumor osteosarcoma cells LM8 not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters not applicable cubic_millimeters 5 +6.15299.zr5156.6V3V4 R1C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt No metastasis; small primary Tumour UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 32 10090 15 15 grams 15 grams 16.6 grams 17.3 grams not collected grams not collected grams 19.1 grams 18.9 grams 18.4 grams grams 8.677660635 49.93039264 1 Links Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control osteosarcoma cells LM8 170.44 cubic_millimeters 202.22 cubic_millimeters 212.84 cubic_millimeters 112.26 cubic_millimeters 176.31 cubic_millimeters 229.93 cubic_millimeters 242.48 cubic_millimeters 200.96 cubic_millimeters 301.44 cubic_millimeters 109.9 cubic_millimeters 7 +6.15299.zr5156.7V3V4 R1C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt Lot of metastasis; Big Tumour in Liver and Intestine UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 33 10090 17.1 17.1 grams 16.6 grams 18.8 grams 19.4 grams not collected grams not collected grams 20.2 grams 19.4 grams 20.2 grams grams 8.677660635 49.93039264 1 Rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control 
osteosarcoma cells LM8 153.83 cubic_millimeters 205.1 cubic_millimeters 181.32 cubic_millimeters 300.98 cubic_millimeters 378.33 cubic_millimeters 520.28 cubic_millimeters 487.22 cubic_millimeters 575.67 cubic_millimeters 898.04 cubic_millimeters 590.12 cubic_millimeters 8 +6.15299.zr5156.8V3V4 R1C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt Big Tumour intestine; Lot of metastasis in Lung; Metstatsis intestine and Kidney UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 35 10090 16.7 16.7 grams 16.5 grams 18.5 grams 19.4 grams not collected grams not collected grams 20 grams 20 grams 21.2 grams grams 8.677660635 49.93039264 1 Links 1 rechts Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Control osteosarcoma cells LM8 124.63 cubic_millimeters 222.98 cubic_millimeters 247.71 cubic_millimeters 230.79 cubic_millimeters 240.31 cubic_millimeters 267.08 cubic_millimeters 354.32 cubic_millimeters 226.08 cubic_millimeters 179.5 cubic_millimeters 372.75 cubic_millimeters 10 +6.15299.zr5156.9V3V4 R2C2 2022 2022 Germany FLASH vs conventional irradiation therapy TRUE 134.5 Host-associated Host-associated (non-saline) Animal (non-saline) Animal distal gut (non-saline) urban biome animal-associated habitat feces host-associated Germany:Hesse:Darmstadt UBERON:feces UBERON:feces UBERON:feces mouse C3H/He Mus musculus 36 10090 19.3 19.3 grams 19.8 grams 20.6 grams 19.3 grams not collected grams not collected grams 22.5 grams 23.2 grams 23.7 grams grams 8.677660635 49.93039264 avg Germany FALSE feces feces mouse gut metagenome 410661 2022-05-26 2022-05-26 2022-05-19 Konventional C12 osteosarcoma cells LM8 140.17 cubic_millimeters 117.7 cubic_millimeters 147.8 cubic_millimeters 145.13 cubic_millimeters 130.56 cubic_millimeters 
194.29 cubic_millimeters 214.02 cubic_millimeters 200.96 cubic_millimeters 175.84 cubic_millimeters no visible tumour cubic_millimeters 11 diff --git a/DemoData/prep_data.txt b/DemoData/prep_data.txt new file mode 100644 index 0000000..6ea0328 --- /dev/null +++ b/DemoData/prep_data.txt @@ -0,0 +1,4 @@ +sample_name barcode center_name center_project_name experiment_design_description instrument_model library_construction_protocol linker pcr_primers platform primer run_center run_date run_prefix sequencing_meth target_gene target_subfragment +6.15299.zr5156.1V3V4 not provided Zymo Research Corporation zr5156.16S_221128.zymo FLASH vs conventional irradiation on gut microbiome Illumina MiSeq DNA Extraction: One of three different DNA extraction kits was used depending on the sample type and sample volume. In most cases, the ZymoBIOMICS®-96 MagBead DNA Kit (Zymo Research, Irvine, CA) was used to extract DNA using an automated platform. In some cases, ZymoBIOMICS® DNA Miniprep Kit (Zymo Research, Irvine, CA) was used. For low biomass samples, such as skin swabs, the ZymoBIOMICS® DNA Microprep Kit (Zymo Research, Irvine, CA) was used as it permits for a lower elution volume, resulting in more concentrated DNA samples. Targeted Library Preparation: Bacterial 16S ribosomal RNA gene targeted sequencing was performed using the Quick-16S(TM) NGS Library Prep Kit (Zymo Research, Irvine, CA). In most cases, the bacterial 16S primers amplified the V3-V4 region of the 16S rRNA gene. These primers have been custom-designed by Zymo Research to provide the best coverage of the 16S gene while maintaining high sensitivity. Fungal ITS gene targeted sequencing was performed using the Quick-16S(TM) NGS Library Prep Kit with custom ITS2 primers substituted for 16S primers. The sequencing library was prepared using an innovative library preparation process in which PCR reactions were performed in real-time PCR machines to control cycles and therefore limit PCR chimera formation. 
The final PCR products were quantified with qPCR fluorescence readings and pooled together based on equal molarity. The final pooled library was cleaned with the Select-a-Size DNA Clean & Concentrator(TM) (Zymo Research, Irvine, CA), then quantified with TapeStation®(Agilent Technologies, Santa Clara, CA) and Qubit® (Thermo Fisher Scientific, Waltham, WA). GT FWD:CCTAYGGGDBGCWGCAG; REV:GACTACNVGGGTMTCTAATCC Illumina CCTAYGGGDBGCWGCAG GSI Helmhotzzentrum fuer Schwerionenforschung GmbH 2022-11-28 zr5156_1V3V4 Sequencing by synthesis 16S rRNA V3 +6.15299.zr5156.2V3V4 not provided Zymo Research Corporation zr5156.16S_221128.zymo FLASH vs conventional irradiation on gut microbiome Illumina MiSeq DNA Extraction: One of three different DNA extraction kits was used depending on the sample type and sample volume. In most cases, the ZymoBIOMICS®-96 MagBead DNA Kit (Zymo Research, Irvine, CA) was used to extract DNA using an automated platform. In some cases, ZymoBIOMICS® DNA Miniprep Kit (Zymo Research, Irvine, CA) was used. For low biomass samples, such as skin swabs, the ZymoBIOMICS® DNA Microprep Kit (Zymo Research, Irvine, CA) was used as it permits for a lower elution volume, resulting in more concentrated DNA samples. Targeted Library Preparation: Bacterial 16S ribosomal RNA gene targeted sequencing was performed using the Quick-16S(TM) NGS Library Prep Kit (Zymo Research, Irvine, CA). In most cases, the bacterial 16S primers amplified the V3-V4 region of the 16S rRNA gene. These primers have been custom-designed by Zymo Research to provide the best coverage of the 16S gene while maintaining high sensitivity. Fungal ITS gene targeted sequencing was performed using the Quick-16S(TM) NGS Library Prep Kit with custom ITS2 primers substituted for 16S primers. The sequencing library was prepared using an innovative library preparation process in which PCR reactions were performed in real-time PCR machines to control cycles and therefore limit PCR chimera formation. 
The final PCR products were quantified with qPCR fluorescence readings and pooled together based on equal molarity. The final pooled library was cleaned with the Select-a-Size DNA Clean & Concentrator(TM) (Zymo Research, Irvine, CA), then quantified with TapeStation®(Agilent Technologies, Santa Clara, CA) and Qubit® (Thermo Fisher Scientific, Waltham, WA). GT FWD:CCTAYGGGDBGCWGCAG; REV:GACTACNVGGGTMTCTAATCC Illumina CCTAYGGGDBGCWGCAG GSI Helmhotzzentrum fuer Schwerionenforschung GmbH 2022-11-28 zr5156_2V3V4 Sequencing by synthesis 16S rRNA V3 +6.15299.zr5156.3V3V4 not provided Zymo Research Corporation zr5156.16S_221128.zymo FLASH vs conventional irradiation on gut microbiome Illumina MiSeq DNA Extraction: One of three different DNA extraction kits was used depending on the sample type and sample volume. In most cases, the ZymoBIOMICS®-96 MagBead DNA Kit (Zymo Research, Irvine, CA) was used to extract DNA using an automated platform. In some cases, ZymoBIOMICS® DNA Miniprep Kit (Zymo Research, Irvine, CA) was used. For low biomass samples, such as skin swabs, the ZymoBIOMICS® DNA Microprep Kit (Zymo Research, Irvine, CA) was used as it permits for a lower elution volume, resulting in more concentrated DNA samples. Targeted Library Preparation: Bacterial 16S ribosomal RNA gene targeted sequencing was performed using the Quick-16S(TM) NGS Library Prep Kit (Zymo Research, Irvine, CA). In most cases, the bacterial 16S primers amplified the V3-V4 region of the 16S rRNA gene. These primers have been custom-designed by Zymo Research to provide the best coverage of the 16S gene while maintaining high sensitivity. Fungal ITS gene targeted sequencing was performed using the Quick-16S(TM) NGS Library Prep Kit with custom ITS2 primers substituted for 16S primers. The sequencing library was prepared using an innovative library preparation process in which PCR reactions were performed in real-time PCR machines to control cycles and therefore limit PCR chimera formation. 
The final PCR products were quantified with qPCR fluorescence readings and pooled together based on equal molarity. The final pooled library was cleaned with the Select-a-Size DNA Clean & Concentrator(TM) (Zymo Research, Irvine, CA), then quantified with TapeStation®(Agilent Technologies, Santa Clara, CA) and Qubit® (Thermo Fisher Scientific, Waltham, WA). GT FWD:CCTAYGGGDBGCWGCAG; REV:GACTACNVGGGTMTCTAATCC Illumina CCTAYGGGDBGCWGCAG GSI Helmhotzzentrum fuer Schwerionenforschung GmbH 2022-11-28 zr5156_3V3V4 Sequencing by synthesis 16S rRNA V3 diff --git a/DemoData/zr5156_1V3V4_R1.fastq.gz b/DemoData/zr5156_1V3V4_R1.fastq.gz new file mode 100644 index 0000000..4b605c3 Binary files /dev/null and b/DemoData/zr5156_1V3V4_R1.fastq.gz differ diff --git a/DemoData/zr5156_1V3V4_R2.fastq.gz b/DemoData/zr5156_1V3V4_R2.fastq.gz new file mode 100644 index 0000000..c78b169 Binary files /dev/null and b/DemoData/zr5156_1V3V4_R2.fastq.gz differ diff --git a/DemoData/zr5156_2V3V4_R1.fastq.gz b/DemoData/zr5156_2V3V4_R1.fastq.gz new file mode 100644 index 0000000..8789af7 Binary files /dev/null and b/DemoData/zr5156_2V3V4_R1.fastq.gz differ diff --git a/DemoData/zr5156_2V3V4_R2.fastq.gz b/DemoData/zr5156_2V3V4_R2.fastq.gz new file mode 100644 index 0000000..51ec621 Binary files /dev/null and b/DemoData/zr5156_2V3V4_R2.fastq.gz differ diff --git a/DemoData/zr5156_3V3V4_R1.fastq.gz b/DemoData/zr5156_3V3V4_R1.fastq.gz new file mode 100644 index 0000000..8c5dbe1 Binary files /dev/null and b/DemoData/zr5156_3V3V4_R1.fastq.gz differ diff --git a/DemoData/zr5156_3V3V4_R2.fastq.gz b/DemoData/zr5156_3V3V4_R2.fastq.gz new file mode 100644 index 0000000..3b8c25c Binary files /dev/null and b/DemoData/zr5156_3V3V4_R2.fastq.gz differ diff --git a/Images/nginx/nginx.dockerfile b/Images/nginx/nginx.dockerfile new file mode 100644 index 0000000..c2ea3cd --- /dev/null +++ b/Images/nginx/nginx.dockerfile @@ -0,0 +1,13 @@ +# VERSION: 2025.08.28 + +FROM yspreen/nginx + +COPY nginx_qiita.conf / +COPY 
start_nginx.sh / + +RUN chmod 777 nginx_qiita.conf +RUN chmod 777 start_nginx.sh + +RUN mkdir -p /var/log/nginx + +CMD ["nginx", "/start_nginx.sh"] diff --git a/Images/nginx/nginx_qiita.conf b/Images/nginx/nginx_qiita.conf new file mode 100644 index 0000000..8e0772e --- /dev/null +++ b/Images/nginx/nginx_qiita.conf @@ -0,0 +1,95 @@ +user nobody nogroup; +daemon off; +# error_log /var/log/nginx/error_log warn; +error_log /logs/nginx_error.log warn; + +events { + worker_connections 1024; +} + +http { + client_max_body_size 7M; # increase maximum body size from default 1M to match https://github.com/qiita-spots/qiita/blob/ac62aba5333f537c32e213855edc39c273aa9871/qiita_pet/static/vendor/js/resumable-uploader.js#L51 (which is 3M). Note that resumable-uploader.js's last chunk can be max. twice as large as chunk size, see: https://github.com/23/resumable.js/issues/51 + + # ports to redirect for mainqiita + upstream mainqiita { + server qiita:21174; + server tinqiita-qiita-worker-1:21175; + server tinqiita-qiita-worker-2:21175; + server tinqiita-qiita-worker-3:21175; + } + + # define variables for the actions that shall be taken for websocket handshake + map $http_upgrade $connection_upgrade { + default upgrade; + '' close; + } + + # listening to 8080 and redirecting to https + #server { + # listen 8383; + # server_name localhost; + # return 301 https://$server_name$request_uri; + #} + + server { + listen 8383 ssl; + server_name _; + merge_slashes off; + + access_log /logs/nginx_access.log; + + ssl_certificate /qiita_certificates/stefan_server.crt; + ssl_certificate_key /qiita_certificates/stefan_server.key; + + ssl_session_timeout 5m; + + # no catche + expires off; + + port_in_redirect off; + + # download configuration, based on: + # https://groups.google.com/forum/#!topic/python-tornado/sgadmx8Hd_s + + # protected location for working diretory + location /protected-working_dir/ { + internal; + + # CHANGE ME: This should match the WORKING_DIR in your qiita + # config. 
E.g., + alias /qiita_data/working_dir/; + } + + # protected location + location /protected/ { + internal; + + # CHANGE ME: This should match the BASE_DATA_DIR in your qiita + # config. E.g., + alias /qiita_data/; + } + + # enables communiction through websockets. + # Currently, only endpoints /consumer/, /analysis/selected/socket/, and /study/list/socket/ use websockets + # not needed for our local docker setup + # location ~ ^/(consumer|analysis/selected/socket|study/list/socket)/ { + # proxy_pass $scheme://mainqiita; + # proxy_set_header Host $http_host; + # proxy_redirect http:// https://; + # proxy_http_version 1.1; + # proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + # proxy_set_header Upgrade $http_upgrade; + # proxy_set_header Connection $connection_upgrade; + # proxy_set_header X-Forwarded-Host $http_host; + # } + + location / { + proxy_pass $scheme://mainqiita; + proxy_redirect off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Accept-Encoding identity; + } + } +} diff --git a/Images/nginx/requirements.txt b/Images/nginx/requirements.txt new file mode 100644 index 0000000..bcbe69d --- /dev/null +++ b/Images/nginx/requirements.txt @@ -0,0 +1 @@ +empty! 
\ No newline at end of file diff --git a/Images/nginx/start_nginx.sh b/Images/nginx/start_nginx.sh new file mode 100644 index 0000000..2906803 --- /dev/null +++ b/Images/nginx/start_nginx.sh @@ -0,0 +1,5 @@ +#!/bin/sh +mkdir -p /var/run/nginx/ /usr/local/var/tmp/nginx/ + +nginx -t -c /qiita_configuration/nginx_qiita.conf +nginx -c /qiita_configuration/nginx_qiita.conf diff --git a/Images/plugin_collector/collect_configs.py b/Images/plugin_collector/collect_configs.py new file mode 100644 index 0000000..39aa384 --- /dev/null +++ b/Images/plugin_collector/collect_configs.py @@ -0,0 +1,41 @@ +import os +import requests +import sys + +ENV_PLUGINS = 'QIITA_PLUGINS' +PORT = 5000 +API_ENDPOINT = "config" + +if ENV_PLUGINS not in os.environ or os.environ['QIITA_PLUGINS'] is None or os.environ['QIITA_PLUGINS'] == "": + raise ValueError("No qiita plugins given for which configuration files should be retrieved! Environment variable '%s' not set!" % ENV_PLUGINS) + +var_plugins = os.environ['QIITA_PLUGINS'] +# strip potential quotes +if var_plugins.startswith('"') or var_plugins.startswith("'"): + var_plugins = var_plugins[1:] +if var_plugins.endswith('"') or var_plugins.endswith("'"): + var_plugins = var_plugins[:-1] + +containers = [c for c in var_plugins.split(':') if c != ""] + +print("retrieving %i qiita plugin configurations:" % len(containers), file=sys.stderr) +for i, container in enumerate(containers): + if container == "": + continue + print(' (%i/%i) %s' % (i+1, len(containers), container), end="", file=sys.stderr) + url = 'http://%s%s-1:%s/%s' % ('tinqiita-', container, PORT, API_ENDPOINT) + print(" '%s'" % url, end="", file=sys.stderr) + + req = requests.get(url) + if req.status_code != 200: + print(" failed.", file=sys.stderr) + else: + fp_config = '/qiita_plugins/%s.conf' % container + if os.path.exists(fp_config): + print(" already present. 
Will overwrite", file=sys.stderr) # currently, we always want to overwrite configuration to match oauth token + if True: + with open(fp_config, 'w') as f: + f.write(req.content.decode('utf-8')) + print(" ok.", file=sys.stderr) + +print("done.", file=sys.stderr) diff --git a/Images/plugin_collector/fix_test_db.py b/Images/plugin_collector/fix_test_db.py new file mode 100644 index 0000000..7865708 --- /dev/null +++ b/Images/plugin_collector/fix_test_db.py @@ -0,0 +1,63 @@ +import os +from glob import glob +import configparser +import psycopg2 + + +qiita_config = configparser.ConfigParser() +qiita_config.read('/qiita_configurations/qiita_server.cfg') +is_test = qiita_config['main']['TEST_ENVIRONMENT'].upper() == 'TRUE' +print("qiita is in %s mode." % ('TEST' if is_test else 'PRODUCTIVE')) + +conn = psycopg2.connect(database=qiita_config['postgres']['DATABASE'], + host=qiita_config['postgres']['HOST'], + user=qiita_config['postgres']['ADMIN_USER'], + password=qiita_config['postgres']['ADMIN_PASSWORD'], + port=qiita_config['postgres']['PORT']) +cursor = conn.cursor() + +# update conda env for qiita private plugins +sql = "UPDATE qiita.software SET environment_script = 'source /opt/conda/etc/profile.d/conda.sh; conda activate /opt/conda/envs/qiita' WHERE description = 'Internal Qiita jobs';" +cursor.execute(sql) +conn.commit() + +if is_test: + fps_plugin_configs = glob('/qiita_plugins/*.conf') + print("Updating plugin credentials in dummy test DB with actual values from %i plugins." 
% len(fps_plugin_configs)) + for i, fp_plugin_config in enumerate(fps_plugin_configs): + config = configparser.ConfigParser() + config.read(fp_plugin_config) + + print(" (%i/%i) %s: " % (i+1, len(fps_plugin_configs), config['main']['name']), end="") + + SQL_get_softwareID_clientID = "SELECT software.software_id, oauth_software.client_id FROM qiita.software JOIN qiita.oauth_software ON qiita.software.software_id=qiita.oauth_software.software_id WHERE name='%s' AND version='%s';" % ( + config['main']['name'], config['main']['version'] + ) + cursor.execute(SQL_get_softwareID_clientID) + sql_result = cursor.fetchone() + if sql_result is None: + print(" plugin not (yet) in database.") + else: + old_software_id, old_client_id = sql_result + + if config['oauth2']['client_id'] != old_client_id: + SQL_update = "BEGIN; " + # add in the new client secret + SQL_update += "INSERT INTO qiita.oauth_identifiers VALUES ('%s', '%s');" % (config['oauth2']['client_id'], config['oauth2']['client_secret']) + # add in a new software_id to client_id row + SQL_update += "INSERT INTO qiita.oauth_software VALUES (%s, '%s');" % (old_software_id, config['oauth2']['client_id']) + # remove old client_id + SQL_update += "DELETE FROM qiita.oauth_software WHERE software_id=%s AND client_id='%s';" % (old_software_id, old_client_id) + # delete old client_id client_secret relation + SQL_update += "DELETE FROM qiita.oauth_identifiers WHERE client_id='%s';" % old_client_id + # replace ENVIRONMENT_SCRIPT with the one given in config file + SQL_update += "UPDATE qiita.software SET environment_script='%s' WHERE software_id='%s';" % (config['main']['ENVIRONMENT_SCRIPT'], old_software_id) + # replace START_SCRIPT with the one given in config file + SQL_update += "UPDATE qiita.software SET start_script='%s' WHERE software_id='%s';" % (config['main']['START_SCRIPT'], old_software_id) + SQL_update += " COMMIT;" + cursor.execute(SQL_update) + print(" credentials replaced.") + else: + print(" credentials already 
up to date.") + + conn.close() diff --git a/Images/plugin_collector/plugin_collector.dockerfile b/Images/plugin_collector/plugin_collector.dockerfile new file mode 100644 index 0000000..452c5b0 --- /dev/null +++ b/Images/plugin_collector/plugin_collector.dockerfile @@ -0,0 +1,17 @@ +# VERSION: 2025.09.02 + +FROM ubuntu:24.04 + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + curl \ + python3 \ + python3-requests \ + python3-psycopg2 + +COPY collect_configs.py /collect_configs.py +COPY fix_test_db.py /fix_test_db.py +COPY start_plugin_collector.sh /start_plugin_collector.sh +RUN chmod u+x /start_plugin_collector.sh + +CMD ["/start_plugin_collector.sh"] diff --git a/Images/plugin_collector/requirements.txt b/Images/plugin_collector/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/Images/plugin_collector/start_plugin_collector.sh b/Images/plugin_collector/start_plugin_collector.sh new file mode 100644 index 0000000..8208fd2 --- /dev/null +++ b/Images/plugin_collector/start_plugin_collector.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# it seems to be necessary to give the plugin container some lead time +# TODO: this might be more appropriately be addressed with healthchecks in the compose file +sleep 3 +# create WORKING_DIR, UPLOAD_DATA_DIR and BASE_DATA_DIR in shared volume +mkdir -p /qiita_data/working_dir/ /qiita_data/uploads/ +python3 /collect_configs.py +python3 /fix_test_db.py diff --git a/Images/plugin_collector/stefan_cert.conf b/Images/plugin_collector/stefan_cert.conf new file mode 100644 index 0000000..2eb1ee9 --- /dev/null +++ b/Images/plugin_collector/stefan_cert.conf @@ -0,0 +1,12 @@ +authorityKeyIdentifier=keyid,issuer +basicConstraints=CA:FALSE +keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment +subjectAltName = @alt_names + +[alt_names] +DNS.1 = localhost +DNS.2 = tinqiita-qiita-1 +DNS.3 = tinqiita-qiita-worker-1 +DNS.4 = tinqiita-qiita-worker-2 +DNS.5 = tinqiita-qiita-worker-3 +DNS.6 = 
tinqiita-nginx-1 diff --git a/Images/plugin_collector/stefan_csr.conf b/Images/plugin_collector/stefan_csr.conf new file mode 100644 index 0000000..011124a --- /dev/null +++ b/Images/plugin_collector/stefan_csr.conf @@ -0,0 +1,26 @@ +[ req ] +default_bits = 2048 +prompt = no +default_md = sha256 +req_extensions = req_ext +distinguished_name = dn + +[ dn ] +C = DE +ST = Hesse +L = Giessen +O = JLU +OU = Stefan Janssen +CN = localhost + +[ req_ext ] +subjectAltName = @alt_names + +[ alt_names ] +DNS.1 = localhost +IP.1 = 127.0.0.1 +DNS.2 = tinqiita-qiita-1 +DNS.3 = tinqiita-qiita-worker-1 +DNS.4 = tinqiita-qiita-worker-2 +DNS.5 = tinqiita-qiita-worker-3 +DNS.6 = tinqiita-nginx-1 diff --git a/Images/qiita/config_portal.cfg b/Images/qiita/config_portal.cfg new file mode 100644 index 0000000..a9dad14 --- /dev/null +++ b/Images/qiita/config_portal.cfg @@ -0,0 +1,43 @@ +# ---------- Base information for the website ---------- +[sitebase] +# Logo should be 100px by 40px +LOGO = /static/img/logo-clear.png +# Full path to portal custom CSS styling file +CSS_FP = +TITLE = Qiita + +# ---------- Welcome text on index page ---------- +[index] +HEADER = Qiita Spots Patterns +TEXT =

+ Qiita (canonically pronounced cheetah) is an entirely + open-source microbial study management platform. It allows + users to keep track of multiple studies with multiple ‘omics data. + Additionally, Qiita is capable of supporting multiple analytical pipelines + through a 3rd-party plugin system, allowing the user to have a single entry + point for all of their analyses. +

+

+ Qiita provides database and compute resources to the global community, + alleviating the technical burdens that are typically limiting for + researchers studying microbial ecology (e.g. familiarity with the command + line or access to compute power). +

+

+ Qiita’s platform allows for quick reanalysis of the datasets that have been + deposited using the latest analytical technologies. This means that Qiita’s + internal datasets are living data that is periodically re-annotated + according to current best practices. +

+

+ For more information about how to use Qiita, visit the + documentation. +

+

+ Note that you should be logged into the system to access any studies and + files available. +

+ +# ---------- Study listing page ---------- +[study_list] +EXAMPLE_SEARCH = env_matter = soil diff --git a/Images/qiita/config_qiita_oidc.cfg b/Images/qiita/config_qiita_oidc.cfg new file mode 100644 index 0000000..273e03d --- /dev/null +++ b/Images/qiita/config_qiita_oidc.cfg @@ -0,0 +1,266 @@ +# WARNING!!!! DO NOT MODIFY THIS FILE +# IF YOU NEED TO PROVIDE YOUR OWN CONFIGURATION, COPY THIS FILE TO A NEW +# LOCATION AND EDIT THE COPY + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +# ------------------------------ Main settings -------------------------------- +[main] +# Change to FALSE in a production system +TEST_ENVIRONMENT = TRUE + +# Absolute path to the directory where log files are saved. If not given, no +# log file will be created +LOG_DIR = /logs/ + +# Whether studies require admin approval to be made available +REQUIRE_APPROVAL = True + +# Base URL: DO NOT ADD TRAILING SLASH +BASE_URL = https://tinqiita-nginx-1:8383 + +# Download path files +UPLOAD_DATA_DIR = /qiita_data/uploads/ + +# Working directory path +WORKING_DIR = /qiita_data/working_dir/ + +# Maximum upload size (in Gb) +MAX_UPLOAD_SIZE = 100 + +# Path to the base directory where the data files are going to be stored +BASE_DATA_DIR = /qiita_data/ + +# Valid upload extension, comma separated. 
Empty for no uploads +VALID_UPLOAD_EXTENSION = fastq,fastq.gz,txt,tsv,sff,fna,qual + +# The script used to start the qiita environment, if any +# used to spawn private CLI to a cluster +QIITA_ENV = source activate qiita + +# Script used for launching private Qiita tasks +PRIVATE_LAUNCHER = qiita-private-launcher + +# Script used for launching plugins +PLUGIN_LAUNCHER = qiita-plugin-launcher + +# Plugins configuration directory +PLUGIN_DIR = /qiita_plugins/ + +# Webserver certificate file paths +CERTIFICATE_FILE = /qiita_certificates/stefan_server.crt +KEY_FILE = /qiita_certificates/stefan_server.key + +# The value used to secure cookies used for user sessions. A suitable value can +# be generated with: +# +# python -c "from base64 import b64encode;\ +# from uuid import uuid4;\ +# print b64encode(uuid4().bytes + uuid4().bytes)" +COOKIE_SECRET = SECRET + +# The value used to secure JWTs for delegated permission artifact download. +JWT_SECRET = SUPER_SECRET + +# Address a user should write to when asking for help +HELP_EMAIL = foo@bar.com + +# The email address, Qiita sends internal notifications to a sys admin +SYSADMIN_EMAIL = jeff@bar.com + +# ----------------------------- SMTP settings ----------------------------- +[smtp] +# The hostname to connect to +# Google: smtp.google.com +HOST = localhost + +# The port to connect to the database +# Google: 587 +PORT = 25 + +# SSL needed (True or False) +# Google: True +SSL = False + +# The user name to connect with +USER = + +# The user password to connect with +PASSWORD = + +# The email to have messages sent from +EMAIL = example@domain.com + +# ----------------------------- Redis settings -------------------------------- +[redis] +HOST = redis +PORT = 7777 +PASSWORD = +# The redis database you will use, redis has a max of 16. 
+# Qiita should have its own database +DB = 13 + +# ----------------------------- Postgres settings ----------------------------- +[postgres] +# The user name to connect to the database +USER = postgres + +# The administrator user, which can be used to create/drop environments +ADMIN_USER = postgres + +# The database to connect to +DATABASE = qiita_test + +# The host where the database lives on +HOST = qiita-db + +# The port to connect to the database +PORT = 5432 + +# The password to use to connect to the database +PASSWORD = postgres + +# The postgres password for the admin_user +ADMIN_PASSWORD = postgres + +# ----------------------------- Job Scheduler Settings ----------------------------- +[job_scheduler] +# The email address of the submitter of jobs +JOB_SCHEDULER_JOB_OWNER = user@somewhere.org + +# The number of seconds to wait between successive calls +JOB_SCHEDULER__POLLING_VALUE = 15 + +# Hard upper-limit on concurrently running validator jobs +JOB_SCHEDULER_PROCESSING_QUEUE_COUNT = 2 + +# ----------------------------- EBI settings ----------------------------- +[ebi] +# The user to use when submitting to EBI +EBI_SEQ_XFER_USER = Webin-41528 + +# Password for the above user +EBI_SEQ_XFER_PASS = + +# URL of EBI's FASP site +EBI_SEQ_XFER_URL = webin.ebi.ac.uk + +# URL of EBI's HTTPS dropbox +# live submission URL +#EBI_DROPBOX_URL = https://www.ebi.ac.uk/ena/submit/drop-box/submit/ +# testing URL +EBI_DROPBOX_URL = https://www-test.ebi.ac.uk/ena/submit/drop-box/submit/ + +# The name of the sequencing center to use when doing EBI submissions +EBI_CENTER_NAME = qiita-test + +# This string (with an underscore) will be prefixed to your EBI submission and +# study aliases +EBI_ORGANIZATION_PREFIX = example_organization + +# ----------------------------- VAMPS settings ----------------------------- +[vamps] +# general info to submit to vamps +USER = user +PASSWORD = password +URL = https://vamps.mbl.edu/mobe_workshop/getfile.php + +# ----------------------------- 
Portal settings ----------------------------- +[portal] + +# Portal the site is working under +PORTAL = QIITA + +# Portal subdirectory +PORTAL_DIR = + +# Full path to portal styling config file +PORTAL_FP = /qiita_configurations/config_portal.cfg + +# The center latitude of the world map, shown on the Stats map. +# Defaults to 40.01027 (Boulder, CO, USA) +STATS_MAP_CENTER_LATITUDE = + +# The center longitude of the world map, shown on the Stats map. +# Defaults to -105.24827 (Boulder, CO, USA) +STATS_MAP_CENTER_LONGITUDE = + +# ----------------------------- iframes settings --------------------------- +[iframe] +# The real world QIIMP will always need to be accessed with https because Qiita +# runs on https too +QIIMP = https://localhost:8898/ + + +# --------------------- External Identity Provider settings -------------------- +# user authentication happens per default within Qiita, i.e. when a user logs in, +# the stored password hash and email address is compared against what a user +# just provided. You might however, use an external identity provider (IdP) to +# authenticate the user like +# google: https://developers.google.com/identity/protocols/oauth2 or +# github: https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps or +# self hosted keycloak: https://www.keycloak.org/ +# Thus, you don't have to deal with user verification, reset passwords, ... +# Authorization (i.e. if the authorized user is allowed to use Qiita or which +# user level he/she gets assigned is an independent process. You can even use +# multiple independent external identity providers! +# Qiita currently only support the "open ID connect" protocol with the implicit flow. +# Each identity provider comes as its own config section [oidc_foo] and needs +# to specify the following five fields: +# +# Typical identity provider manage multiple "realms" and specific "clients" per realm +# You need to contact your IdP and register Qiita as a new "client". 
The IdP will +# provide you with the correct values. +# +# The authorization protocol requires three steps to obtain user information: +# 1) you identify as the correct client and ask the IdP for a request code +# You have to forward the user to the login page of your IdP. To let the IdP +# know how to come back to Qiita, you need to provide a redirect URL +# 2) you exchange the code for a user token +# 3) you obtain information about the user for the obtaines user token +# Typically, each step is implemented as a separate URL endpoint +# +# To activate IdP: comment out the following config section + +#[oidc_localkeycloak] +# +## client ID for Qiita as registered at your Identity Provider of choice +# CLIENT_ID = qiita +# +## client secret to verify Qiita as the correct client. Not all IdPs require +## a client secret! +# CLIENT_SECRET = SUPERSECRETSTRING +# +# +## redirect URL (end point in your Qiita instance), to which the IdP redirects +## after user types in his/her credentials. If you don't want to change code in +## qiita_pet/webserver.py the URL must follow the pattern: +## base_URL/auth/login_OIDC/foo where foo is the name of this config section +## without the oidc_ prefix! +# REDIRECT_ENDPOINT = /auth/login_OIDC/localkeycloak +# +## The URL of the well-known json document, specifying how API end points +## like 'authorize', 'token' or 'userinfo' are defined. See e.g. +## https://swagger.io/docs/specification/authentication/ +## openid-connect-discovery/ +# WELLKNOWN_URI = http://keycloak:9999/realms/qiita_realm/.well-known/openid-configuration/ +# +## a speaking label for the Identity Provider. Section name is used if empty. +# LABEL = localkeycloak +# +## The scope, i.e. fields about a user, which Qiita requests from the +## Identity Provider, e.g. "profile email eduperson_orcid". +## Will be automatically extended by the scope "openid", to enable the +## "authorize_code" OIDC flow. +# SCOPE = openid +# +##Optional. 
Name of a file in qiita_pet/static/img that shall be +##displayed for login through Service Provider, instead of a plain button +# LOGO = +# diff --git a/Images/qiita/drop_workflows.py b/Images/qiita/drop_workflows.py new file mode 100644 index 0000000..aa45d90 --- /dev/null +++ b/Images/qiita/drop_workflows.py @@ -0,0 +1,67 @@ +import qiita_db as qdb +import sys + + +def remove(self): + # store for later, after table entry is dropped + workflow_name = self.name + + def _get_workflow_id(name): + with qdb.sql_connection.TRN: + sql = """SELECT default_workflow_id + FROM qiita.default_workflow + WHERE name = %s""" + qdb.sql_connection.TRN.add(sql, [name]) + return qdb.sql_connection.TRN.execute_fetchlast() + def _get_node_ids(workflow_id): + with qdb.sql_connection.TRN: + sql = """SELECT default_workflow_node_id + FROM qiita.default_workflow_node + WHERE default_workflow_id = %s""" + qdb.sql_connection.TRN.add(sql, [workflow_id]) + return qdb.sql_connection.TRN.execute_fetchflatten() + def _get_edge_ids(node_ids): + if len(node_ids) > 0: + with qdb.sql_connection.TRN: + sql = """SELECT default_workflow_edge_id + FROM qiita.default_workflow_edge + WHERE parent_id in %s OR child_id in %s""" + qdb.sql_connection.TRN.add(sql, [tuple(node_ids), tuple(node_ids)]) + return qdb.sql_connection.TRN.execute_fetchflatten() + else: + return [] + + workflow_id = _get_workflow_id(self.name) + node_ids = _get_node_ids(workflow_id) + edge_ids = _get_edge_ids(node_ids) + with qdb.sql_connection.TRN: + if len(edge_ids) > 0: + sql = """DELETE FROM qiita.default_workflow_edge_connections + WHERE default_workflow_edge_id in %s""" + qdb.sql_connection.TRN.add(sql, [tuple(edge_ids)]) + + sql = """DELETE FROM qiita.default_workflow_edge + WHERE default_workflow_edge_id in %s""" + qdb.sql_connection.TRN.add(sql, [tuple(edge_ids)]) + + if workflow_id is not None: + sql = """DELETE FROM qiita.default_workflow_node + WHERE default_workflow_id = %s""" + qdb.sql_connection.TRN.add(sql, 
[workflow_id]) + + sql = """DELETE FROM qiita.default_workflow_data_type + WHERE default_workflow_id = %s""" + qdb.sql_connection.TRN.add(sql, [workflow_id]) + + sql = """DELETE FROM qiita.default_workflow + WHERE default_workflow_id = %s""" + qdb.sql_connection.TRN.add(sql, [workflow_id]) + print("removed workflow '%s': ID=%i with %i nodes and %i edges" % (workflow_name, workflow_id, len(node_ids), len(edge_ids)), file=sys.stderr) + +def remove_workflows(): + for w in qdb.software.DefaultWorkflow.iter(): + w.remove = remove + w.remove(w) + + +remove_workflows() diff --git a/Images/qiita/qiita.dockerfile b/Images/qiita/qiita.dockerfile new file mode 100644 index 0000000..0a1ab71 --- /dev/null +++ b/Images/qiita/qiita.dockerfile @@ -0,0 +1,96 @@ +# VERSION: 2025.09.09 + +FROM ubuntu:24.04 + +ARG MINIFORGE_VERSION=24.1.2-0 +ARG MODZIP_VERSION=1.3.0 +ARG NGINX_VERSION=1.26.0 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +# install following packages for nginx compilation: libpcre2-dev, libxslt-dev and libgd-dev +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + libpcre2-dev \ + libxslt-dev \ + libgd-dev \ + postgresql-client +RUN apt-get -y install build-essential +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". 
${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init + +# create conda env for qiita with all necessary dependencies (conda and pip) +RUN conda create --quiet --yes -n qiita python=3.9 pip libgfortran numpy cython anaconda::redis +# TODO: Redis container +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-n", "qiita", "/bin/bash", "-c"] + +RUN pip install -U pip +RUN pip install \ + sphinx \ + sphinx-bootstrap-theme \ + nose-timer \ + Click \ + coverage \ + psycopg2-binary + + +# Clone the Qiita Repo +# RUN git clone -b master https://github.com/qiita-spots/qiita.git +RUN git clone -b auth_oidc https://github.com/jlab/qiita.git + +# should tests re-populate the DB, ensure private plugin, qtp-biom and qp-target-gene use the correct conda env +RUN sed -i "s|'source /home/runner/.profile; conda activate qiita'|'source /opt/conda/etc/profile.d/conda.sh; conda activate /opt/conda/envs/qiita'|" /qiita/qiita_db/support_files/populate_test_db.sql +RUN sed -i "s|'source ~/virtualenv/python2.7/bin/activate; export PATH=\$HOME/miniconda3/bin/:\$PATH; . activate qtp-biom'|'true'|" /qiita/qiita_db/support_files/populate_test_db.sql +RUN sed -i "s|'source activate qiita'|'true'|" /qiita/qiita_db/support_files/populate_test_db.sql + +# there seems to be a conflict with parameter names for qp-target-gene. See: https://github.com/qiita-spots/qp-target-gene/issues/24 +RUN sed -i "s|'1.9.1',|'1.9.hide',|" /qiita/qiita_db/support_files/populate_test_db.sql + +# We need to install necessary dependencies +# as well as some extra dependencies for psycopg2 to work +RUN git clone https://github.com/psycopg/psycopg2.git +# NOTE: a standalone 'RUN export PATH=...' does not persist into later layers (each RUN is a fresh shell), so chain the PATH tweak with the install that needs it +RUN export PATH=/usr/lib/postgresql/14.11/bin/:$PATH && pip install -e psycopg2/. 
+ +# Install pip packaages for Qiita +RUN pip install -e qiita --no-binary redbiom + +# A qiita configuration file is directly mounted into the qiita container via the compose file + +# Copy Bash Script to run Qiita to the container. start_qiita differentiates between one "master" and multiple workers +COPY start_qiita.sh . +COPY start_qiita-initDB.sh . +RUN chmod 755 start_qiita.sh start_qiita-initDB.sh + +RUN apt-get install -y curl +COPY start_plugin.py /start_plugin.py +RUN chmod a+x /start_plugin.py + +# hide certificate and server configuration copy from source code +RUN rm -rf /qiita/qiita_core/support_files + +# hide default configurations from github sources +RUN rm -f /qiita/qiita_pet/nginx_example.conf /qiita/qiita_pet/supervisor_example.conf /qiita/qiita_pet/support_files/config_portal.cfg + +COPY drop_workflows.py /drop_workflows.py + +# install aspera client for ENA submission +RUN conda install hcc::aspera-cli + +# something is wired with permissions of the git repo?! +RUN git config --global --add safe.directory /qiita + +# CMD ["conda", "run", "-n", "qiita"] diff --git a/Images/qiita/requirements.txt b/Images/qiita/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/Images/qiita/start_plugin.py b/Images/qiita/start_plugin.py new file mode 100644 index 0000000..5709f63 --- /dev/null +++ b/Images/qiita/start_plugin.py @@ -0,0 +1,27 @@ +import os +import sys +import requests +import json + +PORT = 5000 +API_ENDPOINT = "run" + +pluginname, qiita_server_url, job_id, output_dir = sys.argv[1:] + +req = requests.post('http://%s%s-1:%s/run' % ('tinqiita-', pluginname, PORT), + json={'url': qiita_server_url, + 'job_id': job_id, + 'output_dir': output_dir}) +print(req.status_code) +if req.status_code != 200: + print(req.content) + +retvalues = json.loads(req.text) +if 'stderr' in retvalues.keys(): + print("=== request STDERR ===\n%s" % retvalues['stderr']) +else: + print("=== request STDERR: empty ===\n") +if 'stdout' in 
retvalues.keys(): + print("=== request STDOUT ===\n%s" % retvalues['stdout']) +else: + print("=== request STDOUT: empty ===\n") diff --git a/Images/qiita/start_qiita-initDB.sh b/Images/qiita/start_qiita-initDB.sh new file mode 100644 index 0000000..86d9db0 --- /dev/null +++ b/Images/qiita/start_qiita-initDB.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +CONDA_DIR=/opt/conda +ENV_NAME=qiita +#PORT=21174 + +# create (empty) working directory as given in Qiita configuration +mkdir -p `grep ^WORKING_DIR $QIITA_CONFIG_FP | cut -d "=" -f 2` + +sleep 5 # wait for postgres to boot up + +# We execute qiita-env make every time. We expect that it will fail always but the very first time, as the qiita DB should exist from then on +source $CONDA_DIR/etc/profile.d/conda.sh; conda activate $CONDA_DIR/envs/$ENV_NAME; cd /qiita; qiita-env make --no-load-ontologies 2> .env-make.err || true +# To avoid confusing the user, STDERR is written into a file and only reported if it does not contain the text that we expect to see if it just reports the existing DB +grep 'already present on the system. You can drop it by running' .env-make.err > /dev/null || cat .env-make.err + + +# currently, commands with which you can process artifacts are limited to those that are present in available "recommended workflows". +# As they are not properly set up in the test database, we e.g. 
cannot run "deblur" on demux or trimmed existing artifacts (it's different in workflows in construction) +# As long as we don't have a nice mechanism to carry over recommended workflow, we better remove them altogether +source $CONDA_DIR/etc/profile.d/conda.sh; conda activate $CONDA_DIR/envs/$ENV_NAME; python /drop_workflows.py diff --git a/Images/qiita/start_qiita.sh b/Images/qiita/start_qiita.sh new file mode 100644 index 0000000..4b8ef08 --- /dev/null +++ b/Images/qiita/start_qiita.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +CONDA_DIR=/opt/conda +ENV_NAME=qiita +#PORT=21174 + +#sleep 300000 +#export QIITA_CONFIG_FP="/qiita/config_qiita_oidc.cfg" +if [ -n "${MASTER}" ] && [ ! -d /qiita/qiita_db/__pycache__ ]; then + source $CONDA_DIR/etc/profile.d/conda.sh; conda activate $CONDA_DIR/envs/$ENV_NAME; cd /qiita; pip install -e . --no-binary redbiom; +fi + +# # We execute qiita-env make every time. We expect that it will fail always but the very first time, as the qiita DB should exist from then on +# source $CONDA_DIR/etc/profile.d/conda.sh; conda activate $CONDA_DIR/envs/$ENV_NAME; cd /qiita; qiita-env make --no-load-ontologies 2> .env-make.err || true +# # To avoid confusing the user, STDERR is written into a file and only reported if it does not contain the text that we expect to see if it just reports the existing DB +# grep 'already present on the system. You can drop it by running' .env-make.err > /dev/null || cat .env-make.err + +# This was commented out bc it stopped working anymore and i was focusing on fixing something else, if you create the database for the first +# time you will have to pick the appropriate options. 
+#if [ "$( export PGPASSWORD='postgres'; psql -h qiita-db -U postgres -XtAc "SELECT 1 FROM postgres WHERE datname='qiita_test'" )" = '1' ] +#then +# qiita pet webserver --no-build-docs start --port 21174 --master +#else +# qiita-env make --no-load-ontologies +# qiita pet webserver --no-build-docs start --port 21174 --master +#fi +#qiita-env make --no-load-ontologies; true +#mkdir -p /qiita/plugins +#sleep 3 +source $CONDA_DIR/etc/profile.d/conda.sh; conda activate $CONDA_DIR/envs/$ENV_NAME; cd /qiita && qiita pet webserver --no-build-docs start --port $PORT $MASTER 2> /logs/qiita_pet$MASTER.log 1>&2 + +tail -f /dev/null diff --git a/Images/qiita/supervisor_foreground.conf b/Images/qiita/supervisor_foreground.conf new file mode 100644 index 0000000..1016fe2 --- /dev/null +++ b/Images/qiita/supervisor_foreground.conf @@ -0,0 +1,34 @@ +[supervisorctl] +serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket + +[supervisord] +logfile=/qiita_logs/supervisord.log ; main log file; default $CWD/supervisord.log +loglevel=debug ; log level; default info; others: debug,warn,trace +pidfile=/qiita_logs/supervisord.pid ; supervisord pidfile; default supervisord.pid +nodaemon=true + +[include] +files=/home/travis/miniconda3/envs/qiita/bin + +[group:qiita_all] +programs=qiita_workers_0,qiita_workers_1,qiita_workers_2,qiita_master ; each refers to 'x' in [program:x] definitions + +[program:qiita_master] +command=qiita pet webserver --no-build-docs start --port 21174 --master +process_name=%(program_name)s ; process_name expr (default %(program_name)s) +numprocs=1 ; number of processes copies to start (def 1) + +[program:qiita_workers_0] +command=qiita pet webserver --no-build-docs start --port 21175 +process_name=%(program_name)s ; process_name expr (default %(program_name)s) +numprocs=1 ; number of processes copies to start (def 1) + +[program:qiita_workers_1] +command=qiita pet webserver --no-build-docs start --port 21176 +process_name=%(program_name)s ; 
process_name expr (default %(program_name)s) +numprocs=1 ; number of processes copies to start (def 1) + +[program:qiita_workers_2] +command=qiita pet webserver --no-build-docs start --port 21177 +process_name=%(program_name)s ; process_name expr (default %(program_name)s) +numprocs=1 ; number of processes copies to start (def 1) diff --git a/Images/qp-deblur/qp-deblur.dockerfile b/Images/qp-deblur/qp-deblur.dockerfile new file mode 100644 index 0000000..067fe51 --- /dev/null +++ b/Images/qp-deblur/qp-deblur.dockerfile @@ -0,0 +1,143 @@ +# VERSION: 2025.08.22 + +# ========================== +# Stage 1: Build wheels +# ========================== +FROM ubuntu:24.04 AS builder + +ARG MINIFORGE_VERSION=24.1.2-0 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". 
${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# Create conda env +RUN conda create --quiet -n deblur python=3.5 pip libgfortran=3 +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/deblur", "/bin/bash", "-c"] + +ENV LC_ALL=C.UTF-8 +ENV LANG=C.UTF-8 + +RUN git clone -b master https://github.com/qiita-spots/qiita_client.git +RUN sed -i "s/f'Entered BaseQiitaPlugin._register_command({command.name})'/'Entered BaseQiitaPlugin._register_command(%s)' % command.name/" qiita_client/qiita_client/plugin.py +RUN cd qiita_client && pip install --no-cache-dir . + +RUN conda install --quiet --yes -c bioconda -c biocore "VSEARCH=2.7.0" MAFFT=7.310 SortMeRNA=2.0 fragment-insertion gcc +RUN pip install -U pip +RUN pip install numpy cython pandas +RUN pip install scikit-bio==0.5.5 + +RUN pip install -U pip pip-system-certs + +RUN git clone -b uncouplePlugins https://github.com/jlab/qp-deblur.git +RUN cd qp-deblur && pip install . 
+ +COPY requirements.txt ./requirements.txt +RUN pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt + + +# ========================== +# Stage 2: Runtime +# ========================== +FROM python:3.5-slim +# ^^ 110 MB + +# let the container know it's plugin name +ENV PLUGIN=qp-deblur + +# deblur dependent binaries + necessary libraries: mafft, vsearch, sortmerna +COPY --from=builder /opt/conda/envs/deblur/bin/mafft /opt/conda/envs/deblur/bin/vsearch /opt/conda/envs/deblur/bin/indexdb_rna /opt/conda/envs/deblur/bin/sortmerna /usr/local/bin/ +# ^^ 113 MB +COPY --from=builder /opt/conda/envs/deblur/libexec/mafft /opt/conda/envs/deblur/libexec/mafft/ +# ^^ 122 MB +COPY --from=builder /opt/conda/envs/deblur/lib/libgomp.so.1.0.0 /lib/x86_64-linux-gnu/libgomp.so.1 +# ^^ 123 MB + +# python package compile in build stage +COPY --from=builder /wheels /wheels +# ^^ 235 MB +RUN pip install --no-cache-dir /wheels/* \ + && rm -rf /usr/local/lib/python3.5/site-packages/biom/tests +# ^^ 612 MB + +COPY --from=builder /opt/conda/envs/deblur/bin/run-sepp.sh /opt/conda/envs/deblur/bin/seppJsonMerger.jar /opt/conda/envs/deblur/bin/hmm* /opt/conda/envs/deblur/bin/pplacer /opt/conda/envs/deblur/bin/guppy /usr/local/bin/ +# ^^ 633 MB + +# minimal Java Runtime Environment for SEPP's seppJsonMerger.jar +RUN mkdir -p /usr/share/man/man1 && \ + echo "deb [trusted=yes] http://archive.debian.org/debian buster main" > /etc/apt/sources.list && \ + echo "deb [trusted=yes] http://archive.debian.org/debian-security buster/updates main" >> /etc/apt/sources.list && \ + echo "deb [trusted=yes] http://archive.debian.org/debian buster-updates main" >> /etc/apt/sources.list && \ + apt-get update && \ + apt-get install -y --no-install-recommends openjdk-11-jre-headless && \ + rm -rf /var/lib/apt/lists/* +# ^^ 841 MB + +# copy SEPP +COPY --from=builder /opt/conda/envs/deblur/share/fragment-insertion/sepp/run_sepp.py /opt/conda/envs/deblur/share/fragment-insertion/sepp/run_sepp.py +COPY 
--from=builder /opt/conda/envs/deblur/share/fragment-insertion/sepp/sepp /opt/conda/envs/deblur/share/fragment-insertion/sepp/sepp +COPY --from=builder /opt/conda/envs/deblur/share/fragment-insertion/sepp/dendropy /opt/conda/envs/deblur/share/fragment-insertion/sepp/dendropy +COPY --from=builder /opt/conda/envs/deblur/share/fragment-insertion/sepp/home.path /opt/conda/envs/deblur/share/fragment-insertion/sepp/home.path +COPY --from=builder /opt/conda/envs/deblur/share/fragment-insertion/sepp/.sepp/main.config /opt/conda/envs/deblur/share/fragment-insertion/sepp/.sepp/main.config +RUN sed -i "s|/opt/conda/envs/deblur/share/fragment-insertion/sepp/.sepp/bundled-v4.3.5/|/usr/local/bin/|g" /opt/conda/envs/deblur/share/fragment-insertion/sepp/.sepp/main.config +# ^^ 845 MB +# following step increases image size from by 1.3 GB!! Better mount as volume and "make" these files during Makefile execution +# COPY --from=builder /opt/conda/envs/deblur/share/fragment-insertion/ref/ /opt/conda/envs/deblur/share/fragment-insertion/ref/ + +# install tornado based trigger layer in base environment +RUN pip install -U --no-cache-dir tornado pip-system-certs +COPY trigger_noconda.py /trigger.py +# ^^ 848 MB + +WORKDIR / + +COPY start_qp-deblur.sh . +RUN chmod 755 start_qp-deblur.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +RUN sed -i "s|^#\!.*|#\!/usr/local/bin/python|" /usr/local/bin/configure_deblur +RUN sed -i "s|^#\!.*|#\!/usr/local/bin/python|" /usr/local/bin/start_deblur + +# use git branch instead of pypi version (stored via wheel) +COPY --from=builder /qiita_client /qiita_client +RUN cd qiita_client && pip install . 
+ +RUN mkdir -p /qiita_server_certificates/ +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +RUN /usr/local/bin/configure_deblur --env-script "true" --server-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` filesystem +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qp-deblur/" /unshared_plugins/*.conf + +# remove conda command from trigger.py +RUN sed -i "s|source /opt/conda/etc/profile.d/conda.sh; conda activate /opt/conda/envs/%s;||" /trigger.py && sed -i "s|conda_env_name, ||" /trigger.py + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qp-deblur.sh"] +# ^^ 848 MB \ No newline at end of file diff --git a/Images/qp-deblur/requirements.txt b/Images/qp-deblur/requirements.txt new file mode 100644 index 0000000..37e968b --- /dev/null +++ b/Images/qp-deblur/requirements.txt @@ -0,0 +1,3 @@ +scikit-bio==0.5.5 + +-e /qp-deblur \ No newline at end of file diff --git a/Images/qp-deblur/start_qp-deblur.sh b/Images/qp-deblur/start_qp-deblur.sh new file mode 100644 index 0000000..b2a90c6 --- /dev/null +++ b/Images/qp-deblur/start_qp-deblur.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python trigger.py start_deblur + +tail -f /dev/null diff --git a/Images/qp-qiime2/qp-qiime2.dockerfile b/Images/qp-qiime2/qp-qiime2.dockerfile new file mode 100644 index 0000000..629e40c --- /dev/null +++ b/Images/qp-qiime2/qp-qiime2.dockerfile @@ -0,0 +1,111 @@ +# VERSION: 2025.09.12 + +FROM ubuntu:24.04 + +ARG MINIFORGE_VERSION=24.1.2-0 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential \ + zip \ + tzdata + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget 
https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# install tornado based trigger layer in base environment +RUN pip install -U pip +RUN pip install tornado +COPY trigger.py /trigger.py + +# Download qiime2 yaml +RUN wget --quiet https://data.qiime2.org/distro/core/qiime2-2023.5-py38-linux-conda.yml + +# Create conda env +RUN conda env create --name qiime2 -y --file qiime2-2023.5-py38-linux-conda.yml \ + && conda clean --all -y \ + && rm -rf /opt/conda/pkgs +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/qiime2", "/bin/bash", "-c"] + +RUN pip install -U pip +RUN pip install https://github.com/qiita-spots/qiita_client/archive/master.zip +RUN pip install https://github.com/qiita-spots/qiita-files/archive/master.zip +RUN pip install https://github.com/biocore/q2-mislabeled/archive/refs/heads/main.zip +RUN pip install q2-umap q2-greengenes2 +RUN git clone https://github.com/qiita-spots/qp-qiime2.git +WORKDIR /qp-qiime2 + +RUN sed -i "s|self.basedir, '..', '..', '|'/|g" /qp-qiime2/qp_qiime2/tests/test_qiime2.py + +RUN pip install -e . 
+RUN pip install --upgrade certifi +RUN pip install pip-system-certs + +# configuring the databases available for QIIME 2 +RUN mkdir /databases +RUN wget --quiet -O "/databases/gg-13-8-99-515-806-nb-classifier.qza" "https://data.qiime2.org/2021.4/common/gg-13-8-99-515-806-nb-classifier.qza" +RUN export QP_QIIME2_DBS=/databases + +# configuring the filtering QZAs available for QIIME 2 +RUN mkdir /filtering +RUN wget -O /filtering/bloom-analyses.zip https://github.com/knightlab-analyses/bloom-analyses/archive/refs/heads/master.zip \ + && unzip -j /filtering/bloom-analyses.zip bloom-analyses-master/data/qiime2-artifacts-for-qiita/*.qza -d /filtering/ \ + && rm -f /filtering/bloom-analyses.zip +RUN export QP_QIIME2_FILTER_QZA=/filtering/ + +# TODO: should the plugin get the server configuration?! +RUN export QIITA_CONFIG_FP=/qiita/config_qiita_oidc.cfg + +# let the container know it's plugin name +ENV PLUGIN=qp-qiime2 + +# configure language +ENV LC_ALL=C.UTF-8 +ENV LANG=C.UTF-8 + +# configure timezone +RUN ln -fs /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN dpkg-reconfigure -f noninteractive tzdata + + +WORKDIR / + +COPY start_qp-qiime2.sh . 
+RUN chmod 755 start_qp-qiime2.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +#RUN export QIITA_ROOTCA_CERT=/unshared_certificates/ci_rootca.crt +RUN chmod u+x /qp-qiime2/scripts/configure_qiime2 /qp-qiime2/scripts/start_qiime2 +ENV QP_QIIME2_DBS=/databases +ENV QP_QIIME2_FILTER_QZA=/filtering/ +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +RUN /qp-qiime2/scripts/configure_qiime2 --env-script 'true' --server-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qp-qiime2/" /unshared_plugins/*.conf + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qp-qiime2.sh"] diff --git a/Images/qp-qiime2/requirements.txt b/Images/qp-qiime2/requirements.txt new file mode 100644 index 0000000..4493267 --- /dev/null +++ b/Images/qp-qiime2/requirements.txt @@ -0,0 +1,9 @@ +tornado +q2-umap +q2-greengenes2 +certifi +pip-system-certs + +-e /qiita_client +-e /qiita-files +-e /qp-qiime2 diff --git a/Images/qp-qiime2/start_qp-qiime2.sh b/Images/qp-qiime2/start_qp-qiime2.sh new file mode 100644 index 0000000..50069a0 --- /dev/null +++ b/Images/qp-qiime2/start_qp-qiime2.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python trigger.py qiime2 start_qiime2 /qp-qiime2 + +tail -f /dev/null diff --git a/Images/qp-target-gene/qp-target-gene.dockerfile b/Images/qp-target-gene/qp-target-gene.dockerfile new file mode 100644 index 0000000..dc3b9f7 --- /dev/null +++ b/Images/qp-target-gene/qp-target-gene.dockerfile @@ -0,0 +1,136 @@ +# VERSION: 2025.08.28 + +# ========================== +# Stage 1: Build wheels +# 
========================== +FROM ubuntu:24.04 AS builder + +ARG MINIFORGE_VERSION=24.1.2-0 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# install tornado based trigger layer in base environment +RUN pip install -U pip + +# Create conda env +RUN conda create --name qp-target-gene -y -c conda-forge -c bioconda -c biocore python=2.7 SortMeRNA==2.0 numpy==1.13.1 pigz biom-format +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/qp-target-gene", "/bin/bash", "-c"] + +# see https://stackoverflow.com/questions/49940813/pip-no-module-named-internal +RUN wget https://bootstrap.pypa.io/pip/2.7/get-pip.py -O /get-pip2.7.py +RUN wget https://bootstrap.pypa.io/pip/3.7/get-pip.py -O /get-pip3.7.py +RUN python2.7 get-pip2.7.py --force-reinstall + +RUN pip install -U pip +#RUN pip install https://github.com/qiita-spots/qiita_client/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita_client.git +RUN cd /qiita_client && pip install --no-cache-dir . 
+ +RUN pip install https://github.com/qiita-spots/qiita-files/archive/master.zip +RUN git clone https://github.com/qiita-spots/qp-target-gene.git +WORKDIR /qp-target-gene +RUN pip install biom-format +RUN pip install -e . +RUN pip install --upgrade certifi +RUN pip install pip-system-certs + +WORKDIR / + +# qiime 1.9.1 comes with https://pypi.org/project/qiime-default-reference/ as dependency, which is ~184MB +# we "hide" it here, as necessary files will be downloaded from ftp.microbio.me/greengenes_release while setting up qiita anyway +RUN pip download --dest /qiime_default_reference qiime_default_reference \ + && cd /qiime_default_reference \ + && tar xzvf *.tar.gz \ + && cd qiime-default-reference-0.1.3 \ + && for fzip in `find . -type f -name "97*"`; do fplain=`echo $fzip | sed "s|.gz$||g"`; echo "content erased to generate small wheel file, as reference shall be mounted to target container later on." > $fplain; gzip -f $fplain; done + +COPY requirements.txt ./requirements.txt +RUN pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt + +# download sortmerna / index_db sources for version 2.0 and re-compile statically as different glibc and libstdc++ couse issues +RUN wget https://github.com/sortmerna/sortmerna/archive/refs/tags/2.0.tar.gz && tar xzvf 2.0.tar.gz && cd /sortmerna-2.0 && ./configure LDFLAGS=" -static " && make -j + +# ========================== +# Stage 2: Runtime +# ========================== +# I am testing ubuntu as base image, since python:xxx-slim was too hard/large to install python2 and python3 side by side +FROM ubuntu:22.04 + +# let the container know it's plugin name +ENV PLUGIN=qp-target-gene + +# py2 and py3 +RUN mkdir -p /usr/share/man/man1 && \ + apt-get update && \ + apt-get install -y --no-install-recommends python2 python3 curl python-tk && \ + rm -rf /var/lib/apt/lists/* + +# install pip2 +COPY --from=builder /get-pip2.7.py /get-pip3.7.py / +RUN python2 get-pip2.7.py \ + && rm get-pip2.7.py + +# install pip3 +RUN 
python3 get-pip3.7.py \ + && rm get-pip3.7.py + +# python package compile in build stage +COPY --from=builder /wheels /wheels + +RUN pip2 install --no-cache-dir /wheels/* \ + && rm -rf rm -rf `find /usr/local/lib/python2.7/site-packages -type d -name "tests" | grep -v numpy` +COPY --from=builder /opt/conda/envs/qp-target-gene/lib/libpython2.7.so.1.0 /usr/lib/x86_64-linux-gnu/libpython2.7.so.1.0 + +# "install" pigz +COPY --from=builder /opt/conda/envs/qp-target-gene/bin/pigz /usr/local/bin/ + +COPY start_qp-target-gene.sh . +RUN chmod 755 start_qp-target-gene.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +RUN pip3 install tornado +COPY trigger_noconda.py /trigger.py + +# copy sortmerna binaries +COPY --from=builder /sortmerna-2.0/sortmerna /usr/local/bin/sortmerna +COPY --from=builder /sortmerna-2.0/indexdb_rna /usr/local/bin/indexdb_rna + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +RUN export QIITA_ROOTCA_CERT=/unshared_certificates/ci_rootca.crt +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +RUN sed -i "s|^#\!.*|#\!/usr/bin/python2|" /usr/local/bin/configure_target_gene +RUN sed -i "s|^#\!.*|#\!/usr/bin/python2|" /usr/local/bin/start_target_gene +RUN configure_target_gene --env-script "true" --server-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qp-target-gene/" /unshared_plugins/*.conf + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qp-target-gene.sh"] diff --git a/Images/qp-target-gene/requirements.txt b/Images/qp-target-gene/requirements.txt new file mode 100644 index 0000000..52ed9a5 --- /dev/null +++ 
b/Images/qp-target-gene/requirements.txt @@ -0,0 +1,6 @@ +-e /qiime_default_reference/qiime-default-reference-0.1.3 +pip-system-certs + +https://github.com/qiita-spots/qiita-files/archive/master.zip +-e /qiita_client +-e /qp-target-gene diff --git a/Images/qp-target-gene/start_qp-target-gene.sh b/Images/qp-target-gene/start_qp-target-gene.sh new file mode 100644 index 0000000..cf92cee --- /dev/null +++ b/Images/qp-target-gene/start_qp-target-gene.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python3 trigger.py start_target_gene + +tail -f /dev/null diff --git a/Images/qtp-biom/_visualizer.py.patch b/Images/qtp-biom/_visualizer.py.patch new file mode 100644 index 0000000..a46b0e4 --- /dev/null +++ b/Images/qtp-biom/_visualizer.py.patch @@ -0,0 +1,128 @@ +--- _visualizer.py 2022-12-19 21:51:47.000000000 +0000 ++++ _visualizer.py 2025-08-22 12:24:22.603489318 +0000 +@@ -16,48 +16,50 @@ + import seaborn as sns + import matplotlib + import matplotlib.pyplot as plt +-from q2_types.feature_data import DNAIterator +-import q2templates +-import skbio +-import qiime2 ++# from q2_types.feature_data import DNAIterator ++# import q2templates ++# import skbio ++# import qiime2 + import json +-from ._vega_spec import vega_spec ++import sys; sys.path.append("/"); from q2summarize import util ++from q2summarize import _templates ++from q2summarize._vega_spec import vega_spec + + _blast_url_template = ("http://www.ncbi.nlm.nih.gov/BLAST/Blast.cgi?" 
+ "ALIGNMENT_VIEW=Pairwise&PROGRAM=blastn&DATABASE" + "=nt&CMD=Put&QUERY=%s") + +-TEMPLATES = pkg_resources.resource_filename('q2_feature_table', '_summarize') ++TEMPLATES = '/q2summarize/' + + +-def tabulate_seqs(output_dir: str, data: DNAIterator) -> None: +- sequences = [] +- seq_lengths = [] +- with open(os.path.join(output_dir, 'sequences.fasta'), 'w') as fh: +- for sequence in data: +- skbio.io.write(sequence, format='fasta', into=fh) +- str_seq = str(sequence) +- seq_len = len(str_seq) +- sequences.append({'id': sequence.metadata['id'], +- 'len': seq_len, +- 'url': _blast_url_template % str_seq, +- 'seq': str_seq}) +- seq_lengths.append(seq_len) +- seq_len_stats = _compute_descriptive_stats(seq_lengths) +- _write_tsvs_of_descriptive_stats(seq_len_stats, output_dir) +- +- index = os.path.join(TEMPLATES, 'tabulate_seqs_assets', 'index.html') +- q2templates.render(index, output_dir, context={'data': sequences, +- 'stats': seq_len_stats}) +- +- js = os.path.join( +- TEMPLATES, 'tabulate_seqs_assets', 'js', 'tsorter.min.js') +- os.mkdir(os.path.join(output_dir, 'js')) +- shutil.copy(js, os.path.join(output_dir, 'js', 'tsorter.min.js')) ++# def tabulate_seqs(output_dir: str, data: DNAIterator) -> None: ++# sequences = [] ++# seq_lengths = [] ++# with open(os.path.join(output_dir, 'sequences.fasta'), 'w') as fh: ++# for sequence in data: ++# skbio.io.write(sequence, format='fasta', into=fh) ++# str_seq = str(sequence) ++# seq_len = len(str_seq) ++# sequences.append({'id': sequence.metadata['id'], ++# 'len': seq_len, ++# 'url': _blast_url_template % str_seq, ++# 'seq': str_seq}) ++# seq_lengths.append(seq_len) ++# seq_len_stats = _compute_descriptive_stats(seq_lengths) ++# _write_tsvs_of_descriptive_stats(seq_len_stats, output_dir) ++ ++# index = os.path.join(TEMPLATES, 'tabulate_seqs_assets', 'index.html') ++# q2templates.render(index, output_dir, context={'data': sequences, ++# 'stats': seq_len_stats}) ++ ++# js = os.path.join( ++# TEMPLATES, 
'tabulate_seqs_assets', 'js', 'tsorter.min.js') ++# os.mkdir(os.path.join(output_dir, 'js')) ++# shutil.copy(js, os.path.join(output_dir, 'js', 'tsorter.min.js')) + + + def summarize(output_dir: str, table: biom.Table, +- sample_metadata: qiime2.Metadata = None) -> None: ++ sample_metadata: pd.DataFrame = None) -> None: + # this value is to limit the amount of memory used by seaborn.histplot, for + # more information see: https://github.com/mwaskom/seaborn/issues/2325 + MAX_BINS = 50 +@@ -115,10 +117,9 @@ + os.path.join(output_dir, 'feature-frequencies.pdf')) + feature_frequencies_ax.get_figure().savefig( + os.path.join(output_dir, 'feature-frequencies.png')) +- +- sample_summary_table = q2templates.df_to_html( ++ sample_summary_table = util.df_to_html( + sample_summary.apply('{:,}'.format).to_frame('Frequency')) +- feature_summary_table = q2templates.df_to_html( ++ feature_summary_table = util.df_to_html( + feature_summary.apply('{:,}'.format).to_frame('Frequency')) + + index = os.path.join(TEMPLATES, 'summarize_assets', 'index.html') +@@ -142,7 +143,7 @@ + .apply('{:,}'.format).to_frame('Frequency') + feature_frequencies['# of Samples Observed In'] = \ + pd.Series(feature_qualitative_data).astype(int).apply('{:,}'.format) +- feature_frequencies_table = q2templates.df_to_html(feature_frequencies) ++ feature_frequencies_table = util.df_to_html(feature_frequencies) + sample_frequency_template = os.path.join( + TEMPLATES, 'summarize_assets', 'sample-frequency-detail.html') + feature_frequency_template = os.path.join( +@@ -172,11 +173,11 @@ + )) + }) + context.update({'sample_frequencies_json': sample_frequencies_json}) +- q2templates.util.copy_assets(os.path.join(TEMPLATES, ++ util.copy_assets(os.path.join(TEMPLATES, + 'summarize_assets', + 'vega'), + output_dir) +- q2templates.render(templates, output_dir, context=context) ++ _templates.render(templates, output_dir, context=context) + + plt.close('all') + +@@ -280,3 +281,5 @@ + 'Median frequency', '3rd quartile', + 
'Maximum frequency', 'Mean frequency']) + return summary, frequencies ++ ++# summarize("test.xxx", biom.load_table('reference-hit.biom'), pd.read_csv('2_20250704-123139.txt', sep="\t", index_col=0)) +\ No newline at end of file + diff --git a/Images/qtp-biom/qtp-biom.dockerfile b/Images/qtp-biom/qtp-biom.dockerfile new file mode 100644 index 0000000..98d4841 --- /dev/null +++ b/Images/qtp-biom/qtp-biom.dockerfile @@ -0,0 +1,178 @@ +# VERSION: 2025.08.29 + +FROM ubuntu:24.04 AS builder + +ARG MINIFORGE_VERSION=24.1.2-0 +ARG QIIME2RELEASE=2022.8 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential + +# biom artifact validation throws an error since provenance tracking of Qiime2 cannot get proper time zone information, if not configured here +# https://stackoverflow.com/questions/21717411/timezone-information-missing-in-pytz +RUN dpkg-reconfigure -f noninteractive tzdata + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". 
${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# install tornado based trigger layer in base environment +RUN pip install -U pip +# RUN conda install tornado + +# Download qtp-biom yaml +# RUN wget https://raw.githubusercontent.com/qiime2/distributions/refs/heads/dev/${QIIME2RELEASE}/tiny/released/qiime2-tiny-ubuntu-latest-conda.yml +RUN wget https://data.qiime2.org/distro/core/qiime2-${QIIME2RELEASE}-py38-linux-conda.yml + +RUN sed -n '/channels/,/dependencies/p' qiime2-2022.8-py38-linux-conda.yml > tinyq2.yml && \ + echo " - q2-metadata=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-mystery-stew=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-types=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2cli=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2templates=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - qiime2=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-feature-table=${QIIME2RELEASE}" >> tinyq2.yml + +# # #RUN echo "- q2-feature-table" >> qiime2-${QIIME2RELEASE}-py38-linux-conda.yml +# # #RUN sed -i "s|- conda-forge|- https://packages.qiime2.org/qiime2/${QIIME2RELEASE}/passed/core/\n- conda-forge|" qiime2-${QIIME2RELEASE}-py38-linux-conda.yml +# Create conda env +RUN conda config --set channel_priority strict && conda env create --quiet -n qtp-biom --file tinyq2.yml +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/qtp-biom", "/bin/bash", "-c"] + +RUN pip install -U pip +# RUN pip install https://github.com/qiita-spots/qiita_client/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita_client.git +RUN cd qiita_client && pip install --no-cache-dir . 
+ +# RUN pip install https://github.com/qiita-spots/qiita-files/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita-files.git +# COPY ./qiita-files /qiita-files +RUN cd /qiita-files && pip install -e . -v +RUN git clone https://github.com/qiita-spots/qtp-biom.git +# COPY ./qtp-biom /qtp-biom +WORKDIR /qtp-biom +RUN sed -i "s|'qiita-files @ https://github.com/qiita-spots/'||" setup.py +RUN sed -i "s|'qiita-files/archive/master.zip',||" setup.py +RUN sed -i "s|'qiita_client @ https://github.com/qiita-spots/'||" setup.py +RUN sed -i "s|'qiita_client/archive/master.zip'||" setup.py +RUN pip install -e . +RUN pip install --upgrade certifi +RUN pip install pip-system-certs +#RUN conda install tornado + +# TODO: should the plugin get the server configuration?! +RUN export QIITA_CONFIG_FP=/qiita/config_qiita_oidc.cfg + +WORKDIR / + +#COPY start_qtp-biom.sh . +#RUN chmod 755 start_qtp-biom.sh + +#RUN mkdir -p /unshared_plugins +#ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +## Export cert and config filepaths +#COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +#ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +#ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +#RUN export QIITA_ROOTCA_CERT=/unshared_certificates/ci_rootca.crt +#COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +#RUN /qtp-biom/scripts/configure_biom --env-script "true" --server-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +#RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qtp-biom/" /unshared_plugins/*.conf + +# prepare for runtime stage +# WORKDIR / +RUN pip uninstall pip-system-certs -y +RUN repo=q2-feature-table; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.1.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-metadata; mkdir 
-p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-mystery-stew; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-types; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2cli; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2templates; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=qiime2; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.3.tar.gz | tar -xz --strip-components=1 -C /$repo + +COPY requirements.txt ./requirements.txt +RUN conda install cython +RUN pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt +RUN pip install iow + +CMD ["./start_qtp-biom.sh"] + +# ========================== +# Stage 2: Runtime +# ========================== +FROM python:3.8-slim + +# let the container know it's plugin name +ENV PLUGIN=qtp-biom + +# python package compile in build stage +COPY --from=builder /wheels /wheels + +RUN pip install --no-cache-dir /wheels/* \ + && rm -rf rm -rf `find /usr/local/lib/python3.8/site-packages -type d -name "tests" | grep -v numpy` +# ^^ 788MB + +COPY --from=builder /opt/conda/envs/qtp-biom/lib/python3.8/site-packages/bp /usr/local/lib/python3.8/site-packages/bp +RUN ln -s /usr/local/lib/python3.8/site-packages/scikit_learn.libs/libgomp-a34b3233.so.1.0.0 /lib/x86_64-linux-gnu/libgomp.so.1 + +# install tornado based trigger layer in base environment +#RUN pip install -U --no-cache-dir tornado +COPY trigger_noconda.py /trigger.py + +WORKDIR / + +COPY 
start_qtp-biom.sh . +RUN chmod 755 start_qtp-biom.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +# RUN pip install pip-system-certs + +#RUN sed -i "s|^#\!.*|#\!/usr/local/bin/python|" /usr/local/bin/configure_biom +#RUN sed -i "s|^#\!.*|#\!/usr/local/bin/python|" /usr/local/bin/start_biom + +# use git branch instead of pypi version (stored via wheel) +#COPY --from=builder /qiita_client /qiita_client +#RUN cd qiita_client && pip install . + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +#RUN mkdir -p /qiita_server_certificates/ +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +RUN configure_biom --env-script "true" --server-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qtp-biom/" /unshared_plugins/*.conf + +# fix an pandas deprecation issue, i.e. 
patch q2templates code +RUN sed -i "s/'display.max_colwidth', -1/'display.max_colwidth', None/" /usr/local/lib/python3.8/site-packages/q2templates/util.py + +# remove conda command from trigger.py +# RUN sed -i "s|source /opt/conda/etc/profile.d/conda.sh; conda activate /opt/conda/envs/%s;||" /trigger.py && sed -i "s|conda_env_name, ||" /trigger.py + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qtp-biom.sh"] + +# python -c "import qiime2.plugins.feature_table" \ No newline at end of file diff --git a/Images/qtp-biom/requirements.txt b/Images/qtp-biom/requirements.txt new file mode 100644 index 0000000..930e5eb --- /dev/null +++ b/Images/qtp-biom/requirements.txt @@ -0,0 +1,32 @@ +-e /q2-feature-table +-e /q2-metadata +-e /q2-mystery-stew +-e /q2-types +-e /q2cli +-e /q2templates +-e /qiime2 + +pyyaml +decorator +tzlocal +bibtexparser +psutil +flufl.lock +parsl +appdirs +tomlkit +scikit-bio +rnanorm +seaborn +jinja2 +ijson +pyhmmer +frictionless +numpy +iow +tornado +pip-system-certs + +-e /qtp-biom +-e /qiita-files +-e /qiita_client diff --git a/Images/qtp-biom/start_qtp-biom.sh b/Images/qtp-biom/start_qtp-biom.sh new file mode 100644 index 0000000..dc62c7e --- /dev/null +++ b/Images/qtp-biom/start_qtp-biom.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python trigger.py start_biom + +tail -f /dev/null diff --git a/Images/qtp-biom/summary.py.patch b/Images/qtp-biom/summary.py.patch new file mode 100644 index 0000000..60b2bae --- /dev/null +++ b/Images/qtp-biom/summary.py.patch @@ -0,0 +1,53 @@ +--- /usr/local/lib/python3.8/site-packages/qtp_biom/summary.py 2025-08-22 22:16:54.361692587 +0000 ++++ /exchange/summary.py.new 2025-08-22 22:16:20.964222594 +0000 +@@ -12,8 +12,11 @@ + import pandas as pd + from tempfile import mkstemp + +-import qiime2 +-from qiime2.plugins.feature_table.visualizers import summarize ++# import qiime2 ++# from qiime2.plugins.feature_table.visualizers import summarize ++import sys ++sys.path.append("/") ++from q2summarize 
import summarize as q2_summarize # this is a stripped, patched version from the above but comes with way less q2 dependencies! + from skbio.tree import TreeNode + from biom import load_table + +@@ -57,19 +60,19 @@ + df = pd.DataFrame.from_dict(metadata, orient='index') + df.to_csv(path, index_label='#SampleID', na_rep='', sep='\t', + encoding='utf-8') +- metadata = qiime2.Metadata.load(path) ++ metadata = df # qiime2.Metadata.load(path) + remove(path) + else: +- metadata = qiime2.Metadata.load(metadata) ++ metadata = pd.read_csv(metadata, sep="\t", index_col=0) # qiime2.Metadata.load(metadata) + +- table = qiime2.Artifact.import_data('FeatureTable[Frequency]', biom_fp) ++ table = load_table(biom_fp) # qiime2.Artifact.import_data('FeatureTable[Frequency]', biom_fp) + +- summary, = summarize(table=table, sample_metadata=metadata) +- index_paths = summary.get_index_paths() ++ q2_summarize(out_dir, table=table, sample_metadata=metadata) ++ # index_paths = summary.get_index_paths() + # this block is not really necessary but better safe than sorry +- if 'html' not in index_paths: +- return (False, None, +- "Only Qiime 2 visualization with an html index are supported") ++ # if 'html' not in index_paths: ++ # return (False, None, ++ # "Only Qiime 2 visualization with an html index are supported") + + # gather some stats about the phylogenetic tree if exists + summary_tree = "" +@@ -99,7 +102,7 @@ + " ") % (num_placements, num_rejected, + num_tips_reference) + +- index_name = basename(index_paths['html']) ++ index_name = 'index.html' # basename(index_paths['html']) + index_fp = join(out_dir, 'index.html') + with open(index_fp, 'w') as f: + f.write(Q2_INDEX % (summary_tree, index_name)) diff --git a/Images/qtp-diversity/qtp-diversity.dockerfile b/Images/qtp-diversity/qtp-diversity.dockerfile new file mode 100644 index 0000000..f7d79cc --- /dev/null +++ b/Images/qtp-diversity/qtp-diversity.dockerfile @@ -0,0 +1,183 @@ +# VERSION: 2025.09.11 + +FROM ubuntu:24.04 AS builder + 
+ARG MINIFORGE_VERSION=24.1.2-0 +ARG QIIME2RELEASE=2022.11 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# Download qiime2 yaml +RUN wget -q https://data.qiime2.org/distro/core/qiime2-${QIIME2RELEASE}-py38-linux-conda.yml + +RUN sed -n '/channels/,/dependencies/p' qiime2-${QIIME2RELEASE}-py38-linux-conda.yml > tinyq2.yml && \ + echo " - q2-metadata=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-mystery-stew=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-types=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-diversity=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-diversity-lib=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2cli=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2templates=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-taxa=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - typeguard=2.13.3" >> tinyq2.yml && \ + echo " - unifrac-binaries=1.1.1" >> tinyq2.yml && \ + echo " - qiime2" >> tinyq2.yml + +# Create conda env +RUN conda config --set channel_priority strict && conda env create --name qiime2 -y --file tinyq2.yml +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/qiime2", 
"/bin/bash", "-c"] + +RUN pip install -U pip +# RUN pip install https://github.com/qiita-spots/qiita_client/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita_client.git +RUN cd qiita_client && pip install --no-cache-dir . + +# RUN pip install https://github.com/qiita-spots/qiita-files/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita-files.git +RUN cd /qiita-files && pip install -e . -v + +#RUN pip install https://github.com/biocore/q2-mislabeled/archive/refs/heads/main.zip +RUN git clone https://github.com/qiita-spots/qtp-diversity.git +WORKDIR /qtp-diversity +RUN sed -i "s|'qiita-files @ https://github.com/'||" setup.py +RUN sed -i "s|'qiita-spots/qiita-files/archive/master.zip',||" setup.py +RUN sed -i "s|'qiita_client @ https://github.com/qiita-spots/'||" setup.py +RUN sed -i "s|'qiita_client/archive/master.zip'||" setup.py +RUN pip install -e . +RUN pip install --upgrade certifi +RUN pip install pip-system-certs + +WORKDIR / + +RUN repo=q2-metadata; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-mystery-stew; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-types; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2cli; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.1.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2templates; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=qiime2; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.1.tar.gz | tar -xz --strip-components=1 -C 
/$repo +RUN repo=q2-taxa; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-diversity; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.1.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-diversity-lib; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo + +# the below one is huge as it installs source tracker as a dependency. I think (smj 2025-09-09) that we "just" need the type definition for the CI tests of qtp-diversity. Thus, patch the dependency away +RUN repo=q2-mislabeld; mkdir -p /$repo && wget -O- https://github.com/biocore/q2-mislabeled/archive/refs/tags/2023.2.tar.gz | tar -xz --strip-components=1 -C /$repo && cd /$repo && sed -i "s|'sourcetracker @ https://github.com/'||" setup.py && sed -i "s|'wasade/sourcetracker2/archive/be_sparse.zip'||" setup.py && pip install -e . 
+ +RUN repo=unifrac; mkdir -p /$repo && wget -O- https://github.com/biocore/unifrac/archive/refs/tags/1.1.1.tar.gz | tar -xz --strip-components=1 -C /$repo + +COPY requirements.txt ./requirements.txt +RUN pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt + +# RUN sed -n '/channels/,/dependencies/p' qiime2-${QIIME2RELEASE}-py38-linux-conda.yml > deps.yml && \ +# echo " - umap-learn" >> deps.yml && \ +# echo " - unifrac=1.1.1" >> deps.yml && \ +# echo " - unifrac-binaries=1.1.1" >> deps.yml && \ +# echo " - qiime2" >> deps.yml + +# # Create conda env +# RUN conda config --set channel_priority strict && conda env create --name dependencies -y --file deps.yml +RUN cd /opt/conda/envs/qiime2/lib/python3.8/site-packages/q2_diversity/ && tar czvf /q2_diversity_assets.tgz _beta/adonis_assets _beta/beta_rarefaction_assets _beta/mantel_assets _beta/beta_group_significance_assets _alpha/alpha_group_significance_assets _alpha/alpha_correlation_assets _alpha/alpha_rarefaction_assets + +# ========================== +# Stage 2: Runtime +# ========================== +FROM python:3.8-slim + +# let the container know it's plugin name +ENV PLUGIN=qtp-diversity + +# python package compile in build stage +COPY --from=builder /wheels /wheels + +RUN pip install --no-cache-dir /wheels/* \ + && rm -rf rm -rf `find /usr/local/lib/python3.8/site-packages -type d -name "tests" | grep -v numpy` + +COPY start_qtp-diversity.sh . 
+RUN chmod 755 start_qtp-diversity.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +# install tornado based trigger layer in base environment +COPY trigger_noconda.py /trigger.py + +#COPY --from=builder /opt/conda/envs/dependencies/bin/* /opt/conda/bin/ +#COPY --from=builder /opt/conda/envs/dependencies/sbin/* /opt/conda/sbin/ +#COPY --from=builder /opt/conda/envs/dependencies/lib/* /opt/conda/lib/ +#COPY --from=builder /opt/conda/envs/dependencies/x86_64-conda-linux-gnu/* /opt/conda/x86_64-conda-linux-gnu/ +#ENV PATH=$PATH:/opt/conda/bin:/opt/conda/sbin +# RUN for d in `echo bin lib sbin x86_64-conda-linux-gnu`; do cp -r /dep_conda/$d/* /usr/local/$d/; done + +RUN ln -s /usr/local/lib/python3.8/site-packages/scikit_learn.libs/libgomp-a34b3233.so.1.0.0 /lib/x86_64-linux-gnu/libgomp.so.1 + +# everything for unifrac +COPY --from=builder /opt/conda/envs/qiime2/bin/ssu /usr/local/bin/ssu +COPY --from=builder /opt/conda/envs/qiime2/bin/faithpd /usr/local/bin/faithpd +COPY --from=builder /opt/conda/envs/qiime2/lib/libssu.so /usr/local/lib/libssu.so +COPY --from=builder /opt/conda/envs/qiime2/lib/libopenblasp-r0.3.25.so /usr/local/lib/libopenblasp-r0.3.25.so +COPY --from=builder /opt/conda/envs/qiime2/lib/libhdf5_hl_cpp.so.100.1.4 /usr/local/lib/libhdf5_hl_cpp.so.100 +COPY --from=builder /opt/conda/envs/qiime2/lib/libhdf5_hl.so.100.1.3 /usr/local/lib/libhdf5_hl.so.100 +COPY --from=builder /opt/conda/envs/qiime2/lib/libhdf5.so.103.2.0 /usr/local/lib/libhdf5.so.103 +COPY --from=builder /opt/conda/envs/qiime2/lib/libgfortran.so.5.0.0 /usr/local/lib/libgfortran.so.5 +COPY --from=builder /opt/conda/envs/qiime2/lib/libquadmath.so.0.0.0 /usr/local/lib/libquadmath.so.0 +COPY --from=builder /opt/conda/envs/qiime2/lib/libhdf5_cpp.so.103.2.0 /usr/local/lib/libhdf5_cpp.so.103 +COPY --from=builder /opt/conda/envs/qiime2/lib/libhdf5_hl_cpp.so.100.1.4 /usr/local/lib/libhdf5_hl_cpp.so.100 +COPY --from=builder 
/opt/conda/envs/qiime2/lib/libhdf5_hl.so.100.1.3 /usr/local/lib/libhdf5_hl.so.100 +COPY --from=builder /opt/conda/envs/qiime2/lib/libhdf5.so.103.2.0 /usr/local/lib/libhdf5.so.103 +COPY --from=builder /opt/conda/envs/qiime2/lib/libcrypto.so.1.1 /usr/local/lib/libcrypto.so.1.1 +COPY --from=builder /opt/conda/envs/qiime2/lib/libcurl.so.4.8.0 /usr/local/lib/libcurl.so.4 +COPY --from=builder /opt/conda/envs/qiime2/lib/libnghttp2.so.14.24.1 /usr/local/lib/libnghttp2.so.14 +COPY --from=builder /opt/conda/envs/qiime2/lib/libssh2.so.1.0.1 /usr/local/lib/libssh2.so.1 +COPY --from=builder /opt/conda/envs/qiime2/lib/libssl.so.1.1 /usr/local/lib/libssl.so.1.1 +RUN ln -s /usr/local/lib/libopenblasp-r0.3.25.so /usr/local/lib/libcblas.so.3 +RUN ln -s /usr/local/lib/libopenblasp-r0.3.25.so /usr/local/lib/liblapacke.so.3 +RUN for f in `echo "libssu.so libhdf5_cpp.so.103 liblapacke.so.3 libcblas.so.3 libhdf5_hl_cpp.so.100 libhdf5_hl.so.100 libhdf5.so.103 libcrypto.so.1.1 libcurl.so.4 libgfortran.so.5 libnghttp2.so.14 libssh2.so.1 libssl.so.1.1 libquadmath.so.0"`; do ln -s /usr/local/lib/$f /lib/x86_64-linux-gnu/$f; done + +# fix an pandas deprecation issue, i.e. 
patch q2templates code +RUN sed -i "s/'display.max_colwidth', -1/'display.max_colwidth', None/" /usr/local/lib/python3.8/site-packages/q2templates/util.py + +# COPY trigger_noconda.py /trigger.py +ENV LC_ALL=C.UTF-8 +ENV LANG=C.UTF-8 + +# initialize qiime2 +RUN qiime + +# copy qiime2 diversity assets +COPY --from=builder /q2_diversity_assets.tgz /usr/local/lib/python3.8/site-packages/q2_diversity/q2_diversity_assets.tgz +RUN cd /usr/local/lib/python3.8/site-packages/q2_diversity && tar xzvf q2_diversity_assets.tgz + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +RUN chmod u+x /usr/local/bin/configure_diversity_types /usr/local/bin/start_diversity_types +RUN configure_diversity_types --env-script "true" --ca-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qtp-diversity/" /unshared_plugins/*.conf + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qtp-diversity.sh"] diff --git a/Images/qtp-diversity/requirements.txt b/Images/qtp-diversity/requirements.txt new file mode 100644 index 0000000..d56143a --- /dev/null +++ b/Images/qtp-diversity/requirements.txt @@ -0,0 +1,36 @@ +-e /q2-metadata +-e /q2-mystery-stew +-e /q2-types +-e /q2-taxa +-e /q2-diversity +-e /q2-diversity-lib +-e /q2-mislabeld +-e /q2cli +-e /q2templates +-e /qiime2 + +tornado +pip-system-certs +pyyaml +decorator +tzlocal +bibtexparser +psutil +flufl.lock +parsl +appdirs +tomlkit +scikit-bio +rnanorm +seaborn +jinja2 +ijson +pyhmmer +frictionless +numpy +umap +-e /unifrac + +-e /qtp-diversity +-e /qiita-files +-e /qiita_client \ No newline at end of file 
diff --git a/Images/qtp-diversity/start_qtp-diversity.sh b/Images/qtp-diversity/start_qtp-diversity.sh new file mode 100644 index 0000000..0396871 --- /dev/null +++ b/Images/qtp-diversity/start_qtp-diversity.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python trigger.py start_diversity_types + +tail -f /dev/null diff --git a/Images/qtp-job-output-folder/qtp-job-output-folder.dockerfile b/Images/qtp-job-output-folder/qtp-job-output-folder.dockerfile new file mode 100644 index 0000000..3ea824a --- /dev/null +++ b/Images/qtp-job-output-folder/qtp-job-output-folder.dockerfile @@ -0,0 +1,99 @@ +# VERSION: 2025.09.11 + +# ========================== +# Stage 1: Build wheels +# ========================== +FROM ubuntu:24.04 AS builder + +ARG MINIFORGE_VERSION=24.1.2-0 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". 
${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# install tornado based trigger layer in base environment +RUN pip install -U pip + +# Create conda env +RUN conda create --name qtp-job-output-folder -y python=3.6 pip==9.0.3 +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/qtp-job-output-folder", "/bin/bash", "-c"] + +RUN pip install -U pip + +#RUN pip install https://github.com/qiita-spots/qiita_client/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita_client.git +RUN cd qiita_client && pip install --no-cache-dir . + +# RUN pip install https://github.com/qiita-spots/qiita-files/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita-files.git +RUN cd /qiita-files && pip install -e . -v + +RUN git clone https://github.com/qiita-spots/qtp-job-output-folder.git +WORKDIR /qtp-job-output-folder +RUN sed -i "s|'qiita-files @ https://github.com/'||" setup.py +RUN sed -i "s|'qiita-spots/qiita-files/archive/master.zip',||" setup.py +RUN sed -i "s|'qiita_client @ https://github.com/'||" setup.py +RUN sed -i "s|'qiita-spots/qiita_client/archive/master.zip'||" setup.py +RUN pip install -e . + +WORKDIR / +COPY requirements.txt /requirements.txt +RUN pip wheel --no-cache-dir --wheel-dir /wheels -r /requirements.txt + + +# ========================== +# Stage 2: Runtime +# ========================== +FROM python:3.6-slim + +# let the container know it's plugin name +ENV PLUGIN=qtp-job-output-folder + +# python package compile in build stage +COPY --from=builder /wheels /wheels + +RUN pip install --no-cache-dir /wheels/* \ + && rm -rf rm -rf `find /usr/local/lib/python3.6/site-packages -type d -name "tests" | grep -v numpy` + +COPY trigger_noconda.py /trigger.py + +COPY start_qtp-job-output-folder.sh . 
+RUN chmod 755 start_qtp-job-output-folder.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +#RUN export QIITA_ROOTCA_CERT=/unshared_certificates/ci_rootca.crt +RUN chmod u+x /usr/local/bin/configure_qtp_job_output_folder /usr/local/bin/start_qtp_job_output_folder +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +RUN configure_qtp_job_output_folder --env-script "true" --ca-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qtp-job-output-folder/" /unshared_plugins/*.conf + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qtp-job-output-folder.sh"] diff --git a/Images/qtp-job-output-folder/requirements.txt b/Images/qtp-job-output-folder/requirements.txt new file mode 100644 index 0000000..d56395f --- /dev/null +++ b/Images/qtp-job-output-folder/requirements.txt @@ -0,0 +1,6 @@ +tornado +pip-system-certs + +-e /qiita_client +-e /qiita-files +-e /qtp-job-output-folder diff --git a/Images/qtp-job-output-folder/start_qtp-job-output-folder.sh b/Images/qtp-job-output-folder/start_qtp-job-output-folder.sh new file mode 100644 index 0000000..d3097cb --- /dev/null +++ b/Images/qtp-job-output-folder/start_qtp-job-output-folder.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python trigger.py start_qtp_job_output_folder + +tail -f /dev/null diff --git a/Images/qtp-sequencing/qtp-sequencing.dockerfile b/Images/qtp-sequencing/qtp-sequencing.dockerfile new file mode 100644 index 0000000..20cbd1d --- /dev/null +++ b/Images/qtp-sequencing/qtp-sequencing.dockerfile @@ -0,0 +1,115 @@ +# VERSION: 2025.09.08 + +# 
========================== +# Stage 1: Build wheels (~5.8 GB) +# ========================== +FROM ubuntu:24.04 AS builder + +ARG MINIFORGE_VERSION=24.1.2-0 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# Create conda env +RUN conda create --name qtp-sequencing -y -c conda-forge -c bioconda pip pigz quast fqtools python=3.9 +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/qtp-sequencing", "/bin/bash", "-c"] + +RUN pip install -U pip +#RUN pip install https://github.com/qiita-spots/qiita_client/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita_client.git +RUN cd qiita_client && pip install --no-cache-dir . + +# RUN pip install https://github.com/qiita-spots/qiita-files/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita-files.git +RUN cd /qiita-files && pip install -e . 
-v + +RUN git clone https://github.com/qiita-spots/qtp-sequencing.git +WORKDIR /qtp-sequencing +RUN sed -i "s|'qiita-files @ https://github.com/'||" setup.py +RUN sed -i "s|'qiita-spots/qiita-files/archive/master.zip',||" setup.py +RUN sed -i "s|'qiita_client @ https://github.com/'||" setup.py +RUN sed -i "s|'qiita-spots/qiita_client/archive/master.zip'||" setup.py +RUN pip install -e . +RUN pip install --upgrade certifi +RUN pip install pip-system-certs + +COPY requirements.txt ./requirements.txt +RUN pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt + +# ========================== +# Stage 2: Runtime +# ========================== +FROM python:3.9-slim + +# let the container know it's plugin name +ENV PLUGIN=qtp-sequencing + +# python package compile in build stage +COPY --from=builder /wheels /wheels + +RUN pip install --no-cache-dir /wheels/* \ + && rm -rf rm -rf `find /usr/local/lib/python3.9/site-packages -type d -name "tests" | grep -v numpy` + +# "install" https://github.com/alastair-droop/fqtools +COPY --from=builder /opt/conda/envs/qtp-sequencing/bin/fqtools /usr/local/bin/fqtools +COPY --from=builder /opt/conda/envs/qtp-sequencing/lib/libhts.so.1.22.1 /lib/x86_64-linux-gnu/libhts.so.3 +COPY --from=builder /opt/conda/envs/qtp-sequencing/lib/libdeflate.so.0 /lib/x86_64-linux-gnu/ + +# "install" pigz +COPY --from=builder /opt/conda/envs/qtp-sequencing/bin/pigz /usr/local/bin/ + +COPY trigger_noconda.py /trigger.py + +# link to quast program +RUN ln -s /usr/local/bin/quast.py /usr/local/bin/quast + +# WORKDIR / + +COPY start_qtp-sequencing.sh . 
+RUN chmod 755 start_qtp-sequencing.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +RUN configure_qtp_sequencing --env-script "true" --ca-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qtp-sequencing/" /unshared_plugins/*.conf + +# for docker compose health check +RUN mkdir -p /usr/share/man/man1 && \ + echo "deb [trusted=yes] http://archive.debian.org/debian buster main" > /etc/apt/sources.list && \ + echo "deb [trusted=yes] http://archive.debian.org/debian-security buster/updates main" >> /etc/apt/sources.list && \ + echo "deb [trusted=yes] http://archive.debian.org/debian buster-updates main" >> /etc/apt/sources.list && \ + apt-get update && \ + apt-get install -y --no-install-recommends wget && \ + rm -rf /var/lib/apt/lists/* + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qtp-sequencing.sh"] \ No newline at end of file diff --git a/Images/qtp-sequencing/requirements.txt b/Images/qtp-sequencing/requirements.txt new file mode 100644 index 0000000..cbcce99 --- /dev/null +++ b/Images/qtp-sequencing/requirements.txt @@ -0,0 +1,7 @@ +tornado +pip-system-certs +quast + +-e /qiita_client +-e /qiita-files +-e /qtp-sequencing \ No newline at end of file diff --git a/Images/qtp-sequencing/start_qtp-sequencing.sh b/Images/qtp-sequencing/start_qtp-sequencing.sh new file mode 100644 index 0000000..d9ab477 --- /dev/null +++ b/Images/qtp-sequencing/start_qtp-sequencing.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python trigger.py 
start_qtp_sequencing + +tail -f /dev/null diff --git a/Images/qtp-visualization/qtp-visualization.dockerfile b/Images/qtp-visualization/qtp-visualization.dockerfile new file mode 100644 index 0000000..e08ad3e --- /dev/null +++ b/Images/qtp-visualization/qtp-visualization.dockerfile @@ -0,0 +1,122 @@ +# VERSION: 2025.08.29 + +FROM ubuntu:24.04 AS builder + +ARG MINIFORGE_VERSION=24.1.2-0 +ARG QIIME2RELEASE=2023.5 + +ENV CONDA_DIR=/opt/conda +ENV PATH=${CONDA_DIR}/bin:${PATH} + +RUN apt-get -y update +RUN apt-get -y --fix-missing install \ + git \ + wget \ + libpq-dev \ + python3-dev \ + gcc \ + build-essential + +# install miniforge3 for "conda" +# see https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile +RUN wget https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh -O /tmp/miniforge3.sh && \ + /bin/bash /tmp/miniforge3.sh -b -p ${CONDA_DIR} && \ + echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \ + echo ". 
${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc && \ + conda init && \ + rm -f /tmp/miniforge3.sh + +# install tornado based trigger layer in base environment +RUN pip install -U pip + +# Download qiime2 yaml (make sure to use a qiime2 version that is able to visualize qiime artifacts of the correct version) +RUN wget --quiet https://data.qiime2.org/distro/core/qiime2-${QIIME2RELEASE}-py38-linux-conda.yml + +RUN sed -n '/channels/,/dependencies/p' qiime2-${QIIME2RELEASE}-py38-linux-conda.yml > tinyq2.yml && \ + echo " - q2-metadata=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-mystery-stew=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2-types=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2cli=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - q2templates=${QIIME2RELEASE}" >> tinyq2.yml && \ + echo " - typeguard=2.13.3" >> tinyq2.yml && \ + echo " - qiime2" >> tinyq2.yml + +# Create conda env +RUN conda config --set channel_priority strict && conda env create --name qtp-visualization -y --file tinyq2.yml +# Make RUN commands use the new environment: +# append --format docker to the build command, see https://github.com/containers/podman/issues/8477 +SHELL ["conda", "run", "-p", "/opt/conda/envs/qtp-visualization", "/bin/bash", "-c"] + +ENV LC_ALL=C.UTF-8 +ENV LANG=C.UTF-8 + +RUN pip install -U pip +#RUN pip install https://github.com/qiita-spots/qiita_client/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita_client.git +RUN cd qiita_client && pip install --no-cache-dir . + +#RUN pip install https://github.com/qiita-spots/qiita-files/archive/master.zip +RUN git clone -b master https://github.com/qiita-spots/qiita-files.git +RUN cd /qiita-files && pip install -e . -v + +RUN git clone https://github.com/qiita-spots/qtp-visualization.git +WORKDIR /qtp-visualization +RUN sed -i "s|'qiita_client', 'click >= 3.3', 'qiime2'|'click >= 3.3'|" setup.py +RUN pip install -e . 
+RUN pip install --upgrade certifi +RUN pip install pip-system-certs + +WORKDIR / + +RUN repo=q2-metadata; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-mystery-stew; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2-types; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2cli; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.1.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=q2templates; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.0.tar.gz | tar -xz --strip-components=1 -C /$repo +RUN repo=qiime2; mkdir -p /$repo && wget -O- https://github.com/qiime2/$repo/archive/refs/tags/${QIIME2RELEASE}.1.tar.gz | tar -xz --strip-components=1 -C /$repo + +COPY requirements.txt ./requirements.txt +# RUN conda install cython +RUN pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt +# RUN pip install iow + + + +# ========================== +# Stage 2: Runtime +# ========================== +FROM python:3.8-slim + +# let the container know it's plugin name +ENV PLUGIN=qtp-visualization + +# python package compile in build stage +COPY --from=builder /wheels /wheels + +RUN pip install --no-cache-dir /wheels/* \ + && rm -rf rm -rf `find /usr/local/lib/python3.8/site-packages -type d -name "tests" | grep -v numpy` + +COPY start_qtp-visualization.sh . 
+RUN chmod 755 start_qtp-visualization.sh + +RUN mkdir -p /unshared_plugins +ENV QIITA_PLUGINS_DIR=/unshared_plugins/ + +COPY trigger_noconda.py /trigger.py + +## Export cert and config filepaths +COPY qiita_server_certificates/qiita_server_certificates.pem /qiita_server_certificates/qiita_server_certificates.pem +ENV REQUESTS_CA_BUNDLE=/qiita_server_certificates/qiita_server_certificates.pem +ENV SSL_CERT_FILE=/qiita_server_certificates/qiita_server_certificates.pem + +RUN chmod u+x /usr/local/bin/configure_visualization_types /usr/local/bin/start_visualization_types +COPY qiita_server_certificates/*_server.* /qiita_server_certificates/ +# qiime2 expects to have a CONDA_PREFIX set, see https://github.com/qiime2/qiime2/blob/812fd09cf80b4ed76c1f39827ae2dba729448436/qiime2/sdk/parallel_config.py#L30 +ENV CONDA_PREFIX=/usr/local +RUN configure_visualization_types --env-script "true" --server-cert `find /qiita_server_certificates/ -name "*_server.crt" -type f` +RUN sed -i -E "s/^START_SCRIPT = .+/START_SCRIPT = python \/start_plugin.py qtp-visualization/" /unshared_plugins/*.conf + +# for testing +COPY test_plugin.sh /test_plugin.sh + +CMD ["./start_qtp-visualization.sh"] \ No newline at end of file diff --git a/Images/qtp-visualization/requirements.txt b/Images/qtp-visualization/requirements.txt new file mode 100644 index 0000000..15ececc --- /dev/null +++ b/Images/qtp-visualization/requirements.txt @@ -0,0 +1,30 @@ +-e /q2-metadata +-e /q2-mystery-stew +-e /q2-types +-e /q2cli +-e /q2templates +-e /qiime2 + +tornado +pip-system-certs +pyyaml +decorator +tzlocal +bibtexparser +psutil +flufl.lock +parsl +appdirs +tomlkit +scikit-bio +rnanorm +seaborn +jinja2 +ijson +pyhmmer +frictionless +numpy + +-e /qtp-visualization +-e /qiita-files +-e /qiita_client \ No newline at end of file diff --git a/Images/qtp-visualization/start_qtp-visualization.sh b/Images/qtp-visualization/start_qtp-visualization.sh new file mode 100644 index 0000000..e5a4c14 --- /dev/null +++ 
b/Images/qtp-visualization/start_qtp-visualization.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd / && python trigger.py start_visualization_types + +tail -f /dev/null diff --git a/Images/test_plugin.sh b/Images/test_plugin.sh new file mode 100644 index 0000000..2f03968 --- /dev/null +++ b/Images/test_plugin.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +echo "plugin to be tested is: '$PLUGIN'" + +# install dependencies +apt-get update +apt-get -y --fix-missing install git +if [ "qp-target-gene" == "$PLUGIN" ]; then + REQUESTS_CA_BUNDLE="" pip2 install "pytest<5"; +else + REQUESTS_CA_BUNDLE="" pip install pytest; +fi; + +if [ "qp-qiime2" != "$PLUGIN" ]; then + # clone plugin repository + git clone https://github.com/qiita-spots/${PLUGIN} +fi; + +# NOTE: client api reset only works when communicating with Qitta Master, +# thus, you need to directly address the port of the master container. Don't +# go through nginx! + +# fix qiita base url in client +for f in `find /usr/local/lib/python*/site-packages/qiita_client/ /usr/local/lib/python*/dist-packages/qiita_client/ /opt/conda/envs/qiime2/lib/python3.8/site-packages/qiita_client/ -name "testing.py"`; do + sed -i 's|URL = "https://localhost:8383"|URL = "https://tinqiita-qiita-1:21174"|' $f; +done + +# fix qiita base url in qtp-sequencing plugin tests +for f in `find /${PLUGIN}/*/tests/ -name 'test_*.py'`; do + sed -i 's|https://localhost:21174|https://tinqiita-qiita-1:21174|' $f; + # below seen in qp-target-gene + sed -i 's|plugin("https://localhost:21174", .register., .ignored.)|plugin("https://tinqiita-qiita-1:21174", "register", "ignored")|' $f; +done + +# fix qiita base url in qtp-diversity plugin tests. Use . 
instead of " or ' to be more general +for f in `find /${PLUGIN}/*/tests/ -name 'test_*.py'`; do + sed -i "s|plugin(.https://localhost:8383., .register., .ignored.)|plugin('https://tinqiita-nginx-1:8383', 'register', 'ignored')|" $f; + # below seen in qtp-biom + sed -i 's|plugin("https://localhost:8383", job_id, self.out_dir)|plugin("https://tinqiita-nginx-1:8383", job_id, self.out_dir)|' $f; +done + +# better save than sorry +export QIITA_PORT=21174 +export QIITA_ROOTCA_CERT=$SSL_CERT_FILE + +# change into plugin source directory and execute actual tests +if [ "qp-qiime2" == "$PLUGIN" ]; then + source /opt/conda/etc/profile.d/conda.sh; conda activate /opt/conda/envs/qiime2; cd ${PLUGIN} && pytest; +else + cd ${PLUGIN} && pytest; +fi; diff --git a/Images/trigger.py b/Images/trigger.py new file mode 100644 index 0000000..9293396 --- /dev/null +++ b/Images/trigger.py @@ -0,0 +1,88 @@ +import tornado.ioloop +import tornado.web +import json +import subprocess +from glob import glob +import sys +import traceback +import asyncio + +conda_env_name = None +plugin_start_script = None +plugin_src_dir = None + +class RunCommandHandler(tornado.web.RequestHandler): + async def post(self): + try: + # JSON-Request-Daten lesen + data = json.loads(self.request.body.decode("utf-8")) + qiita_worker_url = data.get('url') + job_id = data.get('job_id') + output_dir = data.get('output_dir') + + #command = data.get("command") + + if not qiita_worker_url or not job_id or not output_dir: + self.set_status(400) + self.write({"error": "Kein Befehl angegeben"}) + return + + # Systembefehl ausfuehren + cmd = 'source /opt/conda/etc/profile.d/conda.sh; conda activate /opt/conda/envs/%s; %s/scripts/%s %s %s %s' % (conda_env_name, plugin_src_dir, plugin_start_script, qiita_worker_url, job_id, output_dir) + # Asynchronen Subprozess starten + proc = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + executable='/bin/bash' + ) + stdout, 
stderr = await proc.communicate() + #result = subprocess.run(cmd, shell=True, universal_newlines=True, executable='/bin/bash', stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + if proc.returncode != 0: + self.set_status(500) + + # Antwort zurueckgeben + self.write({ + "stdout": stdout.decode(), + "stderr": stderr.decode(), + "returncode": proc.returncode, + "cmd": cmd, + }) + + except Exception as e: + self.set_status(500) + # a hack to learn which docker service I am in + plugin_name = "unknown" + for f in glob('/start_*.sh'): + plugin_name = f.split('_')[-1].replace('.sh', '') + break + print("Error in service '%s': %s" % (plugin_name, str(e)), file=sys.stderr) + traceback.print_exc() + self.write({"error": str(e)}) + +class RunConfigHandler(tornado.web.RequestHandler): + async def get(self): + try: + for fp_config in glob('/unshared_plugins/*.conf'): + with open(fp_config, 'r') as f: + self.write(''.join(f.readlines()) + '\n') + except Exception as e: + self.set_status(500) + self.write({"error": str(e)}) + +def make_app(): + return tornado.web.Application([ + (r"/run", RunCommandHandler), + (r"/config", RunConfigHandler), + ]) + +if __name__ == "__main__": + conda_env_name = sys.argv[1] + plugin_start_script = sys.argv[2] + plugin_src_dir = sys.argv[3] + + app = make_app() + app.listen(5000) # Server auf Port 5000 starten + print("Server laeuft auf http://localhost:5000", file=sys.stderr) + tornado.ioloop.IOLoop.current().start() diff --git a/Images/trigger_noconda.py b/Images/trigger_noconda.py new file mode 100644 index 0000000..5f62d5c --- /dev/null +++ b/Images/trigger_noconda.py @@ -0,0 +1,82 @@ +import tornado.ioloop +import tornado.web +import json +import subprocess +from glob import glob +import sys +import traceback +import os +import asyncio + +plugin_start_script = None + +class RunCommandHandler(tornado.web.RequestHandler): + async def post(self): + try: + # JSON-Request-Daten lesen + data = json.loads(self.request.body.decode("utf-8")) + 
qiita_worker_url = data.get('url') + job_id = data.get('job_id') + output_dir = data.get('output_dir') + + #command = data.get("command") + + if not qiita_worker_url or not job_id or not output_dir: + self.set_status(400) + self.write({"error": "Kein Befehl angegeben"}) + return + + # Systembefehl ausfuehren + cmd = '%s %s %s %s' % (plugin_start_script, qiita_worker_url, job_id, output_dir) + # Asynchronen Subprozess starten + proc = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + executable='/bin/bash' + ) + stdout, stderr = await proc.communicate() + #result = subprocess.run(cmd, shell=True, universal_newlines=True, executable='/bin/bash', stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + if proc.returncode != 0: + self.set_status(500) + + # Antwort zurueckgeben + self.write({ + "stdout": stdout.decode(), + "stderr": stderr.decode(), + "returncode": proc.returncode, + "cmd": cmd, + }) + + except Exception as e: + self.set_status(500) + # a hack to learn which docker service I am in + plugin_name = os.path.basename(plugin_start_script).replace('start_', '') + print("Error in service '%s': %s" % (plugin_name, str(e)), file=sys.stderr) + traceback.print_exc() + self.write({"error": str(e)}) + +class RunConfigHandler(tornado.web.RequestHandler): + async def get(self): + try: + for fp_config in glob('/unshared_plugins/*.conf'): + with open(fp_config, 'r') as f: + self.write(''.join(f.readlines()) + '\n') + except Exception as e: + self.set_status(500) + self.write({"error": str(e)}) + +def make_app(): + return tornado.web.Application([ + (r"/run", RunCommandHandler), + (r"/config", RunConfigHandler), + ]) + +if __name__ == "__main__": + plugin_start_script = sys.argv[1] + + app = make_app() + app.listen(5000) # Server auf Port 5000 starten + print("Server laeuft auf http://localhost:5000", file=sys.stderr) + tornado.ioloop.IOLoop.current().start() diff --git a/Makefile b/Makefile new file mode 
100644 index 0000000..37eb050 --- /dev/null +++ b/Makefile @@ -0,0 +1,142 @@ +PODMAN_FLAGS = +PODMAN_BIN = docker buildx +CERTNAME=stefan +OPENSSL=/bin/openssl +DIR_REFERENCES=references +# docker compose prepends name of directory to containers + +TMPDIR := $(shell mktemp -d) +ifeq ($(origin tmpdir), undefined) +tmpdir = $(TMPDIR) +endif + +$(DIR_REFERENCES)/qiita_server_certificates: Images/plugin_collector/stefan_csr.conf Images/plugin_collector/stefan_cert.conf + # === create own certificates === + mkdir -p $@ + # Generate a new root CA private key and certificate + cd $@ && $(OPENSSL) req -x509 -sha256 -days 356 -nodes -newkey rsa:2048 -subj "/CN=tinqiita-nginx-1/C=DE/L=Giessen" -keyout $(CERTNAME)_rootca.key -out $(CERTNAME)_rootca.crt + # Generate a new server private key + cd $@ && $(OPENSSL) genrsa -out $(CERTNAME)_server.key 2048 + # Copy the following to a new file named csr.conf and modify to suit your needs + # Copy the following to a new file named cert.conf and modify to suit your needs + # Nils: alt_names is the important aspect. 
Make entries for all valid hostnames with which services shall be addressed + for f in `echo "$^"`; do cat $$f > $@/`basename $$f`; done + #cp $^ $@/ + # Generate a certificate signing request + cd $@ && $(OPENSSL) req -new -key $(CERTNAME)_server.key -out $(CERTNAME)_server.csr -config $(CERTNAME)_csr.conf + # Generate a new signed server.crt to use with your server.key + cd $@ && $(OPENSSL) x509 -req -in $(CERTNAME)_server.csr -CA $(CERTNAME)_rootca.crt -CAkey $(CERTNAME)_rootca.key -CAcreateserial -out $(CERTNAME)_server.crt -days 365 -sha256 -extfile $(CERTNAME)_cert.conf + # concat rootca and server certificates into one file + cd $@ && cat $(CERTNAME)_rootca.crt $(CERTNAME)_server.crt > qiita_server_certificates.pem + # === end: create own certificates === + +# a general target, executed for each plugin +plugin: Images/trigger.py Images/trigger_noconda.py $(DIR_REFERENCES)/qiita_server_certificates Images/test_plugin.sh + cp -r $^ $(tmpdir)/ + +.built_image_qtp-biom: Images/qtp-biom/qtp-biom.dockerfile Images/qtp-biom/start_qtp-biom.sh src/qiita-files/ src/qtp-biom/ Images/qtp-biom/requirements.txt + test -d src/qtp-biom || git clone https://github.com/qiita-spots/qtp-biom.git src/qtp-biom + tmpdir=$(TMPDIR) $(MAKE) plugin + cp -r $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." -f 1` + +.built_image_qtp-sequencing: Images/qtp-sequencing/qtp-sequencing.dockerfile Images/qtp-sequencing/start_qtp-sequencing.sh Images/qtp-sequencing/requirements.txt + tmpdir=$(TMPDIR) $(MAKE) plugin + cp $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." 
-f 1` + +# download GG13.8 reference sets from ftp://ftp.microbio.me/greengenes_release/gg_13_8_otus, instead of storing these large files within the qp-target-gene image 149 MB +$(DIR_REFERENCES)/qp-target-gene: + mkdir -p $(DIR_REFERENCES)/qp-target-gene + echo '56ef15dccf2e931ec173f4f977ed649b 97_otu_taxonomy.txt' > $(DIR_REFERENCES)/qp-target-gene/exp.md5 + echo '50b2269712b3738afb41892bed936c29 97_otus.fasta' >> $(DIR_REFERENCES)/qp-target-gene/exp.md5 + echo 'b7e76593bce82913af1cfb06edf15732 97_otus.tree' >> $(DIR_REFERENCES)/qp-target-gene/exp.md5 + wget 'ftp://ftp.microbio.me/greengenes_release/gg_13_8_otus/trees/97_otus.tree' -O $(DIR_REFERENCES)/qp-target-gene/97_otus.tree + wget 'ftp://ftp.microbio.me/greengenes_release/gg_13_8_otus/taxonomy/97_otu_taxonomy.txt' -O $(DIR_REFERENCES)/qp-target-gene/97_otu_taxonomy.txt + wget 'ftp://ftp.microbio.me/greengenes_release/gg_13_8_otus/rep_set/97_otus.fasta' -O $(DIR_REFERENCES)/qp-target-gene/97_otus.fasta + cd $(DIR_REFERENCES)/qp-target-gene/ && md5sum -c exp.md5 || rm -rf $(DIR_REFERENCES)/qp-target-gene/ + +.built_image_qp-target-gene: Images/qp-target-gene/qp-target-gene.dockerfile Images/qp-target-gene/start_qp-target-gene.sh $(DIR_REFERENCES)/qp-target-gene Images/qp-target-gene/requirements.txt + tmpdir=$(TMPDIR) $(MAKE) plugin + cp -r $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." -f 1` + +.built_image_qtp-visualization: Images/qtp-visualization/qtp-visualization.dockerfile Images/qtp-visualization/start_qtp-visualization.sh Images/qtp-visualization/requirements.txt + tmpdir=$(TMPDIR) $(MAKE) plugin + cp $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." 
-f 1` + +.built_image_qtp-diversity: Images/qtp-diversity/qtp-diversity.dockerfile Images/qtp-diversity/start_qtp-diversity.sh Images/qtp-diversity/requirements.txt + tmpdir=$(TMPDIR) $(MAKE) plugin + cp $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." -f 1` + +# download Silva and GG13.8 reference sets from bioconda fragment-insertion package, instead of storing these large files within the qp-deblur image ~1.3 GB +$(DIR_REFERENCES)/qp-deblur/reference-gg-raxml-bl.tre: + mkdir -p $(DIR_REFERENCES)/tmp_sepp $(DIR_REFERENCES)/qp-deblur + wget "https://anaconda.org/biocore/fragment-insertion/4.3.5/download/linux-64/fragment-insertion-4.3.5-py35_0.tar.bz2" -O $(DIR_REFERENCES)/tmp_sepp/fragment-insertion-4.3.5-py35_0.tar.bz2 + cd $(DIR_REFERENCES)/tmp_sepp && tar xjf fragment-insertion-4.3.5-py35_0.tar.bz2 + cp $(DIR_REFERENCES)/tmp_sepp/share/fragment-insertion/ref/* $(DIR_REFERENCES)/qp-deblur/ + rm -rf $(DIR_REFERENCES)/tmp_sepp/ + +.built_image_qp-deblur: Images/qp-deblur/qp-deblur.dockerfile Images/qp-deblur/start_qp-deblur.sh $(DIR_REFERENCES)/qp-deblur/reference-gg-raxml-bl.tre Images/qp-deblur/requirements.txt + tmpdir=$(TMPDIR) $(MAKE) plugin + cp $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." -f 1` + +.built_image_qp-qiime2: Images/qp-qiime2/qp-qiime2.dockerfile Images/qp-qiime2/start_qp-qiime2.sh + tmpdir=$(TMPDIR) $(MAKE) plugin + cp $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." 
-f 1` + +.built_image_qtp-job-output-folder: Images/qtp-job-output-folder/qtp-job-output-folder.dockerfile Images/qtp-job-output-folder/start_qtp-job-output-folder.sh Images/qtp-job-output-folder/requirements.txt + tmpdir=$(TMPDIR) $(MAKE) plugin + cp $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-`basename $< | cut -d "." -f 1` + touch .built_image_`basename $< | cut -d "." -f 1` + +.built_image_nginx: Images/nginx/nginx.dockerfile Images/nginx/start_nginx.sh Images/nginx/nginx_qiita.conf + cd Images/nginx && $(PODMAN_BIN) build . -f `basename $<` $(PODMAN_FLAGS) -t local-nginx_qiita + mkdir -p ./logs + touch ./logs/nginx_access.log ./logs/nginx_error.log + chmod a+rw ./logs/nginx_access.log ./logs/nginx_error.log + touch .built_image_nginx + +.built_image_qiita: Images/qiita/qiita.dockerfile Images/qiita/config_qiita_oidc.cfg Images/qiita/start_qiita.sh Images/qiita/start_qiita-initDB.sh Images/qiita/supervisor_foreground.conf Images/qiita/start_plugin.py Images/qiita/config_portal.cfg Images/qiita/drop_workflows.py + test -d src/qiita || git clone -b auth_oidc https://github.com/jlab/qiita.git src/qiita + # remove configuration and certificate files from upstream qiita repo + rm -rf src/qiita/qiita_core/support_files + cd Images/qiita && $(PODMAN_BIN) build . 
-f `basename $<` $(PODMAN_FLAGS) -t local-qiita + touch .built_image_qiita + +.built_image_plugin_collector: Images/plugin_collector/plugin_collector.dockerfile Images/plugin_collector/fix_test_db.py Images/plugin_collector/collect_configs.py Images/plugin_collector/start_plugin_collector.sh + tmpdir=$(TMPDIR) $(MAKE) plugin + cp $^ $(TMPDIR) + $(PODMAN_BIN) build $(TMPDIR)/ -f $(TMPDIR)/`basename $<` $(PODMAN_FLAGS) -t local-plugin_collector + touch .built_image_plugin_collector + +images: .built_image_qtp-biom .built_image_nginx .built_image_qiita .built_image_plugin_collector .built_image_qtp-sequencing .built_image_qp-target-gene .built_image_qtp-visualization .built_image_qtp-diversity .built_image_qp-deblur .built_image_qp-qiime2 .built_image_qtp-job-output-folder + +environments/qiita_db.env: environments/qiita_db.env.example + cp environments/qiita_db.env.example environments/qiita_db.env + sed -E -i "s/^POSTGRES_PASSWORD=.+$$/POSTGRES_PASSWORD=postgres/" environments/qiita_db.env + +environments/qiita.env: environments/qiita.env.example + cp environments/qiita.env.example environments/qiita.env + +config: environments/qiita_db.env environments/qiita.env + +clean: + rm .built_image_* + rm -rf $(DIR_REFERENCES) + rm -rf /var/lib/docker/volumes/tinqiita_server-certificates/_data/* + rm -rf /var/lib/docker/volumes/tinqiita_server-plugin-configs/_data/* + +all: config images diff --git a/README.md b/README.md new file mode 100644 index 0000000..ec93ee9 --- /dev/null +++ b/README.md @@ -0,0 +1,49 @@ +## Howto start-up qiita through docker compose +Note: this does currently **not** work with podman :-( So strictly stick to docker here. + +1. We assume you operate on your local computer, i.e. not within the BCF cluster as you won't have docker, on a Ubuntu/Mint like OS. You will need approx. 55 GB free disk space. +2.
Install necessary software (git, docker.io, postgresql-client): `sudo apt-get install git docker.io postgresql-client` +3. Install docker-compose: + - You need to register their apt repository first: see https://docs.docker.com/engine/install/ubuntu/#install-using-the-repository for details). In short: copy & paste the following command and execute in terminal: + ``` + # Add Docker's official GPG key: + sudo apt-get update + sudo apt-get install ca-certificates curl + sudo install -m 0755 -d /etc/apt/keyrings + sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc + sudo chmod a+r /etc/apt/keyrings/docker.asc + + # Add the repository to Apt sources: + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ + $(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + sudo apt-get update + ``` + - Install docker-compose through apt: `sudo apt-get update && sudo apt-get install docker-compose-plugin` +4. clone a local copy of this repository, branch "tinqiita": `git clone -b tinqiita https://github.com/jlab/qiita-keycloak.git tinqiita` +5. change into this new directory: `cd tinqiita` +6. create necessary images and other files (as this will create multiple docker images, it can take quite some time, approx. 30min?!): `sudo make all` +7. start the docker ensemble: `sudo docker compose up` +8. take your favorite browser and surf to `https://localhost:8383`. You probably get a warning due to incorrect SSL certificates like: + ![image](https://github.com/user-attachments/assets/58e978ab-c633-4197-b6f9-b0a62b8b671c) for Firefox. Press "Advanced..." and then "Accept the Risk and Continue" +9. You should now be able to see your living qiita. Log in as user `admin@foo.bar` (or `test@foo.bar`) and password `password`. + +That's it. Enjoy! 
+ +## For plugin developers +To integrate your shiny new plugin into this docker compose version of Qiita, I suggest you use one of the existing plugins as a template, e.g. "qp-deblur". +Remember to: + 1. **makefile**: create a make target for the docker image of your plugin, like https://github.com/jlab/qiita-keycloak/blob/c94955dc0909e5ad866d046e5eafc6459bc8efc1/Makefile#L65-L69 and add the target name to line https://github.com/jlab/qiita-keycloak/blob/c94955dc0909e5ad866d046e5eafc6459bc8efc1/Makefile#L104 + 2. **compose file**: copy and paste a "service" like here https://github.com/jlab/qiita-keycloak/blob/c94955dc0909e5ad866d046e5eafc6459bc8efc1/compose.yaml#L288-L306 and make your new "service" a dependency of the "plugin-collector" service here: https://github.com/jlab/qiita-keycloak/blob/c94955dc0909e5ad866d046e5eafc6459bc8efc1/compose.yaml#L371-L372 to also start-up this container with all others + let the plugin collector python script know about the existence of the new plugin by appending its name to the string here: https://github.com/jlab/qiita-keycloak/blob/c94955dc0909e5ad866d046e5eafc6459bc8efc1/compose.yaml#L378 + +### pro infos +- log files will be written to `tinqiita/logs` +- You can access the relevant containers by checking for their names with `sudo docker container ls` and then running `sudo docker exec -it <container_name> bash` +- keycloak service is **not** activated at the moment of writing, but should you want to work on that: + 1. Run `sudo docker compose up keycloak keycloakdb` + 2. Open `http://localhost:8080`, login admin pw admin + 3. Configure Qiita as a service, create a user. + 4. Edit `config_qiita_oidc.cfg` to fit your local Keycloak configuration, remove # from necessary oidc block, change SUPERSECRETSTRING.
+ + diff --git a/compose.yaml b/compose.yaml index e581619..53a4b4d 100644 --- a/compose.yaml +++ b/compose.yaml @@ -1,52 +1,375 @@ -version: '3' +name: tinqiita services: - keycloak-db: + qiita-db: image: postgres:15 - container_name: keycloak-db - hostname: keycloak-db - restart: unless-stopped + container_name: qiita-db + hostname: qiita-db + restart: no env_file: - - ./environments/db.env + - ./environments/qiita_db.env environment: - POSTGRES_DB=postgres - POSTGRES_USER=postgres - - KEYCLOAK_DB_NAME=keycloak - - KEYCLOAK_DB_USER=keycloak volumes: - - './environments/db-init.sh:/docker-entrypoint-initdb.d/db-init.sh' + - './environments/qiita-db-init.sh:/docker-entrypoint-initdb.d/qiita-db-init.sh' - 'postgres-data:/var/lib/postgresql/data' + - server-plugin-configs:/qiita_plugins + networks: + - qiita-net + ports: + - "15432:5432" - keycloak: - image: quay.io/keycloak/keycloak:24.0.2 - command: ['start'] + qiita-initialize-db: + image: local-qiita:latest + command: ['/start_qiita-initDB.sh'] + depends_on: + - qiita-db + env_file: + - './environments/qiita.env' + environment: + - QIITA_CONFIG_FP=/qiita_configurations/qiita_server.cfg + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + volumes: + - ./src/qiita:/qiita:U + - qiita-data:/qiita_data + - server-plugin-configs:/qiita_plugins + - ./logs:/logs + - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:r + - ./Images/qiita/config_portal.cfg:/qiita_configurations/config_portal.cfg:r + networks: + - qiita-net + + qiita: + image: local-qiita:latest + build: # image wird hier direkt gebaut + context: ./Images/qiita + dockerfile: Dockerfile + command: ['/start_qiita.sh'] # executes bash script inside the container + # entrypoint: /bin/bash + # stdin_open: true + # tty: true + # ports: + # - "21174:21174" # wihtout nginx + # - 127.0.0.1:8383:8383 #damit bur ich dran komme + restart: no + depends_on: + - qiita-worker + env_file: + - './environments/qiita.env' + environment: + #- 
QIITA_ROOTCA_CERT=/qiita_certificates/stefan_rootca.crt # does not seem to have effect if not set in config file + - QIITA_CONFIG_FP=/qiita_configurations/qiita_server.cfg + - PORT=21174 + - MASTER=--master + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + volumes: + - qiita-data:/qiita_data + - ./src/qiita:/qiita:U + - ./logs:/logs + - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:r + - ./Images/qiita/config_portal.cfg:/qiita_configurations/config_portal.cfg:r + - server-plugin-configs:/qiita_plugins + - ./references/qiita_server_certificates:/qiita_certificates + # - ./Images/qiita/start_qiita.sh:/qiita/start_qiita.sh + - test_tmp_dir:/tmp + networks: + - qiita-net ports: - - "8282:8282" # as the HAproxy of BCF if configured to forward requests here - restart: unless-stopped + - "21174:21174" + # deploy: + # replicas: 3 + qiita-worker: + image: local-qiita:latest + build: # image wird hier direkt gebaut + context: ./Images/qiita + dockerfile: Dockerfile + command: ['./start_qiita.sh'] # executes bash script inside the container + # entrypoint: /bin/bash + # stdin_open: true + # tty: true + # ports: + # - "21174:21174" # wihtout nginx + # - 127.0.0.1:8383:8383 #damit bur ich dran komme + restart: no depends_on: - - keycloak-db + redis: + condition: service_started + plugin-collector: + condition: service_completed_successfully env_file: - - './environments/keycloak.env' + - './environments/qiita.env' environment: - - KEYCLOAK_ADMIN=admin - - KC_DB=postgres - - KC_DB_URL_HOST=keycloak-db - - KC_DB_URL_PORT=5432 - - KC_DB_URL_DATABASE=keycloak - - KC_DB_USERNAME=keycloak - - KC_DB_SCHEMA=public - - KC_HTTP_HOST=0.0.0.0 - - KC_HTTP_ENABLED=false - - KC_HTTP_PORT=8282 - - KC_PROXY=edge - - KC_PROXY_HEADERS=forwarded - - KC_PROXY_ADDRESS_FORWARDING=true - - KC_HOSTNAME_URL=https://keycloak.jlab.bio - - KC_HOSTNAME_ADMIN_URL=https://keycloak.jlab.bio - - KC_LOG_LEVEL=WARN - #- KC_HOSTNAME_DEBUG=true - #- 
JAVA_OPTS_APPEND="-Djava.net.preferIPv4Stack=true" + #- QIITA_ROOTCA_CERT=/qiita_certificates/stefan_rootca.crt # does not seem to have effect if not set in config file + - QIITA_CONFIG_FP=/qiita_configurations/qiita_server.cfg + - PORT=21175 + - MASTER= + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + volumes: + - qiita-data:/qiita_data + - ./logs:/logs + - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:r + - ./Images/qiita/config_portal.cfg:/qiita_configurations/config_portal.cfg:r + - server-plugin-configs:/qiita_plugins + - ./references/qiita_server_certificates:/qiita_certificates + - ./src/qiita:/qiita:U + # - ./Images/qiita/start_qiita_worker.sh:/qiita/start_qiita_worker.sh + - test_tmp_dir:/tmp + networks: + - qiita-net + deploy: + replicas: 3 + redis: + image: redis:latest + restart: no + command: > + sh -c "redis-server --port 7777 && + redis-server --port 6379" + volumes: + - qiita-data:/qiita + - ./logs:/logs + networks: + - qiita-net + #~ keycloak: # from https://stackoverflow.com/questions/78071458/keycloak-docker-compose + #~ image: quay.io/keycloak/keycloak:24.0.2 + #~ container_name: keycloak + #~ environment: + #~ KC_DB: postgres + #~ KC_DB_URL: jdbc:postgresql://keycloakdb:5432/keycloak + #~ KC_DB_USERNAME: keycloak + #~ KC_DB_PASSWORD: password + + #~ KC_HOSTNAME: keycloak + #~ KC_HOSTNAME_PORT: 9999 + #~ KC_HOSTNAME_STRICT: false + #~ KC_HOSTNAME_STRICT_HTTPS: false + + #~ KC_LOG_LEVEL: info + #~ KC_METRICS_ENABLED: false + #~ KC_HEALTH_ENABLED: true + #~ KEYCLOAK_ADMIN: admin + #~ KEYCLOAK_ADMIN_PASSWORD: admin + #~ command: ["start-dev", "--http-port=9999"] + #~ depends_on: + #~ - keycloakdb + #~ ports: + #~ - 127.0.0.1:9999:9999 + #~ networks: + #~ - qiita-net + + #~ keycloakdb: + #~ image: postgres:15 + #~ volumes: + #~ - keycloak-postgres-data:/var/lib/postgresql/data + #~ environment: + #~ POSTGRES_DB: keycloak + #~ POSTGRES_USER: keycloak + #~ POSTGRES_PASSWORD: password + #~ networks: + #~ - qiita-net + + nginx: + 
image: local-nginx_qiita:latest + build: + context: ./Images/nginx + dockerfile: Dockerfile + ports: + - "8383:8383" + command: ['./start_nginx.sh'] + # stdin_open: true + # tty: true + restart: no + depends_on: + - qiita + volumes: + - qiita-data:/qiita_data + #- ./logs/nginx_access.log:/logs/nginx_access.log + #- ./logs/nginx_error.log:/logs/nginx_error.log + - ./logs:/logs + - ./Images/nginx/nginx_qiita.conf:/qiita_configuration/nginx_qiita.conf + - ./references/qiita_server_certificates:/qiita_certificates + networks: + - qiita-net + + qtp-biom: + image: local-qtp-biom:latest + command: ['./start_qtp-biom.sh'] + # network_mode: host + # stdin_open: true + # tty: true + restart: no + volumes: + - qiita-data:/qiita_data + - ./references/qiita_server_certificates:/qiita_server_certificates + - ./src/qtp-biom:/qtp-biom:U + - test_tmp_dir:/tmp + environment: + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + networks: + - qiita-net + + qtp-sequencing: + image: local-qtp-sequencing:latest + command: ['./start_qtp-sequencing.sh'] + # network_mode: host + # stdin_open: true + # tty: true + restart: no + volumes: + - qiita-data:/qiita_data + - ./references/qiita_server_certificates:/qiita_server_certificates + - test_tmp_dir:/tmp + environment: + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + networks: + - qiita-net + + qp-target-gene: + image: local-qp-target-gene:latest + command: ['./start_qp-target-gene.sh'] + # network_mode: host + # stdin_open: true + # tty: true + restart: no + volumes: + - qiita-data:/qiita_data + - ./references/qiita_server_certificates:/qiita_server_certificates + - test_tmp_dir:/tmp + - ./references/qp-target-gene:/databases/gg/13_8/rep_set + environment: + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + networks: + - qiita-net + + qtp-visualization: + image: local-qtp-visualization:latest + command: ['./start_qtp-visualization.sh'] + # network_mode: host + # stdin_open: true + # tty: true + restart: no + volumes: + - qiita-data:/qiita_data + - 
./references/qiita_server_certificates:/qiita_server_certificates + - test_tmp_dir:/tmp + environment: + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + networks: + - qiita-net + + qtp-diversity: + image: local-qtp-diversity:latest + command: ['./start_qtp-diversity.sh'] + # network_mode: host + # stdin_open: true + # tty: true + restart: no + volumes: + - qiita-data:/qiita_data + - ./references/qiita_server_certificates:/qiita_server_certificates + - test_tmp_dir:/tmp + environment: + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + networks: + - qiita-net + + qp-deblur: + image: local-qp-deblur:latest + command: ['./start_qp-deblur.sh'] + # network_mode: host + # stdin_open: true + # tty: true + restart: no + volumes: + - qiita-data:/qiita_data + - ./references/qiita_server_certificates:/qiita_server_certificates + - ./references/qp-deblur:/opt/conda/envs/deblur/share/fragment-insertion/ref + #- ./src/qtp-biom:/qtp-biom:U + - test_tmp_dir:/tmp + environment: + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + networks: + - qiita-net + + qp-qiime2: + image: local-qp-qiime2:latest + command: ['./start_qp-qiime2.sh'] + # network_mode: host + # stdin_open: true + # tty: true + restart: no + volumes: + - qiita-data:/qiita_data + - ./references/qiita_server_certificates:/qiita_server_certificates + - test_tmp_dir:/tmp + environment: + - QIITA_CLIENT_DEBUG_LEVEL=DEBUG + - TZ=Europe/Berlin # this is important to avoid a local timezone error! 
See https://forum.qiime2.org/t/qiime2-timezone-error/17410
+    networks:
+      - qiita-net
+
+  qtp-job-output-folder:
+    image: local-qtp-job-output-folder:latest
+    command: ['./start_qtp-job-output-folder.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no" # quoted: bare `no` is YAML-1.1 boolean false, Compose expects the string "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+
+  plugin-collector:
+    # prior to qiita (master and worker) start up, iterates through the QIITA_PLUGINS : separated list of plugin containers
+    # to compile all q*.conf files from plugin containers in the server-plugin-configs volume
+    image: local-plugin_collector
+    restart: "no"
+    networks:
+      - qiita-net
+    volumes:
+      - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:ro # ":ro" (read-only); ":r" is not a valid mount option
+      - server-plugin-configs:/qiita_plugins
+      - qiita-data:/qiita_data
+    depends_on:
+      qiita-initialize-db:
+        condition: service_completed_successfully
+      qtp-biom: # one of the plugins
+        condition: service_started
+      qtp-sequencing:
+        condition: service_started
+      qp-target-gene:
+        condition: service_started
+      qtp-visualization:
+        condition: service_started
+      qtp-diversity:
+        condition: service_started
+      qp-deblur:
+        condition: service_started
+      qp-qiime2:
+        condition: service_started
+      qtp-job-output-folder:
+        condition: service_started
+    environment:
+      - QIITA_PLUGINS="qtp-biom:qtp-sequencing:qp-target-gene:qtp-visualization:qtp-diversity:qp-deblur:qp-qiime2:qtp-job-output-folder:"
+      #- QIITA_PLUGINS="qp-target-gene:"
+    command: ['/start_plugin_collector.sh']
+
+networks:
+  qiita-net:
+    name: qiita-net
+    #external: true
+
 volumes:
   postgres-data:
-    name: keycloak-postgres-data
+  #~ keycloak-postgres-data:
+  #~   name: keycloak-postgres-data
+  qiita-data:
+  server-plugin-configs: # a shared directory on qiita server side (master and workers) to collect qiita plugin configuration files
+  test_tmp_dir: # many of plugin tests are written under the assumption that they are executed on same machine as qiita main and thus share /tmp - which is not the case in our docker compose scenario!
\ No newline at end of file
diff --git a/compose_github.yaml b/compose_github.yaml
new file mode 100644
index 0000000..443a6f9
--- /dev/null
+++ b/compose_github.yaml
@@ -0,0 +1,352 @@
+name: tinqiita
+
+services:
+  qiita-db:
+    image: postgres:15
+    container_name: qiita-db
+    hostname: qiita-db
+    restart: "no" # quoted: bare `no` is YAML-1.1 boolean false, Compose expects the string "no"
+    env_file:
+      - ./environments/qiita_db.env
+    environment:
+      - POSTGRES_DB=postgres
+      - POSTGRES_USER=postgres
+    volumes:
+      - './environments/qiita-db-init.sh:/docker-entrypoint-initdb.d/qiita-db-init.sh'
+      - 'postgres-data:/var/lib/postgresql/data'
+      - server-plugin-configs:/qiita_plugins
+    networks:
+      - qiita-net
+    ports:
+      - "15432:5432"
+    # healthcheck:
+    #   test: ["CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB"]
+    #   interval: 10s
+    #   timeout: 5s
+    #   retries: 5
+    #   start_period: 20s
+
+  qiita-initialize-db:
+    image: ghcr.io/jlab/qiita-keycloak/qiita:testcandidate
+    command: ['/start_qiita-initDB.sh']
+    depends_on:
+      - qiita-db
+    env_file:
+      - './environments/qiita.env'
+    environment:
+      - QIITA_CONFIG_FP=/qiita_configurations/qiita_server.cfg
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    volumes:
+      - qiita-data:/qiita_data
+      - server-plugin-configs:/qiita_plugins
+      - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:ro # ":ro"; ":r" is not a valid mount option
+      - ./Images/qiita/config_portal.cfg:/qiita_configurations/config_portal.cfg:ro
+      - qiita-logs:/logs
+    networks:
+      - qiita-net
+
+  qiita:
+    image: ghcr.io/jlab/qiita-keycloak/qiita:testcandidate
+    build: # the image is built directly here
+      context: ./Images/qiita
+      dockerfile: Dockerfile
+    command: ['/start_qiita.sh'] # executes bash script inside the container
+    # entrypoint: /bin/bash
+    # stdin_open: true
+    # tty: true
+    # ports:
+    #   - "21174:21174" # without nginx
+    #   - 127.0.0.1:8383:8383 # so that only I can reach it (localhost bind)
+    restart: "no"
+    depends_on:
+      - qiita-worker
+    env_file:
+      - './environments/qiita.env'
+    environment:
+      #- QIITA_ROOTCA_CERT=/qiita_certificates/stefan_rootca.crt # does not seem to have effect if not set in config file
+      - QIITA_CONFIG_FP=/qiita_configurations/qiita_server.cfg
+      - PORT=21174
+      - MASTER=--master
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    volumes:
+      - qiita-data:/qiita_data
+      - qiita-logs:/logs
+      - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:ro
+      - ./Images/qiita/config_portal.cfg:/qiita_configurations/config_portal.cfg:ro
+      - server-plugin-configs:/qiita_plugins
+      - ./references/qiita_server_certificates:/qiita_certificates
+      - test_tmp_dir:/tmp
+    networks:
+      - qiita-net
+    ports:
+      - "21174:21174"
+
+  qiita-worker:
+    image: ghcr.io/jlab/qiita-keycloak/qiita:testcandidate
+    build: # the image is built directly here
+      context: ./Images/qiita
+      dockerfile: Dockerfile
+    command: ['./start_qiita.sh'] # NOTE(review): master service uses absolute '/start_qiita.sh' - verify the relative path is intended
+    # entrypoint: /bin/bash
+    # stdin_open: true
+    # tty: true
+    # ports:
+    #   - "21174:21174" # without nginx
+    #   - 127.0.0.1:8383:8383 # so that only I can reach it (localhost bind)
+    restart: "no"
+    depends_on:
+      redis:
+        condition: service_started
+      plugin-collector:
+        condition: service_completed_successfully
+    env_file:
+      - './environments/qiita.env'
+    environment:
+      #- QIITA_ROOTCA_CERT=/qiita_certificates/stefan_rootca.crt # does not seem to have effect if not set in config file
+      - QIITA_CONFIG_FP=/qiita_configurations/qiita_server.cfg
+      - PORT=21175
+      - MASTER=
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    volumes:
+      - qiita-data:/qiita_data
+      - qiita-logs:/logs
+      - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:ro
+      - ./Images/qiita/config_portal.cfg:/qiita_configurations/config_portal.cfg:ro
+      - server-plugin-configs:/qiita_plugins
+      - ./references/qiita_server_certificates:/qiita_certificates
+      - test_tmp_dir:/tmp
+    networks:
+      - qiita-net
+    deploy:
+      replicas: 3
+
+  redis:
+    image: redis:latest
+    restart: "no"
+    environment:
+      - PORTSTATS=7777
+      - PORTREDBIOM=6379
+    command: > # first server must daemonize, otherwise "&&" waits forever and the second one never starts
+      sh -c "redis-server --port $$PORTSTATS --daemonize yes &&
+      redis-server --port $$PORTREDBIOM"
+    volumes:
+      - qiita-data:/qiita
+      - qiita-logs:/logs
+    networks:
+      - qiita-net
+    # healthcheck:
+    #   test: ["CMD-SHELL", "redis-cli -p $$PORTSTATS ping"]
+    #   interval: 10s
+    #   timeout: 5s
+    #   retries: 5
+    #   start_period: 20s
+
+  nginx:
+    image: ghcr.io/jlab/qiita-keycloak/nginx:testcandidate
+    build:
+      context: ./Images/nginx
+      dockerfile: Dockerfile
+    ports:
+      - "8383:8383"
+    command: ['./start_nginx.sh']
+    restart: "no"
+    depends_on:
+      - qiita
+    volumes:
+      - qiita-data:/qiita_data
+      - qiita-logs:/logs
+      - ./Images/nginx/nginx_qiita.conf:/qiita_configuration/nginx_qiita.conf
+      - ./references/qiita_server_certificates:/qiita_certificates
+    networks:
+      - qiita-net
+    # healthcheck:
+    #   # looks weird as wget is the busybox version and lacks certificates
+    #   # we thus just test if anything answers on the port, otherwise nginx
+    #   # returns a "Connection refused"
+    #   test: ["CMD-SHELL", "wget https://tinqiita-nginx-1:8383 2>&1 | grep refused -c || true"]
+    #   interval: 10s
+    #   timeout: 5s
+    #   retries: 5
+    #   start_period: 10s
+
+  qtp-biom:
+    image: ghcr.io/jlab/qiita-keycloak/qtp-biom:testcandidate
+    command: ['./start_qtp-biom.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+
+  qtp-sequencing:
+    image: ghcr.io/jlab/qiita-keycloak/qtp-sequencing:testcandidate
+    command: ['./start_qtp-sequencing.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+    # healthcheck:
+    #   test: ["CMD-SHELL", "wget http://localhost:5000/config -O /dev/null || exit 1"]
+    #   interval: 10s
+    #   timeout: 5s
+    #   retries: 2
+    #   start_period: 3s
+
+  qp-target-gene:
+    image: ghcr.io/jlab/qiita-keycloak/qp-target-gene:testcandidate
+    command: ['./start_qp-target-gene.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+      - ./references/qp-target-gene:/databases/gg/13_8/rep_set
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+
+  qtp-visualization:
+    image: ghcr.io/jlab/qiita-keycloak/qtp-visualization:testcandidate
+    command: ['./start_qtp-visualization.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+
+  qtp-diversity:
+    image: ghcr.io/jlab/qiita-keycloak/qtp-diversity:testcandidate
+    command: ['./start_qtp-diversity.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+
+  qp-deblur:
+    image: ghcr.io/jlab/qiita-keycloak/qp-deblur:testcandidate
+    command: ['./start_qp-deblur.sh']
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - ./references/qp-deblur:/opt/conda/envs/deblur/share/fragment-insertion/ref
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+
+  qp-qiime2:
+    image: ghcr.io/jlab/qiita-keycloak/qp-qiime2:testcandidate
+    command: ['./start_qp-qiime2.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+      - TZ=Europe/Berlin # this is important to avoid a local timezone error! See https://forum.qiime2.org/t/qiime2-timezone-error/17410
+    networks:
+      - qiita-net
+
+  qtp-job-output-folder:
+    image: ghcr.io/jlab/qiita-keycloak/qtp-job-output-folder:testcandidate
+    command: ['./start_qtp-job-output-folder.sh']
+    # network_mode: host
+    # stdin_open: true
+    # tty: true
+    restart: "no"
+    volumes:
+      - qiita-data:/qiita_data
+      - ./references/qiita_server_certificates:/qiita_server_certificates
+      - test_tmp_dir:/tmp
+    environment:
+      - QIITA_CLIENT_DEBUG_LEVEL=DEBUG
+    networks:
+      - qiita-net
+
+  plugin-collector:
+    # prior to qiita (master and worker) start up, iterates through the QIITA_PLUGINS : separated list of plugin containers
+    # to compile all q*.conf files from plugin containers in the server-plugin-configs volume
+    image: ghcr.io/jlab/qiita-keycloak/plugin_collector:testcandidate
+    restart: "no"
+    networks:
+      - qiita-net
+    volumes:
+      - ./Images/qiita/config_qiita_oidc.cfg:/qiita_configurations/qiita_server.cfg:ro
+      - server-plugin-configs:/qiita_plugins
+      - qiita-data:/qiita_data
+    depends_on:
+      qiita-initialize-db:
+        condition: service_completed_successfully
+      MATRIXPLUGIN:
+        condition: service_started
+      # qtp-biom: # one of the plugins
+      #   condition: service_started
+      # qtp-sequencing:
+      #   condition: service_started
+      # qp-target-gene:
+      #   condition: service_started
+      # qtp-visualization:
+      #   condition: service_started
+      # qtp-diversity:
+      #   condition: service_started
+      # qp-deblur:
+      #   condition: service_started
+      # qp-qiime2:
+      #   condition: service_started
+      # qtp-job-output-folder:
+      #   condition: service_started
+    environment:
+      - QIITA_PLUGINS="MATRIXPLUGIN:"
+    command: ['/start_plugin_collector.sh']
+
+networks:
+  qiita-net:
+    name: qiita-net
+
+volumes:
+  postgres-data:
+  qiita-data:
+  server-plugin-configs: # a shared directory on qiita server side (master and workers) to collect qiita plugin configuration files
+  qiita-logs:
+  test_tmp_dir: # many of plugin tests are written under the assumption that they are executed on same machine as qiita main and thus share /tmp - which is not the case in our docker compose scenario!
\ No newline at end of file
diff --git a/environments/qiita-db-init.sh b/environments/qiita-db-init.sh
new file mode 100644
index 0000000..3534aa4
--- /dev/null
+++ b/environments/qiita-db-init.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -e
+set -u
+
+psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" "$POSTGRES_DB" <<-EOSQL
+	ALTER DATABASE "$POSTGRES_DB" OWNER TO "$POSTGRES_USER";
+EOSQL
diff --git a/environments/qiita.env.example b/environments/qiita.env.example
new file mode 100644
index 0000000..74ee17a
--- /dev/null
+++ b/environments/qiita.env.example
@@ -0,0 +1,2 @@
+# This is a place for additional configurations to your Qiita installation
+# which are not addressed in the Qiita image itself
\ No newline at end of file
diff --git a/environments/qiita_db.env.example b/environments/qiita_db.env.example
new file mode 100644
index 0000000..f8ac7cf
--- /dev/null
+++ b/environments/qiita_db.env.example
@@ -0,0 +1 @@
+POSTGRES_PASSWORD=supersecretpassword
\ No newline at end of file
diff --git a/keycloak_compose.yaml b/keycloak_compose.yaml
new file mode 100644
index 0000000..e581619
--- /dev/null
+++ b/keycloak_compose.yaml
@@ -0,0 +1,52 @@
+version: '3' # NOTE(review): top-level `version` is obsolete in Compose v2 and ignored
+
+services:
+  keycloak-db:
+    image: postgres:15
+    container_name: keycloak-db
+    hostname: keycloak-db
+    restart: unless-stopped
+    env_file:
+      - ./environments/db.env
+    environment:
+      - POSTGRES_DB=postgres
+      - POSTGRES_USER=postgres
+      - KEYCLOAK_DB_NAME=keycloak
+      - KEYCLOAK_DB_USER=keycloak
+    volumes:
+      - './environments/db-init.sh:/docker-entrypoint-initdb.d/db-init.sh'
+      - 'postgres-data:/var/lib/postgresql/data'
+
+  keycloak:
+    image: quay.io/keycloak/keycloak:24.0.2
+    command: ['start']
+    ports:
+      - "8282:8282" # as the HAproxy of BCF is configured to forward requests here
+    restart: unless-stopped
+    depends_on:
+      - keycloak-db
+    env_file:
+      - './environments/keycloak.env'
+    environment:
+      - KEYCLOAK_ADMIN=admin
+      - KC_DB=postgres
+      - KC_DB_URL_HOST=keycloak-db
+      - KC_DB_URL_PORT=5432
+      - KC_DB_URL_DATABASE=keycloak
+      - KC_DB_USERNAME=keycloak
+      - KC_DB_SCHEMA=public
+      - KC_HTTP_HOST=0.0.0.0
+      - KC_HTTP_ENABLED=false # NOTE(review): with KC_PROXY=edge the proxy speaks plain HTTP to port 8282 - verify this should not be "true"
+      - KC_HTTP_PORT=8282
+      - KC_PROXY=edge
+      - KC_PROXY_HEADERS=forwarded
+      - KC_PROXY_ADDRESS_FORWARDING=true
+      - KC_HOSTNAME_URL=https://keycloak.jlab.bio
+      - KC_HOSTNAME_ADMIN_URL=https://keycloak.jlab.bio
+      - KC_LOG_LEVEL=WARN
+      #- KC_HOSTNAME_DEBUG=true
+      #- JAVA_OPTS_APPEND="-Djava.net.preferIPv4Stack=true"
+
+volumes:
+  postgres-data:
+    name: keycloak-postgres-data