From 00521207f01338eef7b7f660577c5f00eab17d01 Mon Sep 17 00:00:00 2001 From: Kuan Fan Date: Fri, 21 Mar 2025 11:22:27 -0700 Subject: [PATCH 1/4] update to 3.0.1 --- .github/workflows/dev-ci.yaml | 11 +++++------ .github/workflows/dev-release.yaml | 6 +++--- .github/workflows/prod-ci.yaml | 11 +++++------ .github/workflows/test-ci.yaml | 10 ++++------ .github/workflows/tfrs-release.yaml | 4 ++-- .pipeline/lib/config.js | 4 ++-- charts/tfrs-apps/Chart.yaml | 2 +- charts/tfrs-apps/charts/tfrs-backend/Chart.yaml | 2 +- .../charts/tfrs-backend/templates/_helpers.tpl | 2 +- charts/tfrs-apps/charts/tfrs-celery/Chart.yaml | 4 +--- .../charts/tfrs-celery/templates/_helpers.tpl | 2 +- charts/tfrs-apps/charts/tfrs-frontend/Chart.yaml | 2 +- .../charts/tfrs-frontend/templates/_helpers.tpl | 2 +- .../charts/tfrs-notification-server/Chart.yaml | 2 +- .../tfrs-notification-server/templates/_helpers.tpl | 2 +- .../tfrs-scan-coordinator/templates/_helpers.tpl | 2 +- charts/tfrs-apps/charts/tfrs-scan-handler/Chart.yaml | 2 +- frontend/package.json | 2 +- 18 files changed, 33 insertions(+), 39 deletions(-) diff --git a/.github/workflows/dev-ci.yaml b/.github/workflows/dev-ci.yaml index a20f30f79..c39462b30 100644 --- a/.github/workflows/dev-ci.yaml +++ b/.github/workflows/dev-ci.yaml @@ -1,8 +1,8 @@ -name: TFRS New Pipeline Dev release-3.0.0 +name: TFRS New Pipeline Dev release-3.0.1 on: push: - branches: [release-3.0.0] + branches: [release-3.0.1] paths: - frontend/** - backend/** @@ -13,14 +13,13 @@ env: GIT_URL: https://github.com/bcgov/tfrs.git TOOLS_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools DEV_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev - GIT_REF: "release-3.0.0" + GIT_REF: "release-3.0.1" concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true jobs: - install-oc: runs-on: ubuntu-latest outputs: @@ -33,7 +32,7 @@ jobs: id: cache uses: actions/cache@v4.2.0 with: - path: /usr/local/bin/oc # Path where the `oc` binary will be installed + path: /usr/local/bin/oc # Path where the `oc` binary will be installed key: oc-cli-${{ runner.os }} - name: Install OpenShift CLI (if not cached) @@ -58,7 +57,7 @@ jobs: steps: - id: get-version run: | - echo "VERSION=3.0.0" >> $GITHUB_OUTPUT + echo "VERSION=3.0.1" >> $GITHUB_OUTPUT build-backend: name: Build tfrs Backend diff --git a/.github/workflows/dev-release.yaml b/.github/workflows/dev-release.yaml index 9c04cc09b..2a0848832 100644 --- a/.github/workflows/dev-release.yaml +++ b/.github/workflows/dev-release.yaml @@ -1,11 +1,11 @@ ## For each release, the value of name, branches, RELEASE_NAME and PR_NUMBER need to be adjusted accordingly ## For each release, update lib/config.js: version and releaseBranch -name: TFRS Dev release-3.0.0 +name: TFRS Dev release-3.0.1 on: push: - branches: [release-3.0.0] + branches: [release-3.0.1] paths: - frontend/** - backend/** @@ -16,7 +16,7 @@ env: ## The pull request number of the Tracking pull request to merge the release branch to main ## Also remember to update the version in .pipeline/lib/config.js PR_NUMBER: 2956 - RELEASE_NAME: release-3.0.0 + RELEASE_NAME: release-3.0.1 concurrency: group: ${{ github.workflow }}-${{ github.ref }} diff --git a/.github/workflows/prod-ci.yaml b/.github/workflows/prod-ci.yaml index 1c2e2a4be..44313b3f6 100644 --- a/.github/workflows/prod-ci.yaml +++ b/.github/workflows/prod-ci.yaml @@ -1,4 +1,4 @@ -name: TFRS New Pipeline Prod release-3.0.0 +name: TFRS New Pipeline Prod release-3.0.1 on: workflow_dispatch: @@ -13,7 +13,6 @@ 
concurrency: cancel-in-progress: true jobs: - install-oc: runs-on: ubuntu-latest outputs: @@ -23,7 +22,7 @@ jobs: id: cache uses: actions/cache@v4.2.0 with: - path: /usr/local/bin/oc # Path where the `oc` binary will be installed + path: /usr/local/bin/oc # Path where the `oc` binary will be installed key: oc-cli-${{ runner.os }} - name: Install OpenShift CLI (if not cached) @@ -38,7 +37,7 @@ jobs: run: oc version --client get-build-suffix: - name: Find Test deployment build suffix + name: Find Test deployment build suffix runs-on: ubuntu-latest needs: [install-oc] @@ -85,14 +84,14 @@ jobs: - id: get-current-time run: | echo "CURRENT_TIME=$(TZ='America/Vancouver' date '+%Y-%m-%d %H:%M:%S %Z')" >> $GITHUB_OUTPUT - + - name: Ask for approval for TFRS Prod deployment uses: trstringer/manual-approval@v1.6.0 with: secret: ${{ github.TOKEN }} approvers: AlexZorkin,kuanfandevops,prv-proton,JulianForeman,kevin-hashimoto,dhaselhan minimum-approvals: 2 - issue-title: "TFRS ${{ env.BUILD_SUFFIX }} Prod Deployment at ${{ steps.get-current-time.outputs.CURRENT_TIME }}" + issue-title: "TFRS ${{ env.BUILD_SUFFIX }} Prod Deployment at ${{ steps.get-current-time.outputs.CURRENT_TIME }}" - name: Checkout Manifest repository uses: actions/checkout@v4.1.1 diff --git a/.github/workflows/test-ci.yaml b/.github/workflows/test-ci.yaml index 59dfc2abf..f6394e89d 100644 --- a/.github/workflows/test-ci.yaml +++ b/.github/workflows/test-ci.yaml @@ -1,4 +1,4 @@ -name: TFRS New Pipeline Test release-3.0.0 +name: TFRS New Pipeline Test release-3.0.1 on: workflow_dispatch: @@ -13,18 +13,16 @@ concurrency: cancel-in-progress: true jobs: - install-oc: runs-on: ubuntu-latest outputs: cache-hit: ${{ steps.cache.outputs.cache-hit }} steps: - - name: Set up cache for OpenShift CLI id: cache uses: actions/cache@v4.2.0 with: - path: /usr/local/bin/oc # Path where the `oc` binary will be installed + path: /usr/local/bin/oc # Path where the `oc` binary will be installed key: oc-cli-${{ runner.os }} - name: Install OpenShift CLI (if not cached) @@ -39,7 +37,7 @@ jobs: run: oc version --client get-build-suffix: - name: Find Dev deployment build suffix + name: Find Dev deployment build suffix needs: install-oc runs-on: ubuntu-latest @@ -96,7 +94,7 @@ jobs: secret: ${{ github.TOKEN }} approvers: AlexZorkin,kuanfandevops,prv-proton,JulianForeman,kevin-hashimoto,dhaselhan minimum-approvals: 1 - issue-title: "TFRS ${{ env.BUILD_SUFFIX }} Test Deployment at ${{ steps.get-current-time.outputs.CURRENT_TIME }}" + issue-title: "TFRS ${{ env.BUILD_SUFFIX }} Test Deployment at ${{ steps.get-current-time.outputs.CURRENT_TIME }}" - name: Checkout Manifest repository uses: actions/checkout@v4.1.1 diff --git a/.github/workflows/tfrs-release.yaml b/.github/workflows/tfrs-release.yaml index 3ea44dd1d..077f31b96 100644 --- a/.github/workflows/tfrs-release.yaml +++ b/.github/workflows/tfrs-release.yaml @@ -1,7 +1,7 @@ ## For each release, the value of name, branches, RELEASE_NAME and PR_NUMBER need to be adjusted accordingly ## For each release, update lib/config.js: version and releaseBranch -name: TFRS release-3.0.0 +name: TFRS release-3.0.1 on: workflow_dispatch: @@ -11,7 +11,7 @@ env: ## The pull request number of the Tracking pull request to merge the release branch to main ## Also remember to update the version in .pipeline/lib/config.js PR_NUMBER: 2956 - RELEASE_NAME: release-3.0.0 + RELEASE_NAME: release-3.0.1 concurrency: group: ${{ github.workflow }}-${{ github.ref }} diff --git a/.pipeline/lib/config.js b/.pipeline/lib/config.js index 
602b1aaa8..53d81a23c 100644 --- a/.pipeline/lib/config.js +++ b/.pipeline/lib/config.js @@ -1,7 +1,7 @@ "use strict"; const options = require("@bcgov/pipeline-cli").Util.parseArguments(); const changeId = options.pr; //aka pull-request -const version = "3.0.0"; +const version = "3.0.1"; const name = "tfrs"; const ocpName = "apps.silver.devops"; @@ -20,7 +20,7 @@ const phases = { instance: `${name}-build-${changeId}`, version: `${version}-${changeId}`, tag: `build-${version}-${changeId}`, - releaseBranch: "release-3.0.0", + releaseBranch: "release-3.0.1", }, dev: { namespace: "0ab226-dev", diff --git a/charts/tfrs-apps/Chart.yaml b/charts/tfrs-apps/Chart.yaml index 0b9a41aee..3d4d85457 100644 --- a/charts/tfrs-apps/Chart.yaml +++ b/charts/tfrs-apps/Chart.yaml @@ -21,4 +21,4 @@ version: 1.0.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "3.0.0" +appVersion: "3.0.1" diff --git a/charts/tfrs-apps/charts/tfrs-backend/Chart.yaml b/charts/tfrs-apps/charts/tfrs-backend/Chart.yaml index e02f42332..076aad047 100644 --- a/charts/tfrs-apps/charts/tfrs-backend/Chart.yaml +++ b/charts/tfrs-apps/charts/tfrs-backend/Chart.yaml @@ -21,4 +21,4 @@ version: 0.2.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "3.0.0" +appVersion: "3.0.1" diff --git a/charts/tfrs-apps/charts/tfrs-backend/templates/_helpers.tpl b/charts/tfrs-apps/charts/tfrs-backend/templates/_helpers.tpl index 1b1c0be19..7bc1b2f3c 100644 --- a/charts/tfrs-apps/charts/tfrs-backend/templates/_helpers.tpl +++ b/charts/tfrs-apps/charts/tfrs-backend/templates/_helpers.tpl @@ -5,7 +5,7 @@ The labels for all components: helm.sh/chart: tfrs-backend-1.0.0 app.kubernetes.io/name: tfrs-backend app.kubernetes.io/instance: tfrs-backend-dev or tfrs-backend-dev-jan - app.kubernetes.io/version: "3.0.0" + app.kubernetes.io/version: "3.0.1" app.kubernetes.io/managed-by: Helm The selector lables: diff --git a/charts/tfrs-apps/charts/tfrs-celery/Chart.yaml b/charts/tfrs-apps/charts/tfrs-celery/Chart.yaml index 754644731..ea6cd1d94 100644 --- a/charts/tfrs-apps/charts/tfrs-celery/Chart.yaml +++ b/charts/tfrs-apps/charts/tfrs-celery/Chart.yaml @@ -21,6 +21,4 @@ version: 0.1.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: "3.0.0" - - +appVersion: "3.0.1" diff --git a/charts/tfrs-apps/charts/tfrs-celery/templates/_helpers.tpl b/charts/tfrs-apps/charts/tfrs-celery/templates/_helpers.tpl index 95d14c7a9..9c2135b0f 100644 --- a/charts/tfrs-apps/charts/tfrs-celery/templates/_helpers.tpl +++ b/charts/tfrs-apps/charts/tfrs-celery/templates/_helpers.tpl @@ -5,7 +5,7 @@ The labels for all components: helm.sh/chart: tfrs-celery-1.0.0 app.kubernetes.io/name: tfrs-celery app.kubernetes.io/instance: tfrs-celery-dev or tfrs-celery-dev-jan - app.kubernetes.io/version: "3.0.0" + app.kubernetes.io/version: "3.0.1" app.kubernetes.io/managed-by: Helm The selector lables: diff --git a/charts/tfrs-apps/charts/tfrs-frontend/Chart.yaml b/charts/tfrs-apps/charts/tfrs-frontend/Chart.yaml index 2321fa7c9..4a0af9afb 100644 --- a/charts/tfrs-apps/charts/tfrs-frontend/Chart.yaml +++ b/charts/tfrs-apps/charts/tfrs-frontend/Chart.yaml @@ -21,4 +21,4 @@ version: 0.2.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "3.0.0" +appVersion: "3.0.1" diff --git a/charts/tfrs-apps/charts/tfrs-frontend/templates/_helpers.tpl b/charts/tfrs-apps/charts/tfrs-frontend/templates/_helpers.tpl index a34119769..c4eef93cd 100644 --- a/charts/tfrs-apps/charts/tfrs-frontend/templates/_helpers.tpl +++ b/charts/tfrs-apps/charts/tfrs-frontend/templates/_helpers.tpl @@ -5,7 +5,7 @@ The labels for all components: helm.sh/chart: tfrs-frontend-1.0.0 app.kubernetes.io/name: tfrs-frontend app.kubernetes.io/instance: tfrs-frontend-dev or tfrs-frontend-dev-jan - app.kubernetes.io/version: "3.0.0" + app.kubernetes.io/version: "3.0.1" app.kubernetes.io/managed-by: Helm The selector lables: diff --git a/charts/tfrs-apps/charts/tfrs-notification-server/Chart.yaml b/charts/tfrs-apps/charts/tfrs-notification-server/Chart.yaml index a36160a8b..1b31db74c 100644 --- a/charts/tfrs-apps/charts/tfrs-notification-server/Chart.yaml +++ b/charts/tfrs-apps/charts/tfrs-notification-server/Chart.yaml @@ -21,4 +21,4 @@ version: 0.1.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: "3.0.0" +appVersion: "3.0.1" diff --git a/charts/tfrs-apps/charts/tfrs-notification-server/templates/_helpers.tpl b/charts/tfrs-apps/charts/tfrs-notification-server/templates/_helpers.tpl index 49f5f2f47..94fc94080 100644 --- a/charts/tfrs-apps/charts/tfrs-notification-server/templates/_helpers.tpl +++ b/charts/tfrs-apps/charts/tfrs-notification-server/templates/_helpers.tpl @@ -6,7 +6,7 @@ The labels for all components: helm.sh/chart: tfrs-backend-1.0.0 app.kubernetes.io/name: tfrs-backend app.kubernetes.io/instance: tfrs-backend-dev or tfrs-backend-dev-jan - app.kubernetes.io/version: "3.0.0" + app.kubernetes.io/version: "3.0.1" app.kubernetes.io/managed-by: Helm The selector lables: diff --git a/charts/tfrs-apps/charts/tfrs-scan-coordinator/templates/_helpers.tpl b/charts/tfrs-apps/charts/tfrs-scan-coordinator/templates/_helpers.tpl index 3a11fdba7..23ffba4ed 100644 --- a/charts/tfrs-apps/charts/tfrs-scan-coordinator/templates/_helpers.tpl +++ b/charts/tfrs-apps/charts/tfrs-scan-coordinator/templates/_helpers.tpl @@ -5,7 +5,7 @@ The labels for all components: helm.sh/chart: tfrs-scan-coordinator-1.0.0 app.kubernetes.io/name: tfrs-scan-coordinator app.kubernetes.io/instance: tfrs-scan-coordinator-dev or tfrs-scan-coordinator-dev-jan - app.kubernetes.io/version: "3.0.0" + app.kubernetes.io/version: "3.0.1" app.kubernetes.io/managed-by: Helm The selector lables: diff --git a/charts/tfrs-apps/charts/tfrs-scan-handler/Chart.yaml b/charts/tfrs-apps/charts/tfrs-scan-handler/Chart.yaml index a7145fffe..26b5115c7 100644 --- a/charts/tfrs-apps/charts/tfrs-scan-handler/Chart.yaml +++ b/charts/tfrs-apps/charts/tfrs-scan-handler/Chart.yaml @@ -21,4 +21,4 @@ version: 0.1.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: "3.0.0" +appVersion: "3.0.1" diff --git a/frontend/package.json b/frontend/package.json index d0399930a..241842751 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "tfrs", - "version": "3.0.0", + "version": "3.0.1", "dependencies": { "@babel/eslint-parser": "^7.19.1", "@babel/plugin-proposal-object-rest-spread": "^7.20.7", From e9c9bd1697460696bfb6921ab359340fc9d3fc8c Mon Sep 17 00:00:00 2001 From: Kuan Fan Date: Fri, 21 Mar 2025 11:24:12 -0700 Subject: [PATCH 2/4] remove .pipeline --- .pipeline/.nvmrc | 1 - .pipeline/build.js | 5 - .pipeline/clean-knps.js | 5 - .pipeline/clean.js | 5 - .pipeline/deploy-db.js | 5 - .pipeline/deploy-knps.js | 5 - .pipeline/deploy-schemaspy.js | 5 - .pipeline/deploy.js | 5 - .pipeline/lib/build.js | 92 ---------- .pipeline/lib/clean-knps.js | 46 ----- .pipeline/lib/clean.js | 136 --------------- .pipeline/lib/config.js | 267 ------------------------------ .pipeline/lib/deploy-db.js | 76 --------- .pipeline/lib/deploy-knps.js | 34 ---- .pipeline/lib/deploy-schemaspy.js | 43 ----- .pipeline/lib/deploy.js | 253 ---------------------------- .pipeline/lib/keycloak.js | 146 ---------------- .pipeline/npmw | 12 -- .pipeline/package.json | 31 ---- 19 files changed, 1172 deletions(-) delete mode 100644 .pipeline/.nvmrc delete mode 100755 .pipeline/build.js delete mode 100755 .pipeline/clean-knps.js delete mode 100755 .pipeline/clean.js delete mode 100755 .pipeline/deploy-db.js delete mode 100755 .pipeline/deploy-knps.js delete mode 100755 .pipeline/deploy-schemaspy.js delete mode 100755 .pipeline/deploy.js delete mode 100755 .pipeline/lib/build.js delete mode 100755 .pipeline/lib/clean-knps.js delete mode 100755 .pipeline/lib/clean.js delete mode 100644 .pipeline/lib/config.js delete mode 100644 .pipeline/lib/deploy-db.js delete mode 100755 .pipeline/lib/deploy-knps.js delete mode 100755 .pipeline/lib/deploy-schemaspy.js delete mode 100755 .pipeline/lib/deploy.js delete mode 100644 .pipeline/lib/keycloak.js delete mode 100755 .pipeline/npmw delete mode 100644 .pipeline/package.json diff --git a/.pipeline/.nvmrc b/.pipeline/.nvmrc deleted file mode 100644 index 6b12bc745..000000000 --- a/.pipeline/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -v10.15.2 \ No newline at end of file diff --git a/.pipeline/build.js b/.pipeline/build.js deleted file mode 100755 index 3ac899f86..000000000 --- a/.pipeline/build.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; -const task = require('./lib/build.js') -const settings = require('./lib/config.js') - -task(Object.assign(settings, { phase: 'build'})) diff --git a/.pipeline/clean-knps.js b/.pipeline/clean-knps.js deleted file mode 100755 index 09073f139..000000000 --- a/.pipeline/clean-knps.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; -const settings = require('./lib/config.js') -const task = require('./lib/clean-knps.js') - -task(Object.assign(settings, { phase: settings.options.env})); diff --git a/.pipeline/clean.js b/.pipeline/clean.js deleted file mode 100755 index 42231d7ff..000000000 --- a/.pipeline/clean.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; -const settings = require('./lib/config.js') -const task = require('./lib/clean.js') - -task(Object.assign(settings, { phase: settings.options.env})); diff --git a/.pipeline/deploy-db.js b/.pipeline/deploy-db.js deleted file mode 100755 index 99bc344f0..000000000 --- a/.pipeline/deploy-db.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; -const settings = require('./lib/config.js') -const task = require('./lib/deploy-db.js') - 
-task(Object.assign(settings, { phase: settings.options.env})); diff --git a/.pipeline/deploy-knps.js b/.pipeline/deploy-knps.js deleted file mode 100755 index a32f77476..000000000 --- a/.pipeline/deploy-knps.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; -const settings = require('./lib/config.js') -const task = require('./lib/deploy-knps.js') - -task(Object.assign(settings, { phase: settings.options.env})); diff --git a/.pipeline/deploy-schemaspy.js b/.pipeline/deploy-schemaspy.js deleted file mode 100755 index ae5dbf57b..000000000 --- a/.pipeline/deploy-schemaspy.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; -const settings = require('./lib/config.js') -const task = require('./lib/deploy-schemaspy.js') - -task(Object.assign(settings, { phase: settings.options.env})); diff --git a/.pipeline/deploy.js b/.pipeline/deploy.js deleted file mode 100755 index 595509459..000000000 --- a/.pipeline/deploy.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; -const settings = require('./lib/config.js') -const task = require('./lib/deploy.js') - -task(Object.assign(settings, { phase: settings.options.env})); diff --git a/.pipeline/lib/build.js b/.pipeline/lib/build.js deleted file mode 100755 index d40086530..000000000 --- a/.pipeline/lib/build.js +++ /dev/null @@ -1,92 +0,0 @@ -"use strict"; -const { OpenShiftClientX } = require("@bcgov/pipeline-cli"); -const path = require("path"); - -module.exports = settings => { - const phases = settings.phases; - const options = settings.options; - const oc = new OpenShiftClientX(Object.assign({ namespace: phases.build.namespace }, options)); - const phase = "build"; - let objects = []; - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, "../../openshift-v4")); - - // The building of your cool app goes here ▼▼▼ - // build frontend - console.log( oc.git.http_url); - console.log( oc.git.ref); - - //build backend - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/backend/backend-bc.yaml`, { - 'param':{ - 'NAME': 'tfrs', - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'GIT_URL': oc.git.http_url, - 'GIT_REF': oc.git.ref - } - })) - - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/frontend/frontend-bc-docker.yaml`, { - 'param':{ - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'GIT_URL': oc.git.http_url, - 'GIT_REF': oc.git.ref - } - })) - - //build celery - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/celery/celery-bc.yaml`, { - 'param':{ - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'GIT_URL': oc.git.http_url, - 'RELEASE_BRANCH': phases[phase].releaseBranch - } -})) - -//build notification server - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/notification/notification-server-bc.yaml`, { - 'param':{ - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'GIT_URL': oc.git.http_url, - 'GIT_REF': oc.git.ref - } -})) - - -//build scan coordinator server - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/scan-coordinator/scan-coordinator-bc.yaml`, { - 'param':{ - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'GIT_URL': oc.git.http_url, - 'GIT_REF': oc.git.ref - } -})) - - -//build scan handler server -objects = 
objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/scan-handler/scan-handler-bc.yaml`, { - 'param':{ - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'RELEASE_BRANCH': phases[phase].releaseBranch - } -})) - - oc.applyRecommendedLabels( - objects, - phases[phase].name, - phase, - phases[phase].changeId, - phases[phase].instance, - ); - oc.applyAndBuild(objects); -}; diff --git a/.pipeline/lib/clean-knps.js b/.pipeline/lib/clean-knps.js deleted file mode 100755 index 057239984..000000000 --- a/.pipeline/lib/clean-knps.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -const { OpenShiftClientX } = require("@bcgov/pipeline-cli"); -const KeyCloakClient = require('./keycloak'); - -const getTargetPhases = (env, phases) => { - let target_phase = []; - for (const phase in phases) { - if (env.match(/^(all|transient)$/) && phases[phase].transient) { - target_phase.push(phase); - } else if (env === phase) { - target_phase.push(phase); - break; - } - } - - return target_phase; -}; - -module.exports = settings => { - const phases = settings.phases; - const options = settings.options; - const oc = new OpenShiftClientX(Object.assign({ namespace: phases.build.namespace }, options)); - const target_phases = getTargetPhases(options.env, phases); - - target_phases.forEach(k => { - if (phases.hasOwnProperty(k)) { - - const phase = phases[k]; - oc.namespace(phase.namespace); - - //remove all custom security policies create for specific pull request - const knps = oc.get("networkpolicies", { - selector: `app=${phase.name}${phase.suffix}`, - namespace: phase.namespace, - }); - knps.forEach(knp => { - oc.delete([`networkpolicy/${knp.metadata.name}`], { - "ignore-not-found": "true", - wait: "true", - namespace: phase.namespace, - }); - }); - - } - }); -}; diff --git a/.pipeline/lib/clean.js b/.pipeline/lib/clean.js deleted file mode 100755 index 588ef649e..000000000 --- a/.pipeline/lib/clean.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; -const { OpenShiftClientX } = require("@bcgov/pipeline-cli"); -const KeyCloakClient = require("./keycloak"); - -const getTargetPhases = (env, phases) => { - let target_phase = []; - for (const phase in phases) { - if (env.match(/^(all|transient)$/) && phases[phase].transient) { - target_phase.push(phase); - } else if (env === phase) { - target_phase.push(phase); - break; - } - } - - return target_phase; -}; - -module.exports = (settings) => { - const phases = settings.phases; - const options = settings.options; - const oc = new OpenShiftClientX( - Object.assign({ namespace: phases.build.namespace }, options) - ); - const target_phases = getTargetPhases(options.env, phases); - - target_phases.forEach((k) => { - if (phases.hasOwnProperty(k)) { - const phase = phases[k]; - oc.namespace(phase.namespace); - /** - if(k === 'dev') { - const kc = new KeyCloakClient(settings, oc); - kc.removeUris(); - } - */ - - let buildConfigs = oc.get("bc", { - selector: `app=${phase.instance},env-id=${phase.changeId},!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, - namespace: phase.namespace, - }); - buildConfigs.forEach((bc) => { - if (bc.spec.output.to.kind == "ImageStreamTag") { - oc.delete([`ImageStreamTag/${bc.spec.output.to.name}`], { - "ignore-not-found": "true", - wait: "true", - namespace: phase.namespace, - }); - } - }); - - let deploymentConfigs = oc.get("dc", { - selector: 
`app=${phase.instance},env-id=${phase.changeId},env-name=${k},!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, - namespace: phase.namespace, - }); - deploymentConfigs.forEach((dc) => { - dc.spec.triggers.forEach((trigger) => { - if ( - trigger.type == "ImageChange" && - trigger.imageChangeParams.from.kind == "ImageStreamTag" - ) { - oc.delete( - [`ImageStreamTag/${trigger.imageChangeParams.from.name}`], - { - "ignore-not-found": "true", - wait: "true", - namespace: phase.namespace, - } - ); - } - }); - }); - - //get all statefulsets before they are deleted - const statefulsets = oc.get("statefulset", { - selector: `app=${phase.instance},env-id=${phase.changeId},!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, - namespace: phase.namespace, - }); - - oc.raw("delete", ["all"], { - selector: `app=${phase.instance},env-id=${phase.changeId},!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, - wait: "true", - namespace: phase.namespace, - }); - oc.raw( - "delete", - [ - "pvc,Secret,configmap,endpoints,RoleBinding,role,ServiceAccount,Endpoints", - ], - { - selector: `app=${phase.instance},env-id=${phase.changeId},!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, - wait: "true", - namespace: phase.namespace, - } - ); - - //remove all the PVCs associated with each statefulset, after they get deleted by above delete all operation - statefulsets.forEach((statefulset) => { - //delete PVCs mounted for statfulset - oc.raw("delete", ["pvc"], { - selector: `statefulset=${statefulset.metadata.name}`, - "ignore-not-found": "true", - wait: "true", - namespace: phase.namespace, - }); - - //delete configmaps create by patroni - let patroniConfigmaps = oc.get("configmap", { - selector: `app.kubernetes.io/name=patroni,cluster-name=${statefulset.metadata.name}`, - namespace: phase.namespace, - }); - if (Object.entries(patroniConfigmaps).length > 0) { - oc.raw("delete", ["configmap"], { - selector: `app.kubernetes.io/name=patroni,cluster-name=${statefulset.metadata.name}`, - wait: "true", - "ignore-not-found": "true", - namespace: phase.namespace, - }); - } - }); - - //remove all custom security policies create for specific pull request - const knps = oc.get("networkpolicies", { - selector: `app=${phase.name}${phase.suffix}`, - namespace: phase.namespace, - }); - knps.forEach((knp) => { - oc.delete([`networkpolicy/${knp.metadata.name}`], { - "ignore-not-found": "true", - wait: "true", - namespace: phase.namespace, - }); - }); - } - }); -}; diff --git a/.pipeline/lib/config.js b/.pipeline/lib/config.js deleted file mode 100644 index 53d81a23c..000000000 --- a/.pipeline/lib/config.js +++ /dev/null @@ -1,267 +0,0 @@ -"use strict"; -const options = require("@bcgov/pipeline-cli").Util.parseArguments(); -const changeId = options.pr; //aka pull-request -const version = "3.0.1"; -const name = "tfrs"; -const ocpName = "apps.silver.devops"; - -options.git.owner = "bcgov"; -//Have to set options.git.repository to be zeva otherwise an error will be thrown as the label github-repo -//will contain https://github.com/bcgov/zeva which is not allowed as a valid label -options.git.repository = "tfrs"; - -const phases = { - build: { - namespace: "0ab226-tools", - name: `${name}`, - phase: "build", - changeId: changeId, - suffix: `-build-${changeId}`, - instance: `${name}-build-${changeId}`, - version: `${version}-${changeId}`, - tag: `build-${version}-${changeId}`, - releaseBranch: "release-3.0.1", - }, - dev: { - namespace: "0ab226-dev", - name: 
`${name}`, - phase: "dev", - changeId: changeId, - suffix: `-dev`, - instance: `${name}-dev`, - version: `${version}`, - tag: `dev-${version}`, - dbServiceName: "tfrs-spilo", - frontendCpuRequest: "100m", - frontendCpuLimit: "200m", - frontendMemoryRequest: "300Mi", - frontendMemoryLimit: "600Mi", - frontendReplicas: 2, - frontendKeycloakAuthority: "https://dev.loginproxy.gov.bc.ca/auth", - frontendKeycloakClientId: "tfrs-on-gold-4308", - frontendKeycloakCallbackUrl: `https://tfrs-dev.${ocpName}.gov.bc.ca`, - frontendKeycloakLogoutUrl: `https://tfrs-dev.${ocpName}.gov.bc.ca`, - frontendHost: `tfrs-dev.${ocpName}.gov.bc.ca`, - frontendSiteminderLogoutUrl: - "https://logontest7.gov.bc.ca/clp-cgi/logoff.cgi?retnow=1&returl=", - frontendDebugEnabled: "true", - backendCpuRequest: "200m", - backendCpuLimit: "400m", - backendMemoryRequest: "600Mi", - backendMemoryLimit: "1200Mi", - backendHealthCheckDelay: 30, - backendHost: `tfrs-backend-dev.${ocpName}.gov.bc.ca`, - backendReplicas: 2, - backendKeycloakAudience: "tfrs-on-gold-4308", - backendWellKnownEndpoint: - "https://dev.loginproxy.gov.bc.ca/auth/realms/standard/.well-known/openid-configuration", - backendKeycloakCertsUrl: - "https://dev.loginproxy.gov.bc.ca/auth/realms/standard/protocol/openid-connect/certs", - redisHost: "lcfs-redis-dev-master.d2bd59-dev.svc.cluster.local", - redisPort: 6379, - celeryCpuRequest: "100m", - celeryCpuLimit: "250m", - celeryMemoryRequest: "1600Mi", - celeryMemoryLimit: "3Gi", - scanHandlerCpuRequest: "25m", - scanHandlerCpuLimit: "50m", - scanHandlerMemoryRequest: "50Mi", - scanHandlerMemoryLimit: "100Mi", - scanCoordinatorCpuRequest: "50m", - scanCoordinatorCpuLimit: "100m", - scanCoordinatorMemoryRequest: "30Mi", - scanCoordinatorMemoryLimit: "60Mi", - notificationServerCpuRequest: "100m", - notificationServerCpuLimit: "200m", - notificationServerMemoryRequest: "120Mi", - notificationServerMemoryLimit: "240Mi", - patroniCpuRequest: "500m", - patroniCpuLimit: "1000m", - patroniMemoryRequest: "250Mi", - patroniMemoryLimit: "1Gi", - patroniPvcSize: "2Gi", - patroniReplica: 1, - storageClass: "netapp-block-standard", - ocpName: `${ocpName}`, - rabbitmqCpuRequest: "250m", - rabbitmqCpuLimit: "700m", - rabbitmqMemoryRequest: "500Mi", - rabbitmqMemoryLimit: "1Gi", - rabbitmqPvcSize: "1Gi", - rabbitmqReplica: 1, - rabbitmqPostStartSleep: 120, - storageClass: "netapp-block-standard", - schemaSpyPublicCpuRequest: "50m", - schemaSpyPublicCpuLimit: "500m", - schemaSpyPublicMemoryRequest: "512Mi", - schemaSpyPublicMemoryLimit: "2Gi", - schemaSpyAuditCpuRequest: "50m", - schemaSpyAuditCpuLimit: "300m", - schemaSpyAuditMemoryRequest: "256Mi", - schemaSpyAuditMemoryLimit: "512Mi", - }, - test: { - namespace: "0ab226-test", - name: `${name}`, - phase: "test", - changeId: changeId, - suffix: `-test`, - instance: `${name}-test`, - version: `${version}`, - tag: `test-${version}`, - dbServiceName: "tfrs-spilo", - frontendCpuRequest: "40m", - frontendCpuLimit: "80m", - frontendMemoryRequest: "60Mi", - frontendMemoryLimit: "120Mi", - frontendReplicas: 2, - frontendKeycloakAuthority: "https://test.loginproxy.gov.bc.ca/auth", - frontendKeycloakClientId: "tfrs-on-gold-4308", - frontendKeycloakCallbackUrl: `https://tfrs-test.${ocpName}.gov.bc.ca`, - frontendKeycloakLogoutUrl: `https://tfrs-test.${ocpName}.gov.bc.ca`, - frontendHost: `tfrs-test.${ocpName}.gov.bc.ca`, - frontendSiteminderLogoutUrl: - "https://logontest7.gov.bc.ca/clp-cgi/logoff.cgi?retnow=1&returl=", - frontendDebugEnabled: "true", - backendCpuRequest: "200m", - 
backendCpuLimit: "400m", - backendMemoryRequest: "600Mi", - backendMemoryLimit: "1200Mi", - backendHealthCheckDelay: 30, - backendHost: `tfrs-backend-test.${ocpName}.gov.bc.ca`, - backendReplicas: 4, - backendKeycloakAudience: "tfrs-on-gold-4308", - backendWellKnownEndpoint: - "https://test.loginproxy.gov.bc.ca/auth/realms/standard/.well-known/openid-configuration", - backendKeycloakCertsUrl: - "https://test.loginproxy.gov.bc.ca/auth/realms/standard/protocol/openid-connect/certs", - redisHost: "lcfs-redis-test-master.d2bd59-test.svc.cluster.local", - redisPort: 6379, - celeryCpuRequest: "100m", - celeryCpuLimit: "250m", - celeryMemoryRequest: "1600Mi", - celeryMemoryLimit: "3Gi", - scanHandlerCpuRequest: "25m", - scanHandlerCpuLimit: "50m", - scanHandlerMemoryRequest: "50Mi", - scanHandlerMemoryLimit: "100Mi", - scanCoordinatorCpuRequest: "50m", - scanCoordinatorCpuLimit: "100m", - scanCoordinatorMemoryRequest: "30Mi", - scanCoordinatorMemoryLimit: "60Mi", - notificationServerCpuRequest: "100m", - notificationServerCpuLimit: "200m", - notificationServerMemoryRequest: "120Mi", - notificationServerMemoryLimit: "240Mi", - patroniCpuRequest: "300m", - patroniCpuLimit: "700m", - patroniMemoryRequest: "250Mi", - patroniMemoryLimit: "1Gi", - patroniPvcSize: "3Gi", - patroniReplica: 2, - storageClass: "netapp-block-standard", - ocpName: `${ocpName}`, - rabbitmqCpuRequest: "250m", - rabbitmqCpuLimit: "400m", - rabbitmqMemoryRequest: "500Mi", - rabbitmqMemoryLimit: "1Gi", - rabbitmqPvcSize: "1Gi", - rabbitmqReplica: 2, - rabbitmqPostStartSleep: 120, - storageClass: "netapp-block-standard", - schemaSpyPublicCpuRequest: "50m", - schemaSpyPublicCpuLimit: "500m", - schemaSpyPublicMemoryRequest: "512Mi", - schemaSpyPublicMemoryLimit: "2Gi", - schemaSpyAuditCpuRequest: "50m", - schemaSpyAuditCpuLimit: "300m", - schemaSpyAuditMemoryRequest: "256Mi", - schemaSpyAuditMemoryLimit: "512Mi", - }, - prod: { - namespace: "0ab226-prod", - name: `${name}`, - phase: "prod", - changeId: changeId, - suffix: `-prod`, - instance: `${name}-prod`, - version: `${version}`, - tag: `prod-${version}`, - dbServiceName: "tfrs-crunchy-prod-pgbouncer", - frontendCpuRequest: "40m", - frontendCpuLimit: "80m", - frontendMemoryRequest: "60Mi", - frontendMemoryLimit: "120Mi", - frontendReplicas: 4, - frontendKeycloakAuthority: "https://loginproxy.gov.bc.ca/auth", - frontendKeycloakClientId: "tfrs-on-gold-4308", - frontendKeycloakCallbackUrl: "https://lowcarbonfuels.gov.bc.ca", - frontendKeycloakLogoutUrl: "https://lowcarbonfuels.gov.bc.ca", - frontendHost: "lowcarbonfuels.gov.bc.ca", - frontendSiteminderLogoutUrl: - "https://logon7.gov.bc.ca/clp-cgi/logoff.cgi?retnow=1&returl=", - frontendDebugEnabled: "false", - backendCpuRequest: "200m", - backendCpuLimit: "400m", - backendMemoryRequest: "600Mi", - backendMemoryLimit: "1200Mi", - backendHealthCheckDelay: 30, - backendHost: `tfrs-backend-prod.${ocpName}.gov.bc.ca`, - backendReplicas: 4, - backendKeycloakAudience: "tfrs-on-gold-4308", - backendWellKnownEndpoint: - "https://loginproxy.gov.bc.ca/auth/realms/standard/.well-known/openid-configuration", - backendKeycloakCertsUrl: - "https://loginproxy.gov.bc.ca/auth/realms/standard/protocol/openid-connect/certs", - redisHost: "lcfs-redis-prod-master.d2bd59-prod.svc.cluster.local", - redisPort: 6379, - celeryCpuRequest: "100m", - celeryCpuLimit: "250mm", - celeryMemoryRequest: "1600Mi", - celeryMemoryLimit: "3Gi", - scanHandlerCpuRequest: "25m", - scanHandlerCpuLimit: "50m", - scanHandlerMemoryRequest: "50Mi", - scanHandlerMemoryLimit: 
"100Mi", - scanCoordinatorCpuRequest: "50m", - scanCoordinatorCpuLimit: "100m", - scanCoordinatorMemoryRequest: "30Mi", - scanCoordinatorMemoryLimit: "60Mi", - notificationServerCpuRequest: "100m", - notificationServerCpuLimit: "200m", - notificationServerMemoryRequest: "120Mi", - notificationServerMemoryLimit: "240Mi", - patroniCpuRequest: "300m", - patroniCpuLimit: "600m", - patroniMemoryRequest: "500Mi", - patroniMemoryLimit: "2Gi", - patroniPvcSize: "10Gi", - patroniReplica: 3, - storageClass: "netapp-block-standard", - ocpName: `${ocpName}`, - rabbitmqCpuRequest: "250m", - rabbitmqCpuLimit: "400m", - rabbitmqMemoryRequest: "500Mi", - rabbitmqMemoryLimit: "1Gi", - rabbitmqPvcSize: "1Gi", - rabbitmqReplica: 2, - rabbitmqPostStartSleep: 120, - storageClass: "netapp-block-standard", - schemaSpyPublicCpuRequest: "50m", - schemaSpyPublicCpuLimit: "500m", - schemaSpyPublicMemoryRequest: "512Mi", - schemaSpyPublicMemoryLimit: "2Gi", - schemaSpyAuditCpuRequest: "50m", - schemaSpyAuditCpuLimit: "300m", - schemaSpyAuditMemoryRequest: "256Mi", - schemaSpyAuditMemoryLimit: "512Mi", - }, -}; - -// This callback forces the node process to exit as failure. -process.on("unhandledRejection", (reason) => { - console.log(reason); - process.exit(1); -}); - -module.exports = exports = { phases, options }; diff --git a/.pipeline/lib/deploy-db.js b/.pipeline/lib/deploy-db.js deleted file mode 100644 index 8e3371aaa..000000000 --- a/.pipeline/lib/deploy-db.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; -const { OpenShiftClientX } = require("@bcgov/pipeline-cli"); -const path = require("path"); - -module.exports = settings => { - const phases = settings.phases; - const options = settings.options; - const phase = options.env; - const changeId = phases[phase].changeId; - const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, "../../openshift-v4")); - var objects = []; - - //The deployment of your cool app goes here ▼▼▼ - - if(phases[phase].phase === 'dev') { - - //deploy Patroni - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/patroni/deployment-prereq.yaml`, { - 'param': { - 'NAME': 'patroni', - 'SUFFIX': phases[phase].suffix - } - })) - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/patroni/deployment.yaml`, { - 'param': { - 'NAME': 'patroni', - 'ENV_NAME': phases[phase].phase, - 'SUFFIX': phases[phase].suffix, - 'CPU_REQUEST': phases[phase].patroniCpuRequest, - 'CPU_LIMIT': phases[phase].patroniCpuLimit, - 'MEMORY_REQUEST': phases[phase].patroniMemoryRequest, - 'MEMORY_LIMIT': phases[phase].patroniMemoryLimit, - 'IMAGE_REGISTRY': 'image-registry.openshift-image-registry.svc:5000', - 'IMAGE_STREAM_NAMESPACE': phases[phase].namespace, - 'IMAGE_STREAM_TAG': 'patroni:v10-stable', - 'REPLICA': phases[phase].patroniReplica, - 'PVC_SIZE': phases[phase].patroniPvcSize, - 'STORAGE_CLASS': phases[phase].storageClass - } - })) - - //deploy rabbitmq, use docker image directly - //POST_START_SLEEP is harded coded in the rabbitmq template, replacement was not successful - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/rabbitmq/rabbitmq-cluster-dc.yaml`, { - 'param': { - 'NAME': phases[phase].name, - 'ENV_NAME': phases[phase].phase, - 'SUFFIX': phases[phase].suffix, - 'NAMESPACE': phases[phase].namespace, - 'CLUSTER_NAME': 'rabbitmq-cluster', - 'ISTAG': 'rabbitmq:3.8.3-management', - 
'SERVICE_ACCOUNT': 'rabbitmq-discovery', - 'VOLUME_SIZE': phases[phase].rabbitmqPvcSize, - 'CPU_REQUEST': phases[phase].rabbitmqCpuRequest, - 'CPU_LIMIT': phases[phase].rabbitmqCpuLimit, - 'MEMORY_REQUEST': phases[phase].rabbitmqMemoryRequest, - 'MEMORY_LIMIT': phases[phase].rabbitmqMemoryLimit, - 'REPLICA': phases[phase].rabbitmqReplica, - 'POST_START_SLEEP': phases[phase].rabbitmqPostStartSleep, - 'STORAGE_CLASS': phases[phase].storageClass - } - })) - } - - oc.applyRecommendedLabels( - objects, - phases[phase].name, - phase, - `${changeId}`, - phases[phase].instance, - ); - oc.importImageStreams(objects, phases[phase].tag, phases.build.namespace, phases.build.tag); - oc.applyAndDeploy(objects, phases[phase].instance); -}; diff --git a/.pipeline/lib/deploy-knps.js b/.pipeline/lib/deploy-knps.js deleted file mode 100755 index 0f599d344..000000000 --- a/.pipeline/lib/deploy-knps.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -const { OpenShiftClientX } = require("@bcgov/pipeline-cli"); -const path = require("path"); -const KeyCloakClient = require('./keycloak'); - -module.exports = settings => { - const phases = settings.phases; - const options = settings.options; - const phase = options.env; - const changeId = phases[phase].changeId; - const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, "../../openshift-v4")); - var objects = []; - - //The deployment of your cool app goes here ▼▼▼ - - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/knp/knp-env-pr-new-clamav-rabbitmq.yaml`, { - 'param': { - 'ENVIRONMENT': phases[phase].phase, - 'SUFFIX': phases[phase].suffix - } - })) - - oc.applyRecommendedLabels( - objects, - phases[phase].name, - phase, - `${changeId}`, - phases[phase].instance, - ); - oc.importImageStreams(objects, phases[phase].tag, phases.build.namespace, phases.build.tag); - oc.applyAndDeploy(objects, phases[phase].instance); -}; diff --git a/.pipeline/lib/deploy-schemaspy.js b/.pipeline/lib/deploy-schemaspy.js deleted file mode 100755 index fd1ad220c..000000000 --- a/.pipeline/lib/deploy-schemaspy.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -const { OpenShiftClientX } = require("@bcgov/pipeline-cli"); -const path = require("path"); -const KeyCloakClient = require('./keycloak'); - -module.exports = settings => { - const phases = settings.phases; - const options = settings.options; - const phase = options.env; - const changeId = phases[phase].changeId; - const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, "../../openshift-v4")); - var objects = []; - - //The deployment of your cool app goes here ▼▼▼ - - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/schema-spy/schemaspy-dc.yaml`, { - 'param': { - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'ENV_NAME': phases[phase].phase, - 'CPU_REQUEST_PUBLIC': phases[phase].schemaSpyPublicCpuRequest, - 'CPU_LIMIT_PUBLIC': phases[phase].schemaSpyPublicCpuLimit, - 'MEMORY_REQUEST_PUBLIC': phases[phase].schemaSpyPublicMemoryRequest, - 'MEMORY_LIMIT_PUBLIC': phases[phase].schemaSpyPublicMemoryLimit, - 'CPU_REQUEST_AUDIT': phases[phase].schemaSpyAuditCpuRequest, - 'CPU_LIMIT_AUDIT': phases[phase].schemaSpyAuditCpuLimit, - 'MEMORY_REQUEST_AUDIT': phases[phase].schemaSpyAuditMemoryRequest, - 'MEMORY_LIMIT_AUDIT': 
phases[phase].schemaSpyAuditMemoryLimit - } - })) - - oc.applyRecommendedLabels( - objects, - phases[phase].name, - phase, - `${changeId}`, - phases[phase].instance, - ); - oc.importImageStreams(objects, phases[phase].tag, phases.build.namespace, phases.build.tag); - oc.applyAndDeploy(objects, phases[phase].instance); -}; diff --git a/.pipeline/lib/deploy.js b/.pipeline/lib/deploy.js deleted file mode 100755 index e2f3099bc..000000000 --- a/.pipeline/lib/deploy.js +++ /dev/null @@ -1,253 +0,0 @@ -"use strict"; -const { OpenShiftClientX } = require("@bcgov/pipeline-cli"); -const path = require("path"); -const KeyCloakClient = require("./keycloak"); - -module.exports = (settings) => { - const phases = settings.phases; - const options = settings.options; - const phase = options.env; - const changeId = phases[phase].changeId; - const oc = new OpenShiftClientX( - Object.assign({ namespace: phases[phase].namespace }, options) - ); - - const templatesLocalBaseUrl = oc.toFileUrl( - path.resolve(__dirname, "../../openshift-v4") - ); - var objects = []; - - //The deployment of your cool app goes here ▼▼▼ - - //deploy backend - objects = objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/backend/backend-dc.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - ENV_NAME: phases[phase].phase, - NAMESPACE: phases[phase].namespace, - VERSION: phases[phase].tag, - KEYCLOAK_AUDIENCE: phases[phase].backendKeycloakAudience, - CPU_REQUEST: phases[phase].backendCpuRequest, - CPU_LIMIT: phases[phase].backendCpuLimit, - MEMORY_REQUEST: phases[phase].backendMemoryRequest, - MEMORY_LIMIT: phases[phase].backendMemoryLimit, - REPLICAS: phases[phase].backendReplicas, - DB_SERVICE_NAME: phases[phase].dbServiceName, - WELL_KNOWN_ENDPOINT: phases[phase].backendWellKnownEndpoint, - REDIS_HOST: phases[phase].redisHost, - REDIS_PORT: phases[phase].redisPort, - }, - } - ) - ); - - objects = objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/backend/backend-dc-others.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - BACKEND_HOST: phases[phase].backendHost, - }, - } - ) - ); - - //deploy frontend - objects = objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/frontend/frontend-dc-docker.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - VERSION: phases[phase].tag, - NAMESPACE: phases[phase].namespace, - CPU_REQUEST: phases[phase].frontendCpuRequest, - CPU_LIMIT: phases[phase].frontendCpuLimit, - MEMORY_REQUEST: phases[phase].frontendMemoryRequest, - MEMORY_LIMIT: phases[phase].frontendMemoryLimit, - REPLICAS: phases[phase].frontendReplicas, - KEYCLOAK_AUTHORITY: phases[phase].frontendKeycloakAuthority, - KEYCLOAK_CLIENT_ID: phases[phase].frontendKeycloakClientId, - KEYCLOAK_CALLBACK_URL: phases[phase].frontendKeycloakCallbackUrl, - KEYCLOAK_LOGOUT_URL: phases[phase].frontendKeycloakLogoutUrl, - SITEMINDER_LOGOUT_URL: phases[phase].frontendSiteminderLogoutUrl, - BACKEND_HOST: phases[phase].backendHost, - DEBUG_ENABLED: phases[phase].frontendDebugEnabled, - }, - } - ) - ); - /* - //deploy frontend - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/frontend/frontend-dc-docker-others.yaml`, { - 'param': { - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'FRONTEND_HOST': phases[phase].frontendHost, - } - })) -*/ - //deploy celery - objects = 
objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/celery/celery-dc.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - VERSION: phases[phase].tag, - ENV_NAME: phases[phase].phase, - NAMESPACE: phases[phase].namespace, - CPU_REQUEST: phases[phase].celeryCpuRequest, - CPU_LIMIT: phases[phase].celeryCpuLimit, - MEMORY_REQUEST: phases[phase].celeryMemoryRequest, - MEMORY_LIMIT: phases[phase].celeryMemoryLimit, - DB_SERVICE_NAME: phases[phase].dbServiceName, - }, - } - ) - ); - - //deploy notification server - objects = objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/notification/notification-server-dc.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - NAMESPACE: phases[phase].namespace, - VERSION: phases[phase].tag, - KEYCLOAK_CERTS_URL: phases[phase].backendKeycloakCertsUrl, - CPU_REQUEST: phases[phase].notificationServerCpuRequest, - CPU_LIMIT: phases[phase].notificationServerCpuLimit, - MEMORY_REQUEST: phases[phase].notificationServerMemoryRequest, - MEMORY_LIMIT: phases[phase].notificationServerMemoryLimit, - }, - } - ) - ); - - objects = objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/notification/notification-server-others-dc.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - FRONTEND_HOST: phases[phase].frontendHost, - }, - } - ) - ); - - //deploy scan coordinator - objects = objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/scan-coordinator/scan-coordinator-dc.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - NAMESPACE: phases[phase].namespace, - VERSION: phases[phase].tag, - ENV_NAME: phases[phase].phase, - CPU_REQUEST: phases[phase].scanCoordinatorCpuRequest, - CPU_LIMIT: phases[phase].scanCoordinatorCpuLimit, - MEMORY_REQUEST: phases[phase].scanCoordinatorMemoryRequest, - MEMORY_LIMIT: phases[phase].scanCoordinatorMemoryLimit, - }, - } - ) - ); - - //deploy scan handler - objects = objects.concat( - oc.processDeploymentTemplate( - `${templatesLocalBaseUrl}/templates/scan-handler/scan-handler-dc.yaml`, - { - param: { - NAME: phases[phase].name, - SUFFIX: phases[phase].suffix, - NAMESPACE: phases[phase].namespace, - VERSION: phases[phase].tag, - CPU_REQUEST: phases[phase].scanHandlerCpuRequest, - CPU_LIMIT: phases[phase].scanHandlerCpuLimit, - MEMORY_REQUEST: phases[phase].scanHandlerMemoryRequest, - MEMORY_LIMIT: phases[phase].scanHandlerMemoryLimit, - DB_SERVICE_NAME: phases[phase].dbServiceName, - }, - } - ) - ); - - /* - //only deploy on dev for Tracking PR - if(phases[phase].phase === 'dev') { - - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/frontend/frontend-dc-others.yaml`, { - 'param': { - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'VERSION': phases[phase].tag, - 'KEYCLOAK_AUTHORITY': phases[phase].frontendKeycloakAuthority, - 'KEYCLOAK_CLIENT_ID': phases[phase].frontendKeycloakClientId, - 'KEYCLOAK_CALLBACK_URL': phases[phase].frontendKeycloakCallbackUrl, - 'KEYCLOAK_LOGOUT_URL': phases[phase].frontendKeycloakLogoutUrl, - 'FRONTEND_HOST': phases[phase].frontendHost, - 'BACKEND_HOST': phases[phase].backendHost - } - })) - - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/notification/notification-server-others-dc.yaml`, { - 'param': { - 'NAME': phases[phase].name, - 'SUFFIX': 
phases[phase].suffix, - 'FRONTEND_HOST': phases[phase].frontendHost - } - })) - - } - - //only deploy schemaspy for test and prod - if(phases[phase].phase === 'test' || phases[phase].phase === 'prod') { - objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/schema-spy/schemaspy-dc.yaml`, { - 'param': { - 'NAME': phases[phase].name, - 'SUFFIX': phases[phase].suffix, - 'ENV_NAME': phases[phase].phase, - 'CPU_REQUEST_PUBLIC': phases[phase].schemaSpyPublicCpuRequest, - 'CPU_LIMIT_PUBLIC': phases[phase].schemaSpyPublicCpuLimit, - 'MEMORY_REQUEST_PUBLIC': phases[phase].schemaSpyPublicMemoryRequest, - 'MEMORY_LIMIT_PUBLIC': phases[phase].schemaSpyPublicMemoryLimit, - 'CPU_REQUEST_AUDIT': phases[phase].schemaSpyAuditCpuRequest, - 'CPU_LIMIT_AUDIT': phases[phase].schemaSpyAuditCpuLimit, - 'MEMORY_REQUEST_AUDIT': phases[phase].schemaSpyAuditMemoryRequest, - 'MEMORY_LIMIT_AUDIT': phases[phase].schemaSpyAuditMemoryLimit - } - })) - } -*/ - oc.applyRecommendedLabels( - objects, - phases[phase].name, - phase, - `${changeId}`, - phases[phase].instance - ); - oc.importImageStreams( - objects, - phases[phase].tag, - phases.build.namespace, - phases.build.tag - ); - oc.applyAndDeploy(objects, phases[phase].instance); -}; diff --git a/.pipeline/lib/keycloak.js b/.pipeline/lib/keycloak.js deleted file mode 100644 index ea6a1b2b0..000000000 --- a/.pipeline/lib/keycloak.js +++ /dev/null @@ -1,146 +0,0 @@ -"use strict"; -const axios = require("axios"); -const _ = require("lodash"); -//code reference https://github.com/bcgov/HMCR/blob/0.7/.pipeline/lib/keycloak.js -module.exports = class KeyCloakClient { - constructor(settings, oc) { - this.phases = settings.phases; - this.options = settings.options; - this.oc = oc; - this.appHost = this.phases.dev.host; - } - - async init() { - - this.getSecrets(); - - this.apiTokenPath = `/auth/realms/${this.realmId}/protocol/openid-connect/token`; - this.tfrsPublicClientPath = `auth/admin/realms/${this.realmId}/clients/${this.tfrsClientId}`; - - this.api = axios.create({ - baseURL: `https://${this.ssoHost}` - }); - - const token = await this.getAccessToken(); - - this.api.defaults.headers.common = { - Authorization: `Bearer ${token}` - }; - } - - getSecrets() { - const keycloakSecret = this.oc.raw("get", [ - "secret", - "tfrs-keycloak", - "-o", - "json" - ]); - const secret = JSON.parse(keycloakSecret.stdout).data; - - this.clientId = Buffer.from(secret.clientId, "base64").toString(); - this.clientSecret = Buffer.from(secret.clientSecret, "base64").toString(); - this.tfrsClientId = Buffer.from(secret.tfrsPublic, "base64").toString(); - this.realmId = Buffer.from(secret.realmId, "base64").toString(); - this.ssoHost = Buffer.from(secret.host, "base64").toString(); - - if (!this.clientId || !this.clientSecret || !this.tfrsClientId) - throw new Error( - "Unable to retrieve Keycloak service account info from OpenShift" - ); - } - - getAccessToken() { - - return this.api - .post(this.apiTokenPath, "grant_type=client_credentials", { - headers: { "Content-Type": "application/x-www-form-urlencoded" }, - auth: { - username: this.clientId, - password: this.clientSecret - } - }) - .then(function(response) { - if (!response.data.access_token) - throw new Error( - "Unable to retrieve Keycloak service account access token" - ); - - return Promise.resolve(response.data.access_token); - }); - } - - async getUris() { - - console.log("in getURis this.tfrsPublicClientPath=", this.tfrsPublicClientPath) - - const response = await 
this.api.get(this.tfrsPublicClientPath); - - console.log("in getURis 000000") - const data = { ...response.data }; - const redirectUris = data.redirectUris; - - return { data, redirectUris }; - } - - async addUris() { - await this.init(); - - console.log("111Attempting to add RedirectUri and WebOrigins"); - - const { data, redirectUris} = await this.getUris(); - - console.log("2222"); - - const putData = { id: data.id, clientId: data.clientId }; - - console.log("3333"); - - const hasRedirectUris = redirectUris.find(item => - item.includes(this.appHost) - ); - - console.log("4444"); - - if (!hasRedirectUris) { - redirectUris.push(`https://${this.appHost}/*`); - putData.redirectUris = redirectUris; - } - - if (!(hasRedirectUris)) { - this.api - .put(this.tfrsPublicClientPath, putData) - .then(() => console.log("RedirectUri and WebOrigins added.")); - } else { - console.log("RedirectUri and WebOrigins add skipped."); - } - } - - async removeUris() { - await this.init(); - - console.log("Attempting to remove RedirectUri and WebOrigins"); - - const { data, redirectUris } = await this.getUris(); - - const putData = { id: data.id, clientId: data.clientId }; - - const hasRedirectUris = redirectUris.find(item => - item.includes(this.appHost) - ); - - if (hasRedirectUris) { - putData.redirectUris = redirectUris.filter( - item => !item.includes(this.appHost) - ); - } - - if (hasRedirectUris) { - this.api - .put(this.tfrsPublicClientPath, putData) - .then(() => console.log("RedirectUri and WebOrigins removed.")); - } else { - console.log("RedirectUri and WebOrigins remove skipped."); - } - - } -}; diff --git a/.pipeline/npmw b/.pipeline/npmw deleted file mode 100755 index 1eed7c953..000000000 --- a/.pipeline/npmw +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -set +x -type -t nvm && nvm deactivate -export NVM_DIR="$(git rev-parse --show-toplevel)/.nvm" -if [ ! 
-f "$NVM_DIR/nvm.sh" ]; then - mkdir -p "${NVM_DIR}" - curl -sSL -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash &>/dev/null -fi -source "$NVM_DIR/nvm.sh" &>/dev/null -METHOD=script nvm install --no-progress &>/dev/null -nvm use &>/dev/null -exec npm "$@" diff --git a/.pipeline/package.json b/.pipeline/package.json deleted file mode 100644 index 1f5483b6e..000000000 --- a/.pipeline/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "pipeline", - "version": "1.0.0", - "description": "This a pipeliene script", - "engines": { - "node": ">=8" - }, - "scripts": { - "build": "node build.js", - "clean": "node clean.js", - "clean-knps": "node clean-knps.js", - "deploy": "node deploy.js", - "deploy-knps": "node deploy-knps.js", - "deploy-unittest": "node deploy-unittest.js", - "deploy-db": "node deploy-db.js", - "deploy-schemaspy": "node deploy-schemaspy.js", - "version": "echo \"node@$(node --version) ($(which node))\" && echo \"npm@$(npm --version) ($(which npm))\" && npm ls" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/bcgov/ocp-sso.git" - }, - "author": "", - "license": "Apache-2.0", - "dependencies": { - "@bcgov/gh-deploy": "^1.1.4", - "@bcgov/pipeline-cli": "^1.0.1", - "axios": "^0.21.1", - "lodash": "^4.17.15" - } -} From 32b61d4840d97ec9d51faaa81d34f7cb77f8840c Mon Sep 17 00:00:00 2001 From: Kuan Fan Date: Fri, 21 Mar 2025 11:27:33 -0700 Subject: [PATCH 3/4] remove tfrs-release.yaml dev-release.yaml --- .github/workflows/dev-release.yaml | 119 --------------------- .github/workflows/tfrs-release.yaml | 159 ---------------------------- 2 files changed, 278 deletions(-) delete mode 100644 .github/workflows/dev-release.yaml delete mode 100644 .github/workflows/tfrs-release.yaml diff --git a/.github/workflows/dev-release.yaml b/.github/workflows/dev-release.yaml deleted file mode 100644 index 2a0848832..000000000 --- a/.github/workflows/dev-release.yaml +++ /dev/null @@ -1,119 +0,0 @@ -## For each release, the value of name, branches, RELEASE_NAME and PR_NUMBER need to be adjusted accordingly -## For each release, update lib/config.js: version and releaseBranch - -name: TFRS Dev release-3.0.1 - -on: - push: - branches: [release-3.0.1] - paths: - - frontend/** - - backend/** - workflow_dispatch: - workflow_call: - -env: - ## The pull request number of the Tracking pull request to merge the release branch to main - ## Also remember to update the version in .pipeline/lib/config.js - PR_NUMBER: 2956 - RELEASE_NAME: release-3.0.1 - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - unit-test: - name: Run Backend Unit Tests - runs-on: ubuntu-latest - timeout-minutes: 60 - - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Run coverage report for django tests - uses: kuanfandevops/django-test-action@itvr-django-test - continue-on-error: true - with: - settings-dir-path: "backend/api" - requirements-file: "backend/requirements.txt" - managepy-dir: backend - - lint: - name: Linting - runs-on: ubuntu-latest - timeout-minutes: 60 - - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Frontend Linting - continue-on-error: true - run: | - cd frontend - pwd - npm install - npm run lint - - - name: Backend linting - uses: github/super-linter/slim@v4 - continue-on-error: true - env: - DEFAULT_BRANCH: ${{ env.RELEASE_NAME }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - FILTER_REGEX_INCLUDE: .*backend/.*.py - VALIDATE_PYTHON_PYLINT: true - LOG_LEVEL: WARN - - build: 
- name: Build TFRS on Openshift - runs-on: ubuntu-latest - timeout-minutes: 60 - - steps: - ## it will checkout to /home/runner/work/itvr/itvr - - name: Check out repository - uses: actions/checkout@v3 - - ## Log in to Openshift with a token of service account - - name: Log in to Openshift - uses: redhat-actions/oc-login@v1.3 - with: - openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} - openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} - insecure_skip_tls_verify: true - namespace: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools - - ## Run build on Openshift - - name: Run build - run: | - cd .pipeline - npm install - npm run build -- --pr=${{ env.PR_NUMBER }} --env=build - - deploy-on-dev: - name: Deploy TFRS on Dev - runs-on: ubuntu-latest - timeout-minutes: 240 - needs: build - - steps: - ## it will checkout to /home/runner/work/itvr/itvr - - name: Check out repository - uses: actions/checkout@v3 - - - name: Log in to Openshift - uses: redhat-actions/oc-login@v1.3 - with: - openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} - openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} - insecure_skip_tls_verify: true - namespace: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools - - - name: Run deploy - run: | - cd .pipeline - npm install - npm run deploy -- --pr=${{ env.PR_NUMBER }} --env=dev diff --git a/.github/workflows/tfrs-release.yaml b/.github/workflows/tfrs-release.yaml deleted file mode 100644 index 077f31b96..000000000 --- a/.github/workflows/tfrs-release.yaml +++ /dev/null @@ -1,159 +0,0 @@ -## For each release, the value of name, branches, RELEASE_NAME and PR_NUMBER need to be adjusted accordingly -## For each release, update lib/config.js: version and releaseBranch - -name: TFRS release-3.0.1 - -on: - workflow_dispatch: - workflow_call: - -env: - ## The pull request number of the Tracking pull request to merge the release branch to main - ## Also remember to update the version in .pipeline/lib/config.js - PR_NUMBER: 2956 - RELEASE_NAME: release-3.0.1 - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - unit-test: - name: Run Backend Unit Tests - runs-on: ubuntu-latest - timeout-minutes: 60 - - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Run coverage report for django tests - uses: kuanfandevops/django-test-action@itvr-django-test - continue-on-error: true - with: - settings-dir-path: "backend/api" - requirements-file: "backend/requirements.txt" - managepy-dir: backend - - lint: - name: Linting - runs-on: ubuntu-latest - timeout-minutes: 60 - - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Frontend Linting - continue-on-error: true - run: | - cd frontend - pwd - npm install - npm run lint - - - name: Backend linting - uses: github/super-linter/slim@v4 - continue-on-error: true - env: - DEFAULT_BRANCH: ${{ env.RELEASE_NAME }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - FILTER_REGEX_INCLUDE: .*backend/.*.py - VALIDATE_PYTHON_PYLINT: true - LOG_LEVEL: WARN - - build: - name: Build TFRS on Openshift - runs-on: ubuntu-latest - timeout-minutes: 60 - - steps: - ## it will checkout to /home/runner/work/itvr/itvr - - name: Check out repository - uses: actions/checkout@v3 - - ## Log in to Openshift with a token of service account - - name: Log in to Openshift - ##uses: redhat-actions/oc-login@v1 - uses: smlgbl/oc-login@main - with: - openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} - openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} - insecure_skip_tls_verify: true - namespace: ${{ 
secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools - - ## Run build on Openshift - - name: Run build - run: | - cd .pipeline - npm install - npm run build -- --pr=${{ env.PR_NUMBER }} --env=build - - # comment out deploy-on-test as the deployment configs have been repaced by deployments at 2.21.1 - # deploy-on-test: - # name: Deploy TFRS on Test - # runs-on: ubuntu-latest - # timeout-minutes: 240 - # needs: build - - # steps: - # ## it will checkout to /home/runner/work/itvr/itvr - # - name: Check out repository - # uses: actions/checkout@v3 - - # - name: Log in to Openshift - # ##uses: redhat-actions/oc-login@v1 - # uses: smlgbl/oc-login@main - # with: - # openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} - # openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} - # insecure_skip_tls_verify: true - # namespace: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools - - # - name: Ask for approval for TFRS Test deployment - # uses: trstringer/manual-approval@v1.6.0 - # with: - # secret: ${{ github.TOKEN }} - # approvers: AlexZorkin,kuanfandevops,prv-proton,JulianForeman,kevin-hashimoto,dhaselhan - # minimum-approvals: 1 - # issue-title: "TFRS ${{ env.RELEASE_NAME }} Test Deployment" - - # - name: Run deploy - # run: | - # cd .pipeline - # npm install - # npm run deploy -- --pr=${{ env.PR_NUMBER }} --env=test - - deploy-on-prod: - name: Deploy TFRS on Prod - runs-on: ubuntu-latest - timeout-minutes: 2880 - needs: build - - steps: - ## it will checkout to /home/runner/work/itvr/itvr - - name: Check out repository - uses: actions/checkout@v3 - - - name: Log in to Openshift - ##uses: redhat-actions/oc-login@v1 - uses: smlgbl/oc-login@main - with: - openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} - openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} - insecure_skip_tls_verify: true - namespace: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools - - - name: Ask for approval for TFRS Prod deployment - uses: trstringer/manual-approval@v1.6.0 - with: - secret: ${{ github.TOKEN }} - approvers: AlexZorkin,kuanfandevops,prv-proton,JulianForeman,kevin-hashimoto,dhaselhan - minimum-approvals: 2 - issue-title: "TFRS ${{ env.RELEASE_NAME }} Prod Deployment" - - - name: Run deploy - run: | - cd .pipeline - npm install - npm run deploy -- --pr=${{ env.PR_NUMBER }} --env=prod From f725b928cdfc99f7a60099c1412ee897f9d14e9e Mon Sep 17 00:00:00 2001 From: Alex Zorkin <47334977+AlexZorkin@users.noreply.github.com> Date: Fri, 30 May 2025 10:03:31 -0700 Subject: [PATCH 4/4] feat: added total schedule d values and ci (#2970) --- .../services/ComplianceReportSpreadSheet.py | 479 ++++++++++-------- docker-compose.yml | 15 + 2 files changed, 295 insertions(+), 199 deletions(-) diff --git a/backend/api/services/ComplianceReportSpreadSheet.py b/backend/api/services/ComplianceReportSpreadSheet.py index ede04739f..687c52b77 100644 --- a/backend/api/services/ComplianceReportSpreadSheet.py +++ b/backend/api/services/ComplianceReportSpreadSheet.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import xlwt from collections import namedtuple, defaultdict @@ -12,146 +13,178 @@ class ComplianceReportSpreadsheet(object): """ def __init__(self): - self.workbook = xlwt.Workbook(encoding='utf-8') + self.workbook = xlwt.Workbook(encoding="utf-8") def add_exclusion_agreement(self, exclusion_agreement): worksheet = self.workbook.add_sheet("Exclusion Agreement") row_index = 0 columns = [ - "Transaction Type", "Fuel Type", "Trading Partner", "Postal Address", - "Quantity", "Units", "Quantity Not Sold", "Units" + "Transaction Type", + "Fuel Type", + "Trading 
Partner", + "Postal Address", + "Quantity", + "Units", + "Quantity Not Sold", + "Units", ] - header_style = xlwt.easyxf('font: bold on') + header_style = xlwt.easyxf("font: bold on") # Build Column Headers for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - comment_format = xlwt.easyxf('align: wrap on, vert centre') - date_format = xlwt.easyxf(num_format_str='yyyy-mm-dd') - quantity_format = xlwt.easyxf(num_format_str='#,##0') - value_format = xlwt.easyxf(num_format_str='#,##0.00') + comment_format = xlwt.easyxf("align: wrap on, vert centre") + date_format = xlwt.easyxf(num_format_str="yyyy-mm-dd") + quantity_format = xlwt.easyxf(num_format_str="#,##0") + value_format = xlwt.easyxf(num_format_str="#,##0.00") if exclusion_agreement is None: return # Build the rows - for record in exclusion_agreement['records']: + for record in exclusion_agreement["records"]: row_index += 1 - worksheet.write(row_index, 0, record['transaction_type']) - worksheet.write(row_index, 1, record['fuel_type']) - worksheet.write(row_index, 2, record['transaction_partner']) - worksheet.write(row_index, 3, record['postal_address']) - worksheet.write(row_index, 4, Decimal(record['quantity']), quantity_format) - worksheet.write(row_index, 5, record['unit_of_measure']) - worksheet.write(row_index, 6, Decimal(record['quantity_not_sold']), quantity_format) - worksheet.write(row_index, 7, record['unit_of_measure']) + worksheet.write(row_index, 0, record["transaction_type"]) + worksheet.write(row_index, 1, record["fuel_type"]) + worksheet.write(row_index, 2, record["transaction_partner"]) + worksheet.write(row_index, 3, record["postal_address"]) + worksheet.write(row_index, 4, Decimal(record["quantity"]), quantity_format) + worksheet.write(row_index, 5, record["unit_of_measure"]) + worksheet.write( + row_index, 6, Decimal(record["quantity_not_sold"]), quantity_format + ) + worksheet.write(row_index, 7, record["unit_of_measure"]) def add_schedule_a(self, schedule_a): worksheet = self.workbook.add_sheet("Schedule A") row_index = 0 columns = [ - "Trading Partner", "Postal Address", - "Fuel Class", "Received or Transferred", - "Quantity" + "Trading Partner", + "Postal Address", + "Fuel Class", + "Received or Transferred", + "Quantity", ] - header_style = xlwt.easyxf('font: bold on') + header_style = xlwt.easyxf("font: bold on") # Build Column Headers for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - comment_format = xlwt.easyxf('align: wrap on, vert centre') - date_format = xlwt.easyxf(num_format_str='yyyy-mm-dd') - quantity_format = xlwt.easyxf(num_format_str='#,##0') - value_format = xlwt.easyxf(num_format_str='#,##0.00') + comment_format = xlwt.easyxf("align: wrap on, vert centre") + date_format = xlwt.easyxf(num_format_str="yyyy-mm-dd") + quantity_format = xlwt.easyxf(num_format_str="#,##0") + value_format = xlwt.easyxf(num_format_str="#,##0.00") if schedule_a is None: return # Build the rows - for record in schedule_a['records']: + for record in schedule_a["records"]: row_index += 1 - worksheet.write(row_index, 0, record['trading_partner']) - worksheet.write(row_index, 1, record['postal_address']) - worksheet.write(row_index, 2, record['fuel_class']) - worksheet.write(row_index, 3, record['transfer_type']) - worksheet.write(row_index, 4, Decimal(record['quantity']), quantity_format) + worksheet.write(row_index, 0, record["trading_partner"]) + worksheet.write(row_index, 1, record["postal_address"]) + 
worksheet.write(row_index, 2, record["fuel_class"]) + worksheet.write(row_index, 3, record["transfer_type"]) + worksheet.write(row_index, 4, Decimal(record["quantity"]), quantity_format) def add_schedule_b(self, schedule_b, compliance_period): worksheet = self.workbook.add_sheet("Schedule B") row_index = 0 columns = [ - "Fuel Type", "Fuel Class", "Provision", "Fuel Code or Schedule D Provision", - "Quantity", "Units", "Carbon Intensity Limit", "Carbon Intensity of Fuel", - "Energy Density", "EER", "Energy Content", "Credit", "Debit" + "Fuel Type", + "Fuel Class", + "Provision", + "Fuel Code or Schedule D Provision", + "Quantity", + "Units", + "Carbon Intensity Limit", + "Carbon Intensity of Fuel", + "Energy Density", + "EER", + "Energy Content", + "Credit", + "Debit", ] if compliance_period >= 2023: columns = [ - "Fuel Type", "Fuel Class", "Provision", "Fuel Code or Schedule D Provision", - "Quantity", "Units", "Carbon Intensity Limit", "Carbon Intensity of Fuel", - "Energy Density", "EER", "Energy Content", "Compliance Units" + "Fuel Type", + "Fuel Class", + "Provision", + "Fuel Code or Schedule D Provision", + "Quantity", + "Units", + "Carbon Intensity Limit", + "Carbon Intensity of Fuel", + "Energy Density", + "EER", + "Energy Content", + "Compliance Units", ] - header_style = xlwt.easyxf('font: bold on') + header_style = xlwt.easyxf("font: bold on") # Build Column Headers for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - comment_format = xlwt.easyxf('align: wrap on, vert centre') - date_format = xlwt.easyxf(num_format_str='yyyy-mm-dd') - quantity_format = xlwt.easyxf(num_format_str='#,##0') - value_format = xlwt.easyxf(num_format_str='#,##0.00') + comment_format = xlwt.easyxf("align: wrap on, vert centre") + date_format = xlwt.easyxf(num_format_str="yyyy-mm-dd") + quantity_format = xlwt.easyxf(num_format_str="#,##0") + value_format = xlwt.easyxf(num_format_str="#,##0.00") if schedule_b is None: return # Build the rows - for record in schedule_b['records']: + for record in schedule_b["records"]: row_index += 1 - worksheet.write(row_index, 0, record['fuel_type']) - worksheet.write(row_index, 1, record['fuel_class']) - worksheet.write(row_index, 2, record['provision_of_the_act']) - if record['fuel_code'] is not None: - fuel_code_id = record['fuel_code'] + worksheet.write(row_index, 0, record["fuel_type"]) + worksheet.write(row_index, 1, record["fuel_class"]) + worksheet.write(row_index, 2, record["provision_of_the_act"]) + if record["fuel_code"] is not None: + fuel_code_id = record["fuel_code"] fuel_code = FuelCode.objects.filter(id=fuel_code_id).first() if fuel_code: - fuel_code_string = fuel_code.fuel_code + \ - str(fuel_code.fuel_code_version) + '.' + \ - str(fuel_code.fuel_code_version_minor) + fuel_code_string = ( + fuel_code.fuel_code + + str(fuel_code.fuel_code_version) + + "." 
+ + str(fuel_code.fuel_code_version_minor) + ) worksheet.write(row_index, 3, fuel_code_string) else: - worksheet.write(row_index, 3, record['fuel_code']) + worksheet.write(row_index, 3, record["fuel_code"]) else: - if record['schedule_d_sheet_index'] is not None: - worksheet.write(row_index, 3, 'From Schedule D') - worksheet.write(row_index, 4, Decimal(record['quantity']), quantity_format) - worksheet.write(row_index, 5, record['unit_of_measure']) - worksheet.write(row_index, 6, Decimal(record['ci_limit'])) - worksheet.write(row_index, 7, Decimal(record['effective_carbon_intensity'])) - worksheet.write(row_index, 8, Decimal(record['energy_density'])) - worksheet.write(row_index, 9, Decimal(record['eer'])) - worksheet.write(row_index, 10, Decimal(record['energy_content'])) + if record["schedule_d_sheet_index"] is not None: + worksheet.write(row_index, 3, "From Schedule D") + worksheet.write(row_index, 4, Decimal(record["quantity"]), quantity_format) + worksheet.write(row_index, 5, record["unit_of_measure"]) + worksheet.write(row_index, 6, Decimal(record["ci_limit"])) + worksheet.write(row_index, 7, Decimal(record["effective_carbon_intensity"])) + worksheet.write(row_index, 8, Decimal(record["energy_density"])) + worksheet.write(row_index, 9, Decimal(record["eer"])) + worksheet.write(row_index, 10, Decimal(record["energy_content"])) if compliance_period < 2023: - if record['credits'] is not None: - worksheet.write(row_index, 11, Decimal(record['credits'])) - if record['debits'] is not None: - worksheet.write(row_index, 12, Decimal(record['debits'])) + if record["credits"] is not None: + worksheet.write(row_index, 11, Decimal(record["credits"])) + if record["debits"] is not None: + worksheet.write(row_index, 12, Decimal(record["debits"])) else: compliance_units = None - if record['credits'] is not None: - compliance_units = Decimal(record['credits']) - if compliance_units is None and record['debits'] is not None: - compliance_units = Decimal(record['debits']) * -1 + if record["credits"] is not None: + compliance_units = Decimal(record["credits"]) + if compliance_units is None and record["debits"] is not None: + compliance_units = Decimal(record["debits"]) * -1 worksheet.write(row_index, 11, compliance_units) def add_schedule_c(self, schedule_c): @@ -159,227 +192,265 @@ def add_schedule_c(self, schedule_c): row_index = 0 columns = [ - "Fuel Type", "Fuel Class", "Quantity", "Units", - "Expected Use", "Rationale" + "Fuel Type", + "Fuel Class", + "Quantity", + "Units", + "Expected Use", + "Rationale", ] - header_style = xlwt.easyxf('font: bold on') + header_style = xlwt.easyxf("font: bold on") # Build Column Headers for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - comment_format = xlwt.easyxf('align: wrap on, vert centre') - date_format = xlwt.easyxf(num_format_str='yyyy-mm-dd') - quantity_format = xlwt.easyxf(num_format_str='#,##0') - value_format = xlwt.easyxf(num_format_str='#,##0.00') + comment_format = xlwt.easyxf("align: wrap on, vert centre") + date_format = xlwt.easyxf(num_format_str="yyyy-mm-dd") + quantity_format = xlwt.easyxf(num_format_str="#,##0") + value_format = xlwt.easyxf(num_format_str="#,##0.00") if schedule_c is None: return # Build the rows - for record in schedule_c['records']: + for record in schedule_c["records"]: row_index += 1 - worksheet.write(row_index, 0, record['fuel_type']) - worksheet.write(row_index, 1, record['fuel_class']) - worksheet.write(row_index, 2, Decimal(record['quantity']), quantity_format) - 
worksheet.write(row_index, 3, record['unit_of_measure']) - worksheet.write(row_index, 4, record['expected_use']) - if record['rationale'] is not None: - worksheet.write(row_index, 5, record['rationale']) + worksheet.write(row_index, 0, record["fuel_type"]) + worksheet.write(row_index, 1, record["fuel_class"]) + worksheet.write(row_index, 2, Decimal(record["quantity"]), quantity_format) + worksheet.write(row_index, 3, record["unit_of_measure"]) + worksheet.write(row_index, 4, record["expected_use"]) + if record["rationale"] is not None: + worksheet.write(row_index, 5, record["rationale"]) def add_schedule_d(self, schedule_d): worksheet = self.workbook.add_sheet("Schedule D") row_index = 0 - header_style = xlwt.easyxf('font: bold on') - comment_format = xlwt.easyxf('align: wrap on, vert centre') - date_format = xlwt.easyxf(num_format_str='yyyy-mm-dd') - quantity_format = xlwt.easyxf(num_format_str='#,##0') - value_format = xlwt.easyxf(num_format_str='#,##0.00') + header_style = xlwt.easyxf("font: bold on") + comment_format = xlwt.easyxf("align: wrap on, vert centre") + date_format = xlwt.easyxf(num_format_str="yyyy-mm-dd") + quantity_format = xlwt.easyxf(num_format_str="#,##0") + value_format = xlwt.easyxf(num_format_str="#,##0.00") if schedule_d is None: return - for sheet in schedule_d['sheets']: + for sheet in schedule_d["sheets"]: if row_index != 0: row_index += 2 - columns = [ - "Fuel Type", "Feedstock", "Fuel Class" - ] + columns = ["Fuel Type", "Feedstock", "Fuel Class"] for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) row_index += 1 - worksheet.write(row_index, 0, sheet['fuel_type']) - worksheet.write(row_index, 1, sheet['feedstock']) - worksheet.write(row_index, 2, sheet['fuel_class']) + worksheet.write(row_index, 0, sheet["fuel_type"]) + worksheet.write(row_index, 1, sheet["feedstock"]) + worksheet.write(row_index, 2, sheet["fuel_class"]) row_index += 1 - worksheet.write(row_index, 0, 'Inputs', header_style) + worksheet.write(row_index, 0, "Inputs", header_style) row_index += 1 - columns = [ - "Worksheet", "Cell", "Value", "Units", "Description" - ] + columns = ["Worksheet", "Cell", "Value", "Units", "Description"] for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - for input in sheet['inputs']: + for input in sheet["inputs"]: row_index += 1 - worksheet.write(row_index, 0, input['worksheet_name']) - worksheet.write(row_index, 1, input['cell']) - worksheet.write(row_index, 2, input['value']) - worksheet.write(row_index, 3, input['units']) - worksheet.write(row_index, 4, input['description']) + worksheet.write(row_index, 0, input["worksheet_name"]) + worksheet.write(row_index, 1, input["cell"]) + worksheet.write(row_index, 2, input["value"]) + worksheet.write(row_index, 3, input["units"]) + worksheet.write(row_index, 4, input["description"]) row_index += 1 - worksheet.write(row_index, 0, 'Outputs', header_style) + worksheet.write(row_index, 0, "Outputs", header_style) row_index += 1 - columns = [ - "Output", "Value" - ] + columns = ["Output", "Value"] for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - for output in sheet['outputs']: + # Calculate total intensity for this sheet + total_intensity = 0 + for output in sheet["outputs"]: row_index += 1 - worksheet.write(row_index, 0, output['description']) - worksheet.write(row_index, 1, Decimal(output['intensity']), value_format) + worksheet.write(row_index, 0, 
output["description"]) + worksheet.write( + row_index, 1, Decimal(output["intensity"]), value_format + ) + total_intensity += Decimal(output["intensity"]) + + # Add the missing Total (gCO2e/GJ) and Carbon Intensity (gCO2e/MJ) lines + row_index += 1 + worksheet.write(row_index, 0, "Total (gCO₂e/GJ)", header_style) + worksheet.write(row_index, 1, total_intensity, value_format) + + row_index += 1 + worksheet.write(row_index, 0, "Carbon Intensity (gCO₂e/MJ)", header_style) + worksheet.write(row_index, 1, total_intensity / 1000, value_format) def add_schedule_summary(self, summary, compliance_period): worksheet = self.workbook.add_sheet("Summary") row_index = 0 - header_style = xlwt.easyxf('font:bold on') - quantity_format = xlwt.easyxf(num_format_str='#,##0') - value_format = xlwt.easyxf(num_format_str='#,##0.00') - currency_format = xlwt.easyxf(num_format_str='$#,##0.00') - description_format = xlwt.easyxf('align: wrap on') + header_style = xlwt.easyxf("font:bold on") + quantity_format = xlwt.easyxf(num_format_str="#,##0") + value_format = xlwt.easyxf(num_format_str="#,##0.00") + currency_format = xlwt.easyxf(num_format_str="$#,##0.00") + description_format = xlwt.easyxf("align: wrap on") if summary is None: return line_details = { - '1': 'Volume of gasoline class non-renewable fuel supplied', - '2': 'Volume of gasoline class renewable fuel supplied', - '3': 'Total volume of gasoline class fuel supplied (Line 1 + Line 2)', - '4': 'Volume of Part 2 gasoline class renewable fuel required (5% of Line 3)', - '5': 'Net volume of renewable fuel notionally transferred to and received from other suppliers as' - ' reported in Schedule A', - '6': 'Volume of renewable fuel retained (up to 5% of Line 4)', - '7': 'Volume of renewable fuel previously retained (from Line 6 of previous compliance period)', - '8': 'Volume of renewable obligation deferred (up to 5% of Line 4)', - '9': 'Volume of renewable obligation added (from Line 8 of previous compliance period)', - '10': 'Net volume of renewable Part 2 gasoline class fuel supplied (Total of Line 2 + Line 5 - Line 6 ' - '+ Line 7 + Line 8 - Line 9)', - '11': 'Gasoline class non-compliance payable (Line 4 - Line 10) x $0.30/L', - '12': 'Volume of diesel class non-renewable fuel supplied', - '13': 'Volume of diesel class renewable fuel supplied', - '14': 'Total volume of diesel class fuel supplied (Line 12 + Line 13)', - '15': 'Volume of Part 2 diesel class renewable fuel required (4% of Line 14)', - '16': 'Net volume of renewable fuel notionally transferred to and received from other suppliers' - ' as reported in Schedule A', - '17': 'Volume of renewable fuel retained (up to 5% of Line 15)', - '18': 'Volume of renewable fuel previously retained (from Line 17 of previous compliance report)', - '19': 'Volume of renewable obligation deferred (up to 5% of Line 15)', - '20': 'Volume of renewable obligation added (from Line 19 of previous compliance period)', - '21': 'Net volume of renewable Part 2 diesel class fuel supplied (Total of Line 13 + Line 16 - Line 17 +' - ' Line 18 + Line 19 - Line 20)', - '22': 'Diesel class non-compliance payable (Line 15 - Line 21) x $0.45/L', - '23': 'Total credits from fuel supplied (from Schedule B)', - '24': 'Total debits from fuel supplied (from Schedule B)', - '25': 'Net credit or debit balance for compliance period', - '26': 'Banked credits used to offset outstanding debits (if applicable)', - '26A': 'Banked credits used to offset outstanding debits - Previous Reports', - '26B': 'Banked credits used to offset outstanding 
debits - Supplemental Report', - '26C': 'Banked credits spent that will be returned due to debit decrease - Supplemental Report', - '27': 'Outstanding debit balance', - '28': 'Part 3 non-compliance penalty payable' + "1": "Volume of gasoline class non-renewable fuel supplied", + "2": "Volume of gasoline class renewable fuel supplied", + "3": "Total volume of gasoline class fuel supplied (Line 1 + Line 2)", + "4": "Volume of Part 2 gasoline class renewable fuel required (5% of Line 3)", + "5": "Net volume of renewable fuel notionally transferred to and received from other suppliers as" + " reported in Schedule A", + "6": "Volume of renewable fuel retained (up to 5% of Line 4)", + "7": "Volume of renewable fuel previously retained (from Line 6 of previous compliance period)", + "8": "Volume of renewable obligation deferred (up to 5% of Line 4)", + "9": "Volume of renewable obligation added (from Line 8 of previous compliance period)", + "10": "Net volume of renewable Part 2 gasoline class fuel supplied (Total of Line 2 + Line 5 - Line 6 " + "+ Line 7 + Line 8 - Line 9)", + "11": "Gasoline class non-compliance payable (Line 4 - Line 10) x $0.30/L", + "12": "Volume of diesel class non-renewable fuel supplied", + "13": "Volume of diesel class renewable fuel supplied", + "14": "Total volume of diesel class fuel supplied (Line 12 + Line 13)", + "15": "Volume of Part 2 diesel class renewable fuel required (4% of Line 14)", + "16": "Net volume of renewable fuel notionally transferred to and received from other suppliers" + " as reported in Schedule A", + "17": "Volume of renewable fuel retained (up to 5% of Line 15)", + "18": "Volume of renewable fuel previously retained (from Line 17 of previous compliance report)", + "19": "Volume of renewable obligation deferred (up to 5% of Line 15)", + "20": "Volume of renewable obligation added (from Line 19 of previous compliance period)", + "21": "Net volume of renewable Part 2 diesel class fuel supplied (Total of Line 13 + Line 16 - Line 17 +" + " Line 18 + Line 19 - Line 20)", + "22": "Diesel class non-compliance payable (Line 15 - Line 21) x $0.45/L", + "23": "Total credits from fuel supplied (from Schedule B)", + "24": "Total debits from fuel supplied (from Schedule B)", + "25": "Net credit or debit balance for compliance period", + "26": "Banked credits used to offset outstanding debits (if applicable)", + "26A": "Banked credits used to offset outstanding debits - Previous Reports", + "26B": "Banked credits used to offset outstanding debits - Supplemental Report", + "26C": "Banked credits spent that will be returned due to debit decrease - Supplemental Report", + "27": "Outstanding debit balance", + "28": "Part 3 non-compliance penalty payable", } if compliance_period >= 2023: - line_details['25'] = 'Net compliance unit balance for compliance period' - line_details['29A'] = 'Available compliance unit balance on March 31, ' + str(int(compliance_period) + 1) - line_details['29B'] = 'Compliance unit balance change from assessment' - line_details['29C'] = 'Available compliance unit balance after assessment on March 31, ' + str(int(compliance_period) + 1) - line_details['28'] = 'Non-compliance penalty payable (' + str(int(Decimal(summary['lines']['28'])/600)) + ' units * $600 CAD per unit)' + line_details["25"] = "Net compliance unit balance for compliance period" + line_details["29A"] = ( + "Available compliance unit balance on March 31, " + + str(int(compliance_period) + 1) + ) + line_details["29B"] = "Compliance unit balance change from assessment" + 
line_details["29C"] = ( + "Available compliance unit balance after assessment on March 31, " + + str(int(compliance_period) + 1) + ) + line_details["28"] = ( + "Non-compliance penalty payable (" + + str(int(Decimal(summary["lines"]["28"]) / 600)) + + " units * $600 CAD per unit)" + ) line_format = defaultdict(lambda: quantity_format) - line_format['11'] = currency_format - line_format['22'] = currency_format - line_format['28'] = currency_format + line_format["11"] = currency_format + line_format["22"] = currency_format + line_format["28"] = currency_format - columns = [ - "Part 2 Gasoline Class - 5% Renewable Requirement", - "Line", - "Litres" - ] + columns = ["Part 2 Gasoline Class - 5% Renewable Requirement", "Line", "Litres"] for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - for line in range(1, 11+1): + for line in range(1, 11 + 1): row_index += 1 worksheet.write(row_index, 0, line_details[str(line)], description_format) - worksheet.write(row_index, 1, 'Line {}'.format(line)) - worksheet.write(row_index, 2, Decimal(summary['lines'][str(line)]), line_format[str(line)]) + worksheet.write(row_index, 1, "Line {}".format(line)) + worksheet.write( + row_index, + 2, + Decimal(summary["lines"][str(line)]), + line_format[str(line)], + ) row_index += 1 - columns = [ - "Diesel Class - 4% Renewable Requirement", - "Line", - "Litres" - ] + columns = ["Diesel Class - 4% Renewable Requirement", "Line", "Litres"] for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) - for line in range(12, 22+1): + for line in range(12, 22 + 1): row_index += 1 worksheet.write(row_index, 0, line_details[str(line)], description_format) - worksheet.write(row_index, 1, 'Line {}'.format(line)) - worksheet.write(row_index, 2, Decimal(summary['lines'][str(line)]), line_format[str(line)]) + worksheet.write(row_index, 1, "Line {}".format(line)) + worksheet.write( + row_index, + 2, + Decimal(summary["lines"][str(line)]), + line_format[str(line)], + ) row_index += 1 columns = [ - "Part 3 - Low Carbon Fuel Requirement Summary" if compliance_period < 2023 else "Low Carbon Fuel Requirement", + ( + "Part 3 - Low Carbon Fuel Requirement Summary" + if compliance_period < 2023 + else "Low Carbon Fuel Requirement" + ), "Line", - "Value" + "Value", ] for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) if compliance_period >= 2023: - compliance_lines = ['25','29A','29B','28','29C'] + compliance_lines = ["25", "29A", "29B", "28", "29C"] for line in compliance_lines: - if line != '28' or (line == '28' and summary['lines'][line] > 0): + if line != "28" or (line == "28" and summary["lines"][line] > 0): row_index += 1 - worksheet.write(row_index, 0, line_details[line], description_format) + worksheet.write( + row_index, 0, line_details[line], description_format + ) if line.isdigit(): - worksheet.write(row_index, 1, f'Line {line}') - worksheet.write(row_index, 2, Decimal(summary['lines'][line]), line_format[str(line)]) + worksheet.write(row_index, 1, f"Line {line}") + worksheet.write( + row_index, + 2, + Decimal(summary["lines"][line]), + line_format[str(line)], + ) else: - for line in range(23, 28+1): + for line in range(23, 28 + 1): row_index += 1 - worksheet.write(row_index, 0, line_details[str(line)], description_format) - worksheet.write(row_index, 1, 'Line {}'.format(line)) - worksheet.write(row_index, 2, Decimal(summary['lines'][str(line)]), line_format[str(line)]) + worksheet.write( 
+ row_index, 0, line_details[str(line)], description_format + ) + worksheet.write(row_index, 1, "Line {}".format(line)) + worksheet.write( + row_index, + 2, + Decimal(summary["lines"][str(line)]), + line_format[str(line)], + ) row_index += 1 - columns = [ - "Non-compliance Penalty Payable", - "Line", - "Value" - ] + columns = ["Non-compliance Penalty Payable", "Line", "Value"] for col_index, value in enumerate(columns): worksheet.write(row_index, col_index, value, header_style) @@ -387,17 +458,27 @@ def add_schedule_summary(self, summary, compliance_period): for line in [11, 22, 28]: row_index += 1 worksheet.write(row_index, 0, line_details[str(line)], description_format) - worksheet.write(row_index, 1, 'Line {}'.format(line)) - worksheet.write(row_index, 2, Decimal(summary['lines'][str(line)]), line_format[str(line)]) + worksheet.write(row_index, 1, "Line {}".format(line)) + worksheet.write( + row_index, + 2, + Decimal(summary["lines"][str(line)]), + line_format[str(line)], + ) row_index += 1 - worksheet.write(row_index, 0, 'Total non-compliance penalty payable (Line 11 + Line 22 + Line 28)', - description_format) - worksheet.write(row_index, 2, Decimal(summary['total_payable']), currency_format) + worksheet.write( + row_index, + 0, + "Total non-compliance penalty payable (Line 11 + Line 22 + Line 28)", + description_format, + ) + worksheet.write( + row_index, 2, Decimal(summary["total_payable"]), currency_format + ) worksheet.col(0).width = 12000 - def save(self, response): """ Appends the workbook to the response for streaming diff --git a/docker-compose.yml b/docker-compose.yml index 29e9af7dd..bf4c491fb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -36,10 +36,14 @@ services: - FUEL_CODES_API_ENABLED=True - CREDIT_CALCULATION_API_ENABLED=True - COMPLIANCE_REPORTING_API_ENABLED=True + - REDIS_HOST=redis + - REDIS_PORT=6379 + - REDIS_PASSWORD=development_only env_file: - minio.env depends_on: - db + - redis build: dockerfile: Dockerfile-django context: ./backend @@ -48,6 +52,7 @@ services: "pip install -q -r requirements.txt && /wfi/wait-for-it.sh -t 14400 rabbit:5672 && /wfi/wait-for-it.sh -t 14400 db:5432 && + /wfi/wait-for-it.sh -t 14400 redis:6379 && /wfi/wait-for-it.sh -t 14400 minio:9000 && /wfi/wait-for-it.sh -t 14400 smtplogger:2500 && python3 manage.py makemigrations && @@ -110,7 +115,17 @@ services: dockerfile: Dockerfile-smtplogger ports: - 2500:2500 + redis: + platform: linux/amd64 + image: redis:7-alpine + container_name: tfrs_redis + ports: + - 6379:6379 + command: redis-server --requirepass development_only + volumes: + - redis_data:/data volumes: node_modules: postgres_data: minio_data: + redis_data:
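
A note on the Schedule D totals introduced in PATCH 4/4: for each Schedule D sheet, the export now accumulates the intensity of every output row and appends two summary rows, a total in gCO2e/GJ and a carbon intensity in gCO2e/MJ (the total divided by 1000, since 1 GJ = 1000 MJ). A minimal standalone sketch of the same arithmetic; the function name and sample data are illustrative, not part of the codebase:

    from decimal import Decimal

    def schedule_d_totals(outputs):
        # Sum each output's intensity; values arrive as strings, so convert
        # through Decimal, matching how the spreadsheet code handles them.
        total_gj = sum((Decimal(o["intensity"]) for o in outputs), Decimal(0))
        # 1 GJ = 1000 MJ, so dividing by 1000 converts gCO2e/GJ to gCO2e/MJ.
        ci_mj = total_gj / 1000
        return total_gj, ci_mj

    print(schedule_d_totals([{"intensity": "4200.5"}, {"intensity": "1799.5"}]))
    # -> (Decimal('6000.0'), Decimal('6.0'))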
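
The reworked summary sheet in the same patch also derives the Line 28 label from the dollar amount at a fixed rate of $600 CAD per compliance unit. The string construction, isolated for clarity (the helper name and sample amount are illustrative only):

    from decimal import Decimal

    def penalty_label(line_28_payable):
        # Line 28 holds the penalty in dollars; the unit count is that
        # amount divided by the $600-per-unit rate, truncated to an int,
        # mirroring the expression in add_schedule_summary.
        units = int(Decimal(line_28_payable) / 600)
        return f"Non-compliance penalty payable ({units} units * $600 CAD per unit)"

    print(penalty_label("1200000"))
    # -> Non-compliance penalty payable (2000 units * $600 CAD per unit)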
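
On the docker-compose side, PATCH 4/4 adds a password-protected Redis 7 service, injects REDIS_HOST, REDIS_PORT, and REDIS_PASSWORD into the backend container, and gates backend startup on redis:6379 via wait-for-it.sh. The diff does not show how the Django backend consumes these variables; a plausible sketch, assuming the standard redis-py client (the helper name and fallback values are assumptions, not code from this repository):

    import os

    import redis  # assumes redis-py is present in backend/requirements.txt

    def make_redis_client():
        # Reads the same variables the compose file now injects; the
        # fallbacks mirror the compose defaults and are intended for
        # local development only, never production.
        return redis.Redis(
            host=os.getenv("REDIS_HOST", "redis"),
            port=int(os.getenv("REDIS_PORT", "6379")),
            password=os.getenv("REDIS_PASSWORD", "development_only"),
        )

    # Once wait-for-it.sh has confirmed redis:6379 is accepting
    # connections, make_redis_client().ping() should return True.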