diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..c08e520886 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,6 @@ +__pycache__ +*.pyc +*.pyo +*.pyd +.env +venv/ \ No newline at end of file diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000..ddfd6949e4 --- /dev/null +++ b/.env.example @@ -0,0 +1,12 @@ +# Env var +ENVIRONMENT= + +# Capella vars +DB_CONN_STR=couchbases://.cloud.couchbase.com +DB_USERNAME= +DB_PASSWORD= + +CB_CLUSTER= + +# AWS vars +AWS_REGION= diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..3f75f1cf17 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +--- +version: 2 +updates: + - package-ecosystem: pip + directory: ./ + schedule: + interval: weekly diff --git a/.github/workflows/CD.yml b/.github/workflows/CD.yml new file mode 100644 index 0000000000..0ee8213cdc --- /dev/null +++ b/.github/workflows/CD.yml @@ -0,0 +1,314 @@ +name: Continuous Deployment Pipeline + +on: + workflow_dispatch: + +# concurrency required to avoid terraform lock contention during ECR provisioning +concurrency: cd-${{ github.repository }}-pipeline + +jobs: + provision-capella: + runs-on: ubuntu-22.04 + + steps: + - name: Checkout codebase + uses: actions/checkout@v4.1.7 + + - name: Setup CBShell + uses: ldoguin/setup-cbsh@develop + with: + version: 'v1.0.0' + enable-plugins: true + config: ${{ secrets.CBSHELL_CONFIG }} + + - name: Print Couchbase Environment + run: | + echo "Printing Couchbase environment variables..." + cb-env + echo "Environment variables printed successfully." + shell: cbsh --script {0} + + - name: Ensure conduit bucket + run: | + echo "Attempting to create 'conduit' bucket..." + if cbsh --command "buckets create conduit 512"; then + echo "'conduit' bucket created successfully." + else + echo "Failed to create 'conduit' bucket or it already exists." + fi + shell: bash + + - name: Set 'conduit' bucket as active bucket + run: | + echo "Setting 'conduit' as the active bucket..." + cb-env bucket conduit + echo "'conduit' bucket set as active successfully." + shell: cbsh --script {0} + + - name: Ensure ${{ vars.ENVIRONMENT }} scope + run: | + echo "Attempting to create '${{ vars.ENVIRONMENT }}' scope..." + if cbsh --command "scopes create --clusters ${{ vars.CB_CLUSTER }} --bucket conduit ${{ vars.ENVIRONMENT }}"; then + echo "'${{ vars.ENVIRONMENT }}' scope created successfully." + else + echo "Failed to create '${{ vars.ENVIRONMENT }}' scope or it already exists." + fi + shell: bash + + - name: Set ${{ vars.ENVIRONMENT }} scope as active scope + run: | + echo "Setting '${{ vars.ENVIRONMENT }}' as the active scope..." + cb-env scope ${{ vars.ENVIRONMENT }} + echo "'${{ vars.ENVIRONMENT }}' scope set as active successfully." + shell: cbsh --script {0} + + - name: Ensure collections and primary indices + run: | + echo "Starting collection creation & indexing process..." + + create_collection() { + local COLLECTION=$1 + echo "Attempting to create collection: $COLLECTION" + + if cbsh --command "collections create --clusters \"$CB_CLUSTER\" --bucket conduit --scope \"$ENVIRONMENT\" $COLLECTION"; then + echo "Creating primary index for $COLLECTION..." + if cbsh --command "CREATE PRIMARY INDEX ON conduit.$ENVIRONMENT.$COLLECTION"; then + echo "Primary index for $COLLECTION created successfully." + else + echo "Failed to create primary index for $COLLECTION." + fi + echo "$COLLECTION collection created successfully." 
+            else
+              echo "Failed to create $COLLECTION collection or it already exists."
+            fi
+          }
+
+          for collection in article comment user; do
+            create_collection "$collection"
+          done
+
+          echo "Collection creation and indexing process completed."
+        shell: bash
+        env:
+          CB_CLUSTER: ${{ vars.CB_CLUSTER }}
+          ENVIRONMENT: ${{ vars.ENVIRONMENT }}
+
+  provision-aws:
+    needs: provision-capella
+    runs-on: ubuntu-22.04
+    permissions:
+      id-token: write
+      contents: read
+    env:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+      AWS_REGION: ap-southeast-2
+
+    steps:
+      - name: Checkout codebase
+        uses: actions/checkout@v4.1.7
+
+      - name: Install less pager
+        run: |
+          echo "Installing less pager..."
+          sudo apt-get update && sudo apt-get install -y less
+          echo "Less pager installed successfully."
+
+      - name: Install AWS CLI
+        run: |
+          echo "Installing AWS CLI..."
+          sudo apt-get update && sudo apt-get install -y unzip
+          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
+          unzip awscliv2.zip
+          sudo ./aws/install
+          aws --version
+          echo "AWS CLI installed successfully."
+        shell: bash
+
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-region: ${{ env.AWS_REGION }}
+
+      - name: Setup Terraform
+        uses: hashicorp/setup-terraform@v2
+        with:
+          terraform_wrapper: false
+
+      - name: Provision TF Backend S3 Bucket
+        run: |
+          echo "Provisioning Terraform backend S3 bucket..."
+          set -eux
+          BUCKET_NAME=conduit-${{ vars.ENVIRONMENT }}-tfbackend-bucket-${{ secrets.AWS_ACCOUNT_ID }}
+          REGION=${{ vars.AWS_REGION }}
+          if ! aws s3api head-bucket --bucket "$BUCKET_NAME" 2>/dev/null; then
+            aws s3api create-bucket --bucket "$BUCKET_NAME" --region "$REGION" --create-bucket-configuration LocationConstraint="$REGION"
+            echo "Bucket $BUCKET_NAME created in $REGION."
+          else
+            echo "Bucket $BUCKET_NAME already exists."
+          fi
+        shell: bash
+
+      - name: Provision TF State Lock DynamoDB Table
+        run: |
+          echo "Provisioning Terraform state lock DynamoDB table..."
+          set -eux
+          TABLE_NAME=conduit-${{ vars.ENVIRONMENT }}-tfbackend-table
+          REGION=${{ vars.AWS_REGION }}
+          if ! aws dynamodb describe-table --table-name "$TABLE_NAME" --region "$REGION" &>/dev/null; then
+            aws dynamodb create-table \
+              --table-name "$TABLE_NAME" \
+              --attribute-definitions AttributeName=LockID,AttributeType=S \
+              --key-schema AttributeName=LockID,KeyType=HASH \
+              --billing-mode PAY_PER_REQUEST \
+              --region "$REGION"
+            echo "DynamoDB table $TABLE_NAME created in $REGION."
+          else
+            echo "DynamoDB table $TABLE_NAME already exists."
+          fi
+        shell: bash
+
+      - name: TF init
+        run: |
+          echo "Initializing Terraform..."
+          set -eux
+          terraform init -upgrade -reconfigure \
+            -backend-config='skip_metadata_api_check=true' \
+            -backend-config='skip_region_validation=true' \
+            -backend-config='skip_credentials_validation=true' \
+            -backend-config='region=${{ vars.AWS_REGION }}' \
+            -backend-config='bucket=conduit-${{ vars.ENVIRONMENT }}-tfbackend-bucket-${{ secrets.AWS_ACCOUNT_ID }}' \
+            -backend-config='key=tf-${{ vars.ENVIRONMENT }}.tfstate' \
+            -backend-config='dynamodb_table=conduit-${{ vars.ENVIRONMENT }}-tfbackend-table'
+          echo "Terraform initialized successfully."
+        shell: bash
+        working-directory: ./infrastructure
+
+      - name: TF apply
+        run: |
+          echo "Applying Terraform configuration..."
+          set -eux
+          terraform apply \
+            -var 'project_prefix=conduit-${{ vars.ENVIRONMENT }}' \
+            -var 'task_cpu=256' \
+            -var 'task_memory=512' \
+            -var 'lifecycle_policy=./policy.json' \
+            -var 'iam_role=${{ secrets.AWS_BACKEND_ACCESS_ROLE }}' \
+            -var 'aws_account_id=${{ secrets.AWS_ACCOUNT_ID }}' \
+            -auto-approve
+          echo "Terraform configuration applied successfully."
+        working-directory: ./infrastructure
+        shell: bash
+
+  push-images-to-aws:
+    needs: provision-aws
+    runs-on: ubuntu-22.04
+    strategy:
+      matrix:
+        image_type: [backend, frontend, cypress]
+    env:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+      AWS_REGION: ap-southeast-2
+
+    steps:
+      - name: Checkout codebase
+        uses: actions/checkout@v4.1.7
+
+      - name: Login to Amazon ECR
+        id: login-ecr
+        uses: aws-actions/amazon-ecr-login@v1
+        with:
+          registries: ${{ secrets.AWS_ACCOUNT_ID }}
+          mask-password: "true"
+
+      - name: Build, tag, and push ${{ matrix.image_type }} image
+        run: |
+          echo "Preparing to build and push ${{ matrix.image_type }} image..."
+
+          case "${{ matrix.image_type }}" in
+            backend)
+              BUILD_PATH="./api"
+              DOCKERFILE="./api/Dockerfile.api"
+              ;;
+            frontend)
+              BUILD_PATH="./angular-conduit-signals"
+              DOCKERFILE="./angular-conduit-signals/Dockerfile.frontend"
+              ;;
+            cypress)
+              BUILD_PATH="./angular-conduit-signals/cypress"
+              DOCKERFILE="./angular-conduit-signals/cypress/Dockerfile.cypress"
+              ;;
+          esac
+
+          echo "Building Docker image from $BUILD_PATH with Dockerfile $DOCKERFILE..."
+          docker build "$BUILD_PATH" -f "$DOCKERFILE" -t "$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG"
+
+          echo "Pushing Docker image to $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG..."
+          docker push "$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG"
+          echo "Docker image $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG pushed successfully."
+        shell: bash
+        env:
+          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
+          ECR_REPOSITORY: "conduit-${{ vars.ENVIRONMENT }}-${{ matrix.image_type }}-repo"
+          IMAGE_TAG: "conduit-${{ vars.ENVIRONMENT }}-${{ matrix.image_type }}-image"
+
+  update-ecs-services:
+    needs: push-images-to-aws
+    runs-on: ubuntu-22.04
+    env:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+      AWS_REGION: ap-southeast-2
+
+    steps:
+
+      - name: Install less pager
+        run: |
+          echo "Installing less pager..."
+          sudo apt-get update && sudo apt-get install -y less
+          echo "Less pager installed successfully."
+
+      - name: Install AWS CLI
+        run: |
+          echo "Installing AWS CLI..."
+          sudo apt-get update && sudo apt-get install -y unzip
+          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
+          unzip awscliv2.zip
+          sudo ./aws/install
+          aws --version
+          echo "AWS CLI installed successfully."
+        shell: bash
+
+      - name: Update ECS Service with new backend image
+        id: update-ecs-backend
+        run: |
+          echo "Updating ECS backend service with the new image..."
+          aws ecs update-service \
+            --cluster "conduit-${{ vars.ENVIRONMENT }}-ecs-cluster" \
+            --service "conduit-${{ vars.ENVIRONMENT }}-backend-service" \
+            --force-new-deployment
+          echo "ECS backend service updated successfully."
+        shell: bash
+
+      - name: Update ECS Service with new frontend image
+        id: update-ecs-frontend
+        run: |
+          echo "Updating ECS frontend service with the new image..."
+ aws ecs update-service \ + --cluster "conduit-${{ vars.ENVIRONMENT }}-ecs-cluster" \ + --service "conduit-${{ vars.ENVIRONMENT }}-frontend-service" \ + --force-new-deployment + echo "ECS frontend service updated successfully." + shell: bash + + - name: Update ECS Service with new cypress image + id: update-ecs-cypress + run: | + echo "Updating ECS cypress service with the new image..." + aws ecs update-service \ + --cluster "conduit-${{ vars.ENVIRONMENT }}-ecs-cluster" \ + --service "conduit-${{ vars.ENVIRONMENT }}-cypress-service" \ + --force-new-deployment + echo "ECS cypress service updated successfully." + shell: bash diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml new file mode 100644 index 0000000000..56a72417c2 --- /dev/null +++ b/.github/workflows/CI.yml @@ -0,0 +1,94 @@ +name: CI + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + workflow_dispatch: + +jobs: + unit_tests: + name: Run pytest unit tests + runs-on: ubuntu-latest + environment: development + env: + DB_CONN_STR: ${{ vars.DB_CONN_STR }} + DB_BUCKET_NAME: ${{ vars.DB_BUCKET_NAME }} + DB_SCOPE_NAME: ${{ vars.DB_SCOPE_NAME }} + DB_USERNAME: ${{ vars.DB_USERNAME }} + DB_PASSWORD: ${{ secrets.DB_PASSWORD }} + JWT_SECRET: ${{ secrets.JWT_SECRET }} + + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v1 + with: + submodules: recursive + + - name: Set up Python and requirements + run: | + sudo apt -y update + sudo apt -y install python3-pip python3-testresources + python -m pip install -r api/requirements.txt + + - name: Run tests + run: ./scripts/pytest-test.sh + + + api_tests: + name: Run realworld api tests + runs-on: ubuntu-latest + environment: development + env: + DB_CONN_STR: ${{ vars.DB_CONN_STR }} + DB_BUCKET_NAME: ${{ vars.DB_BUCKET_NAME }} + DB_SCOPE_NAME: ${{ vars.DB_SCOPE_NAME }} + DB_USERNAME: ${{ vars.DB_USERNAME }} + DB_PASSWORD: ${{ secrets.DB_PASSWORD }} + JWT_SECRET: ${{ secrets.JWT_SECRET }} + + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v1 + with: + submodules: recursive + + - name: Set up Python and requirements + run: | + sudo apt -y update + sudo apt -y install python3-pip python3-testresources + python -m pip install -r api/requirements.txt + + - name: Start the FastAPI server + run: | + ./scripts/start-api.sh & + # Wait for the server + while ! curl "http://localhost:8000/health" > /dev/null 2>&1 + do + sleep 1; + done + echo "Server ready." 
+
+      - name: Run realworld backend tests
+        run: ./scripts/realworld-test.sh
+        env:
+          APIURL: http://localhost:8000
+
+
+  super_linter:
+    name: Lint code base
+    runs-on: ubuntu-latest
+    environment: development
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Run Super-Linter
+        uses: github/super-linter@v4
+        env:
+          DEFAULT_BRANCH: master
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/TD.yml b/.github/workflows/TD.yml
new file mode 100644
index 0000000000..a56722fa60
--- /dev/null
+++ b/.github/workflows/TD.yml
@@ -0,0 +1,171 @@
+name: Teardown Pipeline
+
+on:
+  workflow_dispatch:
+
+# concurrency required to avoid terraform lock contention during ECR deprovisioning
+concurrency: cd-${{ github.repository }}-teardown
+
+jobs:
+  aws-teardown:
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write
+      contents: read
+    env:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+      AWS_REGION: ap-southeast-2
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Install less pager
+        run: |
+          echo "Installing less pager..."
+          sudo apt-get update && sudo apt-get install -y less
+          echo "Less pager installed successfully."
+
+      - name: Install AWS CLI
+        run: |
+          echo "Installing AWS CLI..."
+          sudo apt-get update && sudo apt-get install -y unzip
+          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
+          unzip awscliv2.zip
+          sudo ./aws/install
+          aws --version
+          echo "AWS CLI installed successfully."
+        shell: bash
+
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v2
+        with:
+          role-to-assume: ${{ secrets.AWS_GITHUB_ACCESS_ROLE }}
+          aws-region: ${{ vars.AWS_REGION }}
+          role-session-name: GitHub-OIDC-Terraform
+
+      - name: Setup Terraform
+        uses: hashicorp/setup-terraform@v2
+        with:
+          terraform_wrapper: false
+
+      - name: Provision TF Backend S3 Bucket
+        run: |
+          echo "Provisioning Terraform backend S3 bucket..."
+          set -eux
+          BUCKET_NAME=conduit-${{ vars.ENVIRONMENT }}-tfbackend-bucket-${{ secrets.AWS_ACCOUNT_ID }}
+          REGION=${{ vars.AWS_REGION }}
+          if ! aws s3api head-bucket --bucket "$BUCKET_NAME" 2>/dev/null; then
+            aws s3api create-bucket --bucket "$BUCKET_NAME" --region "$REGION" --create-bucket-configuration LocationConstraint="$REGION"
+            echo "Bucket $BUCKET_NAME created in $REGION."
+          else
+            echo "Bucket $BUCKET_NAME already exists."
+          fi
+        shell: bash
+
+      - name: Provision TF State Lock DynamoDB Table
+        run: |
+          echo "Provisioning Terraform state lock DynamoDB table..."
+          set -eux
+          TABLE_NAME=conduit-${{ vars.ENVIRONMENT }}-tfbackend-table
+          REGION=${{ vars.AWS_REGION }}
+          if ! aws dynamodb describe-table --table-name "$TABLE_NAME" --region "$REGION" &>/dev/null; then
+            aws dynamodb create-table \
+              --table-name "$TABLE_NAME" \
+              --attribute-definitions AttributeName=LockID,AttributeType=S \
+              --key-schema AttributeName=LockID,KeyType=HASH \
+              --billing-mode PAY_PER_REQUEST \
+              --region "$REGION"
+            echo "DynamoDB table $TABLE_NAME created in $REGION."
+          else
+            echo "DynamoDB table $TABLE_NAME already exists."
+          fi
+        shell: bash
+
+      - name: TF init
+        run: |
+          echo "Initializing Terraform..."
+ set -eux + terraform init -upgrade -reconfigure \ + -backend-config='skip_metadata_api_check=true' \ + -backend-config='skip_region_validation=true' \ + -backend-config='skip_credentials_validation=true' \ + -backend-config='region=${{ vars.AWS_REGION }}' \ + -backend-config='bucket=conduit-${{ vars.ENVIRONMENT }}-tfbackend-bucket-${{ secrets.AWS_ACCOUNT_ID }}' \ + -backend-config='key=tf-${{ vars.ENVIRONMENT }}.tfstate' \ + -backend-config='dynamodb_table=conduit-${{ vars.ENVIRONMENT }}-tfbackend-table' + echo "Terraform initialized successfully." + shell: bash + working-directory: ./infrastructure + + - name: Delete images in ECR repositories + shell: bash + run: | + set -eux + repos=( + "conduit-${{ vars.ENVIRONMENT }}-backend-repo" + "conduit-${{ vars.ENVIRONMENT }}-frontend-repo" + "conduit-${{ vars.ENVIRONMENT }}-cypress-repo" + ) + for repo in "${repos[@]}"; do + if aws ecr describe-repositories --repository-names "$repo" &>/dev/null; then + images=$(aws ecr list-images --repository-name "$repo" --query 'imageIds[*]' --output json) + if [ "$images" != "[]" ]; then + aws ecr batch-delete-image --repository-name "$repo" --image-ids "$images" + echo "Deleted images from $repo" + else + echo "No images to delete in $repo" + fi + else + echo "Repository $repo does not exist" + fi + done + + - name: TF destroy + shell: bash + run: | + set -eux + terraform destroy \ + -var 'project_prefix=conduit-${{ vars.ENVIRONMENT }}' \ + -var 'lifecycle_policy=./policy.json' \ + -var 'iam_role=${{ secrets.AWS_BACKEND_ACCESS_ROLE }}' \ + -var 'aws_account_id=${{ secrets.AWS_ACCOUNT_ID }}' \ + -auto-approve + working-directory: ./infrastructure + + capella-teardown: + needs: aws-teardown + runs-on: ubuntu-22.04 + + steps: + - name: Checkout codebase + uses: actions/checkout@v4.1.7 + + - name: Setup CBShell + uses: ldoguin/setup-cbsh@develop + with: + version: 'v1.0.0' + enable-plugins: true + config: ${{ secrets.CBSHELL_CONFIG }} + + - name: Print Couchbase Environment + run: | + echo "Printing Couchbase environment variables..." + cb-env + echo "Environment variables printed successfully." + shell: cbsh --script {0} + + - name: Delete ${{ vars.ENVIRONMENT }} scope + run: | + echo "Attempting to delete '${{ vars.ENVIRONMENT }}' scope..." + if cbsh --command "scopes drop --clusters ${{ vars.CB_CLUSTER }} --bucket conduit ${{ vars.ENVIRONMENT }}"; then + echo "'${{ vars.ENVIRONMENT }}' scope deleted successfully." + else + echo "Failed to delete '${{ vars.ENVIRONMENT }}' scope or it does not exist." + fi + shell: bash + env: + CB_CLUSTER: ${{ vars.CB_CLUSTER }} + ENVIRONMENT: ${{ vars.ENVIRONMENT }} + \ No newline at end of file diff --git a/.gitignore b/.gitignore index 19f908da89..ffb48f8623 100644 --- a/.gitignore +++ b/.gitignore @@ -1,33 +1,147 @@ -# See http://help.github.com/ignore-files/ for more about ignoring files. 
- -# dependencies -/node_modules -/dist - -# IDEs and editors -/.idea -.project -.classpath -.c9/ -*.launch -.settings/ -*.sublime-workspace - -# IDE - VSCode -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json -.history/* - - -#System Files +# Ignore all .env files except .env.example +*.env +.env.* +!.env.example + +# Ignore all .secrets files except .secrets.example +*.secrets +.secrets.* +!.secrets.example + +# Cypress videos +*.mp4 + +# cbsh contents +api/cbsh/* +!api/cbsh/.gitignore + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# macOS specific .DS_Store -Thumbs.db +# Terraform +.terraform.lock.hcl +main.tfplan + +**/.terraform/* + +*.tfstate +*.tfstate.* + +crash.log +crash.*.log + +*.tfvars +*.tfvars.json + +override.tf +override.tf.json +*_override.tf +*_override.tf.json -#log files -npm-debug.log* -yarn-debug.log* -yarn-error.log* +.terraformrc +terraform.rc diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..3183d6f48f --- /dev/null +++ b/.gitmodules @@ -0,0 +1,6 @@ +[submodule "realworld"] + path = realworld + url = https://github.com/gothinkster/realworld +[submodule "angular-conduit-signals"] + path = angular-conduit-signals + url = https://github.com/saschavonpapenCB/angular-conduit-signals diff --git a/.secrets.example b/.secrets.example new file mode 100644 index 0000000000..66be20b073 --- /dev/null +++ b/.secrets.example @@ -0,0 +1,9 @@ +# AWS secrets +AWS_ACCESS_KEY= +AWS_SECRET_ACCESS_KEY= + +AWS_ACCOUNT_ID= +AWS_BACKEND_ACCESS_ROLE=arn:aws:iam:::role/ + +# CBShell secret +CBSHELL_CONFIG='' diff --git a/README.md b/README.md index 7e107ce07e..33b57eeae4 100644 --- a/README.md +++ b/README.md @@ -1,23 +1,124 @@ # ![RealWorld Example App](logo.png) -> ### [YOUR_FRAMEWORK] codebase containing real world examples (CRUD, auth, advanced patterns, etc) that adheres to the [RealWorld](https://github.com/gothinkster/realworld) spec and API. +> ### [FastAPI](https://github.com/tiangolo/fastapi) + Couchbase codebase containing real world examples (CRUD, auth, advanced patterns, etc) that adheres to the [RealWorld](https://github.com/gothinkster/realworld) spec and API. 
### [Demo](https://demo.realworld.io/)    [RealWorld](https://github.com/gothinkster/realworld)

-This codebase was created to demonstrate a fully fledged fullstack application built with **[YOUR_FRAMEWORK]** including CRUD operations, authentication, routing, pagination, and more.
+This codebase was created to demonstrate a fully fledged fullstack application built with [FastAPI](https://github.com/tiangolo/fastapi) + Couchbase including CRUD operations, authentication, routing, pagination, and more.

-We've gone to great lengths to adhere to the **[YOUR_FRAMEWORK]** community styleguides & best practices.
-For more information on how to this works with other frontends/backends, head over to the [RealWorld](https://github.com/gothinkster/realworld) repo.
+For more information on how this works with other frontends/backends, head over to the [RealWorld](https://github.com/gothinkster/realworld) repository.

-# How it works
+## Prerequisites

-> Describe the general architecture of your app here
+To run this prebuilt project, you will need:

-# Getting started
+- [Couchbase Capella](https://www.couchbase.com/products/capella/) cluster with a bucket and scope loaded.
+- [Python](https://www.python.org/downloads/) 3.9 or higher installed
+  - Ensure that the Python version is [compatible](https://docs.couchbase.com/python-sdk/current/project-docs/compatibility.html#python-version-compat) with the Couchbase SDK.
+- Using the Capella UI, create the following collections in the loaded scope, and using the Query data tool, create primary indices for both:
+  - `article`
+  - `client` (`client` because `user` is a reserved Couchbase keyword)
+```
+CREATE PRIMARY INDEX ON `default`:``.``.``;
+```

-> npm install, npm start, etc.
+# Setting Up the Application

+### Cloning the Repository
+
+```shell
+git clone https://github.com/couchbase-examples/python-quickstart-fastapi.git
+```
+
+### Install Dependencies
+
+The dependencies for the application are specified in the `requirements.txt` file in the root folder. Dependencies can be installed through `pip`, the default package manager for Python.
+```
+./scripts/install-dependencies.sh
+```
+> Note: If your Python is not symbolically linked to python3, you need to run all commands using `python3` instead of `python`.
+
+### Setup Database Configuration
+
+To know more about connecting to your Capella cluster, please follow the [instructions](https://docs.couchbase.com/cloud/get-started/connect.html).
+
+Specifically, you need to do the following:
+
+- Create the [database credentials](https://docs.couchbase.com/cloud/clusters/manage-database-users.html) with read and write access to the bucket used by the application.
+- [Allow access](https://docs.couchbase.com/cloud/clusters/allow-ip-address.html) to the cluster from the IP on which the application is running.
+
+All configuration for communication with the database is read from the environment variables. We have provided a convenience feature to read the environment variables from a local file, `.env`, in the source folder.
+
+Create a copy of `.env.example` in the app folder, rename it to `.env`, and add the values for the Couchbase connection.
+
+> Note: Files starting with `.` may be hidden by the file manager on Unix-based systems, including GNU/Linux and macOS.
+
+```sh
+DB_CONN_STR=
+DB_USERNAME=
+DB_PASSWORD=
+DB_BUCKET_NAME=
+DB_SCOPE_NAME=
+```
+
+> Note: The connection string must include the `couchbases://` or `couchbase://` scheme.
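+
+For reference, a filled-in `.env` might look like the sketch below. All values are illustrative placeholders (not real credentials or a real endpoint); substitute your own cluster's connection string, database user, and the bucket and scope you created:
+
+```sh
+DB_CONN_STR=couchbases://cb.example1234.cloud.couchbase.com
+DB_USERNAME=conduit_api_user
+DB_PASSWORD=<your-database-password>
+DB_BUCKET_NAME=conduit
+DB_SCOPE_NAME=development
+```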
+
+
+### Setup JWT Token Configuration
+
+Create a random secret key that will be used to sign the JWT tokens.
+
+To generate a secure random secret key, use the command:
+
+```
+./scripts/generate-secret-key.sh
+```
+
+Copy the output into the `JWT_SECRET` environment variable in the `.env` file.
+
+> Note: The `CORS_ALLOWED_ORIGINS`, `CORS_ALLOWED_METHODS` and `CORS_ALLOWED_HEADERS` environment variables can be left blank unless specific CORS options are required.
+
+
+## Running The API
+
+### Directly on Machine
+
+At this point, we have installed the dependencies, set up the cluster and configured the API with the credentials. The API is now ready and you can run it.
+
+```
+./scripts/start-api.sh
+```
+
+### Using Docker
+
+- Build the Docker image
+
+```sh
+./scripts/build-container.sh
+```
+
+- Run the Docker image
+
+```sh
+./scripts/run-container.sh
+```
+
+> Note: The `.env` file has the connection information for your Capella cluster. These values are passed to the Docker container as environment variables.
+
+
+## Running Tests
+
+To run the RealWorld API tests, use the following command:
+
+```
+./scripts/realworld-test.sh
+```
+
+To run the pytest unit tests, use the following command:
+
+```
+./scripts/pytest-test.sh
+```
diff --git a/angular-conduit-signals b/angular-conduit-signals
new file mode 160000
index 0000000000..bd5dd5e9ae
--- /dev/null
+++ b/angular-conduit-signals
@@ -0,0 +1 @@
+Subproject commit bd5dd5e9ae251d999d50b2bfed878701f2dc013f
diff --git a/api/.env.example b/api/.env.example
new file mode 100644
index 0000000000..ae51531e44
--- /dev/null
+++ b/api/.env.example
@@ -0,0 +1,15 @@
+# Capella vars
+DB_CONN_STR=couchbases://.cloud.couchbase.com
+DB_PASSWORD=
+DB_USERNAME=
+
+DB_BUCKET_NAME=
+DB_SCOPE_NAME=
+
+# JWT secret var
+JWT_SECRET=
+
+# CORS vars
+CORS_ALLOWED_ORIGINS=http://127.0.0.1,http://localhost
+CORS_ALLOWED_METHODS=GET,POST,PUT,DELETE,OPTIONS
+CORS_ALLOWED_HEADERS=Content-Type,Authorization
diff --git a/api/Dockerfile.api b/api/Dockerfile.api
new file mode 100644
index 0000000000..f06fb9e867
--- /dev/null
+++ b/api/Dockerfile.api
@@ -0,0 +1,10 @@
+FROM python:3.9-slim-bullseye
+
+WORKDIR /code
+
+COPY ./requirements.txt /code/requirements.txt
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY . /code/api
+
+CMD ["uvicorn", "api.main:api", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/api/__init__.py b/api/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/__init__.py b/api/core/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/article.py b/api/core/article.py
new file mode 100644
index 0000000000..ed287f4d21
--- /dev/null
+++ b/api/core/article.py
@@ -0,0 +1,25 @@
+from fastapi import HTTPException
+
+from ..core.exceptions import ArticleNotFoundException
+from ..models.article import ArticleModel
+
+
+async def query_articles_by_slug(slug: str, db) -> ArticleModel:
+    """Queries db for article instance by slug and returns article instance."""
+    query = """
+        SELECT article.*
+        FROM article
+        WHERE article.slug=$slug
+        ORDER BY article.createdAt;
+    """
+    try:
+        query_result = db.query(query, slug=slug)
+        article_data = [r for r in query_result]
+        if not article_data:
+            raise ArticleNotFoundException()
+        else:
+            return ArticleModel(**article_data[0])
+    except HTTPException:
+        # Re-raise HTTP errors (e.g. the 404 above) instead of masking them as 500s.
+        raise
+    except TimeoutError:
+        raise HTTPException(status_code=408, detail="Request timeout")
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Unexpected error: {e}")
diff --git a/api/core/exceptions.py b/api/core/exceptions.py
new file mode 100644
index 0000000000..c2b3605d47
--- /dev/null
+++ b/api/core/exceptions.py
@@ -0,0 +1,63 @@
+from fastapi import HTTPException, status
+
+
+class EmptyEnvironmentVariableError(HTTPException):
+    def __init__(self, var_name: str) -> None:
+        super().__init__(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Environment variable '{var_name}' cannot be an empty string",
+        )
+
+
+class UserNotFoundException(HTTPException):
+    def __init__(self) -> None:
+        super().__init__(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
+
+
+class ArticleNotFoundException(HTTPException):
+    def __init__(self) -> None:
+        super().__init__(
+            status_code=status.HTTP_404_NOT_FOUND, detail="Article not found"
+        )
+
+
+class CommentNotFoundException(HTTPException):
+    def __init__(self) -> None:
+        super().__init__(
+            status_code=status.HTTP_404_NOT_FOUND, detail="Comment not found"
+        )
+
+
+class NotArticleAuthorException(HTTPException):
+    def __init__(self) -> None:
+        super().__init__(
+            status_code=status.HTTP_403_FORBIDDEN,
+            detail="User is not author of the article",
+        )
+
+
+class NotAuthenticatedException(HTTPException):
+    def __init__(self) -> None:
+        super().__init__(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Not authenticated",
+            headers={"WWW-Authenticate": "Token"},
+        )
+
+
+class CredentialsException(HTTPException):
+    def __init__(self) -> None:
+        super().__init__(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Could not validate credentials",
+            headers={"WWW-Authenticate": "Token"},
+        )
+
+
+class InvalidCredentialsException(HTTPException):
+    def __init__(self) -> None:
+        super().__init__(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Incorrect username or password",
+            headers={"WWW-Authenticate": "Token"},
+        )
diff --git a/api/core/user.py b/api/core/user.py
new file mode 100644
index 0000000000..f20a06257f
--- /dev/null
+++ b/api/core/user.py
@@ -0,0 +1,43 @@
+from typing import Union
+
+from fastapi import HTTPException, status
+
+from ..models.user import UserModel
+from .exceptions import UserNotFoundException
+
+
+async def query_users_db(
+    db,
+    id: Union[str, None] = None,
+    username: Union[str, None] = None,
+) -> UserModel:
+    """Queries db for user instance by ID or username and returns the instance."""
+    if id is not None:
+        query = """
+            SELECT `user`.* FROM `user` WHERE `user`.id=$id;
+        """
+    elif username is not None:
+        query = """
+            SELECT `user`.* FROM `user` WHERE `user`.username=$username;
+        """
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="No ID or username provided",
+        )
+    try:
+        queryResult = db.query(query, id=id, username=username)
+        response_data = [r for r in queryResult]
+        if not response_data:
+            raise UserNotFoundException()
+        else:
+            return UserModel(**response_data[0])
+    except HTTPException:
+        # Re-raise HTTP errors (e.g. the 404 above) instead of masking them as 500s.
+        raise
+    except TimeoutError:
+        raise HTTPException(
+            status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout"
+        )
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Unexpected error: {e}",
+        )
diff --git a/api/database.py b/api/database.py
new file mode 100644
index 0000000000..4abd4c1b83
--- /dev/null
+++ b/api/database.py
@@ -0,0 +1,127 @@
+from __future__ import annotations
+
+import logging
+import os
+from datetime import timedelta
+from functools import cache
+
+from couchbase.auth import PasswordAuthenticator
+from couchbase.cluster import Cluster
+from couchbase.exceptions import CouchbaseException
+from couchbase.options import ClusterOptions
+from dotenv import load_dotenv
+
+from .core.exceptions import EmptyEnvironmentVariableError
+
+
+class CouchbaseClient(object):
+    """Class to handle interactions with Couchbase cluster"""
+
+    def __init__(
+        self,
+        conn_str: str,
+        username: str,
+        password: str,
+        bucket_name: str,
+        scope_name: str,
+    ):
+        self.cluster = None
+        self.bucket = None
+        self.scope = None
+        self.conn_str = conn_str
+        self.username = username
+        self.password = password
+        self.bucket_name = bucket_name
+        self.scope_name = scope_name
+        self.connect()
+
+    def connect(self) -> None:
+        """Connect to the Couchbase cluster"""
+        if self.cluster:
+            return
+        logging.info("connecting to db")
+        try:
+            auth = PasswordAuthenticator(self.username, self.password)
+            cluster_opts = ClusterOptions(auth)
+            cluster_opts.apply_profile("wan_development")
+            self.cluster = Cluster(self.conn_str, cluster_opts)
+            self.cluster.wait_until_ready(timedelta(seconds=5))
+            self.bucket = self.cluster.bucket(self.bucket_name)
+        except CouchbaseException as error:
+            self.connection_error(error)
+        if not self.check_scope_exists():
+            logging.warning(
+                "Scope does not exist in the bucket. Ensure that you have the scope in your bucket."
+            )
+        self.scope = self.bucket.scope(self.scope_name)
+
+    def connection_error(self, error: CouchbaseException) -> None:
+        """Handle connection errors"""
+        logging.error(f"Could not connect to the cluster. Error: {error}")
+        logging.warning("Ensure that you have the bucket loaded in the cluster.")
+
+    def check_scope_exists(self) -> bool:
+        """Check if the scope exists in the bucket"""
+        try:
+            scopes_in_bucket = [
+                scope.name for scope in self.bucket.collections().get_all_scopes()
+            ]
+            return self.scope_name in scopes_in_bucket
+        except Exception:
+            logging.error(
+                "Error fetching scopes in cluster. \nEnsure that the bucket exists."
+            )
+            return False
+
+    def close(self) -> None:
+        """Close the connection to the Couchbase cluster"""
+        if self.cluster:
+            try:
+                self.cluster.close()
+            except Exception as e:
+                logging.error(f"Error closing cluster. \nError: {e}")
+
+    def get_document(self, collection_name: str, key: str):
+        """Get document by key using KV operation"""
+        return self.scope.collection(collection_name).get(key)
+
+    def insert_document(self, collection_name: str, key: str, doc: dict):
+        """Insert document using KV operation"""
+        return self.scope.collection(collection_name).insert(key, doc)
+
+    def delete_document(self, collection_name: str, key: str):
+        """Delete document using KV operation"""
+        return self.scope.collection(collection_name).remove(key)
+
+    def upsert_document(self, collection_name: str, key: str, doc: dict):
+        """Upsert document using KV operation"""
+        return self.scope.collection(collection_name).upsert(key, doc)
+
+    def query(self, sql_query, *options, **kwargs):
+        """Query Couchbase using SQL++"""
+        return self.scope.query(sql_query, *options, **kwargs)
+
+
+@cache
+def get_db():
+    """Get Couchbase client"""
+    load_dotenv()
+    env_vars = [
+        "DB_CONN_STR",
+        "DB_USERNAME",
+        "DB_PASSWORD",
+        "DB_BUCKET_NAME",
+        "DB_SCOPE_NAME",
+    ]
+    try:
+        conn_str, username, password, bucket_name, scope_name = (
+            os.getenv(var) for var in env_vars
+        )
+        for env_var, var_name in zip(
+            [conn_str, username, password, bucket_name, scope_name], env_vars
+        ):
+            if not env_var:
+                raise EmptyEnvironmentVariableError(var_name)
+        return CouchbaseClient(conn_str, username, password, bucket_name, scope_name)
+    except Exception as e:
+        raise e
diff --git a/api/main.py b/api/main.py
new file mode 100644
index 0000000000..a04d101135
--- /dev/null
+++ b/api/main.py
@@ -0,0 +1,74 @@
+import logging
+import os
+import sys
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+from starlette.middleware.cors import CORSMiddleware
+from starlette.responses import RedirectResponse
+
+from .database import get_db
+from .routers.article import router as article_router
+from .routers.comment import router as comment_router
+from .routers.profile import router as profile_router
+from .routers.tag import router as tag_router
+from .routers.user import router as user_router
+
+
+# Initialize couchbase connection
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """Method that gets called upon app initialization \
+        to initialize couchbase connection & close the connection on exit"""
+    db = get_db()
+    yield
+    db.close()
+
+
+api = FastAPI(
+    title="FastAPI & Capella Conduit API",
+    version="1.0.0",
+    description="""
+    RealWorld Conduit API built with FastAPI and Couchbase Capella.
+    """,
+    lifespan=lifespan,
+)
+
+
+logging.basicConfig(
+    level=logging.INFO,
+    stream=sys.stdout,
+    format="%(asctime)s %(levelname)s %(message)s",
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+
+
+allowed_origins = os.getenv("CORS_ALLOWED_ORIGINS", "*")
+allowed_methods = os.getenv("CORS_ALLOWED_METHODS", "*")
+allowed_headers = os.getenv("CORS_ALLOWED_HEADERS", "*")
+
+
+# The CORS env vars are comma-separated strings; split them into lists
+# before handing them to the middleware.
+api.add_middleware(
+    CORSMiddleware,
+    allow_origins=allowed_origins.split(","),
+    allow_credentials=True,
+    allow_methods=allowed_methods.split(","),
+    allow_headers=allowed_headers.split(","),
+)
+
+
+@api.get("/health", tags=["health"])
+async def health_check():
+    return {"status": "ok"}
+
+
+api.include_router(article_router, tags=["articles"])
+api.include_router(comment_router, tags=["comments"])
+api.include_router(profile_router, tags=["profiles"])
+api.include_router(tag_router, tags=["tags"])
+api.include_router(user_router, tags=["users"])
+
+
+@api.get("/", include_in_schema=False)
+def redirect_to_docs():
+    return RedirectResponse(url="/docs")
diff --git a/api/models/__init__.py b/api/models/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/models/article.py b/api/models/article.py
new file mode 100644
index 0000000000..935257e2a5
--- /dev/null
+++ b/api/models/article.py
@@ -0,0 +1,43 @@
+from datetime import datetime
+from typing import List, Tuple
+
+from pydantic import BaseModel, Field, root_validator
+
+from .identifier import generate_id, generate_random_str
+from .user import UserModel
+
+
+class CommentModel(BaseModel):
+    id: str = Field(default_factory=generate_id)
+    body: str
+    createdAt: datetime = Field(default_factory=datetime.utcnow)
+    updatedAt: datetime = Field(default_factory=datetime.utcnow)
+    authorId: str
+
+
+class ArticleModel(BaseModel):
+    slug: str
+    # NOTE: slug is not a primary field because it could change, and that
+    # would mean updating all the references
+    title: str
+    description: str
+    body: str
+    tagList: List[str] = Field(default_factory=list)
+    createdAt: datetime = Field(default_factory=datetime.utcnow)
+    updatedAt: datetime = Field(default_factory=datetime.utcnow)
+    author: UserModel
+    favoritedUserIDs: Tuple[str, ...] = ()
+    commentIDs: Tuple[str, ...] = ()
+
+    @root_validator(pre=True)
+    def generate_slug(cls, values):
+        if values.get("slug") is not None:
+            return values
+        title = values.get("title", "")
+        words = title.split()[:5]
+        words = [w.lower() for w in words]
+        slug = "-".join(words) + f"-{generate_random_str()}"
+        values["slug"] = slug
+        if values.get("tagList") is not None and isinstance(values["tagList"], list):
+            values["tagList"].sort()
+        return values
diff --git a/api/models/identifier.py b/api/models/identifier.py
new file mode 100644
index 0000000000..6239137d8e
--- /dev/null
+++ b/api/models/identifier.py
@@ -0,0 +1,12 @@
+from uuid import uuid4
+
+
+def generate_id():
+    """Generates uuid ID."""
+    return str(uuid4())
+
+
+def generate_random_str():
+    """Generates uuid random string."""
+    s = str(uuid4())
+    return s.split("-")[0]
diff --git a/api/models/user.py b/api/models/user.py
new file mode 100644
index 0000000000..73b4127ac0
--- /dev/null
+++ b/api/models/user.py
@@ -0,0 +1,15 @@
+from typing import Tuple, Union
+
+from pydantic import BaseModel, Field
+
+from .identifier import generate_id
+
+
+class UserModel(BaseModel):
+    id: str = Field(default_factory=generate_id)
+    username: str
+    email: str
+    hashed_password: str
+    bio: Union[str, None] = None
+    image: Union[str, None] = None
+    following_ids: Tuple[str, ...] = ()
diff --git a/api/requirements.txt b/api/requirements.txt
new file mode 100644
index 0000000000..c45f1528b4
--- /dev/null
+++ b/api/requirements.txt
@@ -0,0 +1,15 @@
+bcrypt==4.2.0
+couchbase==4.3.0
+fastapi==0.110.0
+httpx==0.27.0
+passlib==1.7.4
+pydantic-settings==2.4.0
+pydantic==2.8.2
+pyjwt==2.9.0
+pytest-asyncio==0.23.8
+pytest==8.3.2
+python-jose==3.3.0
+setuptools==73.0.0
+starlette==0.36.3
+uvicorn==0.30.6
+wheel==0.43.0
diff --git a/api/routers/__init__.py b/api/routers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/routers/article.py b/api/routers/article.py
new file mode 100644
index 0000000000..d4d996df1a
--- /dev/null
+++ b/api/routers/article.py
@@ -0,0 +1,338 @@
+from datetime import datetime
+from typing import Union
+
+from couchbase.exceptions import DocumentExistsException
+from fastapi import APIRouter, Body, Depends, HTTPException, status
+from fastapi.encoders import jsonable_encoder
+
+from ..core.article import query_articles_by_slug
+from ..core.exceptions import NotArticleAuthorException
+from ..database import get_db
+from ..models.article import ArticleModel, CommentModel
+from ..models.user import UserModel
+from ..schemas.article import (
+    ArticleResponseSchema,
+    CreateArticleSchema,
+    MultipleArticlesResponseSchema,
+    UpdateArticleSchema,
+)
+from ..utils.security import (
+    get_current_user_instance,
+    get_current_user_optional_instance,
+    get_user_instance,
+)
+
+router = APIRouter(
+    prefix="/api",
+    tags=["articles"],
+    responses={404: {"description": "Not found"}},
+)
+ARTICLE_COLLECTION = "article"
+COMMENT_COLLECTION = "comment"
+
+
+async def build_query(filter_type: str, limit: int, offset: int) -> str:
+    """Builds a SQL++ query string for the given filter type, limit and offset."""
+    base_query = f"""SELECT article.*
+        FROM article
+        ORDER BY article.createdAt
+        LIMIT {limit}
+        OFFSET {offset};
+        """
+    if filter_type == "author":
+        return f"""SELECT article.*
+            FROM article
+            WHERE article.author.username=$author
+            ORDER BY article.createdAt
+            LIMIT {limit}
+            OFFSET {offset};
+            """
+    elif filter_type == "favorited":
+        return f"""SELECT article.*
+            FROM article
+            WHERE $favoritedId IN article.favoritedUserIDs
+            ORDER BY article.createdAt
+            LIMIT {limit}
+            OFFSET {offset};
+            """
+    elif filter_type == "tag":
+        return f"""SELECT article.*
+            FROM article
+            WHERE $tag IN article.tagList
+            ORDER BY article.createdAt
+            LIMIT {limit}
+            OFFSET {offset};
+            """
+    return base_query
+
+
+async def get_article_filter_type(
+    author: Union[str, None] = None,
+    favorited: Union[str, None] = None,
+    tag: Union[str, None] = None,
+) -> str:
+    """Determines whether articles should be filtered by author, favorited user or tag, and returns the \
+        filter type."""
+    if author:
+        return "author"
+    elif favorited:
+        return "favorited"
+    elif tag:
+        return "tag"
+    else:
+        return "all"
+
+
+async def get_favorited_id(db, favorited: Union[str, None] = None):
+    """Queries db for the user instance matching a favorited username and returns its ID, if any."""
+    favorited_user = await get_user_instance(db, username=favorited)
+    return favorited_user.id if favorited_user else None
+
+
+async def get_comments_by_ids(db, comment_ids):
+    """Queries db for comment instances by their IDs and returns them as comment models."""
+    if not comment_ids:
+        return []
+    query = f"""
+        SELECT comment.*
+        FROM {COMMENT_COLLECTION} AS comment
+        WHERE comment.id IN $comment_ids;
+    """
+    try:
+        query_result = db.query(query, comment_ids=comment_ids)
+        return [CommentModel(**r) for r in query_result]
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Failed to retrieve comments: {e}",
) + + +@router.get("/articles", response_model=MultipleArticlesResponseSchema) +async def get_articles( + author: Union[str, None] = None, + favorited: Union[str, None] = None, + tag: Union[str, None] = None, + limit: int = 20, + offset: int = 0, + user_instance: Union[UserModel, None] = Depends(get_current_user_optional_instance), + db=Depends(get_db), +): + """Queries db for article instances by author, favorited or tag with a limit and offset and returns multiple \ + articles schema.""" + favorited_id = await get_favorited_id(db, favorited) + filter_type = await get_article_filter_type(author, favorited, tag) + query = await build_query(filter_type, limit, offset) + if query is None: + return MultipleArticlesResponseSchema(articles=[], articles_count=0) + try: + queryResult = db.query( + query, + author=author, + favoritedId=favorited_id, + tag=tag, + limit=limit, + offset=offset, + ) + article_list = [ArticleModel(**r) for r in queryResult] + return MultipleArticlesResponseSchema.from_article_instances( + article_list, len(article_list), user_instance + ) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.get("/articles/feed", response_model=MultipleArticlesResponseSchema) +async def get_feed_articles( + limit: int = 20, + offset: int = 0, + user_instance: UserModel = Depends(get_current_user_instance), + db=Depends(get_db), +): + """Query db for article instances by author (that current user follows) and returns multiple articles schema.""" + query = """ + SELECT article.* + FROM article + WHERE article.author.id IN $users_followed + ORDER BY article.createdAt + LIMIT $limit + OFFSET $offset; + """ + try: + queryResult = db.query( + query, + users_followed=user_instance.following_ids, + limit=limit, + offset=offset, + ) + article_list = [ArticleModel(**article) for article in queryResult] + return MultipleArticlesResponseSchema( + articles=article_list, articlesCount=len(article_list) + ) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.post("/articles", response_model=ArticleResponseSchema) +async def create_article( + article: CreateArticleSchema = Body(..., embed=True), + user_instance: UserModel = Depends(get_current_user_instance), + db=Depends(get_db), +): + """Create article instance from create schema, inserts instance to db then returns article schema.""" + response_article = ArticleModel(author=user_instance, **article.model_dump()) + response_article.tagList.sort() + try: + db.insert_document( + ARTICLE_COLLECTION, + response_article.slug, + jsonable_encoder(response_article), + ) + return ArticleResponseSchema.from_article_instance( + response_article, user_instance + ) + except DocumentExistsException: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, detail="Article already exists" + ) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.get("/articles/{slug}", response_model=ArticleResponseSchema) 
+async def get_single_article( + slug: str, + user_instance: Union[UserModel, None] = Depends(get_current_user_optional_instance), + db=Depends(get_db), +): + """Queries db for article instance by slug and returns article schema.""" + article_model = await query_articles_by_slug(slug, db) + return ArticleResponseSchema.from_article_instance(article_model, user_instance) + + +@router.put("/articles/{slug}", response_model=ArticleResponseSchema) +async def update_article( + slug: str, + article: UpdateArticleSchema = Body(..., embed=True), + current_user: UserModel = Depends(get_current_user_instance), + db=Depends(get_db), +): + """Queries db for article instance by slug, updates instance with update schema, upserts article to db and \ + returns article schema.""" + article_instance = await query_articles_by_slug(slug, db) + if current_user != article_instance.author: + raise NotArticleAuthorException() + patch_dict = article.model_dump(exclude_none=True) + for name, value in patch_dict.items(): + setattr(article_instance, name, value) + article_instance.updatedAt = datetime.utcnow() + try: + db.upsert_document( + ARTICLE_COLLECTION, + article_instance.slug, + jsonable_encoder((article_instance)), + ) + return ArticleResponseSchema.from_article_instance( + article_instance, current_user + ) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.post("/articles/{slug}/favorite", response_model=ArticleResponseSchema) +async def favorite_article( + slug: str, + current_user: UserModel = Depends(get_current_user_instance), + db=Depends(get_db), +): + """Queries db for article instance by slug, adds user ID to favoritedUserIDs, upserts instance to db and returns \ + article schema.""" + article = await query_articles_by_slug(slug, db) + favorited_set = {*article.favoritedUserIDs, current_user.id} + article.favoritedUserIDs = tuple(favorited_set) + try: + db.upsert_document(ARTICLE_COLLECTION, article.slug, jsonable_encoder(article)) + return ArticleResponseSchema.from_article_instance(article, current_user) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.delete("/articles/{slug}/favorite", response_model=ArticleResponseSchema) +async def unfavorite_article( + slug: str, + current_user: UserModel = Depends(get_current_user_instance), + db=Depends(get_db), +): + """Queries db for article instance by slug, removes user ID from favoritedUserIDs, upserts instance to db and \ + returns article schema.""" + article = await query_articles_by_slug(slug, db) + favorited_set = {*article.favoritedUserIDs} - {current_user.id} + article.favoritedUserIDs = tuple(favorited_set) + try: + db.upsert_document(ARTICLE_COLLECTION, article.slug, jsonable_encoder(article)) + return ArticleResponseSchema.from_article_instance(article, current_user) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.delete("/articles/{slug}") +async def delete_article( + slug: 
str, + current_user: UserModel = Depends(get_current_user_instance), + db=Depends(get_db), +): + """Queries db for article instance by slug and deletes instance from db.""" + article = await query_articles_by_slug(slug, db) + if current_user.id != article.author.id: + raise NotArticleAuthorException() + try: + db.delete_document(ARTICLE_COLLECTION, article.slug) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) diff --git a/api/routers/comment.py b/api/routers/comment.py new file mode 100644 index 0000000000..682bf07db2 --- /dev/null +++ b/api/routers/comment.py @@ -0,0 +1,127 @@ +from fastapi import APIRouter, Body, Depends, HTTPException, status +from fastapi.encoders import jsonable_encoder + +from ..core.article import query_articles_by_slug +from ..core.exceptions import CommentNotFoundException +from ..core.user import query_users_db +from ..database import get_db +from ..models.article import CommentModel +from ..models.user import UserModel +from ..routers.article import ARTICLE_COLLECTION, COMMENT_COLLECTION +from ..schemas.comment import ( + CommentSchema, + CreateCommentSchema, + MultipleCommentsResponseSchema, + SingleCommentResponseSchema, +) +from ..schemas.user import ProfileSchema +from ..utils.security import get_current_user_instance + +router = APIRouter( + prefix="/api", + tags=["comments"], + responses={404: {"description": "Not found"}}, +) + + + +@router.post("/articles/{slug}/comments", response_model=SingleCommentResponseSchema) +async def add_article_comment( + slug: str, + comment: CreateCommentSchema = Body(..., embed=True), + user_instance: UserModel = Depends(get_current_user_instance), + db=Depends(get_db), +): + """Queries db for article instance by slug, creates comment instance from create schema, adds comment instance \ + to article instance, upserts article instance and returns comment schema.""" + article = await query_articles_by_slug(slug, db) + comment_instance = CommentModel(authorId=user_instance.id, **comment.model_dump()) + try: + db.insert_document( + COMMENT_COLLECTION, + comment_instance.id, + jsonable_encoder(comment_instance) + ) + article.commentIDs = article.commentIDs + (comment_instance.id,) + db.upsert_document( + ARTICLE_COLLECTION, + article.slug, + jsonable_encoder(article) + ) + return SingleCommentResponseSchema( + comment=CommentSchema( + author=ProfileSchema(**user_instance.model_dump()), + **comment_instance.model_dump(), + ) + ) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.get("/articles/{slug}/comments", response_model=MultipleCommentsResponseSchema) +async def get_article_comments(slug: str, db=Depends(get_db)): + """Queries db for article instance by slug, queries db for user instances by id of article comments, creates \ + comment schemas and returns multiple comments schema.""" + article = await query_articles_by_slug(slug, db) + comment_ids = article.commentIDs + if not comment_ids: + return MultipleCommentsResponseSchema(comments=[]) + + query = """ + SELECT comment.* + FROM comment + WHERE comment.id IN $comment_ids; + """ + try: + queryResult = db.query(query, comment_ids=comment_ids) 
+        comments = [CommentModel(**r) for r in queryResult]
+        data = [
+            (comment, await query_users_db(db, id=comment.authorId))
+            for comment in comments
+        ]
+        return MultipleCommentsResponseSchema.from_comments_and_authors(data)
+    except TimeoutError:
+        raise HTTPException(
+            status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout"
+        )
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Unexpected error: {e}",
+        )
+
+
+@router.delete("/articles/{slug}/comments/{id}")
+async def delete_article_comment(
+    slug: str,
+    id: str,
+    user_instance: UserModel = Depends(get_current_user_instance),
+    db=Depends(get_db),
+):
+    """Queries db for article instance by slug, identifies comment by comment ID, removes comment from \
+        article instance, deletes the comment document and upserts article instance to db."""
+    try:
+        article = await query_articles_by_slug(slug, db)
+        if id in article.commentIDs:
+            article.commentIDs = [cid for cid in article.commentIDs if cid != id]
+            db.delete_document(COMMENT_COLLECTION, id)
+            db.upsert_document(
+                ARTICLE_COLLECTION, article.slug, jsonable_encoder(article)
+            )
+        else:
+            raise CommentNotFoundException()
+    except HTTPException:
+        # Re-raise HTTP errors (e.g. the 404 above) instead of masking them as 500s.
+        raise
+    except TimeoutError:
+        raise HTTPException(
+            status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout"
+        )
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Unexpected error: {e}",
+        )
diff --git a/api/routers/profile.py b/api/routers/profile.py
new file mode 100644
index 0000000000..1f993f53f3
--- /dev/null
+++ b/api/routers/profile.py
@@ -0,0 +1,91 @@
+from typing import Union
+
+from fastapi import APIRouter, Depends, HTTPException, status
+
+from ..core.user import query_users_db
+from ..database import get_db
+from ..models.user import UserModel
+from ..schemas.user import ProfileResponseSchema, ProfileSchema
+from ..utils.security import (
+    get_current_user_instance,
+    get_current_user_optional_instance,
+)
+from .user import USER_COLLECTION
+
+router = APIRouter(
+    prefix="/api",
+    tags=["profiles"],
+    responses={404: {"description": "Not found"}},
+)
+
+
+@router.get("/profiles/{username}", response_model=ProfileResponseSchema)
+async def get_profile(
+    username: str,
+    logged_user: Union[UserModel, None] = Depends(get_current_user_optional_instance),
+    db=Depends(get_db),
+):
+    """Queries db for user instance by username and returns profile schema."""
+    user = await query_users_db(db, username=username)
+    following = logged_user is not None and user.id in logged_user.following_ids
+    return ProfileResponseSchema(
+        profile=ProfileSchema(following=following, **user.model_dump())
+    )
+
+
+@router.post("/profiles/{username}/follow", response_model=ProfileResponseSchema)
+async def follow_user(
+    username: str,
+    user_instance: UserModel = Depends(get_current_user_instance),
+    db=Depends(get_db),
+):
+    """Queries db for user instance by username, adds current user ID to instance's following_ids, upserts instance \
+        to db and returns profile schema."""
+    user_to_follow = await query_users_db(db, username=username)
+    following_set = set(user_instance.following_ids) | {user_to_follow.id}
+    user_instance.following_ids = tuple(following_set)
+    try:
+        db.upsert_document(
+            USER_COLLECTION, user_instance.id, user_instance.model_dump()
+        )
+        return ProfileResponseSchema(
+            profile=ProfileSchema(following=True, **user_to_follow.model_dump())
+        )
+    except TimeoutError:
+        raise HTTPException(
+            status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout"
+        )
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Unexpected error: {e}",
+        )
+
+
+@router.delete("/profiles/{username}/follow", response_model=ProfileResponseSchema)
+async def unfollow_user(
+    username: str,
+    user_instance: UserModel = Depends(get_current_user_instance),
+    db=Depends(get_db),
+):
+    """Queries db for user instance by username, removes current user ID from instance's following_ids, upserts \
+    instance to db and returns profile schema."""
+    user_to_unfollow = await query_users_db(db, username=username)
+    following_set = set(user_instance.following_ids) - {user_to_unfollow.id}
+    user_instance.following_ids = tuple(following_set)
+    try:
+        db.upsert_document(
+            USER_COLLECTION, user_instance.id, user_instance.model_dump()
+        )
+        return ProfileResponseSchema(
+            profile=ProfileSchema(following=False, **user_to_unfollow.model_dump())
+        )
+    except TimeoutError:
+        raise HTTPException(
+            status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout"
+        )
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Unexpected error: {e}",
+        )
diff --git a/api/routers/tag.py b/api/routers/tag.py
new file mode 100644
index 0000000000..f522c6e5f7
--- /dev/null
+++ b/api/routers/tag.py
@@ -0,0 +1,37 @@
+from fastapi import APIRouter, Depends, HTTPException, status
+
+from ..database import get_db
+from ..schemas.tag import TagsResponseSchema
+
+router = APIRouter(
+    prefix="/api",
+    tags=["tags"],
+    responses={404: {"description": "Not found"}},
+)
+
+
+@router.get("/tags", response_model=TagsResponseSchema)
+async def get_tags(db=Depends(get_db)):
+    """Queries db for the distinct set of article tags and returns tags schema."""
+    query = """
+        SELECT article.tagList
+        FROM article as article;
+    """
+    try:
+        queryResult = db.query(query)
+        # De-duplicate tags across articles and tolerate documents whose
+        # tagList is missing or null; sorting keeps the response deterministic.
+        tags = {tag for row in queryResult for tag in (row.get("tagList") or [])}
+        return TagsResponseSchema(tags=sorted(tags))
+    except TimeoutError:
+        raise HTTPException(
+            status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout"
+        )
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Unexpected error: {e}",
+        )
diff --git a/api/routers/user.py b/api/routers/user.py
new file mode 100644
index 0000000000..dcd33673cb
--- /dev/null
+++ b/api/routers/user.py
@@ -0,0 +1,105 @@
+from couchbase.exceptions import DocumentExistsException
+from fastapi import APIRouter, Body, Depends, HTTPException, status
+
+from ..core.exceptions import InvalidCredentialsException
+from ..database import get_db
+from ..models.user import UserModel
+from ..schemas.user import (
+    AuthenticationSchema,
+    RegistrationSchema,
+    UpdateUserSchema,
+    UserResponseSchema,
+    UserSchema,
+)
+from ..utils.security import (
+    OAUTH2_SCHEME,
+    authenticate_user,
+    create_access_token,
+    get_current_user,
+    get_current_user_instance,
+    get_password_hash,
+)
+
+router = APIRouter(
+    prefix="/api",
+    tags=["users"],
+    responses={404: {"description": "Not found"}},
+)
+USER_COLLECTION = "user"
+
+
+@router.post("/users", response_model=UserResponseSchema)
+async def register(
+    user: RegistrationSchema = Body(..., embed=True), db=Depends(get_db)
+):
+    """Creates a user instance with registration data, then inserts instance to db and returns user schema."""
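+    # Hash the plaintext password up front so only `hashed_password` is ever
+    # persisted (bcrypt via passlib; see utils/security.py). Example request,
+    # assuming a local dev server (the payload is nested under "user" because
+    # the body is declared with embed=True):
+    #   curl -X POST http://localhost:8000/api/users \
+    #     -H 'Content-Type: application/json' \
+    #     -d '{"user": {"username": "jake", "email": "jake@example.com", "password": "pw"}}'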
user_model = UserModel( + **user.model_dump(), hashed_password=get_password_hash(user.password) + ) + try: + db.insert_document(USER_COLLECTION, user_model.id, user_model.model_dump()) + token = await create_access_token(user_model) + return UserResponseSchema( + user=UserSchema(token=token, **user_model.model_dump()) + ) + except DocumentExistsException: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, detail="User already exists" + ) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) + + +@router.post("/users/login", response_model=UserResponseSchema) +async def login_user( + user: AuthenticationSchema = Body(..., embed=True), db=Depends(get_db) +): + """Authenticates user with login data, creates a token and returns user schema.""" + user = await authenticate_user(user.email, user.password.get_secret_value(), db) + if user is None: + raise InvalidCredentialsException() + token = await create_access_token(user) + return UserResponseSchema(user=UserSchema(token=token, **user.model_dump())) + + +@router.get("/user", response_model=UserResponseSchema) +async def current_user(current_user: UserSchema = Depends(get_current_user)): + """Queries db for current user instance by token and returns user schema.""" + return UserResponseSchema(user=current_user) + + +@router.put("/user", response_model=UserResponseSchema) +async def update_user( + user: UpdateUserSchema = Body(..., embed=True), + user_instance: UserModel = Depends(get_current_user_instance), + token: str = Depends(OAUTH2_SCHEME), + db=Depends(get_db), +): + """Queries db for current user instance by token, updates it with update schema, upserts instance to db and \ + returns user schema.""" + patch_dict = user.model_dump(exclude_unset=True) + for name, value in patch_dict.items(): + setattr(user_instance, name, value) + try: + db.upsert_document( + USER_COLLECTION, user_instance.id, user_instance.model_dump() + ) + return UserResponseSchema( + user=UserSchema(token=token, **user_instance.model_dump()) + ) + except TimeoutError: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Request timeout" + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected error: {e}", + ) diff --git a/api/schemas/__init__.py b/api/schemas/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/schemas/article.py b/api/schemas/article.py new file mode 100644 index 0000000000..2810bea090 --- /dev/null +++ b/api/schemas/article.py @@ -0,0 +1,75 @@ +from datetime import datetime +from typing import List, Union + +from ..models.article import ArticleModel +from ..models.user import UserModel +from .base import BaseSchema +from .user import ProfileSchema + + +class CreateArticleSchema(BaseSchema): + title: str + description: str + body: str + tagList: Union[List[str], None] = None + + +class UpdateArticleSchema(BaseSchema): + title: Union[str, None] = None + description: Union[str, None] = None + body: Union[str, None] = None + + +class ArticleSchema(BaseSchema): + slug: str + title: str + description: str + body: str + tagList: List[str] + createdAt: datetime + updatedAt: datetime + favorited: bool = False + favoritesCount: int = 0 + author: ProfileSchema + + @classmethod + def from_article_instance( + cls, article: 
ArticleModel, user: Union[UserModel, None] = None + ) -> "ArticleSchema": + if user is None: + favorited = False + else: + favorited = user.id in article.favoritedUserIDs + + return cls( + favorited=favorited, + favoritesCount=len(article.favoritedUserIDs), + **article.model_dump() + ) + + +class ArticleResponseSchema(BaseSchema): + article: ArticleSchema + + @classmethod + def from_article_instance( + cls, article: ArticleModel, user: Union[UserModel, None] = None + ) -> "ArticleResponseSchema": + return cls( + article=ArticleSchema.from_article_instance(article=article, user=user) + ) + + +class MultipleArticlesResponseSchema(BaseSchema): + articles: List[ArticleSchema] + articlesCount: int = 0 + + @classmethod + def from_article_instances( + cls, + articles: List[ArticleModel], + total_count: int, + user: Union[UserModel, None] = None, + ) -> "MultipleArticlesResponseSchema": + articles = [ArticleSchema.from_article_instance(a, user) for a in articles] + return cls(articles=articles, articlesCount=total_count) diff --git a/api/schemas/base.py b/api/schemas/base.py new file mode 100644 index 0000000000..62fa7a3f46 --- /dev/null +++ b/api/schemas/base.py @@ -0,0 +1,13 @@ +from datetime import datetime + +from pydantic.main import BaseModel + + +class BaseSchema(BaseModel): + model_config = { + "populate_by_name": True, + "json_encoders": { + datetime: lambda d: d.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + }, + "from_attributes": True, + } diff --git a/api/schemas/comment.py b/api/schemas/comment.py new file mode 100644 index 0000000000..32124e48e6 --- /dev/null +++ b/api/schemas/comment.py @@ -0,0 +1,35 @@ +from datetime import datetime +from typing import List, Tuple + +from ..models.article import CommentModel +from ..models.user import UserModel +from ..schemas.user import ProfileSchema +from .base import BaseSchema + + +class CommentSchema(BaseSchema): + id: str + createdAt: datetime + updatedAt: datetime + body: str + author: ProfileSchema + + +class SingleCommentResponseSchema(BaseSchema): + comment: CommentSchema + + +class MultipleCommentsResponseSchema(BaseSchema): + comments: List[CommentSchema] + + @classmethod + def from_comments_and_authors(cls, data: List[Tuple[CommentModel, UserModel]]): + return cls( + comments=[ + {**comment.model_dump(), "author": author} for comment, author in data + ] + ) + + +class CreateCommentSchema(BaseSchema): + body: str diff --git a/api/schemas/tag.py b/api/schemas/tag.py new file mode 100644 index 0000000000..ef736783bd --- /dev/null +++ b/api/schemas/tag.py @@ -0,0 +1,7 @@ +from typing import List + +from .base import BaseSchema + + +class TagsResponseSchema(BaseSchema): + tags: List[str] diff --git a/api/schemas/user.py b/api/schemas/user.py new file mode 100644 index 0000000000..4a708dcb24 --- /dev/null +++ b/api/schemas/user.py @@ -0,0 +1,47 @@ +from typing import Union + +from pydantic import SecretStr + +from .base import BaseSchema + + +class AuthenticationSchema(BaseSchema): + email: str + password: SecretStr + + +class RegistrationSchema(BaseSchema): + username: str + email: str + password: str + + +class UserSchema(BaseSchema): + email: str + token: str + username: str + bio: Union[str, None] = None + image: Union[str, None] = None + + +class UserResponseSchema(BaseSchema): + user: UserSchema + + +class UpdateUserSchema(BaseSchema): + email: Union[str, None] = None + token: Union[str, None] = None + username: Union[str, None] = None + bio: Union[str, None] = None + image: Union[str, None] = None + + +class ProfileSchema(BaseSchema): + 
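+    # A profile as seen by the requesting user: `following` is computed per
+    # request in routers/profile.py and stays False for anonymous readers.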
username: str
+    bio: Union[str, None] = None
+    image: Union[str, None] = None
+    following: bool = False
+
+
+class ProfileResponseSchema(BaseSchema):
+    profile: ProfileSchema
diff --git a/api/settings.py b/api/settings.py
new file mode 100644
index 0000000000..e356359184
--- /dev/null
+++ b/api/settings.py
@@ -0,0 +1,17 @@
+import os
+
+from dotenv import load_dotenv
+from pydantic import Field
+from pydantic.types import SecretStr
+from pydantic_settings import BaseSettings
+
+
+class _Settings(BaseSettings):
+    # load_dotenv() runs once at class-definition time so JWT_SECRET is in
+    # the environment before the field default below is evaluated.
+    load_dotenv()
+    SECRET_KEY: SecretStr = Field(os.getenv("JWT_SECRET"))
+    ALGORITHM: str = "HS256"
+    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
+
+
+# Make this a singleton to avoid reloading it from the env every time
+SETTINGS = _Settings()
diff --git a/api/test/__init__.py b/api/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/test/test_register.py b/api/test/test_register.py
new file mode 100644
index 0000000000..fe64a86db6
--- /dev/null
+++ b/api/test/test_register.py
@@ -0,0 +1,39 @@
+import pytest
+from unittest.mock import AsyncMock, MagicMock, patch
+
+from fastapi.testclient import TestClient
+
+from api.database import get_db
+from api.main import api
+
+client = TestClient(api)
+
+mock_RegistrationSchema = {
+    "user": {
+        "username": "testuser",
+        "email": "testuser@example.com",
+        "password": "securepassword",
+    }
+}
+
+
+@pytest.mark.asyncio
+async def test_register():
+    # Stub the db and the token factory. insert_document is called
+    # synchronously by the endpoint, so a plain MagicMock suffices;
+    # create_access_token is awaited, so it needs an AsyncMock.
+    mock_db = MagicMock()
+    mock_create_access_token = AsyncMock(return_value="mock_token")
+
+    # Patching api.database.get_db would not affect the route: FastAPI captured
+    # the dependency callable at import time. Override it on the app instead.
+    api.dependency_overrides[get_db] = lambda: mock_db
+    try:
+        with patch("api.routers.user.create_access_token", mock_create_access_token):
+            response = client.post("/api/users", json=mock_RegistrationSchema)
+    finally:
+        api.dependency_overrides.pop(get_db, None)
+
+    mock_UserResponseSchema = {
+        "user": {
+            "username": "testuser",
+            "email": "testuser@example.com",
+            "token": "mock_token",
+            "bio": None,
+            "image": None,
+        }
+    }
+
+    assert response.status_code == 200
+    assert response.json() == mock_UserResponseSchema
+    mock_db.insert_document.assert_called_once()
diff --git a/api/utils/__init__.py b/api/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/utils/security.py b/api/utils/security.py
new file mode 100644
index 0000000000..2b4e0dc4ef
--- /dev/null
+++ b/api/utils/security.py
@@ -0,0 +1,157 @@
+import json
+from datetime import datetime, timedelta
+from typing import Union, cast
+
+from fastapi import Depends, HTTPException
+from fastapi.openapi.models import OAuthFlows
+from fastapi.security import OAuth2
+from fastapi.security.utils import get_authorization_scheme_param
+from jose import JWTError, jwt
+from jose.exceptions import ExpiredSignatureError
+from passlib.context import CryptContext
+from pydantic import BaseModel, ValidationError
+from starlette.requests import Request
+
+from ..core.exceptions import CredentialsException, NotAuthenticatedException
+from ..database import get_db
+from ..models.user import UserModel
+from ..schemas.user import UserSchema
+from ..settings import SETTINGS
+
+
+class TokenModel(BaseModel):
+    access_token: str
+    token_type: str
+
+
+class TokenContentModel(BaseModel):
+    username: str
+
+
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+
+class OAuth2PasswordToken(OAuth2):
+    def __init__(
+        self,
+        tokenUrl: str,
+        scheme_name: Union[str, None] = None,
+        scopes: Union[dict, None] = None,
+    ):
+        if not scopes:
+            scopes = {}
+        flows = OAuthFlows(password={"tokenUrl": tokenUrl, "scopes": scopes})
+        super().__init__(flows=flows, scheme_name=scheme_name, auto_error=False)
+
+    async def __call__(self, request: Request) -> Union[str, None]:
+        authorization: str = request.headers.get("Authorization")
+        scheme, param = get_authorization_scheme_param(authorization)
+        if not authorization or scheme.lower() != "token":
+            return None
+        return cast(str, param)
+
+
+OAUTH2_SCHEME = OAuth2PasswordToken(tokenUrl="/users")
+
+
+def verify_password(plain_password, hashed_password):
+    return pwd_context.verify(plain_password, hashed_password)
+
+
+def get_password_hash(password):
+    return pwd_context.hash(password)
+
+
+async def get_user_instance(
+    db,
+    email: Union[str, None] = None,
+    username: Union[str, None] = None,
+) -> Union[UserModel, None]:
+    """Queries db for user instance by email or username and returns user instance or none."""
+    if username is not None:
+        query = """
+            SELECT `user`.* FROM `user` WHERE `user`.username=$username;
+        """
+    elif email is not None:
+        query = """
+            SELECT `user`.* FROM `user` WHERE `user`.email=$email;
+        """
+    else:
+        return None
+    queryResult = db.query(query, email=email, username=username)
+    user_data = [r for r in queryResult]
+    # Return None (as documented) instead of raising here; callers decide
+    # whether a missing user means bad credentials or an invalid token.
+    if not user_data:
+        return None
+    return UserModel(**user_data[0])
+
+
+async def authenticate_user(email: str, password: str, db) -> Union[UserModel, None]:
+    """Queries db for user instance by email, compares password to instance's hashed password and returns user \
+    instance if verified, otherwise None."""
+    user = await get_user_instance(db, email=email)
+    # Return None rather than False: login_user checks `if user is None`.
+    if not user or not verify_password(password, user.hashed_password):
+        return None
+    return user
+
+
+async def create_access_token(user: UserModel) -> str:
+    """Create an access token based on the user's username."""
+    token_content = TokenContentModel(username=user.username)
+    expire = datetime.utcnow() + timedelta(minutes=SETTINGS.ACCESS_TOKEN_EXPIRE_MINUTES)
+    to_encode = {"exp": expire, "sub": token_content.model_dump_json()}
+    encoded_jwt = jwt.encode(
+        to_encode, SETTINGS.SECRET_KEY.get_secret_value(), algorithm=SETTINGS.ALGORITHM
+    )
+    return str(encoded_jwt)
+
+
+async def get_current_user_instance(
+    db=Depends(get_db),
+    token: Union[str, None] = Depends(OAUTH2_SCHEME),
+) -> UserModel:
+    """Decodes JWT, queries db for user instance by username and returns user instance."""
+    if token is None:
+        raise NotAuthenticatedException()
+    try:
+        payload = jwt.decode(
+            token,
+            SETTINGS.SECRET_KEY.get_secret_value(),
+            algorithms=[SETTINGS.ALGORITHM],
+        )
+    except ExpiredSignatureError:
+        raise CredentialsException()
+    except JWTError:
+        raise CredentialsException()
+    try:
+        payload_model = json.loads(payload.get("sub"))
+        token_content = TokenContentModel(**payload_model)
+    except ValidationError:
+        raise CredentialsException()
+    user = await get_user_instance(db, username=token_content.username)
+    if user is None:
+        raise CredentialsException()
+    return user
+
+
+async def get_current_user_optional_instance(
+    db=Depends(get_db),
+    token: str = Depends(OAUTH2_SCHEME),
+) -> Union[UserModel, None]:
+    """Queries db for user instance by token and returns user instance or none."""
+    try:
+        user = await get_current_user_instance(db, token)
+        return user
+    except HTTPException:
+        return None
+
+
+async def get_current_user(
+    user_instance: UserModel = Depends(get_current_user_instance),
+    token: str = Depends(OAUTH2_SCHEME),
+) -> UserSchema:
+    """Queries db for user instance by token and returns user schema."""
+    return UserSchema(token=token, **user_instance.model_dump())
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000000..e47c361348
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,38 @@
+version: '3'
+services:
+  backend:
+    build:
+      context: ./api
+      dockerfile: Dockerfile
+    ports:
+      - "8000:8000"
+    networks:
+      - real-world-network
+
+  frontend:
+    build:
+      context: ./angular-conduit-signals
+      dockerfile: Dockerfile.frontend
+    ports:
+      - "80:80"
+    networks:
+      - real-world-network
+
+  cypress:
+    build:
+      context: ./angular-conduit-signals/cypress
+      dockerfile: Dockerfile.cypress
+    depends_on:
+      - frontend
+      - backend
+    environment:
+      - CYPRESS_BASE_URL=http://frontend
+    volumes:
+      - ./angular-conduit-signals:/e2e
+    networks:
+      - real-world-network
+
+
+networks:
+  real-world-network:
+    driver: bridge
diff --git a/infrastructure/data.tf b/infrastructure/data.tf
new file mode 100644
index 0000000000..7b9ae9c4df
--- /dev/null
+++ b/infrastructure/data.tf
@@ -0,0 +1,46 @@
+locals {
+  account-id = data.aws_caller_identity.current.account_id
+}
+
+# NOTE: this isn't really a public ECR repo; there's a condition that
+# only allows reads from accounts in the org, so this is a false positive
+# in tfsec
+# tfsec:ignore:aws-ecr-no-public-access
+data "aws_iam_policy_document" "ecr_repo_policy" {
+  statement {
+    # Sid values must be alphanumeric, so no spaces here.
+    sid    = "AllAccountsInTheOrgCanPull"
+    effect = "Allow"
+    principals {
+      type        = "AWS"
+      identifiers = ["*"]
+    }
+    actions = [
+      "ecr:GetDownloadUrlForLayer",
+      "ecr:BatchGetImage",
+      "ecr:ListImages"
+    ]
+    condition {
+      test     = "StringEquals"
+      variable = "aws:PrincipalAccount"
+      values   = [var.aws_account_id]
+    }
+  }
+  statement {
+    sid    = "AllowPushOnlyFromGithubActions"
+    effect = "Allow"
+    principals {
+      type        = "AWS"
+      identifiers = [var.iam_role]
+    }
+    actions = [
+      "ecr:BatchCheckLayerAvailability",
+      "ecr:CompleteLayerUpload",
+      "ecr:InitiateLayerUpload",
+      "ecr:PutImage",
+      "ecr:UploadLayerPart"
+    ]
+    condition {
+      test     = "StringEquals"
+      variable = "aws:PrincipalAccount"
+      values   = [var.aws_account_id]
+    }
+  }
+}
\ No newline at end of file
diff --git a/infrastructure/main.tf b/infrastructure/main.tf
new file mode 100644
index 0000000000..967a4cadb4
--- /dev/null
+++ b/infrastructure/main.tf
@@ -0,0 +1,255 @@
+
+data "aws_caller_identity" "current" {}
+
+resource "aws_vpc" "main" {
+  cidr_block = "10.0.0.0/16"
+  tags = {
+    Name = "${var.project_prefix}-vpc"
+  }
+}
+
+resource "aws_subnet" "subnet_a" {
+  vpc_id            = aws_vpc.main.id
+  cidr_block        = "10.0.1.0/24"
+  availability_zone = "ap-southeast-2a"
+  tags = {
+    Name = "${var.project_prefix}-subnet-a"
+  }
+}
+
+resource "aws_subnet" "subnet_b" {
+  vpc_id            = aws_vpc.main.id
+  cidr_block        = "10.0.2.0/24"
+  availability_zone = "ap-southeast-2b"
+  tags = {
+    Name = "${var.project_prefix}-subnet-b"
+  }
+}
+
+resource "aws_security_group" "ecs_sg" {
+  vpc_id = aws_vpc.main.id
+
+  ingress {
+    from_port   = 80
+    to_port     = 80
+    protocol    = "tcp"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  ingress {
+    from_port   = 8000
+    to_port     = 8000
+    protocol    = "tcp"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags = {
+    Name = "${var.project_prefix}-ecs-sg"
+  }
+}
+
+resource "aws_ecs_service" "cypress" {
+  name            = "${var.project_prefix}-cypress-service"
+  cluster         = aws_ecs_cluster.main.id
+  task_definition = aws_ecs_task_definition.cypress.arn
+  desired_count   = 1
+  launch_type     = "FARGATE"
+
+  network_configuration {
+    subnets = [aws_subnet.subnet_a.id,
aws_subnet.subnet_b.id] + security_groups = [aws_security_group.ecs_sg.id] + } +} + +resource "aws_ecs_service" "backend" { + name = "${var.project_prefix}-backend-service" + cluster = aws_ecs_cluster.main.id + task_definition = aws_ecs_task_definition.backend.arn + desired_count = 1 + launch_type = "FARGATE" + + network_configuration { + subnets = [aws_subnet.subnet_a.id, aws_subnet.subnet_b.id] + security_groups = [aws_security_group.ecs_sg.id] + } +} + +resource "aws_ecs_service" "frontend" { + name = "${var.project_prefix}-frontend-service" + cluster = aws_ecs_cluster.main.id + task_definition = aws_ecs_task_definition.frontend.arn + desired_count = 1 + launch_type = "FARGATE" + + network_configuration { + subnets = [aws_subnet.subnet_a.id, aws_subnet.subnet_b.id] + security_groups = [aws_security_group.ecs_sg.id] + } +} + +resource "aws_ecs_task_definition" "backend" { + family = "${var.project_prefix}-backend-task" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + execution_role_arn = aws_iam_role.ecs_task_execution_role.arn + cpu = var.task_cpu + memory = var.task_memory + + container_definitions = jsonencode([ + { + name = "backend-container" + image = "${aws_ecr_repository.backend.repository_url}:${var.project_prefix}-backend-image" + essential = true + portMappings = [ + { + containerPort = 8000 + hostPort = 8000 + } + ] + } + ]) +} + +resource "aws_ecs_task_definition" "frontend" { + family = "${var.project_prefix}-frontend-task" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + execution_role_arn = aws_iam_role.ecs_task_execution_role.arn + cpu = var.task_cpu + memory = var.task_memory + + container_definitions = jsonencode([ + { + name = "frontend-container" + image = "${aws_ecr_repository.frontend.repository_url}:${var.project_prefix}-frontend-image" + essential = true + portMappings = [ + { + containerPort = 80 + hostPort = 80 + } + ] + } + ]) +} + +resource "aws_ecs_task_definition" "cypress" { + family = "${var.project_prefix}-cypress-task" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + execution_role_arn = aws_iam_role.ecs_task_execution_role.arn + cpu = var.task_cpu + memory = var.task_memory + + container_definitions = jsonencode([ + { + name = "cypress-container" + image = "${aws_ecr_repository.cypress.repository_url}:${var.project_prefix}-cypress-image" + essential = true + volumes = [ + { + host = { + sourcePath = "/frontend" + } + containerPath = "/e2e" + } + ] + } + ]) +} + +resource "aws_iam_role" "ecs_task_execution_role" { + name = "${var.project_prefix}-ecsTaskExecutionRole" + + assume_role_policy = jsonencode({ + Version = "2012-10-17", + Statement = [ + { + Effect = "Allow", + Principal = { + Service = "ecs-tasks.amazonaws.com" + }, + Action = "sts:AssumeRole" + } + ] + }) + + managed_policy_arns = [ + "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy" + ] +} + +resource "aws_ecs_cluster" "main" { + name = "${var.project_prefix}-ecs-cluster" +} + +# tfsec:ignore:aws-ecr-repository-customer-key +resource "aws_ecr_repository" "backend" { + name = "${var.project_prefix}-backend-repo" + image_tag_mutability = "MUTABLE" + image_scanning_configuration { + scan_on_push = true + } + + encryption_configuration { + encryption_type = "KMS" + } +} + +resource "aws_ecr_repository" "frontend" { + name = "${var.project_prefix}-frontend-repo" + image_tag_mutability = "MUTABLE" + image_scanning_configuration { + scan_on_push = true + } + + encryption_configuration { + encryption_type = "KMS" + } 
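+  # No kms_key is specified, so this presumably falls back to the
+  # AWS-managed "aws/ecr" KMS key; pass a customer-managed key ARN to override.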
+} + +resource "aws_ecr_repository" "cypress" { + name = "${var.project_prefix}-cypress-repo" + image_tag_mutability = "MUTABLE" + image_scanning_configuration { + scan_on_push = true + } + + encryption_configuration { + encryption_type = "KMS" + } +} + +resource "aws_ecr_lifecycle_policy" "backend" { + repository = aws_ecr_repository.backend.name + policy = templatefile(var.lifecycle_policy, {}) +} + +resource "aws_ecr_lifecycle_policy" "frontend" { + repository = aws_ecr_repository.frontend.name + policy = templatefile(var.lifecycle_policy, {}) +} + +resource "aws_ecr_lifecycle_policy" "cypress" { + repository = aws_ecr_repository.cypress.name + policy = templatefile(var.lifecycle_policy, {}) +} + +resource "aws_ecr_registry_scanning_configuration" "scan_configuration" { + scan_type = "ENHANCED" + + rule { + scan_frequency = "CONTINUOUS_SCAN" + repository_filter { + filter = "*" + filter_type = "WILDCARD" + } + } +} diff --git a/infrastructure/outputs.tf b/infrastructure/outputs.tf new file mode 100644 index 0000000000..0798b77d62 --- /dev/null +++ b/infrastructure/outputs.tf @@ -0,0 +1,31 @@ +output "be_ecr_repo_url" { + value = aws_ecr_repository.backend.repository_url +} + +output "be_ecr_repo_arn" { + value = aws_ecr_repository.backend.arn +} + +output "fe_ecr_repo_url" { + value = aws_ecr_repository.frontend.repository_url +} + +output "fe_ecr_repo_arn" { + value = aws_ecr_repository.frontend.arn +} + +output "cy_ecr_repo_url" { + value = aws_ecr_repository.cypress.repository_url +} + +output "cy_ecr_repo_arn" { + value = aws_ecr_repository.cypress.arn +} + +output "subnet_ids" { + value = [aws_subnet.subnet_a.id, aws_subnet.subnet_b.id] +} + +output "security_group_id" { + value = aws_security_group.ecs_sg.id +} \ No newline at end of file diff --git a/infrastructure/policy.json b/infrastructure/policy.json new file mode 100644 index 0000000000..506269d062 --- /dev/null +++ b/infrastructure/policy.json @@ -0,0 +1,17 @@ +{ + "rules": [ + { + "rulePriority": 1, + "description": "Keep last 1 images", + "selection": { + "tagStatus": "tagged", + "tagPrefixList": [ "snapshot" ], + "countType": "imageCountMoreThan", + "countNumber": 1 + }, + "action": { + "type": "expire" + } + } + ] + } \ No newline at end of file diff --git a/infrastructure/provider.tf b/infrastructure/provider.tf new file mode 100644 index 0000000000..e0e0e97fb4 --- /dev/null +++ b/infrastructure/provider.tf @@ -0,0 +1,14 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.49.0" + } + } + + backend "s3" {} +} + +provider "aws" { + region = "ap-southeast-2" +} \ No newline at end of file diff --git a/infrastructure/variables.tf b/infrastructure/variables.tf new file mode 100644 index 0000000000..36b91b03ae --- /dev/null +++ b/infrastructure/variables.tf @@ -0,0 +1,31 @@ +variable "project_prefix" { + description = "The prefix of the project" + type = string +} + +variable "task_cpu" { + description = "The CPU units to allocate for the tasks" + type = string + default = "256" +} + +variable "task_memory" { + description = "The memory in MiB to allocate for the tasks" + type = string + default = "512" +} + +variable "iam_role" { + description = "Self-hosted runner EC2 instance role" + type = string +} + +variable "lifecycle_policy" { + description = "the lifecycle policy to be applied to the ECR repos" + type = string +} + +variable "aws_account_id" { + description = "Target AWS Account ID" + type = string +} diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 
0000000000..2f4c80e307
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+asyncio_mode = auto
diff --git a/realworld b/realworld
new file mode 160000
index 0000000000..11c81f64f0
--- /dev/null
+++ b/realworld
@@ -0,0 +1 @@
+Subproject commit 11c81f64f04fff8cfcd60ddf4eb0064c01fa1730
diff --git a/scripts/container/build-be-container.sh b/scripts/container/build-be-container.sh
new file mode 100755
index 0000000000..c130b6f623
--- /dev/null
+++ b/scripts/container/build-be-container.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker build -t realworld-backend -f Dockerfile.backend .
\ No newline at end of file
diff --git a/scripts/container/build-fe-container.sh b/scripts/container/build-fe-container.sh
new file mode 100755
index 0000000000..8c32fd2521
--- /dev/null
+++ b/scripts/container/build-fe-container.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker build -t conduit-frontend -f ./angular-conduit-signals/Dockerfile.frontend angular-conduit-signals
\ No newline at end of file
diff --git a/scripts/container/composition-down.sh b/scripts/container/composition-down.sh
new file mode 100644
index 0000000000..28bdde66d0
--- /dev/null
+++ b/scripts/container/composition-down.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker-compose down
\ No newline at end of file
diff --git a/scripts/container/composition-up.sh b/scripts/container/composition-up.sh
new file mode 100644
index 0000000000..a8b462b1c0
--- /dev/null
+++ b/scripts/container/composition-up.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker-compose up --build
\ No newline at end of file
diff --git a/scripts/container/run-be-container.sh b/scripts/container/run-be-container.sh
new file mode 100755
index 0000000000..2d53336d13
--- /dev/null
+++ b/scripts/container/run-be-container.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker run --env-file api/.env -p 8000:8000 realworld-backend
\ No newline at end of file
diff --git a/scripts/container/run-fe-container.sh b/scripts/container/run-fe-container.sh
new file mode 100755
index 0000000000..dead4655d4
--- /dev/null
+++ b/scripts/container/run-fe-container.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker run -it -p 80:80 conduit-frontend
\ No newline at end of file
diff --git a/scripts/deployment/CD.sh b/scripts/deployment/CD.sh
new file mode 100755
index 0000000000..dbab03c2fe
--- /dev/null
+++ b/scripts/deployment/CD.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+# Check if the environment argument is provided
+if [ -z "$1" ]; then
+    echo "Usage: $0 <environment>"
+    echo "Environment must be one of 'dev', 'stage', or 'prod'."
+    exit 1
+fi
+
+# Get the environment argument
+ENV=$1
+
+# Validate the argument
+if [[ "$ENV" != "dev" && "$ENV" != "stage" && "$ENV" != "prod" ]]; then
+    echo "Invalid environment: $ENV"
+    echo "Environment must be one of 'dev', 'stage', or 'prod'."
+    exit 1
+fi
+
+# Set the corresponding files
+VAR_FILE=".env.$ENV"
+SECRET_FILE=".secrets.$ENV"
+
+# Execute the act command
+act workflow_dispatch \
+    -W .github/workflows/CD.yml \
+    --var-file "$VAR_FILE" \
+    --secret-file "$SECRET_FILE" \
+    --container-architecture linux/amd64
+    # --local-repository ldoguin/setup-cbsh@develop=/home/ldoguing/Code/setup-cbsh
diff --git a/scripts/deployment/TD.sh b/scripts/deployment/TD.sh
new file mode 100755
index 0000000000..59dae51f2d
--- /dev/null
+++ b/scripts/deployment/TD.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+# Check if the environment argument is provided
+if [ -z "$1" ]; then
+    echo "Usage: $0 <environment>"
+    echo "Environment must be one of 'dev', 'stage', or 'prod'."
+ exit 1 +fi + +# Get the environment argument +ENV=$1 + +# Validate the argument +if [[ "$ENV" != "dev" && "$ENV" != "stage" && "$ENV" != "prod" ]]; then + echo "Invalid environment: $ENV" + echo "Environment must be one of 'dev', 'stage', or 'prod'." + exit 1 +fi + +# Set the corresponding files +VAR_FILE=".env.$ENV" +SECRET_FILE=".secrets.$ENV" + +# Execute the act command +act workflow_dispatch \ + -W .github/workflows/TD.yml \ + --var-file "$VAR_FILE" \ + --secret-file "$SECRET_FILE" \ + --container-architecture linux/amd64 + # --local-repository ldoguin/setup-cbsh@develop=/home/ldoguing/Code/setup-cbsh diff --git a/scripts/local/generate-secret-key.sh b/scripts/local/generate-secret-key.sh new file mode 100755 index 0000000000..1dcb27dcba --- /dev/null +++ b/scripts/local/generate-secret-key.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +openssl rand -hex 32 diff --git a/scripts/local/install-dependencies.sh b/scripts/local/install-dependencies.sh new file mode 100755 index 0000000000..670a30c25f --- /dev/null +++ b/scripts/local/install-dependencies.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +REQUIREMENTS_FILE="./api/requirements.txt" + +if [ ! -f "$REQUIREMENTS_FILE" ]; then + echo "Error: $REQUIREMENTS_FILE not found." + exit 1 +fi + +echo "Installing dependencies from $REQUIREMENTS_FILE..." +pip install -r "$REQUIREMENTS_FILE" + +echo "Dependency installation completed." diff --git a/scripts/local/pytest-test.sh b/scripts/local/pytest-test.sh new file mode 100644 index 0000000000..1735967d65 --- /dev/null +++ b/scripts/local/pytest-test.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +pytest diff --git a/scripts/local/realworld-test.sh b/scripts/local/realworld-test.sh new file mode 100755 index 0000000000..e696935348 --- /dev/null +++ b/scripts/local/realworld-test.sh @@ -0,0 +1,3 @@ +#!/bin/bash +export APIURL=http://localhost:8000/api +./realworld/api/run-api-tests.sh diff --git a/scripts/local/start-api.sh b/scripts/local/start-api.sh new file mode 100755 index 0000000000..f044abc7de --- /dev/null +++ b/scripts/local/start-api.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +uvicorn api.main:api --reload
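+# Uvicorn binds to 127.0.0.1:8000 by default; add --host 0.0.0.0 if the API
+# must be reachable from other containers.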