From 7169679cb0a9673264af630080611409be6befd8 Mon Sep 17 00:00:00 2001 From: Jens Troeger Date: Fri, 11 Jul 2025 19:45:44 +1000 Subject: [PATCH 1/4] feat: add backend composed of a Postgres database, Alembic to orchestrate migrations, a PostgREST web server, a message queue built on top of the Postgres database, and async Python jobs implemented by the Dramatiq framework --- .gitignore | 28 ++ .pre-commit-config.yaml | 99 +++++ Makefile | 101 +++++ README.md | 34 ++ backend/.flake8 | 52 +++ backend/LICENSE.md | 9 + backend/Makefile | 175 +++++++++ backend/README.md | 35 ++ backend/alembic-requirements.txt | 256 +++++++++++++ backend/alembic.ini | 42 ++ backend/alembic/env.py | 63 +++ backend/alembic/script.py.mako | 28 ++ .../7ce2fd1a52c8_initial_database_setup.py | 358 ++++++++++++++++++ backend/develop-requirements.txt | 36 ++ backend/develop.toml | 180 +++++++++ backend/docker/Dockerfile.alembic | 13 + backend/docker/Dockerfile.dramatiq | 12 + backend/docker/Dockerfile.pg | 24 ++ backend/pyproject.toml | 45 +++ backend/src/template_jobs/__init__.py | 9 + backend/src/template_jobs/actors.py | 16 + backend/src/template_jobs/broker.py | 18 + backend/src/template_jobs/py.typed | 1 + backend/tests/actors/test_job.py | 29 ++ backend/tests/api/test_login.py | 66 ++++ backend/tests/api/test_message_queue.py | 80 ++++ backend/tests/api/test_profile.py | 133 +++++++ backend/tests/api/test_signup.py | 52 +++ backend/tests/conftest.py | 24 ++ frontend/Makefile | 42 ++ infra/README.md | 1 + infra/_base-services.yaml | 44 +++ infra/docker-compose-develop.yaml | 41 ++ infra/docker-compose.yaml | 50 +++ 34 files changed, 2196 insertions(+) create mode 100644 Makefile create mode 100644 backend/.flake8 create mode 100644 backend/LICENSE.md create mode 100644 backend/Makefile create mode 100644 backend/README.md create mode 100644 backend/alembic-requirements.txt create mode 100644 backend/alembic.ini create mode 100644 backend/alembic/env.py create mode 100644 backend/alembic/script.py.mako create mode 100644 backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py create mode 100644 backend/develop-requirements.txt create mode 100644 backend/develop.toml create mode 100644 backend/docker/Dockerfile.alembic create mode 100644 backend/docker/Dockerfile.dramatiq create mode 100644 backend/docker/Dockerfile.pg create mode 100644 backend/pyproject.toml create mode 100644 backend/src/template_jobs/__init__.py create mode 100644 backend/src/template_jobs/actors.py create mode 100644 backend/src/template_jobs/broker.py create mode 100644 backend/src/template_jobs/py.typed create mode 100644 backend/tests/actors/test_job.py create mode 100644 backend/tests/api/test_login.py create mode 100644 backend/tests/api/test_message_queue.py create mode 100644 backend/tests/api/test_profile.py create mode 100644 backend/tests/api/test_signup.py create mode 100644 backend/tests/conftest.py create mode 100644 frontend/Makefile create mode 100644 infra/README.md create mode 100644 infra/_base-services.yaml create mode 100644 infra/docker-compose-develop.yaml create mode 100644 infra/docker-compose.yaml diff --git a/.gitignore b/.gitignore index 3d2d021..c46d846 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,34 @@ *.mo *.pot +# Python byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Python Distribution / packaging +.coverage +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +include/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ 
+share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + # macOS stuff .DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b5ebe1a..e67862d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,3 +50,102 @@ repos: # Commenting this out because https://github.com/pappasam/toml-sort/issues/11 # - id: pretty-format-toml # args: [--autofix] + +# +# Backend. +# + +# Sort imports. +- repo: https://github.com/pycqa/isort + rev: 6.0.1 + hooks: + - id: isort + name: Sort import statements + files: ^backend/ + args: [--settings-path, backend/develop.toml] + stages: [pre-commit] + +# Add Black code formatters. +- repo: https://github.com/ambv/black + rev: 25.1.0 + hooks: + - id: black + name: Format code + files: ^backend/ + args: [--config, backend/develop.toml] +- repo: https://github.com/asottile/blacken-docs + rev: 1.19.1 + hooks: + - id: blacken-docs + name: Format code in docstrings + files: ^backend/ + types: [text, python] + args: [--line-length, '120'] + additional_dependencies: [black==25.1.0] + +# Upgrade and rewrite Python idioms. +- repo: https://github.com/asottile/pyupgrade + rev: v3.20.0 + hooks: + - id: pyupgrade + name: Upgrade code idioms + files: ^backend/src/template_jobs/|^backend/tests/ + args: [--py313-plus] + +# Similar to pylint, with a few more/different checks. For more available +# extensions: https://github.com/DmytroLitvinov/awesome-flake8-extensions +- repo: https://github.com/pycqa/flake8 + rev: 7.2.0 + hooks: + - id: flake8 + name: Check flake8 issues + files: ^backend/src/template_jobs/|^backend/tests/ + types: [text, python] + additional_dependencies: [flake8-bugbear==24.12.12, flake8-builtins==2.5.0, flake8-comprehensions==3.16.0, flake8-docstrings==1.7.0, flake8-logging==1.7.0, flake8-mutable==1.2.0, flake8-noqa==1.4.0, flake8-print==5.0.0, flake8-pyi==25.5.0, flake8-pytest-style==2.1.0, flake8-rst-docstrings==0.3.1, pep8-naming==0.15.1] + args: [--config, backend/.flake8] + +# Run Pylint from the local repo to make sure venv packages +# specified in pyproject.toml are available. +- repo: local + hooks: + - id: pylint + name: Check pylint issues + entry: pylint + language: python + files: ^backend/src/template_jobs/|^backend/tests/ + types: [text, python] + args: [--rcfile, backend/develop.toml] + +# Type-check all Python code. +- repo: local + hooks: + - id: mypy + name: Check typing annotations + entry: mypy + language: python + files: ^backend/src/template_jobs/|^backend/tests/ + types: [text, python] + args: [--config-file, backend/develop.toml] + +# Check for potential security issues. +- repo: https://github.com/PyCQA/bandit + rev: 1.8.3 + hooks: + - id: bandit + name: Check for security issues + files: ^backend/src/template_jobs/|^backend/tests/ + types: [text, python] + args: [--configfile, backend/develop.toml] + additional_dependencies: ['bandit[toml]'] + +# On push to the remote, run the unit tests. +- repo: local + hooks: + - id: pytest + name: Run unit tests + entry: env COVERAGE_CORE=sysmon pytest -c backend/develop.toml --cov-config backend/develop.toml backend/src/template_jobs/ backend/tests/ backend/docs/ + language: python + verbose: true + always_run: true + pass_filenames: false + stages: [pre-push] diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..a2f47d5 --- /dev/null +++ b/Makefile @@ -0,0 +1,101 @@ + +# Use bash as the shell when executing a rule's recipe. 
For more details:
+# https://www.gnu.org/software/make/manual/html_node/Choosing-the-Shell.html
+SHELL := bash
+
+
+.PHONY: all all-frontend all-backend
+all: all-frontend all-backend
+all-frontend:
+	$(MAKE) --directory frontend all
+all-backend:
+	$(MAKE) --directory backend all
+
+
+.PHONY: init init-frontend init-backend
+init: init-frontend init-backend
+init-frontend:
+	$(MAKE) --directory frontend init
+init-backend:
+	$(MAKE) --directory backend init
+
+
+.PHONY: setup setup-frontend setup-backend
+setup: setup-frontend setup-backend
+	pre-commit install
+setup-frontend:
+	$(MAKE) --directory frontend setup
+setup-backend:
+	$(MAKE) --directory backend setup
+
+
+.PHONY: check check-frontend check-backend
+check: check-frontend check-backend
+check-frontend:
+	$(MAKE) --directory frontend check
+check-backend:
+	$(MAKE) --directory backend check
+
+
+.PHONY: test test-frontend test-backend
+test: test-frontend test-backend
+test-frontend:
+	$(MAKE) --directory frontend test
+test-backend:
+	$(MAKE) --directory backend test
+
+
+.PHONY: build build-frontend build-backend build-docker build-docker-frontend build-docker-backend
+build: build-frontend build-backend build-docker
+build-docker: build-docker-frontend build-docker-backend
+build-frontend:
+	$(MAKE) --directory frontend build
+build-docker-frontend:
+	$(MAKE) --directory frontend build-docker
+build-backend:
+	$(MAKE) --directory backend build
+build-docker-backend:
+	$(MAKE) --directory backend build-docker
+
+
+.PHONY: docs docs-frontend docs-backend
+docs: docs-frontend docs-backend
+docs-frontend:
+	$(MAKE) --directory frontend docs
+docs-backend:
+	$(MAKE) --directory backend docs
+
+
+.PHONY: compose-up compose-down compose-up-develop compose-down-develop
+compose-up:
+	docker compose --file infra/docker-compose.yaml up
+compose-down:
+	docker compose --file infra/docker-compose.yaml down
+compose-up-develop:
+	docker compose --file infra/docker-compose-develop.yaml up
+compose-down-develop:
+	docker compose --file infra/docker-compose-develop.yaml down
+
+
+.PHONY: clean clean-frontend clean-backend
+clean: clean-frontend clean-backend
+	rm -fr .coverage .mypy_cache/  # These backend/ files are created at the base of the repo.
+clean-frontend:
+	$(MAKE) --directory frontend clean
+clean-backend:
+	$(MAKE) --directory backend clean
+
+
+.PHONY: nuke nuke-git-hooks nuke-frontend nuke-backend nuke-caches nuke-caches-frontend nuke-caches-backend
+nuke: clean nuke-git-hooks nuke-caches nuke-frontend nuke-backend
+nuke-caches: nuke-caches-frontend nuke-caches-backend
+nuke-git-hooks:
+	find .git/hooks/ -type f ! -name '*.sample' -delete
+nuke-caches-frontend:
+	$(MAKE) --directory frontend nuke-caches
+nuke-frontend:
+	$(MAKE) --directory frontend nuke
+nuke-caches-backend:
+	$(MAKE) --directory backend nuke-caches
+nuke-backend:
+	$(MAKE) --directory backend nuke
diff --git a/README.md b/README.md
index 71d907c..7d8f9ed 100644
--- a/README.md
+++ b/README.md
@@ -3,3 +3,37 @@
 # A Full-Stack Web Application Template
 
 This repository is an opinionated implementation of a full-stack web application template.
+
+## Prerequisites
+
+The following tools should be available on your machine to get started:
+
+- [GNU make](https://www.gnu.org/software/make/) and related GNU tools to run the Makefiles which, in turn, orchestrate checking, building, testing, and deploying the entire software stack.
+- [pre-commit](https://pre-commit.com/) to manage the various [git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) that enforce coding standards.
+- [commitizen](https://commitizen-tools.github.io/commitizen/) to manage automatic version bumps for _semantic versioning_ based on the _conventional commit messages_ in this repository.
+- [Docker](https://www.docker.com/) to build and deploy application containers.
+
+## Architecture
+
+There are three folders in this repository:
+
+- **Frontend**: TBD. For more details see [here](frontend/README.md).
+- **Backend**: the backend is composed of a [PostgREST](https://github.com/PostgREST/postgrest) web server, a message queue based on Postgres, and asynchronous workers implemented in Python using the [Dramatiq](https://github.com/Bogdanp/dramatiq) framework. For more details see [here](backend/README.md).
+- **Infrastructure**: both frontend and backend build Docker images which are then orchestrated using [Docker Compose](https://docs.docker.com/compose/). For more details see [here](infra/README.md).
+
+## Development
+
+All of the development — checking and compiling code, running tests, and building Docker images — is managed by `make`, and each component has its own Makefile.
+
+To set up this project for development, follow these steps:
+
+1. `make init`: initialize both frontend and backend.
+2. `make setup`: set up and install all tools and packages needed to build, test, and run.
+3. `make check`: run code checks for both frontend and backend.
+4. `make test`: run all tests.
+5. `make build`: build both frontend and backend packages, then build the Docker images.
+6. `make docs`: generate documentation.
+7. `make compose-up` and `make compose-down` stand up and tear down the application locally.
+8. `make clean` and `make nuke` reset the build environment and remove all generated artifacts.
+
+More details can be found in the documentation for each of the components.
diff --git a/backend/.flake8 b/backend/.flake8
new file mode 100644
index 00000000..8f61d11
--- /dev/null
+++ b/backend/.flake8
@@ -0,0 +1,52 @@
+# Unfortunately, Flake8 does not support pyproject.toml configuration.
+# https://github.com/PyCQA/flake8/issues/234
+#
+# More details regarding Flake8 and Black interplay:
+# https://github.com/psf/black/blob/main/docs/guides/using_black_with_other_tools.md#flake8
+[flake8]
+
+# Enable a few additional checks.
+#
+# https://github.com/PyCQA/flake8-bugbear#how-to-enable-opinionated-warnings
+# B9: Bugbear's extended opinionated checks
+#
+# https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes
+# W504: line break after binary operator (Black compliant)
+extend-select = B9, W504
+
+# Disable several warnings that don't play nice with PEP8 or Black,
+# or that are a bit of a nuisance in general.
+#
+# http://www.pydocstyle.org/en/latest/error_codes.html
+# D105: Missing docstring in magic method
+#
+# https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes
+# E203: whitespace before ‘,’, ‘;’, or ‘:’ (not Black compliant)
+# E501: line too long (managed better by Bugbear's B950)
+# W503: line break before binary operator (not Black compliant)
+#
+# https://github.com/peterjc/flake8-rst-docstrings#configuration
+# RST307: Error in "XXX" directive
+ignore = D105, E203, E501, RST307, W503
+per-file-ignores =
+
+# More assorted goodness.
+max-line-length = 120 +show-source = true + +# Ensure that Flake8 warnings are silenced correctly: +# https://github.com/plinss/flake8-noqa#options +noqa-require-code = true + +# Ensure that Sphinx extensions of .rst are recognized: +# https://github.com/peterjc/flake8-rst-docstrings#configuration +rst-roles = class, func, ref +rst-directives = envvar, exception +rst-substitutions = version + +# Ensure that Sphinx docstrings use Numpy format for docstrings: +# https://github.com/PyCQA/flake8-docstrings +# +# For details on the Numpy format: +# https://www.sphinx-doc.org/en/master/usage/extensions/example_numpy.html +docstring-convention = numpy diff --git a/backend/LICENSE.md b/backend/LICENSE.md new file mode 100644 index 00000000..8cbfe66 --- /dev/null +++ b/backend/LICENSE.md @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 1999–2025 + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/Makefile b/backend/Makefile new file mode 100644 index 00000000..43236d1 --- /dev/null +++ b/backend/Makefile @@ -0,0 +1,175 @@ + +# Use bash as the shell when executing a rule's recipe. For more details: +# https://www.gnu.org/software/make/manual/html_node/Choosing-the-Shell.html +SHELL := bash + +# Set the package's name and version for use throughout the Makefile. +PACKAGE_NAME := template_jobs +PACKAGE_VERSION := $(shell python -c $$'try: import $(PACKAGE_NAME); print($(PACKAGE_NAME).__version__);\nexcept: print("unknown");') + + +.PHONY: all +all: check test build + + +# Create a virtual environment, either for Python3.13 (default) or using +# the Python interpreter specified in the PYTHON environment variable. Also +# create an empty pip.conf file to ensure that `pip config` modifies this +# venv only, unless told otherwise. +.PHONY: init venv +init: venv +venv: + if [ ! -z "${VIRTUAL_ENV}" ]; then \ + echo "Found an activated Python virtual environment, exiting" && exit 1; \ + fi + if [ -d .venv/ ]; then \ + echo "Found an inactive Python virtual environment, please activate or nuke it" && exit 1; \ + fi + if [ -z "${PYTHON}" ]; then \ + echo "Creating virtual environment in .venv/ for python3.13"; \ + python3.13 -m venv --upgrade-deps --prompt . .venv; \ + else \ + echo "Creating virtual environment in .venv/ for ${PYTHON}"; \ + ${PYTHON} -m venv --upgrade-deps --prompt . .venv; \ + fi + touch .venv/pip.conf + + +# Set up a newly created virtual environment. 
Note: pre-commit uses the +# venv's Python interpreter, so if you've created multiple venvs then +# pre-commit's git hooks run against the most recently set up venv. +.PHONY: setup +setup: force-upgrade + pre-commit install + mkdir -p dist + + +# Install or upgrade an existing virtual environment based on the package +# dependencies declared in pyproject.toml. +.PHONY: upgrade force-upgrade +upgrade: .venv/upgraded-on +.venv/upgraded-on: pyproject.toml + python -m pip install --upgrade pip setuptools + python -m pip install --upgrade wheel + python -m pip install --upgrade --upgrade-strategy eager --editable . --requirement develop-requirements.txt + $(MAKE) upgrade-quiet +force-upgrade: + rm -f .venv/upgraded-on + $(MAKE) upgrade +upgrade-quiet: + echo "Automatically generated by Python Package Makefile on $$(date '+%Y-%m-%d %H:%M:%S %z')." > .venv/upgraded-on + + +# Run some or all checks over the package code base. +.PHONY: check check-code check-bandit check-flake8 check-lint check-mypy +check: + pre-commit run --all-files +check-code: check-bandit check-flake8 check-lint check-mypy +check-bandit: + pre-commit run bandit --all-files +check-flake8: + pre-commit run flake8 --all-files +check-lint: + pre-commit run pylint --all-files +check-mypy: + pre-commit run mypy --all-files + + +# Run all unit tests. The --files option avoids stashing but passes files; however, +# the hook setup itself does not pass files to pytest (see .pre-commit-config.yaml). +.PHONY: test +test: + pre-commit run pytest --hook-stage push --files tests/ + + +# Build a source distribution package and a binary wheel distribution artifact. +# When building these artifacts, we need the environment variable SOURCE_DATE_EPOCH +# set to the build date/epoch. For more details, see: https://flit.pypa.io/en/latest/reproducible.html +.PHONY: dist +dist: dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-requirements.txt +dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl: dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt # check test + SOURCE_DATE_EPOCH=$(SOURCE_DATE_EPOCH) flit build --setup-py --format wheel +dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz: dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt # check test + SOURCE_DATE_EPOCH=$(SOURCE_DATE_EPOCH) flit build --setup-py --format sdist +dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt: + echo $(SOURCE_DATE_EPOCH) > dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt +dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-requirements.txt: requirements.txt + cp requirements.txt dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-requirements.txt + + +# Build a PEP-503 compatible Simple Repository directory inside of dist/. 
For details on
+# the layout of that directory, see: https://peps.python.org/pep-0503/
+# The directory can then be used to install (hashed) artifacts by using `pip` and
+# its `--extra-index-url` argument: https://pip.pypa.io/en/stable/cli/pip_install/#cmdoption-extra-index-url
+PROJECT_NAME := $(shell python -c $$'import re; print(re.sub(r"[-_.]+", "-", "$(PACKAGE_NAME)").lower());')
+.PHONY: simple-index
+simple-index: dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz
+	mkdir -p dist/simple-index/$(PROJECT_NAME)
+	echo -e "<!DOCTYPE html><html><body>\n<a href=\"$(PROJECT_NAME)/\">$(PROJECT_NAME)</a>\n</body></html>" > dist/simple-index/index.html
+	echo -e "<!DOCTYPE html><html><body>\n<a href=\"$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl\">$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl</a><br>\n<a href=\"$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz\">$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz</a>\n</body></html>" > dist/simple-index/$(PROJECT_NAME)/index.html
+	cp -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl dist/simple-index/$(PROJECT_NAME)/
+	cp -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz dist/simple-index/$(PROJECT_NAME)/
+
+
+# Generate a requirements.txt file containing version and integrity hashes for all
+# packages currently installed in the virtual environment. There's no easy way to
+# do this, see also: https://github.com/pypa/pip/issues/4732
+#
+# If using a private package index, make sure that it implements the JSON API:
+# https://warehouse.pypa.io/api-reference/json.html
+#
+# We also want to make sure that this package itself is added to the requirements.txt
+# file, and if possible even with proper hashes.
+.PHONY: requirements
+requirements: requirements.txt
+requirements.txt:
+	echo -n "" > requirements.txt
+	for pkg in $$(python -m pip freeze --local --disable-pip-version-check --exclude-editable); do \
+		pkg=$${pkg//[$$'\r\n']}; \
+		echo -n $$pkg >> requirements.txt; \
+		echo "Fetching package metadata for requirement '$$pkg'"; \
+		[[ $$pkg =~ (.*)==(.*) ]] && curl -s https://pypi.org/pypi/$${BASH_REMATCH[1]}/$${BASH_REMATCH[2]}/json | python -c "import json, sys; print(''.join(f''' \\\\\n    --hash=sha256:{pkg['digests']['sha256']}''' for pkg in json.load(sys.stdin)['urls']));" >> requirements.txt; \
+	done
+	echo -e -n "$(PACKAGE_NAME)==$(PACKAGE_VERSION)" >> requirements.txt
+	if [ -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz ]; then \
+		echo -e -n " \\\\\n    $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz | grep '^\-\-hash')" >> requirements.txt; \
+	fi
+	if [ -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl ]; then \
+		echo -e -n " \\\\\n    $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl | grep '^\-\-hash')" >> requirements.txt; \
+	fi
+	echo "" >> requirements.txt
+
+
+.PHONY: build build-docker docker-image-pg docker-image-alembic docker-image-dramatiq
+build: dist simple-index
+build-docker: docker-image-pg docker-image-alembic docker-image-dramatiq
+docker-image-pg:
+	docker build --tag fullstack-template-backend-pg:0.0.0 --file docker/Dockerfile.pg .
+docker-image-alembic:
+	docker build --tag fullstack-template-backend-alembic:0.0.0 --file docker/Dockerfile.alembic .
+docker-image-dramatiq:
+	docker build --tag fullstack-template-backend-dramatiq:0.0.0 --file docker/Dockerfile.dramatiq .
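+
+
+# A convenience target for local development (a sketch, not wired into the targets above):
+# run the Dramatiq broker and workers on the host, mirroring the command documented in
+# README.md. The connection URL assumes the development Docker Compose setup's default
+# port mapping and the `dramatiq` role credentials from the initial migration.
+.PHONY: run-dramatiq
+run-dramatiq:
+	DRAMATIQ_SQLA_URL=postgresql://dramatiq:dramatiq@localhost:5432/template_db dramatiq --verbose --processes 1 --threads 1 template_jobs.broker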
+
+
+.PHONY: docs
+docs:
+
+
+.PHONY: clean dist-clean
+clean: dist-clean
+	rm -fr .coverage .hypothesis/ .mypy_cache/ .pytest_cache/
+	rm -fr docs/_build/
+dist-clean:
+	rm -fr dist/*
+	rm -f requirements.txt
+
+
+.PHONY: nuke nuke-caches
+nuke-caches:
+	find src/ -type d -name __pycache__ -exec rm -fr {} +
+	find tests/ -type d -name __pycache__ -exec rm -fr {} +
+nuke: clean nuke-caches
+	if [ ! -z "${VIRTUAL_ENV}" ]; then \
+		echo "Please deactivate the virtual environment first!" && exit 1; \
+	fi
+	rm -fr .venv/
diff --git a/backend/README.md b/backend/README.md
new file mode 100644
index 00000000..2f6c09c
--- /dev/null
+++ b/backend/README.md
@@ -0,0 +1,35 @@
+# Backend
+
+The backend folder structure is heavily inspired by the [python-package-template](https://github.com/jenstroeger/python-package-template), also an opinionated template that sets the foundation for a Python package.
+
+## Architecture
+
+TODO
+
+## Developing
+
+Ensure that the code is somewhat clean and healthy:
+
+```
+make check
+```
+
+In order to run and test the backend’s asynchronous jobs, both the Alembic migrations and the Dramatiq actors should run on the host instead of in their containers. To achieve that, run `make compose-up-develop` in a terminal, which
+
+- mounts the host's `backend/alembic/versions/` folder into the Alembic container and runs these host migrations against the containerized Postgres database; and
+- does not run the Dramatiq container.
+
+Next, run Dramatiq in another terminal:
+```
+DRAMATIQ_SQLA_URL=postgresql://dramatiq:dramatiq@localhost:5432/template_db dramatiq --verbose --processes 1 --threads 1 template_jobs.broker
+```
+
+which launches the message broker and workers, waiting for messages on the queue.
+
+With the development containers running and the Dramatiq broker ready, run the tests:
+
+```
+make test
+```
+
+which runs all tests, collects statement and branch coverage and various statistics, and then dumps the results of the test run.
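+
+Once the development stack is running you can also exercise the REST endpoints directly. The walkthrough below is a sketch: it assumes PostgREST is reachable on `localhost:3000` (its default port; adjust to your Compose port mapping), and the email, password, and the `<token>` and `<job id>` placeholders are illustrative only. It signs up a user, logs in to obtain a JWT, enqueues an async job via the `api.job` function, and polls the job's state through the `api.job` view:
+
+```
+# Sign up a new user; PostgREST exposes the api.signup function at /rpc/signup.
+curl -X POST http://localhost:3000/rpc/signup \
+     -H "Content-Type: application/json" \
+     -d '{"email": "jane@example.com", "password": "s3cret"}'
+
+# Log in to receive a JWT that authenticates subsequent requests.
+curl -X POST http://localhost:3000/rpc/login \
+     -H "Content-Type: application/json" \
+     -d '{"email": "jane@example.com", "password": "s3cret"}'
+
+# Enqueue a job, then poll its state and result using the returned job id.
+curl -X POST http://localhost:3000/rpc/job -H "Authorization: Bearer <token>"
+curl "http://localhost:3000/job?job_id=eq.<job id>" -H "Authorization: Bearer <token>"
+```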
diff --git a/backend/alembic-requirements.txt b/backend/alembic-requirements.txt new file mode 100644 index 00000000..fe9da0f --- /dev/null +++ b/backend/alembic-requirements.txt @@ -0,0 +1,256 @@ +# https://pip.pypa.io/en/stable/reference/requirements-file-format/ + +alembic==1.15.2 \ + --hash=sha256:2e76bd916d547f6900ec4bb5a90aeac1485d2c92536923d0b138c02b126edc53 \ + --hash=sha256:1c72391bbdeffccfe317eefba686cb9a3c078005478885413b95c3b26c57a8a7 +greenlet==3.2.2 \ + --hash=sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6 \ + --hash=sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7 \ + --hash=sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c \ + --hash=sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907 \ + --hash=sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f \ + --hash=sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13 \ + --hash=sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5 \ + --hash=sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057 \ + --hash=sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f \ + --hash=sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068 \ + --hash=sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce \ + --hash=sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b \ + --hash=sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3 \ + --hash=sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74 \ + --hash=sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe \ + --hash=sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e \ + --hash=sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6 \ + --hash=sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b \ + --hash=sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330 \ + --hash=sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b \ + --hash=sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e \ + --hash=sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275 \ + --hash=sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65 \ + --hash=sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3 \ + --hash=sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e \ + --hash=sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5 \ + --hash=sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec \ + --hash=sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59 \ + --hash=sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf \ + --hash=sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325 \ + --hash=sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5 \ + --hash=sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825 \ + --hash=sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d \ + --hash=sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf \ + --hash=sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708 \ + --hash=sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418 \ + --hash=sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4 \ + 
--hash=sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763 \ + --hash=sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b \ + --hash=sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207 \ + --hash=sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8 \ + --hash=sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51 \ + --hash=sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421 \ + --hash=sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240 \ + --hash=sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370 \ + --hash=sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59 \ + --hash=sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e \ + --hash=sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa \ + --hash=sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819 \ + --hash=sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc \ + --hash=sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457 \ + --hash=sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659 \ + --hash=sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61 \ + --hash=sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834 \ + --hash=sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485 +Mako==1.3.10 \ + --hash=sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59 \ + --hash=sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28 +MarkupSafe==3.0.2 \ + --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ + --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ + --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ + --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ + --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ + --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ + --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ + --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ + --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 \ + --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ + --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ + --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ + --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ + --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ + --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ + --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ + --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ + --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ + --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ + --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ + --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ + --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ + 
--hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ + --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ + --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ + --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ + --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ + --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ + --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ + --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ + --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ + --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ + --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ + --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ + --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ + --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ + --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ + --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ + --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 +psycopg==3.2.9 \ + --hash=sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6 \ + --hash=sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700 +psycopg-binary==3.2.9 \ + --hash=sha256:528239bbf55728ba0eacbd20632342867590273a9bacedac7538ebff890f1093 \ + 
--hash=sha256:e4978c01ca4c208c9d6376bd585e2c0771986b76ff7ea518f6d2b51faece75e8 \ + --hash=sha256:1ed2bab85b505d13e66a914d0f8cdfa9475c16d3491cf81394e0748b77729af2 \ + --hash=sha256:799fa1179ab8a58d1557a95df28b492874c8f4135101b55133ec9c55fc9ae9d7 \ + --hash=sha256:bb37ac3955d19e4996c3534abfa4f23181333974963826db9e0f00731274b695 \ + --hash=sha256:001e986656f7e06c273dd4104e27f4b4e0614092e544d950c7c938d822b1a894 \ + --hash=sha256:fa5c80d8b4cbf23f338db88a7251cef8bb4b68e0f91cf8b6ddfa93884fdbb0c1 \ + --hash=sha256:39a127e0cf9b55bd4734a8008adf3e01d1fd1cb36339c6a9e2b2cbb6007c50ee \ + --hash=sha256:fb7599e436b586e265bea956751453ad32eb98be6a6e694252f4691c31b16edb \ + --hash=sha256:5d2c9fe14fe42b3575a0b4e09b081713e83b762c8dc38a3771dd3265f8f110e7 \ + --hash=sha256:7e4660fad2807612bb200de7262c88773c3483e85d981324b3c647176e41fdc8 \ + --hash=sha256:2504e9fd94eabe545d20cddcc2ff0da86ee55d76329e1ab92ecfcc6c0a8156c4 \ + --hash=sha256:093a0c079dd6228a7f3c3d82b906b41964eaa062a9a8c19f45ab4984bf4e872b \ + --hash=sha256:387c87b51d72442708e7a853e7e7642717e704d59571da2f3b29e748be58c78a \ + --hash=sha256:d9ac10a2ebe93a102a326415b330fff7512f01a9401406896e78a81d75d6eddc \ + --hash=sha256:72fdbda5b4c2a6a72320857ef503a6589f56d46821592d4377c8c8604810342b \ + --hash=sha256:f34e88940833d46108f949fdc1fcfb74d6b5ae076550cd67ab59ef47555dba95 \ + --hash=sha256:a3e0f89fe35cb03ff1646ab663dabf496477bab2a072315192dbaa6928862891 \ + --hash=sha256:6afb3e62f2a3456f2180a4eef6b03177788df7ce938036ff7f09b696d418d186 \ + --hash=sha256:cc19ed5c7afca3f6b298bfc35a6baa27adb2019670d15c32d0bb8f780f7d560d \ + --hash=sha256:bc75f63653ce4ec764c8f8c8b0ad9423e23021e1c34a84eb5f4ecac8538a4a4a \ + --hash=sha256:3db3ba3c470801e94836ad78bf11fd5fab22e71b0c77343a1ee95d693879937a \ + --hash=sha256:be7d650a434921a6b1ebe3fff324dbc2364393eb29d7672e638ce3e21076974e \ + --hash=sha256:6a76b4722a529390683c0304501f238b365a46b1e5fb6b7249dbc0ad6fea51a0 \ + --hash=sha256:96a551e4683f1c307cfc3d9a05fec62c00a7264f320c9962a67a543e3ce0d8ff \ + --hash=sha256:61d0a6ceed8f08c75a395bc28cb648a81cf8dee75ba4650093ad1a24a51c8724 \ + --hash=sha256:ad280bbd409bf598683dda82232f5215cfc5f2b1bf0854e409b4d0c44a113b1d \ + --hash=sha256:76eddaf7fef1d0994e3d536ad48aa75034663d3a07f6f7e3e601105ae73aeff6 \ + --hash=sha256:52e239cd66c4158e412318fbe028cd94b0ef21b0707f56dcb4bdc250ee58fd40 \ + --hash=sha256:08bf9d5eabba160dd4f6ad247cf12f229cc19d2458511cab2eb9647f42fa6795 \ + --hash=sha256:1b2cf018168cad87580e67bdde38ff5e51511112f1ce6ce9a8336871f465c19a \ + --hash=sha256:14f64d1ac6942ff089fc7e926440f7a5ced062e2ed0949d7d2d680dc5c00e2d4 \ + --hash=sha256:7a838852e5afb6b4126f93eb409516a8c02a49b788f4df8b6469a40c2157fa21 \ + --hash=sha256:98bbe35b5ad24a782c7bf267596638d78aa0e87abc7837bdac5b2a2ab954179e \ + --hash=sha256:72691a1615ebb42da8b636c5ca9f2b71f266be9e172f66209a361c175b7842c5 \ + --hash=sha256:25ab464bfba8c401f5536d5aa95f0ca1dd8257b5202eede04019b4415f491351 \ + --hash=sha256:0e8aeefebe752f46e3c4b769e53f1d4ad71208fe1150975ef7662c22cca80fab \ + --hash=sha256:b7e4e4dd177a8665c9ce86bc9caae2ab3aa9360b7ce7ec01827ea1baea9ff748 \ + --hash=sha256:7fc2915949e5c1ea27a851f7a472a7da7d0a40d679f0a31e42f1022f3c562e87 \ + --hash=sha256:a1fa38a4687b14f517f049477178093c39c2a10fdcced21116f47c017516498f \ + --hash=sha256:5be8292d07a3ab828dc95b5ee6b69ca0a5b2e579a577b39671f4f5b47116dfd2 \ + --hash=sha256:778588ca9897b6c6bab39b0d3034efff4c5438f5e3bd52fda3914175498202f9 \ + --hash=sha256:f0d5b3af045a187aedbd7ed5fc513bd933a97aaff78e61c3745b330792c4345b \ + 
--hash=sha256:2290bc146a1b6a9730350f695e8b670e1d1feb8446597bed0bbe7c3c30e0abcb \ + --hash=sha256:4df22ec17390ec5ccb38d211fb251d138d37a43344492858cea24de8efa15003 \ + --hash=sha256:eac3a6e926421e976c1c2653624e1294f162dc67ac55f9addbe8f7b8d08ce603 \ + --hash=sha256:cf789be42aea5752ee396d58de0538d5fcb76795c85fb03ab23620293fb81b6f \ + --hash=sha256:e0f05b9dafa5670a7503abc715af081dbbb176a8e6770de77bccaeb9024206c5 \ + --hash=sha256:b2d7a6646d41228e9049978be1f3f838b557a1bde500b919906d54c4390f5086 \ + --hash=sha256:a4d76e28df27ce25dc19583407f5c6c6c2ba33b443329331ab29b6ef94c8736d \ + --hash=sha256:418f52b77b715b42e8ec43ee61ca74abc6765a20db11e8576e7f6586488a266f \ + --hash=sha256:1f1736d5b21f69feefeef8a75e8d3bf1f0a1e17c165a7488c3111af9d6936e91 \ + --hash=sha256:5918c0fab50df764812f3ca287f0d716c5c10bedde93d4da2cefc9d40d03f3aa \ + --hash=sha256:7b617b81f08ad8def5edd110de44fd6d326f969240cc940c6f6b3ef21fe9c59f \ + --hash=sha256:587a3f19954d687a14e0c8202628844db692dbf00bba0e6d006659bf1ca91cbe \ + --hash=sha256:791759138380df21d356ff991265fde7fe5997b0c924a502847a9f9141e68786 \ + --hash=sha256:95315b8c8ddfa2fdcb7fe3ddea8a595c1364524f512160c604e3be368be9dd07 \ + --hash=sha256:18ac08475c9b971237fcc395b0a6ee4e8580bb5cf6247bc9b8461644bef5d9f4 \ + --hash=sha256:ac2c04b6345e215e65ca6aef5c05cc689a960b16674eaa1f90a8f86dfaee8c04 \ + --hash=sha256:4c1ab25e3134774f1e476d4bb9050cdec25f10802e63e92153906ae934578734 \ + --hash=sha256:4bfec4a73e8447d8fe8854886ffa78df2b1c279a7592241c2eb393d4499a17e2 \ + --hash=sha256:166acc57af5d2ff0c0c342aed02e69a0cd5ff216cae8820c1059a6f3b7cf5f78 \ + --hash=sha256:413f9e46259fe26d99461af8e1a2b4795a4e27cc8ac6f7919ec19bcee8945074 \ + --hash=sha256:354dea21137a316b6868ee41c2ae7cce001e104760cf4eab3ec85627aed9b6cd \ + --hash=sha256:24ddb03c1ccfe12d000d950c9aba93a7297993c4e3905d9f2c9795bb0764d523 +SQLAlchemy==2.0.41 \ + --hash=sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b \ + --hash=sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5 \ + --hash=sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747 \ + --hash=sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30 \ + --hash=sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29 \ + --hash=sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11 \ + --hash=sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda \ + --hash=sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08 \ + --hash=sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f \ + --hash=sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560 \ + --hash=sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f \ + --hash=sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6 \ + --hash=sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04 \ + --hash=sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582 \ + --hash=sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8 \ + --hash=sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504 \ + --hash=sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9 \ + --hash=sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1 \ + --hash=sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70 \ + --hash=sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e \ + 
--hash=sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078 \ + --hash=sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae \ + --hash=sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6 \ + --hash=sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0 \ + --hash=sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443 \ + --hash=sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc \ + --hash=sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1 \ + --hash=sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a \ + --hash=sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d \ + --hash=sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23 \ + --hash=sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f \ + --hash=sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df \ + --hash=sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8 \ + --hash=sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b \ + --hash=sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036 \ + --hash=sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2 \ + --hash=sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348 \ + --hash=sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2 \ + --hash=sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd \ + --hash=sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3 \ + --hash=sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5 \ + --hash=sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814 \ + --hash=sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea \ + --hash=sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd \ + --hash=sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6 \ + --hash=sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45 \ + --hash=sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda \ + --hash=sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2 \ + --hash=sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f \ + --hash=sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769 \ + --hash=sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b \ + --hash=sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826 \ + --hash=sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923 \ + --hash=sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440 \ + --hash=sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71 \ + --hash=sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576 \ + --hash=sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9 +typing_extensions==4.14.0 \ + --hash=sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af \ + --hash=sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4 diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 00000000..08d7fae --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,42 @@ +# Alembic configuration to set up and manage database migrations. 
+# For details: https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+[alembic]
+sqlalchemy.url = postgresql+psycopg://postgres:postgres@postgres-db:5432/template_db
+script_location = %(here)s/alembic
+timezone = UTC
+
+# Logging configuration.
+# For details: https://docs.python.org/3/library/logging.config.html#logging-config-fileformat
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARNING
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARNING
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/backend/alembic/env.py b/backend/alembic/env.py
new file mode 100644
index 00000000..9200a2e
--- /dev/null
+++ b/backend/alembic/env.py
@@ -0,0 +1,63 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config, pool, text
+
+from alembic import context
+
+# This is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging; this
+# essentially sets up the loggers.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# We don't use a target model here and instead manage
+# all migrations manually, so there is no need for, and
+# no support of, Alembic's autogenerate.
+target_metadata = None
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This would configure the context with just a URL and
+    not an Engine, and calls to context.execute() would
+    emit the given string to the script output. This
+    project does not support offline migrations.
+
+    """
+    raise NotImplementedError("Offline migrations are not supported")
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(connection=connection, target_metadata=target_metadata)
+
+        # Ensure that Alembic finds its own table in the public schema.
+        connection.execute(text("SET search_path TO public"))
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako
new file mode 100644
index 00000000..480b130
--- /dev/null
+++ b/backend/alembic/script.py.mako
@@ -0,0 +1,28 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    ${downgrades if downgrades else "pass"}
diff --git a/backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py b/backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py
new file mode 100644
index 00000000..31742f8
--- /dev/null
+++ b/backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py
@@ -0,0 +1,358 @@
+"""Initial database setup
+
+Revision ID: 7ce2fd1a52c8
+Revises:
+Create Date: 2025-05-18 01:33:19.737471+00:00
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "7ce2fd1a52c8"
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+
+    # Configure the JWT secret for the db. This should be set via an environment variable.
+    # See also: https://github.com/PostgREST/postgrest/discussions/3765
+    # See also: https://datatracker.ietf.org/doc/html/rfc7519
+    op.execute(sa.text("""alter database template_db set "app.jwt_secret" to 'cLrngXnioRTsqo2vBKqiEPCN467PrrRl'"""))
+
+    # Make sure that functions can be executed only by those roles we explicitly allow to
+    # execute them. Thus, we first revoke all privileges and then add them back incrementally
+    # as needed.
+    # See also: https://docs.postgrest.org/en/v12/explanations/db_authz.html#functions
+    op.execute(sa.text("alter default privileges revoke execute on functions from public"))
+
+    # We make use of these extensions:
+    #
+    # https://www.postgresql.org/docs/current/pgcrypto.html
+    # https://github.com/michelp/pgjwt
+    # https://github.com/nmandery/pg_byteamagic
+    #
+    # Install them into the default `public` schema such that they can be accessed from
+    # our own `api`, `auth`, and `data` schemas.
+    op.execute(
+        sa.text(
+            """
+            create extension pgcrypto;
+            create extension pgjwt;
+            create extension byteamagic;
+            """
+        )
+    )
+
+    # Public `api` schema where the API related views and functions live. We configure
+    # this schema for PostgREST as the one for which REST endpoints are constructed.
+    op.execute(sa.text("create schema api"))
+
+    # Private schema for handling user auth. The user table lives here, and it is not
+    # exposed through public REST endpoints.
+    op.execute(sa.text("create schema auth"))
+
+    # Private schema that stores the actual tables. Data from these tables are exposed
+    # via views to the public `api` schema.
+    op.execute(sa.text("create schema data"))
+
+    # This is a special role known to PostgREST from which other roles (see below) are
+    # impersonated depending on auth.
+    # See also: https://postgrest.org/en/stable/references/auth.html#overview-of-role-system
+    op.execute(sa.text("create role authenticator login noinherit nocreatedb nocreaterole nosuperuser"))
+
+    # The `anonymous` role PostgREST switches to if no auth header was passed along
+    # with a request.
+    # TODO What about granting usage on `auth` schema? See functions below.
+    op.execute(
+        sa.text(
+            """
+            create role anonymous nologin noinherit; -- TODO Other settings?
+            grant anonymous to authenticator;
+            grant usage on schema auth, api to anonymous;
+            grant execute on function public.crypt, public.gen_salt(text), public.sign, public.url_encode, public.algorithm_sign, public.hmac(text, text, text) to anonymous;
+            """
+        )
+    )
+
+    # The `apiuser` role PostgREST switches to if JWT auth was successful.
+    # See also: https://postgrest.org/en/stable/references/auth.html#jwt-based-user-impersonation
+    op.execute(
+        sa.text(
+            """
+            create role apiuser nologin noinherit; -- TODO Other settings?
+            grant apiuser to authenticator;
+            grant usage on schema auth, api, data to apiuser;
+            grant execute on function public.crypt, public.gen_salt(text), public.byteamagic_mime to apiuser;
+            """
+        )
+    )
+
+    # The `dramatiq` role is *not* used by PostgREST! Instead, async Dramatiq workers
+    # that need to access the db to fetch and write back data use this role. These
+    # workers are trusted because we built them ourselves.
+    op.execute(
+        sa.text(
+            """
+            create role dramatiq login password 'dramatiq' noinherit; -- TODO Other settings?
+            grant usage on schema data to dramatiq;
+            """
+        )
+    )
+
+    # The `user` table in the private `auth` schema contains all users in the system. In the
+    # future we might add more roles (e.g. admin, or one role per user account). Furthermore,
+    # we grant only limited access to the table for the different roles, and further restrict
+    # access using row-level security: https://www.postgresql.org/docs/current/ddl-rowsecurity.html
+    # Lastly, we add an insert/update trigger function to hash the password and store that hash.
+    # See also: https://postgrest.org/en/stable/how-tos/sql-user-management.html#storing-users-and-passwords
+    # TODO See also: https://github.com/PostgREST/postgrest/discussions/3696#discussioncomment-10408092
+    op.execute(
+        sa.text(
+            """
+            create table auth.user (
+                id bigserial primary key,
+                created_at timestamp with time zone default now(),
+                email text unique not null, -- TODO CHECK constraint for email format.
+                password text not null, -- This is the hash, not the clear text password.
+                role text not null default 'apiuser' check (role = 'apiuser'), -- TODO FK to known roles?
+                first_name text,
+                last_name text
+            )
+            """
+        )
+    )
+
+    op.execute(
+        sa.text(
+            """
+            grant select, insert on auth.user to anonymous; -- TODO Limit select columns!
+            grant usage on auth.user_id_seq to anonymous;
+            grant select, update(first_name, last_name) on auth.user to apiuser; -- TODO Limit select columns!
+            grant select on auth.user to dramatiq; -- TODO Limit select columns!
+            """
+        )
+    )
+
+    op.execute(
+        sa.text(
+            """
+            alter table auth.user enable row level security;
+            create policy user_email_policy on auth.user to anonymous, apiuser
+                using (
+                    current_role = 'anonymous'
+                    or email = current_setting('request.jwt.claims', true)::json->>'email'
+                );
+            """
+        )
+    )
+
+    op.execute(
+        sa.text(
+            """
+            create function auth.encrypt_password() returns trigger language plpgsql as $$
+            begin
+                if new.password is not null then
+                    new.password = crypt(new.password, gen_salt('bf')); -- TODO min 8 chars, other rules?
+                end if;
+                return new;
+            end;
+            $$
+            """
+        )
+    )
+
+    op.execute(
+        sa.text(
+            """
+            create trigger encrypt_password
+                before insert or update on auth.user
+                for each row
+                execute procedure auth.encrypt_password();
+            """
+        )
+    )
+
+    # The `dramatiq_queue` table implements a queue of messages for the async framework
+    # Dramatiq.
Messages are inserted and trigger a Postgres notification that the Dramatiq broker
+    # listens to; in addition, the broker polls this table to ensure messages aren't dropped. Note
+    # that responses may be inserted as new rows if the original message was removed from the table;
+    # in that case it wouldn't be possible to find the owning user for that response anymore, because
+    # users are associated with the now-deleted message.
+    # See also: https://gitlab.com/dalibo/dramatiq-pg
+    # See also: https://www.postgresql.org/docs/current/sql-notify.html
+    op.execute(
+        sa.text(
+            """
+            create table data.dramatiq_queue(
+                message_id uuid primary key,
+                user_id bigint references auth.user(id),
+                queue_name text not null,
+                state text not null check (state in ('queued', 'consumed', 'rejected', 'done')),
+                mtime timestamp with time zone,
+                message jsonb,
+                result jsonb,
+                result_ttl timestamp with time zone
+            ) -- TODO Consider `without oids` as Dramatiq-PG uses.
+            """
+        )
+    )
+
+    op.execute(
+        sa.text(
+            """
+            grant select, insert on data.dramatiq_queue to apiuser;
+            grant all privileges on table data.dramatiq_queue to dramatiq;
+            """
+        )
+    )
+
+    op.execute(
+        sa.text(
+            """
+            alter table data.dramatiq_queue enable row level security;
+            create policy user_message_policy on data.dramatiq_queue to apiuser, dramatiq
+                using (
+                    current_role = 'dramatiq'
+                    or user_id = (
+                        select id
+                        from auth.user
+                        where email = current_setting('request.jwt.claims', true)::json->>'email'
+                    )
+                );
+            """
+        )
+    )
+
+    # The public `profile` view presents some columns from the `auth.user` table. An auth'ed
+    # user can view and update only some of the columns of the underlying table.
+    op.execute(
+        sa.text(
+            """
+            create view api.profile with (security_invoker = true) as
+                select email, first_name, last_name, created_at from auth.user
+            """
+        )
+    )
+
+    op.execute(sa.text("grant select, update(first_name, last_name) on api.profile to apiuser"))
+
+    # The public `job` view presents a user's queued messages from the `data.dramatiq_queue`
+    # table, so that an auth'ed user can poll the state and result of their async jobs.
+    op.execute(
+        sa.text(
+            """
+            create view api.job with (security_invoker = true) as
+                select message_id as job_id, state, result from data.dramatiq_queue
+            """
+        )
+    )
+
+    op.execute(sa.text("grant select on api.job to apiuser"))
+
+    # Public API function to sign up a new user: insert a row into the private `auth.user` table.
+    # TODO How do we return a 201 here? Should this function return any payload at all?
+    op.execute(
+        sa.text(
+            """
+            create function api.signup(email text, password text) returns record language plpgsql as $$
+            declare
+                ret record;
+            begin
+                insert into auth.user as u (email, password)
+                    values (signup.email, signup.password)
+                    returning u.created_at, u.email into ret;
+                return ret;
+            end;
+            $$
+            """
+        )
+    )
+
+    op.execute(sa.text("grant execute on function api.signup to anonymous"))
+
+    # Public API function to log in an existing user: return the JWT for the user, which allows PostgREST
+    # to impersonate the `apiuser` role and thereby get access to the resources.
+
+    # Public API function to create & send a message to the async Dramatiq workers.
+    op.execute(
+        sa.text(
+            """
+            create function api.job() returns record language sql as $$
+                with "user" as (
+                    select id from auth.user where email = current_setting('request.jwt.claims', true)::json->>'email'
+                ),
+                message as (
+                    select
+                        'job_q' as queue_name, -- Dramatiq message queue name.
+                        'job' as actor_name, -- Dramatiq actor function.
+                        jsonb_build_array() as args, -- Positional args for function.
+                        jsonb_build_object() as kwargs, -- Keyword args for function.
+                        jsonb_build_object() as options, -- Additional Dramatiq broker options.
+                        gen_random_uuid() as message_id,
+                        extract(epoch from now())::bigint as message_timestamp
+                ),
+                enque as (
+                    insert into data.dramatiq_queue (user_id, message_id, queue_name, state, mtime, message)
+                        select
+                            u.id,
+                            m.message_id,
+                            m.queue_name,
+                            'queued',
+                            to_timestamp(m.message_timestamp),
+                            (select to_json(message) from message)
+                        from message m, "user" u
+                        returning queue_name, message_id
+                ),
+                notify as (
+                    select
+                        message_id,
+                        pg_notify('dramatiq.' || queue_name || '.enqueue', jsonb_build_object('message_id', message_id)::text)
+                    from enque
+                )
+                select message_id as job_id from notify
+            $$
+            """
+        )
+    )
+
+    op.execute(sa.text("grant execute on function api.job to apiuser"))
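The `pg_notify` call in `api.job` is what wakes the broker without polling. Roughly what the Dramatiq-PG broker does internally can be sketched as follows; it assumes `psycopg` and the `dramatiq` role created above, with Postgres published on `localhost:5432` as in the develop compose file.

```python
import psycopg  # psycopg 3

conn = psycopg.connect(
    "postgresql://dramatiq:dramatiq@localhost:5432/template_db", autocommit=True
)
# The channel name contains dots, so it must be quoted as an identifier.
conn.execute('listen "dramatiq.job_q.enqueue"')
for notification in conn.notifies():  # Blocks until a NOTIFY arrives.
    print(notification.channel, notification.payload)  # payload: {"message_id": "..."}
    break
```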
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    raise NotImplementedError("No down migrations beyond this version")
diff --git a/backend/develop-requirements.txt b/backend/develop-requirements.txt
new file mode 100644
index 00000000..0863376
--- /dev/null
+++ b/backend/develop-requirements.txt
@@ -0,0 +1,36 @@
+# https://pip.pypa.io/en/stable/reference/requirements-file-format/
+
+# Generate Software Bill of Materials (SBOM).
+cyclonedx-bom >=4.0.0,<5.0.0
+
+# Package build tool.
+flit >=3.2.0,<4.0.0
+
+# Check the Python code.
+mypy >=1.0.0,<1.15
+perflint >=0.8.0,<1.0.0
+pip-audit >=2.4.4,<3.0.0
+pylint >=3.0.0,<3.4.0
+
+# Testing. Note that the `custom_exit_code` and `env` plugins may currently be unmaintained.
+coverage ==7.6.12; python_version<"3.14" # https://github.com/pypi/warehouse/pull/17872#issuecomment-2845932281
+faker ==35.0.0
+hypothesis >=6.21.0,<6.122.8
+pytest >=7.2.0,<9.0.0
+pytest-cases ==3.8.6
+pytest-custom_exit_code ==0.3.0
+pytest-cov ==6.1.1 # Uses: coverage[toml] >=7.5
+pytest-doctestplus ==1.3.0
+pytest-env ==1.1.5
+pytest-docker ==3.2.1
+pytest-order ==1.3.0
+requests ==2.32.*
+types-requests ==2.32.*
+
+# Sphinx is used to generate documentation from Python docstrings and reStructuredText.
+sphinx >=5.1.1,<9.0.0
+
+# Alembic for managing the db migrations. This should match the version pinned in the
+# alembic-requirements.txt file; that file itself can't be used here because it forces
+# hash checking which, in turn, collides with the editable installation of the backend
+# package for development.
+alembic ==1.15.2
diff --git a/backend/develop.toml b/backend/develop.toml
new file mode 100644
index 00000000..81b2343
--- /dev/null
+++ b/backend/develop.toml
@@ -0,0 +1,180 @@
+# https://bandit.readthedocs.io/en/latest/config.html
+# Skip test B101 because of issue https://github.com/PyCQA/bandit/issues/457
+[tool.bandit]
+tests = []
+skips = ["B101"]
+
+
+# https://github.com/psf/black#configuration
+[tool.black]
+line-length = 120
+
+
+# https://github.com/pytest-dev/pytest-cov
+# https://github.com/nedbat/coveragepy
+[tool.coverage.report]
+fail_under = 100
+show_missing = true
+
+[tool.coverage.run]
+branch = true
+omit = [
+]
+
+
+# https://flit.pypa.io/en/latest/pyproject_toml.html#sdist-section
+# See also: https://github.com/pypa/flit/issues/565
+[tool.flit.sdist]
+include = []
+exclude = [
+    "alembic/",
+    "docker/",
+    "docs/",
+    "tests/",
+    ".flake8",
+    "alembic.ini",
+    "alembic-requirements.txt",
+    "develop-requirements.txt",
+    "Makefile",
+]
+
+
+# https://pycqa.github.io/isort/
+[tool.isort]
+profile = "black"
+multi_line_output = 3
+line_length = 120
+skip_gitignore = true
+
+
+# https://mypy.readthedocs.io/en/stable/config_file.html#using-a-pyproject-toml
+[tool.mypy]
+# mypy_path =
+# exclude =
+show_error_codes = true
+show_column_numbers = true
+pretty = true
+show_traceback = true
+check_untyped_defs = true
+incremental = false
+strict = true
+warn_return_any = true
+warn_redundant_casts = true
+warn_unreachable = true
+warn_unused_configs = true
+warn_unused_ignores = true
+disallow_any_explicit = true
+disallow_untyped_calls = true
+disallow_untyped_defs = true
+disallow_incomplete_defs = true
+disallow_untyped_decorators = true
+# disable_error_code =
+# allow_redefinition =
+
+[[tool.mypy.overrides]]
+module = [
+    "dramatiq_pg",
+    "pytest",
+]
+ignore_missing_imports = true
+
+
+# https://pylint.pycqa.org/en/latest/user_guide/configuration/index.html
+[tool.pylint.main]
+fail-under = 10.0
+suggestion-mode = true
+load-plugins = [
+    # "perflint", # A Linter for performance anti-patterns.
+    "pylint.extensions.bad_builtin",
+    "pylint.extensions.broad_try_clause",
+    "pylint.extensions.check_elif",
+    "pylint.extensions.code_style",
+    "pylint.extensions.comparison_placement",
+    "pylint.extensions.confusing_elif",
+    "pylint.extensions.consider_refactoring_into_while_condition",
+    "pylint.extensions.consider_ternary_expression",
+    "pylint.extensions.dict_init_mutate",
+    # "pylint.extensions.docparams",
+    # "pylint.extensions.docstyle",
+    "pylint.extensions.dunder",
+    "pylint.extensions.empty_comment",
+    "pylint.extensions.for_any_all",
+    "pylint.extensions.magic_value",
+    # "pylint.extensions.mccabe",
+    "pylint.extensions.no_self_use",
+    "pylint.extensions.overlapping_exceptions",
+    "pylint.extensions.private_import",
+    "pylint.extensions.redefined_loop_name",
+    "pylint.extensions.redefined_variable_type",
+    "pylint.extensions.set_membership",
+    "pylint.extensions.typing",
+    "pylint.extensions.while_used",
+]
+disable = [
+    "duplicate-code",
+    "fixme",
+    "line-too-long", # Replaced by Flake8 Bugbear B950 check.
+ "magic-value-comparison", + "too-few-public-methods", + "too-many-ancestors", + "too-many-arguments", + "too-many-boolean-expressions", + "too-many-branches", + "too-many-instance-attributes", + "too-many-lines", + "too-many-locals", + "too-many-nested-blocks", + "too-many-positional-arguments", + "too-many-public-methods", + "too-many-return-statements", + "too-many-statements", + "too-many-try-statements", +] + +[tool.pylint.MISCELLANEOUS] +notes = [ + "FIXME", + "TODO", + "BUGBUG", +] + +[tool.pylint.FORMAT] +max-line-length = 120 + + +# https://docs.pytest.org/en/latest/reference/customize.html#configuration-file-formats +# https://docs.pytest.org/en/latest/reference/reference.html#configuration-options +# https://docs.pytest.org/en/latest/reference/reference.html#command-line-flags +# +# To integrate Hypothesis into pytest and coverage, we use its native plugin: +# https://hypothesis.readthedocs.io/en/latest/details.html#the-hypothesis-pytest-plugin +# +# To discover tests in documentation, we use doctest and the doctest-plus plugin which +# adds multiple useful options to control tests in documentation. More details at: +# https://docs.python.org/3/library/doctest.html +# https://github.com/scientific-python/pytest-doctestplus +# +# To avoid failing pytest when no tests were dicovered, we need an extra plugin: +# https://docs.pytest.org/en/latest/reference/exit-codes.html +# https://github.com/yashtodi94/pytest-custom_exit_code +[tool.pytest.ini_options] +minversion = "7.0" +addopts = """-vv -ra --tb native --durations 0 \ + --hypothesis-show-statistics --hypothesis-explain --hypothesis-verbosity verbose \ + --doctest-modules --doctest-continue-on-failure --doctest-glob '*.rst' --doctest-plus \ + --suppress-no-test-exit-code \ + --cov template_jobs \ +""" # Consider adding --pdb +# https://docs.python.org/3/library/doctest.html#option-flags +doctest_optionflags = "IGNORE_EXCEPTION_DETAIL" +env = [ + "PYTHONDEVMODE=1", # https://docs.python.org/3/library/devmode.html + "DRAMATIQ_SQLA_URL=postgresql://dramatiq:dramatiq@postgres-db:5432/template_db", +] +filterwarnings = [ + "error", + "always::DeprecationWarning", + # https://docs.pytest.org/en/latest/how-to/failures.html#warning-about-unraisable-exceptions-and-unhandled-thread-exceptions + "error::pytest.PytestUnraisableExceptionWarning", + "error::pytest.PytestUnhandledThreadExceptionWarning", +] diff --git a/backend/docker/Dockerfile.alembic b/backend/docker/Dockerfile.alembic new file mode 100644 index 00000000..453c20a --- /dev/null +++ b/backend/docker/Dockerfile.alembic @@ -0,0 +1,13 @@ +FROM python:3.13-alpine3.21@sha256:452682e4648deafe431ad2f2391d726d7c52f0ff291be8bd4074b10379bb89ff + +# Copy over the migrations and configuration. +RUN mkdir -p /alembic/alembic +COPY alembic/ /alembic/alembic/ +COPY alembic.ini /alembic/ +COPY alembic-requirements.txt /alembic/ + +# Install Alembic and its dependencies. +RUN python -m pip install --require-hashes --requirement /alembic/alembic-requirements.txt + +# Entrypoint to the container is running Alembic and quitting. +ENTRYPOINT ["python", "-m", "alembic", "-c", "/alembic/alembic.ini", "upgrade", "head"] diff --git a/backend/docker/Dockerfile.dramatiq b/backend/docker/Dockerfile.dramatiq new file mode 100644 index 00000000..23744a5 --- /dev/null +++ b/backend/docker/Dockerfile.dramatiq @@ -0,0 +1,12 @@ +FROM python:3.13-alpine3.21@sha256:452682e4648deafe431ad2f2391d726d7c52f0ff291be8bd4074b10379bb89ff + +# Copy over the distribution files in Simple Repository (PEP 503) format. 
+RUN mkdir -p /tmp/dist/simple-index/ +COPY dist/simple-index/ /tmp/dist/simple-index/ +COPY dist/template_jobs-*-requirements.txt /tmp/dist/requirements.txt + +# Install the server package and its dependencies. +RUN python -m pip install --extra-index-url file:///tmp/dist/simple-index/ --require-hashes --requirement /tmp/dist/requirements.txt + +# Entrypoint to the container starts up Dramatiq consumers. +ENTRYPOINT dramatiq --processes ${DRAMATIQ_PROCESSES} --threads ${DRAMATIQ_THREADS} --verbose template_jobs.broker diff --git a/backend/docker/Dockerfile.pg b/backend/docker/Dockerfile.pg new file mode 100644 index 00000000..3dfd7a3 --- /dev/null +++ b/backend/docker/Dockerfile.pg @@ -0,0 +1,24 @@ +FROM alpine:3.21@sha256:a8560b36e8b8210634f77d9f7f9efd7ffa463e380b75e2e74aff4511df3ef88c AS build + +# https://pkgs.alpinelinux.org/packages +RUN apk update && \ + apk add git curl gcc openssl make && \ + apk add postgresql17-dev file-dev libpq-dev + +# https://github.com/michelp/pgjwt +RUN cd /tmp && \ + git clone --branch master --depth 1 https://github.com/michelp/pgjwt.git && \ + cd pgjwt && make install + +# https://github.com/nmandery/pg_byteamagic +RUN cd /tmp && \ + git clone --branch master --depth 1 https://github.com/nmandery/pg_byteamagic.git && \ + cd pg_byteamagic && make install + +FROM postgres:17-alpine3.21@sha256:f325a29ec9deb7039c5f07761d77d79d537dac836ecd99f982f6ca5476724604 + +RUN apk update && \ + apk add libmagic + +COPY --from=build /usr/share/postgresql17/extension/byteamagic* /usr/share/postgresql17/extension/pgflake* /usr/share/postgresql17/extension/pgjwt* /usr/local/share/postgresql/extension/ +COPY --from=build /usr/lib/postgresql17/byteamagic* /usr/lib/postgresql17/pgflake* /usr/lib/postgresql17/bitcode /usr/local/lib/postgresql/ diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 00000000..1b482cd --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,45 @@ +# https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html +# https://flit.pypa.io/en/latest/pyproject_toml.html +[build-system] +requires = ["flit_core >=3.2.0,<4.0.0"] +build-backend = "flit_core.buildapi" + +[project] +name = "template_jobs" +requires-python = ">=3.13.0,<3.14.0" +authors = [{name = "Jens Troeger", email = "jens.troeger@light-speed.de"}] +maintainers = [{name = "Jens Troeger", email = "jens.troeger@light-speed.de"}] +dynamic = ["version", "description"] +license = {file = "LICENSE.md"} +readme = "README.md" +dependencies = [ + "dramatiq ==1.18.0", + "dramatiq-pg ==0.12.0", + "sqlalchemy[postgresql-psycopg2binary] ==2.0.41", + "greenlet ==3.2.3", # Remove with SQLA 2.1, see also https://github.com/sqlalchemy/sqlalchemy/issues/7714 +] +keywords = [] +# https://pypi.org/classifiers/ +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: Other/Proprietary License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: Implementation :: CPython", +] + +[project.scripts] + +[project.entry-points] + +[project.optional-dependencies] + +[project.urls] +#Homepage = +#Changelog = +#Documentation = +#Issues = diff --git a/backend/src/template_jobs/__init__.py b/backend/src/template_jobs/__init__.py new file mode 100644 index 00000000..99bb5d8 --- /dev/null +++ b/backend/src/template_jobs/__init__.py @@ -0,0 +1,9 @@ +"""The 
Template Server."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+# The version of this package. There's no comprehensive, official list of other
+# magic constants, so we stick with this one only for now. See also this conversation:
+# https://stackoverflow.com/questions/38344848/is-there-a-comprehensive-table-of-pythons-magic-constants
+__version__ = "0.0.0"
diff --git a/backend/src/template_jobs/actors.py b/backend/src/template_jobs/actors.py
new file mode 100644
index 00000000..31c0618
--- /dev/null
+++ b/backend/src/template_jobs/actors.py
@@ -0,0 +1,16 @@
+"""Asynchronous `Dramatiq <https://dramatiq.io>`_ workers."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+import dramatiq
+
+# When this module is run, it initializes the Dramatiq actors below.
+# It therefore needs a broker to register the actors with.
+assert dramatiq.broker.global_broker is not None
+
+
+@dramatiq.actor(queue_name="job_q", store_results=True)
+def job() -> str:
+    """Do a job."""
+    return "done"
diff --git a/backend/src/template_jobs/broker.py b/backend/src/template_jobs/broker.py
new file mode 100644
index 00000000..1023c65
--- /dev/null
+++ b/backend/src/template_jobs/broker.py
@@ -0,0 +1,18 @@
+"""Configuration of the asynchronous `Dramatiq <https://dramatiq.io>`_ broker."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+import os
+
+import dramatiq
+import dramatiq_pg
+
+# Create the Postgres Broker instance that manages reading from and writing
+# to the message queue (which is implemented by PG).
+dramatiq.set_broker(
+    dramatiq_pg.PostgresBroker(url=os.environ["DRAMATIQ_SQLA_URL"], results=True, schema="data", prefix="dramatiq_")
+)
+
+# Importing the actor module registers the Dramatiq actors with the broker.
+from . import actors  # noqa: F401,E402  # pylint: disable=unused-import,wrong-import-position
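With the broker wired up, producing work from Python is a matter of importing the broker module and calling the actor's `send()`. A minimal producer sketch; the connection URL mirrors the `DRAMATIQ_SQLA_URL` used elsewhere in this patch and is an assumption for local use.

```python
import os

# Must be set before the broker module is imported.
os.environ.setdefault(
    "DRAMATIQ_SQLA_URL", "postgresql://dramatiq:dramatiq@localhost:5432/template_db"
)

import template_jobs.broker  # noqa: F401,E402  # Registers the Postgres broker.
from template_jobs.actors import job  # noqa: E402

message = job.send()       # Enqueue a message on the "job_q" queue.
print(message.message_id)  # Also the primary key in data.dramatiq_queue.
```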
diff --git a/backend/src/template_jobs/py.typed b/backend/src/template_jobs/py.typed
new file mode 100644
index 00000000..89afa56
--- /dev/null
+++ b/backend/src/template_jobs/py.typed
@@ -0,0 +1 @@
+# PEP-561 marker. https://mypy.readthedocs.io/en/latest/installed_packages.html
diff --git a/backend/tests/actors/test_job.py b/backend/tests/actors/test_job.py
new file mode 100644
index 00000000..46af69a
--- /dev/null
+++ b/backend/tests/actors/test_job.py
@@ -0,0 +1,29 @@
+"""Collection of tests for the ``job`` actor."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+# flake8: noqa: D103
+# pylint: disable=missing-function-docstring, import-outside-toplevel
+
+import dramatiq
+import dramatiq.brokers.stub
+import pytest
+
+# Global ordering of test modules.
+pytestmark = pytest.mark.order(-1)
+
+
+@pytest.mark.skip("Requires: https://gitlab.com/dalibo/dramatiq-pg/-/issues/42")
+def test_job(broker: dramatiq.Broker) -> None:
+
+    # The `broker` fixture created the broker, with which the `job` actor
+    # has been registered already.
+    from template_jobs.actors import job
+
+    # Send a message to the async job, and wait for the job to finish.
+    message = job.send()
+    broker.join(job.queue_name)  # type: ignore[no-untyped-call]
+
+    # Ensure the result.
+    assert message.get_result() == "done"
diff --git a/backend/tests/api/test_login.py b/backend/tests/api/test_login.py
new file mode 100644
index 00000000..1821c0b
--- /dev/null
+++ b/backend/tests/api/test_login.py
@@ -0,0 +1,66 @@
+"""Collection of tests for the ``/rpc/login`` endpoint."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+# flake8: noqa: D103
+# pylint: disable=missing-function-docstring
+
+import pytest
+import requests
+from faker import Faker
+
+# Global ordering of test modules.
+pytestmark = pytest.mark.order(2)
+
+
+# Resource under test.
+_URL = "http://localhost:3000/rpc/login"
+
+
+@pytest.fixture(name="signup")
+def _signup(faker: Faker) -> tuple[str, str]:
+    email = faker.email()
+    password = faker.password()
+
+    response = requests.post(
+        "http://localhost:3000/rpc/signup", data={"email": email, "password": password}, timeout=0.5
+    )
+    assert response.status_code == 200
+
+    return email, password
+
+
+def test_invalid_no_payload() -> None:
+    response = requests.post(_URL, data={}, timeout=0.5)
+    assert response.status_code == 404
+    assert response.json()["code"] == "PGRST202"
+
+
+def test_invalid_incomplete_payload(faker: Faker) -> None:
+    response = requests.post(_URL, data={"email": faker.email()}, timeout=0.5)
+    assert response.status_code == 404
+    assert response.json()["code"] == "PGRST202"
+
+    response = requests.post(_URL, data={"password": faker.password()}, timeout=0.5)
+    assert response.status_code == 404
+    assert response.json()["code"] == "PGRST202"
+
+
+def test_email_not_exist() -> None:
+    pass
+
+
+def test_wrong_password() -> None:
+    pass
+
+
+def test_valid(signup: tuple[str, str]) -> None:
+    email, password = signup
+    response = requests.post(_URL, data={"email": email, "password": password}, timeout=0.5)
+    assert response.status_code == 200
+    assert "token" in response.json()
+
+
+# TODO expired token
+# TODO invalid token
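The two TODOs above could be covered along these lines. This is a sketch only: it assumes `PyJWT` as an additional test dependency and the development JWT secret from `infra/_base-services.yaml`; the expected `401` follows PostgREST's JWT handling.

```python
import time

import jwt  # PyJWT
import requests


def test_expired_token() -> None:
    token = jwt.encode(
        {"role": "apiuser", "email": "user@example.com", "exp": int(time.time()) - 60},
        "cLrngXnioRTsqo2vBKqiEPCN467PrrRl",  # Development secret from infra/_base-services.yaml.
        algorithm="HS256",
    )
    response = requests.get(
        "http://localhost:3000/profile", headers={"Authorization": f"Bearer {token}"}, timeout=0.5
    )
    assert response.status_code == 401  # PostgREST rejects the expired JWT.
```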
diff --git a/backend/tests/api/test_message_queue.py b/backend/tests/api/test_message_queue.py
new file mode 100644
index 00000000..148d3cd
--- /dev/null
+++ b/backend/tests/api/test_message_queue.py
@@ -0,0 +1,80 @@
+"""Collection of tests to exercise the message queue."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+# flake8: noqa: D103
+# pylint: disable=missing-function-docstring
+
+import time
+
+import pytest
+import requests
+from faker import Faker
+
+# Global ordering of test modules.
+pytestmark = pytest.mark.order(4)
+
+
+@pytest.fixture(name="bearer")
+def _signup_login(faker: Faker) -> str:
+    email = faker.email()
+    password = faker.password()
+
+    response = requests.post(
+        "http://localhost:3000/rpc/signup", data={"email": email, "password": password}, timeout=0.5
+    )
+    assert response.status_code == 200
+    response = requests.post(
+        "http://localhost:3000/rpc/login", data={"email": email, "password": password}, timeout=0.5
+    )
+    assert response.status_code == 200
+
+    json = response.json()
+    return f"Bearer {json['token']}"
+
+
+def test_job(bearer: str) -> None:
+
+    # Post to the `job` endpoint which pushes a message into the queue
+    # and thus triggers the async worker. The immediate response from
+    # the web server is the job's id.
+    response = requests.post(
+        "http://localhost:3000/rpc/job",
+        headers={"Authorization": bearer, "Prefer": "return=representation"},
+        timeout=0.5,
+    )
+    assert response.status_code == 200  # TODO Return 201?
+
+    # Get the job id from the response.
+    message_id = response.json()["job_id"]
+
+    # Get all current jobs from the server; there should be exactly
+    # one for this user.
+    response = requests.get("http://localhost:3000/job", headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 200
+    assert len(response.json()) == 1
+
+    # Now poll the job with the above id until a result becomes available.
+    for _ in range(5):
+        response = requests.get(
+            f"http://localhost:3000/job?job_id=eq.{message_id}",
+            headers={"Authorization": bearer, "Accept": "application/vnd.pgrst.object+json"},
+            timeout=0.5,
+        )
+        assert response.status_code == 200
+
+        # Once the job's `state` is done, its result is available.
+        payload = response.json()
+        if payload["state"] == "done":
+            assert payload["result"] == "done"
+            break
+
+        # The job's not yet done, so wait and poll again.
+        time.sleep(0.5)
+
+    else:
+        pytest.fail("Job did not produce a result before timeout!")
+
+
+# TODO multiple users pushing jobs, can see only their own
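The multi-user TODO above might look like the following sketch, assuming a second, hypothetical `other_bearer` fixture built exactly like `bearer`; the row-level security policy from the migration should hide the first user's job from the second user.

```python
import requests


def test_job_isolation(bearer: str, other_bearer: str) -> None:
    # The first user enqueues a job ...
    response = requests.post(
        "http://localhost:3000/rpc/job", headers={"Authorization": bearer}, timeout=0.5
    )
    assert response.status_code == 200

    # ... which must not be visible to the second user.
    response = requests.get(
        "http://localhost:3000/job", headers={"Authorization": other_bearer}, timeout=0.5
    )
    assert response.status_code == 200
    assert response.json() == []
```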
diff --git a/backend/tests/api/test_profile.py b/backend/tests/api/test_profile.py
new file mode 100644
index 00000000..e34e26e
--- /dev/null
+++ b/backend/tests/api/test_profile.py
@@ -0,0 +1,133 @@
+"""Collection of tests for the ``/profile`` endpoint."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+# flake8: noqa: D103
+# pylint: disable=missing-function-docstring
+
+import datetime
+
+import pytest
+import requests
+from faker import Faker
+
+# Global ordering of test modules.
+pytestmark = pytest.mark.order(3)
+
+
+# Resource under test.
+_URL = "http://localhost:3000/profile"
+
+
+@pytest.fixture(name="bearer")
+def _signup_login(faker: Faker) -> str:
+    email = faker.email()
+    password = faker.password()
+
+    response = requests.post(
+        "http://localhost:3000/rpc/signup", data={"email": email, "password": password}, timeout=0.5
+    )
+    assert response.status_code == 200
+    response = requests.post(
+        "http://localhost:3000/rpc/login", data={"email": email, "password": password}, timeout=0.5
+    )
+    assert response.status_code == 200
+
+    json = response.json()
+    return f"Bearer {json['token']}"
+
+
+def test_get_no_bearer() -> None:
+    response = requests.get(_URL, timeout=0.5)
+    assert response.status_code == 401
+    assert response.json()["code"] == "42501"  # insufficient privileges
+
+
+def test_invalid_verb(faker: Faker, bearer: str) -> None:
+    response = requests.post(_URL, headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 400
+    assert response.json()["code"] == "PGRST102"  # Empty or invalid json
+
+    response = requests.post(
+        _URL, data={"first_name": faker.first_name()}, headers={"Authorization": bearer}, timeout=0.5
+    )
+    assert response.status_code == 403
+    assert response.json()["code"] == "42501"  # permission denied for view profile
+
+    response = requests.put(_URL, headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 400
+    assert response.json()["code"] == "PGRST102"  # Empty or invalid json
+
+    response = requests.put(
+        _URL, data={"first_name": faker.first_name()}, headers={"Authorization": bearer}, timeout=0.5
+    )
+    assert response.status_code == 405
+    assert response.json()["code"] == "PGRST105"  # Filters must include all and only primary key columns ...
+
+    response = requests.delete(_URL, headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 403
+    assert response.json()["code"] == "42501"  # permission denied for view profile
+
+
+def test_get(bearer: str) -> None:
+    response = requests.get(_URL, headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 200
+
+    (profile,) = response.json()
+    assert "email" in profile
+    assert "first_name" in profile
+    assert "last_name" in profile
+
+    response = requests.get(
+        _URL, headers={"Authorization": bearer, "Accept": "application/vnd.pgrst.object+json"}, timeout=0.5
+    )
+    assert response.status_code == 200
+
+    profile = response.json()
+    assert "email" in profile
+    assert "first_name" in profile
+    assert "last_name" in profile
+
+
+def test_patch_no_payload(bearer: str) -> None:
+    response = requests.patch(_URL, headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 400
+    assert response.json()["code"] == "PGRST102"
+
+
+def test_patch_invalid_payload(faker: Faker, bearer: str) -> None:
+    response = requests.patch(_URL, data={"some": "data"}, headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 400
+    assert response.json()["code"] == "PGRST204"
+
+    response = requests.patch(
+        _URL, data={"first_name": faker.random_int()}, headers={"Authorization": bearer}, timeout=0.5
+    )
+    assert response.status_code == 204
+    assert response.content == b""
+
+    response = requests.patch(_URL, data={"email": faker.email()}, headers={"Authorization": bearer}, timeout=0.5)
+    assert response.status_code == 403
+    assert response.json()["code"] == "42501"  # permission denied for view profile
+
+    response = requests.patch(
+        _URL, data={"created_at": faker.date_time(tzinfo=datetime.UTC)}, headers={"Authorization": bearer}, timeout=0.5
+    )
+    assert response.status_code == 403
+    assert response.json()["code"] == "42501"  # permission denied for view profile
+
+
+def test_patch(faker: Faker, bearer: str) -> None:
+    response = requests.patch(
+        _URL, data={"first_name": None, "last_name": None}, headers={"Authorization": bearer}, timeout=0.5
+    )
+    assert response.status_code == 204
+
+    response = requests.patch(
+        _URL,
+        data={"first_name": faker.first_name(), "last_name": faker.last_name()},
+        headers={"Authorization": bearer},
+        timeout=0.5,
+    )
+    assert response.status_code == 204
diff --git a/backend/tests/api/test_signup.py b/backend/tests/api/test_signup.py
new file mode 100644
index 00000000..00874d3
--- /dev/null
+++ b/backend/tests/api/test_signup.py
@@ -0,0 +1,52 @@
+"""Collection of tests for the ``/rpc/signup`` endpoint."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+# flake8: noqa: D103
+# pylint: disable=missing-function-docstring
+
+import pytest
+import requests
+from faker import Faker
+
+# Global ordering of test modules.
+pytestmark = pytest.mark.order(1)
+
+# Resource under test.
+_URL = "http://localhost:3000/rpc/signup"
+
+
+def test_invalid_no_payload() -> None:
+    response = requests.post(_URL, data={}, timeout=0.5)
+    assert response.status_code == 404
+    assert response.json()["code"] == "PGRST202"
+
+
+def test_invalid_incomplete_payload(faker: Faker) -> None:
+    response = requests.post(_URL, data={"email": faker.email()}, timeout=0.5)
+    assert response.status_code == 404
+    assert response.json()["code"] == "PGRST202"
+
+    response = requests.post(_URL, data={"password": faker.password()}, timeout=0.5)
+    assert response.status_code == 404
+    assert response.json()["code"] == "PGRST202"
+
+
+def test_invalid_email() -> None:
+    pass
+
+
+def test_invalid_password() -> None:
+    pass
+
+
+def test_duplicate_email() -> None:
+    pass
+
+
+def test_valid(faker: Faker) -> None:
+    response = requests.post(_URL, data={"email": faker.email(), "password": faker.password()}, timeout=0.5)
+    assert response.status_code == 200
+    assert "created_at" in response.json()
+    assert "email" in response.json()
diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
new file mode 100644
index 00000000..96df86f
--- /dev/null
+++ b/backend/tests/conftest.py
@@ -0,0 +1,24 @@
+"""Base configurations for all tests."""
+
+# Copyright (c) 2025-2025
+# This code is licensed under MIT license, see LICENSE.md for details.
+
+import random
+
+import pytest
+
+# Import the broker module, which imports the actor module, to make sure
+# that all code is loaded for coverage tracking.
+import template_jobs.broker  # noqa: F401  # pylint: disable=unused-import
+
+
+@pytest.fixture(scope="session", autouse=True)
+def faker_session_locale() -> list[str]:
+    """Override the Faker fixture’s default locale."""
+    return ["en-US"]
+
+
+@pytest.fixture(autouse=True)
+def faker_seed() -> float:
+    """Override the Faker fixture’s default RNG seed."""
+    return random.random()  # nosec B311
diff --git a/frontend/Makefile b/frontend/Makefile
new file mode 100644
index 00000000..8623d96
--- /dev/null
+++ b/frontend/Makefile
@@ -0,0 +1,42 @@
+
+# Use bash as the shell when executing a rule's recipe.
For more details: +# https://www.gnu.org/software/make/manual/html_node/Choosing-the-Shell.html +SHELL := bash + + +.PHONY: all +all: + + +.PHONY: init +init: + + +.PHONY: setup +setup: + + +.PHONY: check +check: + + +.PHONY: test +test: + + +.PHONY: build build-docker +build: +build-docker: + + +.PHONY: docs +docs: + + +.PHONY: clean +clean: + + +.PHONY: nuke nuke-caches +nuke-caches: +nuke: diff --git a/infra/README.md b/infra/README.md new file mode 100644 index 00000000..2b4e5f6 --- /dev/null +++ b/infra/README.md @@ -0,0 +1 @@ +# Infrastructure diff --git a/infra/_base-services.yaml b/infra/_base-services.yaml new file mode 100644 index 00000000..bc8c937 --- /dev/null +++ b/infra/_base-services.yaml @@ -0,0 +1,44 @@ +services: + + # https://hub.docker.com/_/postgres/ + postgres-db: + image: fullstack-template-backend-pg:0.0.0 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: template_db + healthcheck: + test: pg_isready --username postgres --dbname template_db + interval: 1s + timeout: 5s + retries: 10 + + # https://hub.docker.com/r/postgrest/postgrest + postgrest: + image: postgrest/postgrest:v13.0.4@sha256:a312f4b2e48530a01fc26f5310d547d6c26d087858360e164522e415723a7732 + ports: + - 3000:3000 + environment: + # https://docs.postgrest.org/en/stable/references/configuration.html + PGRST_DB_URI: postgres://postgres:postgres@postgres-db:5432/template_db + PGRST_OPENAPI_SERVER_PROXY_URI: http://localhost:3001 + PGRST_DB_ANON_ROLE: anonymous + PGRST_DB_SCHEMAS: api + PGRST_JWT_SECRET: cLrngXnioRTsqo2vBKqiEPCN467PrrRl + # PGRST_OPENAPI_SERVER_PROXY_URI: + PGRST_OPENAPI_MODE: ignore-privileges + PGRST_OPENAPI_SECURITY_ACTIVE: true + # PGRST_ADMIN_SERVER_HOST: postgres-db + # PGRST_ADMIN_SERVER_PORT: 3033 + healthcheck: + # https://docs.postgrest.org/en/stable/references/admin_server.html#health-check + # https://github.com/PostgREST/postgrest/discussions/3854 + disable: true + + # https://hub.docker.com/r/swaggerapi/swagger-ui/ + swagger-ui: + image: swaggerapi/swagger-ui:v5.20.2@sha256:666e61e5089643b7d634399ec6e82563e7f09a5367c899981bf63e44631ea364 + ports: + - 3001:8080 + environment: + - API_URL=http://localhost:3000/ diff --git a/infra/docker-compose-develop.yaml b/infra/docker-compose-develop.yaml new file mode 100644 index 00000000..0f52aa4 --- /dev/null +++ b/infra/docker-compose-develop.yaml @@ -0,0 +1,41 @@ +services: + + # https://hub.docker.com/_/postgres/ + postgres-db: + extends: + file: _base-services.yaml + service: postgres-db + ports: + - 5432:5432 + command: [postgres, -c, log_statement=all] + + # https://alembic.sqlalchemy.org/ + alembic: + image: fullstack-template-backend-alembic:0.0.0 + volumes: + - ../backend/alembic/versions:/alembic/alembic/versions:ro + depends_on: + postgres-db: + condition: service_healthy + + # https://hub.docker.com/r/postgrest/postgrest + postgrest: + extends: + file: _base-services.yaml + service: postgrest + depends_on: + postgres-db: + condition: service_healthy + alembic: + condition: service_completed_successfully + + # https://hub.docker.com/r/swaggerapi/swagger-ui/ + swagger-ui: + extends: + file: _base-services.yaml + service: swagger-ui + depends_on: # Should depend on postgrest. 
+      postgres-db:
+        condition: service_healthy
+      alembic:
+        condition: service_completed_successfully
diff --git a/infra/docker-compose.yaml b/infra/docker-compose.yaml
new file mode 100644
index 00000000..2d516cf
--- /dev/null
+++ b/infra/docker-compose.yaml
@@ -0,0 +1,50 @@
+services:
+
+  # https://hub.docker.com/_/postgres/
+  postgres-db:
+    extends:
+      file: _base-services.yaml
+      service: postgres-db
+
+  # https://alembic.sqlalchemy.org/
+  alembic:
+    image: fullstack-template-backend-alembic:0.0.0
+    depends_on:
+      postgres-db:
+        condition: service_healthy
+
+  # https://hub.docker.com/r/postgrest/postgrest
+  postgrest:
+    extends:
+      file: _base-services.yaml
+      service: postgrest
+    depends_on:
+      postgres-db:
+        condition: service_healthy
+      alembic:
+        condition: service_completed_successfully
+
+  # https://hub.docker.com/r/swaggerapi/swagger-ui/
+  swagger-ui:
+    extends:
+      file: _base-services.yaml
+      service: swagger-ui
+    depends_on: # Should depend on postgrest.
+      postgres-db:
+        condition: service_healthy
+      alembic:
+        condition: service_completed_successfully
+
+  # https://dramatiq.io/
+  # https://gitlab.com/dalibo/dramatiq-pg
+  dramatiq:
+    image: fullstack-template-backend-dramatiq:0.0.0
+    environment:
+      DRAMATIQ_SQLA_URL: postgresql://dramatiq:dramatiq@postgres-db:5432/template_db
+      DRAMATIQ_PROCESSES: 2
+      DRAMATIQ_THREADS: 2
+    depends_on: # Should depend on postgrest.
+      postgres-db:
+        condition: service_healthy
+      alembic:
+        condition: service_completed_successfully

From 68bad72ad3968fc801c483f3a83cd413bad3c353 Mon Sep 17 00:00:00 2001
From: Jens Troeger
Date: Tue, 29 Jul 2025 08:38:16 +1000
Subject: [PATCH 2/4] chore: integrate feedback

Link: https://github.com/PostgREST/postgrest/discussions/4198#discussioncomment-13806218
---
 .../7ce2fd1a52c8_initial_database_setup.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py b/backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py
index 31742f8..6cfe898 100644
--- a/backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py
+++ b/backend/alembic/versions/7ce2fd1a52c8_initial_database_setup.py
@@ -29,7 +29,11 @@ def upgrade() -> None:
 
     # Make sure that functions can be executed only by those roles we allow to execute them.
     # Thus, we first revoke all privileges and then add them incrementally back as needed.
-    # See also: https://docs.postgrest.org/en/v12/explanations/db_authz.html#functions
+    # See also: https://docs.postgrest.org/en/stable/explanations/db_authz.html#functions
+    # A word of warning from @steve-chavez of PostgREST: if you have another role that
+    # creates functions (like `functions_admin`) then the revocation below won't apply to it.
+    # The `alter default privileges` below really is `alter default privileges for role postgres`
+    # -- it will only affect objects created by the `postgres` role.
     op.execute(sa.text("alter default privileges revoke execute on functions from public"))
 
     # We make use of these extensions:
@@ -147,7 +151,7 @@ def upgrade() -> None:
             create policy user_email_policy on auth.user to anonymous, apiuser
             using (
                 current_role = 'anonymous'
-                or email = current_setting('request.jwt.claims', true)::json->>'email'
+                or email = (select current_setting('request.jwt.claims', true)::json->>'email')
             );
             """
         )
@@ -223,7 +227,7 @@ def upgrade() -> None:
                 or user_id = (
                     select id
                     from auth.user
-                    where email = current_setting('request.jwt.claims', true)::json->>'email'
+                    where email = (select current_setting('request.jwt.claims', true)::json->>'email')
                 )
             );

From 8ab613f305ba4f17a2299dbeeca9979ecdf52338 Mon Sep 17 00:00:00 2001
From: Jens Troeger
Date: Wed, 30 Jul 2025 21:50:36 +1000
Subject: [PATCH 3/4] chore: improve README and Makefile

---
 README.md        | 16 +++++++++++++++-
 backend/Makefile |  6 +++---
 2 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 7d8f9ed..b9abef9 100644
--- a/README.md
+++ b/README.md
@@ -13,9 +13,23 @@ The following tools should be available on your machine to get started:
 - [commitizen](https://commitizen-tools.github.io/commitizen/) manages automatic version bumps for _semantic versioning_ based on the _conventional commit messages_ in this repository.
 - [Docker](https://www.docker.com/) to build and deploy application containers.
 
+## Quick start
+
+After checking out this repository, the following steps should stand up the entire stack locally on your machine:
+
+```
+make init
+. backend/.venv/bin/activate
+make setup
+make build
+make compose-up
+```
+
+Now navigate to [localhost:3001](http://localhost:3001/) to read the interactive Swagger documentation for the API…
+
 ## Architecture
 
-There are three folders in this repository:
+There are three main components in this repository, structured into three directories:
 
 - **Frontend**: TBD. For more details see [here](frontend/README.md).
 - **Backend**: the backend is composed of a [PostgREST](https://github.com/PostgREST/postgrest) web server, a message queue based on Postgres, and asynchronous workers implemented in Python using the [Dramatiq](https://github.com/Bogdanp/dramatiq) framework. For more details see [here](backend/README.md).
diff --git a/backend/Makefile b/backend/Makefile
index 43236d1..4a6650f 100644
--- a/backend/Makefile
+++ b/backend/Makefile
@@ -144,11 +144,11 @@ requirements.txt:
 build: dist simple-index
 build-docker: docker-image-pg docker-image-alembic docker-image-dramatiq
 docker-image-pg:
-	docker build --tag fullstack-template-backend-pg:0.0.0 --file docker/Dockerfile.pg .
+	docker build --tag fullstack-template-backend-pg:$(PACKAGE_VERSION) --file docker/Dockerfile.pg .
 docker-image-alembic:
-	docker build --tag fullstack-template-backend-alembic:0.0.0 --file docker/Dockerfile.alembic .
+	docker build --tag fullstack-template-backend-alembic:$(PACKAGE_VERSION) --file docker/Dockerfile.alembic .
 docker-image-dramatiq:
-	docker build --tag fullstack-template-backend-dramatiq:0.0.0 --file docker/Dockerfile.dramatiq .
+	docker build --tag fullstack-template-backend-dramatiq:$(PACKAGE_VERSION) --file docker/Dockerfile.dramatiq .
.PHONY: docs From 9573021ac99740493878d0661c32f80a2ceb6252 Mon Sep 17 00:00:00 2001 From: Jens Troeger Date: Thu, 31 Jul 2025 07:15:32 +1000 Subject: [PATCH 4/4] chore: add Frontend readme --- frontend/README.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 frontend/README.md diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 00000000..986524d --- /dev/null +++ b/frontend/README.md @@ -0,0 +1 @@ +# Frontend
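With all four patches applied and the stack brought up via `make compose-up`, a quick end-to-end smoke check needs nothing but `requests`. A sketch with made-up credentials, tying together the signup, login, and job endpoints introduced above:

```python
import time

import requests

BASE = "http://localhost:3000"
creds = {"email": "smoke@example.com", "password": "s3cret-pass"}

assert requests.post(f"{BASE}/rpc/signup", data=creds, timeout=1).status_code == 200
token = requests.post(f"{BASE}/rpc/login", data=creds, timeout=1).json()["token"]
headers = {"Authorization": f"Bearer {token}", "Prefer": "return=representation"}

# Enqueue a job, then poll until the Dramatiq worker marks it done.
job_id = requests.post(f"{BASE}/rpc/job", headers=headers, timeout=1).json()["job_id"]
for _ in range(10):
    jobs = requests.get(f"{BASE}/job?job_id=eq.{job_id}", headers=headers, timeout=1).json()
    if jobs and jobs[0]["state"] == "done":
        print("job result:", jobs[0]["result"])
        break
    time.sleep(0.5)
```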