Skip to content
This repository was archived by the owner on Feb 24, 2023. It is now read-only.

Commit

Permalink
feat(docker): add postgres and pgadmin to local docker-compose
Browse files Browse the repository at this point in the history
  • Loading branch information
jdhaines committed Nov 18, 2021
1 parent 687f76f commit 46058b7
Show file tree
Hide file tree
Showing 25 changed files with 407 additions and 1,319 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -173,4 +173,6 @@ src/backend/*.db
*.local
screenshots/
PAT.txt
dist
dist
db-data/
pgadmin-data/
Empty file modified .husky/commit-msg
100644 → 100755
Empty file.
Empty file modified .husky/pre-commit
100644 → 100755
Empty file.
5 changes: 5 additions & 0 deletions DEVELOPER.md
Original file line number Diff line number Diff line change
Expand Up @@ -169,8 +169,13 @@ docker-compose -f docker-compose.yml -f local-docker-compose.yml build

# Run the container in detached mode to return your command prompt
docker-compose -f docker-compose.yml -f local-docker-compose.yml up -d backend

# Migrate the database to the initial schema using alembic
docker-compose -f docker-compose.yml -f local-docker-compose.yml -p sfm exec backend alembic upgrade head
```

You can visit the backend by opening a browser to the address <http://localhost:8181>. You can open a GUI to inspect the data in your running Postgres database by visiting <http://localhost:8182>. Log in with the username `[email protected]` and the password `root`.

<!--
```
# Check the status of your container.
Expand Down
518 changes: 0 additions & 518 deletions pdm.lock

This file was deleted.

5 changes: 1 addition & 4 deletions src/backend/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,6 @@ ADMIN_KEY=admin_key
FRONTEND_URL=https://localhost:3000
GITHUB_API_TOKEN=XXXXXXXXXXX
API_AUTH_TOKEN=XXXXXXXXXXX
DBHOST=unset
DBNAME=unset
DBUSER=unset
DBPASS=unset
DATABASE_URL=postgresql+psycopg2://postgres:postgres@db:5432/sfm
AZURE_LOGGING_CONN_STR=b3e5cfbd-f5c1-fd7c-be44-651da5dfa00b
GITHUB_WEBHOOK_SECRET=XXXXXXXXXXX
7 changes: 6 additions & 1 deletion src/backend/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,19 @@ RUN apt-get update \
&& apt-get -y install unixodbc unixodbc-dev apt-utils netcat gunicorn libpq-dev python3-dev gcc libpq-dev build-essential vim \
&& apt-get autoremove -y \
&& apt-get clean all
RUN pip install --upgrade pip setuptools wheel
RUN pip install --upgrade pip setuptools wheel pdm


COPY requirements.txt .
# COPY pyproject.toml .
# COPY pdm.lock .
RUN pip install -r requirements.txt
# RUN pdm install
# RUN pdm run pip install sqlmodel
COPY . .
# COPY ./main.py __pypackages__/3.9/lib/sqlmodel

# CMD ["pdm", "run", "uvicorn","sfm.main:app", "--workers", "4", "--host", "0.0.0.0", "--port","8181"]
CMD ["uvicorn","sfm.main:app", "--workers", "4", "--host", "0.0.0.0", "--port","8181"]

EXPOSE 8181
27 changes: 19 additions & 8 deletions src/backend/alembic.ini
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

[alembic]
# path to migration scripts
script_location = ./alembic
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
Expand All @@ -11,8 +11,10 @@ script_location = ./alembic
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date
# within the migration file as well as the filename.
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
Expand All @@ -30,16 +32,25 @@ prepend_sys_path = .
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to ./alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./alembic/versions
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # default: use os.pathsep

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url =
sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
Expand Down
1 change: 0 additions & 1 deletion src/backend/alembic/README

This file was deleted.

Binary file removed src/backend/alembic/__pycache__/env.cpython-38.pyc
Binary file not shown.
53 changes: 0 additions & 53 deletions src/backend/alembic/versions/1d3daaeebc36_first_migration.py

This file was deleted.

Binary file not shown.
1 change: 1 addition & 0 deletions src/backend/migrations/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration with an async dbapi.
50 changes: 26 additions & 24 deletions src/backend/alembic/env.py → src/backend/migrations/env.py
Original file line number Diff line number Diff line change
@@ -1,26 +1,20 @@
import os
import sys
import asyncio
from logging.config import fileConfig
import os

from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import AsyncEngine
from sqlmodel import SQLModel

from alembic import context
from sfm.config import get_settings

from dotenv import load_dotenv

from sfm.database import SQLModel

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
load_dotenv(os.path.join(BASE_DIR, ".env"))
sys.path.append(BASE_DIR)
from sfm.models import *

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# this will overwrite the ini-file sqlalchemy.url path
# with the path given in the config of the main code
config.set_main_option("sqlalchemy.url", os.environ["DATABASE_URL"])

# Interpret the config file for Python logging.
# This line sets up loggers basically.
Expand All @@ -30,13 +24,14 @@
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
app_settings = get_settings()
config.set_main_option("sqlalchemy.url", app_settings.DATABASE_URL)


def run_migrations_offline():
Expand All @@ -63,27 +58,34 @@ def run_migrations_offline():
context.run_migrations()


def run_migrations_online():
def do_run_migrations(connection):
context.configure(connection=connection, target_metadata=target_metadata)

with context.begin_transaction():
context.run_migrations()


async def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
connectable = AsyncEngine(
engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
future=True,
)
)

with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)

with context.begin_transaction():
context.run_migrations()
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)


if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
asyncio.run(run_migrations_online())
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
${imports if imports else ""}

# revision identifiers, used by Alembic.
Expand Down
86 changes: 86 additions & 0 deletions src/backend/migrations/versions/29f3e3aaae5b_init.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
"""init
Revision ID: 29f3e3aaae5b
Revises:
Create Date: 2021-11-18 19:21:18.627686
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel


# revision identifiers, used by Alembic.
revision = "29f3e3aaae5b"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Apply the initial schema: create the project, workitem, and commit tables.

    Tables are created in dependency order (project -> workitem -> commit)
    because workitem.project_id references project.id and commit.work_item_id
    references workitem.id. A non-unique index is added on each table's id.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "project",
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("repo_url", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("on_prem", sa.Boolean(), nullable=True),
        sa.Column("lead_name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("lead_email", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("location", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("github_id", sa.Integer(), nullable=True),
        # NOTE(review): id is declared nullable=True here (and on workitem);
        # the PrimaryKeyConstraint below still enforces NOT NULL on most
        # backends — confirm this matches the SQLModel definitions.
        sa.Column("id", sa.Integer(), nullable=True),
        # Hashed per-project auth token; required for every project row.
        sa.Column(
            "project_auth_token_hashed",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_project_id"), "project", ["id"], unique=False)
    op.create_table(
        "workitem",
        sa.Column("category", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("start_time", sa.DateTime(), nullable=True),
        sa.Column("end_time", sa.DateTime(), nullable=True),
        sa.Column("failed", sa.Boolean(), nullable=True),
        sa.Column("comments", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("issue_num", sa.Integer(), nullable=True),
        sa.Column("duration_open", sa.Integer(), nullable=True),
        # FK to project.id (constraint declared below); nullable, so a work
        # item may exist without a project.
        sa.Column("project_id", sa.Integer(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["project.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_workitem_id"), "workitem", ["id"], unique=False)
    op.create_table(
        "commit",
        sa.Column("sha", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("date", sa.DateTime(), nullable=True),
        sa.Column("message", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("author", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("work_item_id", sa.Integer(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        # NOTE(review): time_to_pull is NOT NULL with no server default —
        # inserts that omit it will fail; confirm that is intended.
        sa.Column("time_to_pull", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["work_item_id"],
            ["workitem.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_commit_id"), "commit", ["id"], unique=False)
    # ### end Alembic commands ###


def downgrade():
    """Revert the initial schema: drop all tables created by this revision.

    Drops run in reverse dependency order (commit -> workitem -> project) so
    that tables holding foreign keys are removed before the tables they
    reference. Each table's id index is dropped before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_commit_id"), table_name="commit")
    op.drop_table("commit")
    op.drop_index(op.f("ix_workitem_id"), table_name="workitem")
    op.drop_table("workitem")
    op.drop_index(op.f("ix_project_id"), table_name="project")
    op.drop_table("project")
    # ### end Alembic commands ###
Loading

0 comments on commit 46058b7

Please sign in to comment.