diff --git a/api/.env.sample b/api/.env.sample index a5095b331..c97f917df 100644 --- a/api/.env.sample +++ b/api/.env.sample @@ -3,24 +3,23 @@ GUNICORN_PROCESSES='2' GUNICORN_THREADS='5' # Database -NAMEX_DATABASE_HOST= -NAMEX_DATABASE_NAME= -NAMEX_DATABASE_PASSWORD= -NAMEX_DATABASE_PORT= -NAMEX_DATABASE_USERNAME= - -DATABASE_TEST_HOST= -DATABASE_TEST_NAME= -DATABASE_TEST_PASSWORD= -DATABASE_TEST_PORT= -DATABASE_TEST_USERNAME= - -# Oracle Database -NRO_USER= -NRO_PASSWORD= -NRO_DB_NAME= -ORACLE_HOST= -ORACLE_PORT=1521 +# only for local db +# check dev-scripts/local-db/docker-compose.yml for exact values +# similar values are also used in https://github.com/bcgov/bcregistry-sre/blob/main/.github/workflows/backend-ci.yaml +DATABASE_USERNAME=postgres +DATABASE_PASSWORD="postgres" +DATABASE_NAME="unittesting" +DATABASE_HOST="localhost" +DATABASE_PORT="54345" +DATABASE_SCHEMA="public" +DATABASE_OWNER="postgres" + +# only when connecting to cloudsql db +DATABASE_INSTANCE_CONNECTION_NAME= +DATABASE_NAME=namex +DATABASE_USERNAME="...@gov.bc.ca" # your email, which needs to be added as IAM user to cloudsql instance and granted readwrite access +DATABASE_IP_TYPE=public +DATABASE_OWNER=userHQH # APIs SOLR_BASE_URL= @@ -67,7 +66,6 @@ JWT_OIDC_CACHING_ENABLED=True JWT_OIDC_JWKS_CACHE_TIMEOUT=300 # PUBSUB -BUSINESS_GCP_AUTH_KEY= NAMEX_MAILER_TOPIC= NAMEX_NR_STATE_TOPIC= diff --git a/api/Dockerfile b/api/Dockerfile index 551f092cc..a31cbfa21 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,42 +1,42 @@ -# platform=linux/amd64 -FROM python:3.12.2 +FROM python:3.12-slim + +# Always ensure the latest security patches are applied +RUN apt-get update && apt-get upgrade -y && apt-get clean && rm -rf /var/lib/apt/lists/* ARG VCS_REF="missing" ARG BUILD_DATE="missing" - -ENV VCS_REF=${VCS_REF} -ENV BUILD_DATE=${BUILD_DATE} +ARG APP_ENV +ARG UID=1000 +ARG GID=1000 + +ENV VCS_REF=${VCS_REF} \ + BUILD_DATE=${BUILD_DATE} \ + APP_ENV=${APP_ENV} \ + # python: + PYTHONFAULTHANDLER=1 \ + 
PYTHONUNBUFFERED=1 \ + PYTHONHASHSEED=random \ + PYTHONDONTWRITEBYTECODE=1 \ + # pip: + PIP_NO_CACHE_DIR=off \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + PIP_DEFAULT_TIMEOUT=100 \ + PIP_ROOT_USER_ACTION=ignore \ + # poetry: + POETRY_VERSION=1.8.3 \ + POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_CREATE=false \ + POETRY_CACHE_DIR='/var/cache/pypoetry' \ + POETRY_HOME='/usr/local' \ + # app: + PYTHONPATH=/code LABEL org.label-schema.vcs-ref=${VCS_REF} \ org.label-schema.build-date=${BUILD_DATE} -USER root - -ARG APP_ENV \ - # Needed for fixing permissions of files created by Docker: - UID=1000 \ - GID=1000 - -ENV APP_ENV=${APP_ENV} \ - # python: - PYTHONFAULTHANDLER=1 \ - PYTHONUNBUFFERED=1 \ - PYTHONHASHSEED=random \ - PYTHONDONTWRITEBYTECODE=1 \ - # pip: - PIP_NO_CACHE_DIR=1 \ - PIP_DISABLE_PIP_VERSION_CHECK=1 \ - PIP_DEFAULT_TIMEOUT=100 \ - PIP_ROOT_USER_ACTION=ignore \ - # poetry: - POETRY_VERSION=1.3.2 \ - POETRY_NO_INTERACTION=1 \ - POETRY_VIRTUALENVS_CREATE=false \ - POETRY_CACHE_DIR='/var/cache/pypoetry' \ - POETRY_HOME='/usr/local' - SHELL ["/bin/bash", "-eo", "pipefail", "-c"] +# Install system dependencies and Poetry in a single layer RUN apt-get update && apt-get upgrade -y \ && apt-get install --no-install-recommends -y \ bash \ @@ -47,44 +47,54 @@ RUN apt-get update && apt-get upgrade -y \ git \ libpq-dev \ wait-for-it \ - && curl -sSL 'https://install.python-poetry.org' | python - \ + ca-certificates \ + openssl \ + && pip install --no-cache-dir setuptools \ + && pip install --no-cache-dir "poetry==${POETRY_VERSION}" \ && poetry --version \ + && poetry config installer.max-workers 2 \ + && poetry config installer.parallel false \ # Cleaning cache: && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ && apt-get clean -y && rm -rf /var/lib/apt/lists/* - +# Create user and workspace WORKDIR /code - RUN groupadd -g "${GID}" -r web \ && useradd -d '/code' -g web -l -r -u "${UID}" web \ - && chown web:web -R '/code' + && chown 
web:web -R '/code' \ + && mkdir -p /var/cache/pypoetry \ + && chown web:web -R /var/cache/pypoetry # Copy only requirements, to cache them in docker layer COPY --chown=web:web ./poetry.lock ./pyproject.toml /code/ -COPY --chown=web:web ./namex /code/namex -COPY --chown=web:web ./README.md /code - -# Project initialization: +# Install dependencies with GitHub Actions optimizations RUN --mount=type=cache,target="$POETRY_CACHE_DIR" \ - echo "$APP_ENV" \ + echo "Building for environment: ${APP_ENV:-development}" \ && poetry version \ - # Install deps: - && poetry run pip install -U pip \ + && poetry config installer.max-workers 1 \ && poetry install \ - $(if [ -z ${APP_ENV+x} ] | [ "$APP_ENV" = 'production' ]; then echo '--only main'; fi) \ - --no-interaction --no-ansi + $(if [ "$APP_ENV" = 'production' ] || [ -z "$APP_ENV" ]; then echo '--only main'; else echo '--with dev'; fi) \ + --no-interaction --no-ansi --verbose -# Running as non-root user: +# Switch to non-root user after installations USER web -# The following stage is only for production: -# FROM development_build AS production_build +# Copy application code after dependencies are installed +COPY --chown=web:web ./namex /code/namex +COPY --chown=web:web ./README.md /code/ COPY --chown=web:web . 
/code +# Set secure permissions immediately after copying to remove write permissions +# Files: read-only (644), Directories: read+execute (755), scripts: executable (755) +RUN find /code -type f -exec chmod 644 {} \; \ + && find /code -type d -exec chmod 755 {} \; \ + && chmod 755 /code/wsgi.py \ + && chmod 755 /code/update_db.sh + # Run the server ENV PYTHONPATH=/code EXPOSE 8080 -CMD gunicorn --bind 0.0.0.0:8080 --config /code/gunicorn_config.py wsgi:app +CMD ["gunicorn", "--bind", "0.0.0.0:8080", "--config", "/code/gunicorn_config.py", "wsgi:app"] \ No newline at end of file diff --git a/api/Makefile b/api/Makefile index 77d0bd5b8..96b9f3a75 100644 --- a/api/Makefile +++ b/api/Makefile @@ -58,30 +58,30 @@ install: clean ## Install python virtrual environment ################################################################################# # COMMANDS - CI # ################################################################################# -ci: pylint flake8 test ## CI flow +ci: docker-build-check ruff test ## CI flow -pylint: ## Linting with pylint - . .venv/bin/activate && pylint --rcfile=setup.cfg $(PROJECT_FOLDER_NAME) +docker-build-check: ## Check if Dockerfile builds successfully + docker build -f Dockerfile -t namex-api . -flake8: ## Linting with flake8 - . .venv/bin/activate && flake8 $(PROJECT_FOLDER_NAME) tests +ruff: ## ruff linter + poetry run ruff check -lint: pylint flake8 ## run all lint type scripts +ruff-fix: ## auto fix lint issues with ruff + poetry run ruff check --fix -test: ## Unit testing - . .venv/bin/activate && pytest - -mac-cov: local-test ## Run the coverage report and display in a browser window (mac) - open -a "Google Chrome" htmlcov/index.html +test: local-db ## unit testing with local db + poetry run pytest ################################################################################# # COMMANDS - Local # ################################################################################# -run: db ## Run the project in local - . 
venv/bin/activate && python3.12 -m flask run -p 5000 -db: ## Update the local database - . venv/bin/activate && python3.12 -m manage db upgrade +run: local-db ## Run the project in local + . .venv/bin/activate && python3.12 -m flask run -p 5000 + +local-db: ## Set up the local development database + docker compose -f dev-scripts/local-db/docker-compose.yml up -d + ./update_db.sh ################################################################################# # Self Documenting Commands # diff --git a/api/README.md b/api/README.md index 5ea828c53..db99361ae 100755 --- a/api/README.md +++ b/api/README.md @@ -8,7 +8,7 @@ BC Registries Names Examination, research and approval system API ## Technology Stack Used * Python, Flask -* Postgres - SQLAlchemy, psycopg2-binary & alembic +* Postgres - SQLAlchemy, pg8000 & alembic ## Third-Party Products/Libraries used and the the License they are covert by diff --git a/api/config.py b/api/config.py index 29fe36ca3..ca6f6f5a1 100644 --- a/api/config.py +++ b/api/config.py @@ -12,6 +12,7 @@ 'development': 'config.DevConfig', 'testing': 'config.TestConfig', 'production': 'config.Config', + 'migration': 'config.MigrationConfig', 'default': 'config.Config', } @@ -66,15 +67,20 @@ class Config(object): NAMEX_LD_SDK_ID = os.getenv('NAMEX_LD_SDK_ID', '') # POSTGRESQL - DB_USER = os.getenv('NAMEX_DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('NAMEX_DATABASE_PASSWORD', '') - DB_NAME = os.getenv('NAMEX_DATABASE_NAME', '') - DB_HOST = os.getenv('NAMEX_DATABASE_HOST', '') - DB_PORT = os.getenv('NAMEX_DATABASE_PORT', '5432') - if DB_UNIX_SOCKET := os.getenv('NAMEX_DATABASE_UNIX_SOCKET', None): - SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}' + DB_USER = os.getenv('DATABASE_USERNAME', '') + DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') + DB_NAME = os.getenv('DATABASE_NAME', '') + DB_HOST = os.getenv('DATABASE_HOST', '') + DB_PORT = int(os.getenv('DATABASE_PORT', '5432')) + + 
DB_SCHEMA = os.getenv('DATABASE_SCHEMA', 'public') + DB_IP_TYPE = os.getenv('DATABASE_IP_TYPE', 'private') + DB_OWNER = os.getenv('DATABASE_OWNER', 'postgres') + + if DB_INSTANCE_CONNECTION_NAME := os.getenv('DATABASE_INSTANCE_CONNECTION_NAME', None): + SQLALCHEMY_DATABASE_URI = 'postgresql+pg8000://' else: - SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' + SQLALCHEMY_DATABASE_URI = f'postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}' # KEYCLOAK & JWT_OIDC Settings JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG') @@ -120,24 +126,32 @@ class DevConfig(Config): DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)) -class TestConfig(Config): - """Test config used for pytests.""" +class MigrationConfig(Config): # pylint: disable=too-few-public-methods + """Config for db migration.""" + + TESTING = (False,) + DEBUG = True + +class TestConfig(Config): # pylint: disable=too-few-public-methods + """In support of unit testing only. 
Used by the pytest suite.""" DEBUG = True TESTING = True # POSTGRESQL - DB_USER = os.getenv('DATABASE_TEST_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_TEST_NAME', '') - DB_HOST = os.getenv('DATABASE_TEST_HOST', '') - DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432') - - LOCAL_DEV_MODE = os.getenv('LOCAL_DEV_MODE', False) - # Set this in your .env to debug SQL Alchemy queries (for local development) - SQLALCHEMY_ECHO = 'debug' if os.getenv('DEBUG_SQL_QUERIES', False) else False - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, password=DB_PASSWORD, host=DB_HOST, port=int(DB_PORT), name=DB_NAME - ) + DB_USER = os.getenv('DATABASE_TEST_USERNAME', 'postgres') + DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', 'postgres') + DB_NAME = os.getenv('DATABASE_TEST_NAME', 'unittesting') + DB_HOST = os.getenv('DATABASE_TEST_HOST', 'localhost') + DB_PORT = os.getenv('DATABASE_TEST_PORT', '54345') + SQLALCHEMY_DATABASE_URI = f'postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' + + # Ensure SQLAlchemy is properly configured for Flask-Marshmallow compatibility + SQLALCHEMY_TRACK_MODIFICATIONS = False + SQLALCHEMY_ENGINE_OPTIONS = { + 'pool_pre_ping': True, + 'pool_recycle': 300, + } + EMAILER_TOPIC = os.getenv('NAMEX_MAILER_TOPIC', '') DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)) diff --git a/api/dev-scripts/local-db/docker-compose.yml b/api/dev-scripts/local-db/docker-compose.yml new file mode 100644 index 000000000..73da2dcde --- /dev/null +++ b/api/dev-scripts/local-db/docker-compose.yml @@ -0,0 +1,21 @@ +services: + postgres: + container_name: namex-postgres + image: postgres:15 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: unittesting + ports: + - "54345:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", 
"pg_isready -U postgres -d unittesting"] + interval: 10s + timeout: 5s + retries: 5 + restart: always + +volumes: + postgres_data: \ No newline at end of file diff --git a/api/devops/vaults.gcp.env b/api/devops/vaults.gcp.env index 9638a2538..33c8ca498 100644 --- a/api/devops/vaults.gcp.env +++ b/api/devops/vaults.gcp.env @@ -8,11 +8,6 @@ NAME_REQUEST_SERVICE_ACCOUNT_CLIENT_ID="op://keycloak/$APP_ENV/name-request-serv NAME_REQUEST_SERVICE_ACCOUNT_CLIENT_SECRET="op://keycloak/$APP_ENV/name-request-service-account/NAME_REQUEST_SERVICE_ACCOUNT_CLIENT_SECRET" ENTITY_SERVICE_ACCOUNT_CLIENT_ID="op://keycloak/$APP_ENV/entity-service-account/ENTITY_SERVICE_ACCOUNT_CLIENT_ID" ENTITY_SERVICE_ACCOUNT_CLIENT_SECRET="op://keycloak/$APP_ENV/entity-service-account/ENTITY_SERVICE_ACCOUNT_CLIENT_SECRET" -NAMEX_DATABASE_UNIX_SOCKET="op://database/$APP_ENV/namex-db-gcp/DATABASE_UNIX_SOCKET" -NAMEX_DATABASE_PORT="op://database/$APP_ENV/namex-db-gcp/DATABASE_PORT" -NAMEX_DATABASE_NAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_NAME" -NAMEX_DATABASE_USERNAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_USERNAME" -NAMEX_DATABASE_PASSWORD="op://database/$APP_ENV/namex-db-gcp/DATABASE_PASSWORD" REPORT_API_URL="op://API/$APP_ENV/report-api/REPORT_API_URL" REPORT_API_VERSION="op://API/$APP_ENV/report-api/REPORT_API_VERSION" PAY_API_URL="op://API/$APP_ENV/pay-api/PAY_API_URL" @@ -47,5 +42,9 @@ NAMEX_NR_STATE_TOPIC="op://gcp-queue/$APP_ENV/topics/NAMEX_NR_STATE_TOPIC" NAMEX_MAILER_TOPIC="op://gcp-queue/$APP_ENV/topics/NAMEX_MAILER_TOPIC" NOTIFY_DELIVERY_GCNOTIFY_TOPIC="op://gcp-queue/$APP_ENV/topics/NOTIFY_DELIVERY_GCNOTIFY_TOPIC" NOTIFY_DELIVERY_SMTP_TOPIC="op://gcp-queue/$APP_ENV/topics/NOTIFY_DELIVERY_SMTP_TOPIC" -BUSINESS_GCP_AUTH_KEY="op://gcp-queue/$APP_ENV/a083gt/BUSINESS_GCP_AUTH_KEY" -VPC_CONNECTOR="op://CD/$APP_ENV/namex-api/VPC_CONNECTOR" \ No newline at end of file +VPC_CONNECTOR="op://CD/$APP_ENV/namex-api/VPC_CONNECTOR" 
+DATABASE_USERNAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_USERNAME" +DATABASE_NAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_NAME" +DATABASE_INSTANCE_CONNECTION_NAME="op://database/$APP_ENV/namex-db-gcp/DATABASE_INSTANCE_CONNECTION_NAME" +DATABASE_SCHEMA="op://database/$APP_ENV/namex-db-gcp/DATABASE_SCHEMA" +DATABASE_OWNER="op://database/$APP_ENV/namex-db-gcp/DATABASE_OWNER" \ No newline at end of file diff --git a/api/gunicorn_config.py b/api/gunicorn_config.py index af5e32c5f..7d2f3d931 100755 --- a/api/gunicorn_config.py +++ b/api/gunicorn_config.py @@ -1,7 +1,9 @@ import os -workers = int(os.environ.get('GUNICORN_PROCESSES', '1')) -threads = int(os.environ.get('GUNICORN_THREADS', '1')) +workers = int(os.environ.get('GUNICORN_PROCESSES', '1')) # pylint: disable=invalid-name +threads = int(os.environ.get('GUNICORN_THREADS', '8')) # pylint: disable=invalid-name +timeout = int(os.environ.get('GUNICORN_TIMEOUT', '0')) # pylint: disable=invalid-name + forwarded_allow_ips = '*' secure_scheme_headers = {'X-Forwarded-Proto': 'https'} diff --git a/api/migrations/alembic.ini b/api/migrations/alembic.ini index f8ed4801f..048f2baeb 100644 --- a/api/migrations/alembic.ini +++ b/api/migrations/alembic.ini @@ -2,7 +2,8 @@ [alembic] # template used to generate migration files -# file_template = %%(rev)s_%%(slug)s +script_location = migrations +file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(rev)s_%%(slug)s # set to 'true' to run the environment during # the 'revision' command, regardless of autogenerate @@ -11,7 +12,7 @@ # Logging configuration [loggers] -keys = root,sqlalchemy,alembic +keys = root,sqlalchemy,alembic,flask_migrate [handlers] keys = console @@ -34,6 +35,11 @@ level = INFO handlers = qualname = alembic +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + [handler_console] class = StreamHandler args = (sys.stderr,) @@ -41,5 +47,5 @@ level = NOTSET formatter = generic [formatter_generic] -format = %(levelname)-5.5s 
[%(name)s] %(message)s -datefmt = %H:%M:%S +format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s +datefmt= diff --git a/api/migrations/env.py b/api/migrations/env.py index 711bdf84d..4de106722 100755 --- a/api/migrations/env.py +++ b/api/migrations/env.py @@ -1,94 +1,90 @@ from __future__ import with_statement -from alembic import context -from sqlalchemy import engine_from_config, pool -from logging.config import fileConfig + import logging +from logging.config import fileConfig -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config +from alembic import context +from flask import current_app +from sqlalchemy import text + +from config import MigrationConfig -# Interpret the config file for Python logging. -# This line sets up loggers basically. +config = context.config fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') +logger = logging.getLogger("alembic.env") -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -from flask import current_app -config.set_main_option('sqlalchemy.url', - current_app.config.get('SQLALCHEMY_DATABASE_URI')) -target_metadata = current_app.extensions['migrate'].db.metadata +def get_engine(): + try: + return current_app.extensions["migrate"].db.get_engine() + except (TypeError, AttributeError): + return current_app.extensions["migrate"].db.engine -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. 
+def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace("%", "%%") + except AttributeError: + return str(get_engine().url).replace("%", "%%") +config.set_main_option("sqlalchemy.url", get_engine_url()) +target_metadata = current_app.extensions["migrate"].db.metadata -def run_migrations_offline(): - """Run migrations in 'offline' mode. +def get_list_from_config(config, key): + arr = config.get_main_option(key, []) + if arr: + arr = [token for a in arr.split("\n") for b in a.split(",") if (token := b.strip())] + return arr - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. +exclude_tables = get_list_from_config(config, "exclude_tables") - Calls to context.execute() here emit the given string to the - script output. +def include_object(object, name, type_, reflected, compare_to): + return not (type_ == "table" and name in exclude_tables) - """ +def run_migrations_offline(): url = config.get_main_option("sqlalchemy.url") - context.configure(url=url) + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + include_object=include_object + ) with context.begin_transaction(): context.run_migrations() - def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): + if getattr(config.cmd_opts, "autogenerate", False): script = directives[0] if script.upgrade_ops.is_empty(): directives[:] = [] - logger.info('No changes in schema detected.') + logger.info("No changes in schema detected.") - def include_object(object, name, type_, reflected, compare_to): - if (type_ == "table" and reflected): - return False - else: - return True + connectable = get_engine() - engine = engine_from_config(config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) + with connectable.connect() as connection: + # Get existing configure args but remove compare_type if present + configure_args = current_app.extensions["migrate"].configure_args or {} + if 'compare_type' in configure_args: + del configure_args['compare_type'] - connection = engine.connect() - context.configure(connection=connection, - target_metadata=target_metadata, - include_object=include_object, - process_revision_directives=process_revision_directives, - **current_app.extensions['migrate'].configure_args) + context.configure( + connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + include_object=include_object, + compare_type=True, # Only set here explicitly + **configure_args + ) - try: with context.begin_transaction(): + owner_role = MigrationConfig.DB_OWNER + connection.execute(text(f'SET ROLE "{owner_role}";')) + result = connection.execute(text("SELECT current_user, session_user;")) + logger.info(f"User running migration is: {result.fetchone()}") context.run_migrations() - finally: - connection.close() + connection.execute(text("RESET ROLE;")) if 
context.is_offline_mode(): run_migrations_offline() else: - run_migrations_online() + run_migrations_online() \ No newline at end of file diff --git a/api/migrations/versions/00952c5a4109_.py b/api/migrations/versions/00952c5a4109_.py index daf4bd922..ca25f8b22 100644 --- a/api/migrations/versions/00952c5a4109_.py +++ b/api/migrations/versions/00952c5a4109_.py @@ -5,9 +5,9 @@ Create Date: 2021-06-02 11:24:29.085137 """ -from alembic import op import sqlalchemy as sa - +from alembic import op +from sqlalchemy.sql import text # revision identifiers, used by Alembic. revision = '00952c5a4109' @@ -23,7 +23,7 @@ def upgrade(): # ### end Alembic commands ### conn = op.get_bind() # add all names to nameSearch column - cd_exists = conn.execute("select * from states where cd='PENDING_PAYMENT'") + cd_exists = conn.execute(text("select * from states where cd='PENDING_PAYMENT'")) def downgrade(): diff --git a/api/migrations/versions/2025_09_10_e5e4e389f21d_move_to_custom_schema.py b/api/migrations/versions/2025_09_10_e5e4e389f21d_move_to_custom_schema.py new file mode 100644 index 000000000..af8ff1c65 --- /dev/null +++ b/api/migrations/versions/2025_09_10_e5e4e389f21d_move_to_custom_schema.py @@ -0,0 +1,102 @@ +"""move to custom schema + +Revision ID: e5e4e389f21d +Revises: 179a7b0089ce +Create Date: 2025-09-10 13:47:17.696445 + +""" +import logging +import os +import re + +from alembic import op +from flask import current_app +from sqlalchemy.sql import text + +# revision identifiers, used by Alembic. 
+revision = 'e5e4e389f21d' +down_revision = '179a7b0089ce' +branch_labels = None +depends_on = None + +logger = current_app.logger if hasattr(current_app, 'logger') else logging.getLogger("namex_api") + +def get_target_schema(): + """Minimal schema name fetch with validation.""" + schema = os.getenv("DATABASE_SCHEMA", "public") + if not re.match(r'^[a-z_][a-z0-9_]*$', schema, re.I): + raise ValueError(f"Invalid schema name: {schema}") + return schema + +def upgrade(): + target_schema = get_target_schema() + if target_schema == 'public': + logger.info("Target schema is public, skipping migration") + return + + conn = op.get_bind() + + try: + # Check if target schema already exists + schema_exists = conn.execute(text(f""" + SELECT 1 FROM information_schema.schemata + WHERE schema_name = '{target_schema}' + """)).scalar() + + if schema_exists: + logger.info(f"Schema {target_schema} already exists, skipping migration") + return + + conn.execute(text(f"ALTER SCHEMA public RENAME TO {target_schema};")) + conn.execute(text("CREATE SCHEMA public;")) + + conn.execute(text(""" + CREATE TABLE public.alembic_version ( + version_num character varying(32) NOT NULL, + CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num) + ) + """)) + + conn.execute(text(f""" + INSERT INTO public.alembic_version (version_num) + VALUES ('{down_revision}') + """)) + conn.commit() + + except Exception as e: + logger.error(f"Migration failed: {str(e)}") + conn.rollback() + conn.execute(text("DROP SCHEMA IF EXISTS public CASCADE")) + conn.execute(text(f"ALTER SCHEMA {target_schema} RENAME TO public")) + conn.commit() + raise + + +def downgrade(): + target_schema = get_target_schema() + + if target_schema == 'public': + logger.info("Target schema is public, skipping downgrade") + return + + conn = op.get_bind() + + try: + schema_exists = conn.execute( + text(f"SELECT 1 FROM information_schema.schemata WHERE schema_name = '{target_schema}'") + ).scalar() + + if not schema_exists: + 
logger.info(f"Schema {target_schema} does not exist, nothing to downgrade") + return + + logger.info("Dropping current public schema") + conn.execute(text("DROP SCHEMA public CASCADE")) + + logger.info(f"Renaming {target_schema} back to public") + conn.execute(text(f"ALTER SCHEMA {target_schema} RENAME TO public")) + + logger.info("Downgrade completed successfully") + except Exception as e: + logger.error(f"Downgrade failed: {str(e)}") + raise \ No newline at end of file diff --git a/api/migrations/versions/21b272432e62_add_nro_sync_tables_and_seq.py b/api/migrations/versions/21b272432e62_add_nro_sync_tables_and_seq.py index ba0ee4995..8e408462c 100644 --- a/api/migrations/versions/21b272432e62_add_nro_sync_tables_and_seq.py +++ b/api/migrations/versions/21b272432e62_add_nro_sync_tables_and_seq.py @@ -13,9 +13,8 @@ The table generation code is auto-generated but the sequence is manually added. """ -from alembic import op import sqlalchemy as sa - +from alembic import op # revision identifiers, used by Alembic. revision = '21b272432e62' @@ -41,7 +40,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id') ) - op.execute(sa.schema.CreateSequence(sa.schema.Sequence("nro_job_seq"))) + op.execute(sa.schema.CreateSequence(sa.schema.Sequence("nro_job_seq"), if_not_exists=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/5e1d992e46d5_perf_indexes.py b/api/migrations/versions/5e1d992e46d5_perf_indexes.py index baa514c23..716405091 100644 --- a/api/migrations/versions/5e1d992e46d5_perf_indexes.py +++ b/api/migrations/versions/5e1d992e46d5_perf_indexes.py @@ -5,9 +5,8 @@ Create Date: 2018-11-08 20:54:59.981331 """ -from alembic import op import sqlalchemy as sa - +from alembic import op # revision identifiers, used by Alembic. 
revision = '5e1d992e46d5' @@ -32,18 +31,18 @@ def upgrade(): op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=False) op.execute('CREATE INDEX ' + op.f('ix_names_name') - + ' ON public.names USING btree' + + ' ON names USING btree' + ' (name COLLATE pg_catalog."default" varchar_pattern_ops)' + ' TABLESPACE pg_default;' ) - op.execute('ALTER TABLE public.names CLUSTER ON ' + op.execute('ALTER TABLE names CLUSTER ON ' + op.f('ix_names_nr_id')) - op.execute('ALTER TABLE public.comments CLUSTER ON ' + op.execute('ALTER TABLE comments CLUSTER ON ' + op.f('ix_comments_nr_id')) - op.execute('ALTER TABLE public.partner_name_system CLUSTER ON ' + op.execute('ALTER TABLE partner_name_system CLUSTER ON ' + op.f('ix_partner_name_system_nr_id')) diff --git a/api/migrations/versions/8b99aacb139b_add_nr_refund_requested_state.py b/api/migrations/versions/8b99aacb139b_add_nr_refund_requested_state.py index 6bab42b6e..2f973498b 100644 --- a/api/migrations/versions/8b99aacb139b_add_nr_refund_requested_state.py +++ b/api/migrations/versions/8b99aacb139b_add_nr_refund_requested_state.py @@ -6,8 +6,7 @@ """ from alembic import op -from sqlalchemy import Table, MetaData - +from sqlalchemy import MetaData, Table # revision identifiers, used by Alembic. 
revision = '8b99aacb139b' @@ -18,10 +17,10 @@ def upgrade(): # Get metadata from current connection - meta = MetaData(bind=op.get_bind()) + meta = MetaData() # Pass in tuple with tables we want to reflect, otherwise whole database will get reflected - meta.reflect(only=('states',)) + meta.reflect(bind=op.get_bind(), only=('states',)) # Define table representation states_tbl = Table('states', meta) diff --git a/api/migrations/versions/add_restricted_word_tables.py b/api/migrations/versions/add_restricted_word_tables.py index d829aca54..98e0c62c1 100644 --- a/api/migrations/versions/add_restricted_word_tables.py +++ b/api/migrations/versions/add_restricted_word_tables.py @@ -5,9 +5,8 @@ Create Date: 2018-06-26 13:43:00 """ -from alembic import op import sqlalchemy as sa - +from alembic import op # revision identifiers, used by Alembic. revision = 'add_restricted_word_tables' @@ -25,13 +24,13 @@ def upgrade(): sa.Column('consenting_body', sa.String(length=195), nullable=True), sa.Column('instructions', sa.String(length=195), nullable=True)) op.execute( - "COMMENT ON TABLE public.restricted_condition IS 'The conditions against a restricted word. The conditions can apply to one or more rows.'; " + "COMMENT ON TABLE restricted_condition IS 'The conditions against a restricted word. 
The conditions can apply to one or more rows.'; " ) op.execute( """ - CREATE SEQUENCE public.restricted_condition_id + CREATE SEQUENCE IF NOT EXISTS restricted_condition_id START WITH 1 INCREMENT BY 1 NO MINVALUE @@ -45,7 +44,7 @@ def upgrade(): sa.Column('word_phrase', sa.String(length=60), nullable=True)) op.execute( - "COMMENT ON TABLE public.restricted_word IS 'Restricted words or word phrases that may not be used in a name at all or require consent from a specific organization.';" + "COMMENT ON TABLE restricted_word IS 'Restricted words or word phrases that may not be used in a name at all or require consent from a specific organization.';" ) restricted_word_condition_table = op.create_table('restricted_word_condition', @@ -53,7 +52,7 @@ def upgrade(): sa.Column('word_id', sa.Integer, nullable=False)) op.execute( - "COMMENT ON TABLE public.restricted_word_condition IS 'An associative entity to resolve a restricted word having multiple conditions and a condition applying to multiple words.';" + "COMMENT ON TABLE restricted_word_condition IS 'An associative entity to resolve a restricted word having multiple conditions and a condition applying to multiple words.';" ) @@ -72,7 +71,7 @@ def upgrade(): op.execute( """ - CREATE OR REPLACE FUNCTION public.get_restricted_words(p_name_choice text) RETURNS text + CREATE OR REPLACE FUNCTION get_restricted_words(p_name_choice text) RETURNS text LANGUAGE plpgsql AS $$DECLARE v_word_phrase CHARACTER(100); diff --git a/api/migrations/versions/b0fc67a096fe_.py b/api/migrations/versions/b0fc67a096fe_.py index b4cf4ed33..0cab20d6b 100644 --- a/api/migrations/versions/b0fc67a096fe_.py +++ b/api/migrations/versions/b0fc67a096fe_.py @@ -5,11 +5,10 @@ Create Date: 2021-05-05 14:24:03.870443 """ -from alembic import op import sqlalchemy as sa -from sqlalchemy.sql import table, column +from alembic import op from sqlalchemy import String - +from sqlalchemy.sql import column, table, text # revision identifiers, used by Alembic. 
revision = 'b0fc67a096fe' @@ -29,7 +28,7 @@ def upgrade(): # ### end Alembic commands ### conn = op.get_bind() - cd_exists = conn.execute("select * from states where cd='PENDING_PAYMENT'") + cd_exists = conn.execute(text("select * from states where cd='PENDING_PAYMENT'")) if not cd_exists.one_or_none(): states_table = table( 'states', diff --git a/api/namex/VERSION.py b/api/namex/VERSION.py index 9f2b7effe..661076476 100644 --- a/api/namex/VERSION.py +++ b/api/namex/VERSION.py @@ -1 +1 @@ -__version__ = '1.2.66' +__version__ = '1.2.67' diff --git a/api/namex/__init__.py b/api/namex/__init__.py index fb8da90f2..7788b15f7 100644 --- a/api/namex/__init__.py +++ b/api/namex/__init__.py @@ -6,30 +6,32 @@ TODO: Fill in a larger description once the API is defined for V1 """ -import config import os +import traceback from flask import Flask from flask_jwt_oidc import JwtManager +import config + from .VERSION import __version__ jwt = JwtManager() +from cloud_sql_connector import DBConfig, setup_search_path_event_listener from flask_cors import CORS -from flask_migrate import Migrate - -from namex.services.cache import cache -from namex.services.lookup import nr_filing_actions -from .services import queue -from namex.utils.synonyms_api_auth import patch_synonyms_api_requests +from flask_migrate import Migrate, upgrade from namex import models from namex.models import db, ma from namex.resources import api -from namex.utils.run_version import get_run_version from namex.services import flags, logging_config +from namex.services.cache import cache +from namex.services.lookup import nr_filing_actions +from namex.utils.run_version import get_run_version +from namex.utils.synonyms_api_auth import patch_synonyms_api_requests +from .services import queue run_version = get_run_version() @@ -41,31 +43,69 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): # noqa: B008 app.config.from_object(config.CONFIGURATION[run_mode]) logging_config.configure_logging(app) - 
flags.init_app(app) - queue.init_app(app) - - db.init_app(app) - Migrate(app, db) - ma.init_app(app) + schema = app.config.get('DB_SCHEMA', 'public') - api.init_app(app) - setup_jwt_manager(app, jwt) + if app.config.get('DB_INSTANCE_CONNECTION_NAME'): + db_config = DBConfig( + instance_name=app.config.get('DB_INSTANCE_CONNECTION_NAME'), + database=app.config.get('DB_NAME'), + user=app.config.get('DB_USER'), + ip_type=app.config.get('DB_IP_TYPE'), + schema=schema if run_mode != 'migration' else None, + pool_recycle=300, + ) - cache.init_app(app) - nr_filing_actions.init_app(app) - - # Install request middleware for identity token injecttion for synonyms API requests. - with app.app_context(): - patch_synonyms_api_requests() - - @app.after_request - def add_version(response): - os.getenv('OPENSHIFT_BUILD_COMMIT', '') - response.headers['API'] = 'NameX/{ver}'.format(ver=run_version) - return response + app.config['SQLALCHEMY_ENGINE_OPTIONS'] = db_config.get_engine_options() + db.init_app(app) - register_shellcontext(app) + # Initialize Marshmallow WITHOUT SQLAlchemy auto-detection to avoid GitHub Actions issues + # Flask-Marshmallow tries to auto-detect SQLAlchemy but fails in some CI environments + # We'll initialize it manually and set the db reference afterward + try: + # Try normal initialization first + ma.init_app(app) + except AttributeError as e: + if 'db' in str(e): + # If the auto-detection fails, initialize without SQLAlchemy and set it manually + ma.app = app + ma._db = db # Manually set the database reference + app.extensions['marshmallow'] = ma + else: + raise + + if run_mode != 'migration': + with app.app_context(): + engine = db.engine + setup_search_path_event_listener(engine, schema) + + if run_mode == 'migration': + Migrate(app, db) + app.logger.info('Running migration upgrade.') + with app.app_context(): + execute_migrations(app) + app.logger.info('Finished migration upgrade.') + app.logger.info('Note: endpoints will 404 until the FLASK_ENV is 
switched off of migration.') + else: + flags.init_app(app) + queue.init_app(app) + + api.init_app(app) + setup_jwt_manager(app, jwt) + + cache.init_app(app) + nr_filing_actions.init_app(app) + + # Install request middleware for identity token injection for synonyms API requests. + with app.app_context(): + patch_synonyms_api_requests() + + @app.after_request + def add_version(response): + response.headers['API'] = 'NameX/{ver}'.format(ver=run_version) + return response + + register_shellcontext(app) return app @@ -91,3 +131,14 @@ def shell_context(): return {'app': app, 'jwt': jwt, 'db': db, 'models': models} app.shell_context_processor(shell_context) + + +def execute_migrations(app): + """Execute the database migrations.""" + try: + upgrade(directory='migrations', revision='head', sql=False, tag=None) + except Exception as e: # NOQA pylint: disable=broad-except + app.logger.disabled = False + error_message = f'Error processing migrations: {e}\n{traceback.format_exc()}' + app.logger.error(error_message) + raise e diff --git a/api/namex/analytics/restricted_words.py b/api/namex/analytics/restricted_words.py index 1d39ce80c..1e1d96a7a 100644 --- a/api/namex/analytics/restricted_words.py +++ b/api/namex/analytics/restricted_words.py @@ -73,11 +73,13 @@ def find_restricted_words(content): - query for list of all restricted words - strip each word/phrase of spaces and check if they are a substring of 'stripped_content' """ - restricted_words_obj = db.engine.execute('select * from restricted_word;') - restricted_words_dict = [] - for row in restricted_words_obj: - if ' ' + row[1].upper().strip() + ' ' in content: - restricted_words_dict.append({'id': row[0], 'phrase': row[1].upper()}) + from sqlalchemy import text + with db.engine.connect() as connection: + restricted_words_obj = connection.execute(text('select * from restricted_word;')) + restricted_words_dict = [] + for row in restricted_words_obj: + if ' ' + row[1].upper().strip() + ' ' in content: + 
restricted_words_dict.append({'id': row[0], 'phrase': row[1].upper()}) return restricted_words_dict @@ -85,33 +87,34 @@ def find_restricted_words(content): def find_cnd_info(word_id): """Get the condition info corresponding to the given word id""" get_cnd_id_sql = text('SELECT cnd_id FROM restricted_word_condition WHERE word_id = :word_id') - cnd_id_obj = db.engine.execute(get_cnd_id_sql, {'word_id': word_id}) - cnd_ids = cnd_id_obj.fetchall() - - cnd_obj_list = [] - for id in cnd_ids: - cnd_id = id[0] - get_cnd_sql = text('select * from restricted_condition where cnd_id = :cnd_id') - cnd_obj_list.append(db.engine.execute(get_cnd_sql, {'cnd_id': cnd_id})) - - cnd_info = [] - for obj in cnd_obj_list: - obj_tuple = obj.fetchall()[0] - cnd_text = obj_tuple[1] - cnd_allow_use = obj_tuple[2] - cnd_consent_req = obj_tuple[3] - cnd_consent_body = obj_tuple[4] - cnd_instr = obj_tuple[5] - - cnd_info.append( - { - 'id': cnd_id, - 'text': cnd_text, - 'allow_use': cnd_allow_use, - 'consent_required': cnd_consent_req, - 'consenting_body': cnd_consent_body, - 'instructions': cnd_instr, - } - ) - - return cnd_info + with db.engine.connect() as connection: + cnd_id_obj = connection.execute(get_cnd_id_sql, {'word_id': word_id}) + cnd_ids = cnd_id_obj.fetchall() + + cnd_obj_list = [] + for id in cnd_ids: + cnd_id = id[0] + get_cnd_sql = text('select * from restricted_condition where cnd_id = :cnd_id') + cnd_obj_list.append(connection.execute(get_cnd_sql, {'cnd_id': cnd_id})) + + cnd_info = [] + for obj in cnd_obj_list: + obj_tuple = obj.fetchall()[0] + cnd_text = obj_tuple[1] + cnd_allow_use = obj_tuple[2] + cnd_consent_req = obj_tuple[3] + cnd_consent_body = obj_tuple[4] + cnd_instr = obj_tuple[5] + + cnd_info.append( + { + 'id': cnd_id, + 'text': cnd_text, + 'allow_use': cnd_allow_use, + 'consent_required': cnd_consent_req, + 'consenting_body': cnd_consent_body, + 'instructions': cnd_instr, + } + ) + + return cnd_info diff --git a/api/namex/analytics/solr.py 
b/api/namex/analytics/solr.py index 3eb12f79c..1d31990b0 100644 --- a/api/namex/analytics/solr.py +++ b/api/namex/analytics/solr.py @@ -821,7 +821,6 @@ def _get_identity_token(cls, audience): current_app.logger.warning(f'Error in getting identity token: {e.message}') return None - @classmethod def _synonyms_exist(cls, token, col): solr_synonyms_api_url = current_app.config.get('SOLR_SYNONYMS_API_URL', None) @@ -839,9 +838,7 @@ def _synonyms_exist(cls, token, col): if id_token is None: connection = request.urlopen(query) else: - connection = request.urlopen(request.Request( - query, headers={'Authorization': f'Bearer {id_token}'} - )) + connection = request.urlopen(request.Request(query, headers={'Authorization': f'Bearer {id_token}'})) except HTTPError as http_error: # Expected when the token does not have synonyms. if http_error.code == 404: @@ -870,9 +867,7 @@ def _get_synonym_list(cls, token): if id_token is None: connection = request.urlopen(query) else: - connection = request.urlopen(request.Request( - query, headers={'Authorization': f'Bearer {id_token}'} - )) + connection = request.urlopen(request.Request(query, headers={'Authorization': f'Bearer {id_token}'})) except HTTPError as http_error: # Expected when the token does not have synonyms. 
if http_error.code == 404: diff --git a/api/namex/models/name.py b/api/namex/models/name.py index dc445713d..03e28e6a4 100644 --- a/api/namex/models/name.py +++ b/api/namex/models/name.py @@ -100,18 +100,17 @@ def delete_from_db(self): def update_nr_name_search(mapper, connection, target): """Add any changes to the name to the request.nameSearch column and publish name state changes where applicable.""" - from namex.models import Request + from sqlalchemy import text name = target - nr = Request.find_by_id(name.nrId) - if nr: - # get the names associated with the NR + if name.nrId: + # get the names associated with the NR using the same connection names_q = connection.execute( - f""" + text(f""" SELECT names.name from names JOIN requests on requests.id = names.nr_id - WHERE requests.id={nr.id} - """ # noqa: S608 + WHERE requests.id={name.nrId} + """) # noqa: S608 ) # format the names into a string like: |1|2|3 names = [x[0] for x in names_q.all()] @@ -120,12 +119,12 @@ def update_nr_name_search(mapper, connection, target): name_search += f'|{index + 1}{item}{index + 1}|' # update the name_search field of the nr with the formatted string connection.execute( - """ + text(""" UPDATE requests - SET name_search=%s - WHERE id=%s - """, - ('(' + name_search + ')', nr.id), + SET name_search = :name_search + WHERE id = :nr_id + """), + {'name_search': '(' + name_search + ')', 'nr_id': name.nrId}, ) diff --git a/api/namex/models/payment.py b/api/namex/models/payment.py index 674b79f01..c5cf3210e 100644 --- a/api/namex/models/payment.py +++ b/api/namex/models/payment.py @@ -2,7 +2,7 @@ from enum import Enum -from sqlalchemy import event +from sqlalchemy import event, text from sqlalchemy.orm.attributes import get_history from namex.constants import PaymentState, PaymentStatusCode @@ -110,11 +110,11 @@ def update_nr_state(mapper, connection, target): if payment.payment_status_code != 'REFUND_REQUESTED': if payment.payment_status_code in completed_payment_status and nr.stateCd 
== State.PENDING_PAYMENT: connection.execute( - f""" + text(f""" UPDATE requests SET state_cd='{State.DRAFT}' WHERE id={nr.id} - """ # noqa: S608 + """) # noqa: S608 ) queue_util.send_name_request_state_msg(nr.nrNum, State.DRAFT, State.PENDING_PAYMENT) diff --git a/api/namex/models/request.py b/api/namex/models/request.py index 9226c2a78..a341d3a6b 100644 --- a/api/namex/models/request.py +++ b/api/namex/models/request.py @@ -13,16 +13,11 @@ from sqlalchemy.orm import backref from sqlalchemy.orm.attributes import get_history -from namex.constants import ( - EntityTypes, - LegacyEntityTypes, - NameState, -) +from namex.constants import EntityTypes, LegacyEntityTypes, NameState from namex.exceptions import BusinessException # TODO: Only trace if LOCAL_DEV_MODE / DEBUG conf exists # from flask_sqlalchemy import get_debug_queries -from namex.services.lookup import nr_filing_actions from namex.utils import queue_util # noinspection PyPep8Naming @@ -216,6 +211,9 @@ def json(self): 'notifiedBeforeExpiry': self.notifiedBeforeExpiry, 'notifiedExpiry': self.notifiedExpiry, } + # Lazy import to avoid circular dependency + from namex.services.lookup import nr_filing_actions + if nr_actions := nr_filing_actions.get_actions(self.requestTypeCd, self.entity_type_cd, self.request_action_cd): nr_json['legalType'] = nr_actions.get('legalType') nr_json['target'] = nr_actions.get('target') @@ -531,59 +529,63 @@ def get_waiting_time(cls, priority_queue=False): unit_time = 60 * 60 # Default to hours for priority queue # Step 1: decision_candidates CTE - decision_candidates = select( - Event.nrId.label('nr_id'), - Event.eventDate.label('event_dt'), - Event.stateCd.label('state_cd'), - Event.action, - Event.userId.label('user_id') - ).where( - Event.action == 'patch', - Event.stateCd.in_(['APPROVED', 'CONDITIONAL', 'REJECTED', 'CANCELLED']), - Event.userId != 1, - cast(Event.eventDate, Date) >= cast(func.now() - timedelta(days=7), Date) - ).cte('decision_candidates') + decision_candidates 
= ( + select( + Event.nrId.label('nr_id'), + Event.eventDate.label('event_dt'), + Event.stateCd.label('state_cd'), + Event.action, + Event.userId.label('user_id'), + ) + .where( + Event.action == 'patch', + Event.stateCd.in_(['APPROVED', 'CONDITIONAL', 'REJECTED', 'CANCELLED']), + Event.userId != 1, + cast(Event.eventDate, Date) >= cast(func.now() - timedelta(days=7), Date), + ) + .cte('decision_candidates') + ) # Step 2: decision_counts CTE - decision_counts = select( - Event.nrId.label('nr_id'), - func.count().label('cnt') - ).join( - decision_candidates, - decision_candidates.c.nr_id == Event.nrId - ).where( - Event.action == 'patch', - Event.stateCd.in_(['APPROVED', 'CONDITIONAL', 'REJECTED', 'CANCELLED']), - Event.userId != 1 - ).group_by( - Event.nrId - ).cte('decision_counts') + decision_counts = ( + select(Event.nrId.label('nr_id'), func.count().label('cnt')) + .join(decision_candidates, decision_candidates.c.nr_id == Event.nrId) + .where( + Event.action == 'patch', + Event.stateCd.in_(['APPROVED', 'CONDITIONAL', 'REJECTED', 'CANCELLED']), + Event.userId != 1, + ) + .group_by(Event.nrId) + .cte('decision_counts') + ) # Step 3: first_decision_events CTE (join and filter cnt == 1) - first_decision_events = select( - decision_candidates - ).join( - decision_counts, - decision_candidates.c.nr_id == decision_counts.c.nr_id - ).where( - decision_counts.c.cnt == 1 - ).cte('first_decision_events') + first_decision_events = ( + select(decision_candidates) + .join(decision_counts, decision_candidates.c.nr_id == decision_counts.c.nr_id) + .where(decision_counts.c.cnt == 1) + .cte('first_decision_events') + ) # Step 4: Final median calculation median_waiting_time_query = ( select( - (func.percentile_cont(0.5).within_group( - func.extract('epoch', first_decision_events.c.event_dt) - - func.extract('epoch', Request.__table__.c.submitted_date) - ) / unit_time).label('examinationTime') + ( + func.percentile_cont(0.5).within_group( + func.extract('epoch', 
first_decision_events.c.event_dt) + - func.extract('epoch', Request.__table__.c.submitted_date) + ) + / unit_time + ).label('examinationTime') ) .select_from( - first_decision_events - .join(Request, Request.__table__.c.id == first_decision_events.c.nr_id) - .join(Payment, Payment.__table__.c.nr_id == Request.id) + first_decision_events.join(Request, Request.__table__.c.id == first_decision_events.c.nr_id).join( + Payment, Payment.__table__.c.nr_id == Request.id + ) ) .where( - (Payment.__table__.c.payment_completion_date - Request.__table__.c.submitted_date) <= text("interval '5 days'") + (Payment.__table__.c.payment_completion_date - Request.__table__.c.submitted_date) + <= text("interval '5 days'") ) ) @@ -756,7 +758,7 @@ def on_insert_or_update_nr(mapper, connection, request): Temporary NRs (nrNum starting with 'NR L') are discarded. """ - if not request.nrNum.startswith('NR L'): + if request.nrNum and not request.nrNum.startswith('NR L'): state_cd_history = get_history(request, 'stateCd') nr_num_history = get_history(request, 'nrNum') if len(nr_num_history.added) or len(state_cd_history.added): diff --git a/api/namex/resources/colin.py b/api/namex/resources/colin.py index db3386820..ef237b0df 100644 --- a/api/namex/resources/colin.py +++ b/api/namex/resources/colin.py @@ -16,7 +16,9 @@ # Register a local namespace for the NR reserve -colin_api = Namespace('Colin Info', description='Fetch business, office, and party details from Corporate Online (Colin)') +colin_api = Namespace( + 'Colin Info', description='Fetch business, office, and party details from Corporate Online (Colin)' +) @cors_preflight('GET') diff --git a/api/namex/resources/entities.py b/api/namex/resources/entities.py index 4e25efdca..10fc85fc7 100644 --- a/api/namex/resources/entities.py +++ b/api/namex/resources/entities.py @@ -14,7 +14,9 @@ # Register a local namespace for the NR reserve -entity_api = Namespace('Entity Info', description='Fetch business details such as legal name, status, and 
filing options') +entity_api = Namespace( + 'Entity Info', description='Fetch business details such as legal name, status, and filing options' +) @cors_preflight('GET') diff --git a/api/namex/resources/events.py b/api/namex/resources/events.py index 5c7ae3641..79e5d7745 100644 --- a/api/namex/resources/events.py +++ b/api/namex/resources/events.py @@ -66,7 +66,7 @@ def get(nr): 'user_name': None, ## the following are for notification events 'option': None, - 'email': None + 'email': None, } # previous event (used for 'user_action' logic) e_dict_previous = {} @@ -295,10 +295,15 @@ def __read_event_json(nr_event_info, event_json_data): nr_event_info['requestTypeCd'] = event_json_data['entity_type_cd'] @staticmethod - @api.expect(api.model('EventPayload', { - 'action': fields.String(required=True, description='Action name for the event'), - 'eventJson': fields.Raw(required=False, description='Additional event data (optional)') - })) + @api.expect( + api.model( + 'EventPayload', + { + 'action': fields.String(required=True, description='Action name for the event'), + 'eventJson': fields.Raw(required=False, description='Additional event data (optional)'), + }, + ) + ) @api.doc( description='Record a new event for a specific name request', params={'nr': 'NR number'}, @@ -324,7 +329,7 @@ def post(nr): EventRecorder.record_as_system( payload.get('action'), nrd, # Pass the Name Request ID instead of the request object - event_json + event_json, ) return make_response(jsonify({'message': 'Event recorded successfully'}), 201) @@ -433,4 +438,3 @@ def patch(event_id): except Exception as e: current_app.logger.error(f'Failed to update event {event_id}: {e}') return make_response(jsonify({'message': f'Failed to update event {event_id}: {str(e)}'}), 500) - diff --git a/api/namex/resources/name_requests/api_namespace.py b/api/namex/resources/name_requests/api_namespace.py index d6a0a6a48..89d1d60ed 100644 --- a/api/namex/resources/name_requests/api_namespace.py +++ 
b/api/namex/resources/name_requests/api_namespace.py @@ -2,4 +2,8 @@ from flask_restx import Namespace # Register a local namespace for the NR reserve -api = Namespace('Name Request', description='Public-facing API for name request creation, search, retrieval, and report generation', decorators=[cross_origin()]) +api = Namespace( + 'Name Request', + description='Public-facing API for name request creation, search, retrieval, and report generation', + decorators=[cross_origin()], +) diff --git a/api/namex/resources/name_requests/base_nr_resource.py b/api/namex/resources/name_requests/base_nr_resource.py index bb4137526..a0fc89b4a 100644 --- a/api/namex/resources/name_requests/base_nr_resource.py +++ b/api/namex/resources/name_requests/base_nr_resource.py @@ -1,10 +1,8 @@ -import os -from flask import current_app, request +from flask import request from namex.constants import NameRequestPatchActions from namex.models import Request, State -from namex.services.name_request.exceptions import NameRequestException from namex.services.name_request.name_request import NameRequestService from .abstract_nr_resource import AbstractNameRequestResource @@ -19,8 +17,6 @@ class BaseNameRequestResource(AbstractNameRequestResource): """ def initialize(self): - self.validate_config(current_app) - # Store a copy of the request data to our class instance request_json = request.get_json() self.request_data = request_json if request_json else {} @@ -32,14 +28,6 @@ def initialize(self): # Set the request data to the service self.nr_service.request_data = self.request_data - @classmethod - def validate_config(cls, app): - db_host = app.config.get('DB_HOST', None) - db_unix_socket = os.getenv('NAMEX_DATABASE_UNIX_SOCKET', None) - if not db_host and not db_unix_socket: - cls.log_error('ENV is not set', None) - raise NameRequestException(message='Internal server error') - """ The actual methods that map the request data to our domain models and persist the data. 
These are implemented statically so we can call them statically from our tests without having to instantiate a NameRequestResource. diff --git a/api/namex/resources/name_requests/name_request.py b/api/namex/resources/name_requests/name_request.py index dd23e4fc8..b0ac887da 100644 --- a/api/namex/resources/name_requests/name_request.py +++ b/api/namex/resources/name_requests/name_request.py @@ -87,14 +87,14 @@ def get(self, nr_id): @api.expect(nr_request) @api.doc( description="Update a name request's state and key fields. This endpoint supports state transitions including: " - "DRAFT, COND_RESERVE, RESERVED, PENDING_PAYMENT, COND_RESERVE → CONDITIONAL, and RESERVED → APPROVED. " - "Use PATCH instead for partial updates or name-only changes. Requires full access to the name request.", + 'DRAFT, COND_RESERVE, RESERVED, PENDING_PAYMENT, COND_RESERVE → CONDITIONAL, and RESERVED → APPROVED. ' + 'Use PATCH instead for partial updates or name-only changes. Requires full access to the name request.', params={'nr_id': 'Internal ID of the name request'}, responses={ 200: 'Successfully updated name request', 403: 'Forbidden', 400: 'invalid update state or payload', - 500: 'Internal server error' + 500: 'Internal server error', }, ) def put(self, nr_id): @@ -190,7 +190,6 @@ def patch(self, nr_id, nr_action: str): nr_model = Request.query.get(nr_id) def initialize(_self): - _self.validate_config(current_app) request_json = request.get_json() if nr_action: diff --git a/api/namex/resources/name_requests/report_resource.py b/api/namex/resources/name_requests/report_resource.py index 9fef48437..8c40ae0f3 100644 --- a/api/namex/resources/name_requests/report_resource.py +++ b/api/namex/resources/name_requests/report_resource.py @@ -33,7 +33,7 @@ class ReportResource(Resource): @api.doc( description='Generate and return the name request PDF results report. 
Requires either applicant email or ' - 'phone in headers, and the name request must be approved, consumed, expired, or rejected.', + 'phone in headers, and the name request must be approved, consumed, expired, or rejected.', params={'nr_id': 'Internal ID of the name request'}, responses={ 200: 'PDF report generated and returned successfully', diff --git a/api/namex/resources/ops.py b/api/namex/resources/ops.py index 28b34b465..bd08ea911 100644 --- a/api/namex/resources/ops.py +++ b/api/namex/resources/ops.py @@ -20,7 +20,8 @@ class Healthz(Resource): @staticmethod def get(): try: - db.engine.execute(sql) + with db.engine.connect() as connection: + connection.execute(sql) except exc.SQLAlchemyError: return {'message': 'api is down'}, 500 diff --git a/api/namex/resources/payment_societies.py b/api/namex/resources/payment_societies.py index 1608f887e..0d7ea7db1 100644 --- a/api/namex/resources/payment_societies.py +++ b/api/namex/resources/payment_societies.py @@ -15,22 +15,29 @@ # Register a local namespace for the payment_society api = Namespace('Payment Society', description='Manage payment records for societies') + @api.errorhandler(AuthError) def handle_auth_error(ex): return {'message': 'Unauthorized', 'details': ex.error.get('description') or 'Invalid or missing token'}, 401 + # Swagger input model for POST payload -payment_society_payload = api.model('PaymentSocietyPayload', { - 'nrNum': fields.String(required=True, description='Name Request number (e.g., NR1234567)'), - 'corpNum': fields.String(required=False, description='Corporation number'), - 'paymentCompletionDate': fields.DateTime(required=False, description='Payment completion timestamp (ISO format)'), - 'paymentStatusCode': fields.String(required=False, description='Status code for payment'), - 'paymentFeeCode': fields.String(required=False, description='Fee code used'), - 'paymentType': fields.String(required=False, description='Type of payment'), - 'paymentAmount': fields.Float(required=False, 
description='Payment amount in dollars'), - 'paymentJson': fields.Raw(required=False, description='Raw payment metadata (JSON object)'), - 'paymentAction': fields.String(required=False, description='Action taken (e.g., create, refund)') -}) +payment_society_payload = api.model( + 'PaymentSocietyPayload', + { + 'nrNum': fields.String(required=True, description='Name Request number (e.g., NR1234567)'), + 'corpNum': fields.String(required=False, description='Corporation number'), + 'paymentCompletionDate': fields.DateTime( + required=False, description='Payment completion timestamp (ISO format)' + ), + 'paymentStatusCode': fields.String(required=False, description='Status code for payment'), + 'paymentFeeCode': fields.String(required=False, description='Fee code used'), + 'paymentType': fields.String(required=False, description='Type of payment'), + 'paymentAmount': fields.Float(required=False, description='Payment amount in dollars'), + 'paymentJson': fields.Raw(required=False, description='Raw payment metadata (JSON object)'), + 'paymentAction': fields.String(required=False, description='Action taken (e.g., create, refund)'), + }, +) @cors_preflight('GET') diff --git a/api/namex/resources/requests.py b/api/namex/resources/requests.py index 6251fa0da..8f2b46632 100644 --- a/api/namex/resources/requests.py +++ b/api/namex/resources/requests.py @@ -12,7 +12,7 @@ from pytz import timezone from sqlalchemy import and_, exists, func, or_, text from sqlalchemy.inspection import inspect -from sqlalchemy.orm import eagerload, lazyload, load_only +from sqlalchemy.orm import joinedload, lazyload, load_only from sqlalchemy.orm.exc import NoResultFound from namex import jwt @@ -50,7 +50,9 @@ from .utils import DateUtils # Register a local namespace for the requests -api = Namespace('Name Examination', description='Staff-facing name request operations for state, analysis, and name editing') +api = Namespace( + 'Name Examination', description='Staff-facing name request operations 
for state, analysis, and name editing' +) # Marshmallow schemas request_schema = RequestsSchema(many=False) @@ -109,7 +111,7 @@ class RequestsQueue(Resource): @jwt.requires_roles([User.APPROVER]) @api.doc( description='Fetches the next draft name request from the queue and assigns it to the current user. ' - 'If the user already has an in-progress NR, that one is returned instead.', + 'If the user already has an in-progress NR, that one is returned instead.', params={'priorityQueue': 'Set to true to fetch from the priority queue'}, responses={ 200: 'Name request assigned successfully', @@ -448,7 +450,7 @@ def get(*args, **kwargs): q = q.order_by(text(sort_by)) # get a count of the full set size, this ignore the offset & limit settings - count_q = q.statement.with_only_columns([func.count()]).order_by(None) + count_q = q.statement.with_only_columns(func.count()).order_by(None) count = db.session.execute(count_q).scalar() # Add the paging @@ -541,7 +543,7 @@ def get(): ) .options( lazyload('*'), - eagerload(RequestDAO.names).load_only(Name.name), + joinedload(RequestDAO.names).load_only(Name.name), load_only(RequestDAO.id, RequestDAO.nrNum), ) .order_by(RequestDAO.submittedDate.desc()) @@ -616,7 +618,7 @@ def _get_next_set_from_solr(solr_query, start, rows): ), ).options( lazyload('*'), - eagerload(RequestDAO.names).load_only(Name.name), + joinedload(RequestDAO.names).load_only(Name.name), load_only(RequestDAO.id, RequestDAO.nrNum), ).all(), have_more_data @@ -625,18 +627,20 @@ def _get_next_set_from_solr(solr_query, start, rows): @staticmethod @cors.crossdomain(origin='*') @jwt.has_one_of_roles([User.SYSTEM]) - @api.expect(api.model( - 'AffiliationInvitationSearch', - { - 'identifiers': fields.List(fields.String, description='List of NR identifiers to search'), - 'identifier': fields.String(description='Search for a specific NR number'), - 'status': fields.List(fields.String, description='Filter by status (e.g. 
DRAFT, INPROGRESS)'), - 'name': fields.String(description='Partial name to search'), - 'type': fields.List(fields.String, description='Request types to filter'), - 'page': fields.Integer(description='Page number for pagination'), - 'limit': fields.Integer(description='Limit the number of results per page'), - }, - )) + @api.expect( + api.model( + 'AffiliationInvitationSearch', + { + 'identifiers': fields.List(fields.String, description='List of NR identifiers to search'), + 'identifier': fields.String(description='Search for a specific NR number'), + 'status': fields.List(fields.String, description='Filter by status (e.g. DRAFT, INPROGRESS)'), + 'name': fields.String(description='Partial name to search'), + 'type': fields.List(fields.String, description='Request types to filter'), + 'page': fields.Integer(description='Page number for pagination'), + 'limit': fields.Integer(description='Limit the number of results per page'), + }, + ) + ) @api.doc( description='Searches name requests by partially matching NR number or business name using a JSON payload', responses={ @@ -674,12 +678,7 @@ def post(): conditions.append( and_( RequestDAO.stateCd.in_({State.DRAFT, State.HOLD}), - exists().where( - and_( - Name.nrId == RequestDAO.id, - Name.state == NameState.NOT_EXAMINED.value - ) - ) + exists().where(and_(Name.nrId == RequestDAO.id, Name.state == NameState.NOT_EXAMINED.value)), ) ) @@ -699,8 +698,8 @@ def post(): q = q.filter(RequestDAO._entity_type_cd.in_([query_spgp])) q = q.options( lazyload('*'), - eagerload(RequestDAO.names).load_only(Name.state, Name.name), - eagerload(RequestDAO.applicants).load_only(Applicant.emailAddress, Applicant.phoneNumber), + joinedload(RequestDAO.names).load_only(Name.state, Name.name), + joinedload(RequestDAO.applicants).load_only(Applicant.emailAddress, Applicant.phoneNumber), load_only( RequestDAO.id, RequestDAO.nrNum, @@ -719,19 +718,20 @@ def post(): and search_details.limit is not None and search_details.limit > 0 ): - q = 
q.offset((search_details.page - 1) * search_details.limit).limit(search_details.limit+1) - q = q.offset((search_details.page - 1) * search_details.limit).limit(search_details.limit+1) + q = q.offset((search_details.page - 1) * search_details.limit).limit(search_details.limit + 1) + q = q.offset((search_details.page - 1) * search_details.limit).limit(search_details.limit + 1) requests = request_auth_search_schemas.dump(q.all()) - has_more = len(requests)> search_details.limit + has_more = len(requests) > search_details.limit actions_array = [ nr_filing_actions.get_actions(r['requestTypeCd'], r['entity_type_cd'], r['request_action_cd']) - for r in requests[:search_details.limit] + for r in requests[: search_details.limit] ] for r, additional_fields in zip(requests, actions_array): if additional_fields: r.update(additional_fields) requests = requests or [] - return jsonify({'requests': requests[:search_details.limit], 'hasMore': has_more}) + return jsonify({'requests': requests[: search_details.limit], 'hasMore': has_more}) + # noinspection PyUnresolvedReferences @cors_preflight('GET, PATCH, PUT, DELETE') @@ -771,20 +771,32 @@ def delete(nr): @staticmethod @cors.crossdomain(origin='*') @jwt.has_one_of_roles([User.APPROVER, User.EDITOR, User.SYSTEM]) - @api.expect(api.model('PatchNRPayload', { - 'state': fields.String(description='New state to apply to the Name Request'), - 'previousStateCd': fields.String(description='Optional previous state code'), - 'corpNum': fields.String(description='Corporation number (required if consuming name)'), - 'comments': fields.List(fields.Nested(api.model('PatchNRComment', { - 'comment': fields.String(required=True, description='Comment text'), - 'id': fields.Integer(description='Set to 0 or omit for new comments') - }))) - })) + @api.expect( + api.model( + 'PatchNRPayload', + { + 'state': fields.String(description='New state to apply to the Name Request'), + 'previousStateCd': fields.String(description='Optional previous state code'), 
+ 'corpNum': fields.String(description='Corporation number (required if consuming name)'), + 'comments': fields.List( + fields.Nested( + api.model( + 'PatchNRComment', + { + 'comment': fields.String(required=True, description='Comment text'), + 'id': fields.Integer(description='Set to 0 or omit for new comments'), + }, + ) + ) + ), + }, + ) + ) @api.doc( description=( "Updates a name request's state, records the previous state, optionally adds comments, assigns a corpNum if consumption state, " - "and calculates expiration if approval state. Only users with APPROVER, EDITOR, or SYSTEM roles may update state, " - "and certain transitions may be restricted based on role or current state." + 'and calculates expiration if approval state. Only users with APPROVER, EDITOR, or SYSTEM roles may update state, ' + 'and certain transitions may be restricted based on role or current state.' ), params={'nr': 'NR number'}, responses={ @@ -1667,10 +1679,7 @@ def common(nr, choice): @jwt.requires_auth @api.doc( description='Fetches the name record for the specified name request and choice number', - params={ - 'nr': 'NR number', - 'choice': 'Choice number (1, 2, or 3)' - }, + params={'nr': 'NR number', 'choice': 'Choice number (1, 2, or 3)'}, responses={ 200: 'Name record fetched successfully', 400: 'Invalid NR format', @@ -1685,25 +1694,29 @@ def get(nr, choice, *args, **kwargs): return names_schema.dumps(nrd_name).data, 200 - name_model = api.model('NameModel', { - 'choice': fields.Integer(description='Name choice number (1, 2, or 3)', example=1), - 'conflict1': fields.String(description='First conflict name'), - 'conflict2': fields.String(description='Second conflict name'), - 'conflict3': fields.String(description='Third conflict name'), - 'conflict1_num': fields.String(description='First conflict NR number'), - 'conflict2_num': fields.String(description='Second conflict NR number'), - 'conflict3_num': fields.String(description='Third conflict NR number'), - 'consumptionDate': 
fields.String(description='Consumption date in ISO format'), - 'corpNum': fields.String(description='Corporation number if consumed'), - 'decision_text': fields.String(description='Decision rationale or notes'), - 'designation': fields.String(description='Designation like INC, LTD, etc.'), - 'name_type_cd': fields.String(description='Name type code (e.g., CR, XPRO)'), - 'name': fields.String(description='The business name'), - 'state': fields.String(description='State of the name (e.g., APPROVED, REJECTED)'), - 'comment': fields.Nested(api.model('Comment', { - 'comment': fields.String(description='Comment about the name decision') - }), description='Optional comment on the decision') - }) + name_model = api.model( + 'NameModel', + { + 'choice': fields.Integer(description='Name choice number (1, 2, or 3)', example=1), + 'conflict1': fields.String(description='First conflict name'), + 'conflict2': fields.String(description='Second conflict name'), + 'conflict3': fields.String(description='Third conflict name'), + 'conflict1_num': fields.String(description='First conflict NR number'), + 'conflict2_num': fields.String(description='Second conflict NR number'), + 'conflict3_num': fields.String(description='Third conflict NR number'), + 'consumptionDate': fields.String(description='Consumption date in ISO format'), + 'corpNum': fields.String(description='Corporation number if consumed'), + 'decision_text': fields.String(description='Decision rationale or notes'), + 'designation': fields.String(description='Designation like INC, LTD, etc.'), + 'name_type_cd': fields.String(description='Name type code (e.g., CR, XPRO)'), + 'name': fields.String(description='The business name'), + 'state': fields.String(description='State of the name (e.g., APPROVED, REJECTED)'), + 'comment': fields.Nested( + api.model('Comment', {'comment': fields.String(description='Comment about the name decision')}), + description='Optional comment on the decision', + ), + }, + ) @staticmethod 
@cors.crossdomain(origin='*') @@ -1947,7 +1960,7 @@ def get(*args, **kwargs): q = q.filter(RequestDAO.userId == user.id) q = q.order_by(RequestDAO.lastUpdate.desc()) - count_q = q.statement.with_only_columns([func.count()]).order_by(None) + count_q = q.statement.with_only_columns(func.count()).order_by(None) count = db.session.execute(count_q).scalar() q = q.offset(start) diff --git a/api/namex/resources/user_settings.py b/api/namex/resources/user_settings.py index 9eef34575..393ee49cd 100644 --- a/api/namex/resources/user_settings.py +++ b/api/namex/resources/user_settings.py @@ -9,14 +9,16 @@ api = Namespace('User Settings', description='Fetch or update saved user preferences') + @api.errorhandler(AuthError) def handle_auth_error(ex): return {'message': 'Unauthorized', 'details': ex.error.get('description') or 'Invalid or missing token'}, 401 -user_settings_model = api.model('UserSettings', { - 'searchColumns': fields.List(fields.String, required=True, description='List of search column names') -}) +user_settings_model = api.model( + 'UserSettings', + {'searchColumns': fields.List(fields.String, required=True, description='List of search column names')}, +) @cors_preflight('GET, PUT') diff --git a/api/namex/resources/word_classification/word_classification.py b/api/namex/resources/word_classification/word_classification.py index 2e0d256e7..187866a55 100644 --- a/api/namex/resources/word_classification/word_classification.py +++ b/api/namex/resources/word_classification/word_classification.py @@ -17,7 +17,7 @@ 'name': fields.String(description='Full name associated with the word'), 'classification': fields.String(description='Classification code'), 'examiner': fields.String(description='Examiner who submitted the word'), - } + }, ) word_classification = api.model( diff --git a/api/namex/services/__init__.py b/api/namex/services/__init__.py index 84ce9db94..3e135d79c 100644 --- a/api/namex/services/__init__.py +++ b/api/namex/services/__init__.py @@ -1,11 +1,10 @@ 
-from gcp_queue.pubsub import GcpQueue +from gcp_queue import GcpQueue queue = GcpQueue() from .audit_trail import EventRecorder from .exceptions import ServicesError -from .messages import MessageServices -from .name_request.name_request_state import is_reapplication_eligible from .flags import Flags +from .messages import MessageServices flags = Flags() diff --git a/api/namex/services/audit_trail/event_recorder.py b/api/namex/services/audit_trail/event_recorder.py index 418888cbe..3d595341e 100644 --- a/api/namex/services/audit_trail/event_recorder.py +++ b/api/namex/services/audit_trail/event_recorder.py @@ -3,8 +3,6 @@ from flask import current_app -from namex.models import Event, User - class EventRecorder(object): @staticmethod @@ -27,6 +25,9 @@ def record(user, action, nr, data_dict, save_to_session=False): @staticmethod def record_as_system(action, nr, data_dict, save_to_session=False): """Record an event as a system user.""" + # Lazy import to avoid circular dependency + from namex.models import User + try: user = User.get_service_account_user() if user: @@ -46,6 +47,9 @@ def record_as_system(action, nr, data_dict, save_to_session=False): @staticmethod def create_event(user, action, nr, data_dict): """Create an event object.""" + # Lazy import to avoid circular dependency + from namex.models import Event + event = Event( eventDate=datetime.utcnow(), action=action, diff --git a/api/namex/services/flags.py b/api/namex/services/flags.py index 8f388f08d..e79d3084a 100644 --- a/api/namex/services/flags.py +++ b/api/namex/services/flags.py @@ -13,6 +13,8 @@ # limitations under the License. 
"""Manage the Feature Flags initialization, setup and service.""" +from typing import TYPE_CHECKING + from flask import current_app from ldclient import Context from ldclient import get as ldclient_get @@ -20,7 +22,8 @@ from ldclient.config import Config from ldclient.integrations import Files -from namex.models import User +if TYPE_CHECKING: + from namex.models import User class Flags: @@ -79,10 +82,10 @@ def _get_anonymous_user(): return Context.create('anonymous') @staticmethod - def _user_as_key(user: User): + def _user_as_key(user: 'User'): return Context.builder(user.idp_userid).set('firstName', user.firstname).set('lastName', user.lastname).build() - def is_on(self, flag: str, default: bool = False, user: User = None) -> bool: + def is_on(self, flag: str, default: bool = False, user: 'User' = None) -> bool: """Assert that the flag is set for this user.""" client = self._get_client() @@ -96,7 +99,7 @@ def is_on(self, flag: str, default: bool = False, user: User = None) -> bool: return bool(client.variation(flag, flag_user, default)) - def value(self, flag: str, default=None, user: User = None): + def value(self, flag: str, default=None, user: 'User' = None): """Retrieve the value of the (flag, user) tuple.""" client = self._get_client() diff --git a/api/namex/services/logging_config.py b/api/namex/services/logging_config.py index 8e4094361..6aa71f707 100644 --- a/api/namex/services/logging_config.py +++ b/api/namex/services/logging_config.py @@ -15,16 +15,12 @@ def configure_logging(app): app.logger = structured_logger.get_logger() # Prepend context-binding to the processor chain - structlog.configure( - processors=[structlog.contextvars.merge_contextvars] + structlog.get_config()['processors'] - ) + structlog.configure(processors=[structlog.contextvars.merge_contextvars] + structlog.get_config()['processors']) # Add request metadata to the log context @app.before_request def bind_app_name(): structlog.contextvars.clear_contextvars() 
structlog.contextvars.bind_contextvars( - app_name=request.headers.get('App-Name', 'unknown'), - method=request.method, - endpoint=request.path + app_name=request.headers.get('App-Name', 'unknown'), method=request.method, endpoint=request.path ) diff --git a/api/namex/services/lookup/name_request_filing_actions.py b/api/namex/services/lookup/name_request_filing_actions.py index 18f8c1eaa..33133857f 100644 --- a/api/namex/services/lookup/name_request_filing_actions.py +++ b/api/namex/services/lookup/name_request_filing_actions.py @@ -641,7 +641,7 @@ def get_request_type_array(self, request_type): return {self.requestTypecd.get(request_type, 'Key not found')} def get_entity_type_sole_general_nrs(self, request_type): - # special case for sole and general partnerships + # special case for sole and general partnerships if request_type == ['GP']: return NameRequestFilingActions.get_actions(self, 'FR', 'GP', 'NEW').get('legalType') elif request_type == ['SP']: diff --git a/api/namex/services/name_request/abstract_name_request.py b/api/namex/services/name_request/abstract_name_request.py index 569736ddf..0dee44db8 100644 --- a/api/namex/services/name_request/abstract_name_request.py +++ b/api/namex/services/name_request/abstract_name_request.py @@ -119,19 +119,19 @@ def get_mapped_entity_and_action_code(cls, request_type): @classmethod def get_request_sequence(cls): seq = db.Sequence('requests_id_seq') - nr_id = db.engine.execute(seq) + nr_id = db.session.execute(seq) return nr_id @classmethod def get_applicant_sequence(cls): seq = db.Sequence('applicants_party_id_seq') - party_id = db.engine.execute(seq) + party_id = db.session.execute(seq) return party_id @classmethod def get_name_sequence(cls): seq = db.Sequence('names_id_seq') - name_id = db.engine.execute(seq) + name_id = db.session.execute(seq) return name_id @classmethod diff --git a/api/namex/services/name_request/auto_analyse/name_analysis_utils.py 
b/api/namex/services/name_request/auto_analyse/name_analysis_utils.py index 85f2b7c85..a543b9f45 100644 --- a/api/namex/services/name_request/auto_analyse/name_analysis_utils.py +++ b/api/namex/services/name_request/auto_analyse/name_analysis_utils.py @@ -12,23 +12,6 @@ from . import porter -# TODO: Fix caps and stuff... -def data_frame_to_list(df): - df_dist = df.loc[df.word_classification == DataFrameFields.DISTINCTIVE.value] - df_desc = df.loc[df.word_classification == DataFrameFields.DESCRIPTIVE.value] - df_none = df.loc[df.word_classification == DataFrameFields.UNCLASSIFIED.value] - - list_dist = list(df_dist.word) - list_desc = list(df_desc.word) - list_none = list(df_none.word) - - return list_dist, list_desc, list_none - - -def get_dataframe_list(df, field): - return df[field].str.split(',').tolist() - - def get_flat_list(lst): subs_list = [item for sublist in lst for item in sublist] return [x.strip() for x in subs_list] diff --git a/api/namex/services/name_request/name_request_state.py b/api/namex/services/name_request/name_request_state.py index 812685dda..a830322d5 100644 --- a/api/namex/services/name_request/name_request_state.py +++ b/api/namex/services/name_request/name_request_state.py @@ -21,13 +21,9 @@ PaymentState, ) from namex.models import State +from namex.utils.pg8000_compat import safe_date_extraction -from .exceptions import ( - InvalidStateError, - NameRequestActionError, - NameRequestIsConsumedError, - NameRequestIsExpiredError, -) +from .exceptions import InvalidStateError, NameRequestActionError, NameRequestIsConsumedError, NameRequestIsExpiredError from .utils import has_complete_payment, has_completed_or_refunded_payment state_transition_error_msg = 'Invalid state transition [{current_state}] -> [{next_state}]' @@ -105,7 +101,7 @@ def display_reapply_action(nr_model=None) -> Boolean: def is_reapplication_eligible(expiration_date) -> Boolean: if expiration_date: todays_date = datetime.now(timezone.utc).date() - expiry_date = 
expiration_date.date() + expiry_date = safe_date_extraction(expiration_date) delta = expiry_date - todays_date return delta.days <= 14 diff --git a/api/namex/services/statistics/wait_time_statistics.py b/api/namex/services/statistics/wait_time_statistics.py index ce9626472..3db3d28f1 100644 --- a/api/namex/services/statistics/wait_time_statistics.py +++ b/api/namex/services/statistics/wait_time_statistics.py @@ -1,8 +1,9 @@ -from datetime import datetime, timedelta +from datetime import timedelta from namex.models import Event, Request from namex.services.statistics import response_keys from namex.utils.api_resource import handle_exception +from namex.utils.pg8000_compat import pg8000_utcnow, safe_datetime_delta from namex.utils.sql_alchemy import query_result_to_dict @@ -20,12 +21,12 @@ def get_approved_names_counter(cls): def get_waiting_time_dict(cls): try: if not (oldest_draft := Request.get_oldest_draft()): - oldest_draft_date = datetime.now().astimezone() + oldest_draft_date = pg8000_utcnow() else: oldest_draft_date = oldest_draft.submittedDate # add one to waiting time to account for current day - delta = datetime.now().astimezone() - oldest_draft_date + timedelta(days=1) + delta = safe_datetime_delta(pg8000_utcnow(), oldest_draft_date) + timedelta(days=1) response_data = {'oldest_draft': oldest_draft_date.isoformat(), 'waiting_time': delta.days} except Exception as err: return handle_exception(err, repr(err), 500) diff --git a/api/namex/services/word_classification/token_classifier.py b/api/namex/services/word_classification/token_classifier.py index b91ccd105..a0ee7cab1 100644 --- a/api/namex/services/word_classification/token_classifier.py +++ b/api/namex/services/word_classification/token_classifier.py @@ -1,9 +1,26 @@ from enum import Enum -import pandas as pd from flask import current_app -from ..name_request.auto_analyse.name_analysis_utils import data_frame_to_list + +def classifications_to_lists(classifications): + """Convert list of classification 
dictionaries to separate lists by type.""" + list_dist = [] + list_desc = [] + list_none = [] + + for item in classifications: + classification = item['word_classification'] + word = item['word'] + + if classification == DataFrameFields.DISTINCTIVE.value: + list_dist.append(word) + elif classification == DataFrameFields.DESCRIPTIVE.value: + list_desc.append(word) + elif classification == DataFrameFields.UNCLASSIFIED.value: + list_none.append(word) + + return list_dist, list_desc, list_none class DataFrameFields(Enum): @@ -68,18 +85,6 @@ def __init__(self, svc): self.descriptive_word_tokens = [] self.unclassified_word_tokens = [] - @classmethod - def dataframe_to_list(df): - df_dist = df.loc[df.word_classification == DataFrameFields.DISTINCTIVE.value] - df_desc = df.loc[df.word_classification == DataFrameFields.DESCRIPTIVE.value] - df_none = df.loc[df.word_classification == DataFrameFields.UNCLASSIFIED.value] - - list_dist = list(df_dist.word) - list_desc = list(df_desc.word) - list_none = list(df_none.word) - - return list_dist, list_desc, list_none - """ Utility for adding unclassified words to distinctive and descriptive list Override the abstract / base class method @@ -107,29 +112,26 @@ def handle_unclassified_words(list_dist, list_desc, list_none, list_name): def _classify_tokens(self, word_tokens): try: - cf = pd.DataFrame(columns=['word', 'word_classification']) + classifications = [] wc_svc = self.word_classification_service # Get the word classification for each word in the supplied name name for word in word_tokens: word_classification = wc_svc.find_one(word) - new_row = [] if not word_classification: current_app.logger.debug('No word classification found for: ' + word) - new_row.append( + classifications.append( {'word': word.lower().strip(), 'word_classification': DataFrameFields.UNCLASSIFIED.value} ) else: for row in word_classification: - new_row.append( + classifications.append( {'word': word.lower().strip(), 'word_classification': 
row.classification.strip()} ) - cf = cf.append(new_row, ignore_index=True) - self.distinctive_word_tokens, self.descriptive_word_tokens, self.unclassified_word_tokens = ( - data_frame_to_list(cf) + classifications_to_lists(classifications) ) except Exception as error: diff --git a/api/namex/utils/pg8000_compat.py b/api/namex/utils/pg8000_compat.py new file mode 100644 index 000000000..bd60c13a7 --- /dev/null +++ b/api/namex/utils/pg8000_compat.py @@ -0,0 +1,99 @@ +""" +pg8000 compatibility utilities for datetime handling. + +This module provides utility functions to handle datetime objects consistently +with pg8000 driver, avoiding timezone-related errors without modifying business logic. +""" +from datetime import datetime, timezone + + +def ensure_timezone_aware(dt_obj): + """ + Ensure a datetime object is timezone-aware for pg8000 compatibility. + + Args: + dt_obj: datetime object or None + + Returns: + timezone-aware datetime object or None + """ + if dt_obj is None: + return None + + if hasattr(dt_obj, 'date') and not hasattr(dt_obj, 'tzinfo'): + # Handle date objects - convert to datetime with UTC timezone + return datetime.combine(dt_obj, datetime.min.time()).replace(tzinfo=timezone.utc) + elif hasattr(dt_obj, 'tzinfo') and dt_obj.tzinfo is None: + # Handle naive datetime objects - assume UTC + return dt_obj.replace(tzinfo=timezone.utc) + else: + # Already timezone-aware or not a datetime + return dt_obj + + +def safe_datetime_delta(dt1, dt2): + """ + Safely calculate delta between two datetime objects, ensuring timezone compatibility. + + Args: + dt1: First datetime object + dt2: Second datetime object + + Returns: + timedelta object + """ + dt1_aware = ensure_timezone_aware(dt1) + dt2_aware = ensure_timezone_aware(dt2) + + return dt1_aware - dt2_aware + + +def normalize_db_datetime(dt_obj): + """ + Normalize datetime object retrieved from database for consistent handling. 
+ + This function can be used in model property getters to ensure consistent + datetime handling without changing business logic. + + Args: + dt_obj: datetime object from database + + Returns: + normalized datetime object + """ + return ensure_timezone_aware(dt_obj) + + +def safe_date_extraction(dt_or_date_obj): + """ + Safely extract date from datetime or date object for pg8000 compatibility. + + This addresses the "'datetime.date' object has no attribute 'date'" error. + + Args: + dt_or_date_obj: datetime or date object + + Returns: + date object + """ + if dt_or_date_obj is None: + return None + + # If it's already a date object, return it directly + if hasattr(dt_or_date_obj, 'year') and not hasattr(dt_or_date_obj, 'hour'): + return dt_or_date_obj + + # If it's a datetime object, extract the date + if hasattr(dt_or_date_obj, 'date'): + return dt_or_date_obj.date() + + return dt_or_date_obj + + +def pg8000_utcnow(): + """ + Return timezone-aware UTC datetime for pg8000 compatibility. + + Use this instead of datetime.utcnow() in areas that need timezone-aware datetimes. 
+ """ + return datetime.now(timezone.utc) diff --git a/api/namex/utils/queue_util.py b/api/namex/utils/queue_util.py index 626ca857e..ed37c9133 100644 --- a/api/namex/utils/queue_util.py +++ b/api/namex/utils/queue_util.py @@ -36,7 +36,7 @@ def publish_resend_email_notification(nr_num: str, option: str, resend_event_id: 'request': { 'nrNum': nr_num, 'option': option, - 'resendEventId': resend_event_id # Include the resend event ID + 'resendEventId': resend_event_id, # Include the resend event ID } } @@ -51,7 +51,9 @@ def publish_resend_email_notification(nr_num: str, option: str, resend_event_id: email_topic = current_app.config.get('EMAILER_TOPIC', 'mailer') payload = queue.to_queue_message(ce) - current_app.logger.debug('About to publish resend email for %s nrNum=%s, resendEventId=%s', option, nr_num, resend_event_id) + current_app.logger.debug( + 'About to publish resend email for %s nrNum=%s, resendEventId=%s', option, nr_num, resend_event_id + ) queue.publish(topic=email_topic, payload=payload) diff --git a/api/namex/utils/synonyms_api_auth.py b/api/namex/utils/synonyms_api_auth.py index a7d7b73ac..b314c3e06 100644 --- a/api/namex/utils/synonyms_api_auth.py +++ b/api/namex/utils/synonyms_api_auth.py @@ -57,4 +57,3 @@ def custom_request(self, method, url, *args, **kwargs): RESTClientObject.request = custom_request swagger_client._synonyms_auth_patched = True - diff --git a/api/poetry.lock b/api/poetry.lock index 8bc6ef2ae..79b37e81c 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,12 +1,24 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] [[package]] name = "aiohttp" version = "3.9.5" description = "Async http client/server framework (asyncio)" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, @@ -94,15 +106,15 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, @@ -115,9 +127,9 @@ frozenlist = ">=1.1.0" name = "alembic" version = "1.13.1" description = "A database migration tool for SQLAlchemy." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, @@ -129,15 +141,15 @@ SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" [package.extras] -tz = ["backports.zoneinfo"] +tz = ["backports.zoneinfo ; python_version < \"3.9\""] [[package]] name = "aniso8601" version = "9.0.1" description = "A library for parsing ISO 8601 strings." -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, @@ -146,13 +158,25 @@ files = [ [package.extras] dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] +[[package]] +name = "asn1crypto" +version = "1.5.1" +description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, +] + [[package]] name = "attrs" version = "22.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, {file = "attrs-22.2.0.tar.gz", hash = 
"sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, @@ -163,48 +187,27 @@ cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] dev = ["attrs[docs,tests]"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +tests-no-zope = ["cloudpickle ; platform_python_implementation == \"CPython\"", "cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990) ; platform_python_implementation == \"CPython\"", "mypy (>=0.971,<0.990) ; platform_python_implementation == \"CPython\"", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version < \"3.11\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version < \"3.11\"", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "blinker" version = "1.8.2" description = "Fast, simple object-to-object and broadcast signaling" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] -[[package]] -name = "cachecontrol" -version = "0.14.0" -description = "httplib2 caching for requests" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = 
"sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, - {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, -] - -[package.dependencies] -msgpack = ">=0.5.2,<2.0.0" -requests = ">=2.16.0" - -[package.extras] -dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] -filecache = ["filelock (>=3.8.0)"] -redis = ["redis (>=2.10.5)"] - [[package]] name = "cachelib" version = "0.13.0" description = "A collection of cache libraries in the same API interface." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "cachelib-0.13.0-py3-none-any.whl", hash = "sha256:8c8019e53b6302967d4e8329a504acf75e7bc46130291d30188a6e4e58162516"}, {file = "cachelib-0.13.0.tar.gz", hash = "sha256:209d8996e3c57595bee274ff97116d1d73c4980b2fd9a34c7846cd07fd2e1a48"}, @@ -214,9 +217,9 @@ files = [ name = "cachetools" version = "5.3.3" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, @@ -226,9 +229,9 @@ files = [ name = "certifi" version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." 
-category = "main" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, @@ -238,9 +241,10 @@ files = [ name = "cffi" version = "1.17.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -318,9 +322,9 @@ pycparser = "*" name = "charset-normalizer" version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.6.0" +groups = ["main", "dev"] files = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, @@ -333,9 +337,9 @@ unicode-backport = ["unicodedata2"] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, @@ -344,13 +348,61 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "cloud-sql-connector" +version = "0.2.0" +description = "Cloud SQL connection utilities for database connectivity with authentication and schema management" +optional = false +python-versions = "^3.12" +groups = ["main"] +files = [] +develop = false + +[package.dependencies] +cloud-sql-python-connector = "^1.16.0" +pg8000 = "^1.30.0" +sqlalchemy = "^2.0.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "87012397b3ca62e15ffa542214d80f2d2ae7e72d" +subdirectory = "python/cloud-sql-connector" + +[[package]] +name = "cloud-sql-python-connector" +version = "1.18.4" +description = "Google Cloud SQL Python Connector library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "cloud_sql_python_connector-1.18.4-py3-none-any.whl", hash = "sha256:0a77a16ab2d93fc78d8593175cb69fedfbc1c67aa99f9b3ba70b5026343db092"}, + {file = "cloud_sql_python_connector-1.18.4.tar.gz", hash = 
"sha256:dd2b015245d77771b5e7566e2817e279e9daca90e0cf30dac032155e813afe76"}, +] + +[package.dependencies] +aiofiles = "*" +aiohttp = "*" +cryptography = ">=42.0.0" +dnspython = ">=2.0.0" +google-auth = ">=2.28.0" +Requests = "*" + +[package.extras] +asyncpg = ["asyncpg (>=0.30.0)"] +pg8000 = ["pg8000 (>=1.31.1)"] +pymysql = ["PyMySQL (>=1.1.0)"] +pytds = ["python-tds (>=1.15.0)"] + [[package]] name = "cmudict" version = "1.0.23" description = "A versioned python wrapper package for The CMU Pronouncing Dictionary data files." -category = "main" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ {file = "cmudict-1.0.23-py3-none-any.whl", hash = "sha256:a6b933bd5777afafb18ea5d4989c24f326bf4076b12f49f8d5de5177b7b173a6"}, {file = "cmudict-1.0.23.tar.gz", hash = "sha256:a1f53a140f867a62ce10e344df082be0cb4bb6b8fbaa63f4c9c6ae13db501b8c"}, @@ -364,21 +416,22 @@ importlib-resources = ">=5" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} [[package]] name = "coverage" version = "7.5.1" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"}, {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"}, @@ -435,15 +488,15 @@ files = [ ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" version = "44.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] files = [ {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, @@ -482,10 +535,10 @@ files = [ cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] @@ -495,9 +548,9 @@ test-randomorder = ["pytest-randomly"] name = "dataclasses" version = "0.6" description = "A backport of the dataclasses module for Python 3.6" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"}, {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"}, @@ -507,21 +560,42 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] +[[package]] +name = "dnspython" +version = "2.8.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, + {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, +] + +[package.extras] +dev = ["black (>=25.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.17.0)", "mypy (>=1.17)", "pylint (>=3)", "pytest (>=8.4)", "pytest-cov (>=6.2.0)", "quart-trio (>=0.12.0)", "sphinx (>=8.2.0)", "sphinx-rtd-theme (>=3.0.0)", "twine (>=6.1.0)", "wheel (>=0.45.0)"] +dnssec = ["cryptography (>=45)"] +doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] +doq = ["aioquic (>=1.2.0)"] +idna = ["idna (>=3.10)"] +trio = ["trio (>=0.30)"] +wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] + [[package]] name = "dpath" version = "2.1.6" description = "Filesystem-like pathing and searching for dictionaries" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "dpath-2.1.6-py3-none-any.whl", hash = "sha256:31407395b177ab63ef72e2f6ae268c15e938f2990a8ecf6510f5686c02b6db73"}, {file = "dpath-2.1.6.tar.gz", hash = "sha256:f1e07c72e8605c6a9e80b64bc8f42714de08a789c7de417e49c3f87a19692e47"}, @@ -531,9 +605,9 @@ files = [ name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 
+groups = ["main"] files = [ {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, @@ -550,9 +624,9 @@ gmpy2 = ["gmpy2"] name = "expiringdict" version = "1.2.2" description = "Dictionary with auto-expiring values for caching purposes" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"}, {file = "expiringdict-1.2.2.tar.gz", hash = "sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"}, @@ -565,9 +639,9 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"] name = "flask" version = "3.0.3" description = "A simple framework for building complex web applications." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, @@ -588,9 +662,9 @@ dotenv = ["python-dotenv"] name = "flask-caching" version = "1.11.1" description = "Adds caching support to Flask applications." 
-category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "Flask-Caching-1.11.1.tar.gz", hash = "sha256:28af189e97defb9e39b43ebe197b54a58aaee81bdeb759f46d969c26d7aa7810"}, {file = "Flask_Caching-1.11.1-py3-none-any.whl", hash = "sha256:36592812eec6cba86eca48bcda74eff24bfd6c8eaf6056ca0184474bb78c0dc4"}, @@ -604,9 +678,9 @@ Flask = "*" name = "flask-cors" version = "4.0.1" description = "A Flask extension adding a decorator for CORS support" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask_Cors-4.0.1-py2.py3-none-any.whl", hash = "sha256:f2a704e4458665580c074b714c4627dd5a306b333deb9074d0b1794dfa2fb677"}, {file = "flask_cors-4.0.1.tar.gz", hash = "sha256:eeb69b342142fdbf4766ad99357a7f3876a2ceb77689dc10ff912aac06c389e4"}, @@ -619,9 +693,9 @@ Flask = ">=0.9" name = "flask-jwt-oidc" version = "0.8.1" description = "Opinionated flask oidc client" -category = "main" optional = false python-versions = ">=3.9,<4" +groups = ["main"] files = [] develop = false @@ -642,9 +716,9 @@ resolved_reference = "bba7bb26625b213f4be817b01f28fb8bbb5b05d1" name = "flask-marshmallow" version = "0.14.0" description = "Flask + marshmallow for beautiful APIs" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, @@ -656,19 +730,19 @@ marshmallow = ">=2.0.0" six = ">=1.9.0" [package.extras] -dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4) ; python_version >= \"3.5\"", "flask-sqlalchemy", 
"marshmallow-sqlalchemy (>=0.13.0) ; python_version >= \"3.6\"", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0) ; python_version < \"3.6\"", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] -lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] -sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] -tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] +lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4) ; python_version >= \"3.5\"", "pre-commit (>=2.4,<3.0)"] +sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0) ; python_version >= \"3.6\"", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0) ; python_version < \"3.6\""] +tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0) ; python_version >= \"3.6\"", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0) ; python_version < \"3.6\"", "mock", "pytest"] [[package]] name = "flask-migrate" version = "2.7.0" description = "SQLAlchemy database migrations for Flask applications using Alembic" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask-Migrate-2.7.0.tar.gz", hash = "sha256:ae2f05671588762dd83a21d8b18c51fe355e86783e24594995ff8d7380dffe38"}, {file = "Flask_Migrate-2.7.0-py2.py3-none-any.whl", hash = "sha256:26871836a4e46d2d590cf8e558c6d60039e1c003079b240689d845726b6b57c0"}, @@ -683,9 +757,9 @@ Flask-SQLAlchemy = ">=1.0" name = "flask-moment" version = "0.11.0" description = "Formatting of dates and times in Flask templates using moment.js." 
-category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask-Moment-0.11.0.tar.gz", hash = "sha256:ff4cc0c4f8ec6798e19ba17fac409a8090f21677da6b21e3e1e4450344d8ed71"}, {file = "Flask_Moment-0.11.0-py2.py3-none-any.whl", hash = "sha256:75e1ae59b7562731acf9faf295c0bfd8165f51f67a62bd779e0c57e5f1c66dbf"}, @@ -698,9 +772,9 @@ Flask = "*" name = "flask-opentracing" version = "1.1.0" description = "OpenTracing support for Flask applications" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask-OpenTracing-1.1.0.tar.gz", hash = "sha256:a9a39d367fbe7e9ed9c77b90ac48159c1a3e82982a5abf84d3f4d710d24580ac"}, ] @@ -716,9 +790,9 @@ tests = ["flake8", "flake8-quotes", "mock", "pytest", "pytest-cov"] name = "flask-restx" version = "1.3.0" description = "Fully featured framework for fast, easy and documented API development with Flask" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728"}, {file = "flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691"}, @@ -741,9 +815,9 @@ test = ["Faker (==2.0.0)", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pyt name = "flask-sqlalchemy" version = "3.0.5" description = "Add SQLAlchemy support to your Flask application." 
-category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "flask_sqlalchemy-3.0.5-py3-none-any.whl", hash = "sha256:cabb6600ddd819a9f859f36515bb1bd8e7dbf30206cc679d2b081dff9e383283"}, {file = "flask_sqlalchemy-3.0.5.tar.gz", hash = "sha256:c5765e58ca145401b52106c0f46178569243c5da25556be2c231ecc60867c5b1"}, @@ -757,9 +831,9 @@ sqlalchemy = ">=1.4.18" name = "freezegun" version = "1.5.1" description = "Let your Python tests travel through time" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, @@ -772,9 +846,9 @@ python-dateutil = ">=2.7" name = "frozenlist" version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, @@ -857,35 +931,34 @@ files = [ [[package]] name = "gcp-queue" -version = "2.0.0" +version = "0.3.0" description = "" -category = "main" optional = false -python-versions = "^3.12" +python-versions = "^3.8" +groups = ["main"] files = [] develop = false [package.dependencies] -cachecontrol = "^0.14.0" -flask = "^3.0.2" -google-auth = "^2.27.0" -google-cloud-pubsub = "^2.19.4" -simple_cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py"} +flask = ">=1" +google-auth = "^2.28.2" +google-cloud-pubsub = "^2.20.2" +simple-cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py.git"} 
[package.source] type = "git" -url = "https://github.com/bcgov/namex.git" -reference = "HEAD" -resolved_reference = "76f8488951061f251a4f28c009b377d0baa1cca7" -subdirectory = "services/pubsub" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "9240434e42a3c21187abe59f4c1b0318680f7c84" +subdirectory = "python/gcp-queue" [[package]] name = "google-api-core" version = "1.34.1" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-api-core-1.34.1.tar.gz", hash = "sha256:3399c92887a97d33038baa4bfd3bf07acc05d474b0171f333e1f641c1364e552"}, {file = "google_api_core-1.34.1-py3-none-any.whl", hash = "sha256:52bcc9d9937735f8a3986fa0bbf9135ae9cf5393a722387e5eced520e39c774a"}, @@ -908,9 +981,9 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] name = "google-auth" version = "2.29.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, @@ -932,16 +1005,16 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-cloud-pubsub" version = "2.21.1" description = "Google Cloud Pub/Sub API client library" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google-cloud-pubsub-2.21.1.tar.gz", hash = "sha256:31fcf07444b7f813a616c4b650e1fbf1dc998a088fe0059a76164855ac17f05c"}, {file = "google_cloud_pubsub-2.21.1-py2.py3-none-any.whl", hash = "sha256:55a3602ec45bc09626604d712032288a8ee3566145cb83523cff908938f69a4b"}, ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = 
">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-auth = ">=2.14.1,<3.0.0dev" grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" grpcio = ">=1.51.3,<2.0dev" @@ -956,9 +1029,9 @@ libcst = ["libcst (>=0.3.10)"] name = "googleapis-common-protos" version = "1.63.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, @@ -975,9 +1048,9 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "greenlet" version = "3.0.3" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, @@ -1047,9 +1120,9 @@ test = ["objgraph", "psutil"] name = "grpc-google-iam-v1" version = "0.13.0" description = "IAM API client library" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, @@ -1064,9 +1137,9 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 name = "grpcio" version = "1.63.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions 
= ">=3.8" +groups = ["main"] files = [ {file = "grpcio-1.63.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:2e93aca840c29d4ab5db93f94ed0a0ca899e241f2e8aec6334ab3575dc46125c"}, {file = "grpcio-1.63.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:91b73d3f1340fefa1e1716c8c1ec9930c676d6b10a3513ab6c26004cb02d8b3f"}, @@ -1123,9 +1196,9 @@ protobuf = ["grpcio-tools (>=1.63.0)"] name = "grpcio-status" version = "1.48.2" description = "Status proto mapping for gRPC" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"}, {file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"}, @@ -1140,9 +1213,9 @@ protobuf = ">=3.12.0" name = "gunicorn" version = "20.1.0" description = "WSGI HTTP Server for UNIX" -category = "main" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, @@ -1161,9 +1234,9 @@ tornado = ["tornado (>=0.2)"] name = "idna" version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" +groups = ["main", "dev"] files = [ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, @@ -1173,9 +1246,9 @@ files = [ name = "importlib-metadata" version = "7.1.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = 
"importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, @@ -1187,15 +1260,15 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" version = "5.13.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, @@ -1203,15 +1276,15 @@ files = [ [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", 
"pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-ruff"] [[package]] name = "inflect" version = "6.2.0" description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "inflect-6.2.0-py3-none-any.whl", hash = "sha256:5a005e0c9afe152cc95d552a59b8b0c19efc51823405b43d89e984f0c33bc243"}, {file = "inflect-6.2.0.tar.gz", hash = "sha256:518088ef414a4e15df70e6bcb40d021da4d423cc6c2fd4c0cad5500d39f86627"}, @@ -1223,15 +1296,15 @@ typing-extensions = "*" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-ruff"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1241,9 +1314,9 @@ files = [ name = "itsdangerous" version = "2.2.0" description = "Safely pass data to untrusted environments and back." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -1253,9 +1326,9 @@ files = [ name = "jaeger-client" version = "4.8.0" description = "Jaeger Python OpenTracing Tracer implementation" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, ] @@ -1273,9 +1346,9 @@ tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-import name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, @@ -1291,9 +1364,9 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.4.2" description = "Lightweight pipelining with Python functions" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -1303,9 +1376,9 @@ files = [ name = "jsonpickle" version = "2.2.0" description = "Python library for serializing any arbitrary object graph into JSON" -category = "main" optional = false python-versions = ">=2.7" +groups = ["main"] files = [ {file = "jsonpickle-2.2.0-py2.py3-none-any.whl", hash = 
"sha256:de7f2613818aa4f234138ca11243d6359ff83ae528b2185efdd474f62bcf9ae1"}, {file = "jsonpickle-2.2.0.tar.gz", hash = "sha256:7b272918b0554182e53dc340ddd62d9b7f902fec7e7b05620c04f3ccef479a0e"}, @@ -1313,16 +1386,16 @@ files = [ [package.extras] docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["ecdsa", "enum34", "feedparser", "jsonlib", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (<1.1.0)", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] -testing-libs = ["simplejson", "ujson", "yajl"] +testing = ["ecdsa", "enum34 ; python_version == \"2.7\"", "feedparser", "jsonlib ; python_version == \"2.7\"", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (<1.1.0) ; python_version <= \"3.6\"", "pytest-flake8 (>=1.1.1) ; python_version >= \"3.7\"", "scikit-learn", "sqlalchemy"] +testing-libs = ["simplejson", "ujson", "yajl ; python_version == \"2.7\""] [[package]] name = "jsonschema" version = "4.22.0" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, @@ -1342,9 +1415,9 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = 
"sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, @@ -1357,9 +1430,9 @@ referencing = ">=0.31.0" name = "launchdarkly-server-sdk" version = "8.3.0" description = "LaunchDarkly SDK for Python" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "launchdarkly_server_sdk-8.3.0-py3-none-any.whl", hash = "sha256:bc59dbf9897fd2d9c70098c13bb073983bef29f58cae9439e70b2463982f1bb5"}, {file = "launchdarkly_server_sdk-8.3.0.tar.gz", hash = "sha256:cdb8fadd457e6ae569c0cb0d5de112d7f2a9c84a0ba03167bb7d68710dde7283"}, @@ -1382,9 +1455,9 @@ test-filesource = ["pyyaml (>=3.0,<5.2)", "watchdog (>=0.9,!=0.10.5,<1.0)"] name = "lxml" version = "5.4.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"}, {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"}, @@ -1531,9 +1604,9 @@ source = ["Cython (>=3.0.11,<3.1.0)"] name = "mako" version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, @@ -1551,9 +1624,9 @@ testing = ["pytest"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -1621,9 +1694,9 @@ files = [ name = "marshmallow" version = "3.21.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "marshmallow-3.21.2-py3-none-any.whl", hash = "sha256:70b54a6282f4704d12c0a41599682c5c5450e843b9ec406308653b47c59648a1"}, {file = "marshmallow-3.21.2.tar.gz", hash = "sha256:82408deadd8b33d56338d2182d455db632c6313aa2af61916672146bb32edc56"}, @@ -1639,100 +1712,34 @@ tests = ["pytest", "pytz", "simplejson"] [[package]] name = "marshmallow-sqlalchemy" -version = "0.28.2" +version = "0.28.1" description = "SQLAlchemy integration with the marshmallow (de)serialization library" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "marshmallow-sqlalchemy-0.28.2.tar.gz", hash = "sha256:2ab0f1280c793e5aec81deab3e63ec23688ddfe05e5f38ac960368a1079520a1"}, - {file = "marshmallow_sqlalchemy-0.28.2-py2.py3-none-any.whl", hash = "sha256:c31b3bdf794de1d78c53e1c495502cbb3eeb06ed216869980c71d6159e7e9e66"}, + {file = "marshmallow-sqlalchemy-0.28.1.tar.gz", hash = "sha256:aa376747296780a56355e3067b9c8bf43a2a1c44ff985de82b3a5d9e161ca2b8"}, + {file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"}, ] [package.dependencies] marshmallow = ">=3.0.0" packaging = ">=21.3" -SQLAlchemy = ">=1.3.0,<2.0" +SQLAlchemy = ">=1.3.0" [package.extras] -dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] -docs = ["alabaster (==0.7.13)", "sphinx (==6.1.3)", "sphinx-issues (==3.0.1)"] -lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)"] +dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] +docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] tests = ["pytest", 
"pytest-lazy-fixture (>=0.6.2)"] -[[package]] -name = "msgpack" -version = "1.0.8" -description = "MessagePack serializer" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, - {file = 
"msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, - {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, - {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, - {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, - {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, -] - [[package]] name = "multidict" version = "6.0.5" description = "multidict implementation" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, @@ -1830,9 +1837,9 @@ files = [ name = "nltk" version = "3.8.1" description = "Natural Language Toolkit" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, @@ -1856,9 +1863,9 @@ twitter = ["twython"] name = "numpy" version = "1.26.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions 
= ">=3.9" +groups = ["main"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1902,9 +1909,9 @@ files = [ name = "opentracing" version = "2.4.0" description = "OpenTracing API for Python. See documentation at http://opentracing.io" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, ] @@ -1916,9 +1923,9 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte name = "packaging" version = "21.3" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, @@ -1928,60 +1935,28 @@ files = [ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] -name = "pandas" -version = "1.5.3" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" +name = "pg8000" +version = "1.31.4" +description = "PostgreSQL interface library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, -] -python-dateutil = ">=2.8.1" -pytz = ">=2020.1" + {file = "pg8000-1.31.4-py3-none-any.whl", hash = "sha256:d14fb2054642ee80f9a216721892e99e19db60a005358460ffa48872351423d4"}, + 
{file = "pg8000-1.31.4.tar.gz", hash = "sha256:e7ecce4339891f27b0b22e2f79eb9efe44118bd384207359fc18350f788ace00"}, +] -[package.extras] -test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] +[package.dependencies] +python-dateutil = ">=2.8.2" +scramp = ">=1.4.5" [[package]] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, @@ -1991,9 +1966,9 @@ files = [ name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2007,9 +1982,9 @@ testing = ["pytest", "pytest-benchmark"] name = "pronouncing" version = "0.2.0" description = "A simple interface for the CMU pronouncing dictionary" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pronouncing-0.2.0.tar.gz", hash = "sha256:ff7856e1d973b3e16ff490c5cf1abdb52f08f45e2c35e463249b75741331e7c4"}, ] @@ -2021,9 +1996,9 @@ cmudict = ">=0.4.0" name = "proto-plus" version = "1.23.0" description = "Beautiful, Pythonic protocol buffers." 
-category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, @@ -2039,9 +2014,9 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] name = "protobuf" version = "3.20.3" description = "Protocol Buffers" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, @@ -2067,95 +2042,13 @@ files = [ {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, ] -[[package]] -name = "psycopg2-binary" -version = "2.9.9" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - 
{file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = 
"psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = 
"psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = 
"psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, -] - [[package]] name = "pyasn1" version = "0.4.8" description = "ASN.1 types and codecs" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, @@ -2165,9 +2058,9 @@ files = [ name = "pyasn1-modules" version = "0.4.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, @@ -2180,9 +2073,9 @@ pyasn1 = ">=0.4.6,<0.7.0" name = "pycountry" version = "22.3.5" description = "ISO country, subdivision, language, currency and script definitions and their translations" -category = "main" 
optional = false python-versions = ">=3.6, <4" +groups = ["main"] files = [ {file = "pycountry-22.3.5.tar.gz", hash = "sha256:b2163a246c585894d808f18783e19137cb70a0c18fb36748dc01fc6f109c1646"}, ] @@ -2194,9 +2087,10 @@ setuptools = "*" name = "pycparser" version = "2.22" description = "C parser in Python" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -2206,9 +2100,9 @@ files = [ name = "pydantic" version = "1.10.15" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, @@ -2259,9 +2153,9 @@ email = ["email-validator (>=1.0.3)"] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, @@ -2271,15 +2165,15 @@ files = [ snowballstemmer = ">=2.2.0" [package.extras] -toml = ["tomli (>=1.2.3)"] +toml = ["tomli (>=1.2.3) ; python_version < \"3.11\""] [[package]] name = "pyhamcrest" version = "2.1.0" description = "Hamcrest framework for matcher objects" -category = "dev" 
optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pyhamcrest-2.1.0-py3-none-any.whl", hash = "sha256:f6913d2f392e30e0375b3ecbd7aee79e5d1faa25d345c8f4ff597665dcac2587"}, {file = "pyhamcrest-2.1.0.tar.gz", hash = "sha256:c6acbec0923d0cb7e72c22af1926f3e7c97b8e8d69fc7498eabacaf7c975bd9c"}, @@ -2288,16 +2182,16 @@ files = [ [package.extras] dev = ["black", "doc2dash", "flake8", "pyhamcrest[docs,tests]", "pytest-mypy", "towncrier", "tox", "tox-asdf", "twine"] docs = ["alabaster (>=0.7,<1.0)", "sphinx (>=4.0,<5.0)"] -tests = ["coverage[toml]", "dataclasses", "mypy (!=0.940)", "pytest (>=5.0)", "pytest-mypy-plugins", "pytest-sugar", "pytest-xdist", "pyyaml", "types-dataclasses", "types-mock"] +tests = ["coverage[toml]", "dataclasses ; python_version < \"3.7\"", "mypy (!=0.940) ; platform_python_implementation != \"PyPy\"", "pytest (>=5.0)", "pytest-mypy-plugins ; platform_python_implementation != \"PyPy\"", "pytest-sugar", "pytest-xdist", "pyyaml", "types-dataclasses ; python_version < \"3.7\"", "types-mock"] tests-numpy = ["numpy", "pyhamcrest[tests]"] [[package]] name = "pyjwt" version = "2.10.1" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -2313,9 +2207,9 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pyparsing" version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" +groups = ["main", "dev"] files = [ {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, {file = 
"pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, @@ -2328,9 +2222,9 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyrfc3339" version = "1.1" description = "Generate and parse RFC 3339 timestamps" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"}, {file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"}, @@ -2343,9 +2237,9 @@ pytz = "*" name = "pyrsistent" version = "0.18.1" description = "Persistent/Functional/Immutable data structures" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, @@ -2374,9 +2268,9 @@ files = [ name = "pysolr" version = "3.9.0" description = "Lightweight Python client for Apache Solr" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pysolr-3.9.0.tar.gz", hash = "sha256:6ef05feb87c614894243eddc62e9b0a6134a889c159ae868655cf6cd749545e6"}, ] @@ -2391,9 +2285,9 @@ solrcloud = ["kazoo (>=2.5.0)"] name = "pytest" version = "8.2.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, @@ -2412,9 +2306,9 @@ dev = ["argcomplete", "attrs 
(>=19.2)", "hypothesis (>=3.56)", "mock", "pygments name = "pytest-aiohttp" version = "1.0.5" description = "Pytest plugin for aiohttp support" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-aiohttp-1.0.5.tar.gz", hash = "sha256:880262bc5951e934463b15e3af8bb298f11f7d4d3ebac970aab425aff10a780a"}, {file = "pytest_aiohttp-1.0.5-py3-none-any.whl", hash = "sha256:63a5360fd2f34dda4ab8e6baee4c5f5be4cd186a403cabd498fced82ac9c561e"}, @@ -2432,9 +2326,9 @@ testing = ["coverage (==6.2)", "mypy (==0.931)"] name = "pytest-asyncio" version = "0.23.6" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, @@ -2451,9 +2345,9 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -2470,9 +2364,9 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-mock" version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, @@ -2488,9 +2382,9 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2503,9 +2397,9 @@ six = ">=1.5" name = "python-dotenv" version = "0.21.1" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, @@ -2518,9 
+2412,9 @@ cli = ["click (>=5.0)"] name = "python-editor" version = "1.0.4" description = "Programmatically open an editor, capture the result." -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "python-editor-1.0.4.tar.gz", hash = "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"}, {file = "python_editor-1.0.4-py2-none-any.whl", hash = "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"}, @@ -2531,9 +2425,9 @@ files = [ name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, @@ -2553,9 +2447,9 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "pytz" version = "2022.7.1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, @@ -2565,9 +2459,9 @@ files = [ name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2628,9 +2522,9 @@ files = [ name = "referencing" 
version = "0.35.1" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, @@ -2644,9 +2538,9 @@ rpds-py = ">=0.7.0" name = "regex" version = "2022.10.31" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, @@ -2742,9 +2636,9 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, @@ -2764,9 +2658,9 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "responses" version = "0.25.7" description = "A utility library for mocking out the `requests` Python library." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, @@ -2778,15 +2672,15 @@ requests = ">=2.30.0,<3.0" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "rpds-py" version = "0.18.1" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, @@ -2893,9 +2787,9 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -2908,9 +2802,9 @@ pyasn1 = ">=0.1.3" name = "ruff" version = "0.11.6" description = "An extremely fast Python linter and code formatter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.11.6-py3-none-linux_armv6l.whl", hash = "sha256:d84dcbe74cf9356d1bdb4a78cf74fd47c740bf7bdeb7529068f69b08272239a1"}, {file = "ruff-0.11.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9bc583628e1096148011a5d51ff3c836f51899e61112e03e5f2b1573a9b726de"}, @@ -2936,9 +2830,9 @@ files = [ name = "sbc_common_components" version = "0.0.0" description = "" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [] develop = false @@ -2956,13 +2850,28 @@ reference = "HEAD" resolved_reference = "c3da5ee1d95622ce4f125f2fa3626af8b25874eb" subdirectory = "python" +[[package]] +name = "scramp" +version = "1.4.6" +description = "An implementation of the SCRAM protocol." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "scramp-1.4.6-py3-none-any.whl", hash = "sha256:a0cf9d2b4624b69bac5432dd69fecfc55a542384fe73c3a23ed9b138cda484e1"}, + {file = "scramp-1.4.6.tar.gz", hash = "sha256:fe055ebbebf4397b9cb323fcc4b299f219cd1b03fd673ca40c97db04ac7d107e"}, +] + +[package.dependencies] +asn1crypto = ">=1.5.1" + [[package]] name = "semver" version = "3.0.2" description = "Python helper for Semantic Versioning (https://semver.org)" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, @@ -2972,9 +2881,9 @@ files = [ name = "setuptools" version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "setuptools-69.5.1-py3-none-any.whl", hash = 
"sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, @@ -2982,16 +2891,16 @@ files = [ [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov ; platform_python_implementation != \"PyPy\"", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simple-cloudevent" version = "0.0.2" 
description = "A short description of the project" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [] develop = false @@ -3000,7 +2909,7 @@ strict-rfc3339 = "*" [package.source] type = "git" -url = "https://github.com/daxiom/simple-cloudevent.py" +url = "https://github.com/daxiom/simple-cloudevent.py.git" reference = "HEAD" resolved_reference = "447cabb988202206ac69e71177d7cd11b6c0b002" @@ -3008,9 +2917,9 @@ resolved_reference = "447cabb988202206ac69e71177d7cd11b6c0b002" name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main", "dev"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -3020,9 +2929,9 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -3030,91 +2939,107 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "2.0.43" description = "Database Abstraction Library" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = 
"SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - 
{file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = 
"SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.43-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21ba7a08a4253c5825d1db389d4299f64a100ef9800e4624c8bf70d8f136e6ed"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11b9503fa6f8721bef9b8567730f664c5a5153d25e247aadc69247c4bc605227"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07097c0a1886c150ef2adba2ff7437e84d40c0f7dcb44a2c2b9c905ccfc6361c"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cdeff998cb294896a34e5b2f00e383e7c5c4ef3b4bfa375d9104723f15186443"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = 
"sha256:bcf0724a62a5670e5718957e05c56ec2d6850267ea859f8ad2481838f889b42c"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-win32.whl", hash = "sha256:c697575d0e2b0a5f0433f679bda22f63873821d991e95a90e9e52aae517b2e32"}, + {file = "SQLAlchemy-2.0.43-cp37-cp37m-win_amd64.whl", hash = "sha256:d34c0f6dbefd2e816e8f341d0df7d4763d382e3f452423e752ffd1e213da2512"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00"}, + {file = "sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921"}, + {file = "sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d"}, + {file = "sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = 
"sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d"}, + {file = "sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e6aeb2e0932f32950cf56a8b4813cb15ff792fc0c9b3752eaf067cfe298496a"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f964a05356f4bca4112e6334ed7c208174511bd56e6b8fc86dad4d024d4185"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46293c39252f93ea0910aababa8752ad628bcce3a10d3f260648dd472256983f"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:136063a68644eca9339d02e6693932116f6a8591ac013b0014479a1de664e40a"}, + {file = 
"sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6e2bf13d9256398d037fef09fd8bf9b0bf77876e22647d10761d35593b9ac547"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:44337823462291f17f994d64282a71c51d738fc9ef561bf265f1d0fd9116a782"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win32.whl", hash = "sha256:13194276e69bb2af56198fef7909d48fd34820de01d9c92711a5fa45497cc7ed"}, + {file = "sqlalchemy-2.0.43-cp38-cp38-win_amd64.whl", hash = "sha256:334f41fa28de9f9be4b78445e68530da3c5fa054c907176460c81494f4ae1f5e"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb5c832cc30663aeaf5e39657712f4c4241ad1f638d487ef7216258f6d41fe7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11f43c39b4b2ec755573952bbcc58d976779d482f6f832d7f33a8d869ae891bf"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c379e37b08c6c527181a397212346be39319fb64323741d23e46abd97a400d34"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8cee08f15d9e238ede42e9bbc1d6e7158d0ca4f176e4eab21f88ac819ae3bd7b"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414"}, + {file = "sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b"}, + {file = "sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc"}, + {file = "sqlalchemy-2.0.43.tar.gz", hash = 
"sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417"}, +] + +[package.dependencies] +greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] 
sqlcipher = ["sqlcipher3_binary"] [[package]] name = "strict-rfc3339" version = "0.7" description = "Strict, simple, lightweight RFC3339 functions" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, ] @@ -3123,9 +3048,9 @@ files = [ name = "structlog" version = "24.4.0" description = "Structured Logging for Python" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"}, {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"}, @@ -3141,9 +3066,9 @@ typing = ["mypy (>=1.4)", "rich", "twisted"] name = "structured-logging" version = "0.4.0" description = "" -category = "main" optional = false python-versions = "^3.9" +groups = ["main"] files = [] develop = false @@ -3162,9 +3087,9 @@ subdirectory = "python/structured-logging" name = "swagger-client" version = "1.0.0" description = "Synonyms API" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [] develop = false @@ -3184,9 +3109,9 @@ resolved_reference = "2e2f45bd733c544e9f87650819712c02586f7be1" name = "threadloop" version = "1.0.2" description = "Tornado IOLoop Backed Concurrent Futures" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, @@ -3199,9 +3124,9 @@ tornado = "*" name = "thrift" version = "0.20.0" description = "Python bindings for the Apache Thrift RPC system" -category = "main" optional = false python-versions = "*" 
+groups = ["main"] files = [ {file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"}, ] @@ -3218,9 +3143,9 @@ twisted = ["twisted"] name = "toolz" version = "0.12.1" description = "List processing tools and functional utilities" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, @@ -3230,9 +3155,9 @@ files = [ name = "tornado" version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "main" optional = false python-versions = ">= 3.8" +groups = ["main"] files = [ {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, @@ -3251,9 +3176,9 @@ files = [ name = "tqdm" version = "4.66.4" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, @@ -3272,9 +3197,9 @@ telegram = ["requests"] name = "typing-extensions" version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = 
"sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, @@ -3284,26 +3209,26 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["main", "dev"] files = [ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "werkzeug" version = "3.0.3" description = "The comprehensive WSGI web application library." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, @@ -3319,9 +3244,9 @@ watchdog = ["watchdog (>=2.3)"] name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" -category = "main" optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, @@ -3331,9 +3256,9 @@ files = [ name = "yarl" version = "1.9.4" description = "Yet another URL library" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, @@ -3435,9 +3360,9 @@ multidict = ">=4.0" name = "zipp" version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, @@ -3445,9 +3370,9 @@ files = [ [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", 
"jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy ; platform_python_implementation != \"PyPy\"", "pytest-ruff (>=0.2.1)"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.12,<3.13" -content-hash = "0c445a7698ebe8bb848ad4feb19df220018383a0def51821b9bf0b21a21df2bd" +content-hash = "9d681ded18610936db40156072434a9db91879cb49c124d45d959a104f8f553a" diff --git a/api/pyproject.toml b/api/pyproject.toml index 59f68cd9a..f609da487 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -8,12 +8,13 @@ readme = "README.md" [tool.poetry.dependencies] python = ">=3.12,<3.13" -gcp_queue = { git = "https://github.com/bcgov/namex.git", subdirectory = "services/pubsub" } +gcp_queue = { git = "https://github.com/bcgov/sbc-connect-common.git", subdirectory = "python/gcp-queue", branch = "main" } swagger_client = { git = "https://github.com/bcgov/namex-synonyms-api-py-client.git" } -simple_cloudevent = { git = "https://github.com/daxiom/simple-cloudevent.py" } flask-jwt-oidc = { git = "https://github.com/seeker25/flask-jwt-oidc.git" } sbc-common-components = { git = "https://github.com/bcgov/sbc-common-components.git", subdirectory = "python" } structured_logging = { git = "https://github.com/bcgov/sbc-connect-common.git", rev = "7f1cc0ea4a374310ac558ff435fa6b7ea7bb2f8b", subdirectory = "python/structured-logging" } +cloud-sql-connector = { git = "https://github.com/bcgov/sbc-connect-common.git", subdirectory = "python/cloud-sql-connector", branch = "main" } + gunicorn = "^20.1.0" flask-cors = "^4.0.0" @@ -29,10 +30,8 @@ markupSafe = "^2.1.1" itsdangerous = "^2.1.2" werkzeug = "^3.0.0" protobuf = 
"^3.20.1" -SQLAlchemy = "^1.4.18" marshmallow-sqlalchemy = "^0.28.1" marshmallow = "^3.18.0" -pandas = "^1.5.0" inflect = "^6.0.0" Mako = "^1.1.4" alembic = "^1.5.8" @@ -60,7 +59,6 @@ numpy = "^1.26.4" packaging = "^21.3" pkgutil_resolve_name = "^1.3.10" pronouncing = "^0.2.0" -psycopg2-binary = "^2.9.4" pyasn1 = "^0.4.8" pycountry = "^22.3.5" pydantic = "^1.10.2" diff --git a/api/tests/conftest.py b/api/tests/conftest.py index 364066ced..e4988246e 100644 --- a/api/tests/conftest.py +++ b/api/tests/conftest.py @@ -11,6 +11,7 @@ from namex import create_app from namex import jwt as _jwt +from namex.models import db from namex.models import db as _db from .python import FROZEN_DATETIME @@ -79,23 +80,25 @@ def db(app, request): # Clear out any existing tables metadata = MetaData() metadata.reflect(bind=_db.engine) - for table in metadata.tables.values(): - for fk in table.foreign_keys: - _db.engine.execute(DropConstraint(fk.constraint)) + with _db.engine.connect() as connection: + for table in metadata.tables.values(): + for fk in table.foreign_keys: + connection.execute(DropConstraint(fk.constraint)) with suppress(Exception): metadata.drop_all(bind=_db.engine) with suppress(Exception): _db.drop_all() - sequence_sql = """SELECT sequence_name FROM information_schema.sequences - WHERE sequence_schema='public' + sequence_sql = f"""SELECT sequence_name FROM information_schema.sequences + WHERE sequence_schema='{app.config.get("DB_SCHEMA", "public")}' """ sess = _db.session() for seq in [name for (name,) in sess.execute(text(sequence_sql))]: try: - sess.execute(text('DROP SEQUENCE public.%s ;' % seq)) - print('DROP SEQUENCE public.%s ' % seq) + schema = app.config.get('DB_SCHEMA', 'public') + sess.execute(text(f'DROP SEQUENCE {schema}.{seq} ;')) + print(f'DROP SEQUENCE {schema}.{seq} ') except Exception as e: print('Error: {}'.format(e)) sess.commit() @@ -118,39 +121,61 @@ def db(app, request): @pytest.fixture(scope='function', autouse=True) def session(app, db, request): 
""" - Returns function-scoped session. + Returns function-scoped session with proper transaction isolation for pg8000. """ with app.app_context(): - conn = db.engine.connect() - txn = conn.begin() + # Create a new connection and transaction for each test + connection = db.engine.connect() + transaction = connection.begin() - options = dict(bind=conn, binds={}) - sess = db._make_scoped_session(options=options) + # Create a nested transaction (savepoint) for isolation + nested_transaction = connection.begin_nested() - # establish a SAVEPOINT just before beginning the test - # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint) - sess.begin_nested() + # Configure session to use this connection + session_options = dict(bind=connection, binds={}) + test_session = db._make_scoped_session(options=session_options) - @event.listens_for(sess(), 'after_transaction_end') - def restart_savepoint(sess2, trans): - # Detecting whether this is indeed the nested transaction of the test - if trans.nested and not trans._parent.nested: - # Handle where test DOESN'T session.commit(), - sess2.expire_all() - sess.begin_nested() + # Store original session and methods + original_session = db.session + original_commit = test_session.commit + original_rollback = test_session.rollback - db.session = sess + # Override commit to only flush, not actually commit + def patched_commit(): + test_session.flush() - sql = text('select 1') - sess.execute(sql) + # Override rollback to rollback to savepoint + def patched_rollback(): + nonlocal nested_transaction + if nested_transaction.is_active: + nested_transaction.rollback() + nested_transaction = connection.begin_nested() + else: + nested_transaction = connection.begin_nested() - yield sess + # Apply patches + test_session.commit = patched_commit + test_session.rollback = patched_rollback - # Cleanup - sess.remove() - # This instruction rollsback any commit that were executed in the tests. 
- txn.rollback() - conn.close() + # Replace the global session + db.session = test_session + + # Test the connection + db.session.execute(text('SELECT 1')) + + yield db.session + + # Cleanup: restore everything and rollback + test_session.commit = original_commit + test_session.rollback = original_rollback + db.session = original_session + test_session.remove() + + # Rollback all changes + if nested_transaction.is_active: + nested_transaction.rollback() + transaction.rollback() + connection.close() @pytest.fixture(autouse=True) @@ -169,10 +194,21 @@ def mock_auth_affiliation(): Prevents failures in CI or local testing environments that don't have access to real Auth API credentials. """ - def _mock(nr_num='NR 123456', org_id='1234'): - escaped_nr = urllib.parse.quote(nr_num) - mocked_auth_url = f'https://mock-auth-api/api/v1/orgs/{org_id}/affiliations/{escaped_nr}' - responses.add(responses.GET, mocked_auth_url, json={}, status=200) + def _mock(nr_num=None, org_id='1234'): + if nr_num: + # Mock specific NR number if provided + escaped_nr = urllib.parse.quote(nr_num) + mocked_auth_url = f'https://mock-auth-api/api/v1/orgs/{org_id}/affiliations/{escaped_nr}' + responses.add(responses.GET, mocked_auth_url, json={}, status=200) + else: + # Mock any NR number with regex pattern + import re + responses.add( + responses.GET, + re.compile(r'https://mock-auth-api/api/v1/orgs/\d+/affiliations/NR%20\d+'), + json={}, + status=200 + ) return _mock @@ -186,3 +222,138 @@ def mock_gcp_queue_publish(): with patch('namex.utils.queue_util.queue.publish') as mock_publish: mock_publish.return_value = None yield mock_publish + + +# ============================================================================ +# TEST DATA ISOLATION FIXTURES +# ============================================================================ + +@pytest.fixture +def test_data_factory(): + """ + Provides a TestDataFactory instance for creating unique test data. 
+ This ensures every test gets unique data and prevents conflicts. + """ + from .fixtures.test_data_factory import TestDataFactory + return TestDataFactory() + + +@pytest.fixture +def test_nr_builder(test_data_factory): + """ + Provides a TestNameRequestBuilder for creating test name requests. + Automatically handles user creation and data uniqueness. + """ + from .fixtures.test_data_factory import TestNameRequestBuilder + return TestNameRequestBuilder(test_data_factory) + + +@pytest.fixture +def unique_user(test_data_factory): + """ + Creates a unique test user for each test. + The user is automatically cleaned up by the session fixture's transaction rollback. + """ + return test_data_factory.create_test_user(commit=False) + + +@pytest.fixture +def unique_user_committed(test_data_factory): + """ + Creates a unique test user and commits it to the database. + Use this when you need the user to be available across multiple operations. + """ + user = test_data_factory.create_test_user(commit=True) + db.session.commit() + return user + + +@pytest.fixture +def unique_draft_nr_data(): + """ + Provides unique name request data for API calls. + Each test gets completely unique data to prevent conflicts. 
+ """ + import random + import uuid + + unique_id = uuid.uuid4().hex[:8] + unique_num = random.randint(1000, 9999) + + return { + 'applicants': [ + { + 'addrLine1': f'{random.randint(100, 999)}-{random.randint(1000, 9999)} Test Blvd', + 'addrLine2': None, + 'addrLine3': None, + 'city': 'Victoria', + 'clientFirstName': None, + 'clientLastName': None, + 'contact': '', + 'countryTypeCd': 'CA', + 'declineNotificationInd': None, + 'emailAddress': f'test{unique_id}@example.com', + 'faxNumber': None, + 'firstName': 'John', + 'lastName': f'Doe{unique_num}', + 'middleName': None, + 'partyId': '', # must be empty + 'phoneNumber': f'250{random.randint(1000000, 9999999)}', + 'postalCd': 'V8W 3P6', + 'stateProvinceCd': 'BC', + } + ], + 'names': [ + { + 'choice': 1, + 'consent_words': '', + 'conflict1': '', + 'conflict1_num': '', + 'designation': 'CORP.', + 'name': f'TESTING CORP {unique_id.upper()}.', + 'name_type_cd': 'CO', + } + ], + 'additionalInfo': f'*** Additional Info for test {unique_id} ***', + 'natureBusinessInfo': f'Test business {unique_id}', + 'priorityCd': 'N', + 'entity_type_cd': '', + 'request_action_cd': '', + 'stateCd': 'DRAFT', + 'english': True, + 'nameFlag': False, + 'submit_count': 0, + 'corpNum': '', + 'homeJurisNum': '', + } + + +@pytest.fixture +def draft_nr_with_user(test_nr_builder, unique_user): + """ + Creates a complete draft name request with associated user in the database. + All data is unique and isolated per test. + """ + return test_nr_builder.with_user(unique_user).create_draft_nr() + + +@pytest.fixture +def clean_database_state(session): + """ + Ensures clean database state by checking for any test pollution. + This fixture runs after each test to verify isolation is working. 
+ """ + yield + + # Verify no test pollution remains (optional - can be disabled for performance) + # This helps catch isolation issues during development + from sqlalchemy import text + + # Check for any uncommitted data that might leak between tests + result = session.execute(text("SELECT COUNT(*) FROM users WHERE username LIKE 'test_%'")) + test_user_count = result.scalar() + + if test_user_count > 0: + # This indicates test data might be leaking - but it's expected in a transaction + # The session fixture should clean this up with rollback + pass diff --git a/api/tests/fixtures/__init__.py b/api/tests/fixtures/__init__.py new file mode 100644 index 000000000..c4a2f19c4 --- /dev/null +++ b/api/tests/fixtures/__init__.py @@ -0,0 +1 @@ +# Fixtures package diff --git a/api/tests/fixtures/test_data_factory.py b/api/tests/fixtures/test_data_factory.py new file mode 100644 index 000000000..d60cc8049 --- /dev/null +++ b/api/tests/fixtures/test_data_factory.py @@ -0,0 +1,215 @@ +""" +Centralized test data factory for creating unique test data across all tests. +This ensures test isolation by generating unique identifiers for all shared resources. 
+""" +import random +import string +import uuid +from datetime import datetime +from typing import Any, Dict, Optional + +from namex.models import Applicant, Name, Request, State, User, db + + +class TestDataFactory: + """Factory for creating unique test data that prevents conflicts between tests.""" + + @staticmethod + def generate_unique_id(prefix: str = '') -> str: + """Generate a unique identifier.""" + return f'{prefix}{uuid.uuid4().hex[:8]}' + + @staticmethod + def generate_unique_username() -> str: + """Generate a unique username for test users.""" + return f'test_user_{uuid.uuid4().hex[:8]}' + + @staticmethod + def generate_unique_sub() -> str: + """Generate a unique sub identifier for test users.""" + return f'idir/test_{uuid.uuid4().hex[:8]}' + + @staticmethod + def generate_unique_nr_num() -> str: + """Generate a unique NR number for testing.""" + return f'NR {random.randint(1000000, 9999999)}' + + @staticmethod + def generate_unique_name(suffix: str = 'LTD') -> str: + """Generate a unique company name.""" + unique_part = ''.join(random.choices(string.ascii_uppercase, k=6)) + return f'TEST COMPANY {unique_part} {suffix}' + + @classmethod + def create_test_user(cls, username: Optional[str] = None, commit: bool = True) -> User: + """Create a unique test user.""" + user = User( + username=username or cls.generate_unique_username(), + firstname='Test', + lastname='User', + sub=cls.generate_unique_sub(), + iss='keycloak', + idp_userid=cls.generate_unique_id(), + login_source='IDIR' + ) + + if commit: + db.session.add(user) + db.session.flush() # Get the ID without committing + + return user + + @classmethod + def create_test_nr_data(cls, **overrides) -> Dict[str, Any]: + """Create unique name request data.""" + unique_suffix = cls.generate_unique_id() + + base_data = { + 'additionalInfo': '', + 'consentFlag': None, + 'consent_dt': None, + 'corpNum': '', + 'entity_type_cd': 'CR', + 'expirationDate': None, + 'furnished': 'N', + 'hasBeenReset': False, + 
'natureBusinessInfo': f'Test business {unique_suffix}', + 'priorityCd': 'N', + 'requestTypeCd': 'CR', + 'request_action_cd': 'NEW', + 'submitCount': 1, + 'submitter_userid': cls.generate_unique_username(), + 'userId': cls.generate_unique_username(), + 'xproJurisdiction': '', + 'names': [ + { + 'name': cls.generate_unique_name(), + 'choice': 1, + 'designation': 'LTD', + 'name_type_cd': 'CO', + 'consent_words': '', + 'conflict1': '', + 'conflict2': '', + 'conflict3': '' + } + ], + 'applicants': { + 'firstName': 'John', + 'lastName': f'Doe{unique_suffix[:6]}', + 'addrLine1': f'{random.randint(100, 999)} Test St', + 'city': 'Victoria', + 'stateProvinceCd': 'BC', + 'countryCd': 'CA', + 'postalCd': 'V8W 3P6', + 'phoneNumber': f'250-{random.randint(100, 999)}-{random.randint(1000, 9999)}', + 'emailAddress': f'test{unique_suffix}@example.com' + } + } + + # Apply any overrides + base_data.update(overrides) + return base_data + + +class TestNameRequestBuilder: + """Builder pattern for creating test name requests with proper isolation.""" + + def __init__(self, factory: TestDataFactory = None): + self.factory = factory or TestDataFactory() + self.data = self.factory.create_test_nr_data() + self.user = None + + def with_user(self, user: User = None) -> 'TestNameRequestBuilder': + """Set or create a user for this name request.""" + self.user = user or self.factory.create_test_user() + self.data['submitter_userid'] = self.user.username + self.data['userId'] = self.user.username + return self + + def with_entity_type(self, entity_type: str) -> 'TestNameRequestBuilder': + """Set the entity type.""" + self.data['entity_type_cd'] = entity_type + return self + + def with_request_action(self, action: str) -> 'TestNameRequestBuilder': + """Set the request action.""" + self.data['request_action_cd'] = action + return self + + def with_names(self, names: list) -> 'TestNameRequestBuilder': + """Set custom names.""" + if isinstance(names, list) and names: + self.data['names'] = [] + for i, 
name in enumerate(names): + if isinstance(name, str): + name_data = { + 'name': name, + 'choice': i + 1, + 'designation': 'LTD', + 'name_type_cd': 'CO', + 'consent_words': '', + 'conflict1': '', + 'conflict2': '', + 'conflict3': '' + } + else: + name_data = name + self.data['names'].append(name_data) + return self + + def build_data(self) -> Dict[str, Any]: + """Build the data dictionary.""" + return self.data.copy() + + def create_draft_nr(self) -> Request: + """Create a draft name request in the database.""" + if not self.user: + self.with_user() + + # Create the request + nr = Request() + nr.nrNum = None # Draft NRs don't have numbers yet + nr.stateCd = 'DRAFT' + nr.requestTypeCd = self.data['requestTypeCd'] + nr.request_action_cd = self.data['request_action_cd'] + nr.entity_type_cd = self.data['entity_type_cd'] + nr.natureBusinessInfo = self.data['natureBusinessInfo'] + nr.priorityCd = self.data['priorityCd'] + nr.submitCount = self.data['submitCount'] + nr.submitter_userid = self.user.username + nr.userId = self.user.id + nr.furnished = self.data['furnished'] + + db.session.add(nr) + db.session.flush() # Get the ID + + # Create applicant + applicant_data = self.data['applicants'] + applicant = Applicant() + applicant.nrId = nr.id + applicant.firstName = applicant_data['firstName'] + applicant.lastName = applicant_data['lastName'] + applicant.addrLine1 = applicant_data['addrLine1'] + applicant.city = applicant_data['city'] + applicant.stateProvinceCd = applicant_data['stateProvinceCd'] + applicant.countryCd = applicant_data['countryCd'] + applicant.postalCd = applicant_data['postalCd'] + applicant.phoneNumber = applicant_data['phoneNumber'] + applicant.emailAddress = applicant_data['emailAddress'] + + db.session.add(applicant) + db.session.flush() + + # Create names + for name_data in self.data['names']: + name = Name() + name.nrId = nr.id + name.name = name_data['name'] + name.choice = name_data['choice'] + name.designation = name_data.get('designation', '') + 
name.name_type_cd = name_data.get('name_type_cd', 'CO') + + db.session.add(name) + + db.session.flush() + return nr diff --git a/api/tests/python/end_points/name_requests/test_nr_actions.py b/api/tests/python/end_points/name_requests/test_nr_actions.py index ae9f13b1c..5032d46b8 100644 --- a/api/tests/python/end_points/name_requests/test_nr_actions.py +++ b/api/tests/python/end_points/name_requests/test_nr_actions.py @@ -22,9 +22,9 @@ # from ..common import token_header, claims from ..common.http import build_request_uri, build_test_query, get_test_headers from ..common.logging import log_request_path -from ..name_requests.test_setup_utils.test_helpers import add_test_user_to_db from .configuration import API_BASE_URI from .test_setup_utils.test_helpers import ( + add_test_user_to_db, assert_applicant_is_mapped_correctly, assert_names_are_mapped_correctly, create_approved_nr, @@ -38,6 +38,12 @@ # Define our data # Check NR number is the same because these are PATCH and call change_nr def build_test_input_fields(): + """Build test input fields with unique data to prevent conflicts.""" + import random + import uuid + + unique_id = uuid.uuid4().hex[:8] + return { 'additionalInfo': '', 'consentFlag': None, @@ -47,24 +53,13 @@ def build_test_input_fields(): 'expirationDate': None, 'furnished': 'N', 'hasBeenReset': False, - # 'lastUpdate': None, - 'natureBusinessInfo': 'Test', - # 'nrNum': '', - # 'nwpta': '', - # 'previousNr': '', - # 'previousRequestId': '', - # 'previousStateCd': '', + 'natureBusinessInfo': f'Test business {unique_id}', 'priorityCd': 'N', - # 'priorityDate': None, 'requestTypeCd': 'CR', 'request_action_cd': 'NEW', - # 'source': 'NAMEREQUEST', - # 'state': 'DRAFT', - # 'stateCd': 'DRAFT', 'submitCount': 1, - # 'submittedDate': None, - 'submitter_userid': 'name_request_service_account', - 'userId': 'name_request_service_account', + 'submitter_userid': f'test_user_{unique_id}', + 'userId': f'test_user_{unique_id}', 'xproJurisdiction': '', } @@ -915,18 
+910,20 @@ def mock_publish(topic: str, payload: bytes): ('Resubmit BC', 'RESUBMIT', EntityTypes.BENEFIT_COMPANY.value), ], ) -def test_temp_nr(client, test_name, request_action_cd, entity_type_cd): +def test_temp_nr(client, test_name, request_action_cd, entity_type_cd, unique_draft_nr_data): """ - Test temp NRs + Test temp NRs - now uses fixture for unique data isolation """ - draft_input_fields['request_action_cd'] = request_action_cd - draft_input_fields['entity_type_cd'] = entity_type_cd + # Use unique data from fixture instead of global shared data + test_input_fields = unique_draft_nr_data.copy() + test_input_fields['request_action_cd'] = request_action_cd + test_input_fields['entity_type_cd'] = entity_type_cd add_test_user_to_db() path = build_request_uri(API_BASE_URI, '') headers = get_test_headers() - post_response = client.post(path, data=json.dumps(draft_input_fields), headers=headers) + post_response = client.post(path, data=json.dumps(test_input_fields), headers=headers) draft_nr = json.loads(post_response.data) assert draft_nr['id'] > 0 diff --git a/api/tests/python/end_points/name_requests/test_nr_response.py b/api/tests/python/end_points/name_requests/test_nr_response.py index 36c1cbdf6..40b25c178 100644 --- a/api/tests/python/end_points/name_requests/test_nr_response.py +++ b/api/tests/python/end_points/name_requests/test_nr_response.py @@ -70,7 +70,7 @@ def test_draft_response(priorityCd, queue_time_returned, status_cd, client, jwt, assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() # Mock any NR number get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None diff --git a/api/tests/python/end_points/name_requests/test_nr_response_actions.py b/api/tests/python/end_points/name_requests/test_nr_response_actions.py index 2df993f8e..154e208c2 100644 --- a/api/tests/python/end_points/name_requests/test_nr_response_actions.py +++ 
b/api/tests/python/end_points/name_requests/test_nr_response_actions.py @@ -67,7 +67,7 @@ def test_draft_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -117,7 +117,7 @@ def test_approved_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -163,7 +163,7 @@ def test_approved_and_expired_response_actions(client, jwt, app, mock_auth_affil assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -207,7 +207,7 @@ def test_conditional_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -253,7 +253,7 @@ def test_conditional_and_expired_response_actions(client, jwt, app, mock_auth_af assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -312,7 +312,7 @@ def test_consumed_and_conditional_response_actions(client, jwt, app, mock_auth_a assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, 
jwt) nr = json.loads(get_response.data) assert nr is not None @@ -370,7 +370,7 @@ def test_consumed_and_approved_response_actions(client, jwt, app, mock_auth_affi assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -413,7 +413,7 @@ def test_rejected_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -455,7 +455,7 @@ def test_historical_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -498,7 +498,7 @@ def test_hold_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -535,7 +535,7 @@ def test_inprogress_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = json.loads(get_response.data) assert nr is not None @@ -572,7 +572,7 @@ def test_cancelled_response_actions(client, jwt, app, mock_auth_affiliation): assert test_nr is not None # Grab the record using the API - mock_auth_affiliation(nr_num='NR 123456') + mock_auth_affiliation() get_response = get_nr(client, test_nr.id, jwt) nr = 
json.loads(get_response.data) assert nr is not None diff --git a/api/tests/python/end_points/name_requests/test_setup_utils/test_helpers.py b/api/tests/python/end_points/name_requests/test_setup_utils/test_helpers.py index b3dc3d41c..c014aedd2 100644 --- a/api/tests/python/end_points/name_requests/test_setup_utils/test_helpers.py +++ b/api/tests/python/end_points/name_requests/test_setup_utils/test_helpers.py @@ -62,7 +62,6 @@ ] -@pytest.mark.skip def assert_names_are_mapped_correctly(req_names, res_names): print('\n-------- Test names --------\n') for req_name in req_names: @@ -88,7 +87,6 @@ def assert_names_are_mapped_correctly(req_names, res_names): print('\n-------- Test names complete --------\n') -@pytest.mark.skip def assert_applicant_is_mapped_correctly(req_applicant, res_applicant): print('\n-------- Test applicant --------\n') print('\nCompare request applicant: \n' + repr(req_applicant) + '\n') @@ -104,17 +102,29 @@ def assert_applicant_is_mapped_correctly(req_applicant, res_applicant): print('\n-------- Test applicant complete --------\n') -@pytest.mark.skip def add_states_to_db(states): + """Add states to database only if they don't already exist.""" for code, desc in states: - state = State(cd=code, description=desc) - state.save_to_db() + # Check if state already exists (migrations may have loaded it) + existing_state = State.query.filter_by(cd=code).first() + if not existing_state: + state = State(cd=code, description=desc) + state.save_to_db() -@pytest.mark.skip def add_test_user_to_db(): + """Create or get the service account user expected by the API.""" + # The API expects this specific username + expected_username = 'name_request_service_account' + + # Check if user already exists + existing_user = User.query.filter_by(username=expected_username).first() + if existing_user: + return existing_user + + # Create the expected service account user user = User( - username='name_request_service_account', + username=expected_username, firstname='Test', 
lastname='User', sub='idir/name_request_service_account', @@ -122,32 +132,35 @@ def add_test_user_to_db(): idp_userid='123', login_source='IDIR', ) - user.save_to_db() - return user + try: + user.save_to_db() + return user + except Exception: + # If there's a conflict, try to fetch the existing user again + # This can happen in concurrent test runs + existing_user = User.query.filter_by(username=expected_username).first() + if existing_user: + return existing_user + raise -@pytest.mark.skip def create_approved_nr(client, nr_data=None): return create_test_nr(nr_data, State.APPROVED) -@pytest.mark.skip def create_cancelled_nr(client, nr_data=None): return create_test_nr(nr_data, State.CANCELLED) -@pytest.mark.skip def create_expired_nr(client, nr_data=None): return create_test_nr(nr_data, State.EXPIRED) -@pytest.mark.skip def create_consumed_nr(client, nr_data=None): return create_test_nr(nr_data, State.CONSUMED) -@pytest.mark.skip def create_draft_nr(client, nr_data=None, use_api=True): """ You can optionally set the use_api param to False to create an NR using model persistence as opposed to the API! @@ -162,7 +175,6 @@ def create_draft_nr(client, nr_data=None, use_api=True): return create_test_nr(nr_data, State.DRAFT) -@pytest.mark.skip def create_test_nr(nr_data=None, nr_state=State.DRAFT): """ Create a draft NR and persist (NOT using the API) to use as the initial state for each test. 
@@ -198,7 +210,10 @@ def create_test_nr(nr_data=None, nr_state=State.DRAFT): nr.activeUser = user nr.submitter = user nr.submitter_userid = user.id - nr.nrNum = 'NR 123456' + # Generate unique NR number using timestamp to prevent conflicts (max 10 chars) + import time + unique_suffix = str(int(time.time()))[-6:] # Last 6 digits of timestamp + nr.nrNum = f'NR {unique_suffix}' nr.save_to_db() @@ -207,7 +222,6 @@ def create_test_nr(nr_data=None, nr_state=State.DRAFT): print(repr(err)) -@pytest.mark.skip def post_test_nr(client, nr_data=None, nr_state=State.DRAFT): """ Create a draft NR, using the API, to use as the initial state for each test. diff --git a/api/tests/python/end_points/payments/test_payments.py b/api/tests/python/end_points/payments/test_payments.py index e9f5341a9..f0fab4fc9 100644 --- a/api/tests/python/end_points/payments/test_payments.py +++ b/api/tests/python/end_points/payments/test_payments.py @@ -546,7 +546,7 @@ def test_create_payment( do_refund, cancel_payment, request_receipt, - mocker + mocker, ): # stub out the internal create_payment helper so endpoint returns 201 mock_create_payment.return_value = PaymentInvoice( @@ -559,7 +559,7 @@ def test_create_payment( statusCode='CREATED', businessIdentifier='NR L000001', lineItems=[{'filingTypeCode': 'NM620', 'priority': False, 'waiveFees': False}], - references=[] + references=[], ) topics = [] @@ -673,8 +673,10 @@ def get_mock_logger(logs): class DummyLogger: def error(self, msg): logs.append(msg) + def warning(self, msg): logs.append(msg) + return DummyLogger() diff --git a/api/tests/python/end_points/statistics/test_statistics.py b/api/tests/python/end_points/statistics/test_statistics.py index b73c042e2..6fcd4b169 100644 --- a/api/tests/python/end_points/statistics/test_statistics.py +++ b/api/tests/python/end_points/statistics/test_statistics.py @@ -59,13 +59,16 @@ def test_get_statistics_wait_time(client, jwt, app, oldest_draft_nr_date, todays cache.clear() # Mock out 
Request.get_waiting_time to simulate wait time calculation for regular and priority queues - with patch.object(Request, 'get_oldest_draft') as mock_get_oldest_draft, \ - patch.object(Request, 'get_waiting_time') as mock_get_waiting_time: + with ( + patch.object(Request, 'get_oldest_draft') as mock_get_oldest_draft, + patch.object(Request, 'get_waiting_time') as mock_get_waiting_time, + ): mock_get_oldest_draft.return_value = MagicMock(submittedDate=oldest_draft_nr_dt) mock_get_waiting_time.side_effect = lambda priority_queue: expected_wait_days if not priority_queue else 0 response = client.get(request_uri) payload = json.loads(response.data) assert payload assert isinstance(payload.get('regular_wait_time'), int) - assert payload['regular_wait_time'] == expected_wait_days, \ - f"[ASSERT FAILED] Expected {expected_wait_days} but got {payload['regular_wait_time']}" + assert payload['regular_wait_time'] == expected_wait_days, ( + f'[ASSERT FAILED] Expected {expected_wait_days} but got {payload["regular_wait_time"]}' + ) diff --git a/api/tests/python/end_points/test_requests.py b/api/tests/python/end_points/test_requests.py index 329540178..6ef068cfb 100644 --- a/api/tests/python/end_points/test_requests.py +++ b/api/tests/python/end_points/test_requests.py @@ -3,25 +3,12 @@ from flask import json, jsonify -from namex.models import ( - Applicant as ApplicantDAO, -) -from namex.models import ( - Comment as CommentDAO, -) -from namex.models import ( - Event as EventDAO, -) -from namex.models import ( - Name as NameDAO, -) -from namex.models import ( - Request as RequestDAO, -) -from namex.models import ( - State, - User, -) +from namex.models import Applicant as ApplicantDAO +from namex.models import Comment as CommentDAO +from namex.models import Event as EventDAO +from namex.models import Name as NameDAO +from namex.models import Request as RequestDAO +from namex.models import State, User from .. 
import integration_oracle_namesdb from ..end_points.util import create_header diff --git a/api/tests/python/models/test_request.py b/api/tests/python/models/test_request.py index 910b6bcb7..c1614a246 100644 --- a/api/tests/python/models/test_request.py +++ b/api/tests/python/models/test_request.py @@ -114,8 +114,16 @@ def test_name_search_populated_by_name(): name.name = 'CHANGED' name.save_to_db() + # Ensure the session is flushed and clear any cached data + from namex.models import db + db.session.flush() + db.session.expunge_all() # Clear all objects from the session + + # refresh the request object to get updated nameSearch + test_updated = RequestDAO.find_by_id(nr.id) + # check nameSearch - assert nr.nameSearch == '(|1CHANGED1|)' + assert test_updated.nameSearch == '(|1CHANGED1|)' def test_has_consumed_name(): diff --git a/api/tests/python/services/word_classification/test_token_classifier.py b/api/tests/python/services/word_classification/test_token_classifier.py new file mode 100644 index 000000000..6fa5c5725 --- /dev/null +++ b/api/tests/python/services/word_classification/test_token_classifier.py @@ -0,0 +1,271 @@ +"""Unit tests for TokenClassifier and classifications_to_lists function.""" + +from unittest.mock import Mock, patch + +import pytest + +from namex.services.word_classification.token_classifier import ( + DataFrameFields, + TokenClassifier, + classifications_to_lists, +) + + +class TestClassificationsToLists: + """Test the classifications_to_lists function for word classification processing.""" + + def test_classifications_to_lists_empty_input(self): + """Test with empty classifications list.""" + result = classifications_to_lists([]) + distinctive, descriptive, unclassified = result + + assert distinctive == [] + assert descriptive == [] + assert unclassified == [] + + def test_classifications_to_lists_single_distinctive(self): + """Test with single distinctive word.""" + classifications = [ + {'word': 'tech', 'word_classification': 
DataFrameFields.DISTINCTIVE.value} + ] + + distinctive, descriptive, unclassified = classifications_to_lists(classifications) + + assert distinctive == ['tech'] + assert descriptive == [] + assert unclassified == [] + + def test_classifications_to_lists_single_descriptive(self): + """Test with single descriptive word.""" + classifications = [ + {'word': 'amazing', 'word_classification': DataFrameFields.DESCRIPTIVE.value} + ] + + distinctive, descriptive, unclassified = classifications_to_lists(classifications) + + assert distinctive == [] + assert descriptive == ['amazing'] + assert unclassified == [] + + def test_classifications_to_lists_single_unclassified(self): + """Test with single unclassified word.""" + classifications = [ + {'word': 'unknown', 'word_classification': DataFrameFields.UNCLASSIFIED.value} + ] + + distinctive, descriptive, unclassified = classifications_to_lists(classifications) + + assert distinctive == [] + assert descriptive == [] + assert unclassified == ['unknown'] + + def test_classifications_to_lists_mixed_classifications(self): + """Test with multiple words of different classifications.""" + classifications = [ + {'word': 'amazing', 'word_classification': DataFrameFields.DESCRIPTIVE.value}, + {'word': 'tech', 'word_classification': DataFrameFields.DISTINCTIVE.value}, + {'word': 'solutions', 'word_classification': DataFrameFields.DESCRIPTIVE.value}, + {'word': 'unknown', 'word_classification': DataFrameFields.UNCLASSIFIED.value}, + {'word': 'innovative', 'word_classification': DataFrameFields.DISTINCTIVE.value}, + ] + + distinctive, descriptive, unclassified = classifications_to_lists(classifications) + + assert distinctive == ['tech', 'innovative'] + assert descriptive == ['amazing', 'solutions'] + assert unclassified == ['unknown'] + + def test_classifications_to_lists_preserves_order(self): + """Test that order is preserved within each classification type.""" + classifications = [ + {'word': 'first', 'word_classification': 
DataFrameFields.DESCRIPTIVE.value}, + {'word': 'second', 'word_classification': DataFrameFields.DESCRIPTIVE.value}, + {'word': 'third', 'word_classification': DataFrameFields.DESCRIPTIVE.value}, + ] + + distinctive, descriptive, unclassified = classifications_to_lists(classifications) + + assert descriptive == ['first', 'second', 'third'] + + def test_classifications_to_lists_duplicate_words(self): + """Test handling of duplicate words with same classification.""" + classifications = [ + {'word': 'tech', 'word_classification': DataFrameFields.DISTINCTIVE.value}, + {'word': 'tech', 'word_classification': DataFrameFields.DISTINCTIVE.value}, + ] + + distinctive, descriptive, unclassified = classifications_to_lists(classifications) + + assert distinctive == ['tech', 'tech'] # Should preserve duplicates + + def test_classifications_to_lists_case_sensitivity(self): + """Test that word case is preserved.""" + classifications = [ + {'word': 'Tech', 'word_classification': DataFrameFields.DISTINCTIVE.value}, + {'word': 'AMAZING', 'word_classification': DataFrameFields.DESCRIPTIVE.value}, + ] + + distinctive, descriptive, unclassified = classifications_to_lists(classifications) + + assert distinctive == ['Tech'] + assert descriptive == ['AMAZING'] + + +class TestTokenClassifier: + """Test the TokenClassifier class, focusing on the _classify_tokens method.""" + + def setup_method(self): + """Set up test fixtures.""" + self.mock_word_classification_service = Mock() + self.classifier = TokenClassifier(self.mock_word_classification_service) + + def test_init(self): + """Test TokenClassifier initialization.""" + assert self.classifier.word_classification_service == self.mock_word_classification_service + assert self.classifier.distinctive_word_tokens == [] + assert self.classifier.descriptive_word_tokens == [] + assert self.classifier.unclassified_word_tokens == [] + + def test_classify_tokens_empty_input(self): + """Test _classify_tokens with empty word list.""" + 
self.classifier._classify_tokens([]) + + assert self.classifier.distinctive_word_tokens == [] + assert self.classifier.descriptive_word_tokens == [] + assert self.classifier.unclassified_word_tokens == [] + + def test_classify_tokens_no_classification_found(self): + """Test _classify_tokens when no classification is found for words.""" + # Mock the service to return None (no classification found) + self.mock_word_classification_service.find_one.return_value = None + + words = ['unknown', 'mystery'] + self.classifier._classify_tokens(words) + + # All words should be unclassified + assert self.classifier.distinctive_word_tokens == [] + assert self.classifier.descriptive_word_tokens == [] + assert self.classifier.unclassified_word_tokens == ['unknown', 'mystery'] + + def test_classify_tokens_with_classifications(self): + """Test _classify_tokens with words that have classifications.""" + # Create mock classification objects + mock_distinctive_classification = Mock() + mock_distinctive_classification.classification = 'DIST' + + mock_descriptive_classification = Mock() + mock_descriptive_classification.classification = 'DESC' + + # Configure the mock service to return different classifications + def mock_find_one(word): + if word == 'tech': + return [mock_distinctive_classification] + elif word == 'amazing': + return [mock_descriptive_classification] + else: + return None + + self.mock_word_classification_service.find_one.side_effect = mock_find_one + + words = ['amazing', 'tech', 'unknown'] + self.classifier._classify_tokens(words) + + assert self.classifier.distinctive_word_tokens == ['tech'] + assert self.classifier.descriptive_word_tokens == ['amazing'] + assert self.classifier.unclassified_word_tokens == ['unknown'] + + def test_classify_tokens_multiple_classifications_per_word(self): + """Test _classify_tokens when a word has multiple classifications.""" + # Create mock classification objects + mock_classification_1 = Mock() + mock_classification_1.classification 
= 'DIST' + + mock_classification_2 = Mock() + mock_classification_2.classification = 'DESC' + + # Configure service to return multiple classifications for one word + self.mock_word_classification_service.find_one.return_value = [ + mock_classification_1, + mock_classification_2 + ] + + words = ['multi'] + self.classifier._classify_tokens(words) + + # The word should appear in both lists + assert self.classifier.distinctive_word_tokens == ['multi'] + assert self.classifier.descriptive_word_tokens == ['multi'] + assert self.classifier.unclassified_word_tokens == [] + + def test_classify_tokens_word_normalization(self): + """Test that words are normalized (lowercased and stripped).""" + mock_classification = Mock() + mock_classification.classification = ' DIST ' # With extra spaces + + self.mock_word_classification_service.find_one.return_value = [mock_classification] + + words = [' TECH '] # Word with spaces and uppercase + self.classifier._classify_tokens(words) + + # Should be normalized to lowercase and stripped + assert self.classifier.distinctive_word_tokens == ['tech'] + + def test_classify_tokens_classification_normalization(self): + """Test that classification values are normalized (stripped).""" + mock_classification = Mock() + mock_classification.classification = ' DIST ' # With extra spaces + + self.mock_word_classification_service.find_one.return_value = [mock_classification] + + words = ['tech'] + self.classifier._classify_tokens(words) + + # Should still be classified as distinctive despite extra spaces + assert self.classifier.distinctive_word_tokens == ['tech'] + + @patch('namex.services.word_classification.token_classifier.current_app') + def test_classify_tokens_logs_unclassified_words(self, mock_current_app): + """Test that unclassified words are logged.""" + # Mock the service to return None + self.mock_word_classification_service.find_one.return_value = None + + words = ['unknown'] + self.classifier._classify_tokens(words) + + # Verify logging 
was called + mock_current_app.logger.debug.assert_called_with('No word classification found for: unknown') + + @patch('namex.services.word_classification.token_classifier.current_app') + def test_classify_tokens_handles_exceptions(self, mock_current_app): + """Test that exceptions during classification are handled properly.""" + # Configure the mock service to raise an exception + self.mock_word_classification_service.find_one.side_effect = Exception('Database error') + + words = ['test'] + + # Should re-raise the exception + with pytest.raises(Exception, match='Database error'): + self.classifier._classify_tokens(words) + + # Should log the error + mock_current_app.logger.error.assert_called() + + def test_name_tokens_property_triggers_classification(self): + """Test that setting name_tokens triggers classification.""" + mock_classification = Mock() + mock_classification.classification = 'DIST' + self.mock_word_classification_service.find_one.return_value = [mock_classification] + + # Setting name_tokens should trigger _classify_tokens + self.classifier.name_tokens = ['tech', 'solutions'] + + assert self.classifier.name_tokens == ['tech', 'solutions'] + assert self.classifier.distinctive_word_tokens == ['tech', 'solutions'] + + def test_name_tokens_property_empty_list_no_classification(self): + """Test that setting empty name_tokens doesn't trigger classification.""" + # Setting empty list should not trigger classification + self.classifier.name_tokens = [] + + # find_one should not be called + self.mock_word_classification_service.find_one.assert_not_called() diff --git a/api/tests/python/services/word_classification/test_token_classifier_integration.py b/api/tests/python/services/word_classification/test_token_classifier_integration.py new file mode 100644 index 000000000..e7c852c15 --- /dev/null +++ b/api/tests/python/services/word_classification/test_token_classifier_integration.py @@ -0,0 +1,164 @@ +"""Integration test for word classification - testing the 
complete word classification flow.""" + +from unittest.mock import Mock + +import pytest + +from namex.services.word_classification.token_classifier import TokenClassifier + + +class TestTokenClassifierIntegration: + """Integration tests to verify the complete word classification flow works correctly.""" + + def test_full_classification_workflow(self): + """Test the complete workflow from word input to classified output.""" + # Create a mock word classification service that simulates database responses + mock_service = Mock() + + # Create mock classification objects that simulate database records + def create_mock_classification(classification_type): + mock_obj = Mock() + mock_obj.classification = classification_type + return mock_obj + + # Configure mock responses for different words + def mock_find_one(word): + word = word.lower().strip() + if word == 'tech': + return [create_mock_classification('DIST')] + elif word == 'amazing': + return [create_mock_classification('DESC')] + elif word == 'solutions': + return [create_mock_classification('DESC')] + elif word == 'innovative': + return [create_mock_classification('DIST')] + elif word == 'consulting': + # Word with multiple classifications + return [ + create_mock_classification('DIST'), + create_mock_classification('DESC') + ] + else: + return None # Unclassified + + mock_service.find_one.side_effect = mock_find_one + + # Create classifier and test the complete workflow + classifier = TokenClassifier(mock_service) + + # Test setting name_tokens which should trigger classification + test_words = ['AMAZING', ' tech ', 'Solutions', 'UnknownWord', 'INNOVATIVE', 'consulting'] + classifier.name_tokens = test_words + + # Verify the results + # Words that should be distinctive: tech, innovative, consulting (DIST classification) + expected_distinctive = {'tech', 'innovative', 'consulting'} + # Words that should be descriptive: amazing, solutions, consulting (DESC classification) + expected_descriptive = {'amazing', 
'solutions', 'consulting'} + # Words that should be unclassified: unknownword + expected_unclassified = {'unknownword'} + + assert set(classifier.distinctive_word_tokens) == expected_distinctive + assert set(classifier.descriptive_word_tokens) == expected_descriptive + assert set(classifier.unclassified_word_tokens) == expected_unclassified + + # Verify service was called for each word + assert mock_service.find_one.call_count == len(test_words) + + def test_expected_classification_behavior(self): + """Test that our implementation produces logically correct classification results.""" + # This test validates the expected behavior based on business logic + mock_service = Mock() + + # Set up mock data that represents expected classification behavior + mock_distinctive = Mock() + mock_distinctive.classification = 'DIST' + + mock_descriptive = Mock() + mock_descriptive.classification = 'DESC' + + test_data = { + 'innovative': [mock_distinctive], + 'amazing': [mock_descriptive], + 'tech': [mock_distinctive], + 'solutions': [mock_descriptive], + } + + def mock_find_one(word): + return test_data.get(word.lower().strip()) + + mock_service.find_one.side_effect = mock_find_one + + classifier = TokenClassifier(mock_service) + classifier.name_tokens = ['innovative', 'amazing', 'tech', 'solutions'] + + # Expected results based on business logic + expected_distinctive = ['innovative', 'tech'] + expected_descriptive = ['amazing', 'solutions'] + expected_unclassified = [] + + # Verify our implementation produces expected results + assert classifier.distinctive_word_tokens == expected_distinctive + assert classifier.descriptive_word_tokens == expected_descriptive + assert classifier.unclassified_word_tokens == expected_unclassified + + def test_edge_cases_handling(self): + """Test edge cases that the implementation should handle correctly.""" + mock_service = Mock() + + # Test empty input + classifier = TokenClassifier(mock_service) + classifier.name_tokens = [] + + assert 
classifier.distinctive_word_tokens == [] + assert classifier.descriptive_word_tokens == [] + assert classifier.unclassified_word_tokens == [] + + # Test all unclassified + mock_service.find_one.return_value = None + classifier.name_tokens = ['unknown1', 'unknown2'] + + assert classifier.distinctive_word_tokens == [] + assert classifier.descriptive_word_tokens == [] + assert classifier.unclassified_word_tokens == ['unknown1', 'unknown2'] + + def test_memory_efficiency(self): + """Test that our implementation is memory efficient with larger datasets.""" + mock_service = Mock() + mock_classification = Mock() + mock_classification.classification = 'DIST' + mock_service.find_one.return_value = [mock_classification] + + classifier = TokenClassifier(mock_service) + + # Test with a larger dataset + large_word_list = [f'word{i}' for i in range(100)] + classifier.name_tokens = large_word_list + + # Should complete without memory issues + assert len(classifier.distinctive_word_tokens) == 100 + assert len(classifier.descriptive_word_tokens) == 0 + assert len(classifier.unclassified_word_tokens) == 0 + + def test_performance_characteristics(self): + """Test that our implementation performs well with reasonable datasets.""" + import time + + mock_service = Mock() + mock_classification = Mock() + mock_classification.classification = 'DIST' + mock_service.find_one.return_value = [mock_classification] + + classifier = TokenClassifier(mock_service) + + # Time the operation + start_time = time.time() + classifier.name_tokens = [f'word{i}' for i in range(50)] + end_time = time.time() + + # Should complete reasonably quickly + execution_time = end_time - start_time + assert execution_time < 1.0 # Should take less than 1 second + + # Verify results are correct + assert len(classifier.distinctive_word_tokens) == 50 diff --git a/api/tests/python/unit/utils/test_utils.py b/api/tests/python/unit/utils/test_utils.py index ba0832d18..452536530 100644 --- 
a/api/tests/python/unit/utils/test_utils.py +++ b/api/tests/python/unit/utils/test_utils.py @@ -12,137 +12,63 @@ @pytest.mark.parametrize( - """ -test_name, -name_request_number,temp_request_number,user_email,user_phone, -header_name_request_number,header_temp_request_number,header_user_email,header_user_phone, -expected""", + 'test_scenario,has_nr_num,has_temp_num,has_email,has_phone,header_has_nr,header_has_temp,header_has_email,header_has_phone,expected', [ - ( - 'valid_nr', # test_name - 'NR 0000001', # name_request_number - None, # temp_request_number - 'info@example.com', # user_email - '1231231234', # user_phone - 'NR 0000001', # header_name_request_number - None, # header_temp_request_number - 'info@example.com', # header_user_email - '1231231234', # header_user_phone - True, - ), # expected - ( - 'valid_temp_nr', - None, - 'NR L000001', - 'info@example.com', - '1231231234', - None, - 'NR L000001', - 'info@example.com', - '1231231234', - True, - ), - ( - 'no_nr', - 'NR 0000001', - 'NR L000001', - 'info@example.com', - '1231231234', - None, - None, - 'info@example.com', - '1231231234', - False, - ), - ( - 'valid_nr_skip_nrl', - 'NR 0000001', - 'NR L000001', - 'info@example.com', - '1231231234', - 'NR 0000001', - 'NR L000001', - 'info@example.com', - '1231231234', - True, - ), - ( - 'valid_nr_only_email', - 'NR 0000001', - 'NR L000001', - 'info@example.com', - '1231231234', - 'NR 0000001', - 'NR L000001', - 'info@example.com', - None, - True, - ), - ( - 'valid_nr_only_phone', - 'NR 0000001', - 'NR L000001', - 'info@example.com', - '1231231234', - 'NR 0000001', - 'NR L000001', - None, - '1231231234', - True, - ), - ( - 'valid_nr_no_phone_no_email', - 'NR 0000001', - 'NR L000001', - 'info@example.com', - '1231231234', - 'NR 0000001', - 'NR L000001', - None, - None, - False, - ), + ('valid_nr', True, False, True, True, True, False, True, True, True), + ('valid_temp_nr', False, True, True, True, False, True, True, True, True), + ('no_nr', True, True, True, True, 
False, False, True, True, False), + ('valid_nr_skip_nrl', True, True, True, True, True, True, True, True, True), + ('valid_nr_only_email', True, True, True, True, True, True, True, False, True), + ('valid_nr_only_phone', True, True, True, True, True, True, False, True, True), + ('valid_nr_no_phone_no_email', True, True, True, True, True, True, False, False, False), ], ) def test_full_access_to_name_request( - test_name, - name_request_number, - temp_request_number, - user_email, - user_phone, - header_name_request_number, - header_temp_request_number, - header_user_email, - header_user_phone, + test_scenario, + has_nr_num, + has_temp_num, + has_email, + has_phone, + header_has_nr, + header_has_temp, + header_has_email, + header_has_phone, expected, + test_data_factory, ): """Assure that this contains the headers required to fully access an NR.""" from namex.utils.auth import full_access_to_name_request - # setup + # Generate unique data for this test run + unique_nr_num = test_data_factory.generate_unique_nr_num() if has_nr_num else None + unique_temp_num = f'NR L{test_data_factory.generate_unique_id()[:6]}' if has_temp_num else None + unique_email = f'test{test_data_factory.generate_unique_id()}@example.com' if has_email else None + unique_phone = f'250{test_data_factory.generate_unique_id()[:7]}' if has_phone else None + + # Setup NR nr = RequestDAO() - nr.nrNum = name_request_number or temp_request_number + nr.nrNum = unique_nr_num or unique_temp_num nr.stateCd = State.DRAFT nr._source = ValidSources.NAMEREQUEST.value applicant = Applicant() - applicant.phoneNumber = user_phone - applicant.emailAddress = user_email + applicant.phoneNumber = unique_phone + applicant.emailAddress = unique_email nr.applicants.append(applicant) nr.save_to_db() - builder = EnvironBuilder( - method='POST', - data={}, - headers={ - 'BCREG_NR': header_name_request_number, - 'BCREG_NRL': header_temp_request_number, - 'BCREG-User-Email': header_user_email, - 'BCREG-User-Phone': 
header_user_phone, - }, - ) + # Setup headers based on test scenario + headers = {} + if header_has_nr and unique_nr_num: + headers['BCREG_NR'] = unique_nr_num + if header_has_temp and unique_temp_num: + headers['BCREG_NRL'] = unique_temp_num + if header_has_email and unique_email: + headers['BCREG-User-Email'] = unique_email + if header_has_phone and unique_phone: + headers['BCREG-User-Phone'] = unique_phone + + builder = EnvironBuilder(method='POST', data={}, headers=headers) env = builder.get_environ() req = Request(env) - print(req) - assert expected == full_access_to_name_request(req) diff --git a/api/update_db.sh b/api/update_db.sh new file mode 100755 index 000000000..63baff3c6 --- /dev/null +++ b/api/update_db.sh @@ -0,0 +1,4 @@ +#! /bin/sh +echo 'starting upgrade' +export FLASK_ENV=migration +flask db upgrade \ No newline at end of file