-
Notifications
You must be signed in to change notification settings - Fork 1
Dev #12
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Dev #12
Changes from all commits
3e44bde
1ad4ec4
4683862
9c3f5d3
d793f6a
c4cec28
68d85f0
8e85aae
5e1e371
adfde6b
f85e0e5
fb15db2
bdf24d8
68cec85
1ad61a1
7cda45c
214b4ac
9e34eb9
143cdf2
cf96694
d8876a7
91c808e
fa0c821
aa9223c
6cd3182
a91aaed
7c81d87
1f4eb61
fadac10
ab3e15e
f33f088
619b6c8
c2cc9cc
90a9579
819901e
403a67a
6ec3cb3
24e9688
011deea
19d7be8
2503c6e
d709186
ca8d6bf
39d02ff
df5d080
58dece1
732313c
76b7a39
7864057
4d83dfa
facdbf0
54a0e94
3af6be8
9bbd391
a4127fa
7ddcf4a
5e81721
ef25da4
8d8f5b2
be5b2d2
fab5e6a
5aadf99
9a05225
3c7ec32
6bfd0f5
4727371
20708a5
762f762
2366c21
8614051
8cec4f6
553b2bc
f102d99
2c5c6f5
b26914e
a387eee
3cab342
328c057
38a5b5b
3ad4181
26d749b
d52bfe0
8555e23
475e4fc
3aa23d7
3ecf756
7909c22
8a68499
e5d26f2
06a38c8
34990a7
7658691
e626204
a172e0e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,8 +1,12 @@ | ||
| .venv | ||
| .pixi | ||
| pixi.lock | ||
| __pycache__ | ||
| *.db | ||
| data | ||
| .env | ||
| _data/ | ||
| _logs/ | ||
| *.backup | ||
| .DS_Store |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,149 @@ | ||
| """refactor: separate Record from RecordImage models | ||
|
|
||
| Revision ID: 19e2aefe5b17 | ||
| Revises: 48189f9482e3 | ||
| Create Date: 2026-02-16 22:01:11.720584 | ||
|
|
||
| """ | ||
| from typing import Sequence, Union | ||
|
|
||
| from alembic import op | ||
| import sqlalchemy as sa | ||
| from sqlalchemy.dialects import postgresql | ||
|
|
||
| # revision identifiers, used by Alembic. | ||
| revision: str = '19e2aefe5b17' | ||
| down_revision: Union[str, None] = '48189f9482e3' | ||
| branch_labels: Union[str, Sequence[str], None] = None | ||
| depends_on: Union[str, Sequence[str], None] = None | ||
|
|
||
|
|
||
| def upgrade() -> None: | ||
| # ### commands auto generated by Alembic - please adjust! ### | ||
| # Step 1: Create the new records table | ||
| op.create_table('records', | ||
| sa.Column('id', sa.Integer(), nullable=False), | ||
| sa.Column('title', sa.String(length=255), nullable=False), | ||
| sa.Column('description', sa.Text(), nullable=True), | ||
| sa.Column('object_typology', sa.String(length=50), nullable=True), | ||
| sa.Column('author', sa.String(length=255), nullable=True), | ||
| sa.Column('material', sa.String(length=255), nullable=True), | ||
| sa.Column('date', sa.String(length=50), nullable=True), | ||
| sa.Column('custom_attributes', sa.Text(), nullable=True), | ||
| sa.Column('project_id', sa.Integer(), nullable=True), | ||
| sa.Column('collection_id', sa.Integer(), nullable=True), | ||
| sa.Column('created_at', sa.DateTime(), nullable=False), | ||
| sa.Column('modified_at', sa.DateTime(), nullable=True), | ||
| sa.Column('created_by', sa.String(length=255), nullable=True), | ||
| sa.CheckConstraint('NOT (project_id IS NOT NULL AND collection_id IS NOT NULL)', name='check_record_single_parent'), | ||
| sa.ForeignKeyConstraint(['collection_id'], ['collections.id'], ondelete='SET NULL'), | ||
| sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ondelete='SET NULL'), | ||
| sa.PrimaryKeyConstraint('id') | ||
| ) | ||
| op.create_index(op.f('ix_records_id'), 'records', ['id'], unique=False) | ||
|
|
||
| # Step 2: Add new columns to record_images (nullable first for data migration) | ||
| op.add_column('record_images', sa.Column('record_id', sa.Integer(), nullable=True)) | ||
| op.add_column('record_images', sa.Column('capture_id', sa.String(length=36), nullable=True)) | ||
| op.add_column('record_images', sa.Column('pair_id', sa.String(length=36), nullable=True)) | ||
| op.add_column('record_images', sa.Column('sequence', sa.Integer(), nullable=True)) | ||
| op.add_column('record_images', sa.Column('role', sa.String(length=50), nullable=True)) | ||
|
|
||
| # Step 3: Migrate existing data - create one Record per RecordImage | ||
| # This preserves all existing descriptive metadata | ||
| connection = op.get_bind() | ||
|
|
||
| # Get all existing record_images | ||
| result = connection.execute(sa.text(""" | ||
| SELECT id, title, description, object_typology, author, material, date, | ||
| custom_attributes, project_id, collection_id, created_at, uploaded_by | ||
| FROM record_images | ||
| """)) | ||
|
|
||
| for row in result: | ||
| # Create a Record from each RecordImage's descriptive metadata | ||
| record_title = row.title or f"Untitled Record {row.id}" | ||
|
|
||
| connection.execute(sa.text(""" | ||
| INSERT INTO records (title, description, object_typology, author, material, date, | ||
| custom_attributes, project_id, collection_id, created_at, created_by) | ||
| VALUES (:title, :description, :object_typology, :author, :material, :date, | ||
| :custom_attributes, :project_id, :collection_id, :created_at, :created_by) | ||
| """), { | ||
| 'title': record_title, | ||
| 'description': row.description, | ||
| 'object_typology': row.object_typology, | ||
| 'author': row.author, | ||
| 'material': row.material, | ||
| 'date': row.date, | ||
| 'custom_attributes': row.custom_attributes, | ||
| 'project_id': row.project_id, | ||
| 'collection_id': row.collection_id, | ||
| 'created_at': row.created_at, | ||
| 'created_by': row.uploaded_by | ||
| }) | ||
|
|
||
| # Get the ID of the just-created record | ||
| new_record_id_result = connection.execute(sa.text("SELECT lastval()")) | ||
| new_record_id = new_record_id_result.scalar() | ||
|
|
||
| # Link the RecordImage to the new Record | ||
| connection.execute(sa.text(""" | ||
| UPDATE record_images SET record_id = :record_id WHERE id = :image_id | ||
| """), {'record_id': new_record_id, 'image_id': row.id}) | ||
|
|
||
| # Step 4: Now make record_id NOT NULL (all rows should have values now) | ||
| op.alter_column('record_images', 'record_id', nullable=False) | ||
|
|
||
| # Step 5: Update indexes and constraints | ||
| op.drop_index(op.f('ix_record_images_filename'), table_name='record_images') | ||
| op.create_index(op.f('ix_record_images_filename'), 'record_images', ['filename'], unique=False) | ||
| op.create_index(op.f('ix_record_images_capture_id'), 'record_images', ['capture_id'], unique=False) | ||
| op.create_index(op.f('ix_record_images_pair_id'), 'record_images', ['pair_id'], unique=False) | ||
| op.create_index(op.f('ix_record_images_record_id'), 'record_images', ['record_id'], unique=False) | ||
| op.drop_constraint('record_images_collection_id_fkey', 'record_images', type_='foreignkey') | ||
| op.drop_constraint('record_images_project_id_fkey', 'record_images', type_='foreignkey') | ||
| op.create_foreign_key(None, 'record_images', 'records', ['record_id'], ['id'], ondelete='CASCADE') | ||
|
|
||
| # Step 6: Drop old columns from record_images | ||
| op.drop_column('record_images', 'modified_at') | ||
| op.drop_column('record_images', 'material') | ||
| op.drop_column('record_images', 'collection_id') | ||
| op.drop_column('record_images', 'project_id') | ||
| op.drop_column('record_images', 'custom_attributes') | ||
| op.drop_column('record_images', 'description') | ||
| op.drop_column('record_images', 'object_typology') | ||
| op.drop_column('record_images', 'title') | ||
| op.drop_column('record_images', 'author') | ||
| op.drop_column('record_images', 'date') | ||
| # ### end Alembic commands ### | ||
|
|
||
|
|
||
| def downgrade() -> None: | ||
| # ### commands auto generated by Alembic - please adjust! ### | ||
| op.add_column('record_images', sa.Column('date', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('author', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('object_typology', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('custom_attributes', sa.TEXT(), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('project_id', sa.INTEGER(), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('collection_id', sa.INTEGER(), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('material', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) | ||
| op.add_column('record_images', sa.Column('modified_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)) | ||
| op.drop_constraint(None, 'record_images', type_='foreignkey') | ||
| op.create_foreign_key(op.f('record_images_project_id_fkey'), 'record_images', 'projects', ['project_id'], ['id'], ondelete='SET NULL') | ||
| op.create_foreign_key(op.f('record_images_collection_id_fkey'), 'record_images', 'collections', ['collection_id'], ['id'], ondelete='SET NULL') | ||
| op.drop_index(op.f('ix_record_images_record_id'), table_name='record_images') | ||
| op.drop_index(op.f('ix_record_images_pair_id'), table_name='record_images') | ||
| op.drop_index(op.f('ix_record_images_capture_id'), table_name='record_images') | ||
| op.drop_index(op.f('ix_record_images_filename'), table_name='record_images') | ||
| op.create_index(op.f('ix_record_images_filename'), 'record_images', ['filename'], unique=True) | ||
| op.drop_column('record_images', 'role') | ||
| op.drop_column('record_images', 'sequence') | ||
| op.drop_column('record_images', 'pair_id') | ||
| op.drop_column('record_images', 'capture_id') | ||
| op.drop_column('record_images', 'record_id') | ||
| op.drop_index(op.f('ix_records_id'), table_name='records') | ||
| op.drop_table('records') | ||
| # ### end Alembic commands ### | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,59 @@ | ||
| """add collections table and update records with collection_id | ||
|
|
||
| Revision ID: 48189f9482e3 | ||
| Revises: c3d4e5f6a7b8 | ||
| Create Date: 2026-02-16 20:22:02.487411 | ||
|
|
||
| """ | ||
| from typing import Sequence, Union | ||
|
|
||
| from alembic import op | ||
| import sqlalchemy as sa | ||
|
|
||
|
|
||
| # revision identifiers, used by Alembic. | ||
| revision: str = '48189f9482e3' | ||
| down_revision: Union[str, None] = 'c3d4e5f6a7b8' | ||
| branch_labels: Union[str, Sequence[str], None] = None | ||
| depends_on: Union[str, Sequence[str], None] = None | ||
|
|
||
|
|
||
| def upgrade() -> None: | ||
| # ### commands auto generated by Alembic - please adjust! ### | ||
| op.create_table('collections', | ||
| sa.Column('id', sa.Integer(), nullable=False), | ||
| sa.Column('name', sa.String(length=255), nullable=False), | ||
| sa.Column('description', sa.Text(), nullable=True), | ||
| sa.Column('collection_type', sa.String(length=50), nullable=True), | ||
| sa.Column('project_id', sa.Integer(), nullable=True), | ||
| sa.Column('parent_collection_id', sa.Integer(), nullable=True), | ||
| sa.Column('archival_metadata', sa.JSON(), nullable=True), | ||
| sa.Column('created_by', sa.String(length=255), nullable=True), | ||
| sa.Column('created_at', sa.DateTime(), nullable=False), | ||
| sa.Column('updated_at', sa.DateTime(), nullable=True), | ||
| sa.CheckConstraint('(project_id IS NOT NULL AND parent_collection_id IS NULL) OR (project_id IS NULL AND parent_collection_id IS NOT NULL)', name='check_collection_parent'), | ||
| sa.ForeignKeyConstraint(['parent_collection_id'], ['collections.id'], ondelete='CASCADE'), | ||
| sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ondelete='CASCADE'), | ||
| sa.PrimaryKeyConstraint('id') | ||
| ) | ||
| op.create_index(op.f('ix_collections_id'), 'collections', ['id'], unique=False) | ||
| op.create_index(op.f('ix_collections_name'), 'collections', ['name'], unique=False) | ||
| op.add_column('record_images', sa.Column('collection_id', sa.Integer(), nullable=True)) | ||
| op.drop_constraint(op.f('document_images_project_id_fkey'), 'record_images', type_='foreignkey') | ||
| op.create_foreign_key(None, 'record_images', 'projects', ['project_id'], ['id'], ondelete='SET NULL') | ||
| op.create_foreign_key(None, 'record_images', 'collections', ['collection_id'], ['id'], ondelete='SET NULL') | ||
| op.create_check_constraint('check_record_single_parent', 'record_images', 'NOT (project_id IS NOT NULL AND collection_id IS NOT NULL)') | ||
| # ### end Alembic commands ### | ||
|
|
||
|
|
||
| def downgrade() -> None: | ||
| # ### commands auto generated by Alembic - please adjust! ### | ||
| op.drop_constraint('check_record_single_parent', 'record_images', type_='check') | ||
| op.drop_constraint(None, 'record_images', type_='foreignkey') | ||
| op.drop_constraint(None, 'record_images', type_='foreignkey') | ||
| op.create_foreign_key(op.f('document_images_project_id_fkey'), 'record_images', 'projects', ['project_id'], ['id']) | ||
| op.drop_column('record_images', 'collection_id') | ||
| op.drop_index(op.f('ix_collections_name'), table_name='collections') | ||
| op.drop_index(op.f('ix_collections_id'), table_name='collections') | ||
| op.drop_table('collections') | ||
| # ### end Alembic commands ### |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,87 @@ | ||
| """rename documents to records | ||
|
|
||
| Revision ID: c3d4e5f6a7b8 | ||
| Revises: b7c8d9e0f1a2 | ||
| Create Date: 2026-02-16 19:36:06.000000 | ||
|
|
||
| """ | ||
| from typing import Sequence, Union | ||
|
|
||
| from alembic import op | ||
| import sqlalchemy as sa | ||
|
|
||
|
|
||
| # revision identifiers, used by Alembic. | ||
| revision: str = 'c3d4e5f6a7b8' | ||
| down_revision: Union[str, None] = 'b7c8d9e0f1a2' | ||
| branch_labels: Union[str, Sequence[str], None] = None | ||
| depends_on: Union[str, Sequence[str], None] = None | ||
|
|
||
|
|
||
| def upgrade() -> None: | ||
| # Rename the document_images table to record_images | ||
| op.rename_table('document_images', 'record_images') | ||
|
|
||
| # Rename the foreign key column in camera_settings | ||
| op.alter_column('camera_settings', 'document_image_id', | ||
| new_column_name='record_image_id', | ||
| existing_type=sa.Integer(), | ||
| existing_nullable=False) | ||
|
|
||
| # Rename the foreign key column in exif_data | ||
| op.alter_column('exif_data', 'document_image_id', | ||
| new_column_name='record_image_id', | ||
| existing_type=sa.Integer(), | ||
| existing_nullable=False) | ||
|
|
||
| # Update the foreign key constraint in camera_settings | ||
| # Drop old foreign key constraint | ||
| op.drop_constraint('camera_settings_document_image_id_fkey', 'camera_settings', type_='foreignkey') | ||
| # Create new foreign key constraint | ||
| op.create_foreign_key('camera_settings_record_image_id_fkey', | ||
| 'camera_settings', 'record_images', | ||
| ['record_image_id'], ['id']) | ||
|
|
||
| # Update the foreign key constraint in exif_data | ||
| # Drop old foreign key constraint | ||
| op.drop_constraint('exif_data_document_image_id_fkey', 'exif_data', type_='foreignkey') | ||
| # Create new foreign key constraint | ||
| op.create_foreign_key('exif_data_record_image_id_fkey', | ||
| 'exif_data', 'record_images', | ||
| ['record_image_id'], ['id']) | ||
|
|
||
| # Rename indexes | ||
| op.execute('ALTER INDEX ix_document_images_id RENAME TO ix_record_images_id') | ||
| op.execute('ALTER INDEX ix_document_images_filename RENAME TO ix_record_images_filename') | ||
|
|
||
|
|
||
| def downgrade() -> None: | ||
| # Reverse the index renames | ||
| op.execute('ALTER INDEX ix_record_images_filename RENAME TO ix_document_images_filename') | ||
| op.execute('ALTER INDEX ix_record_images_id RENAME TO ix_document_images_id') | ||
|
|
||
| # Drop the new foreign key constraints | ||
| op.drop_constraint('exif_data_record_image_id_fkey', 'exif_data', type_='foreignkey') | ||
| op.drop_constraint('camera_settings_record_image_id_fkey', 'camera_settings', type_='foreignkey') | ||
|
|
||
| # Recreate the old foreign key constraints | ||
| op.create_foreign_key('exif_data_document_image_id_fkey', | ||
| 'exif_data', 'document_images', | ||
| ['record_image_id'], ['id']) | ||
| op.create_foreign_key('camera_settings_document_image_id_fkey', | ||
| 'camera_settings', 'document_images', | ||
| ['record_image_id'], ['id']) | ||
|
|
||
| # Rename the foreign key columns back | ||
| op.alter_column('exif_data', 'record_image_id', | ||
| new_column_name='document_image_id', | ||
| existing_type=sa.Integer(), | ||
| existing_nullable=False) | ||
|
|
||
| op.alter_column('camera_settings', 'record_image_id', | ||
| new_column_name='document_image_id', | ||
| existing_type=sa.Integer(), | ||
| existing_nullable=False) | ||
|
|
||
| # Rename the table back | ||
| op.rename_table('record_images', 'document_images') |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,14 +1,18 @@ | ||
| from fastapi import APIRouter, Depends, HTTPException, Security | ||
| from fastapi import APIRouter, Depends, HTTPException, Security, Query | ||
| from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer | ||
| from sqlalchemy.orm import Session | ||
| from typing import Optional | ||
|
|
||
| from app.api.deps import get_db_dependency | ||
| from app.models.user import User | ||
| from app.schemas.user import UserCreate, UserRead, PasswordReset, PasswordResetRequest, TokenRefresh | ||
| from app.schemas.user import UserCreate, UserLogin, UserRead, PasswordReset, PasswordResetRequest, TokenRefresh | ||
| from app.core.security import hash_password, verify_password, create_access_token, verify_access_token | ||
|
|
||
| router = APIRouter() | ||
| security = HTTPBearer() | ||
| # auto_error=False so the dependency doesn't raise when header is absent | ||
| # (allows falling back to ?token= query param for browser src= requests) | ||
| _optional_bearer = HTTPBearer(auto_error=False) | ||
|
|
||
|
|
||
| @router.post("/register", response_model=UserRead) | ||
|
|
@@ -23,7 +27,7 @@ def register(payload: UserCreate, db: Session = Depends(get_db_dependency)): | |
|
|
||
|
|
||
| @router.post("/login") | ||
| def login(payload: UserCreate, db: Session = Depends(get_db_dependency)): | ||
| def login(payload: UserLogin, db: Session = Depends(get_db_dependency)): | ||
| user = db.query(User).filter(User.username == payload.username).first() | ||
| if not user or not verify_password(payload.password, user.hashed_password): | ||
| raise HTTPException(status_code=401, detail="Invalid credentials") | ||
|
|
@@ -72,9 +76,16 @@ def reset_password( | |
| return {"detail": "password updated successfully"} | ||
|
|
||
|
|
||
| def get_current_user(credentials: HTTPAuthorizationCredentials = Security(security), db: Session = Depends(get_db_dependency)): | ||
| token = credentials.credentials | ||
| payload = verify_access_token(token) | ||
| def get_current_user( | ||
| credentials: Optional[HTTPAuthorizationCredentials] = Security(_optional_bearer), | ||
| token: Optional[str] = Query(default=None), | ||
| db: Session = Depends(get_db_dependency) | ||
| ): | ||
| # Accept token from Authorization header OR ?token= query param (needed for <img src>) | ||
| raw_token = credentials.credentials if credentials else token | ||
| if not raw_token: | ||
| raise HTTPException(status_code=401, detail="Not authenticated") | ||
| payload = verify_access_token(raw_token) | ||
|
Comment on lines
+84
to
+88
|
||
| if not payload: | ||
| raise HTTPException(status_code=401, detail="Invalid or expired token") | ||
| user_id = int(payload.get("sub")) | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The migration drops the project_id and collection_id columns from record_images (diff lines 111-112) without first dropping the check_record_single_parent constraint created in the previous migration (48189f9482e3), which references those columns. On PostgreSQL this can cause the migration to fail. The constraint should be explicitly dropped — e.g. op.drop_constraint('check_record_single_parent', 'record_images', type_='check') — before the columns it references are dropped, and the downgrade should recreate it symmetrically.