diff --git a/.gitignore b/.gitignore index 6759997..52500a0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,12 @@ .venv +.pixi +pixi.lock __pycache__ *.db data .env _data/ _logs/ -*.pdf \ No newline at end of file +*.pdf +*.backup +.DS_Store diff --git a/alembic/env.py b/alembic/env.py index 9540131..194c3fd 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -16,9 +16,10 @@ from app.core.config import settings # Import all models so they are registered with Base.metadata -import app.models.document # noqa: F401 +import app.models.record # noqa: F401 import app.models.camera # noqa: F401 import app.models.project # noqa: F401 +import app.models.collection # noqa: F401 import app.models.user # noqa: F401 # this is the Alembic Config object, which provides diff --git a/alembic/versions/19e2aefe5b17_refactor_separate_record_from_.py b/alembic/versions/19e2aefe5b17_refactor_separate_record_from_.py new file mode 100644 index 0000000..b420a48 --- /dev/null +++ b/alembic/versions/19e2aefe5b17_refactor_separate_record_from_.py @@ -0,0 +1,149 @@ +"""refactor: separate Record from RecordImage models + +Revision ID: 19e2aefe5b17 +Revises: 48189f9482e3 +Create Date: 2026-02-16 22:01:11.720584 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = '19e2aefe5b17' +down_revision: Union[str, None] = '48189f9482e3' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + # Step 1: Create the new records table + op.create_table('records', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('object_typology', sa.String(length=50), nullable=True), + sa.Column('author', sa.String(length=255), nullable=True), + sa.Column('material', sa.String(length=255), nullable=True), + sa.Column('date', sa.String(length=50), nullable=True), + sa.Column('custom_attributes', sa.Text(), nullable=True), + sa.Column('project_id', sa.Integer(), nullable=True), + sa.Column('collection_id', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('modified_at', sa.DateTime(), nullable=True), + sa.Column('created_by', sa.String(length=255), nullable=True), + sa.CheckConstraint('NOT (project_id IS NOT NULL AND collection_id IS NOT NULL)', name='check_record_single_parent'), + sa.ForeignKeyConstraint(['collection_id'], ['collections.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ondelete='SET NULL'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_records_id'), 'records', ['id'], unique=False) + + # Step 2: Add new columns to record_images (nullable first for data migration) + op.add_column('record_images', sa.Column('record_id', sa.Integer(), nullable=True)) + op.add_column('record_images', sa.Column('capture_id', sa.String(length=36), nullable=True)) + op.add_column('record_images', sa.Column('pair_id', sa.String(length=36), nullable=True)) + op.add_column('record_images', sa.Column('sequence', sa.Integer(), nullable=True)) + op.add_column('record_images', sa.Column('role', sa.String(length=50), nullable=True)) + + # Step 3: Migrate existing data - create one Record per RecordImage + # This preserves all existing descriptive metadata + connection = op.get_bind() + + # Get all existing record_images + result = 
connection.execute(sa.text(""" + SELECT id, title, description, object_typology, author, material, date, + custom_attributes, project_id, collection_id, created_at, uploaded_by + FROM record_images + """)) + + for row in result: + # Create a Record from each RecordImage's descriptive metadata + record_title = row.title or f"Untitled Record {row.id}" + + connection.execute(sa.text(""" + INSERT INTO records (title, description, object_typology, author, material, date, + custom_attributes, project_id, collection_id, created_at, created_by) + VALUES (:title, :description, :object_typology, :author, :material, :date, + :custom_attributes, :project_id, :collection_id, :created_at, :created_by) + """), { + 'title': record_title, + 'description': row.description, + 'object_typology': row.object_typology, + 'author': row.author, + 'material': row.material, + 'date': row.date, + 'custom_attributes': row.custom_attributes, + 'project_id': row.project_id, + 'collection_id': row.collection_id, + 'created_at': row.created_at, + 'created_by': row.uploaded_by + }) + + # Get the ID of the just-created record + new_record_id_result = connection.execute(sa.text("SELECT lastval()")) + new_record_id = new_record_id_result.scalar() + + # Link the RecordImage to the new Record + connection.execute(sa.text(""" + UPDATE record_images SET record_id = :record_id WHERE id = :image_id + """), {'record_id': new_record_id, 'image_id': row.id}) + + # Step 4: Now make record_id NOT NULL (all rows should have values now) + op.alter_column('record_images', 'record_id', nullable=False) + + # Step 5: Update indexes and constraints + op.drop_index(op.f('ix_record_images_filename'), table_name='record_images') + op.create_index(op.f('ix_record_images_filename'), 'record_images', ['filename'], unique=False) + op.create_index(op.f('ix_record_images_capture_id'), 'record_images', ['capture_id'], unique=False) + op.create_index(op.f('ix_record_images_pair_id'), 'record_images', ['pair_id'], unique=False) + 
op.create_index(op.f('ix_record_images_record_id'), 'record_images', ['record_id'], unique=False) + op.drop_constraint('record_images_collection_id_fkey', 'record_images', type_='foreignkey') + op.drop_constraint('record_images_project_id_fkey', 'record_images', type_='foreignkey') + op.create_foreign_key(None, 'record_images', 'records', ['record_id'], ['id'], ondelete='CASCADE') + + # Step 6: Drop old columns from record_images + op.drop_column('record_images', 'modified_at') + op.drop_column('record_images', 'material') + op.drop_column('record_images', 'collection_id') + op.drop_column('record_images', 'project_id') + op.drop_column('record_images', 'custom_attributes') + op.drop_column('record_images', 'description') + op.drop_column('record_images', 'object_typology') + op.drop_column('record_images', 'title') + op.drop_column('record_images', 'author') + op.drop_column('record_images', 'date') + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('record_images', sa.Column('date', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('author', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('object_typology', sa.VARCHAR(length=50), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('custom_attributes', sa.TEXT(), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('project_id', sa.INTEGER(), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('collection_id', sa.INTEGER(), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('material', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) + op.add_column('record_images', sa.Column('modified_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)) + op.drop_constraint(None, 'record_images', type_='foreignkey') + op.create_foreign_key(op.f('record_images_project_id_fkey'), 'record_images', 'projects', ['project_id'], ['id'], ondelete='SET NULL') + op.create_foreign_key(op.f('record_images_collection_id_fkey'), 'record_images', 'collections', ['collection_id'], ['id'], ondelete='SET NULL') + op.drop_index(op.f('ix_record_images_record_id'), table_name='record_images') + op.drop_index(op.f('ix_record_images_pair_id'), table_name='record_images') + op.drop_index(op.f('ix_record_images_capture_id'), table_name='record_images') + op.drop_index(op.f('ix_record_images_filename'), table_name='record_images') + op.create_index(op.f('ix_record_images_filename'), 'record_images', ['filename'], unique=True) + op.drop_column('record_images', 'role') + op.drop_column('record_images', 'sequence') + 
op.drop_column('record_images', 'pair_id') + op.drop_column('record_images', 'capture_id') + op.drop_column('record_images', 'record_id') + op.drop_index(op.f('ix_records_id'), table_name='records') + op.drop_table('records') + # ### end Alembic commands ### diff --git a/alembic/versions/48189f9482e3_add_collections_table_and_update_.py b/alembic/versions/48189f9482e3_add_collections_table_and_update_.py new file mode 100644 index 0000000..bac4b05 --- /dev/null +++ b/alembic/versions/48189f9482e3_add_collections_table_and_update_.py @@ -0,0 +1,59 @@ +"""add collections table and update records with collection_id + +Revision ID: 48189f9482e3 +Revises: c3d4e5f6a7b8 +Create Date: 2026-02-16 20:22:02.487411 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '48189f9482e3' +down_revision: Union[str, None] = 'c3d4e5f6a7b8' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('collections', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('collection_type', sa.String(length=50), nullable=True), + sa.Column('project_id', sa.Integer(), nullable=True), + sa.Column('parent_collection_id', sa.Integer(), nullable=True), + sa.Column('archival_metadata', sa.JSON(), nullable=True), + sa.Column('created_by', sa.String(length=255), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.CheckConstraint('(project_id IS NOT NULL AND parent_collection_id IS NULL) OR (project_id IS NULL AND parent_collection_id IS NOT NULL)', name='check_collection_parent'), + sa.ForeignKeyConstraint(['parent_collection_id'], ['collections.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_collections_id'), 'collections', ['id'], unique=False) + op.create_index(op.f('ix_collections_name'), 'collections', ['name'], unique=False) + op.add_column('record_images', sa.Column('collection_id', sa.Integer(), nullable=True)) + op.drop_constraint(op.f('document_images_project_id_fkey'), 'record_images', type_='foreignkey') + op.create_foreign_key(None, 'record_images', 'projects', ['project_id'], ['id'], ondelete='SET NULL') + op.create_foreign_key(None, 'record_images', 'collections', ['collection_id'], ['id'], ondelete='SET NULL') + op.create_check_constraint('check_record_single_parent', 'record_images', 'NOT (project_id IS NOT NULL AND collection_id IS NOT NULL)') + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_constraint('check_record_single_parent', 'record_images', type_='check') + op.drop_constraint(None, 'record_images', type_='foreignkey') + op.drop_constraint(None, 'record_images', type_='foreignkey') + op.create_foreign_key(op.f('document_images_project_id_fkey'), 'record_images', 'projects', ['project_id'], ['id']) + op.drop_column('record_images', 'collection_id') + op.drop_index(op.f('ix_collections_name'), table_name='collections') + op.drop_index(op.f('ix_collections_id'), table_name='collections') + op.drop_table('collections') + # ### end Alembic commands ### diff --git a/alembic/versions/c3d4e5f6a7b8_rename_documents_to_records.py b/alembic/versions/c3d4e5f6a7b8_rename_documents_to_records.py new file mode 100644 index 0000000..220f34a --- /dev/null +++ b/alembic/versions/c3d4e5f6a7b8_rename_documents_to_records.py @@ -0,0 +1,87 @@ +"""rename documents to records + +Revision ID: c3d4e5f6a7b8 +Revises: b7c8d9e0f1a2 +Create Date: 2026-02-16 19:36:06.000000 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = 'c3d4e5f6a7b8' +down_revision: Union[str, None] = 'b7c8d9e0f1a2' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Rename the document_images table to record_images + op.rename_table('document_images', 'record_images') + + # Rename the foreign key column in camera_settings + op.alter_column('camera_settings', 'document_image_id', + new_column_name='record_image_id', + existing_type=sa.Integer(), + existing_nullable=False) + + # Rename the foreign key column in exif_data + op.alter_column('exif_data', 'document_image_id', + new_column_name='record_image_id', + existing_type=sa.Integer(), + existing_nullable=False) + + # Update the foreign key constraint in camera_settings + # Drop old foreign key constraint + op.drop_constraint('camera_settings_document_image_id_fkey', 'camera_settings', type_='foreignkey') + # Create new foreign key constraint + op.create_foreign_key('camera_settings_record_image_id_fkey', + 'camera_settings', 'record_images', + ['record_image_id'], ['id']) + + # Update the foreign key constraint in exif_data + # Drop old foreign key constraint + op.drop_constraint('exif_data_document_image_id_fkey', 'exif_data', type_='foreignkey') + # Create new foreign key constraint + op.create_foreign_key('exif_data_record_image_id_fkey', + 'exif_data', 'record_images', + ['record_image_id'], ['id']) + + # Rename indexes + op.execute('ALTER INDEX ix_document_images_id RENAME TO ix_record_images_id') + op.execute('ALTER INDEX ix_document_images_filename RENAME TO ix_record_images_filename') + + +def downgrade() -> None: + # Reverse the index renames + op.execute('ALTER INDEX ix_record_images_filename RENAME TO ix_document_images_filename') + op.execute('ALTER INDEX ix_record_images_id RENAME TO ix_document_images_id') + + # Drop the new foreign key constraints + op.drop_constraint('exif_data_record_image_id_fkey', 'exif_data', type_='foreignkey') + 
op.drop_constraint('camera_settings_record_image_id_fkey', 'camera_settings', type_='foreignkey') + + # Recreate the old foreign key constraints + op.create_foreign_key('exif_data_document_image_id_fkey', + 'exif_data', 'document_images', + ['record_image_id'], ['id']) + op.create_foreign_key('camera_settings_document_image_id_fkey', + 'camera_settings', 'document_images', + ['record_image_id'], ['id']) + + # Rename the foreign key columns back + op.alter_column('exif_data', 'record_image_id', + new_column_name='document_image_id', + existing_type=sa.Integer(), + existing_nullable=False) + + op.alter_column('camera_settings', 'record_image_id', + new_column_name='document_image_id', + existing_type=sa.Integer(), + existing_nullable=False) + + # Rename the table back + op.rename_table('record_images', 'document_images') diff --git a/app/api/auth.py b/app/api/auth.py index 273f55c..083ceb1 100644 --- a/app/api/auth.py +++ b/app/api/auth.py @@ -1,14 +1,18 @@ -from fastapi import APIRouter, Depends, HTTPException, Security +from fastapi import APIRouter, Depends, HTTPException, Security, Query from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer from sqlalchemy.orm import Session +from typing import Optional from app.api.deps import get_db_dependency from app.models.user import User -from app.schemas.user import UserCreate, UserRead, PasswordReset, PasswordResetRequest, TokenRefresh +from app.schemas.user import UserCreate, UserLogin, UserRead, PasswordReset, PasswordResetRequest, TokenRefresh from app.core.security import hash_password, verify_password, create_access_token, verify_access_token router = APIRouter() security = HTTPBearer() +# auto_error=False so the dependency doesn't raise when header is absent +# (allows falling back to ?token= query param for browser src= requests) +_optional_bearer = HTTPBearer(auto_error=False) @router.post("/register", response_model=UserRead) @@ -23,7 +27,7 @@ def register(payload: UserCreate, db: Session = 
Depends(get_db_dependency)): @router.post("/login") -def login(payload: UserCreate, db: Session = Depends(get_db_dependency)): +def login(payload: UserLogin, db: Session = Depends(get_db_dependency)): user = db.query(User).filter(User.username == payload.username).first() if not user or not verify_password(payload.password, user.hashed_password): raise HTTPException(status_code=401, detail="Invalid credentials") @@ -72,9 +76,16 @@ def reset_password( return {"detail": "password updated successfully"} -def get_current_user(credentials: HTTPAuthorizationCredentials = Security(security), db: Session = Depends(get_db_dependency)): - token = credentials.credentials - payload = verify_access_token(token) +def get_current_user( + credentials: Optional[HTTPAuthorizationCredentials] = Security(_optional_bearer), + token: Optional[str] = Query(default=None), + db: Session = Depends(get_db_dependency) +): + # Accept token from Authorization header OR ?token= query param (needed for ) + raw_token = credentials.credentials if credentials else token + if not raw_token: + raise HTTPException(status_code=401, detail="Not authenticated") + payload = verify_access_token(raw_token) if not payload: raise HTTPException(status_code=401, detail="Invalid or expired token") user_id = int(payload.get("sub")) diff --git a/app/api/cameras.py b/app/api/cameras.py index 09f2621..cca623e 100644 --- a/app/api/cameras.py +++ b/app/api/cameras.py @@ -1,7 +1,7 @@ from fastapi import APIRouter, Depends, HTTPException, Query from sqlalchemy.orm import Session from sqlalchemy.exc import IntegrityError -from app.models.document import DocumentImage +from app.models.record import Record, RecordImage from typing import List, Optional from pydantic import BaseModel import logging @@ -11,6 +11,7 @@ from app.models.camera import CameraSettings from app.models.user import User from app.schemas.camera import CameraSettingsCreate, CameraSettingsRead, CameraSettingsUpdate +from app.core.thumbnail import 
generate_thumbnail router = APIRouter() logger = logging.getLogger(__name__) @@ -33,6 +34,8 @@ class CaptureRequest(BaseModel): camera_index: int = 0 resolution: str = "medium" # low, medium, high include_resolution_in_filename: bool = False + record_id: Optional[int] = None # Link to existing record, or create new if None + record_title: Optional[str] = None # Used if creating new record class DualCaptureRequest(BaseModel): @@ -41,6 +44,9 @@ class DualCaptureRequest(BaseModel): resolution: str = "medium" include_resolution_in_filename: bool = False stagger_ms: int = 20 + record_id: Optional[int] = None # Link to existing record, or create new if None + record_title: Optional[str] = None # Used if creating new record + sequence: Optional[int] = None # Page number/order class CaptureResponse(BaseModel): @@ -48,6 +54,8 @@ class CaptureResponse(BaseModel): success: bool file_path: Optional[str] = None file_paths: Optional[List[str]] = None + record_id: Optional[int] = None + image_ids: Optional[List[int]] = None timing: Optional[dict] = None error: Optional[str] = None @@ -100,32 +108,32 @@ def _get_camera_registry(): def list_camera_devices(): """ Return available camera devices detected via libcamera/picamera2. - + Returns hardware IDs, models, and calibration status for each camera. On non-Pi systems or if camera libraries aren't available, returns empty list. 
""" registry = _get_camera_registry() if registry is None: return [] - + try: detected = registry.detect_cameras() devices = [] - + for idx, (hw_id, info) in detected.items(): # Check if camera is registered and has calibration camera_data = registry.get_camera_by_id(hw_id) calibrated = False machine_id = None label = None - + if camera_data: calibrated = bool( camera_data.get("calibration", {}).get("focus", {}).get("success") ) machine_id = camera_data.get("machine_id") label = camera_data.get("label") - + devices.append(DeviceInfo( hardware_id=hw_id, model=info.get("model", "unknown"), @@ -133,9 +141,9 @@ def list_camera_devices(): location=info.get("location"), machine_id=machine_id, label=label, - calibrated=calibrated + calibrated=calibrated, )) - + return devices except Exception as e: logger.error(f"Failed to detect cameras: {e}") @@ -151,8 +159,7 @@ def trigger_capture( """ Trigger a single image capture on the specified camera. - Requires a project to exist (use /projects/{id}/initialize first). - Captures image locally to microSD, then creates database record with metadata. + Creates or links to existing Record, then creates RecordImage with capture manifest linkage. 
""" try: from capture.service import single_capture_image, is_camera_connected @@ -160,6 +167,8 @@ def trigger_capture( from capture.project_manager import default_camera_config_from_registry from PIL import Image from PIL.ExifTags import TAGS + from app.models.project import Project + from app.models.record import ExifData except ImportError as e: return CaptureResponse(success=False, error=f"Capture system not available: {e}") @@ -178,7 +187,8 @@ def trigger_capture( ) camera_config = CameraConfig(**config_dict) - output_path = single_capture_image( + # Capture image and get manifest IDs + output_path, capture_id, pair_id = single_capture_image( project_name=request.project_name, camera_config=camera_config, check_camera=False, # Already checked @@ -186,15 +196,16 @@ def trigger_capture( ) # Extract image dimensions and EXIF data + from pathlib import Path + file_path = Path(output_path) + file_size = file_path.stat().st_size if file_path.exists() else 0 resolution_width = None resolution_height = None exif_dict = {} try: - from PIL import Image with Image.open(output_path) as img: resolution_width, resolution_height = img.size - # Extract EXIF data if available try: exif_data = img._getexif() @@ -207,41 +218,58 @@ def trigger_capture( except Exception as e: logger.warning(f"Could not extract image metadata: {e}") - # Get file size - from pathlib import Path - file_path = Path(output_path) - file_size = file_path.stat().st_size if file_path.exists() else 0 - - # Create database record for the captured image - from app.models.document import DocumentImage, ExifData - from app.models.camera import CameraSettings - from app.models.project import Project - # Get or find project by name project = db.query(Project).filter(Project.name == request.project_name).first() project_id = project.id if project else None - # Create document record - doc = DocumentImage( + # Get or create Record + if request.record_id: + # Link to existing record + record = 
db.query(Record).filter(Record.id == request.record_id).first() + if not record: + raise HTTPException(status_code=404, detail=f"Record {request.record_id} not found") + else: + # Create new record for this capture + record = Record( + title=request.record_title or f"{request.project_name} - {file_path.stem}", + description=f"Captured at {request.resolution} resolution", + object_typology="document", + project_id=project_id, + created_by=current_user.username, + ) + db.add(record) + db.flush() # Get the ID + + # Generate thumbnail alongside the captured images + thumbnail_path = None + try: + thumbnails_dir = file_path.parent.parent / "thumbnails" + thumbnail_path = generate_thumbnail(file_path, thumbnails_dir) + except Exception as e: + logger.warning(f"Failed to generate thumbnail for {file_path.name}: {e}") + + # Create RecordImage with capture linkage + img = RecordImage( + record_id=record.id, filename=file_path.name, - title=f"{request.project_name} - Camera {request.camera_index}", - description=f"Captured via API at {request.resolution} resolution", file_path=str(output_path), + thumbnail_path=thumbnail_path, file_size=file_size, format="jpg", resolution_width=resolution_width, resolution_height=resolution_height, + capture_id=capture_id, + pair_id=pair_id, + role="single", uploaded_by=current_user.username, - project_id=project_id, - object_typology="document", ) - db.add(doc) + db.add(img) db.flush() # Get the ID # Save camera settings cs = CameraSettings( - document_image_id=doc.id, + record_image_id=img.id, camera_model=camera_config.__class__.__name__, iso=None, aperture=None, @@ -253,20 +281,25 @@ def trigger_capture( # Save EXIF data if exif_dict: ex = ExifData( - document_image_id=doc.id, + record_image_id=img.id, raw_exif=str(exif_dict), ) db.add(ex) db.commit() - db.refresh(doc) + db.refresh(record) + db.refresh(img) - logger.info(f"Created database record for captured image: {doc.id}") + logger.info(f"Created record {record.id}, image {img.id}, 
capture_id={capture_id}") return CaptureResponse( success=True, - file_path=str(output_path) + file_path=str(output_path), + record_id=record.id, + image_ids=[img.id] ) + except HTTPException: + raise except Exception as e: logger.exception(f"Capture failed: {e}") db.rollback() @@ -283,7 +316,7 @@ def trigger_dual_capture( Trigger simultaneous capture on both cameras (index 0 and 1). Used for book scanning where left and right pages are captured together. - Both images stored locally; metadata saved to database. + Creates or links to existing Record, then creates two linked RecordImages. """ try: from capture.service import dual_capture_image, is_camera_connected @@ -291,6 +324,9 @@ def trigger_dual_capture( from capture.project_manager import default_camera_config_from_registry from PIL import Image from PIL.ExifTags import TAGS + from pathlib import Path + from app.models.project import Project + from app.models.record import ExifData except ImportError as e: return CaptureResponse(success=False, error=f"Capture system not available: {e}") @@ -310,7 +346,8 @@ def trigger_dual_capture( cam0_config = CameraConfig(**config0_dict) cam1_config = CameraConfig(**config1_dict) - path0, path1 = dual_capture_image( + # Capture both images and get manifest IDs + path0, path1, capture_id, pair_id = dual_capture_image( project_name=request.project_name, cam1_config=cam0_config, cam2_config=cam1_config, @@ -319,17 +356,30 @@ def trigger_dual_capture( stagger_ms=request.stagger_ms ) - from pathlib import Path - from app.models.document import DocumentImage, ExifData - from app.models.camera import CameraSettings - from app.models.project import Project - # Get project project = db.query(Project).filter(Project.name == request.project_name).first() project_id = project.id if project else None + # Get or create Record + if request.record_id: + # Link to existing record (adding new pages to multi-page document) + record = db.query(Record).filter(Record.id == 
request.record_id).first() + if not record: + raise HTTPException(status_code=404, detail=f"Record {request.record_id} not found") + else: + # Create new record for this dual capture + record = Record( + title=request.record_title or f"{request.project_name} - Dual capture", + description=f"Dual camera capture at {request.resolution} resolution", + object_typology="book", # Default to book for dual captures + project_id=project_id, + created_by=current_user.username, + ) + db.add(record) + db.flush() # Get the ID + # Helper to process captured image - def create_document_from_capture(file_path_str: str, camera_idx: int): + def create_image_record(file_path_str: str, camera_idx: int, role: str): file_path = Path(file_path_str) file_size = file_path.stat().st_size if file_path.exists() else 0 @@ -352,28 +402,38 @@ def create_document_from_capture(file_path_str: str, camera_idx: int): except Exception as e: logger.warning(f"Could not extract image metadata for {file_path}: {e}") - # Create document record - doc = DocumentImage( + # Generate thumbnail alongside the captured images + thumbnail_path = None + try: + thumbnails_dir = file_path.parent.parent / "thumbnails" + thumbnail_path = generate_thumbnail(file_path, thumbnails_dir) + except Exception as e: + logger.warning(f"Failed to generate thumbnail for {file_path.name}: {e}") + + # Create RecordImage with capture linkage + img = RecordImage( + record_id=record.id, filename=file_path.name, - title=f"{request.project_name} - Camera {camera_idx} (Dual)", - description=f"Dual capture via API at {request.resolution} resolution", file_path=str(file_path_str), + thumbnail_path=thumbnail_path, file_size=file_size, format="jpg", resolution_width=resolution_width, resolution_height=resolution_height, + capture_id=capture_id, # Both images share same capture event + pair_id=pair_id, # Both images share same pair_id + sequence=request.sequence, + role=role, uploaded_by=current_user.username, - project_id=project_id, - 
object_typology="document", ) - db.add(doc) + db.add(img) db.flush() # Camera settings cam_config = cam0_config if camera_idx == 0 else cam1_config cs = CameraSettings( - document_image_id=doc.id, + record_image_id=img.id, camera_model=cam_config.__class__.__name__, iso=None, aperture=None, @@ -385,25 +445,33 @@ def create_document_from_capture(file_path_str: str, camera_idx: int): # EXIF data if exif_dict: ex = ExifData( - document_image_id=doc.id, + record_image_id=img.id, raw_exif=str(exif_dict), ) db.add(ex) - return doc + return img - # Create records for both captures - doc0 = create_document_from_capture(str(path0), 0) - doc1 = create_document_from_capture(str(path1), 1) + # Create RecordImages for both captures with appropriate roles + img0 = create_image_record(str(path0), 0, "left") + img1 = create_image_record(str(path1), 1, "right") db.commit() + db.refresh(record) - logger.info(f"Created dual capture database records: {doc0.id}, {doc1.id}") + logger.info( + f"Created dual capture: record {record.id}, images [{img0.id}, {img1.id}], " + f"capture_id={capture_id}, pair_id={pair_id}" + ) return CaptureResponse( success=True, - file_paths=[str(path0), str(path1)] + file_paths=[str(path0), str(path1)], + record_id=record.id, + image_ids=[img0.id, img1.id] ) + except HTTPException: + raise except Exception as e: logger.exception(f"Dual capture failed: {e}") db.rollback() @@ -515,8 +583,8 @@ def create_camera_settings( current_user: User = Depends(get_current_user), db: Session = Depends(get_db_dependency) ): - if not db.query(DocumentImage).filter(DocumentImage.id == payload.document_image_id).first(): - raise HTTPException(status_code=404, detail="Document not found") + if not db.query(RecordImage).filter(RecordImage.id == payload.record_image_id).first(): + raise HTTPException(status_code=404, detail="Record not found") try: cs = CameraSettings(**payload.dict()) @@ -525,7 +593,7 @@ def create_camera_settings( db.refresh(cs) except IntegrityError: 
db.rollback() - raise HTTPException(status_code=409, detail="Camera settings already exist for this document") + raise HTTPException(status_code=409, detail="Camera settings already exist for this record") return CameraSettingsRead.model_validate(cs) diff --git a/app/api/collections.py b/app/api/collections.py new file mode 100644 index 0000000..00a3a95 --- /dev/null +++ b/app/api/collections.py @@ -0,0 +1,202 @@ +from fastapi import APIRouter, Depends, HTTPException, Query +from typing import List, Optional +from sqlalchemy.orm import Session, selectinload +from sqlalchemy import select, func +import logging + +from app.api.deps import get_db_dependency +from app.api.auth import get_current_user +from app.models.collection import Collection +from app.models.project import Project +from app.models.record import Record, RecordImage +from app.models.user import User +from app.schemas.collection import CollectionCreate, CollectionRead, CollectionUpdate, CollectionWithChildren + +router = APIRouter() +logger = logging.getLogger(__name__) + + +@router.post("/", response_model=CollectionRead, status_code=201) +def create_collection( + payload: CollectionCreate, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """ + Create a new collection. + + Must specify either project_id (for top-level collection) OR parent_collection_id (for nested subcollection). 
+ """ + # Validate that exactly one parent is specified + if payload.project_id is None and payload.parent_collection_id is None: + raise HTTPException(status_code=400, detail="Must specify either project_id or parent_collection_id") + + if payload.project_id is not None and payload.parent_collection_id is not None: + raise HTTPException(status_code=400, detail="Cannot specify both project_id and parent_collection_id") + + # Validate parent exists + if payload.project_id is not None: + project = db.query(Project).filter(Project.id == payload.project_id).first() + if not project: + raise HTTPException(status_code=404, detail=f"Project {payload.project_id} not found") + + if payload.parent_collection_id is not None: + parent = db.query(Collection).filter(Collection.id == payload.parent_collection_id).first() + if not parent: + raise HTTPException(status_code=404, detail=f"Parent collection {payload.parent_collection_id} not found") + + collection = Collection( + name=payload.name, + description=payload.description, + collection_type=payload.collection_type, + archival_metadata=payload.archival_metadata, + project_id=payload.project_id, + parent_collection_id=payload.parent_collection_id, + created_by=payload.created_by or current_user.username + ) + + db.add(collection) + db.commit() + db.refresh(collection) + return CollectionRead.model_validate(collection) + + +@router.get("/", response_model=List[CollectionRead]) +def list_collections( + project_id: Optional[int] = Query(None, description="Filter by project"), + parent_collection_id: Optional[int] = Query(None, description="Filter by parent collection (use 'null' for top-level)"), + skip: int = Query(default=0, ge=0), + limit: int = Query(default=100, ge=1, le=1000), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency), +): + """ + List collections with optional filters. 
+ + - No filters: Returns all collections + - project_id: Returns all collections under this project (top-level only) + - parent_collection_id: Returns all subcollections of this collection + """ + query = db.query(Collection) + + if project_id is not None: + query = query.filter(Collection.project_id == project_id) + + if parent_collection_id is not None: + query = query.filter(Collection.parent_collection_id == parent_collection_id) + + items = query.offset(skip).limit(limit).all() + return [CollectionRead.model_validate(i) for i in items] + + +@router.get("/{collection_id}", response_model=CollectionRead) +def get_collection( + collection_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Get a specific collection by ID.""" + collection = db.query(Collection).filter(Collection.id == collection_id).first() + if not collection: + raise HTTPException(status_code=404, detail=f"Collection {collection_id} not found") + return CollectionRead.model_validate(collection) + + +@router.get("/{collection_id}/hierarchy", response_model=CollectionWithChildren) +def get_collection_hierarchy( + collection_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """ + Get collection with nested child collections (full hierarchy tree). + Includes record counts at each level. 
+ """ + collection = db.query(Collection).options( + selectinload(Collection.child_collections) + ).filter(Collection.id == collection_id).first() + + if not collection: + raise HTTPException(status_code=404, detail=f"Collection {collection_id} not found") + + # Count records in this collection + record_count = db.query(func.count(RecordImage.id)).filter( + RecordImage.collection_id == collection_id + ).scalar() + + result = CollectionWithChildren.model_validate(collection) + result.record_count = record_count + return result + + +@router.patch("/{collection_id}", response_model=CollectionRead) +def update_collection( + collection_id: int, + payload: CollectionUpdate, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """ + Update a collection. + + Can update name, description, type, metadata, or move to different parent collection. + """ + collection = db.query(Collection).filter(Collection.id == collection_id).first() + if not collection: + raise HTTPException(status_code=404, detail=f"Collection {collection_id} not found") + + # Validate new parent if specified + if payload.parent_collection_id is not None: + # Prevent circular references + if payload.parent_collection_id == collection_id: + raise HTTPException(status_code=400, detail="Collection cannot be its own parent") + + new_parent = db.query(Collection).filter(Collection.id == payload.parent_collection_id).first() + if not new_parent: + raise HTTPException(status_code=404, detail=f"Parent collection {payload.parent_collection_id} not found") + + # Check if new parent is a descendant of this collection (would create cycle) + current = new_parent + while current.parent_collection_id is not None: + if current.parent_collection_id == collection_id: + raise HTTPException(status_code=400, detail="Cannot create circular collection hierarchy") + current = db.query(Collection).filter(Collection.id == current.parent_collection_id).first() + if not current: + break + + 
# Update fields + update_data = payload.model_dump(exclude_unset=True) + for field, value in update_data.items(): + setattr(collection, field, value) + + db.commit() + db.refresh(collection) + return CollectionRead.model_validate(collection) + + +@router.delete("/{collection_id}", status_code=204) +def delete_collection( + collection_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """ + Delete a collection. + + Warning: This will cascade delete all child collections and orphan any records in this collection. + """ + collection = db.query(Collection).filter(Collection.id == collection_id).first() + if not collection: + raise HTTPException(status_code=404, detail=f"Collection {collection_id} not found") + + # Check if collection has records + record_count = db.query(func.count(Record.id)).filter( + Record.collection_id == collection_id + ).scalar() + + if record_count > 0: + logger.warning(f"Deleting collection {collection_id} with {record_count} records - records will be orphaned") + + db.delete(collection) + db.commit() + return None diff --git a/app/api/documents.py b/app/api/documents.py deleted file mode 100644 index 7dcbde1..0000000 --- a/app/api/documents.py +++ /dev/null @@ -1,357 +0,0 @@ -from fastapi import APIRouter, Depends, HTTPException, Query, UploadFile, File -from fastapi.responses import FileResponse -from typing import List, Optional -from sqlalchemy.orm import Session -from sqlalchemy.exc import IntegrityError -from pathlib import Path -import shutil -import hashlib -import logging - -from app.api.deps import get_db_dependency -from app.api.auth import get_current_user -from app.models.document import DocumentImage, ExifData -from app.models.camera import CameraSettings -from app.models.user import User -from app.schemas.document import DocumentCreate, DocumentRead, DocumentUpdate -from app.core.config import settings -from app.core.thumbnail import generate_thumbnail, delete_thumbnail - 
-router = APIRouter() -logger = logging.getLogger(__name__) - - -@router.post("/", response_model=DocumentRead) -def create_document( - doc_in: DocumentCreate, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - # create document - doc = DocumentImage( - filename=doc_in.filename, - title=doc_in.title, - description=doc_in.description, - file_path=doc_in.file_path, - file_size=doc_in.file_size, - format=doc_in.format, - resolution_width=doc_in.resolution_width, - resolution_height=doc_in.resolution_height, - uploaded_by=doc_in.uploaded_by or current_user.username, - object_typology=doc_in.object_typology, - author=doc_in.author, - material=doc_in.material, - date=doc_in.date, - custom_attributes=doc_in.custom_attributes, - ) - try: - db.add(doc) - db.commit() - db.refresh(doc) - except IntegrityError: - db.rollback() - raise HTTPException(status_code=409, detail="Document with this filename already exists") - - # optional camera settings - if doc_in.camera_settings: - cs = CameraSettings(document_image_id=doc.id, **doc_in.camera_settings.dict()) - db.add(cs) - - # optional exif - if doc_in.exif_data: - ex = ExifData(document_image_id=doc.id, **doc_in.exif_data.dict()) - db.add(ex) - - db.commit() - db.refresh(doc) - - return DocumentRead.model_validate(doc) - - -@router.get("/", response_model=List[DocumentRead]) -def list_documents( - skip: int = Query(default=0, ge=0), - limit: int = Query(default=100, ge=1, le=1000), - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - docs = db.query(DocumentImage).offset(skip).limit(limit).all() - return [DocumentRead.model_validate(d) for d in docs] - - -@router.get("/{doc_id}", response_model=DocumentRead) -def get_document(doc_id: int, db: Session = Depends(get_db_dependency)): - doc = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not doc: - raise HTTPException(status_code=404, detail="Document not found") - 
return DocumentRead.model_validate(doc) - - -@router.patch("/{doc_id}", response_model=DocumentRead) -def update_document( - doc_id: int, - payload: DocumentUpdate, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - doc = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not doc: - raise HTTPException(status_code=404, detail="Document not found") - - # Update only provided fields - for field, value in payload.dict(exclude_unset=True).items(): - setattr(doc, field, value) - - db.add(doc) - db.commit() - db.refresh(doc) - return DocumentRead.model_validate(doc) - - -@router.put("/{doc_id}", response_model=DocumentRead) -def replace_document( - doc_id: int, - payload: DocumentCreate, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - doc = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not doc: - raise HTTPException(status_code=404, detail="Document not found") - - # Replace all fields - doc.filename = payload.filename - doc.title = payload.title - doc.description = payload.description - doc.file_path = payload.file_path - doc.file_size = payload.file_size - doc.format = payload.format - doc.resolution_width = payload.resolution_width - doc.resolution_height = payload.resolution_height - doc.uploaded_by = payload.uploaded_by - doc.object_typology = payload.object_typology - doc.author = payload.author - doc.material = payload.material - doc.date = payload.date - doc.custom_attributes = payload.custom_attributes - - db.add(doc) - db.commit() - db.refresh(doc) - return DocumentRead.model_validate(doc) - - -@router.delete("/{doc_id}") -def delete_document( - doc_id: int, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - doc = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not doc: - raise HTTPException(status_code=404, detail="Document not found") - - # 
Clean up thumbnail if it exists - if doc.thumbnail_path: - delete_thumbnail(doc.thumbnail_path) - - db.delete(doc) - db.commit() - return {"detail": "document deleted"} - - -def _compute_sha256(file_path: Path) -> str: - """Compute SHA256 hash of a file.""" - sha256_hash = hashlib.sha256() - with open(file_path, "rb") as f: - for chunk in iter(lambda: f.read(8192), b""): - sha256_hash.update(chunk) - return sha256_hash.hexdigest() - - -@router.post("/upload", response_model=DocumentRead) -async def upload_document( - file: UploadFile = File(...), - title: Optional[str] = None, - description: Optional[str] = None, - project_id: Optional[int] = None, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - """ - Upload a document image file. - - Creates a document record and stores the file in the uploads directory. - """ - # Validate file type - allowed_types = {"image/jpeg", "image/png", "image/tiff", "image/webp"} - if file.content_type not in allowed_types: - raise HTTPException( - status_code=400, - detail=f"Invalid file type. 
Allowed: {', '.join(allowed_types)}" - ) - - # Create upload directory - uploads_dir = settings.data_dir / "uploads" - uploads_dir.mkdir(parents=True, exist_ok=True) - - # Generate unique filename to avoid collisions - import uuid - ext = Path(file.filename).suffix if file.filename else ".jpg" - unique_filename = f"{uuid.uuid4().hex}{ext}" - file_path = uploads_dir / unique_filename - - # Save file - try: - with open(file_path, "wb") as buffer: - shutil.copyfileobj(file.file, buffer) - except Exception as e: - logger.exception(f"Failed to save uploaded file: {e}") - raise HTTPException(status_code=500, detail="Failed to save file") - - # Get file info - file_size = file_path.stat().st_size - file_format = ext.lstrip(".").lower() - - # Try to get image dimensions - resolution_width = None - resolution_height = None - thumbnail_path = None - try: - from PIL import Image - with Image.open(file_path) as img: - resolution_width, resolution_height = img.size - except Exception: - pass # PIL not available or invalid image - - # Generate thumbnail - try: - thumbnails_dir = settings.data_dir / "thumbnails" - thumbnail_path = generate_thumbnail(file_path, thumbnails_dir) - except Exception as e: - logger.warning(f"Failed to generate thumbnail for {file.filename}: {e}") - # Don't fail the upload if thumbnail generation fails - - # Create document record - doc = DocumentImage( - filename=file.filename or unique_filename, - title=title or file.filename, - description=description, - file_path=str(file_path), - thumbnail_path=thumbnail_path, - file_size=file_size, - format=file_format, - resolution_width=resolution_width, - resolution_height=resolution_height, - uploaded_by=current_user.username, - project_id=project_id, - ) - - try: - db.add(doc) - db.commit() - db.refresh(doc) - except IntegrityError: - db.rollback() - # Clean up file - file_path.unlink(missing_ok=True) - raise HTTPException(status_code=409, detail="Document with this filename already exists") - - return 
DocumentRead.model_validate(doc) - - -@router.get("/{doc_id}/file") -def download_document_file( - doc_id: int, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - """ - Download the actual image file for a document. - - Returns the file as a binary response with appropriate content type. - """ - doc = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not doc: - raise HTTPException(status_code=404, detail="Document not found") - - if not doc.file_path: - raise HTTPException(status_code=404, detail="Document has no associated file") - - file_path = Path(doc.file_path) - if not file_path.exists(): - raise HTTPException(status_code=404, detail="File not found on disk") - - # Determine media type - media_type_map = { - "jpg": "image/jpeg", - "jpeg": "image/jpeg", - "png": "image/png", - "tiff": "image/tiff", - "tif": "image/tiff", - "webp": "image/webp", - } - ext = file_path.suffix.lstrip(".").lower() - media_type = media_type_map.get(ext, "application/octet-stream") - - return FileResponse( - path=file_path, - filename=doc.filename, - media_type=media_type - ) - - -@router.get("/{doc_id}/checksum") -def get_document_checksum( - doc_id: int, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - """ - Get the SHA256 checksum of a document's file. - - Useful for verifying file integrity. 
- """ - doc = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not doc: - raise HTTPException(status_code=404, detail="Document not found") - - if not doc.file_path: - raise HTTPException(status_code=404, detail="Document has no associated file") - - file_path = Path(doc.file_path) - if not file_path.exists(): - raise HTTPException(status_code=404, detail="File not found on disk") - - checksum = _compute_sha256(file_path) - return {"document_id": doc_id, "sha256": checksum} - - -@router.get("/{doc_id}/thumbnail") -def get_document_thumbnail( - doc_id: int, - current_user: User = Depends(get_current_user), - db: Session = Depends(get_db_dependency) -): - """ - Download the thumbnail image for a document. - - Returns the thumbnail file as a JPEG image response. - """ - doc = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not doc: - raise HTTPException(status_code=404, detail="Document not found") - - if not doc.thumbnail_path: - raise HTTPException(status_code=404, detail="Document has no thumbnail") - - thumbnail_path = Path(doc.thumbnail_path) - if not thumbnail_path.exists(): - raise HTTPException(status_code=404, detail="Thumbnail file not found on disk") - - return FileResponse( - path=thumbnail_path, - filename=f"{Path(doc.filename).stem}_thumb.jpg", - media_type="image/jpeg" - ) - diff --git a/app/api/projects.py b/app/api/projects.py index 98acb94..104a144 100644 --- a/app/api/projects.py +++ b/app/api/projects.py @@ -7,7 +7,7 @@ from app.api.deps import get_db_dependency from app.api.auth import get_current_user from app.models.project import Project -from app.models.document import DocumentImage +from app.models.record import Record, RecordImage from app.models.user import User from app.schemas.project import ProjectCreate, ProjectRead, ProjectBase, ProjectUpdate @@ -65,42 +65,42 @@ def get_project( return ProjectRead.model_validate(p) -@router.post("/{project_id}/add_document/{doc_id}") -def 
add_document_to_project( +@router.post("/{project_id}/add_record/{rec_id}") +def add_record_to_project( project_id: int, - doc_id: int, + rec_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db_dependency) ): p = db.query(Project).filter(Project.id == project_id).first() if not p: raise HTTPException(status_code=404, detail="Project not found") - d = db.query(DocumentImage).filter(DocumentImage.id == doc_id).first() - if not d: - raise HTTPException(status_code=404, detail="Document not found") - d.project_id = p.id - db.add(d) + r = db.query(Record).filter(Record.id == rec_id).first() + if not r: + raise HTTPException(status_code=404, detail="Record not found") + r.project_id = p.id + db.add(r) db.commit() - return {"detail": "document added"} + return {"detail": "record added"} -@router.post("/{project_id}/remove_document/{doc_id}") -def remove_document_from_project( +@router.post("/{project_id}/remove_record/{rec_id}") +def remove_record_from_project( project_id: int, - doc_id: int, + rec_id: int, current_user: User = Depends(get_current_user), db: Session = Depends(get_db_dependency) ): p = db.query(Project).filter(Project.id == project_id).first() if not p: raise HTTPException(status_code=404, detail="Project not found") - d = db.query(DocumentImage).filter(DocumentImage.id == doc_id, DocumentImage.project_id == p.id).first() - if not d: - raise HTTPException(status_code=404, detail="Document not found on this project") - d.project_id = None - db.add(d) + r = db.query(Record).filter(Record.id == rec_id, Record.project_id == p.id).first() + if not r: + raise HTTPException(status_code=404, detail="Record not found on this project") + r.project_id = None + db.add(r) db.commit() - return {"detail": "document removed"} + return {"detail": "record removed"} @router.put("/{project_id}", response_model=ProjectRead) @@ -139,15 +139,15 @@ def delete_project( """ Delete a project. - Note: This only deletes the database record. 
Associated documents + Note: This only deletes the database record. Associated records are unlinked but not deleted. Filesystem cleanup must be done separately. """ p = db.query(Project).filter(Project.id == project_id).first() if not p: raise HTTPException(status_code=404, detail="Project not found") - # Unlink all documents from this project - db.query(DocumentImage).filter(DocumentImage.project_id == project_id).update( + # Unlink all records from this project + db.query(Record).filter(Record.project_id == project_id).update( {"project_id": None} ) @@ -196,22 +196,22 @@ def initialize_project_filesystem( return ProjectInitResponse(success=False, error=str(e)) -@router.get("/{project_id}/documents", response_model=List) -def list_project_documents( +@router.get("/{project_id}/records", response_model=List) +def list_project_records( project_id: int, skip: int = Query(default=0, ge=0), limit: int = Query(default=100, ge=1, le=1000), current_user: User = Depends(get_current_user), db: Session = Depends(get_db_dependency) ): - """List all documents associated with a project.""" + """List all records associated with a project.""" p = db.query(Project).filter(Project.id == project_id).first() if not p: raise HTTPException(status_code=404, detail="Project not found") - from app.schemas.document import DocumentRead - docs = db.query(DocumentImage).filter( - DocumentImage.project_id == project_id + from app.schemas.record import RecordRead + recs = db.query(Record).filter( + Record.project_id == project_id ).offset(skip).limit(limit).all() - return [DocumentRead.model_validate(d) for d in docs] \ No newline at end of file + return [RecordRead.model_validate(r) for r in recs] \ No newline at end of file diff --git a/app/api/records.py b/app/api/records.py new file mode 100644 index 0000000..8d358cc --- /dev/null +++ b/app/api/records.py @@ -0,0 +1,393 @@ +from fastapi import APIRouter, Depends, HTTPException, Query, UploadFile, File +from fastapi.responses import 
FileResponse +from typing import List, Optional +from sqlalchemy.orm import Session, joinedload +from sqlalchemy.exc import IntegrityError +from pathlib import Path +import shutil +import uuid +import logging + +from app.api.deps import get_db_dependency +from app.api.auth import get_current_user +from app.models.record import Record, RecordImage, ExifData +from app.models.camera import CameraSettings +from app.models.user import User +from app.schemas.record import ( + RecordCreate, RecordRead, RecordUpdate, + RecordImageCreate, RecordImageRead, RecordImageUpdate +) +from app.core.config import settings +from app.core.thumbnail import generate_thumbnail, delete_thumbnail + +router = APIRouter() +logger = logging.getLogger(__name__) + + +# ============================================================================== +# Record endpoints (archival documents/objects) +# ============================================================================== + +@router.post("/", response_model=RecordRead) +def create_record( + rec_in: RecordCreate, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Create a new archival record (document/object like a book, map, document).""" + rec = Record( + title=rec_in.title, + description=rec_in.description, + object_typology=rec_in.object_typology, + author=rec_in.author, + material=rec_in.material, + date=rec_in.date, + custom_attributes=rec_in.custom_attributes, + project_id=rec_in.project_id, + collection_id=rec_in.collection_id, + created_by=rec_in.created_by or current_user.username, + ) + try: + db.add(rec) + db.commit() + db.refresh(rec) + except IntegrityError as e: + db.rollback() + raise HTTPException(status_code=409, detail=f"Database integrity error: {str(e)}") + + return RecordRead.model_validate(rec) + + +@router.get("/", response_model=List[RecordRead]) +def list_records( + skip: int = Query(default=0, ge=0), + limit: int = Query(default=100, ge=1, le=1000), + project_id: 
Optional[int] = Query(default=None, description="Filter by project ID"), + collection_id: Optional[int] = Query(default=None, description="Filter by collection ID"), + object_typology: Optional[str] = Query(default=None, description="Filter by object type"), + orphaned: Optional[bool] = Query(default=None, description="If true, return only records with no project or collection"), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """List all records with optional filtering.""" + query = db.query(Record).options(joinedload(Record.images)) + + # Apply filters if provided + if project_id is not None: + query = query.filter(Record.project_id == project_id) + if collection_id is not None: + query = query.filter(Record.collection_id == collection_id) + if object_typology is not None: + query = query.filter(Record.object_typology == object_typology) + if orphaned is True: + query = query.filter(Record.project_id == None, Record.collection_id == None) + + recs = query.offset(skip).limit(limit).all() + return [RecordRead.model_validate(r) for r in recs] + + +@router.get("/{rec_id}", response_model=RecordRead) +def get_record( + rec_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Get a specific record with all its images.""" + rec = db.query(Record).options(joinedload(Record.images)).filter(Record.id == rec_id).first() + if not rec: + raise HTTPException(status_code=404, detail="Record not found") + return RecordRead.model_validate(rec) + + +@router.patch("/{rec_id}", response_model=RecordRead) +def update_record( + rec_id: int, + payload: RecordUpdate, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Update a record's descriptive metadata.""" + rec = db.query(Record).filter(Record.id == rec_id).first() + if not rec: + raise HTTPException(status_code=404, detail="Record not found") + + # Update only provided 
fields + for field, value in payload.model_dump(exclude_unset=True).items(): + setattr(rec, field, value) + + db.add(rec) + db.commit() + db.refresh(rec) + return RecordRead.model_validate(rec) + + +@router.delete("/{rec_id}") +def delete_record( + rec_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Delete a record and all its associated images (CASCADE).""" + rec = db.query(Record).filter(Record.id == rec_id).first() + if not rec: + raise HTTPException(status_code=404, detail="Record not found") + + # Clean up image files and thumbnails + for img in rec.images: + if img.file_path: + Path(img.file_path).unlink(missing_ok=True) + if img.thumbnail_path: + delete_thumbnail(img.thumbnail_path) + + image_count = len(rec.images) # capture before delete: instance is expired/detached after commit + db.delete(rec); db.commit() + return {"detail": f"Record {rec_id} and {image_count} images deleted"} + + +# ============================================================================== +# RecordImage endpoints (individual captures/images) +# ============================================================================== + +@router.post("/{rec_id}/images", response_model=RecordImageRead) +async def add_image_to_record( + rec_id: int, + file: UploadFile = File(...), + capture_id: Optional[str] = None, + pair_id: Optional[str] = None, + sequence: Optional[int] = None, + role: Optional[str] = None, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """ + Upload and attach an image to an existing record. + This is used when adding captures to a multi-page document. + """ + # Verify record exists + rec = db.query(Record).filter(Record.id == rec_id).first() + if not rec: + raise HTTPException(status_code=404, detail="Record not found") + + # Validate file type + allowed_types = {"image/jpeg", "image/png", "image/tiff", "image/webp"} + if file.content_type not in allowed_types: + raise HTTPException( + status_code=400, + detail=f"Invalid file type. 
Allowed: {', '.join(allowed_types)}" + ) + + # Create upload directory + uploads_dir = settings.data_dir / "uploads" + uploads_dir.mkdir(parents=True, exist_ok=True) + + # Generate unique filename + ext = Path(file.filename).suffix if file.filename else ".jpg" + unique_filename = f"{uuid.uuid4().hex}{ext}" + file_path = uploads_dir / unique_filename + + # Save file + try: + with open(file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + except Exception as e: + logger.exception(f"Failed to save uploaded file: {e}") + raise HTTPException(status_code=500, detail="Failed to save file") + + # Get file info + file_size = file_path.stat().st_size + file_format = ext.lstrip(".").lower() + + # Try to get image dimensions + resolution_width = None + resolution_height = None + try: + from PIL import Image + with Image.open(file_path) as img: + resolution_width, resolution_height = img.size + except Exception: + pass # PIL not available or invalid image + + # Generate thumbnail + thumbnail_path = None + try: + thumbnails_dir = settings.data_dir / "thumbnails" + thumbnail_path = generate_thumbnail(file_path, thumbnails_dir) + except Exception as e: + logger.warning(f"Failed to generate thumbnail for {file.filename}: {e}") + + # Create RecordImage + img = RecordImage( + record_id=rec_id, + filename=file.filename or unique_filename, + file_path=str(file_path), + thumbnail_path=thumbnail_path, + file_size=file_size, + format=file_format, + resolution_width=resolution_width, + resolution_height=resolution_height, + capture_id=capture_id, + pair_id=pair_id, + sequence=sequence, + role=role, + uploaded_by=current_user.username, + ) + + db.add(img) + db.commit() + db.refresh(img) + + return RecordImageRead.model_validate(img) + + +@router.get("/{rec_id}/images", response_model=List[RecordImageRead]) +def list_record_images( + rec_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Get all images for a specific 
record, ordered by sequence.""" + # Verify record exists + rec = db.query(Record).filter(Record.id == rec_id).first() + if not rec: + raise HTTPException(status_code=404, detail="Record not found") + + images = db.query(RecordImage).filter( + RecordImage.record_id == rec_id + ).order_by(RecordImage.sequence.nullslast(), RecordImage.created_at).all() + + return [RecordImageRead.model_validate(img) for img in images] + + +@router.get("/images/{img_id}", response_model=RecordImageRead) +def get_image( + img_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Get details about a specific image.""" + img = db.query(RecordImage).filter(RecordImage.id == img_id).first() + if not img: + raise HTTPException(status_code=404, detail="Image not found") + return RecordImageRead.model_validate(img) + + +@router.patch("/images/{img_id}", response_model=RecordImageRead) +def update_image( + img_id: int, + payload: RecordImageUpdate, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Update image metadata (sequence, role, etc.).""" + img = db.query(RecordImage).filter(RecordImage.id == img_id).first() + if not img: + raise HTTPException(status_code=404, detail="Image not found") + + for field, value in payload.model_dump(exclude_unset=True).items(): + setattr(img, field, value) + + db.add(img) + db.commit() + db.refresh(img) + return RecordImageRead.model_validate(img) + + +@router.delete("/images/{img_id}") +def delete_image( + img_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Delete a specific image from a record.""" + img = db.query(RecordImage).filter(RecordImage.id == img_id).first() + if not img: + raise HTTPException(status_code=404, detail="Image not found") + + # Clean up files + if img.file_path: + Path(img.file_path).unlink(missing_ok=True) + if img.thumbnail_path: + 
delete_thumbnail(img.thumbnail_path) + + db.delete(img) + db.commit() + return {"detail": "Image deleted"} + + +@router.get("/images/{img_id}/file") +def download_image_file( + img_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Download the actual image file.""" + img = db.query(RecordImage).filter(RecordImage.id == img_id).first() + if not img: + raise HTTPException(status_code=404, detail="Image not found") + + if not img.file_path: + raise HTTPException(status_code=404, detail="Image has no associated file") + + file_path = Path(img.file_path) + if not file_path.exists(): + raise HTTPException(status_code=404, detail="File not found on disk") + + # Determine media type + media_type_map = { + "jpg": "image/jpeg", + "jpeg": "image/jpeg", + "png": "image/png", + "tiff": "image/tiff", + "tif": "image/tiff", + "webp": "image/webp", + } + ext = file_path.suffix.lstrip(".").lower() + media_type = media_type_map.get(ext, "application/octet-stream") + + return FileResponse( + path=file_path, + filename=img.filename, + media_type=media_type + ) + + +@router.get("/images/{img_id}/thumbnail") +def get_image_thumbnail( + img_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db_dependency) +): + """Download the thumbnail for an image. 
Generates it on demand if missing.""" + img = db.query(RecordImage).filter(RecordImage.id == img_id).first() + if not img: + raise HTTPException(status_code=404, detail="Image not found") + + # If thumbnail is missing or the file was deleted, try to generate it now + thumbnail_path = Path(img.thumbnail_path) if img.thumbnail_path else None + if thumbnail_path is None or not thumbnail_path.exists(): + if not img.file_path: + raise HTTPException(status_code=404, detail="Image has no source file for thumbnail generation") + source_path = Path(img.file_path) + if not source_path.exists(): + raise HTTPException(status_code=404, detail="Source image file not found on disk") + try: + # Store alongside the source: PROJECTS_ROOT/{project}/images/thumbnails/ + thumbnails_dir = source_path.parent.parent / "thumbnails" + generated = generate_thumbnail(source_path, thumbnails_dir) + if generated: + img.thumbnail_path = generated + db.add(img) + db.commit() + thumbnail_path = Path(generated) + except Exception as e: + logger.warning(f"On-demand thumbnail generation failed for image {img_id}: {e}") + raise HTTPException(status_code=500, detail="Failed to generate thumbnail") + + if thumbnail_path is None or not thumbnail_path.exists(): + raise HTTPException(status_code=404, detail="Thumbnail could not be generated") + + return FileResponse( + path=thumbnail_path, + filename=f"{Path(img.filename).stem}_thumb.jpg", + media_type="image/jpeg" + ) diff --git a/app/api/system.py b/app/api/system.py new file mode 100644 index 0000000..21e535a --- /dev/null +++ b/app/api/system.py @@ -0,0 +1,33 @@ +import re +import subprocess + +from fastapi import APIRouter, Depends + +from app.api.auth import get_current_user +from app.models.user import User + +router = APIRouter() + + +@router.get("/temperature") +def get_temperature(current_user: User = Depends(get_current_user)): + """Get Raspberry Pi CPU temperature via vcgencmd measure_temp. 
+ + Returns temperature in Celsius, or available=False if vcgencmd is not + present (e.g. development environment without camera hardware). + """ + try: + result = subprocess.run( + ["vcgencmd", "measure_temp"], + capture_output=True, + text=True, + timeout=5, + ) + # Output format: temp=47.2'C + match = re.search(r"temp=([\d.]+)", result.stdout) + if match: + temperature = float(match.group(1)) + return {"temperature": temperature, "unit": "C", "available": True} + return {"temperature": None, "unit": "C", "available": False} + except (FileNotFoundError, subprocess.TimeoutExpired, Exception): + return {"temperature": None, "unit": "C", "available": False} diff --git a/app/core/db.py b/app/core/db.py index db819ff..c7ab7df 100644 --- a/app/core/db.py +++ b/app/core/db.py @@ -47,9 +47,10 @@ def init_db() -> None: ImportError: If any model cannot be imported (app should not start) """ # Import model modules so they register with Base - import app.models.document # noqa: F401 + import app.models.record # noqa: F401 import app.models.camera # noqa: F401 import app.models.project # noqa: F401 + import app.models.collection # noqa: F401 import app.models.user # noqa: F401 # Note: Tables are created via Alembic migrations, not create_all() diff --git a/app/core/thumbnail.py b/app/core/thumbnail.py index f447b9c..10ea308 100644 --- a/app/core/thumbnail.py +++ b/app/core/thumbnail.py @@ -1,7 +1,7 @@ """ -Thumbnail generation utilities for document images. +Thumbnail generation utilities for record images. -This module provides functions for generating and managing thumbnails of document images. +This module provides functions for generating and managing thumbnails of record images. 
""" import logging diff --git a/app/main.py b/app/main.py index 81d0477..c5600b3 100644 --- a/app/main.py +++ b/app/main.py @@ -6,10 +6,12 @@ from app.core.db import init_db # routers -from app.api.documents import router as documents_router +from app.api.records import router as records_router from app.api.cameras import router as cameras_router from app.api.projects import router as projects_router +from app.api.collections import router as collections_router from app.api.auth import router as auth_router +from app.api.system import router as system_router # Define lifespan event to initialize the database @asynccontextmanager @@ -17,24 +19,25 @@ async def lifespan(app: FastAPI): init_db() yield -app = FastAPI() - # Create FastAPI app with lifespan app = FastAPI(lifespan=lifespan) -# Allow the Svelte dev server to call the API +# Allow the Svelte frontend to call the API +# :5173 = Vite dev server, :3000 = production Node server app.add_middleware( CORSMiddleware, - allow_origins=["http://localhost:5173"], + allow_origins=["http://localhost:5173", "http://localhost:3000"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"], ) -app.include_router(documents_router, prefix="/documents", tags=["documents"]) +app.include_router(records_router, prefix="/records", tags=["records"]) app.include_router(cameras_router, prefix="/cameras", tags=["cameras"]) app.include_router(projects_router, prefix="/projects", tags=["projects"]) +app.include_router(collections_router, prefix="/collections", tags=["collections"]) app.include_router(auth_router, prefix="/auth", tags=["auth"]) +app.include_router(system_router, prefix="/system", tags=["system"]) @app.get("/health") diff --git a/app/models/camera.py b/app/models/camera.py index df7f073..f23e028 100644 --- a/app/models/camera.py +++ b/app/models/camera.py @@ -10,7 +10,7 @@ class CameraSettings(Base): __tablename__ = "camera_settings" id = Column(Integer, primary_key=True, index=True) - document_image_id = 
Column(Integer, ForeignKey("document_images.id"), unique=True, nullable=False) + record_image_id = Column(Integer, ForeignKey("record_images.id"), unique=True, nullable=False) camera_model = Column(String(255), nullable=True) camera_manufacturer = Column(String(255), nullable=True) @@ -30,4 +30,4 @@ class CameraSettings(Base): created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False) - document_image = relationship("DocumentImage", back_populates="camera_settings") + record_image = relationship("RecordImage", back_populates="camera_settings") diff --git a/app/models/collection.py b/app/models/collection.py new file mode 100644 index 0000000..2251d03 --- /dev/null +++ b/app/models/collection.py @@ -0,0 +1,41 @@ +from datetime import datetime, timezone +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, JSON, CheckConstraint +from sqlalchemy.orm import relationship + +from app.core.db import Base + + +class Collection(Base): + __tablename__ = "collections" + + id = Column(Integer, primary_key=True, index=True) + name = Column(String(255), nullable=False, index=True) + description = Column(Text, nullable=True) + + # Type of collection (e.g., "fonds", "series", "box", "folder", "volume") + collection_type = Column(String(50), nullable=True) + + # Parent relationships - either under a project OR another collection + project_id = Column(Integer, ForeignKey("projects.id", ondelete="CASCADE"), nullable=True) + parent_collection_id = Column(Integer, ForeignKey("collections.id", ondelete="CASCADE"), nullable=True) + + # Metadata for archival-specific fields (flexible JSON) + archival_metadata = Column(JSON, nullable=True) + + created_by = Column(String(255), nullable=True) + created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False) + updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc)) + + # Relationships + project = 
relationship("Project", back_populates="collections") + parent_collection = relationship("Collection", remote_side=[id], back_populates="child_collections") + child_collections = relationship("Collection", back_populates="parent_collection", cascade="all, delete-orphan") + records = relationship("Record", back_populates="collection") + + # Constraint: must have either project_id OR parent_collection_id (but not both) + __table_args__ = ( + CheckConstraint( + '(project_id IS NOT NULL AND parent_collection_id IS NULL) OR (project_id IS NULL AND parent_collection_id IS NOT NULL)', + name='check_collection_parent' + ), + ) diff --git a/app/models/document.py b/app/models/document.py deleted file mode 100644 index d33514b..0000000 --- a/app/models/document.py +++ /dev/null @@ -1,77 +0,0 @@ -from datetime import datetime, timezone -from sqlalchemy import Column, Integer, String, DateTime, Text, ForeignKey -from sqlalchemy.orm import relationship - -from app.core.db import Base - - -class DocumentImage(Base): - __tablename__ = "document_images" - - id = Column(Integer, primary_key=True, index=True) - filename = Column(String(255), unique=True, index=True, nullable=False) - title = Column(String(255), nullable=True) - description = Column(Text, nullable=True) - file_path = Column(String(512), nullable=False) - thumbnail_path = Column(String(512), nullable=True) - file_size = Column(Integer, nullable=True) - format = Column(String(50), nullable=False) - resolution_width = Column(Integer, nullable=True) - resolution_height = Column(Integer, nullable=True) - project_id = Column(Integer, ForeignKey("projects.id"), nullable=True) - # Object typology: book, dossier, document, map, planimetry, other - object_typology = Column(String(50), nullable=True) - author = Column(String(255), nullable=True) - material = Column(String(255), nullable=True) - date = Column(String(50), nullable=True) - custom_attributes = Column(Text, nullable=True) - created_at = Column(DateTime, 
default=lambda: datetime.now(timezone.utc), nullable=False) - modified_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc)) - uploaded_by = Column(String(255), nullable=True) - - camera_settings = relationship("CameraSettings", back_populates="document_image", uselist=False, cascade="all, delete-orphan") - exif_data = relationship("ExifData", back_populates="document_image", uselist=False, cascade="all, delete-orphan") - project = relationship("Project", back_populates="documents") - - -class ExifData(Base): - __tablename__ = "exif_data" - - id = Column(Integer, primary_key=True, index=True) - document_image_id = Column(Integer, ForeignKey("document_images.id"), nullable=False) - - make = Column(String(255), nullable=True) - model = Column(String(255), nullable=True) - orientation = Column(Integer, nullable=True) - x_resolution = Column(Integer, nullable=True) - y_resolution = Column(Integer, nullable=True) - resolution_unit = Column(String(50), nullable=True) - software = Column(String(255), nullable=True) - datetime_original = Column(DateTime, nullable=True) - datetime_digitized = Column(DateTime, nullable=True) - - thumbnail_data = Column(String(255), nullable=True) - - exposure_time = Column(String(50), nullable=True) - f_number = Column(String(50), nullable=True) - iso_speed_ratings = Column(Integer, nullable=True) - focal_length = Column(String(50), nullable=True) - focal_length_in_35mm = Column(Integer, nullable=True) - lens_model = Column(String(255), nullable=True) - flash = Column(String(100), nullable=True) - white_balance = Column(String(100), nullable=True) - exposure_compensation = Column(String(50), nullable=True) - metering_mode = Column(String(100), nullable=True) - light_source = Column(String(100), nullable=True) - color_space = Column(String(100), nullable=True) - - gps_latitude = Column(String(100), nullable=True) - gps_longitude = Column(String(100), nullable=True) - gps_altitude = 
Column(String(100), nullable=True) - gps_timestamp = Column(DateTime, nullable=True) - - raw_exif = Column(Text, nullable=True) - - created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False) - - document_image = relationship("DocumentImage", back_populates="exif_data") diff --git a/app/models/project.py b/app/models/project.py index c4e18e5..d6c090d 100644 --- a/app/models/project.py +++ b/app/models/project.py @@ -14,4 +14,5 @@ class Project(Base): created_by = Column(String(255), nullable=True) created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False) - documents = relationship("DocumentImage", back_populates="project") + records = relationship("Record", back_populates="project") + collections = relationship("Collection", back_populates="project", cascade="all, delete-orphan") diff --git a/app/models/record.py b/app/models/record.py new file mode 100644 index 0000000..719815f --- /dev/null +++ b/app/models/record.py @@ -0,0 +1,130 @@ +from datetime import datetime, timezone +from sqlalchemy import Column, Integer, String, DateTime, Text, ForeignKey, CheckConstraint +from sqlalchemy.orm import relationship + +from app.core.db import Base + + +class Record(Base): + """ + Represents a conceptual archival document/object (book, map, document, etc.). + A Record can have multiple associated images (captures). 
+ """ + __tablename__ = "records" + + id = Column(Integer, primary_key=True, index=True) + title = Column(String(255), nullable=False) + description = Column(Text, nullable=True) + + # Archival/descriptive metadata + object_typology = Column(String(50), nullable=True) # book, dossier, document, map, planimetry, other + author = Column(String(255), nullable=True) + material = Column(String(255), nullable=True) + date = Column(String(50), nullable=True) + custom_attributes = Column(Text, nullable=True) # JSON string for custom fields + + # Organizational hierarchy + project_id = Column(Integer, ForeignKey("projects.id", ondelete="SET NULL"), nullable=True) + collection_id = Column(Integer, ForeignKey("collections.id", ondelete="SET NULL"), nullable=True) + + # Audit fields + created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False) + modified_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc)) + created_by = Column(String(255), nullable=True) + + # Relationships + images = relationship("RecordImage", back_populates="record", cascade="all, delete-orphan") + project = relationship("Project", back_populates="records") + collection = relationship("Collection", back_populates="records") + + # Constraint: must have either project_id OR collection_id (or neither, but not both) + __table_args__ = ( + CheckConstraint( + 'NOT (project_id IS NOT NULL AND collection_id IS NOT NULL)', + name='check_record_single_parent' + ), + ) + + +class RecordImage(Base): + """ + Represents a single captured image file that belongs to a Record. + Links to the capture manifest via capture_id. 
+ """ + __tablename__ = "record_images" + + id = Column(Integer, primary_key=True, index=True) + + # Link to parent Record + record_id = Column(Integer, ForeignKey("records.id", ondelete="CASCADE"), nullable=False, index=True) + + # Capture traceability - links to manifest.jsonl entries + capture_id = Column(String(36), nullable=True, index=True) # UUID from CaptureRecord + pair_id = Column(String(36), nullable=True, index=True) # Groups dual-camera captures + + # Ordering/sequencing within the record + sequence = Column(Integer, nullable=True) # Page number, capture order, etc. + role = Column(String(50), nullable=True) # "left", "right", "single", "overview" + + # File metadata + filename = Column(String(255), nullable=False, index=True) + file_path = Column(String(512), nullable=False) + thumbnail_path = Column(String(512), nullable=True) + file_size = Column(Integer, nullable=True) + format = Column(String(50), nullable=False) + + # Image technical properties + resolution_width = Column(Integer, nullable=True) + resolution_height = Column(Integer, nullable=True) + + # Audit fields + created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False) + uploaded_by = Column(String(255), nullable=True) + + # Relationships + record = relationship("Record", back_populates="images") + camera_settings = relationship("CameraSettings", back_populates="record_image", uselist=False, cascade="all, delete-orphan") + exif_data = relationship("ExifData", back_populates="record_image", uselist=False, cascade="all, delete-orphan") + + +class ExifData(Base): + __tablename__ = "exif_data" + + id = Column(Integer, primary_key=True, index=True) + record_image_id = Column(Integer, ForeignKey("record_images.id"), nullable=False) + + make = Column(String(255), nullable=True) + model = Column(String(255), nullable=True) + orientation = Column(Integer, nullable=True) + x_resolution = Column(Integer, nullable=True) + y_resolution = Column(Integer, nullable=True) + 
resolution_unit = Column(String(50), nullable=True) + software = Column(String(255), nullable=True) + datetime_original = Column(DateTime, nullable=True) + datetime_digitized = Column(DateTime, nullable=True) + + thumbnail_data = Column(String(255), nullable=True) + + exposure_time = Column(String(50), nullable=True) + f_number = Column(String(50), nullable=True) + iso_speed_ratings = Column(Integer, nullable=True) + focal_length = Column(String(50), nullable=True) + focal_length_in_35mm = Column(Integer, nullable=True) + lens_model = Column(String(255), nullable=True) + flash = Column(String(100), nullable=True) + white_balance = Column(String(100), nullable=True) + exposure_compensation = Column(String(50), nullable=True) + metering_mode = Column(String(100), nullable=True) + light_source = Column(String(100), nullable=True) + color_space = Column(String(100), nullable=True) + + gps_latitude = Column(String(100), nullable=True) + gps_longitude = Column(String(100), nullable=True) + gps_altitude = Column(String(100), nullable=True) + gps_timestamp = Column(DateTime, nullable=True) + + raw_exif = Column(Text, nullable=True) + + created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False) + + record_image = relationship("RecordImage", back_populates="exif_data") diff --git a/app/schemas/camera.py b/app/schemas/camera.py index b53d50f..ceb9625 100644 --- a/app/schemas/camera.py +++ b/app/schemas/camera.py @@ -4,7 +4,7 @@ class CameraSettingsBase(BaseModel): - document_image_id: Optional[int] = None + record_image_id: Optional[int] = None camera_model: Optional[str] = None camera_manufacturer: Optional[str] = None lens_model: Optional[str] = None @@ -19,7 +19,7 @@ class CameraSettingsBase(BaseModel): class CameraSettingsCreate(CameraSettingsBase): - document_image_id: int + record_image_id: int class CameraSettingsUpdate(BaseModel): diff --git a/app/schemas/collection.py b/app/schemas/collection.py new file mode 100644 index 
0000000..63fa20a --- /dev/null +++ b/app/schemas/collection.py @@ -0,0 +1,58 @@ +from typing import Optional, Dict, Any, List +from pydantic import BaseModel, Field +from datetime import datetime + + +class CollectionBase(BaseModel): + name: str = Field(..., min_length=1, max_length=255) + description: Optional[str] = None + collection_type: Optional[str] = Field(None, max_length=50, description="Type: fonds, series, box, folder, volume, etc.") + archival_metadata: Optional[Dict[str, Any]] = Field(None, description="Flexible JSON metadata for archival fields") + + +class CollectionCreate(CollectionBase): + """Create a collection. Must specify either project_id OR parent_collection_id.""" + project_id: Optional[int] = Field(None, description="Parent project (for top-level collections)") + parent_collection_id: Optional[int] = Field(None, description="Parent collection (for nested subcollections)") + created_by: Optional[str] = None + + class Config: + json_schema_extra = { + "example": { + "name": "Box 42", + "description": "Contains correspondence from 1920-1925", + "collection_type": "box", + "project_id": 1, + "archival_metadata": {"box_number": "42", "shelf_location": "A-3-2"} + } + } + + +class CollectionUpdate(BaseModel): + """Update a collection. 
All fields optional.""" + name: Optional[str] = Field(None, min_length=1, max_length=255) + description: Optional[str] = None + collection_type: Optional[str] = Field(None, max_length=50) + archival_metadata: Optional[Dict[str, Any]] = None + parent_collection_id: Optional[int] = Field(None, description="Move to different parent collection") + + +class CollectionRead(CollectionBase): + id: int + project_id: Optional[int] = None + parent_collection_id: Optional[int] = None + created_by: Optional[str] = None + created_at: datetime + updated_at: Optional[datetime] = None + + class Config: + from_attributes = True + + +class CollectionWithChildren(CollectionRead): + """Collection with nested child collections (for hierarchical views).""" + child_collections: List["CollectionRead"] = [] + record_count: Optional[int] = None + + class Config: + from_attributes = True diff --git a/app/schemas/document.py b/app/schemas/record.py similarity index 52% rename from app/schemas/document.py rename to app/schemas/record.py index 70a8cc8..21be920 100644 --- a/app/schemas/document.py +++ b/app/schemas/record.py @@ -1,9 +1,13 @@ from __future__ import annotations -from typing import Optional +from typing import Optional, List from pydantic import BaseModel from datetime import datetime +# ============================================================================== +# ExifData Schemas +# ============================================================================== + class ExifDataBase(BaseModel): make: Optional[str] = None model: Optional[str] = None @@ -14,6 +18,22 @@ class ExifDataBase(BaseModel): raw_exif: Optional[str] = None +class ExifDataCreate(ExifDataBase): + pass + + +class ExifDataRead(ExifDataBase): + id: int + created_at: Optional[datetime] + + class Config: + from_attributes = True + + +# ============================================================================== +# CameraSettings Schemas +# 
============================================================================== + class CameraSettingsBase(BaseModel): camera_model: Optional[str] = None camera_manufacturer: Optional[str] = None @@ -31,71 +51,103 @@ class CameraSettingsCreate(CameraSettingsBase): pass -class ExifDataCreate(ExifDataBase): - pass +class CameraSettingsRead(CameraSettingsBase): + id: int + record_image_id: int + created_at: Optional[datetime] + + class Config: + from_attributes = True + +# ============================================================================== +# RecordImage Schemas (Individual capture/image) +# ============================================================================== -class DocumentBase(BaseModel): +class RecordImageBase(BaseModel): filename: str - title: Optional[str] = None - description: Optional[str] = None file_path: str thumbnail_path: Optional[str] = None file_size: Optional[int] = None format: str resolution_width: Optional[int] = None resolution_height: Optional[int] = None + capture_id: Optional[str] = None + pair_id: Optional[str] = None + sequence: Optional[int] = None + role: Optional[str] = None # "left", "right", "single", "overview" uploaded_by: Optional[str] = None - object_typology: Optional[str] = None # book, dossier, document, map, planimetry, other - author: Optional[str] = None - material: Optional[str] = None - date: Optional[str] = None - custom_attributes: Optional[str] = None # JSON string for custom fields -class DocumentCreate(DocumentBase): +class RecordImageCreate(RecordImageBase): camera_settings: Optional[CameraSettingsCreate] = None exif_data: Optional[ExifDataCreate] = None -class ExifDataRead(ExifDataBase): +class RecordImageUpdate(BaseModel): + sequence: Optional[int] = None + role: Optional[str] = None + thumbnail_path: Optional[str] = None + + +class RecordImageRead(RecordImageBase): id: int + record_id: int created_at: Optional[datetime] + camera_settings: Optional[CameraSettingsRead] = None + exif_data: 
Optional[ExifDataRead] = None class Config: from_attributes = True -class CameraSettingsRead(CameraSettingsBase): - id: int - document_image_id: int - created_at: Optional[datetime] +# ============================================================================== +# Record Schemas (Conceptual document/object) +# ============================================================================== - class Config: - from_attributes = True +class RecordBase(BaseModel): + title: str + description: Optional[str] = None + object_typology: Optional[str] = None # book, dossier, document, map, planimetry, other + author: Optional[str] = None + material: Optional[str] = None + date: Optional[str] = None + custom_attributes: Optional[str] = None # JSON string for custom fields -class DocumentUpdate(BaseModel): +class RecordCreate(RecordBase): + project_id: Optional[int] = None + collection_id: Optional[int] = None + created_by: Optional[str] = None + + +class RecordUpdate(BaseModel): title: Optional[str] = None description: Optional[str] = None - file_size: Optional[int] = None - resolution_width: Optional[int] = None - resolution_height: Optional[int] = None - project_id: Optional[int] = None object_typology: Optional[str] = None author: Optional[str] = None material: Optional[str] = None date: Optional[str] = None custom_attributes: Optional[str] = None + project_id: Optional[int] = None + collection_id: Optional[int] = None -class DocumentRead(DocumentBase): +class RecordRead(RecordBase): id: int project_id: Optional[int] = None + collection_id: Optional[int] = None + created_by: Optional[str] = None created_at: Optional[datetime] modified_at: Optional[datetime] - camera_settings: Optional[CameraSettingsRead] = None - exif_data: Optional[ExifDataRead] = None + images: List[RecordImageRead] = [] class Config: from_attributes = True + + +# ============================================================================== +# Legacy compatibility type alias (for gradual migration) +# 
============================================================================== +# These can help during API transition +RecordWithImages = RecordRead # Explicit name for record with images loaded diff --git a/app/schemas/user.py b/app/schemas/user.py index 1fb429d..de0909c 100644 --- a/app/schemas/user.py +++ b/app/schemas/user.py @@ -18,6 +18,11 @@ def validate_email(cls, v: str) -> str: return v.lower() +class UserLogin(BaseModel): + username: str + password: str + + class UserRead(BaseModel): id: int username: str diff --git a/capture/service.py b/capture/service.py index bdd9fc1..fbe8325 100644 --- a/capture/service.py +++ b/capture/service.py @@ -157,7 +157,7 @@ def single_capture_image( project_name: str, camera_config: CameraConfig, check_camera: bool = True, - include_resolution: bool = False) -> str: + include_resolution: bool = False) -> tuple: """ Capture an image from a single camera. @@ -167,7 +167,7 @@ def single_capture_image( check_camera (bool): Whether to check camera availability before capture. include_resolution (bool): Include resolution in auto-generated filename. Returns: - str: The path to the captured image file. + tuple: (output_path, capture_id, pair_id) - path to image and manifest IDs. """ if check_camera and not is_camera_connected(camera_config.camera_index): @@ -196,10 +196,10 @@ def single_capture_image( append_manifest_record(project_root, record) subprocess_logger.info( - f"Single capture: cam{camera_config.camera_index}={elapsed_time:.3f}s" + f"Single capture: cam{camera_config.camera_index}={elapsed_time:.3f}s, capture_id={record.capture_id}" ) - return output_path + return output_path, record.capture_id, record.pair_id def dual_capture_image( project_name: str, @@ -219,12 +219,12 @@ def dual_capture_image( include_resolution (bool): Include resolution in auto-generated filenames. stagger_ms (int): Delay in ms between starting cameras (default is 20ms). 
Returns: - tuple: (path1, path2, timing_dict) with paths and timing metrics. + tuple: (path1, path2, capture_id, pair_id) - paths to images and manifest IDs. Example: cam1 = CameraConfig(camera_index=0, vflip=True, awb="auto") cam2 = CameraConfig(camera_index=1, hflip=True, awb="indoor") - path1, path2, timing = dual_capture_image("myproject", cam1, cam2) + path1, path2, capture_id, pair_id = dual_capture_image("myproject", cam1, cam2) """ if check_camera: @@ -296,10 +296,11 @@ def capture_with_timing(config, fname): append_manifest_record(project_root, record) subprocess_logger.info( - f"Parallel capture: cam{cam1_config.camera_index}={time1:.3f}s, cam{cam2_config.camera_index}={time2:.3f}s, stagger={stagger_ms}ms" + f"Parallel capture: cam{cam1_config.camera_index}={time1:.3f}s, cam{cam2_config.camera_index}={time2:.3f}s, " + f"stagger={stagger_ms}ms, capture_id={record.capture_id}, pair_id={record.pair_id}" ) - return img1_path, img2_path + return img1_path, img2_path, record.capture_id, record.pair_id def main(): """ diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 0000000..0088958 --- /dev/null +++ b/pixi.toml @@ -0,0 +1,75 @@ +[workspace] +name = "digitization-toolkit-backend" +version = "0.1.0" +description = "Backend for digitization toolkit - native camera support" +channels = ["conda-forge"] +platforms = ["linux-aarch64", "linux-64"] + +[dependencies] +python = "~=3.11.0" +# Web framework +fastapi = ">=0.116.2" +uvicorn = ">=0.35.0" +python-multipart = ">=0.0.20" +starlette = ">=0.48.0" + +# Database +sqlalchemy = ">=2.0.25" +psycopg = ">=3.2.12" +alembic = ">=1.14.0" + +# Configuration & validation +pydantic = ">=2.11.9" +pydantic-settings = ">=2.7.0" +python-dotenv = ">=1.1.1" +pyyaml = ">=6.0.2" + +# Image processing +pillow = ">=11.0.0" + +# Testing +pytest = ">=9.0.2" + +# Camera support - these require system packages +# Note: picamera2 and rpicam need to be accessed via system packages +# We'll handle this in the tasks + +[tasks] +# 
Development server +dev = "uvicorn app.main:app --reload --host 0.0.0.0 --port 8000" + +# Production server +start = "uvicorn app.main:app --host 0.0.0.0 --port 8000" + +# Database migrations +db-upgrade = "alembic upgrade head" +db-migrate = { cmd = "alembic revision --autogenerate -m", depends-on = [] } +db-downgrade = "alembic downgrade -1" +db-history = "alembic history" + +# Testing +test = "pytest tests/" +test-verbose = "pytest tests/ -v" +test-cameras = "python test/test_cameras.py" + +# Setup system camera packages link (Raspberry Pi specific) +setup-camera-link = { cmd = """ +python -c \" +import sys +from pathlib import Path +site_packages = Path(sys.prefix) / 'lib' / f'python{sys.version_info.major}.{sys.version_info.minor}' / 'site-packages' +site_packages.mkdir(parents=True, exist_ok=True) +pth_file = site_packages / 'system-packages.pth' +pth_file.write_text('/usr/lib/python3/dist-packages\\n') +print(f'Created {pth_file}') +\" +""" } + +[feature.dev.dependencies] +# Development-only dependencies +watchfiles = ">=1.1.0" +httptools = ">=0.6.4" + +[environments] +default = { solve-group = "default" } +dev = { features = ["dev"], solve-group = "default" } diff --git a/setup_camera_backends.sh b/scripts/setup_camera_backends.sh similarity index 100% rename from setup_camera_backends.sh rename to scripts/setup_camera_backends.sh