diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7cf3b9c --- /dev/null +++ b/.gitignore @@ -0,0 +1,50 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Virtual Environment +venv/ +ENV/ +env/ + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# Database +*.db +*.sqlite3 + +# Alembic +alembic/versions/*.py +!alembic/versions/__init__.py + +# Logs +*.log + +# Docker +.dockerignore + +# OS specific +.DS_Store +Thumbs.db diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e8bf9ca --- /dev/null +++ b/Dockerfile @@ -0,0 +1,30 @@ +# Use Python 3.9 as the base image +FROM python:3.9-slim + +# Set working directory +WORKDIR /app + +# Set environment variables +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements file +COPY requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy project files +COPY . . + +# Expose port +EXPOSE 8000 + +# Command to run the application +CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..ff732de --- /dev/null +++ b/alembic.ini @@ -0,0 +1,102 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
+ +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..cfd71d5 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,92 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context +import os +import sys + +# Add the parent directory to sys.path +sys.path.append(os.path.dirname(os.path.dirname(__file__))) + +# Import models +from src.models.elevator import Elevator +from src.models.elevator_demand import ElevatorDemand +from src.database import Base + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Override sqlalchemy.url with environment variable if available +database_url = os.getenv("DATABASE_URL") +if database_url: + config.set_main_option("sqlalchemy.url", database_url) + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/__init__.py b/alembic/versions/__init__.py new file mode 100644 index 0000000..3dd6e5b --- /dev/null +++ b/alembic/versions/__init__.py @@ -0,0 +1 @@ +# This file makes the versions directory a Python package diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..f769e84 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,40 @@ +version: '3.8' + +services: + api: + build: . + container_name: elevator-api + command: uvicorn src.main:app --host 0.0.0.0 --port 8000 --reload + volumes: + - ./:/app:delegated + ports: + - "8000:8000" + environment: + - DATABASE_URL=postgresql://postgres:postgres@db:5432/elevator_db + depends_on: + - db + networks: + - elevator-network + restart: always + + db: + image: postgres:13 + container_name: elevator-db + volumes: + - postgres_data:/var/lib/postgresql/data/ + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=postgres + - POSTGRES_DB=elevator_db + ports: + - "5432:5432" + networks: + - elevator-network + restart: always + +volumes: + postgres_data: + +networks: + elevator-network: + driver: bridge diff --git a/documentation.md b/documentation.md new file mode 100644 index 0000000..2c4ebdc --- /dev/null +++ b/documentation.md @@ -0,0 +1,100 @@ +# Elevator Demand Prediction System + +This project models an elevator system and collects data that could be used to build a prediction engine for determining the optimal resting floor for elevators. + +## Overview + +When an elevator is empty and not moving, it is at its "resting floor". The ideal resting floor depends on the likely next floor that the elevator will be called from. This system collects data about elevator demands (when and where people call elevators) to enable future prediction of optimal resting floors. 
+
+## Key Concepts
+
+- **Demand**: When people call an elevator from a specific floor, indicating their desired direction (up/down)
+- **Resting Floor**: The floor where an elevator waits when it's vacant and not moving
+- **Prediction**: Using historical demand data to determine the best floor for an elevator to rest on
+
+## Project Structure
+
+- `src/`: Source code directory
+  - `models/`: Database models
+  - `routes/`: API endpoints
+  - `tests/`: Test files
+- `alembic/`: Database migration files
+- `Dockerfile`: Docker configuration
+- `docker-compose.yml`: Docker Compose configuration
+
+## Data Model
+
+### Elevator
+
+Represents an elevator in the system:
+- `id`: Unique identifier
+- `building_id`: Identifier for the building
+- `max_floor`: Maximum floor the elevator can reach
+- `min_floor`: Minimum floor the elevator can reach
+
+### ElevatorDemand
+
+Represents a demand for an elevator:
+- `id`: Unique identifier
+- `timestamp`: When the demand occurred
+- `floor`: Which floor the demand came from
+- `direction`: Whether the person wanted to go up or down
+- `elevator_id`: Which elevator responded to this demand
+
+## API Endpoints
+
+### Elevators
+
+- `POST /elevators/`: Register a new elevator
+- `GET /elevators/`: List all elevators
+- `GET /elevators/{id}`: Get elevator details
+- `PUT /elevators/{id}`: Update an elevator
+- `DELETE /elevators/{id}`: Delete an elevator
+
+### Demands
+
+- `POST /demands/`: Record a new elevator demand
+- `GET /demands/`: List demands (with filtering options)
+
+### Analytics
+
+- `GET /demands/analytics/demand-by-floor`: Get demand frequency by floor
+- `GET /demands/analytics/demand-by-hour`: Get demand frequency by hour and floor
+- `GET /demands/analytics/direction-distribution`: Get up/down distribution by floor
+
+## Setup and Installation
+
+### Prerequisites
+
+- Docker
+- Docker Compose
+
+### Running the Application
+
+1. Clone the repository
+2. Run the application with Docker Compose:
+
+```bash
+docker-compose up -d
+```
+
+3. Access the API at http://localhost:8000
+4. Access the API documentation at http://localhost:8000/docs
+
+### Database Migrations
+
+The project uses Alembic for database migrations:
+
+```bash
+# Run from the host; docker-compose exec executes Alembic inside the api container
+docker-compose exec api alembic revision --autogenerate -m "Initial migration"
+docker-compose exec api alembic upgrade head
+```
+
+## Testing
+
+Run the tests with:
+
+```bash
+docker-compose exec api python -m pytest -v src/tests
+```
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..54bc3c2
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,8 @@
+fastapi==0.95.0
+uvicorn==0.21.1
+sqlalchemy==2.0.9
+psycopg2-binary==2.9.6
+pytest==7.3.1
+httpx==0.24.0
+python-dotenv==1.0.0
+alembic==1.10.3
diff --git a/scripts/apply_migrations.py b/scripts/apply_migrations.py
new file mode 100644
index 0000000..e6aca98
--- /dev/null
+++ b/scripts/apply_migrations.py
@@ -0,0 +1,23 @@
+"""
+Script to apply Alembic migrations.
+""" + +import os +import sys +import subprocess +import argparse + +# Add the parent directory to sys.path +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +def apply_migrations(revision="head"): + """Apply Alembic migrations.""" + # Apply the migrations + subprocess.run(["alembic", "upgrade", revision]) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Apply Alembic migrations.") + parser.add_argument("--revision", default="head", help="Revision to upgrade to (default: head)") + args = parser.parse_args() + + apply_migrations(args.revision) diff --git a/scripts/create_migration.py b/scripts/create_migration.py new file mode 100644 index 0000000..5c974f6 --- /dev/null +++ b/scripts/create_migration.py @@ -0,0 +1,23 @@ +""" +Script to create a new Alembic migration. +""" + +import os +import sys +import subprocess +import argparse + +# Add the parent directory to sys.path +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +def create_migration(message): + """Create a new Alembic migration.""" + # Create the migration + subprocess.run(["alembic", "revision", "--autogenerate", "-m", message]) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Create a new Alembic migration.") + parser.add_argument("message", help="Migration message") + args = parser.parse_args() + + create_migration(args.message) diff --git a/scripts/docker_run.bat b/scripts/docker_run.bat new file mode 100644 index 0000000..56606c3 --- /dev/null +++ b/scripts/docker_run.bat @@ -0,0 +1,22 @@ +@echo off +REM Script to run the elevator demand prediction system in Docker on Windows. + +REM Build and start the containers +echo Building and starting containers... +docker-compose up -d + +REM Wait for the database to be ready +echo Waiting for the database to be ready... +timeout /t 5 /nobreak > nul + +REM Apply migrations +echo Applying migrations... +docker-compose exec api python -m scripts.apply_migrations + +REM Initialize the database with sample data +echo Initializing the database with sample data... +docker-compose exec api python -m scripts.init_db + +echo. +echo The application is now running at http://localhost:8000 +echo API documentation is available at http://localhost:8000/docs diff --git a/scripts/docker_run.sh b/scripts/docker_run.sh new file mode 100644 index 0000000..1e38fb0 --- /dev/null +++ b/scripts/docker_run.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +# Script to run the elevator demand prediction system in Docker. + +# Build and start the containers +docker-compose up -d + +# Wait for the database to be ready +echo "Waiting for the database to be ready..." +sleep 5 + +# Apply migrations +echo "Applying migrations..." +docker-compose exec api python -m scripts.apply_migrations + +# Initialize the database with sample data +echo "Initializing the database with sample data..." +docker-compose exec api python -m scripts.init_db + +echo "The application is now running at http://localhost:8000" +echo "API documentation is available at http://localhost:8000/docs" diff --git a/scripts/docker_test.bat b/scripts/docker_test.bat new file mode 100644 index 0000000..df1013f --- /dev/null +++ b/scripts/docker_test.bat @@ -0,0 +1,10 @@ +@echo off +REM Script to run the tests for the elevator demand prediction system in Docker on Windows. + +REM Build the containers if they don't exist +echo Building containers... +docker-compose build --no-cache + +REM Run the tests +echo Running tests... 
+docker-compose run --rm api python -m pytest -v src/tests
diff --git a/scripts/docker_test.sh b/scripts/docker_test.sh
new file mode 100644
index 0000000..7f8c223
--- /dev/null
+++ b/scripts/docker_test.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# Script to run the tests for the elevator demand prediction system in Docker.
+
+# Build the containers if they don't exist
+docker-compose build
+
+# Run the tests
+docker-compose run --rm api python -m pytest -v src/tests
diff --git a/scripts/init_db.py b/scripts/init_db.py
new file mode 100644
index 0000000..1e0e574
--- /dev/null
+++ b/scripts/init_db.py
@@ -0,0 +1,88 @@
+"""
+Script to initialize the database with sample data.
+This is useful for development and testing purposes.
+"""
+
+import os
+import sys
+from datetime import datetime, timedelta
+import random
+
+# Add the parent directory to sys.path
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from src.database import engine, Base, SessionLocal
+from src.models.elevator import Elevator
+from src.models.elevator_demand import ElevatorDemand
+
+def init_db():
+    """Initialize the database with tables and sample data."""
+    # Create tables
+    Base.metadata.create_all(bind=engine)
+
+    # Create a session
+    db = SessionLocal()
+
+    try:
+        # Check if we already have data
+        if db.query(Elevator).count() > 0:
+            print("Database already contains data. Skipping initialization.")
+            return
+
+        # Create sample elevators
+        print("Creating sample elevators...")
+        elevators = [
+            Elevator(building_id=1, max_floor=10, min_floor=0),
+            Elevator(building_id=2, max_floor=10, min_floor=-2),
+            Elevator(building_id=3, max_floor=20, min_floor=0),
+        ]
+
+        for elevator in elevators:
+            db.add(elevator)
+
+        db.commit()
+
+        # Create sample demands
+        print("Creating sample demands...")
+
+        # Get the elevator IDs
+        elevator_ids = [elevator.id for elevator in db.query(Elevator).all()]
+
+        # Create demands for the past week
+        now = datetime.now()
+
+        # Create 100 random demands
+        demands = []
+        for _ in range(100):
+            # Random time in the past week
+            hours_ago = random.randint(0, 24 * 7)  # Up to a week ago
+            timestamp = now - timedelta(hours=hours_ago)
+
+            # Random floor
+            floor = random.randint(0, 20)
+
+            # Random direction
+            direction = random.choice(["up", "down"])
+
+            # Random responding elevator (the model requires one)
+            elevator_id = random.choice(elevator_ids)
+
+            demand = ElevatorDemand(
+                floor=floor,
+                direction=direction,
+                timestamp=timestamp,
+                elevator_id=elevator_id
+            )
+            demands.append(demand)
+
+        db.bulk_save_objects(demands)
+        db.commit()
+
+        print(f"Database initialized with {len(elevators)} elevators and {len(demands)} demands.")
+
+    finally:
+        db.close()
+
+if __name__ == "__main__":
+    init_db()
diff --git a/scripts/run_app.py b/scripts/run_app.py
new file mode 100644
index 0000000..9e5d9fd
--- /dev/null
+++ b/scripts/run_app.py
@@ -0,0 +1,18 @@
+"""
+Script to run the elevator demand prediction system.
+""" + +import os +import sys +import uvicorn + +# Add the parent directory to sys.path +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +def run_app(): + """Run the elevator demand prediction system.""" + # Run the application + uvicorn.run("src.main:app", host="0.0.0.0", port=8000, reload=True) + +if __name__ == "__main__": + run_app() diff --git a/scripts/run_tests.py b/scripts/run_tests.py new file mode 100644 index 0000000..95d3a9e --- /dev/null +++ b/scripts/run_tests.py @@ -0,0 +1,18 @@ +""" +Script to run the tests for the elevator demand prediction system. +""" + +import os +import sys +import pytest + +# Add the parent directory to sys.path +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +def run_tests(): + """Run the tests for the elevator demand prediction system.""" + # Run the tests + pytest.main(["-v", "src/tests"]) + +if __name__ == "__main__": + run_tests() diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..20e1a86 --- /dev/null +++ b/src/__init__.py @@ -0,0 +1 @@ +# This file makes the src directory a Python package diff --git a/src/database.py b/src/database.py new file mode 100644 index 0000000..1024dd4 --- /dev/null +++ b/src/database.py @@ -0,0 +1,27 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker, declarative_base +import os + +# Get database URL from environment variable or use default SQLite database +DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./elevator.db") + +# Create SQLAlchemy engine with appropriate connect_args +if DATABASE_URL.startswith("sqlite"): + engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}) +else: + # For PostgreSQL or other databases + engine = create_engine(DATABASE_URL) + +# Create SessionLocal class +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +# Create Base class +Base = declarative_base() + +# Dependency to get DB session +def get_db(): + db = SessionLocal() + try: + yield db + finally: + db.close() diff --git a/src/main.py b/src/main.py new file mode 100644 index 0000000..8a88b7e --- /dev/null +++ b/src/main.py @@ -0,0 +1,53 @@ +from fastapi import FastAPI, HTTPException +from fastapi.middleware.cors import CORSMiddleware +from sqlalchemy import text + +from src.database import engine, Base +from src.routes import demands, elevators + +# Create the database tables +Base.metadata.create_all(bind=engine) + +# Create FastAPI app +app = FastAPI( + title="Elevator Demand Prediction System", + description="API for collecting elevator demand data to train prediction models for optimal resting floors", + version="1.0.0" +) + +# Add CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # In production, replace with specific origins + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include routers +app.include_router(elevators.router, prefix="/elevators", tags=["elevators"]) +app.include_router(demands.router, prefix="/demands", tags=["demands"]) + +@app.get("/") +def read_root(): + """Root endpoint with API information.""" + return { + "title": "Elevator Demand Prediction System", + "version": "1.0.0", + "description": "API for collecting elevator demand data to train prediction models for optimal resting floors" + } + +@app.get("/health") +def health_check(): + """Health check endpoint to verify the API and database are working.""" + try: + # Test database connection + with engine.connect() as 
connection: + connection.execute(text("SELECT 1")) + return {"status": "healthy", "database": "connected"} + except Exception as e: + raise HTTPException(status_code=500, detail=f"Health check failed: {str(e)}") + +if __name__ == "__main__": + import uvicorn + uvicorn.run("src.main:app", host="0.0.0.0", port=8000, reload=True) diff --git a/src/models/__init__.py b/src/models/__init__.py new file mode 100644 index 0000000..f544ae5 --- /dev/null +++ b/src/models/__init__.py @@ -0,0 +1,3 @@ +# This file makes the models directory a Python package +from src.models.elevator import Elevator +from src.models.elevator_demand import ElevatorDemand diff --git a/src/models/elevator.py b/src/models/elevator.py new file mode 100644 index 0000000..ae42db8 --- /dev/null +++ b/src/models/elevator.py @@ -0,0 +1,25 @@ +from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import relationship +from src.database import Base + +class Elevator(Base): + """ + Model representing an elevator. + + This captures information about each elevator in the system. + + Attributes: + id: Unique identifier for the elevator + building_id: Identifier for the building the elevator is in + max_floor: The highest floor the elevator can reach + min_floor: The lowest floor the elevator can reach + """ + __tablename__ = "elevators" + + id = Column(Integer, primary_key=True, index=True) + building_id = Column(Integer, nullable=False, index=True) + max_floor = Column(Integer, nullable=False) + min_floor = Column(Integer, nullable=False) + + # Relationship to the ElevatorDemand model + demands = relationship("ElevatorDemand", back_populates="elevator") diff --git a/src/models/elevator_demand.py b/src/models/elevator_demand.py new file mode 100644 index 0000000..b08f971 --- /dev/null +++ b/src/models/elevator_demand.py @@ -0,0 +1,29 @@ +from sqlalchemy import Column, Integer, String, DateTime, ForeignKey +from sqlalchemy.sql import func +from sqlalchemy.orm import relationship +from src.database import Base + +class ElevatorDemand(Base): + """ + Model representing elevator demand data. + + This captures when and where people call elevators, which is the essential + data needed to train a prediction model for optimal resting floors. 
+ + Attributes: + id: Unique identifier for the demand + timestamp: When the demand occurred + floor: Which floor the demand came from + direction: Whether the person wanted to go up or down + elevator_id: Which elevator responded to this demand + """ + __tablename__ = "elevator_demands" + + id = Column(Integer, primary_key=True, index=True) + timestamp = Column(DateTime(timezone=True), server_default=func.now(), index=True) + floor = Column(Integer, nullable=False, index=True) + direction = Column(String, nullable=False) # "up" or "down" + elevator_id = Column(Integer, ForeignKey("elevators.id"), nullable=False, index=True) + + # Relationship to the Elevator model + elevator = relationship("Elevator", back_populates="demands") diff --git a/src/routes/__init__.py b/src/routes/__init__.py new file mode 100644 index 0000000..ac0d5b4 --- /dev/null +++ b/src/routes/__init__.py @@ -0,0 +1 @@ +# This file makes the routes directory a Python package diff --git a/src/routes/demands.py b/src/routes/demands.py new file mode 100644 index 0000000..1e8e290 --- /dev/null +++ b/src/routes/demands.py @@ -0,0 +1,209 @@ +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.orm import Session +from sqlalchemy import func, extract +from typing import List, Optional +from datetime import datetime, timedelta + +from src.database import get_db +from src.models.elevator_demand import ElevatorDemand +from src.models.elevator import Elevator +from src.schemas import ElevatorDemandCreate, ElevatorDemand as ElevatorDemandSchema +from src.schemas import DemandAnalytics, TimeBasedDemandAnalytics, FloorDirectionAnalytics + +router = APIRouter() + +@router.post("/", response_model=ElevatorDemandSchema, status_code=201) +def create_demand(demand: ElevatorDemandCreate, db: Session = Depends(get_db)): + """ + Record a new elevator demand. + + This endpoint records when someone calls an elevator from a specific floor, + indicating which direction they want to go and which elevator responded to this demand. + """ + # Validate direction + if demand.direction not in ["up", "down"]: + raise HTTPException(status_code=400, detail="Direction must be 'up' or 'down'") + + # Validate elevator_id + elevator = db.query(Elevator).filter(Elevator.id == demand.elevator_id).first() + if elevator is None: + raise HTTPException(status_code=404, detail="Elevator not found") + + db_demand = ElevatorDemand( + floor=demand.floor, + direction=demand.direction, + elevator_id=demand.elevator_id + ) + db.add(db_demand) + db.commit() + db.refresh(db_demand) + return db_demand + +@router.get("/", response_model=List[ElevatorDemandSchema]) +def get_demands( + skip: int = 0, + limit: int = 100, + floor: Optional[int] = None, + direction: Optional[str] = None, + start_date: Optional[datetime] = None, + end_date: Optional[datetime] = None, + db: Session = Depends(get_db) +): + """ + Get elevator demands with optional filtering. + + This endpoint retrieves historical demand data with various filtering options. 
+ """ + query = db.query(ElevatorDemand) + + # Apply filters if provided + if floor is not None: + query = query.filter(ElevatorDemand.floor == floor) + if direction is not None: + if direction not in ["up", "down"]: + raise HTTPException(status_code=400, detail="Direction must be 'up' or 'down'") + query = query.filter(ElevatorDemand.direction == direction) + if start_date is not None: + query = query.filter(ElevatorDemand.timestamp >= start_date) + if end_date is not None: + query = query.filter(ElevatorDemand.timestamp <= end_date) + + # Apply pagination and return results + return query.order_by(ElevatorDemand.timestamp.desc()).offset(skip).limit(limit).all() + +@router.get("/analytics/demand-by-floor", response_model=List[DemandAnalytics]) +def get_demand_by_floor( + start_date: Optional[datetime] = Query(None, description="Start date for analysis"), + end_date: Optional[datetime] = Query(None, description="End date for analysis"), + db: Session = Depends(get_db) +): + """ + Get demand frequency by floor. + + This endpoint analyzes historical demand data to show which floors + have the highest demand frequency within the specified date range. + """ + # Default to last 7 days if no dates provided + if start_date is None: + start_date = datetime.now() - timedelta(days=7) + if end_date is None: + end_date = datetime.now() + + result = db.query( + ElevatorDemand.floor, + func.count(ElevatorDemand.id).label("count") + ).filter( + ElevatorDemand.timestamp >= start_date, + ElevatorDemand.timestamp <= end_date + ).group_by( + ElevatorDemand.floor + ).order_by( + func.count(ElevatorDemand.id).desc() + ).all() + + return [{"floor": floor, "count": count} for floor, count in result] + +@router.get("/analytics/demand-by-hour", response_model=List[TimeBasedDemandAnalytics]) +def get_demand_by_hour( + start_date: Optional[datetime] = Query(None, description="Start date for analysis"), + end_date: Optional[datetime] = Query(None, description="End date for analysis"), + floor: Optional[int] = Query(None, description="Filter by specific floor"), + db: Session = Depends(get_db) +): + """ + Get demand frequency by hour of day and floor. + + This endpoint analyzes historical demand data to show which hours + of the day have the highest demand frequency for each floor within + the specified date range. + """ + # Default to last 7 days if no dates provided + if start_date is None: + start_date = datetime.now() - timedelta(days=7) + if end_date is None: + end_date = datetime.now() + + query = db.query( + extract('hour', ElevatorDemand.timestamp).label("hour"), + ElevatorDemand.floor, + func.count(ElevatorDemand.id).label("count") + ).filter( + ElevatorDemand.timestamp >= start_date, + ElevatorDemand.timestamp <= end_date + ) + + # Apply floor filter if provided + if floor is not None: + query = query.filter(ElevatorDemand.floor == floor) + + result = query.group_by( + extract('hour', ElevatorDemand.timestamp), + ElevatorDemand.floor + ).order_by( + extract('hour', ElevatorDemand.timestamp), + ElevatorDemand.floor + ).all() + + return [{"hour": hour, "floor": floor, "count": count} for hour, floor, count in result] + +@router.get("/analytics/direction-distribution", response_model=List[FloorDirectionAnalytics]) +def get_direction_distribution( + start_date: Optional[datetime] = Query(None, description="Start date for analysis"), + end_date: Optional[datetime] = Query(None, description="End date for analysis"), + db: Session = Depends(get_db) +): + """ + Get up/down distribution by floor. 
+
+    This endpoint analyzes historical demand data to show the distribution
+    of up vs. down requests for each floor within the specified date range.
+    """
+    # Default to last 7 days if no dates provided
+    if start_date is None:
+        start_date = datetime.now() - timedelta(days=7)
+    if end_date is None:
+        end_date = datetime.now()
+
+    # Get up counts by floor
+    up_counts = db.query(
+        ElevatorDemand.floor,
+        func.count(ElevatorDemand.id).label("up_count")
+    ).filter(
+        ElevatorDemand.timestamp >= start_date,
+        ElevatorDemand.timestamp <= end_date,
+        ElevatorDemand.direction == "up"
+    ).group_by(
+        ElevatorDemand.floor
+    ).subquery()
+
+    # Get down counts by floor
+    down_counts = db.query(
+        ElevatorDemand.floor,
+        func.count(ElevatorDemand.id).label("down_count")
+    ).filter(
+        ElevatorDemand.timestamp >= start_date,
+        ElevatorDemand.timestamp <= end_date,
+        ElevatorDemand.direction == "down"
+    ).group_by(
+        ElevatorDemand.floor
+    ).subquery()
+
+    # Join the results; the joined counts are grouped as well so that
+    # strict SQL modes (e.g. PostgreSQL) accept the select list
+    result = db.query(
+        ElevatorDemand.floor,
+        func.coalesce(up_counts.c.up_count, 0).label("up_count"),
+        func.coalesce(down_counts.c.down_count, 0).label("down_count")
+    ).outerjoin(
+        up_counts, ElevatorDemand.floor == up_counts.c.floor
+    ).outerjoin(
+        down_counts, ElevatorDemand.floor == down_counts.c.floor
+    ).group_by(
+        ElevatorDemand.floor,
+        up_counts.c.up_count,
+        down_counts.c.down_count
+    ).order_by(
+        ElevatorDemand.floor
+    ).all()
+
+    return [
+        {"floor": floor, "up_count": up_count, "down_count": down_count}
+        for floor, up_count, down_count in result
+    ]
diff --git a/src/routes/elevators.py b/src/routes/elevators.py
new file mode 100644
index 0000000..0028d3b
--- /dev/null
+++ b/src/routes/elevators.py
@@ -0,0 +1,137 @@
+from fastapi import APIRouter, Depends, HTTPException, Query
+from sqlalchemy.orm import Session
+from typing import List, Optional
+
+from src.database import get_db
+from src.models.elevator import Elevator
+from src.schemas import ElevatorCreate, Elevator as ElevatorSchema, ElevatorDemand
+
+router = APIRouter()
+
+@router.post("/", response_model=ElevatorSchema, status_code=201)
+def create_elevator(elevator: ElevatorCreate, db: Session = Depends(get_db)):
+    """
+    Register a new elevator.
+
+    This endpoint adds a new elevator to the system.
+    """
+    # Validate floor values
+    if elevator.min_floor >= elevator.max_floor:
+        raise HTTPException(
+            status_code=400,
+            detail="min_floor must be less than max_floor"
+        )
+
+    db_elevator = Elevator(
+        building_id=elevator.building_id,
+        max_floor=elevator.max_floor,
+        min_floor=elevator.min_floor
+    )
+    db.add(db_elevator)
+    db.commit()
+    db.refresh(db_elevator)
+    return db_elevator
+
+@router.get("/", response_model=List[ElevatorSchema])
+def get_elevators(
+    skip: int = 0,
+    limit: int = 100,
+    building_id: Optional[int] = None,
+    db: Session = Depends(get_db)
+):
+    """
+    Get elevators with optional filtering.
+
+    This endpoint retrieves elevators with various filtering options.
+    """
+    query = db.query(Elevator)
+
+    # Apply filters if provided
+    if building_id is not None:
+        query = query.filter(Elevator.building_id == building_id)
+
+    # Apply pagination and return results
+    return query.offset(skip).limit(limit).all()
+
+@router.get("/{elevator_id}", response_model=ElevatorSchema)
+def get_elevator(elevator_id: int, db: Session = Depends(get_db)):
+    """
+    Get details for a specific elevator.
+
+    This endpoint retrieves information about a specific elevator by ID.
+ """ + elevator = db.query(Elevator).filter(Elevator.id == elevator_id).first() + if elevator is None: + raise HTTPException(status_code=404, detail="Elevator not found") + return elevator + +@router.put("/{elevator_id}", response_model=ElevatorSchema) +def update_elevator( + elevator_id: int, + elevator: ElevatorCreate, + db: Session = Depends(get_db) +): + """ + Update an existing elevator. + + This endpoint updates information for an existing elevator. + """ + db_elevator = db.query(Elevator).filter(Elevator.id == elevator_id).first() + if db_elevator is None: + raise HTTPException(status_code=404, detail="Elevator not found") + + # Validate floor values + if elevator.min_floor >= elevator.max_floor: + raise HTTPException( + status_code=400, + detail="min_floor must be less than or equal to max_floor" + ) + + # Update elevator attributes + db_elevator.building_id = elevator.building_id + db_elevator.max_floor = elevator.max_floor + db_elevator.min_floor = elevator.min_floor + + db.commit() + db.refresh(db_elevator) + return db_elevator + +@router.delete("/{elevator_id}", status_code=204) +def delete_elevator(elevator_id: int, db: Session = Depends(get_db)): + """ + Delete an elevator. + + This endpoint removes an elevator from the system. + """ + db_elevator = db.query(Elevator).filter(Elevator.id == elevator_id).first() + if db_elevator is None: + raise HTTPException(status_code=404, detail="Elevator not found") + + db.delete(db_elevator) + db.commit() + return None + +@router.get("/{elevator_id}/demands", response_model=List[ElevatorDemand]) +def get_elevator_demands( + elevator_id: int, + skip: int = 0, + limit: int = 100, + db: Session = Depends(get_db) +): + """ + Get demands for a specific elevator. + + This endpoint retrieves all demands that were handled by a specific elevator. 
+ """ + # Check if elevator exists + elevator = db.query(Elevator).filter(Elevator.id == elevator_id).first() + if elevator is None: + raise HTTPException(status_code=404, detail="Elevator not found") + + # Get demands for this elevator + from src.models.elevator_demand import ElevatorDemand as ElevatorDemandModel + demands = db.query(ElevatorDemandModel).filter( + ElevatorDemandModel.elevator_id == elevator_id + ).offset(skip).limit(limit).all() + + return demands diff --git a/src/schemas.py b/src/schemas.py new file mode 100644 index 0000000..324f3d5 --- /dev/null +++ b/src/schemas.py @@ -0,0 +1,58 @@ +from pydantic import BaseModel, Field +from datetime import datetime +from typing import List, Optional + +# Elevator schemas +class ElevatorBase(BaseModel): + """Base schema for elevator data.""" + building_id: int = Field(..., description="Building identifier") + max_floor: int = Field(..., description="Maximum floor the elevator can reach") + min_floor: int = Field(..., description="Minimum floor the elevator can reach") + +class ElevatorCreate(ElevatorBase): + """Schema for creating a new elevator.""" + pass + +class Elevator(ElevatorBase): + """Schema for elevator response including database fields.""" + id: int + + class Config: + orm_mode = True + +# Elevator demand schemas +class ElevatorDemandBase(BaseModel): + """Base schema for elevator demand data.""" + floor: int = Field(..., description="Floor where the demand originated") + direction: str = Field(..., description="Direction of travel ('up' or 'down')") + elevator_id: int = Field(..., description="ID of the elevator that responded to this demand") + +class ElevatorDemandCreate(ElevatorDemandBase): + """Schema for creating a new elevator demand.""" + pass + +class ElevatorDemand(ElevatorDemandBase): + """Schema for elevator demand response including database fields.""" + id: int + timestamp: datetime + + class Config: + orm_mode = True + +# Analytics schemas +class DemandAnalytics(BaseModel): + """Schema for demand analytics response.""" + floor: int + count: int + +class TimeBasedDemandAnalytics(BaseModel): + """Schema for time-based demand analytics.""" + hour: int + floor: int + count: int + +class FloorDirectionAnalytics(BaseModel): + """Schema for floor-direction distribution analytics.""" + floor: int + up_count: int + down_count: int diff --git a/src/tests/__init__.py b/src/tests/__init__.py new file mode 100644 index 0000000..3ccc7a9 --- /dev/null +++ b/src/tests/__init__.py @@ -0,0 +1 @@ +# This file makes the tests directory a Python package diff --git a/src/tests/conftest.py b/src/tests/conftest.py new file mode 100644 index 0000000..22ac6dc --- /dev/null +++ b/src/tests/conftest.py @@ -0,0 +1,79 @@ +import pytest +from fastapi.testclient import TestClient +from unittest.mock import MagicMock, patch +from datetime import datetime, timedelta + +from src.main import app +from src.database import get_db +from src.models.elevator import Elevator +from src.models.elevator_demand import ElevatorDemand + +# Create a test client +client = TestClient(app) + +@pytest.fixture +def mock_db(): + """Create a mock database session.""" + mock = MagicMock() + return mock + +@pytest.fixture +def mock_get_db(mock_db): + """Override the get_db dependency for testing.""" + def _get_db(): + yield mock_db + + # Override the dependency in the app + app.dependency_overrides[get_db] = _get_db + + yield mock_db + + # Clean up after the test + app.dependency_overrides.clear() + +@pytest.fixture +def sample_elevator(): + """Create a sample 
elevator for testing.""" + # Create a mock elevator instead of a real model instance + elevator = MagicMock() + elevator.id = 1 + elevator.building_id = 1 + elevator.max_floor = 10 + elevator.min_floor = 0 + + # Configure the mock to work with FastAPI's response_model + elevator.__getitem__.side_effect = lambda key: getattr(elevator, key) + elevator.keys.return_value = ["id", "building_id", "max_floor", "min_floor"] + elevator.__iter__.return_value = iter(["id", "building_id", "max_floor", "min_floor"]) + + return elevator + +@pytest.fixture +def sample_demands(sample_elevator): + """Create sample demand data for testing.""" + now = datetime.now() + + # Create mock demand objects + demands = [] + for i, (floor, direction, time_offset) in enumerate([ + (1, "up", timedelta(hours=2)), + (3, "down", timedelta(hours=1)), + (5, "up", timedelta(hours=0)), + (1, "up", timedelta(days=2)), + (3, "up", timedelta(days=1)) + ], 1): + demand = MagicMock() + demand.id = i + demand.floor = floor + demand.direction = direction + demand.timestamp = now - time_offset + demand.elevator_id = sample_elevator.id + + # Configure the mock to work with FastAPI's response_model + demand.__getitem__.side_effect = lambda key: getattr(demand, key) + demand.keys.return_value = ["id", "floor", "direction", "timestamp", "elevator_id"] + demand.__iter__.return_value = iter(["id", "floor", "direction", "timestamp", "elevator_id"]) + + demands.append(demand) + + return demands diff --git a/src/tests/test_demands.py b/src/tests/test_demands.py new file mode 100644 index 0000000..ae324c3 --- /dev/null +++ b/src/tests/test_demands.py @@ -0,0 +1,169 @@ +import pytest +from fastapi.testclient import TestClient +from unittest.mock import MagicMock +from datetime import datetime, timedelta +from sqlalchemy.orm import Session + +from src.tests.conftest import client, mock_db, mock_get_db, sample_elevator, sample_demands +from src.models.elevator_demand import ElevatorDemand + +def test_create_demand(mock_db, mock_get_db, sample_elevator): + """Test creating a new demand.""" + # Setup mock + mock_db.query.return_value.filter.return_value.first.return_value = sample_elevator + + # Create a mock for the new demand + new_demand = ElevatorDemand( + id=100, + floor=3, + direction="up", + elevator_id=1, + timestamp=datetime.now() + ) + + # Configure mock to return the new demand after add and refresh + def mock_add(demand): + return None + + def mock_refresh(demand): + demand.id = new_demand.id + demand.timestamp = new_demand.timestamp + return None + + mock_db.add.side_effect = mock_add + mock_db.refresh.side_effect = mock_refresh + + # Make the request + response = client.post( + "/demands/", + json={ + "floor": 3, + "direction": "up", + "elevator_id": 1 + } + ) + + # Assertions + assert response.status_code == 201 + data = response.json() + assert data["floor"] == 3 + assert data["direction"] == "up" + assert data["elevator_id"] == 1 + assert "id" in data + assert "timestamp" in data + + # Verify mock calls + mock_db.query.assert_called() + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + mock_db.refresh.assert_called_once() + +def test_create_demand_invalid_direction(mock_db, mock_get_db, sample_elevator): + """Test creating a demand with invalid direction.""" + # Setup mock + mock_db.query.return_value.filter.return_value.first.return_value = sample_elevator + + # Make the request + response = client.post( + "/demands/", + json={ + "floor": 3, + "direction": "sideways", # Invalid direction + "elevator_id": 
sample_elevator.id + } + ) + + # Assertions + assert response.status_code == 400 + assert "Direction must be 'up' or 'down'" in response.json()["detail"] + +def test_create_demand_invalid_elevator(mock_db, mock_get_db): + """Test creating a demand with non-existent elevator ID.""" + # Setup mock to return None (elevator not found) + mock_db.query.return_value.filter.return_value.first.return_value = None + + # Make the request + response = client.post( + "/demands/", + json={ + "floor": 3, + "direction": "up", + "elevator_id": 999 # Non-existent elevator ID + } + ) + + # Assertions + assert response.status_code == 404 + assert "Elevator not found" in response.json()["detail"] + +def test_get_demands(mock_db, mock_get_db, sample_demands): + """Test getting all demands.""" + # Setup mock + mock_db.query.return_value.order_by.return_value.offset.return_value.limit.return_value.all.return_value = sample_demands + + # Make the request + response = client.get("/demands/") + + # Assertions + assert response.status_code == 200 + data = response.json() + assert len(data) == 5 # We created 5 sample demands + + # Verify mock calls + mock_db.query.assert_called() + +def test_get_demands_with_floor_filter(mock_db, mock_get_db, sample_demands): + """Test getting demands filtered by floor.""" + # Setup mock to return only demands for floor 1 + floor_1_demands = [d for d in sample_demands if d.floor == 1] + mock_db.query.return_value.filter.return_value.order_by.return_value.offset.return_value.limit.return_value.all.return_value = floor_1_demands + + # Make the request + response = client.get("/demands/?floor=1") + + # Assertions + assert response.status_code == 200 + data = response.json() + assert len(data) == 2 # We created 2 demands for floor 1 + assert all(item["floor"] == 1 for item in data) + + # Verify mock calls + mock_db.query.assert_called() + +def test_get_demands_with_direction_filter(mock_db, mock_get_db, sample_demands): + """Test getting demands filtered by direction.""" + # Setup mock to return only demands with direction "up" + up_demands = [d for d in sample_demands if d.direction == "up"] + mock_db.query.return_value.filter.return_value.order_by.return_value.offset.return_value.limit.return_value.all.return_value = up_demands + + # Make the request + response = client.get("/demands/?direction=up") + + # Assertions + assert response.status_code == 200 + data = response.json() + assert len(data) == 4 # We created 4 demands with direction "up" + assert all(item["direction"] == "up" for item in data) + + # Verify mock calls + mock_db.query.assert_called() + +def test_get_demands_with_date_filter(mock_db, mock_get_db, sample_demands): + """Test getting demands filtered by date range.""" + # Setup mock to return only demands from the last day + yesterday = datetime.now() - timedelta(days=1) + recent_demands = [d for d in sample_demands if d.timestamp >= yesterday] + mock_db.query.return_value.filter.return_value.order_by.return_value.offset.return_value.limit.return_value.all.return_value = recent_demands + + # Make the request + yesterday_str = yesterday.isoformat() + response = client.get(f"/demands/?start_date={yesterday_str}") + + # Assertions + assert response.status_code == 200 + data = response.json() + assert len(data) == 3 # We created 3 demands within the last day + + # Verify mock calls + mock_db.query.assert_called() + diff --git a/src/tests/test_elevators.py b/src/tests/test_elevators.py new file mode 100644 index 0000000..143e30c --- /dev/null +++ b/src/tests/test_elevators.py 
@@ -0,0 +1,201 @@
+import pytest
+from fastapi.testclient import TestClient
+from unittest.mock import MagicMock
+from datetime import datetime
+
+from src.tests.conftest import client, mock_db, mock_get_db
+
+
+def test_create_elevator_invalid_floors(mock_db, mock_get_db):
+    """Test creating an elevator with invalid floor configuration."""
+    # Make the request with min_floor > max_floor
+    response = client.post(
+        "/elevators/",
+        json={
+            "building_id": 2,
+            "max_floor": 5,
+            "min_floor": 10  # Invalid: min_floor > max_floor
+        }
+    )
+
+    # Assertions
+    assert response.status_code == 400
+    assert "min_floor must be less than max_floor" in response.json()["detail"]
+
+    # Verify mock calls
+    mock_db.add.assert_not_called()
+    mock_db.commit.assert_not_called()
+
+
+def test_get_elevator_not_found(mock_db, mock_get_db):
+    """Test getting a non-existent elevator."""
+    # Setup mock to return None (elevator not found)
+    mock_db.query.return_value.filter.return_value.first.return_value = None
+
+    # Make the request
+    response = client.get("/elevators/999")  # Non-existent ID
+
+    # Assertions
+    assert response.status_code == 404
+    assert "Elevator not found" in response.json()["detail"]
+
+    # Verify mock calls
+    mock_db.query.assert_called()
+
+def test_update_elevator(mock_db, mock_get_db):
+    """Test updating an elevator."""
+    # Setup mock to return an existing elevator; reassigning the route
+    # function after the router is registered would have no effect, so the
+    # behavior is driven entirely through the mocked session.
+    elevator_id = 1
+    mock_elevator = MagicMock()
+    mock_elevator.id = elevator_id
+    mock_elevator.building_id = 1
+    mock_elevator.max_floor = 10
+    mock_elevator.min_floor = 0
+    mock_db.query.return_value.filter.return_value.first.return_value = mock_elevator
+
+    # Make the request
+    response = client.put(
+        f"/elevators/{elevator_id}",
+        json={
+            "building_id": 3,
+            "max_floor": 15,
+            "min_floor": -1
+        }
+    )
+
+    # Assertions: the route mutates the mock in place and returns it
+    assert response.status_code == 200
+    data = response.json()
+    assert data["id"] == elevator_id
+    assert data["building_id"] == 3
+    assert data["max_floor"] == 15
+    assert data["min_floor"] == -1
+
+    # Verify mock calls
+    mock_db.query.assert_called()
+    mock_db.commit.assert_called_once()
+    mock_db.refresh.assert_called_once()
+
+def test_update_elevator_not_found(mock_db, mock_get_db):
+    """Test updating a non-existent elevator."""
+    # Setup mock to return None (elevator not found)
+    mock_db.query.return_value.filter.return_value.first.return_value = None
+
+    # Make the request
+    response = client.put(
+        "/elevators/999",  # Non-existent ID
+        json={
+            "building_id": 3,
+            "max_floor": 15,
+            "min_floor": -1
+        }
+    )
+
+    # Assertions
+    assert response.status_code == 404
+    assert "Elevator not found" in response.json()["detail"]
+
+    # Verify mock calls
+    mock_db.query.assert_called()
+    mock_db.commit.assert_not_called()
+
+def test_update_elevator_invalid_floors(mock_db, mock_get_db):
+    """Test updating an elevator with invalid floor configuration."""
+    # Setup mock to return the elevator
+    elevator_id = 1
+    mock_elevator = MagicMock()
+    mock_elevator.id = elevator_id
+    mock_db.query.return_value.filter.return_value.first.return_value = mock_elevator
+
+    # Make the request with min_floor > max_floor
+    response = client.put(
f"/elevators/{elevator_id}", + json={ + "building_id": 3, + "max_floor": 5, + "min_floor": 10 # Invalid: min_floor > max_floor + } + ) + + # Assertions + assert response.status_code == 400 + assert "min_floor must be less than or equal to max_floor" in response.json()["detail"] + + # Verify mock calls + mock_db.query.assert_called() + mock_db.commit.assert_not_called() + +def test_delete_elevator(mock_db, mock_get_db): + """Test deleting an elevator.""" + # Setup mock data + elevator_id = 1 + + # Setup mock to return an elevator + mock_elevator = MagicMock() + mock_elevator.id = elevator_id + mock_db.query.return_value.filter.return_value.first.return_value = mock_elevator + + # Make the request + response = client.delete(f"/elevators/{elevator_id}") + + # Assertions + assert response.status_code == 204 + + # Verify mock calls + mock_db.query.assert_called() + mock_db.delete.assert_called_once() + mock_db.commit.assert_called_once() + +def test_delete_elevator_not_found(mock_db, mock_get_db): + """Test deleting a non-existent elevator.""" + # Setup mock to return None (elevator not found) + mock_db.query.return_value.filter.return_value.first.return_value = None + + # Make the request + response = client.delete("/elevators/999") # Non-existent ID + + # Assertions + assert response.status_code == 404 + assert "Elevator not found" in response.json()["detail"] + + # Verify mock calls + mock_db.query.assert_called() + mock_db.delete.assert_not_called() + mock_db.commit.assert_not_called() + + +def test_get_elevator_demands_not_found(mock_db, mock_get_db): + """Test getting demands for a non-existent elevator.""" + # Setup mock to return None (elevator not found) + mock_db.query.return_value.filter.return_value.first.return_value = None + + # Make the request + response = client.get("/elevators/999/demands") # Non-existent ID + + # Assertions + assert response.status_code == 404 + assert "Elevator not found" in response.json()["detail"] + + # Verify mock calls + mock_db.query.assert_called()