diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9e774f0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +__pycache__/ +*.pyc +*.db +.venv +build +*.egg-info \ No newline at end of file diff --git a/nextlevel-elevator/.tool-versions b/nextlevel-elevator/.tool-versions new file mode 100644 index 0000000..47cd22e --- /dev/null +++ b/nextlevel-elevator/.tool-versions @@ -0,0 +1 @@ +python 3.10.13 diff --git a/nextlevel-elevator/Dockerfile b/nextlevel-elevator/Dockerfile new file mode 100644 index 0000000..3b7de3f --- /dev/null +++ b/nextlevel-elevator/Dockerfile @@ -0,0 +1,8 @@ +FROM python:3.10-slim-bookworm + +RUN mkdir -p /app +WORKDIR /app +COPY pyproject.toml /app/pyproject.toml +RUN pip install -e ./ + +COPY ./src /app/src \ No newline at end of file diff --git a/nextlevel-elevator/__init__.py b/nextlevel-elevator/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/nextlevel-elevator/docker-compose.yml b/nextlevel-elevator/docker-compose.yml new file mode 100644 index 0000000..acbf7ce --- /dev/null +++ b/nextlevel-elevator/docker-compose.yml @@ -0,0 +1,10 @@ +version: "3.9" + +services: + api: + build: . 
+ ports: + - "1337:8000" + volumes: + - .:/app + command: uvicorn main:app --host 0.0.0.0 --port 8000 diff --git a/nextlevel-elevator/main.py b/nextlevel-elevator/main.py new file mode 100644 index 0000000..5e67aeb --- /dev/null +++ b/nextlevel-elevator/main.py @@ -0,0 +1,21 @@ +from fastapi import FastAPI +from sqlmodel import SQLModel + +from src import db +from src import api + +app = FastAPI() + + +def create_db_and_tables(): + engine = db.Engine() + SQLModel.metadata.create_all(engine) + + +@app.on_event("startup") +def on_startup(): + # Not ideal, but this project is only for didactic purposes + create_db_and_tables() + + +app.include_router(api.router) \ No newline at end of file diff --git a/nextlevel-elevator/pyproject.toml b/nextlevel-elevator/pyproject.toml new file mode 100644 index 0000000..565f0ab --- /dev/null +++ b/nextlevel-elevator/pyproject.toml @@ -0,0 +1,20 @@ +[project] +name = "nextlevel-elevator" +version = "0.1.0" +description = "A basic FastAPI project for NextLevel Elevator." 
+dependencies = [ + "fastapi==0.115.14", + "pydantic==2.11.7", + "sqlmodel==0.0.24", + "uvicorn==0.34.3", + "httpx==0.28.1" +] + +[project.optional-dependencies] +test = [ + "pytest<8.0.0,>=7.4.3" +] + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/nextlevel-elevator/readme.md b/nextlevel-elevator/readme.md new file mode 100644 index 0000000..771d26f --- /dev/null +++ b/nextlevel-elevator/readme.md @@ -0,0 +1,20 @@ +## Installing +```bash +pip install -e ./ +pip install .[test] +``` + +## Testing + +```bash +pytest tests/ +``` + +## Running + +```bash +docker compose up -d +``` +## API Reference + +http://127.0.0.1:1337/docs \ No newline at end of file diff --git a/nextlevel-elevator/src/__init__.py b/nextlevel-elevator/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/nextlevel-elevator/src/api/__init__.py b/nextlevel-elevator/src/api/__init__.py new file mode 100644 index 0000000..750152e --- /dev/null +++ b/nextlevel-elevator/src/api/__init__.py @@ -0,0 +1,10 @@ +from fastapi import APIRouter + +from . import v1 + +router = APIRouter( + prefix="/api", + responses={404: {"description": "Not found"}}, +) + +router.include_router(v1.router) diff --git a/nextlevel-elevator/src/api/v1/__init__.py b/nextlevel-elevator/src/api/v1/__init__.py new file mode 100644 index 0000000..18868c9 --- /dev/null +++ b/nextlevel-elevator/src/api/v1/__init__.py @@ -0,0 +1,10 @@ +from fastapi import APIRouter + +from . 
import elevator + +router = APIRouter( + prefix="/v1", + responses={404: {"description": "Not found"}}, +) + +router.include_router(elevator.router) \ No newline at end of file diff --git a/nextlevel-elevator/src/api/v1/elevator.py b/nextlevel-elevator/src/api/v1/elevator.py new file mode 100644 index 0000000..13e86f9 --- /dev/null +++ b/nextlevel-elevator/src/api/v1/elevator.py @@ -0,0 +1,177 @@ +import io +import csv +from src.models import Elevator, ElevatorDemand, ElevatorDemandHistory +from fastapi import APIRouter, HTTPException +from fastapi.responses import StreamingResponse + +from datetime import date, datetime +from pydantic import BaseModel + +from sqlalchemy.exc import IntegrityError +from sqlmodel import select + +from typing import List + + +from src.db import Session + + +router = APIRouter( + prefix="/elevator" +) + + +class DemandParameters(BaseModel): + level: int + + +class SteteParameters(BaseModel): + level: int + + +class DatasetParameters(BaseModel): + format: str = "csv" + +class ElevatorParameters(BaseModel): + min_level: int + max_level: int + + +@router.post("/", status_code=201) +def create_elevator(params: ElevatorParameters, session: Session) -> Elevator: + elevator = Elevator( + min_level=params.min_level, + max_level=params.max_level + ) + session.add(elevator) + try: + session.flush() + except IntegrityError: + session.rollback() + raise HTTPException(status_code=400) + finally: + session.commit() + session.refresh(elevator) + return elevator + + +@router.put("/{elevator_id}", status_code=202) +async def call(elevator_id: int, params: DemandParameters, session: Session): + """ + Provide the API to call the given elevator stores a ElevatorDemand, + and the demand is unique for the given level, if there is a Intergrity + Error it will returns 409 Conflict + """ + elevator = session.get(Elevator, elevator_id) + if not elevator: + session.rollback() + raise HTTPException(status_code=404, detail="Not found") + + if not 
(elevator.min_level <= params.level <= elevator.max_level): + session.rollback() + raise HTTPException(status_code=400, detail="Level overflow") + + now = datetime.now() + demand = ElevatorDemand( + elevator_id=elevator_id, + timestamp=now.timestamp(), + level=params.level + ) + session.add(demand) + try: + session.flush() + except IntegrityError: + session.rollback() + raise HTTPException(status_code=409, detail="Conflict, demaind already has been made") + finally: + session.commit() + + return "Accepted" + + +def create_history(demand: ElevatorDemand): + dt = datetime.fromtimestamp(demand.timestamp) + history = ElevatorDemandHistory( + elevator_id=demand.elevator_id, + week_day=dt.weekday(), + hour=dt.hour, + minute=dt.minute, + second=dt.second, + level=demand.level + ) + return history + + +@router.post("/{elevator_id}/state") +async def set_state(elevator_id: int, params: SteteParameters, session: Session): + """ + Provide the sufficient API to set the state, the main business logic + is whenever a elevetor reach the the level, check if there is an + open demand to that level, since demand has unique for level the + very first demand will be stored to that level it will be cleared when + the elevator reach that level. 
+ + For that purpose we don't need to store the state itself, just reacting + """ + + elevator = session.get(Elevator, elevator_id) + if not elevator: + raise HTTPException(status_code=404, detail="Not found") + + demand_stmt = select(ElevatorDemand)\ + .where(ElevatorDemand.elevator_id == elevator.id)\ + .where(ElevatorDemand.level == params.level) + + demand = session.exec(demand_stmt).first() + if not demand: + session.rollback() + return "Noop" + + history = create_history(demand) + session.add(history) + session.flush() + session.delete(demand) + session.commit() + return "Accepted" + + +def format_dataset_csv(history: List[ElevatorDemandHistory]): + output = io.StringIO() + writer = csv.writer(output) + + writer.writerow([ + "elevator_id", + "week_day", + "hour", + "minute", + "second", + "level" + ]) + for h in history: + row = [ + h.elevator_id, + h.week_day, + h.hour, + h.minute, + h.second, + h.level, + ] + writer.writerow(row) + + output.seek(0) # Rewind to the beginning of the stream + + return StreamingResponse( + io.BytesIO(output.getvalue().encode('utf-8')), + media_type="text/csv", + headers={"Content-Disposition": "attachment; filename=dataset.csv"} + ) + +@router.get("/dataset.{format}") +async def get_dataset(format: str, session: Session): + history_stmt = select(ElevatorDemandHistory) + + history = session.exec(history_stmt) + if format == "csv": + return format_dataset_csv(history) + + raise HTTPException(status_code=400, detail="Format Not suppoted") \ No newline at end of file diff --git a/nextlevel-elevator/src/db.py b/nextlevel-elevator/src/db.py new file mode 100644 index 0000000..8f5962a --- /dev/null +++ b/nextlevel-elevator/src/db.py @@ -0,0 +1,27 @@ +import sqlmodel +from fastapi import Depends + +from typing import Annotated +from src import db + +def create_engine(): + sqlite_file_name = "database.db" + sqlite_url = f"sqlite:///{sqlite_file_name}" + return sqlmodel.create_engine(sqlite_url) + + +class Engine: + __instance__ = None + 
def __new__(cls): + if cls.__instance__ is None: + cls.__instance__ = create_engine() + return cls.__instance__ + + +def get_session(): + engine = Engine() + with sqlmodel.Session(engine) as session: + yield session + + +Session = Annotated[sqlmodel.Session, Depends(db.get_session)] \ No newline at end of file diff --git a/nextlevel-elevator/src/models.py b/nextlevel-elevator/src/models.py new file mode 100644 index 0000000..0c0b298 --- /dev/null +++ b/nextlevel-elevator/src/models.py @@ -0,0 +1,53 @@ +import enum +from typing import Annotated, Union + +from sqlmodel import Field, SQLModel, UniqueConstraint + +MAX_LEVEL = 10 +MIN_LEVEL = 1 + + +class WeekDay(enum.IntEnum): + MONDAY = 0 + TUESDAY = 1 + WEDNESDAY = 2 + THURSDAY = 3 + FRIDAY = 4 + SATURDAY = 5 + SUNDAY = 6 + + +class Elevator(SQLModel, table=True): + id: int = Field(primary_key=True) + min_level: int = Field() + max_level: int = Field() + + +class ElevatorDemand(SQLModel, table=True): + __table_args__ = ( + UniqueConstraint( + "elevator_id", + "level", + name="uniq_elevator_id_timestamp_level" + ), + ) + id: int = Field(primary_key=True) + elevator_id: int = Field(foreign_key="elevator.id") + timestamp: int = Field() + level: int = Field() + + +class ElevatorDemandHistory(SQLModel, table=True): + """ + Storing the demand that was completely attended by the Elevator + and splitting the timestamp into week_day, hour, minute and second + for the given demand, so it will be easier to group demands by any + time heuristics with second precision. 
+ """ + id: int = Field(primary_key=True) + elevator_id: int = Field(foreign_key="elevator.id") + week_day: int = Field() + hour: int = Field() + minute: int = Field() + second: int = Field() + level: int = Field() \ No newline at end of file diff --git a/nextlevel-elevator/tests/__init__.py b/nextlevel-elevator/tests/__init__.py new file mode 100644 index 0000000..b28b04f --- /dev/null +++ b/nextlevel-elevator/tests/__init__.py @@ -0,0 +1,3 @@ + + + diff --git a/nextlevel-elevator/tests/test_api_v1_elevator.py b/nextlevel-elevator/tests/test_api_v1_elevator.py new file mode 100644 index 0000000..9ed31b2 --- /dev/null +++ b/nextlevel-elevator/tests/test_api_v1_elevator.py @@ -0,0 +1,198 @@ +import os +import pytest +from typing import List +from main import app +from src.models import Elevator, ElevatorDemand, ElevatorDemandHistory +from src import db +from src.db import Session, get_session + +from sqlmodel import select, create_engine, SQLModel + +from fastapi.testclient import TestClient + +from datetime import datetime + + +@pytest.fixture() +def session(): + engine = create_engine( + "sqlite:///testing.db", connect_args={"check_same_thread": False} + ) + SQLModel.metadata.create_all(engine) + with db.sqlmodel.Session(engine) as s: + yield s + + os.remove("testing.db") + +@pytest.fixture() +def client(session): + def get_session_override(): + return session + + app.dependency_overrides[get_session] = get_session_override + client = TestClient(app) + yield client + app.dependency_overrides.clear() + + +@pytest.fixture() +def elevator(session): + elevator = Elevator( + id=1, + min_level=1, + max_level=10 + ) + session.add(elevator) + session.commit() + return elevator + + +@pytest.fixture() +def demands(session, elevator: Elevator) -> List[ElevatorDemand]: + testdata = [ + (datetime(2001, 12, 12, 8, 1, 0), 1), + (datetime(2001, 12, 12, 8, 15, 0), 7), + (datetime(2001, 12, 12, 8, 18, 0), 9), + (datetime(2001, 12, 12, 8, 24, 0), 6), + ] + demands = [] + for date, 
level in testdata: + demands.append( + ElevatorDemand( + elevator_id=elevator.id, + timestamp=date.timestamp(), + level=level + ) + ) + session.add_all(demands) + session.commit() + return demands + +@pytest.fixture() +def demand_history(session, elevator: Elevator) -> List[ElevatorDemand]: + testdata = [ + (datetime(2001, 12, 12, 8, 1, 0), 1), + (datetime(2001, 12, 12, 8, 15, 0), 7), + (datetime(2001, 12, 12, 8, 18, 0), 9), + (datetime(2001, 12, 12, 8, 24, 0), 6), + ] + history = [] + for date, level in testdata: + dt = date + history.append( + ElevatorDemandHistory( + elevator_id=elevator.id, + week_day=dt.weekday(), + hour=dt.hour, + minute=dt.minute, + second=dt.second, + level=level + ) + ) + + session.add_all(history) + session.commit() + return history + +def test_create_elevator(client: TestClient): + resp = client.post(f"/api/v1/elevator", json={"min_level": 1, "max_level": 10}) + assert resp.status_code == 201 + assert resp.json() == { + 'id': 1, + "min_level": 1, + "max_level": 10 + } + +def test_call_elevator_not_found(client: TestClient, session: db.sqlmodel.Session): + resp = client.put(f"/api/v1/elevator/1", json={"level": 1}) + assert resp.status_code == 404 + demand_stmt = select(ElevatorDemand).where( + ElevatorDemand.elevator_id == 1 and + ElevatorDemand.level == 1 + ) + demands = session.exec(demand_stmt) + assert len(list(demands)) == 0 + +def test_call_elevator(client: TestClient, session: db.sqlmodel.Session, elevator: Elevator): + resp = client.put(f"/api/v1/elevator/{elevator.id}", json={"level": 1}) + assert resp.status_code == 202 + demand_stmt = select(ElevatorDemand).where( + ElevatorDemand.elevator_id == elevator.id and + ElevatorDemand.level == 1 + ) + demands = session.exec(demand_stmt) + assert len(list(demands)) == 1 + + resp = client.put(f"/api/v1/elevator/{elevator.id}", json={"level": 1}) + assert resp.status_code == 409 + +def test_call_elevator_overflow(client: TestClient, session: db.sqlmodel.Session, elevator: Elevator): + 
resp = client.put(f"/api/v1/elevator/{elevator.id}", json={"level": elevator.min_level-1}) + assert resp.status_code == 400 + demand_stmt = select(ElevatorDemand).where( + ElevatorDemand.elevator_id == elevator.id + ) + demands = session.exec(demand_stmt) + assert len(list(demands)) == 0 + + resp = client.put(f"/api/v1/elevator/{elevator.id}", json={"level": elevator.max_level+1}) + assert resp.status_code == 400 + demand_stmt = select(ElevatorDemand).where( + ElevatorDemand.elevator_id == elevator.id + ) + demands = session.exec(demand_stmt) + assert len(list(demands)) == 0 + +def test_set_state( + client: TestClient, + session: db.sqlmodel.Session, + elevator: Elevator, + demands: List[Elevator] + ): + resp = client.post(f"/api/v1/elevator/{elevator.id}/state", json={"level": 1}) + assert resp.status_code == 200 + + demand_stmt = select(ElevatorDemand)\ + .where(ElevatorDemand.elevator_id == elevator.id)\ + .where(ElevatorDemand.level == 1) + + demands_for_level_1 = session.exec(demand_stmt).first() + assert demands_for_level_1 is None + + history_stmt = select(ElevatorDemandHistory)\ + .where(ElevatorDemandHistory.elevator_id == elevator.id)\ + .where(ElevatorDemandHistory.level == 1) + + history = session.exec(history_stmt).first() + expected_datetime = datetime(2001, 12, 12, 8, 1, 0) + assert history.week_day == expected_datetime.weekday() + assert history.hour == expected_datetime.hour + assert history.minute == expected_datetime.minute + assert history.second == expected_datetime.second + + +def test_get_dataset_csv( + client: TestClient, + demand_history: List[ElevatorDemandHistory] +): + resp = client.get(f"/api/v1/elevator/dataset.csv") + assert resp.status_code == 200 + + assert resp.status_code == 200 + assert "Content-Disposition" in resp.headers + assert "filename=dataset.csv" in resp.headers["Content-Disposition"] + + processed_content = resp.text.splitlines() + expected_headers = "elevator_id,week_day,hour,minute,second,level" + assert 
processed_content[0] == expected_headers + for i, h in enumerate(demand_history): + print(processed_content) + row = [ + str(h.elevator_id), + str(h.week_day), + str(h.hour), + str(h.minute), + str(h.second), + str(h.level), + ] + assert processed_content[i+1] == ",".join(row) \ No newline at end of file