Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
2971578
chore: basic structure of the backend
martinsaieh96 Jun 22, 2025
fb99ef4
chore: add requirements, Dockerfile, docker-compose, .env.example
martinsaieh96 Jun 22, 2025
f580000
feat: update python version
martinsaieh96 Jun 22, 2025
0e6a57c
feat: update docker-compose.yml
martinsaieh96 Jun 22, 2025
46ec24f
chore: add .gitignore
martinsaieh96 Jun 22, 2025
679a2cf
feat: changes in initial setup
martinsaieh96 Jun 24, 2025
39f0297
chore: added Alembic migrations
martinsaieh96 Jun 24, 2025
f0c6830
feat: add DB models, Pydantic schemas, services logic and endpoints
martinsaieh96 Jun 24, 2025
3884568
feat: correct identation
martinsaieh96 Jun 24, 2025
62ce068
add destination_floor
martinsaieh96 Jun 24, 2025
3a32fd7
feat: change routes
martinsaieh96 Jun 24, 2025
4173e8c
feat: add floor range
martinsaieh96 Jun 24, 2025
8a761aa
feat: add unitary tests
martinsaieh96 Jun 24, 2025
00f869e
feat: add a fake data generator to train the model
martinsaieh96 Jun 24, 2025
90a0cce
feat: add weekend logic
martinsaieh96 Jun 25, 2025
47cb73a
feat: eda of fake data
martinsaieh96 Jun 25, 2025
e441971
feat: delete file
martinsaieh96 Jun 25, 2025
003a9f5
feat:edit route
martinsaieh96 Jun 25, 2025
e368064
feat: add training jupyter
martinsaieh96 Jun 25, 2025
fc2d406
feat: added joblib
martinsaieh96 Jun 25, 2025
437c45c
feat: schema for ml model
martinsaieh96 Jun 25, 2025
e32c116
add ml endpoint
martinsaieh96 Jun 25, 2025
0742828
feat: ml schema
martinsaieh96 Jun 25, 2025
3c246c4
feat: change model route
martinsaieh96 Jun 25, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
DATABASE_URL=postgresql://devsaieh:saiehpass@localhost:5433/devtest_db
ENV=development
8 changes: 8 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
__pycache__/
*.pyc

docker-compose.override.yml

pgdata/

# NOTE(review): ignoring alembic/versions/ keeps migration scripts out of
# version control — Alembic revisions are normally committed; confirm intent.
alembic/versions/
14 changes: 14 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
FROM python:3.12-slim

WORKDIR /DEVTEST

# build-essential is needed to compile any wheels without prebuilt binaries.
# Clean the apt cache in the SAME layer so it doesn't bloat the image.
RUN apt-get update && apt-get install -y --no-install-recommends build-essential \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the dependency layer is cached across builds
# that only change application code.
COPY requirements.txt .

RUN pip install --no-cache-dir -r requirements.txt

COPY . .
EXPOSE 8000

# NOTE(review): --reload is a development convenience (watches files, restarts
# the server); drop it for production images.
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
116 changes: 116 additions & 0 deletions alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# NOTE(review): credentials are hardcoded here; prefer overriding the URL from
# the DATABASE_URL environment variable inside env.py — confirm before shipping.
sqlalchemy.url = postgresql://devsaieh:saiehpass@db:5432/devtest_db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
1 change: 1 addition & 0 deletions alembic/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration.
81 changes: 81 additions & 0 deletions alembic/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Import Base from the module where the models are defined so that
# "alembic revision --autogenerate" can diff the DB against this metadata.
from app.db.models import Base
target_metadata = Base.metadata


# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the migration context with just the database URL — no Engine
    and therefore no DBAPI required. context.execute() calls emit SQL text to
    the script output instead of hitting a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] config section and binds a live
    connection to the migration context before running the migrations.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()


# Entry point: Alembic decides the mode from the command line
# (`--sql` selects offline mode); dispatch accordingly.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
26 changes: 26 additions & 0 deletions alembic/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
${upgrades if upgrades else "pass"}


def downgrade() -> None:
${downgrades if downgrades else "pass"}
69 changes: 69 additions & 0 deletions app/api/v1/endpoints/routes_demand.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
"""
Endpoints para manejar las demandas (llamadas) del ascensor.

Incluye lógica de negocio que cierra automáticamente el último resting_period abierto
para el ascensor cuando se recibe una nueva demanda, y validaciones realistas de dominio.

"""

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from app.schemas.demand import DemandCreate, DemandRead
from app.db.models import Demand, RestingPeriod
from app.db.db import get_db
from datetime import datetime, timezone

router = APIRouter()

# Defino el rango de pisos permitido.
MIN_FLOOR = 1
MAX_FLOOR = 12

@router.post("/demands/", response_model=DemandRead)
def create_demand(demand: DemandCreate, db: Session = Depends(get_db)):
"""
Registra una nueva demanda de ascensor.
- Valida que el piso esté en rango permitido.
- Cierra el último resting_period abierto (sin resting_end) para el ascensor, si existe.
"""
if demand.destination_floor < MIN_FLOOR or demand.destination_floor > MAX_FLOOR:
raise HTTPException(
status_code=400,
detail=f"El piso destino debe estar entre {MIN_FLOOR} y {MAX_FLOOR}."
)
if demand.floor < MIN_FLOOR or demand.floor > MAX_FLOOR:
raise HTTPException(
status_code=400,
detail=f"El piso debe estar entre {MIN_FLOOR} y {MAX_FLOOR}."
)

# Al registrar una demanda, cerramos automáticamente el resting actual (idle) si existe.
last_resting = db.query(RestingPeriod).filter(
RestingPeriod.elevator_id == demand.elevator_id,
RestingPeriod.resting_end.is_(None)
).order_by(RestingPeriod.resting_start.desc()).first()

if last_resting:
# Usamos el mismo timestamp de la demanda para cerrar el periodo idle.
last_resting.resting_end = demand.timestamp_called or datetime.now(timezone.utc)
db.add(last_resting)

db_demand = Demand(
elevator_id=demand.elevator_id,
floor=demand.floor,
destination_floor=demand.destination_floor,
timestamp_called=demand.timestamp_called or datetime.now(timezone.utc)
)

db.add(db_demand)
db.commit()
db.refresh(db_demand)
return db_demand

@router.get("/demands/", response_model=list[DemandRead])
def list_demands(db: Session = Depends(get_db)):
"""
Lista todas las demandas registradas.
Pensado para debug y análisis histórico.
"""
return db.query(Demand).all()
27 changes: 27 additions & 0 deletions app/api/v1/endpoints/routes_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
from fastapi import APIRouter, HTTPException
from app.schemas.model_input import RestingFloorRequest
import joblib
import numpy as np  # NOTE(review): unused in this module — confirm before removing
import os

router = APIRouter()

# Path to the serialized model, resolved relative to this file so it works
# regardless of the process's working directory.
MODEL_PATH = os.path.join(os.path.dirname(__file__), "../../../ml/resting_floor_model.joblib")
# Loaded once at import time: a missing or corrupt model file fails fast at
# application startup instead of on the first request.
model = joblib.load(MODEL_PATH)

@router.post("/predict_resting_floor/")
def predict_resting_floor(request: RestingFloorRequest):
try:
features = [
request.hour,
request.weekday,
request.demand_count,
request.avg_floor,
request.most_common_floor,
request.avg_direction,
request.peak_hours
]
pred = model.predict([features])[0]
return {"best_resting_floor": int(pred)}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
Loading