diff --git a/.env b/.env new file mode 100644 index 0000000..2e9ff3e --- /dev/null +++ b/.env @@ -0,0 +1,2 @@ +MONGO_INITDB_ROOT_USERNAME=root +MONGO_INITDB_ROOT_PASSWORD=toor \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..bf96a5a --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +.venv +**/__pycache__ +**/.pytest_cache \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..7119865 --- /dev/null +++ b/README.md @@ -0,0 +1,26 @@ +# DevTest Citric Sheep +Solution by: Edmar Caixeta Filho +Date: 05/July/2025 + +## Domain Modeling + +The technical challenge could be as complicated as you wanted it to be. In a first scenario, I imagined an elevator system in a commercial building: there is usually a totem where you can enter the floor you want to go to, and from that totem all the elevators are managed — the nearest one could be sent, and if many users were requesting it, capacity control could be carried out, etc. This modeling could reflect a real problem, but it would bring a lot of complexity, and the challenge was very clear that there was no need for complications when modeling the problem. + +So I thought of it in a simpler way: there is only one elevator — so there are no separate elevator instances and I don't need to do the route optimization required in the analysis above. It has one business rule relating to its load weight — I assumed there is a weighing scale in the elevator for this control — and it also validates the floors, so in src/elevator.py there is a definition of the maximum floor and minimum floor. The elevator only has two statuses: IDDLE and RUNNING. If the user's source floor is different from the elevator's current floor, a repositioning trip is first made from the current floor to the user's source floor, and then the requested trip is made. 
+ +## API +### Building +I used the **Python3** programming language with the **FastAPI** framework for my solution in conjunction with the **Mongo** noSQL database. In addition, the environment is containerized in **Docker** and to run it just use the command in your terminal: +```bash +docker-compose --env-file .env up -d +``` +### API endpoints +**/call** -> The endpoint where the user creates a demand for the elevator, sent as a JSON body: {"src_floor": int, "dest_floor": int, "weight": float}. This endpoint receives the call, validates it and, if executed, writes the change of state and its variables to the database. Returns a JSONResponse. + +**/formated_data** -> The endpoint where the user requests the data stored in the database formatted in CSV, one of the most common formats for ingesting data into ML models (as requested in the problem definition). There is no need to pass any arguments and the return is the CSV itself if the database is already populated, otherwise it will return a response warning that it is empty. + +### Tests +I have developed 6 tests for my API to ensure the quality and completeness of the returns from each endpoint according to the business rules and guard-cases. The tests can be seen in the file api/test_api.py. To run them, use the following command in the api directory: +```bash +pytest -v +``` diff --git a/api/Dockerfile b/api/Dockerfile new file mode 100644 index 0000000..a842731 --- /dev/null +++ b/api/Dockerfile @@ -0,0 +1,13 @@ +FROM python:3.13-slim + +WORKDIR /app + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt + +EXPOSE 5000 + +COPY . . 
+ +CMD ["fastapi", "run", "app.py", "--port", "5000"] \ No newline at end of file diff --git a/api/app.py b/api/app.py new file mode 100644 index 0000000..d10b59e --- /dev/null +++ b/api/app.py @@ -0,0 +1,64 @@ +import io +from pathlib import Path +from fastapi import FastAPI, status +from fastapi.responses import JSONResponse +from starlette.responses import StreamingResponse +from src.elevator import Elevator +from src.models import Demand +from src.mongo import read_all +from dotenv import load_dotenv +import pandas as pd + +dotenv_path = Path(__file__).resolve().parents[1] / ".env" +load_dotenv(dotenv_path) + +app = FastAPI() +elevator = Elevator() + +''' +call endpoint +''' +@app.post('/call') +def call_elevator(demand : Demand) -> JSONResponse: + result = elevator.process_demand(src_floor=demand.src_floor, + dest_floor=demand.dest_floor, + load_weight=demand.weight + ) + if 'error' in result: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content={"error" : result['error']} + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content={"message" : result['message']} + ) + +''' +formated_data endpoint +''' + +@app.get('/formated_data') +def get_data() -> StreamingResponse: + docs = read_all() + + if not docs: + return StreamingResponse( + iter(['No data available']), + media_type='text/plain' + ) + + df = pd.DataFrame(docs) + df['event_timestamp'] = df['start_timestamp'].combine_first(df['timestamp']) + df.drop(columns=['start_timestamp', 'timestamp'], inplace=True) + df.fillna("", inplace=True) + stream = io.StringIO() + df.to_csv(stream, index=False) + csv_data = stream.getvalue() + + return StreamingResponse( + iter([csv_data]), + media_type="text/csv", + headers={"Content-Disposition": "attachment; filename=elevator_data.csv"} + ) \ No newline at end of file diff --git a/api/requirements.txt b/api/requirements.txt new file mode 100644 index 0000000..db89eaa --- /dev/null +++ b/api/requirements.txt @@ -0,0 +1,48 @@ 
+annotated-types==0.7.0 +anyio==4.9.0 +certifi==2025.6.15 +click==8.2.1 +dnspython==2.7.0 +dotenv==0.9.9 +email_validator==2.2.0 +fastapi==0.115.14 +fastapi-cli==0.0.7 +h11==0.16.0 +httpcore==1.0.9 +httptools==0.6.4 +httpx==0.28.1 +idna==3.10 +iniconfig==2.1.0 +Jinja2==3.1.6 +loadenv==0.1.1 +markdown-it-py==3.0.0 +MarkupSafe==3.0.2 +mdurl==0.1.2 +numpy==2.3.1 +packaging==25.0 +pandas==2.3.0 +pluggy==1.6.0 +pydantic==2.11.7 +pydantic_core==2.33.2 +Pygments==2.19.2 +pymongo==4.13.2 +pytest==8.4.1 +python-dateutil==2.9.0.post0 +python-dotenv==1.1.1 +python-multipart==0.0.20 +pytz==2025.2 +PyYAML==6.0.2 +rich==14.0.0 +rich-toolkit==0.14.8 +shellingham==1.5.4 +six==1.17.0 +sniffio==1.3.1 +starlette==0.46.2 +typer==0.16.0 +typing-inspection==0.4.1 +typing_extensions==4.14.0 +tzdata==2025.2 +uvicorn==0.35.0 +uvloop==0.21.0 +watchfiles==1.1.0 +websockets==15.0.1 diff --git a/api/src/elevator.py b/api/src/elevator.py new file mode 100644 index 0000000..e94eae8 --- /dev/null +++ b/api/src/elevator.py @@ -0,0 +1,66 @@ +from datetime import datetime +from .enums import Status +from .mongo import create_log + + +MAX_FLOOR = 10 +MIN_FLOOR = -1 +MAX_WEIGHT = 900.0 # kilograms + +class Elevator: + def __init__(self) -> None: + self.current_floor = 0 + self.weight = 0.0 + + def is_floor_valid(self, floor : int) -> bool: + if floor < MIN_FLOOR: + return False + if floor > MAX_FLOOR: + return False + return True + + def process_demand(self, + src_floor : int, + dest_floor : int, + load_weight : float + ) -> dict: + ''' + Guard Cases + ''' + if not self.is_floor_valid(src_floor) or not self.is_floor_valid(dest_floor): + return {"error" : "Invalid Floor"} + + if src_floor == dest_floor: + return {"error" : "Source Floor equals Destination Floor"} + + if load_weight > MAX_WEIGHT: + return {"error" : "Overweight load"} + rest_floor = self.current_floor + + + if not self.current_floor == src_floor: + _ = self.process_demand(self.current_floor, src_floor, load_weight=0) + + # Operating + 
log = { + 'status' : Status.RUNNING.value, + 'src_floor' : src_floor, + 'dest_floor' : dest_floor, + 'rest_floor' : rest_floor, + 'start_timestamp' : datetime.now(), + 'weight' : load_weight + } + create_log(log) + + # Done + self.current_floor = dest_floor + + finish_log = { + 'status' : Status.IDDLE.value, + 'rest_floor' : self.current_floor, + 'timestamp' : datetime.now() + } + create_log(finish_log) + + return {"message" : "OK"} + diff --git a/api/src/enums.py b/api/src/enums.py new file mode 100644 index 0000000..77b9c55 --- /dev/null +++ b/api/src/enums.py @@ -0,0 +1,5 @@ +from enum import Enum + +class Status(Enum): + IDDLE = 'IDDLE' + RUNNING = 'RUNNING' \ No newline at end of file diff --git a/api/src/models.py b/api/src/models.py new file mode 100644 index 0000000..ed28e6b --- /dev/null +++ b/api/src/models.py @@ -0,0 +1,6 @@ +from pydantic import BaseModel + +class Demand(BaseModel): + src_floor : int + dest_floor : int + weight : float \ No newline at end of file diff --git a/api/src/mongo.py b/api/src/mongo.py new file mode 100644 index 0000000..3d63d43 --- /dev/null +++ b/api/src/mongo.py @@ -0,0 +1,32 @@ +import pymongo +from pymongo.collection import Collection +import uuid +import os + +import pymongo.collection + +MONGO_HOST = os.getenv('MONGO_HOST') +MONGO_PORT = os.getenv('MONGO_PORT') +MONGO_USER = os.getenv('MONGO_INITDB_ROOT_USERNAME') +MONGO_PASSWORD = os.getenv('MONGO_INITDB_ROOT_PASSWORD') +MONGO_URI = f'mongodb://{MONGO_USER}:{MONGO_PASSWORD}@mongo:27017' + +client = pymongo.MongoClient(MONGO_URI) +collection : Collection = client['citric-sheep']['elevator-logs'] + +# CRUD +def create_log(log : dict): + log['trip_id'] = str(uuid.uuid1()) + collection.insert_one(log) + +def read_log(trip_id : str): + doc = collection.find_one({'trip_id' : trip_id}) + return doc + +def delete_log(trip_id : str): + doc = collection.find_one_and_delete({'trip_id' : trip_id}) + return doc + +def read_all(): + docs = collection.find({}, {'_id' : 0}) + return 
list(docs) \ No newline at end of file diff --git a/api/test_api.py b/api/test_api.py new file mode 100644 index 0000000..49e380c --- /dev/null +++ b/api/test_api.py @@ -0,0 +1,66 @@ +from fastapi.testclient import TestClient +from src.models import Demand +from unittest.mock import patch, MagicMock + +from app import app + +client = TestClient(app) + +@patch('src.mongo.collection') +def test_call_elevator_success(mock_collection : MagicMock): + ''' + Happy Request :) + ''' + mock_collection.insert_one.return_value = MagicMock(inserted_id="mock_id_123") + request_payload = {'src_floor' : -1, 'dest_floor' : 8, 'weight' : 75} + response = client.post('/call', json=request_payload) + assert response.status_code == 200 + assert response.json() == {'message' : 'OK'} + +def test_call_elevator_invalid_floor(): + ''' + Business Logic Reproval + ''' + request_payload = {"src_floor": 1, "dest_floor": 99, "weight": 75} + response = client.post("/call", json=request_payload) + + assert response.status_code == 400 + assert "error" in response.json() + +def test_call_elevator_invalid_floors(): + ''' + Both src and dst floor are invalid and equal + ''' + request_payload = {"src_floor": -99, "dest_floor": -99, "weight": 75} + response = client.post("/call", json=request_payload) + + assert response.status_code == 400 + assert "error" in response.json() + +def test_same_floors(): + ''' + Same valid floors + ''' + request_payload = {"src_floor": 10, "dest_floor": 10, "weight": 75} + response = client.post("/call", json=request_payload) + + assert response.status_code == 400 + assert "error" in response.json() + +def test_call_overweight(): + ''' + Weight Logic Reproval + ''' + request_payload = {"src_floor": 0, "dest_floor": 10, "weight": 900.1} + response = client.post("/call", json=request_payload) + + assert response.status_code == 400 + assert "error" in response.json() + +@patch('src.mongo.collection') +def test_get_formated_data_when_empty(mock_collection: MagicMock): + 
mock_collection.find.return_value = [] + response = client.get("/formated_data") + + assert response.status_code == 200 + assert response.headers['content-type'] == 'text/plain; charset=utf-8' \ No newline at end of file diff --git a/chatgpt/app_tests.py b/chatgpt/app_tests.py deleted file mode 100644 index 258a8a6..0000000 --- a/chatgpt/app_tests.py +++ /dev/null @@ -1,10 +0,0 @@ -def test_create_demand(client): - response = client.post('/demand', json={'floor': 3}) - assert response.status_code == 201 - assert response.get_json() == {'message': 'Demand created'} - - -def test_create_state(client): - response = client.post('/state', json={'floor': 5, 'vacant': True}) - assert response.status_code == 201 - assert response.get_json() == {'message': 'State created'} diff --git a/chatgpt/db.sql b/chatgpt/db.sql deleted file mode 100644 index 1555ffe..0000000 --- a/chatgpt/db.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE elevator_demands ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, - floor INTEGER -); - -CREATE TABLE elevator_states ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, - floor INTEGER, - vacant BOOLEAN -); diff --git a/chatgpt/main.py b/chatgpt/main.py deleted file mode 100644 index 7f97d98..0000000 --- a/chatgpt/main.py +++ /dev/null @@ -1,43 +0,0 @@ -from flask import Flask, request, jsonify -from flask_sqlalchemy import SQLAlchemy -from datetime import datetime - -app = Flask(__name__) -app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///elevator.db' -db = SQLAlchemy(app) - - -class ElevatorDemand(db.Model): - id = db.Column(db.Integer, primary_key=True) - timestamp = db.Column(db.DateTime, default=datetime.utcnow) - floor = db.Column(db.Integer, nullable=False) - - -class ElevatorState(db.Model): - id = db.Column(db.Integer, primary_key=True) - timestamp = db.Column(db.DateTime, default=datetime.utcnow) - floor = db.Column(db.Integer, nullable=False) - vacant = 
db.Column(db.Boolean, nullable=False) - - -@app.route('/demand', methods=['POST']) -def create_demand(): - data = request.get_json() - new_demand = ElevatorDemand(floor=data['floor']) - db.session.add(new_demand) - db.session.commit() - return jsonify({'message': 'Demand created'}), 201 - - -@app.route('/state', methods=['POST']) -def create_state(): - data = request.get_json() - new_state = ElevatorState(floor=data['floor'], vacant=data['vacant']) - db.session.add(new_state) - db.session.commit() - return jsonify({'message': 'State created'}), 201 - - -if __name__ == '__main__': - db.create_all() - app.run(debug=True) diff --git a/chatgpt/requirements.txt b/chatgpt/requirements.txt deleted file mode 100644 index 14d1bb0..0000000 --- a/chatgpt/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -Flask==2.0.2 -Flask-SQLAlchemy==2.5.1 -pytest==6.2.5 -pytest-flask==1.2.0 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..ef3a075 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,25 @@ +services: + api: + build: + context: ./api + dockerfile: Dockerfile + container_name: api + ports: + - "5000:5000" + env_file: + - .env + networks: + - backend + mongo: + image: mongo:latest + container_name: mongo + ports: + - "27017:27017" + env_file: + - .env + networks: + - backend + +networks: + backend: + driver: bridge \ No newline at end of file diff --git a/readme.md b/readme.md deleted file mode 100644 index ea5e444..0000000 --- a/readme.md +++ /dev/null @@ -1,58 +0,0 @@ -# Dev Test - -## Elevators -When an elevator is empty and not moving this is known as it's resting floor. -The ideal resting floor to be positioned on depends on the likely next floor that the elevator will be called from. - -We can build a prediction engine to predict the likely next floor based on historical demand, if we have the data. 
- -The goal of this project is to model an elevator and save the data that could later be used to build a prediction engine for which floor is the best resting floor at any time -- When people call an elevator this is considered a demand -- When the elevator is vacant and not moving between floors, the current floor is considered its resting floor -- When the elevator is vacant, it can stay at the current position or move to a different floor -- The prediction model will determine what is the best floor to rest on - - -_The requirement isn't to complete this system but to start building a system that would feed into the training and prediction -of an ML system_ - -You will need to talk through your approach, how you modelled the data and why you thought that data was important, provide endpoints to collect the data and -a means to store the data. Testing is important and will be used verify your system - -## A note on AI generated code -This project isn't about writing code, AI can and will do that for you. -The next step in this process is to talk through your solution and the decisions you made to come to them. It makes for an awkward and rather boring interview reviewing chatgpt's solution. - -If you use a tool to help you write code, that's fine, but we want to see _your_ thought process. - -Provided under the chatgpt folder is the response you get back from chat4o. -If your intention isn't to complete the project but to get an AI to spec it for you please, feel free to submit this instead of wasting OpenAI's server resources. 
- - -## Problem statement recap -This is a domain modeling problem to build a fit for purpose data storage with a focus on ai data ingestion -- Model the problem into a storage schema (SQL DB schema or whatever you prefer) -- CRUD some data -- Add some flair with a business rule or two -- Have the data in a suitable format to feed to a prediction training algorithm - ---- - -#### To start -- Fork this repo and begin from there -- For your submission, PR into the main repo. We will review it, a offer any feedback and give you a pass / fail if it passes PR -- Don't spend more than 4 hours on this. Projects that pass PR are paid at the standard hourly rate - -#### Marking -- You will be marked on how well your tests cover the code and how useful they would be in a prod system -- You will need to provide storage of some sort. This could be as simple as a sqlite or as complicated as a docker container with a migrations file -- Solutions will be marked against the position you are applying for, a Snr Dev will be expected to have a nearly complete solution and to have thought out the domain and built a schema to fit any issues that could arise -A Jr. dev will be expected to provide a basic design and understand how ML systems like to ingest data - - -#### Trip-ups from the past -Below is a list of some things from previous submissions that haven't worked out -- Built a prediction engine -- Built a full website with bells and whistles -- Spent more than the time allowed (you won't get bonus points for creating an intricate solution, we want a fit for purpose solution) -- Overcomplicated the system mentally and failed to start