diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..07672f9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,57 @@ +# Python Bytecode +*.pyc +*.pyo +__pycache__/ + +# Virtual Environment +venv/ +env/ +.venv/ +ENV/ +env.bak/ +venv.bak/ + +# IDE or Editor configurations +.vscode/ +.idea/ +*.swp +*.swo + +# Dependency directories +node_modules/ +*.egg-info/ +dist/ +build/ +*.log + +# .env file for environment variables +.env + +# Database files (if you're using SQLite, for example) +*.sqlite3 + +# FastAPI (and other Python web frameworks) related files +*.db +*.sqlite + +# Pytest cache +.cache/ + +# mypy +.mypy_cache/ + +# Pydantic generated models or any auto-generated files +*.pydantic/ + +# Docker-related files +docker-compose.override.yml +Dockerfile + +# Coverage reports from testing +.coverage +coverage.xml +*.cover + +# Temporary files +*.bak +*.tmp diff --git a/Ticket Booking System.postman_collection.json b/Ticket Booking System.postman_collection.json new file mode 100644 index 0000000..37bd635 --- /dev/null +++ b/Ticket Booking System.postman_collection.json @@ -0,0 +1,1281 @@ +{ + "info": { + "_postman_id": "356b096b-b97d-415c-9b1f-2e4517aa7624", + "name": "Ticket Booking System", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", + "_exporter_id": "23667429", + "_collection_link": "https://red-space-314271.postman.co/workspace/Pipclimber~ec93909c-c7ba-4f15-a9c6-f2c778d11dce/collection/23667429-356b096b-b97d-415c-9b1f-2e4517aa7624?action=share&source=collection_link&creator=23667429" + }, + "item": [ + { + "name": "api/v1", + "item": [ + { + "name": "auth", + "item": [ + { + "name": "Create User", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": \"String\",\n \"email\": \"user@example.com\",\n \"phone\": \"9876543210\",\n \"password\": \"string\"\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{baseUrl}}/api/v1/auth/add/user/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "auth", + "add", + "user", + "" + ] + }, + "description": "Create new user\n\nArgs:\n newUser (UserCreate): User's data\n \nReturns:\n _type_: resp" + }, + "response": [ + { + "name": "Validation Error", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": \"\",\n \"email\": \"\",\n \"phone\": \"\",\n \"password\": \"\"\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{baseUrl}}/api/v1/auth/add/user/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "auth", + "add", + "user", + "" + ] + } + }, + "status": "Unprocessable Entity (WebDAV) (RFC 4918)", + "code": 422, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" + }, + { + "name": "successful response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": 
\"String\",\n \"email\": \"user@example.com\",\n \"phone\": \"9876543210\",\n \"password\": \"string\"\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{baseUrl}}/api/v1/auth/add/user/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "auth", + "add", + "user", + "" + ] + } + }, + "status": "Created", + "code": 201, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "date", + "value": "Tue, 03 Dec 2024 10:58:10 GMT" + }, + { + "key": "server", + "value": "uvicorn" + }, + { + "key": "content-length", + "value": "132" + }, + { + "key": "content-type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"User created successfully\",\n \"status\": 201,\n \"data\": {\n \"id\": 1,\n \"name\": \"String\",\n \"email\": \"user@example.com\",\n \"phone\": \"9876543210\"\n }\n}" + } + ] + }, + { + "name": "Login", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/x-www-form-urlencoded" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "urlencoded", + "urlencoded": [ + { + "key": "username", + "value": "user@example.com", + "description": "(Required) " + }, + { + "key": "password", + "value": "string", + "description": "(Required) " + }, + { + "key": "grant_type", + "value": "", + "disabled": true + }, + { + "key": "scope", + "value": "", + "disabled": true + }, + { + "key": "client_id", + "value": "", + "disabled": true + }, + { + "key": "client_secret", + "value": "", + "disabled": true + } + ] + }, + "url": { + "raw": "{{baseUrl}}/api/v1/auth/login/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "auth", + "login", + "" + ] + }, + "description": "Log in the user\n\nArgs:\n cred (OAuth2PasswordRequestForm): User's Credentials like email and password\n \nReturns:\n _type_: resp" + }, + "response": [ + { + "name": "Successful Response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "urlencoded", + "urlencoded": [ + { + "key": "username", + "value": "", + "description": "(Required) " + }, + { + "key": "password", + "value": "", + "description": "(Required) " + }, + { + "key": "grant_type", + "value": "" + }, + { + "key": "scope", + "value": "" + }, + { + "key": "client_id", + "value": "" + }, + { + "key": "client_secret", + "value": "" + } + ] + }, + "url": { + "raw": "{{baseUrl}}/api/v1/auth/login/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "auth", + "login", + "" + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"\",\n \"status\": \"\",\n \"access_token\": \"\",\n \"token_type\": \"Bearer\"\n}" + }, + { + "name": "Validation Error", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "urlencoded", + "urlencoded": [ + { + "key": "username", + "value": "", + "description": "(Required) " + }, + { + "key": "password", + "value": "", + "description": "(Required) " + }, + { + "key": "grant_type", + "value": "" + }, + { + "key": "scope", + "value": "" + }, + { + "key": "client_id", + "value": "" + }, + { + "key": "client_secret", + "value": "" + } + ] + }, + "url": { + "raw": "{{baseUrl}}/api/v1/auth/login/", + "host": [ + "{{baseUrl}}" + ], 
+ "path": [ + "api", + "v1", + "auth", + "login", + "" + ] + } + }, + "status": "Unprocessable Entity (WebDAV) (RFC 4918)", + "code": 422, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" + }, + { + "name": "exampl response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/x-www-form-urlencoded" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "urlencoded", + "urlencoded": [ + { + "key": "username", + "value": "user@example.com", + "description": "(Required) " + }, + { + "key": "password", + "value": "string", + "description": "(Required) " + }, + { + "key": "grant_type", + "value": "", + "disabled": true + }, + { + "key": "scope", + "value": "", + "disabled": true + }, + { + "key": "client_id", + "value": "", + "disabled": true + }, + { + "key": "client_secret", + "value": "", + "disabled": true + } + ] + }, + "url": { + "raw": "{{baseUrl}}/api/v1/auth/login/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "auth", + "login", + "" + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "date", + "value": "Tue, 03 Dec 2024 11:01:16 GMT" + }, + { + "key": "server", + "value": "uvicorn" + }, + { + "key": "content-length", + "value": "211" + }, + { + "key": "content-type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"User Logged in Successfully\",\n \"status\": 200,\n \"access_token\": \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEiLCJleHAiOjE3MzMzMzA3Nzh9._FbQgsil5HFWvjAJC1m3AupYBeLOnmB6zkCcHJaG8q8\",\n \"token_type\": \"Bearer\"\n}" + } + ] + } + ] + }, + { + "name": "events", + "item": [ + { + "name": "Get All Events", + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEiLCJleHAiOjE3MzMzMzA3Nzh9._FbQgsil5HFWvjAJC1m3AupYBeLOnmB6zkCcHJaG8q8", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "" + ] + }, + "description": "Get list of all events\n\nArgs:\n db (AsyncSession): Async database connection\n current_user (user): Current logged in user\n \nReturns:\n _type_: resp" + }, + "response": [ + { + "name": "Successful Response", + "originalRequest": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "", + "description": "Added as a part of security scheme: oauth2" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "" + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"\",\n \"status\": \"\",\n \"data\": [\n {\n \"id\": \"\",\n \"name\": \"\",\n \"schedule_date\": \"\",\n \"venue\": \"\",\n \"price\": \"\",\n \"total_tickets\": \"\"\n },\n {\n \"id\": \"\",\n \"name\": \"\",\n \"schedule_date\": 
\"\",\n \"venue\": \"\",\n \"price\": \"\",\n \"total_tickets\": \"\"\n }\n ]\n}" + }, + { + "name": "example response", + "originalRequest": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "" + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "date", + "value": "Tue, 03 Dec 2024 11:02:19 GMT" + }, + { + "key": "server", + "value": "uvicorn" + }, + { + "key": "content-length", + "value": "66" + }, + { + "key": "content-type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"Events retrieved successfully\",\n \"status\": 200,\n \"data\": []\n}" + } + ] + }, + { + "name": "Create New Event", + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEiLCJleHAiOjE3MzMzMzA3Nzh9._FbQgsil5HFWvjAJC1m3AupYBeLOnmB6zkCcHJaG8q8", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": \"Rock Concert\",\n \"schedule_date\": \"2025-03-12T15:30:00Z\",\n \"venue\": \"Rose Mary\",\n \"price\": 30000,\n \"total_tickets\": 30\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{baseUrl}}/api/v1/events/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "" + ] + }, + "description": "Create new event\nArgs:\n newEvent (EventCreate): Event Descriptions like date, venue, tickets\n background_taks (BackgroundTaks): Background task for creating the tickets assigned for the event\n db (AsyncSession): Async database connection\n current_user (user): Current logged in user with admin rights i.e. 
to create the events\n \nReturns:\n _type_: resp" + }, + "response": [ + { + "name": "Successful Response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "", + "description": "Added as a part of security scheme: oauth2" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": \"\",\n \"schedule_date\": \"\",\n \"venue\": \"\",\n \"price\": \"\",\n \"total_tickets\": \"\"\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{baseUrl}}/api/v1/events/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "" + ] + } + }, + "status": "Created", + "code": 201, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"\",\n \"status\": \"\",\n \"data\": {\n \"id\": \"\",\n \"name\": \"\",\n \"schedule_date\": \"\",\n \"venue\": \"\",\n \"price\": \"\",\n \"total_tickets\": \"\"\n }\n}" + }, + { + "name": "Validation Error", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "", + "description": "Added as a part of security scheme: oauth2" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": \"\",\n \"schedule_date\": \"\",\n \"venue\": \"\",\n \"price\": \"\",\n \"total_tickets\": \"\"\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{baseUrl}}/api/v1/events/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "" + ] + } + }, + "status": "Unprocessable Entity (WebDAV) (RFC 4918)", + "code": 422, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" + }, + { + "name": "example response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": \"Rock Concert\",\n \"schedule_date\": \"2025-03-12T15:30:00Z\",\n \"venue\": \"Rose Mary\",\n \"price\": 30000,\n \"total_tickets\": 30\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{baseUrl}}/api/v1/events/", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "" + ] + } + }, + "status": "Created", + "code": 201, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "date", + "value": "Tue, 03 Dec 2024 11:07:15 GMT" + }, + { + "key": "server", + "value": "uvicorn" + }, + { + "key": "content-length", + "value": "182" + }, + { + "key": "content-type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"Event created successfully\",\n \"status\": 201,\n \"data\": {\n \"id\": 1,\n \"name\": \"Rock Concert\",\n \"schedule_date\": \"2025-03-12T15:30:00\",\n \"venue\": \"Rose Mary\",\n \"price\": 30000,\n \"total_tickets\": 30\n }\n}" + } + ] + }, + { + "name": "Book Event Ticket", + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": 
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEiLCJleHAiOjE3MzMzMzA3Nzh9._FbQgsil5HFWvjAJC1m3AupYBeLOnmB6zkCcHJaG8q8", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/:event_id/book/:ticket_id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + ":event_id", + "book", + ":ticket_id" + ], + "variable": [ + { + "key": "event_id", + "value": "1", + "description": "(Required) " + }, + { + "key": "ticket_id", + "value": "3", + "description": "(Required) " + } + ] + }, + "description": "Book ticket\n\nArgs:\n event_id (int): The ID of event for which ticket is to bought\n ticket_id (int): The ID of ticket that is meant to be bought\n current_user (user): Currently logged in user who is booking the ticket,\n db (AsyncSession): Async database connection\n \nReturns:\n _type_: resp" + }, + "response": [ + { + "name": "Successful Response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "", + "description": "Added as a part of security scheme: oauth2" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/:event_id/book/:ticket_id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + ":event_id", + "book", + ":ticket_id" + ], + "variable": [ + { + "key": "event_id", + "value": "", + "description": "(Required) " + }, + { + "key": "ticket_id", + "value": "", + "description": "(Required) " + } + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{}" + }, + { + "name": "Validation Error", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "", + "description": "Added as a part of security scheme: oauth2" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/:event_id/book/:ticket_id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + ":event_id", + "book", + ":ticket_id" + ], + "variable": [ + { + "key": "event_id", + "value": "", + "description": "(Required) " + }, + { + "key": "ticket_id", + "value": "", + "description": "(Required) " + } + ] + } + }, + "status": "Unprocessable Entity (WebDAV) (RFC 4918)", + "code": 422, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" + }, + { + "name": "example response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/:event_id/book/:ticket_id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + ":event_id", + "book", + ":ticket_id" + ], + "variable": [ + { + "key": "event_id", + "value": "1", + "description": "(Required) " + }, + { + "key": "ticket_id", + "value": "3", + "description": "(Required) " + } + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "date", + "value": "Tue, 03 Dec 2024 11:11:36 GMT" + }, + { + "key": "server", + "value": 
"uvicorn" + }, + { + "key": "content-length", + "value": "531" + }, + { + "key": "content-type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"Ticket booked successfully. Please proceed to payment\",\n \"status\": 200,\n \"data\": {\n \"id\": \"cs_test_a1gvT3epPTGt7Ct7z25hqBKaAsTnbY2MvCRoahJ7yyBfu4qm6IZZ65KTlC\",\n \"url\": \"https://checkout.stripe.com/c/pay/cs_test_a1gvT3epPTGt7Ct7z25hqBKaAsTnbY2MvCRoahJ7yyBfu4qm6IZZ65KTlC#fidkdWxOYHwnPyd1blpxYHZxWjA0VFdAU2RETm9IVEJta2BPa0tTNGFrZlJxQFRMXWh2fUIwSmtnSnNTckR3M0RHX2AwPXQ0VWJfTmBqSj08UGl0R3U8YTdiYn1PM1JRd01hR250VktMcjdmNTVqN3xmZndhUScpJ2N3amhWYHdzYHcnP3F3cGApJ2lkfGpwcVF8dWAnPyd2bGtiaWBabHFgaCcpJ2BrZGdpYFVpZGZgbWppYWB3dic%2FcXdwYHgl\"\n }\n}" + } + ] + }, + { + "name": "Stripe Webhook", + "request": { + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/webhook", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "webhook" + ] + }, + "description": "Endpoint for stripe to be pinged after the payment processing is done(completed or failed)\n\nArgs:\n request (Request): Request came from the stripe\n db (AsyncSession): Async database connection\n \nReturns:\n _type_: resp" + }, + "response": [ + { + "name": "Successful Response", + "originalRequest": { + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/webhook", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "webhook" + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{}" + } + ] + }, + { + "name": "Get Order Status", + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjEiLCJleHAiOjE3MzMzMzA3Nzh9._FbQgsil5HFWvjAJC1m3AupYBeLOnmB6zkCcHJaG8q8", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/orders/:id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "orders", + ":id" + ], + "variable": [ + { + "key": "id", + "value": "1", + "description": "(Required) " + } + ] + }, + "description": "Get status of particular order\n\nArgs:\n id (int): Order id\n db (AsyncSession): Async database connection\n current_user (user): Current logged in user\n \nReturns:\n _type_: resp" + }, + "response": [ + { + "name": "Successful Response", + "originalRequest": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "", + "description": "Added as a part of security scheme: oauth2" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/orders/:id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "orders", + ":id" + ], + "variable": [ + { + "key": "id", + "value": "", + "description": "(Required) " + } + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"\",\n \"status\": \"\",\n \"data\": {\n \"event\": {\n \"id\": \"\",\n \"name\": \"\",\n \"schedule_date\": \"\",\n \"venue\": \"\",\n \"price\": \"\",\n 
\"total_tickets\": \"\"\n },\n \"ticket_id\": \"\",\n \"status\": \"\"\n }\n}" + }, + { + "name": "Validation Error", + "originalRequest": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "", + "description": "Added as a part of security scheme: oauth2" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/orders/:id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "orders", + ":id" + ], + "variable": [ + { + "key": "id", + "value": "", + "description": "(Required) " + } + ] + } + }, + "status": "Unprocessable Entity (WebDAV) (RFC 4918)", + "code": 422, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n },\n {\n \"loc\": [\n \"\",\n \"\"\n ],\n \"msg\": \"\",\n \"type\": \"\"\n }\n ]\n}" + }, + { + "name": "example response", + "originalRequest": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "{{baseUrl}}/api/v1/events/orders/:id", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "events", + "orders", + ":id" + ], + "variable": [ + { + "key": "id", + "value": "1", + "description": "(Required) " + } + ] + } + }, + "status": "OK", + "code": 200, + "_postman_previewlanguage": "json", + "header": [ + { + "key": "date", + "value": "Tue, 03 Dec 2024 11:14:35 GMT" + }, + { + "key": "server", + "value": "uvicorn" + }, + { + "key": "content-length", + "value": "237" + }, + { + "key": "content-type", + "value": "application/json" + } + ], + "cookie": [], + "body": "{\n \"message\": \"Customer's order retrieved successfully\",\n \"status\": 200,\n \"data\": {\n \"event\": {\n \"id\": 1,\n \"name\": \"Rock Concert\",\n \"schedule_date\": \"2025-03-12T15:30:00\",\n \"venue\": \"Rose Mary\",\n \"price\": 30000,\n \"total_tickets\": 30\n },\n \"ticket_id\": 3,\n \"status\": \"failed\"\n }\n}" + } + ] + } + ] + } + ] + } + ], + "variable": [ + { + "key": "baseUrl", + "value": "/", + "type": "string" + } + ] +} \ No newline at end of file diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..97e6ebf --- /dev/null +++ b/alembic.ini @@ -0,0 +1,117 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = migration + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migration/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migration/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/controllers/__init__.py b/controllers/__init__.py new file mode 100644 index 0000000..cd57199 --- /dev/null +++ b/controllers/__init__.py @@ -0,0 +1 @@ +# database interactions \ No newline at end of file diff --git a/controllers/event_controllers.py b/controllers/event_controllers.py new file mode 100644 index 0000000..c4be4b4 --- /dev/null +++ b/controllers/event_controllers.py @@ -0,0 +1,145 @@ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from sqlalchemy.orm import selectinload + +# application imports +from models.event_model import Events, Tickets, Orders +from schemas.event_schema import TicketStatus, OrderStatus + +class EventControl: + def __init__(self, db: AsyncSession): + self.db = db + + async def create_new_event(self,event: dict): + """Inserts the new event data into database""" + event=Events(**event) + self.db.add(event) + await self.db.commit() + await self.db.refresh(event) + return event + + async def get_event_by_id(self, event_id: int): + """Queries the db to get Events based on id""" + stmt = ( + select(Events) + .where(Events.id == event_id) + ) + result = await self.db.execute(stmt) + event = result.scalar_one_or_none() # Fetch a single result or None + return event + + async def get_all_events(self): + """Queries the database to get all the events""" + stmt = ( + select(Events) + ) + result = await self.db.execute(stmt) + events = [row[0] for row in result.all()] # Fetch all + return events + + async def create_bulk_new_tickets(self, tickets: list): + """ + Function is used to bulk insert tickets at once in db + + """ + tickets = [Tickets(**ticket) for ticket in tickets] + self.db.add_all(tickets) + await self.db.commit() + + async def get_tickets_by_event_id(self, event_id:int): + """Queries the tickets based on event id""" + stmt = ( + select(Tickets) + .where(Tickets.event_id == event_id) + ) + result = await self.db.execute(stmt) + tickets = [row[0] for row in result.all()] # Fetch all + return tickets + + async def get_ticket_by_id_and_event_id(self, ticket_id, event_id): + """Queries the db to get ticket based on id and event_id""" + stmt = ( + select(Tickets) + .where(Tickets.id == ticket_id, Tickets.event_id == event_id) + ) + result = await self.db.execute(stmt) + ticket = result.scalar_one_or_none() # Fetch a single result or None + return ticket + + async def get_locked_ticket_by_id(self, ticket_id:int): + """Queries the database for the locked ticket based on id""" + stmt = ( + 
select(Tickets) + .where(Tickets.id == ticket_id, Tickets.status != TicketStatus.SOLD) + ) + result = await self.db.execute(stmt) + ticket = result.scalar_one_or_none() # Fetch a single result or None + return ticket + + async def get_ticket_by_id(self, ticket_id:int): + """Queries the db to get Ticktes based on id""" + ticket = await self.db.get(Tickets,ticket_id) + return ticket + + async def update_ticket_status(self, ticket: Tickets, status: TicketStatus): + """Updates the ticket status in database""" + ticket.status = status + await self.db.commit() + await self.db.refresh(ticket) + + async def add_new_order(self, order: dict): + """Inserts the order details in database""" + order = Orders(**order) + self.db.add(order) + await self.db.commit() + await self.db.refresh(order) + return order + + async def get_order_by_stripe_session_id(self, session_id: str): + """Queries the db to get order based on stripe session id""" + stmt = ( + select(Orders) + .where(Orders.stripe_session_id == session_id) + ) + result = await self.db.execute(stmt) + order = result.scalar_one_or_none() # Fetch a single result or None + return order + + async def get_order_by_ticket_id(self, ticket_id: str): + """Queries the database to get the order based on ticket id""" + stmt = ( + select(Orders) + .where(Orders.ticket_id == ticket_id, Orders.status == OrderStatus.PENDING) + ) + result = await self.db.execute(stmt) + order = result.scalar_one_or_none() # Fetch a single result or None + return order + + async def get_order_by_user_id(self, user_id: int): + """Queries the db to get order based on user id""" + stmt = ( + select(Orders) + .where(Orders.user_id == user_id) + ) + result = await self.db.execute(stmt) + order = [row[0] for row in result.all()] # Fetch all + return order + + async def get_order_by_order_and_user_id(self, order_id: int, user_id: int): + """Queries the db to get order based on order id and user id""" + stmt = ( + select(Orders) + .where(Orders.id == order_id, Orders.user_id == user_id) + ) + result = await self.db.execute(stmt) + order = result.scalar_one_or_none() # Fetch a single result or None + return order + + async def update_order_status(self, order: Orders, status: OrderStatus): + """Updates the order in the database""" + order.status = status + await self.db.commit() + await self.db.refresh(order) + return order + +event_control = EventControl \ No newline at end of file diff --git a/controllers/user_controller.py b/controllers/user_controller.py new file mode 100644 index 0000000..f95e307 --- /dev/null +++ b/controllers/user_controller.py @@ -0,0 +1,62 @@ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from pydantic import EmailStr + +from models.user_model import Users + + +class UserController: + + def __init__(self, db: AsyncSession): + self.db = db + + async def add_user(self, user: dict): + """Add new user into databse + + Args: + user (dict): User's data + + Returns: + _type_: user + """ + user = Users(**user) + self.db.add(user) + await self.db.commit() + await self.db.refresh(user) + return user + + async def get_user_by_email(self, email: EmailStr): + """Query user's details from db by email: + + Args: + email (EmailStr): User's email + + Returns: + _type_: user + """ + stmt = ( + select(Users) + .where(Users.email == email) + ) + result = await self.db.execute(stmt) + user = result.scalar_one_or_none() # Fetch a single result or None + return user + + async def get_user_by_id(self, id: int): + """Query user's details from db by id + + 
Args: + id (int): User's id + + Returns: + _type_: user + """ + stmt = ( + select(Users) + .where(Users.id == id) + ) + result = await self.db.execute(stmt) + user = result.scalar_one_or_none() # Fetch a single result or None + return user + +user_control = UserController \ No newline at end of file diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000..5d4ef8e --- /dev/null +++ b/core/__init__.py @@ -0,0 +1 @@ +# secret variables are loaded from .env file \ No newline at end of file diff --git a/core/config.py b/core/config.py new file mode 100644 index 0000000..01a38f8 --- /dev/null +++ b/core/config.py @@ -0,0 +1,39 @@ +from utils.schema_utils import AbstractSettings + +class DBSettings(AbstractSettings): + """ + This class holds the configuration settings for + the database connection, including the database name, + user, password, host, and port. It is used for connecting + to the database in the application. + """ + DATABASE_NAME: str + DATABASE_PORT: int + DATABASE_HOST: str + DATABASE_PASSWORD: str + DATABASE_USER: str + +class StripeSettings(AbstractSettings): + """ + This class stores the settings for integrating Stripe payments, + including the public and private keys, account ID, webhook secret, + and price ID. These settings are used to interact with Stripe's payment services. + """ + PUBLIC_KEY: str + PRIVATE_KEY: str + ACCOUNT_ID: str + WEBHOOK_CODE: str + +class AuthSettings(AbstractSettings): + """ + This class contains the configuration for user authentication, + including the secret key used for generating JWT tokens, the + expiration time of the tokens, and the algorithm used to sign the tokens. + """ + access_secret_key: str + access_time_exp: int + Algorithm: str + +db_settings = DBSettings() +stripe_settings = StripeSettings() +auth_settings = AuthSettings() \ No newline at end of file diff --git a/example.env b/example.env new file mode 100644 index 0000000..25d9b92 --- /dev/null +++ b/example.env @@ -0,0 +1,17 @@ +# Database Secrets +DATABASE_NAME= +DATABASE_PORT= +DATABASE_HOST= +DATABASE_PASSWORD= +DATABASE_USER= + +# Stripe Secrets +PUBLIC_KEY= +PRIVATE_KEY= +ACCOUNT_ID= +WEBHOOK_CODE= + +# Auth Secrets +access_secret_key= # generate the secret key using python secrets module: print(secrets.token_hex(32)) +access_time_exp= +Algorithm= # like HS256 \ No newline at end of file diff --git a/main.py b/main.py new file mode 100644 index 0000000..94afd2a --- /dev/null +++ b/main.py @@ -0,0 +1,37 @@ +import asyncio +from fastapi import FastAPI +from fastapi.responses import RedirectResponse +from routes.user_routes import user_route +from routes.event_routes import event_router +from utils.redis_utils import redis_manager + +app=FastAPI() + + +async def lifespan(app): + """ + This code defines an asynchronous lifespan context + manager for the FastAPI application, managing the + startup and shutdown processes. During startup, it + initializes the Redis manager, and on shutdown, it + ensures the Redis connection is properly closed. 
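+
+    A minimal sketch of how such a lifespan is typically wired up (an
+    illustration only, assuming FastAPI's `lifespan` parameter expects an
+    async context manager, e.g. one produced by
+    `contextlib.asynccontextmanager`):
+
+        from contextlib import asynccontextmanager
+
+        @asynccontextmanager
+        async def lifespan(app: FastAPI):
+            await redis_manager.start()   # startup: open the Redis connection
+            yield                         # application handles requests here
+            await redis_manager.stop()    # shutdown: close the Redis connection
+
+        app = FastAPI(lifespan=lifespan)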
+ """ + # Startup + await redis_manager.start() + yield + # Shutdown + await redis_manager.stop() + +app = FastAPI(lifespan=lifespan) + +@app.get("/") +async def redirect_url(): + """ + This code redirects the url `http://localhost:8000` + to `http://localhost:8000/docs` + """ + return RedirectResponse("/docs") + +app.include_router(user_route) +app.include_router(event_router) + diff --git a/migration/README b/migration/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/migration/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/migration/env.py b/migration/env.py new file mode 100644 index 0000000..998a882 --- /dev/null +++ b/migration/env.py @@ -0,0 +1,94 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context +from models.database import Base +from core.config import db_settings +from models import ( + user_model, + event_model, +) + +db_user=db_settings.DATABASE_USER +db_pwd=db_settings.DATABASE_PASSWORD +db_host=db_settings.DATABASE_HOST +db_port=db_settings.DATABASE_PORT +db_name=db_settings.DATABASE_NAME + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +config.set_main_option( + "sqlalchemy.url", + f"postgresql://{db_user}:{db_pwd}@{db_host}:{db_port}/{db_name}", +) +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migration/script.py.mako b/migration/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/migration/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/migration/versions/1076acdc8513_added_ticket_id_field_in_order_table.py b/migration/versions/1076acdc8513_added_ticket_id_field_in_order_table.py new file mode 100644 index 0000000..4dbee11 --- /dev/null +++ b/migration/versions/1076acdc8513_added_ticket_id_field_in_order_table.py @@ -0,0 +1,32 @@ +"""Added ticket id field in order table + +Revision ID: 1076acdc8513 +Revises: e6e2ee705002 +Create Date: 2024-12-02 12:50:43.150436 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '1076acdc8513' +down_revision: Union[str, None] = 'e6e2ee705002' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('orders', sa.Column('ticket_id', sa.Integer(), nullable=True)) + op.create_foreign_key(None, 'orders', 'tickets', ['ticket_id'], ['id'], ondelete='CASCADE') + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint(None, 'orders', type_='foreignkey') + op.drop_column('orders', 'ticket_id') + # ### end Alembic commands ### diff --git a/migration/versions/109b56e84db5_removed_constraint_for_failed_order_.py b/migration/versions/109b56e84db5_removed_constraint_for_failed_order_.py new file mode 100644 index 0000000..9348708 --- /dev/null +++ b/migration/versions/109b56e84db5_removed_constraint_for_failed_order_.py @@ -0,0 +1,32 @@ +"""removed constraint for failed order status + +Revision ID: 109b56e84db5 +Revises: 73a6388c7a01 +Create Date: 2024-12-02 16:33:29.791895 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '109b56e84db5' +down_revision: Union[str, None] = '73a6388c7a01' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_constraint('uix_ticket_status', 'orders', type_='unique') + op.create_index('ix_ticket_status_unique', 'orders', ['ticket_id', 'status'], unique=True, postgresql_where=sa.text("status != 'FAILED'")) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('ix_ticket_status_unique', table_name='orders', postgresql_where=sa.text("status != 'FAILED'")) + op.create_unique_constraint('uix_ticket_status', 'orders', ['ticket_id', 'status']) + # ### end Alembic commands ### diff --git a/migration/versions/15b4f0d5b194_created_at_colum_is_set_steady_during_.py b/migration/versions/15b4f0d5b194_created_at_colum_is_set_steady_during_.py new file mode 100644 index 0000000..e3a0b50 --- /dev/null +++ b/migration/versions/15b4f0d5b194_created_at_colum_is_set_steady_during_.py @@ -0,0 +1,30 @@ +"""created at colum is set steady during update + +Revision ID: 15b4f0d5b194 +Revises: 441a16feab7e +Create Date: 2024-12-01 13:05:29.399556 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '15b4f0d5b194' +down_revision: Union[str, None] = '441a16feab7e' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migration/versions/34ee47923345_added_is_admin_field_in_users_table.py b/migration/versions/34ee47923345_added_is_admin_field_in_users_table.py new file mode 100644 index 0000000..1278ca7 --- /dev/null +++ b/migration/versions/34ee47923345_added_is_admin_field_in_users_table.py @@ -0,0 +1,38 @@ +"""added is_admin field in users table + +Revision ID: 34ee47923345 +Revises: 109b56e84db5 +Create Date: 2024-12-02 16:55:07.958676 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '34ee47923345' +down_revision: Union[str, None] = '109b56e84db5' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + 'users', + sa.Column( + 'is_admin', + sa.Boolean(), + nullable=False, + server_default=sa.text('false') # Set default value to False + ) + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('users', 'is_admin') + # ### end Alembic commands ### diff --git a/migration/versions/441a16feab7e_removed_the_name_field_from_ticket_table.py b/migration/versions/441a16feab7e_removed_the_name_field_from_ticket_table.py new file mode 100644 index 0000000..b4229a1 --- /dev/null +++ b/migration/versions/441a16feab7e_removed_the_name_field_from_ticket_table.py @@ -0,0 +1,38 @@ +"""Removed the name field from ticket table + +Revision ID: 441a16feab7e +Revises: 79524cc45145 +Create Date: 2024-12-01 10:54:59.638876 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '441a16feab7e' +down_revision: Union[str, None] = '79524cc45145' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_unique_constraint(None, 'events', ['id']) + op.create_unique_constraint(None, 'orders', ['id']) + op.create_unique_constraint(None, 'tickets', ['id']) + op.drop_column('tickets', 'name') + op.create_unique_constraint(None, 'users', ['id']) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint(None, 'users', type_='unique') + op.add_column('tickets', sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=True)) + op.drop_constraint(None, 'tickets', type_='unique') + op.drop_constraint(None, 'orders', type_='unique') + op.drop_constraint(None, 'events', type_='unique') + # ### end Alembic commands ### diff --git a/migration/versions/73a6388c7a01_added_the_composite_unique_constraint_.py b/migration/versions/73a6388c7a01_added_the_composite_unique_constraint_.py new file mode 100644 index 0000000..2c4aace --- /dev/null +++ b/migration/versions/73a6388c7a01_added_the_composite_unique_constraint_.py @@ -0,0 +1,30 @@ +"""added the composite unique constraint for ticket and status + +Revision ID: 73a6388c7a01 +Revises: 1076acdc8513 +Create Date: 2024-12-02 16:19:18.295554 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '73a6388c7a01' +down_revision: Union[str, None] = '1076acdc8513' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_unique_constraint('uix_ticket_status', 'orders', ['ticket_id', 'status']) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint('uix_ticket_status', 'orders', type_='unique') + # ### end Alembic commands ### diff --git a/migration/versions/79524cc45145_model_tables_creation.py b/migration/versions/79524cc45145_model_tables_creation.py new file mode 100644 index 0000000..32427ac --- /dev/null +++ b/migration/versions/79524cc45145_model_tables_creation.py @@ -0,0 +1,81 @@ +"""model tables creation + +Revision ID: 79524cc45145 +Revises: +Create Date: 2024-11-30 22:33:39.372961 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '79524cc45145' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('events', + sa.Column('name', sa.String(), nullable=False), + sa.Column('schedule_date', sa.DateTime(), nullable=False), + sa.Column('venue', sa.String(), nullable=False), + sa.Column('price', sa.Float(), nullable=False), + sa.Column('total_tickets', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('id') + ) + op.create_table('users', + sa.Column('name', sa.String(), nullable=False), + sa.Column('email', sa.String(), nullable=False), + sa.Column('phone', sa.String(), nullable=False), + sa.Column('password', sa.String(), nullable=False), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email'), + sa.UniqueConstraint('id'), + sa.UniqueConstraint('phone') + ) + op.create_table('orders', + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('event_id', sa.Integer(), nullable=False), + sa.Column('status', sa.Enum('PENDING', 'CONFIRMED', 'FAILED', name='orderstatus'), nullable=False), + sa.Column('stripe_session_id', sa.String(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.ForeignKeyConstraint(['event_id'], ['events.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('id') + ) + op.create_table('tickets', + sa.Column('name', sa.String(), nullable=True), + sa.Column('status', sa.Enum('AVAILABLE', 'LOCKED', 'SOLD', name='ticketstatus'), nullable=False), + sa.Column('event_id', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('NOW()'), nullable=True), + sa.ForeignKeyConstraint(['event_id'], ['events.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('tickets') + op.drop_table('orders') + op.drop_table('users') + op.drop_table('events') + # ### end Alembic commands ### diff --git a/migration/versions/e6e2ee705002_converted_price_from_float_to_int.py b/migration/versions/e6e2ee705002_converted_price_from_float_to_int.py new file mode 100644 index 0000000..6fa1443 --- /dev/null +++ b/migration/versions/e6e2ee705002_converted_price_from_float_to_int.py @@ -0,0 +1,36 @@ +"""Converted price from float to int + +Revision ID: e6e2ee705002 +Revises: f9d0f99c5b17 +Create Date: 2024-12-02 01:28:26.058475 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = 'e6e2ee705002' +down_revision: Union[str, None] = 'f9d0f99c5b17' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('events', 'price', + existing_type=sa.DOUBLE_PRECISION(precision=53), + type_=sa.Integer(), + existing_nullable=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('events', 'price', + existing_type=sa.Integer(), + type_=sa.DOUBLE_PRECISION(precision=53), + existing_nullable=False) + # ### end Alembic commands ### diff --git a/migration/versions/f9d0f99c5b17_added_stripe_price_id_for_each_events.py b/migration/versions/f9d0f99c5b17_added_stripe_price_id_for_each_events.py new file mode 100644 index 0000000..2f1f254 --- /dev/null +++ b/migration/versions/f9d0f99c5b17_added_stripe_price_id_for_each_events.py @@ -0,0 +1,30 @@ +"""added stripe price id for each events + +Revision ID: f9d0f99c5b17 +Revises: 15b4f0d5b194 +Create Date: 2024-12-02 00:24:42.823300 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'f9d0f99c5b17' +down_revision: Union[str, None] = '15b4f0d5b194' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('events', sa.Column('stripe_price_id', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('events', 'stripe_price_id') + # ### end Alembic commands ### diff --git a/models/__init__.py b/models/__init__.py new file mode 100644 index 0000000..c68772c --- /dev/null +++ b/models/__init__.py @@ -0,0 +1,5 @@ +from models.user_model import Users +from models.event_model import Events, Tickets + + +# database tables \ No newline at end of file diff --git a/models/database.py b/models/database.py new file mode 100644 index 0000000..3a94d04 --- /dev/null +++ b/models/database.py @@ -0,0 +1,23 @@ +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +from core.config import db_settings + +db_user=db_settings.DATABASE_USER +db_pwd=db_settings.DATABASE_PASSWORD +db_host=db_settings.DATABASE_HOST +db_port=db_settings.DATABASE_PORT +db_name=db_settings.DATABASE_NAME + +DATABASE_URL = f"postgresql+asyncpg://{db_user}:{db_pwd}@{db_host}:{db_port}/{db_name}" + +# Create asynchronous engine +engine = create_async_engine(DATABASE_URL, echo=True) + +# Create async sessionmaker +session = sessionmaker( + engine, class_=AsyncSession, autocommit=False, autoflush=False +) + +# Create base class for models +Base = declarative_base() diff --git a/models/event_model.py b/models/event_model.py new file mode 100644 index 0000000..429de1c --- /dev/null +++ b/models/event_model.py @@ -0,0 +1,54 @@ +from sqlalchemy import ( + Column, + Float, + Integer, + String, + ForeignKey, + DateTime, + text, + Enum, + Index, +) +from sqlalchemy.orm import relationship +from utils.model_utils import AbstractModels +from schemas.event_schema import TicketStatus, OrderStatus + +class Events(AbstractModels): + __tablename__ = "events" + + name=Column(String, nullable=False) + schedule_date=Column(DateTime, nullable=False) + venue=Column(String, nullable=False) + price=Column(Integer, nullable=False) + total_tickets=Column(Integer, nullable=False) + stripe_price_id=Column(String, nullable=True) + tickets=relationship("Tickets", back_populates="event", cascade="all,delete-orphan") + orders=relationship("Orders", back_populates="event") + +class Tickets(AbstractModels): + __tablename__ = "tickets" + + status=Column(Enum(TicketStatus), nullable=False, default=TicketStatus.AVAILABLE) + event_id=Column(Integer, ForeignKey("events.id", ondelete="CASCADE"), nullable=False) + event=relationship("Events", back_populates="tickets") + +class Orders(AbstractModels): + __tablename__ = "orders" + + user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"),nullable=False) + event_id = Column(Integer, ForeignKey("events.id", ondelete="CASCADE"), nullable=False) + ticket_id = Column(Integer, ForeignKey("tickets.id", ondelete="CASCADE"), nullable=True) + status = Column(Enum(OrderStatus), nullable=False) + stripe_session_id = Column(String, nullable=True) + user=relationship("Users", back_populates="orders") + event=relationship("Events", back_populates="orders") + + __table_args__ = ( + Index( + "ix_ticket_status_unique", + "ticket_id", + "status", + unique=True, + postgresql_where=(status != OrderStatus.FAILED), + ), + ) \ No newline at end of file diff --git a/models/user_model.py b/models/user_model.py new file mode 100644 index 0000000..3669ee9 --- /dev/null +++ b/models/user_model.py @@ -0,0 +1,18 @@ +from sqlalchemy import ( + Column, + String, + Boolean, + text, +) +from sqlalchemy.orm import relationship +from utils.model_utils import AbstractModels + +class Users(AbstractModels): + 
__tablename__ = "users" + + name = Column(String, nullable=False) + email = Column(String, nullable=False, unique=True) + phone = Column(String, nullable=False, unique=True) + password = Column(String, nullable=False) + is_admin = Column(Boolean, default=False, server_default=text("False")) + orders = relationship("Orders", back_populates="user", cascade="all,delete-orphan") \ No newline at end of file diff --git a/permissions/__init__.py b/permissions/__init__.py new file mode 100644 index 0000000..6525870 --- /dev/null +++ b/permissions/__init__.py @@ -0,0 +1 @@ +# dependencies rights or permission based access to features \ No newline at end of file diff --git a/permissions/permission_dep.py b/permissions/permission_dep.py new file mode 100644 index 0000000..33df07c --- /dev/null +++ b/permissions/permission_dep.py @@ -0,0 +1,16 @@ +from fastapi import HTTPException, status, Depends + +from utils.oauth import get_current_user + +async def admin_permission_right(current_user = Depends(get_current_user)): + """ + This function is a dependency used to enforce admin-level permissions. It checks + if the current user is an admin. If the user is not an admin, it raises an HTTP + 403 Forbidden error with a message informing the user that they do not have permission + to create an event. It suggests contacting the service provider in case this is a mistake. + """ + if not current_user.is_admin: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User don't have permssion to create event. If this is mistake please contact the service provider" + ) \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..9e1dd4e Binary files /dev/null and b/requirements.txt differ diff --git a/routes/__init__.py b/routes/__init__.py new file mode 100644 index 0000000..4617f51 --- /dev/null +++ b/routes/__init__.py @@ -0,0 +1 @@ +# API routes are defined \ No newline at end of file diff --git a/routes/event_routes.py b/routes/event_routes.py new file mode 100644 index 0000000..f1d400b --- /dev/null +++ b/routes/event_routes.py @@ -0,0 +1,150 @@ +from fastapi import APIRouter, status, HTTPException, Depends, BackgroundTasks, Request, Header +from fastapi.responses import JSONResponse +from sqlalchemy.ext.asyncio import AsyncSession +from pydantic import BaseModel +from typing import Optional + +# application imports +from utils.db_utils import get_db +from schemas.event_schema import ( + EventCreate, + EventResp, + OrderRep, + EventListsResp, + TicketListResp, + OrderListResp, +) +from services.event_service import event_service +from core.config import stripe_settings +from utils.stripe_utils import stripe +from utils.oauth import get_current_user +from permissions.permission_dep import admin_permission_right + +event_router = APIRouter(prefix="/api/v1/events", tags=["Events"]) + +@event_router.post( + "/", + status_code=status.HTTP_201_CREATED, + response_model=EventResp, +) +async def create_new_event(newEvent: EventCreate, background_tasks: BackgroundTasks,db: AsyncSession=Depends(get_db), current_user = Depends(admin_permission_right)): + """Create new event + Args: + newEvent (EventCreate): Event Descriptions like date, venue, tickets + background_taks (BackgroundTaks): Background task for creating the tickets assigned for the event + db (AsyncSession): Async database connection + current_user (user): Current logged in user with admin rights i.e. 
to create the events + + Returns: + _type_: resp + """ + resp = await event_service(db).create_new_event(newEvent, background_tasks) + return resp + +@event_router.get( + "/{event_id}/", + status_code=status.HTTP_200_OK, + response_model=TicketListResp, +) +async def get_event_tickets(event_id: int, current_user=Depends(get_current_user),db: AsyncSession=Depends(get_db)): + """ Get Tickets List + + Args: + event_id (int): The ID of event for which ticket is to bought + current_user (user): Currently logged in user who is booking the ticket, + db (AsyncSession): Async database connection + + Returns: + _type_: resp + """ + resp = await event_service(db).get_event_tickets(event_id) + return resp + +@event_router.post( + "/{event_id}/book/{ticket_id}", + status_code=status.HTTP_200_OK +) +async def book_event_ticket(event_id: int, ticket_id: int, current_user = Depends(get_current_user), db: AsyncSession = Depends(get_db)): + """Book ticket + + Args: + event_id (int): The ID of event for which ticket is to bought + ticket_id (int): The ID of ticket that is meant to be bought + current_user (user): Currently logged in user who is booking the ticket, + db (AsyncSession): Async database connection + + Returns: + _type_: resp + """ + resp = await event_service(db).book_ticket(ticket_id=ticket_id, event_id=event_id, user_id = current_user.id) + return resp + +@event_router.post("/webhook") +async def stripe_webhook(request: Request, db: AsyncSession = Depends(get_db)): + """Endpoint for stripe to be pinged after the payment processing is done(completed or failed) + + Args: + request (Request): Request came from the stripe + db (AsyncSession): Async database connection + + Returns: + _type_: resp + """ + resp = await event_service(db).stripe_webhook_service(request) + return resp + +@event_router.get( + "/customer/orders/", + response_model=OrderListResp, + status_code=status.HTTP_200_OK +) +async def get_orders_of_user(db: AsyncSession = Depends(get_db), current_user = Depends(get_current_user)): + """Get List of current user's orders + + Args: + db (AsyncSession): Async database connection + current_user (user): Current logged in user + + Returns: + _type_: resp + """ + resp = await event_service(db).get_customer_orders(user_id=current_user.id) + return resp + + +@event_router.get( + "/orders/{id}", + response_model=OrderRep, + status_code=status.HTTP_200_OK +) +async def get_order_status(id: int, db: AsyncSession = Depends(get_db), current_user = Depends(get_current_user)): + """Get status of particular order + + Args: + id (int): Order id + db (AsyncSession): Async database connection + current_user (user): Current logged in user + + Returns: + _type_: resp + """ + resp = await event_service(db).get_customer_order_by_order_id(order_id=id, user_id=current_user.id) + return resp + +@event_router.get( + "/", + response_model=EventListsResp, + status_code=status.HTTP_200_OK +) +async def get_all_events(db:AsyncSession = Depends(get_db), current_user = Depends(get_current_user)): + """Get list of all events + + Args: + db (AsyncSession): Async database connection + current_user (user): Current logged in user + + Returns: + _type_: resp + """ + resp = await event_service(db).get_all_events() + return resp \ No newline at end of file diff --git a/routes/user_routes.py b/routes/user_routes.py new file mode 100644 index 0000000..a9038ce --- /dev/null +++ b/routes/user_routes.py @@ -0,0 +1,49 @@ +from fastapi import APIRouter, Depends, status +from fastapi.security.oauth2 import OAuth2PasswordRequestForm 
+from sqlalchemy.ext.asyncio import AsyncSession + +from utils.db_utils import get_db +from services.user_service import user_service +from schemas.user_schema import UserCreate, UserResp, LoginResponse + +user_route=APIRouter(prefix="/api/v1/auth", tags=["Users"]) + +@user_route.post( + "/add/user/", + status_code=status.HTTP_201_CREATED, + response_model=UserResp +) +async def create_user( + newUser: UserCreate, + db: AsyncSession = Depends(get_db), +): + """Create new user + + Args: + newUser (UserCreate): User's data + + Returns: + _type_: resp + """ + resp = await user_service(db).create_user(newUser) + return resp + +@user_route.post( + "/login/", + status_code=status.HTTP_200_OK, + response_model=LoginResponse, +) +async def login( + cred:OAuth2PasswordRequestForm = Depends(), + db:AsyncSession=Depends(get_db) +): + """Log in the user + + Args: + cred (OAuth2PasswordRequestForm): User's Credentials like email and password + + Returns: + _type_: resp + """ + resp = await user_service(db).login_user(cred) + return resp \ No newline at end of file diff --git a/schemas/event_schema.py b/schemas/event_schema.py new file mode 100644 index 0000000..c07a9ea --- /dev/null +++ b/schemas/event_schema.py @@ -0,0 +1,62 @@ +from datetime import datetime +from pydantic import Field +from enum import Enum +from typing import List + +from utils.schema_utils import AbstractModel, ResponseModel + +class TicketStatus(Enum): + AVAILABLE="available" + LOCKED="locked" + SOLD="sold" + +# for order status +class OrderStatus(Enum): + PENDING = "pending" + CONFIRMED= "confirmed" + FAILED= "failed" + +class EventCreate(AbstractModel): + name: str + schedule_date: datetime + venue: str + price: float = Field(..., description="Price is in cents(e.g. 2000 for $20)") + total_tickets: int + +class EventDetails(AbstractModel): + id: int + name: str + schedule_date: datetime + venue: str + price: int + total_tickets: int + +class EventListsResp(ResponseModel): + data: List[EventDetails] + +class EventResp(ResponseModel): + data: EventDetails + +class CreateCheckoutSession(AbstractModel): + price_id: str + quantity: int = 1 + success_url: str + cancel_url: str + +class OrderDetails(AbstractModel): + event: EventDetails + ticket_id: int + status: str + +class OrderRep(ResponseModel): + data: OrderDetails + +class OrderListResp(ResponseModel): + data: List[OrderDetails] + +class TicketDetails(AbstractModel): + id: int + status: TicketStatus + +class TicketListResp(ResponseModel): + data: List[TicketDetails] \ No newline at end of file diff --git a/schemas/user_schema.py b/schemas/user_schema.py new file mode 100644 index 0000000..f276628 --- /dev/null +++ b/schemas/user_schema.py @@ -0,0 +1,41 @@ +from pydantic import EmailStr, BaseModel, field_validator, SecretStr + +from utils.schema_utils import AbstractModel, ResponseModel + +class UserCreate(AbstractModel): + name: str + email: EmailStr + phone: str + password: SecretStr + + # Custom field validator to trim leading and trailing spaces + @field_validator('name', 'email', 'phone', 'password', mode='before') + def trim_and_check_empty(cls, v, field): + trimmed_value = v.strip() or None # Trim whitespace + if not trimmed_value: # Check if the field is empty after trimming + raise ValueError(f'{field.field_name} cannot be empty') + return trimmed_value + + @field_validator('phone') + @classmethod + def phone_length(cls, v:str): + if len(v) != 10: + raise ValueError("Phone number must be of 10 digits") + return v + +class UserDetails(AbstractModel): + id: int + name: str 
+ email: EmailStr + phone: str + +class UserResp(ResponseModel): + data: UserDetails + +# Email DTO (Used for token verification) +class TokenData(AbstractModel): + id: str + +class LoginResponse(ResponseModel): + access_token: str + token_type: str = "Bearer" \ No newline at end of file diff --git a/scripts/admin.py b/scripts/admin.py new file mode 100644 index 0000000..117b531 --- /dev/null +++ b/scripts/admin.py @@ -0,0 +1,51 @@ +# to run this file, navigate to root folder and run command: python -m scripts.admin +import asyncio +import click +import asyncpg +from pydantic import EmailStr + +from core.config import db_settings + +db_user=db_settings.DATABASE_USER +db_pwd=db_settings.DATABASE_PASSWORD +db_host=db_settings.DATABASE_HOST +db_port=db_settings.DATABASE_PORT +db_name=db_settings.DATABASE_NAME + +DATABASE_URL = f"postgresql://{db_user}:{db_pwd}@{db_host}:{db_port}/{db_name}" + +async def set_admin(email: EmailStr, is_admin: bool): + """ + Asynchronously sets the is_admin flag for a user. + """ + conn = await asyncpg.connect(DATABASE_URL) + try: + query = """ + UPDATE users + SET is_admin = $1 + WHERE email = $2 + """ + await conn.execute(query, is_admin, email) + status = "Admin" if is_admin else "Not Admin" + print(f"Successfully updated User with email {email} to {status}.") + except Exception as e: + print(f"Error updating user: {e}") + finally: + await conn.close() + +@click.command() +@click.argument('email', type=str) +@click.argument('is_admin', type=bool) +def cli_set_admin(email, is_admin): + """ + Set the is_admin flag for a user asynchronously. + + \b + Arguments: + email -- The Email of the user to update + is_admin -- True or False to set the admin status + """ + asyncio.run(set_admin(email, is_admin)) + +if __name__ == "__main__": + cli_set_admin() diff --git a/services/__init__.py b/services/__init__.py new file mode 100644 index 0000000..6789d46 --- /dev/null +++ b/services/__init__.py @@ -0,0 +1 @@ +# it links the controllers and routes \ No newline at end of file diff --git a/services/event_service.py b/services/event_service.py new file mode 100644 index 0000000..ea675c2 --- /dev/null +++ b/services/event_service.py @@ -0,0 +1,342 @@ +import datetime +from fastapi import HTTPException, status, BackgroundTasks, Request +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.exc import IntegrityError +from fastapi.encoders import jsonable_encoder + +# application imports +from schemas.event_schema import ( + EventCreate, + EventResp, + TicketStatus, + CreateCheckoutSession, + OrderStatus, + OrderRep, + EventListsResp, + EventDetails, + TicketDetails, + TicketListResp, + OrderListResp, +) +from controllers.event_controllers import event_control +from utils.redis_utils import acquire_ticket_lock +from utils.stripe_utils import stripe, create_stripe_price +from core.config import stripe_settings + +class EventService: + def __init__(self, db: AsyncSession): + self.db = db + self.event_control = event_control(self.db) + + async def create_tickets(self, event_id: int, total_ticket: int): + """ + This function creates the total number of tickets as specified for the + particular event + """ + tickets=[] + for i in range(0,total_ticket): + tickets.append({"status":TicketStatus.AVAILABLE,"event_id":event_id}) + await self.event_control.create_bulk_new_tickets(tickets) + + async def get_all_events(self)->EventListsResp: + """ + This function retrieves the list of all the events + """ + events = await self.event_control.get_all_events() + events = 
[EventDetails.model_validate(event) for event in events] + resp = { + "message":"Events retrieved successfully", + "data": events, + "status": status.HTTP_200_OK + } + return resp + + async def create_new_event(self, new_event: EventCreate, background_task: BackgroundTasks)-> EventResp: + """ + This function handles the creation of a new event by validating the scheduled date, creating a Stripe + price for the event, and saving the event details. It ensures the scheduled date is in the future, + processes the event data, and asynchronously generates tickets using a background task. The response + includes a success message, event data, and an HTTP 201 status. + """ + # validates the schedule date + if new_event.schedule_date < datetime.datetime.now(datetime.timezone.utc): + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Scheduled date or time should be greater than current date or time" + ) + # ignoring the timezone + new_event.schedule_date = new_event.schedule_date.replace(tzinfo=None) + + # creates the stripe price id for the new event + stripe_price_id = await create_stripe_price(event_name=new_event.name, price=new_event.price) + new_event = new_event.model_dump(exclude_unset=True) + new_event["stripe_price_id"] = stripe_price_id + + # creates the new event + created_event = await self.event_control.create_new_event(new_event) + + # creates the total tickets available for the event in the background + background_task.add_task(self.create_tickets,event_id=created_event.id, total_ticket=created_event.total_tickets) + resp = { + "message": "Event created successfully", + "data": created_event, + "status": status.HTTP_201_CREATED + } + return resp + + async def get_event_tickets(self, event_id: int)->TicketListResp: + """ + This function retrieves the list of all the tickets associated with the event + """ + event = await self.event_control.get_event_by_id(event_id=event_id) + if not event: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No such event exist" + ) + tickets = await self.event_control.get_tickets_by_event_id(event_id) + tickets = [TicketDetails.model_validate(ticket) for ticket in tickets] + resp = { + "message": "Tickets retrieved successfully", + "data": tickets, + "status": status.HTTP_200_OK + } + return resp + + async def book_ticket(self, ticket_id: int, event_id: int, user_id: int): + """ + This function facilitates the process of booking a ticket for an event + by validating the event and ticket existence, ensuring the ticket's availability, + and locking it for the booking process. If successfully locked, it initiates a + Stripe checkout session for payment and creates a new order with a pending status. + The function handles potential conflicts and provides appropriate responses, ensuring + a secure and seamless booking experience for the user. 
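+
+        Flow (summary): validate the event and ticket, acquire the Redis lock,
+        mark the ticket LOCKED, create a Stripe checkout session, then record a
+        PENDING order and return the checkout URL to the caller.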
+ """ + # validates the event and ticket + event = await self.event_control.get_event_by_id(event_id=event_id) + if not event: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No such event exist" + ) + ticket = await self.event_control.get_ticket_by_id(ticket_id=ticket_id) + if not ticket: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No such ticket exists" + ) + stripe_price_id = event.stripe_price_id + + # checks the status of the tickets + if ticket.status == TicketStatus.SOLD or ticket.status == TicketStatus.LOCKED: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Ticket is already sold or booked" + ) + + # acquiring the lock on the particular ticket + locked = await acquire_ticket_lock(ticket_id) + if locked: + + # if ticket lock is successful update the database with ticket status + ticket = await self.event_control.get_ticket_by_id_and_event_id(ticket_id, event_id=event_id) + updated_ticket = await self.event_control.update_ticket_status(ticket=ticket, status=TicketStatus.LOCKED) + + # creating the session checkout for the particular event + session_data = { + "price_id":stripe_price_id, + "quantity":1, + "success_url":"http://127.0.0.1:8000", + "cancel_url":"http://127.0.0.1:8000" + } + checkout_session = CreateCheckoutSession(**session_data) + resp = await self.create_stripe_session_checkout(data = checkout_session) # session id and stripe chekout url + + # creating a new order + new_order_dict = { + "user_id": user_id, + "event_id": event_id, + "status": OrderStatus.PENDING, + "stripe_session_id": resp["id"], + "ticket_id": ticket_id, + } + try: + new_order = await self.event_control.add_new_order(new_order_dict) + except IntegrityError: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Order for this ticket is in pending state" + ) + message = "Ticket booked successfully. Please proceed to payment" + status_code = status.HTTP_200_OK + data = resp + resp ={ + "message": message, + "status": status_code, + "data": data or None + } + else: + message = "Ticket is already booked" + status_code = status.HTTP_409_CONFLICT + resp ={ + "message": message, + "status": status_code, + } + return resp + + async def update_ticket_status(self, ticket_id: int, status:TicketStatus): + """ + This function updates the ticket status + """ + ticket = await self.event_control.get_ticket_by_id(ticket_id) + updated_ticket = await self.event_control.update_ticket_status(ticket, status) + + + # stripe session checkout + async def create_stripe_session_checkout(self, data:CreateCheckoutSession): + """ + This function creates a Stripe checkout session for processing payments. + It utilizes the Stripe API to generate a session with specified line items, + payment mode, and URLs for success and cancellation redirects. Upon success, + it returns the session ID and URL. In case of any Stripe-related errors, it + raises an HTTP exception with an appropriate error message. + + """ + try: + session = stripe.checkout.Session.create( + line_items=[{ + 'price': data.price_id, + 'quantity': data.quantity, + }], + mode='payment', + success_url=data.success_url, + cancel_url=data.cancel_url, + ) + resp = { + "id": session.id, + "url": session.url + } + return resp + except stripe.error.StripeError as e: + raise HTTPException(status_code=400, detail=str(e)) + + async def stripe_webhook_service(self, request: Request): + """ + This function handles Stripe webhook events to manage ticket orders + based on payment outcomes. 
It verifies the webhook's signature for + authenticity and processes the checkout.session.completed event to + update order and ticket statuses accordingly. If the payment is successful, + the order status is set to confirmed, and the ticket is marked as sold. For + failed payments, the order is marked as failed, the ticket status is reset to + available, and any ticket lock is released. It ensures secure and accurate + handling of Stripe notifications and maintains system integrity. + """ + try: + # strip signature validation + stripe_signature = request.headers.get('stripe-signature') + if not stripe_signature: + raise HTTPException( + status_code=400, + detail="Missing stripe-signature header" + ) + + # Get the raw request body + payload = await request.body() + + # Verify webhook signature + event = stripe.Webhook.construct_event( + payload, + stripe_signature, + stripe_settings.WEBHOOK_CODE + ) + + # Handle checkout.session.completed event + if event.type == 'checkout.session.completed': + session = event.data.object + + # update the order status with Confirmed + order = await self.event_control.get_order_by_stripe_session_id(session_id=session.get("id")) + updated_order = await self.event_control.update_order_status(order=order, status=OrderStatus.CONFIRMED) + + # update ticket status with Sold + ticket = await self.event_control.get_ticket_by_id(int(updated_order.ticket_id)) + updated_ticket = await self.event_control.update_ticket_status(ticket,TicketStatus.SOLD) + + return {"status": "success"} + + else: + # update the order status with Failed + order = await self.event_control.get_order_by_stripe_session_id(session_id=session.get("id")) + updated_order = await self.event_control.update_order_status(order=order, status=OrderStatus.FAILED) + + # update ticket status with Available + ticket = await self.event_control.get_ticket_by_id(int(updated_order.ticket_id)) + updated_ticket = await self.event_control.update_ticket_status(ticket,TicketStatus.AVAILABLE) + + # release the lock from the ticket + from utils.redis_utils import release_ticket_lock + removed_lock = await release_ticket_lock(ticket_id=int(updated_order.ticket_id)) + + return {"status": "failed"} + + except stripe.error.SignatureVerificationError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + async def get_customer_orders(self, user_id: int)-> OrderListResp: + """ + This function retrieves a specific customer's order by validating the user association. + If the order exists, it compiles order details, including the associated event, ticket ID, and order + status. The response includes a success message, status code, and the retrieved data. If the order is + not found, it raises a 404 Not Found exception with an appropriate message. + """ + orders = await self.event_control.get_order_by_user_id(user_id=user_id) + if not orders: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Ready to order." 
+ ) + data = [] + for order in orders: + data.append({ + "event": await self.event_control.get_event_by_id(event_id=int(order.event_id)), + "ticket_id": order.ticket_id, + "status": order.status + }) + + resp = { + "message": "Customer orders retrieved successfully", + "status": status.HTTP_200_OK, + "data": data + } + return resp + + + async def get_customer_order_by_order_id(self, order_id: int, user_id: int)-> OrderRep: + """ + This function retrieves a specific customer's order by validating the order and user association. + If the order exists, it compiles order details, including the associated event, ticket ID, and order + status. The response includes a success message, status code, and the retrieved data. If the order is + not found, it raises a 404 Not Found exception with an appropriate message. + """ + orders = await self.event_control.get_order_by_order_and_user_id(order_id=order_id,user_id=user_id) + if not orders: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Ready to order." + ) + data ={ + "event": await self.event_control.get_event_by_id(event_id=int(orders.event_id)), + "ticket_id": orders.ticket_id, + "status": orders.status + } + + resp = { + "message": "Customer's order retrieved successfully", + "status": status.HTTP_200_OK, + "data": data + } + return resp + +event_service = EventService \ No newline at end of file diff --git a/services/user_service.py b/services/user_service.py new file mode 100644 index 0000000..cf5938a --- /dev/null +++ b/services/user_service.py @@ -0,0 +1,87 @@ +from fastapi import HTTPException, status +import re +from fastapi.security.oauth2 import OAuth2PasswordRequestForm +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.exc import IntegrityError + +# application imports +from schemas.user_schema import UserCreate, UserResp, LoginResponse +from controllers.user_controller import user_control +from utils.security_utils import hash_password, verify_password +from utils.oauth import credential_exception, create_access_token + +class UserService: + def __init__(self, db: AsyncSession): + self.db = db + self.user_control = user_control(self.db) + + async def create_user(self, user: UserCreate)->UserResp: + """Create new user + + Args: + user: (UserCreate): data of new user + + Returns: + resp: Added user information + """ + try: + user.password = await hash_password(user.password.get_secret_value()) + user = user.model_dump(exclude_unset=True) + created_user = await self.user_control.add_user(user) + resp = { + "message": "User created successfully", + "data": created_user, + "status": 201 + } + return resp + # catches the duplicates and raises the exception + except IntegrityError as e: + error_message = e.args[0] + match = re.search(r"Key \((.*?)\)=", error_message) + if match: + violated_key = match.group(1) + key = violated_key.split(", ") + if len(key) > 1: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"{key[-1].capitalize()} already exists", + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"{key[0]} already exists", + ) + + + async def login_user(self, user: OAuth2PasswordRequestForm)->LoginResponse: + """Login user + + Args: + user: (OAuth2PasswordRequestForm): credentials of user + + Returns: + resp: Access token of logged in user + """ + db_user = await self.user_control.get_user_by_email(email=user.username) + if not user: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="User does not exist" + ) + # 
password verification + check_password = await verify_password(db_user.password, user.password) + # raise credential error + if not check_password: + await credential_exception() + # create access token + tokenizer = {"id": str(db_user.id)} + access_token = await create_access_token(tokenizer) + resp = { + "message": "User Logged in Successfully", + "status": status.HTTP_200_OK, + "access_token": access_token, + "token_type": "Bearer" + } + return resp + +user_service = UserService \ No newline at end of file diff --git a/suraj_readme.md b/suraj_readme.md new file mode 100644 index 0000000..643311a --- /dev/null +++ b/suraj_readme.md @@ -0,0 +1,150 @@ +> - name: Suraj Sharma +> - email: er.surajsharma1389@gmail.com +> - phone: 9819692679 +> - Role: Backend, Leader & Architect +> - Location: Kadaghari, Kathmandu +> - Github profile: https://github.com/S8r2j +> - Linkedin profile: https://www.linkedin.com/in/suraj-sharma-a049a31a8/ +> - Resume: https://github.com/S8r2j/ticket_management_system/blob/master/suraj_sharma.pdf + + +# Ticket Booking System with FastAPI + +FastAPI is an excellent choice for building a ticket booking system due to its high performance and scalability. Built on Starlette and Pydantic, it can handle a large number of concurrent requests with low latency, making it ideal for peak traffic times like ticket sales. Its asynchronous support ensures the system remains responsive, even under heavy load. Additionally, FastAPI’s easy API development, automatic documentation, and seamless integration with databases streamline building and maintaining the system. + +## Key Features + +- **High Performance**: Built for low latency and handling concurrent requests, ideal for peak traffic times. +- **Asynchronous Support**: Ensures the system remains responsive under heavy load, especially during I/O-bound operations like database and payment gateway interactions. +- **Easy API Development**: Quick creation of RESTful APIs, with built-in support for validation, documentation, and database integration. +- **Automatic Documentation**: Swagger UI and ReDoc for interactive API documentation, making development and debugging easier. +- **Data Validation**: Uses Pydantic for strong validation, ensuring correct user inputs and reducing errors. +- **Security**: Supports modern security practices like OAuth2, JWT, and API key validation for safe user authentication. +- **Scalability**: Designed to scale horizontally, efficiently handling multiple concurrent connections, especially with load balancing. +- **WebSockets**: Real-time updates, useful for pushing live information to users (e.g., ticket availability). +- **Microservices Support**: Easily integrates with a microservices-based architecture for handling different aspects like user management, payment processing, and ticket management. + +## How to Run the Backend System + +Follow the steps below to set up and run the backend system: + +### Steps to Run +*Make sure you have python installed* +1. **Clone the Repository** + Clone this repository to your local machine. + +2. **Setup PostgreSQL Database** + Configure your PostgreSQL database with the following: + - Database name + - Username + - Password + +3. **Navigate to the Project Directory** + Move to the project directory using the terminal. + +4. **Configure Environment Variables** + - Rename `example.env` to `.env`. + - Update the `.env` file with your secret credentials. + +5. **Setup Redis Server** + Ensure you have Redis server installed and running on your local PC. 
If Redis is not already set up:
+   - Install WSL (Windows Subsystem for Linux) and configure the user and password.
+   - Run the following commands step by step in WSL:
+   ```bash
+   sudo apt install redis
+   ```
+   Check the Redis installation:
+   ```bash
+   redis-server --version
+   ```
+   Enable the Redis server to start automatically:
+   ```bash
+   sudo systemctl enable redis-server
+   ```
+   Check the Redis server status:
+   ```bash
+   sudo service redis-server status
+   ```
+
+6. **Install the Stripe CLI (Windows)**
+   Install the Stripe CLI `.exe` file and add its path to the environment variables. After the installation,
+   log in to Stripe from the command prompt:
+   ```bash
+   stripe login
+   ```
+   This opens a browser tab asking you to allow access to the CLI. Once permission is granted, run the following in the command prompt:
+   ```bash
+   stripe listen --forward-to localhost:8000/api/v1/events/webhook
+   ```
+
+7. **Install dependencies**
+   Execute the dependency installation command to set up the required packages:
+   ```bash
+   pip install -r requirements.txt
+   ```
+
+8. **Run the Alembic migration**
+   Execute the Alembic migration command to set up your database:
+   ```bash
+   alembic upgrade heads
+   ```
+
+9. **Run the backend:**
+   ```bash
+   uvicorn main:app
+   ```
+10. **Access the APIs:**
+   ```bash
+   http://localhost:8000/docs
+   or
+   http://127.0.0.1:8000/docs
+   ```
+*Note: Ensure the Redis server is active before running the backend server.*
+
+## How to set the admin?
+    python -m scripts.admin <email> <is_admin>
+Pass `True` as `<is_admin>` to make the user an admin and `False` to remove the admin role.
+
+## How does the locking mechanism work?
+The locking mechanism works as follows:
+
+1. **Redis Locking with `SETNX`**
+   The function `acquire_ticket_lock` attempts to set a Redis key (`lock_ticket:{ticket_id}`). If the key doesn't exist, it locks the ticket using `SETNX`. If the key already exists, the lock is not acquired.
+
+2. **Expiration Time**
+   To prevent deadlocks, an expiration time is set on the lock using `expire`. The lock timeout is currently set in code to `30` seconds.
+
+3. **Validation**
+   In the `book_ticket` function, before acquiring the lock, it checks whether the ticket is already sold or locked. If not, it proceeds to lock the ticket.
+
+4. **Lock Status**
+   If the lock is acquired, booking continues; if not, the operation stops, ensuring no conflicts.
+
+This mechanism prevents concurrent ticket bookings and ensures only one process can lock and book a ticket at a time.
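+
+Below is a minimal, self-contained sketch of this SETNX + EXPIRE pattern. It mirrors `utils/redis_utils.py` (same key format and 30-second timeout) but strips the application-specific parts; the `try_lock` helper and the local Redis URL are illustrative assumptions, not the production code path:
+
+```python
+import asyncio
+
+import redis.asyncio as redis_async
+
+
+async def try_lock(ticket_id: int, timeout: int = 30) -> bool:
+    """Try to lock a ticket; returns True only for the first caller."""
+    redis = redis_async.from_url("redis://localhost")
+    try:
+        lock_key = f"lock_ticket:{ticket_id}"
+        # SETNX succeeds only if the key does not exist yet,
+        # so exactly one concurrent booking attempt can win the lock.
+        if await redis.setnx(lock_key, "locked"):
+            # EXPIRE (in seconds) releases the lock automatically if the
+            # holder never completes payment, avoiding a deadlocked ticket.
+            await redis.expire(lock_key, timeout)
+            return True
+        return False
+    finally:
+        await redis.close()
+
+
+async def main() -> None:
+    first = await try_lock(ticket_id=1)
+    second = await try_lock(ticket_id=1)
+    print(first, second)  # on a fresh key: True False, the second attempt is rejected
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
+```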
+
+## How is Stripe integrated?
+Stripe is integrated through the `stripe` Python library, which is used to interact with the Stripe API. The integration includes:
+
+1. **Creating Stripe Prices**
+   The `create_stripe_price` function creates a price for events by specifying the price and product details.
+
+2. **Stripe Checkout Sessions**
+   The `create_stripe_session_checkout` function creates a checkout session for processing payments, with line items and success and cancellation URLs.
+
+3. **Stripe Webhooks**
+   The `stripe_webhook_service` function handles Stripe webhook events to process payment outcomes, updating order and ticket statuses based on payment success or failure. It also verifies the webhook signature for security.
+
+## Details Related to Kafka
+As I am currently involved in another company as a backend dev, I had very limited time to work on this project. Redis, the locking mechanism and Stripe were all new to me, but I still implemented them in the system with full docs in the code. I completed this system over two nights since I had work during office hours, so I was unable to integrate Kafka into the system. I hope you will understand my position.
+
+## Time Log
+| Date | Time Spent | Work |
+|------------|------------|-------------------------|
+| 2024-12-01 | 5 hours | Redis integration within ticket booking, plus other features like user creation, event creation and ticket booking, with testing and error handling to some level |
+| 2024-12-02 | 4 hours | Stripe integration within ticket booking, plus other features like getting event details, the admin-rights dependency, admin creation and getting customer orders, with testing and error handling to some level |
+| 2024-12-03 | 30 mins | I couldn't do much due to my other job, since I am responsible for that role as long as I am involved with that company. However, I tested the built system through Postman and saved the example response for each request in Postman. |
+
+## Further Improvements to the current system
+- A new endpoint to list the tickets of an event
+- The locking time could be made configurable instead of being hard-coded
+- A new endpoint to get the list of the current user's orders
\ No newline at end of file
diff --git a/suraj_sharma.pdf b/suraj_sharma.pdf
new file mode 100644
index 0000000..cb86d88
Binary files /dev/null and b/suraj_sharma.pdf differ
diff --git a/utils/db_utils.py b/utils/db_utils.py
new file mode 100644
index 0000000..8bb5e1f
--- /dev/null
+++ b/utils/db_utils.py
@@ -0,0 +1,13 @@
+from models.database import session
+
+async def get_db():
+    """
+    This function provides a database session for use within API route
+    handlers or services. It uses an asynchronous session context manager to
+    ensure proper initialization and cleanup of the database connection.
+    The session is yielded for use and automatically closed after the operation
+    completes, ensuring efficient resource management.
+    """
+    # The async context manager creates the session and closes it on exit.
+    async with session() as db:
+        yield db
\ No newline at end of file
diff --git a/utils/model_utils.py b/utils/model_utils.py
new file mode 100644
index 0000000..4010062
--- /dev/null
+++ b/utils/model_utils.py
@@ -0,0 +1,20 @@
+from sqlalchemy import (
+    Column,
+    Integer,
+    TIMESTAMP,
+    text,
+)
+from models.database import Base
+
+
+class AbstractModels(Base):
+    """Abstract base model
+
+    Args:
+        Base (_type_): Inherits SQLAlchemy's declarative Base and provides the common id, created_at and updated_at columns for inheriting tables.
+ """ + id=Column(Integer, autoincrement=True, nullable=False, unique=True, primary_key=True) + created_at=Column(TIMESTAMP, server_default=text("NOW()"), onupdate=None) + updated_at=Column(TIMESTAMP, server_default=text("NOW()")) + + __abstract__ = True \ No newline at end of file diff --git a/utils/oauth.py b/utils/oauth.py new file mode 100644 index 0000000..94e153e --- /dev/null +++ b/utils/oauth.py @@ -0,0 +1,73 @@ +# python import +from datetime import datetime, timedelta + +# framework imports +from fastapi import Depends, Header, HTTPException, status +from fastapi.security.oauth2 import OAuth2PasswordBearer + +# JWT imports +from jose import JWTError, jwt +from sqlalchemy.ext.asyncio import AsyncSession + +# Apoplication imports +from core.config import auth_settings +from schemas.user_schema import TokenData +from controllers.user_controller import user_control +from utils.db_utils import get_db + +# OAUTH Login Endpoint +oauth_schemes = OAuth2PasswordBearer(tokenUrl="/api/v1/auth/login/") + + +# AUTH SECRETS AND TIME LIMITS +access_secret_key = auth_settings.access_secret_key +access_time_exp = auth_settings.access_time_exp +Algorithm = auth_settings.Algorithm + + +async def create_access_token(data: dict) -> str: + """ + Generates a JWT token with an expiration date using the given data. + """ + to_encode = data.copy() + expire = datetime.now() + timedelta(days=access_time_exp) + to_encode["exp"] = expire + encode_jwt = jwt.encode(to_encode, access_secret_key, algorithm=Algorithm) + return encode_jwt + + +async def credential_exception(): + """ + Handles and raises authentication failure exceptions. + """ + raise HTTPException( + detail="Could not validate credentials", + status_code=status.HTTP_401_UNAUTHORIZED, + headers={"WWW-Authenticate": "Bearer"}, + ) + + +async def get_current_user( + token: str = Depends(oauth_schemes), + db: AsyncSession = Depends(get_db), +): + """ + Validates a JWT token, retrieves the user from the database, + and returns the user if authenticated. + """ + try: + decode_data = jwt.decode(token, access_secret_key, algorithms=Algorithm) + id = decode_data.get("id") + if id is None: + await credential_exception() + + token_data = TokenData(id=id) + except JWTError: + await credential_exception() + + user_check = await user_control(db).get_user_by_id(int(token_data.id)) + + if not user_check: + await credential_exception() + + return user_check diff --git a/utils/redis_utils.py b/utils/redis_utils.py new file mode 100644 index 0000000..9ee09c2 --- /dev/null +++ b/utils/redis_utils.py @@ -0,0 +1,115 @@ +import asyncio +import redis.asyncio as redis_async + +from schemas.event_schema import TicketStatus, OrderStatus +from models.database import session +from controllers.event_controllers import event_control + +async def acquire_ticket_lock(ticket_id:int, timeout=30): + """ + Acquires the lock on a ticket. + + Args: + ticket_id (int): The ID of the ticket to lock. + timeout (miliseconds): Milliseconds to hold the lock onto the ticket + + Returns: + bool: True if the lock was acquired successfully, False otherwise. 
+ """ + redis = await redis_async.from_url('redis://localhost') + # Check if notify-keyspace-events is configured + config = await redis.config_get('notify-keyspace-events') + if 'Ex' not in config['notify-keyspace-events']: + print("Warning: Redis keyspace events not properly configured") + await redis.config_set('notify-keyspace-events', 'Ex') + lock_key = f"lock_ticket:{ticket_id}" + lock_value = TicketStatus.LOCKED.value # This can be any unique value like a session ID + + # Attempt to acquire the lock using SETNX (SET if Not Exists) + lock_acquired = await redis.setnx(lock_key, lock_value) + if lock_acquired: + # Set an expiration time on the lock to avoid deadlock + await redis.expire(lock_key, timeout) + await redis.close() + return True + else: + await redis.close() + return False + +async def release_ticket_lock(ticket_id: int): + """ + Removes the lock from a ticket manually. + + Args: + ticket_id (int): The ID of the ticket to unlock. + + Returns: + bool: True if the lock was removed successfully, False otherwise. + """ + redis = await redis_async.from_url('redis://localhost') + lock_key = f"lock_ticket:{ticket_id}" + + # Delete the lock key + lock_removed = await redis.delete(lock_key) + await redis.close() + + if lock_removed: + print(f"Lock for ticket {ticket_id} successfully removed.") + return True + else: + print(f"No lock exists for ticket {ticket_id}.") + return False + +class RedisManager: + """ + Manages the Redis connection and listens for key expiration events. + It starts and stops the Redis client and subscribes to the expiration + events. When an event occurs, it updates the ticket status and order + status in the database to reflect the change (e.g., setting the ticket + to "AVAILABLE" and order to "FAILED"). + """ + def __init__(self): + self.redis_client = None + self.listener_task = None + + async def start(self): + self.redis_client = await redis_async.from_url('redis://localhost') + self.listener_task = asyncio.create_task(self.lock_expiration_listener()) + print("Redis listener started") + + async def stop(self): + if self.listener_task: + self.listener_task.cancel() + try: + await self.listener_task + except asyncio.CancelledError: + pass + + if self.redis_client: + await self.redis_client.close() + print("Redis connection closed") + + async def lock_expiration_listener(self): + try: + pubsub = self.redis_client.pubsub() + await pubsub.psubscribe("__keyevent@0__:expired") + print("Subscribed to expiration events") + + async for message in pubsub.listen(): + if message["type"] == "pmessage": + ticket_id = message["data"].decode("utf-8").split(":")[1] + async with session() as db: + # update ticket status + ticket = await event_control(db).get_locked_ticket_by_id(ticket_id=int(ticket_id)) + updated_ticket = await event_control(db).update_ticket_status(ticket, TicketStatus.AVAILABLE) + # update order status to failed + order = await event_control(db).get_order_by_ticket_id(int(ticket_id)) + updated_order = await event_control(db).update_order_status(order=order, status=OrderStatus.FAILED) + print(f"Updated ticket {ticket_id}") + except asyncio.CancelledError: + await pubsub.close() + raise + except Exception as e: + print(f"Error in listener: {e}") + +redis_manager = RedisManager() \ No newline at end of file diff --git a/utils/schema_utils.py b/utils/schema_utils.py new file mode 100644 index 0000000..d36c133 --- /dev/null +++ b/utils/schema_utils.py @@ -0,0 +1,35 @@ +from typing import Optional +from pydantic import BaseModel +from pydantic_settings import 
BaseSettings
+
+class AbstractSettings(BaseSettings):
+    """Settings base model
+
+    Args:
+        BaseSettings (_type_): Inherits from pydantic-settings and specifies the env-file Config
+    """
+
+    class Config:
+        env_file = ".env"
+        extra = "allow"
+
+class AbstractModel(BaseModel):
+    """Schema base model
+
+    Args:
+        BaseModel (_type_): Inherits from Pydantic and specifies Config
+    """
+
+    class Config:
+        from_attributes = True
+        use_enum_values = True
+
+class ResponseModel(AbstractModel):
+    """Base response model
+
+    Args:
+        AbstractModel (_type_): Inherits the base schema config and adds the common message and status fields
+    """
+
+    message: str
+    status: int
\ No newline at end of file
diff --git a/utils/security_utils.py b/utils/security_utils.py
new file mode 100644
index 0000000..eee9c87
--- /dev/null
+++ b/utils/security_utils.py
@@ -0,0 +1,30 @@
+from passlib.context import CryptContext
+
+
+# Password hashing context
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+
+async def verify_password(hashed_password: str, plain_password: str) -> bool:
+    """Verify a password
+
+    Args:
+        hashed_password (str): Hashed password stored in the DB, compared against the raw string
+        plain_password (str): Raw string to be compared
+
+    Returns:
+        bool: True if the password matches, False otherwise
+    """
+    return pwd_context.verify(plain_password, hashed_password)
+
+
+async def hash_password(password: str) -> str:
+    """Hash a password string
+
+    Args:
+        password (str): Plain-text password
+
+    Returns:
+        str: Hashed string
+    """
+    return pwd_context.hash(password)
\ No newline at end of file
diff --git a/utils/stripe_utils.py b/utils/stripe_utils.py
new file mode 100644
index 0000000..5889351
--- /dev/null
+++ b/utils/stripe_utils.py
@@ -0,0 +1,14 @@
+import stripe
+
+from core.config import stripe_settings
+
+stripe.api_key = stripe_settings.PRIVATE_KEY
+
+async def create_stripe_price(event_name: str, price: float):
+    """Create the Stripe Price object for each newly registered event"""
+    response = stripe.Price.create(
+        currency="usd",
+        unit_amount_decimal=price,
+        product_data={"name": event_name}
+    )
+    return response.id
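+
+# Illustrative usage only (assumes a test-mode Stripe key is configured via
+# core.config.stripe_settings). Prices are stored in cents, so 2000 == $20.00:
+#
+#     price_id = await create_stripe_price(event_name="Demo Concert", price=2000)
+#     # price_id (e.g. "price_...") is what gets saved as the event's stripe_price_id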