From 6bafe871f1be309c51c42a5fc5f38e76c5c3efdb Mon Sep 17 00:00:00 2001 From: thapasamir Date: Mon, 2 Dec 2024 16:30:43 +0545 Subject: [PATCH 1/8] initial commit --- Master_Readme.md | 183 ----------- Readme.md | 183 ----------- Readme_Backend_Leader_Architect.md | 198 ------------ Readme_Devops_Fullstack.md | 187 ----------- Readme_Frontend_Dev.md | 146 --------- Readme_Generalist.md | 134 -------- ticket-booking-platform/.gitignore | 7 + ticket-booking-platform/Dockerfile | 15 + ticket-booking-platform/README.md | 87 +++++ ticket-booking-platform/__init__.py | 0 ticket-booking-platform/alembic.ini | 116 +++++++ ticket-booking-platform/app/__init__.py | 0 ticket-booking-platform/app/api/__init__.py | 0 .../app/api/route_handlers.py | 297 ++++++++++++++++++ ticket-booking-platform/app/api/routes.py | 86 +++++ ticket-booking-platform/app/core/__init__.py | 0 ticket-booking-platform/app/core/config.py | 53 ++++ ticket-booking-platform/app/core/database.py | 17 + ticket-booking-platform/app/main.py | 55 ++++ .../app/models/__init__.py | 0 ticket-booking-platform/app/models/event.py | 21 ++ ticket-booking-platform/app/models/order.py | 39 +++ ticket-booking-platform/app/models/ticket.py | 24 ++ ticket-booking-platform/app/models/user.py | 17 + .../app/repositories/__init__.py | 0 .../app/repositories/event.py | 64 ++++ .../app/repositories/order.py | 94 ++++++ .../app/repositories/redis.py | 55 ++++ .../app/repositories/ticket.py | 116 +++++++ .../app/repositories/user.py | 57 ++++ .../app/schemas/__init__.py | 0 .../app/schemas/event_schema.py | 27 ++ .../app/schemas/user_schema.py | 20 ++ .../app/services/__init__.py | 0 .../app/services/kafka_consumer.py | 100 ++++++ .../app/services/kafka_producer.py | 54 ++++ ticket-booking-platform/app/services/mail.py | 31 ++ .../app/services/stripe.py | 90 ++++++ ticket-booking-platform/app/services/utils.py | 0 ticket-booking-platform/app/utils/__init__.py | 0 .../app/utils/redis_utils.py | 48 +++ 
ticket-booking-platform/docker-compose.yaml | 89 ++++++ ticket-booking-platform/migrations/README | 1 + ticket-booking-platform/migrations/env.py | 79 +++++ .../migrations/script.py.mako | 26 ++ .../1d29b99395fd_initial_migration.py | 90 ++++++ ticket-booking-platform/requirements.txt | 35 +++ ticket-booking-platform/tests/__init__.py | 0 48 files changed, 1910 insertions(+), 1031 deletions(-) delete mode 100644 Master_Readme.md delete mode 100644 Readme.md delete mode 100644 Readme_Backend_Leader_Architect.md delete mode 100644 Readme_Devops_Fullstack.md delete mode 100644 Readme_Frontend_Dev.md delete mode 100644 Readme_Generalist.md create mode 100644 ticket-booking-platform/.gitignore create mode 100644 ticket-booking-platform/Dockerfile create mode 100644 ticket-booking-platform/README.md create mode 100644 ticket-booking-platform/__init__.py create mode 100644 ticket-booking-platform/alembic.ini create mode 100644 ticket-booking-platform/app/__init__.py create mode 100644 ticket-booking-platform/app/api/__init__.py create mode 100644 ticket-booking-platform/app/api/route_handlers.py create mode 100644 ticket-booking-platform/app/api/routes.py create mode 100644 ticket-booking-platform/app/core/__init__.py create mode 100644 ticket-booking-platform/app/core/config.py create mode 100644 ticket-booking-platform/app/core/database.py create mode 100644 ticket-booking-platform/app/main.py create mode 100644 ticket-booking-platform/app/models/__init__.py create mode 100644 ticket-booking-platform/app/models/event.py create mode 100644 ticket-booking-platform/app/models/order.py create mode 100644 ticket-booking-platform/app/models/ticket.py create mode 100644 ticket-booking-platform/app/models/user.py create mode 100644 ticket-booking-platform/app/repositories/__init__.py create mode 100644 ticket-booking-platform/app/repositories/event.py create mode 100644 ticket-booking-platform/app/repositories/order.py create mode 100644 
ticket-booking-platform/app/repositories/redis.py create mode 100644 ticket-booking-platform/app/repositories/ticket.py create mode 100644 ticket-booking-platform/app/repositories/user.py create mode 100644 ticket-booking-platform/app/schemas/__init__.py create mode 100644 ticket-booking-platform/app/schemas/event_schema.py create mode 100644 ticket-booking-platform/app/schemas/user_schema.py create mode 100644 ticket-booking-platform/app/services/__init__.py create mode 100644 ticket-booking-platform/app/services/kafka_consumer.py create mode 100644 ticket-booking-platform/app/services/kafka_producer.py create mode 100644 ticket-booking-platform/app/services/mail.py create mode 100644 ticket-booking-platform/app/services/stripe.py create mode 100644 ticket-booking-platform/app/services/utils.py create mode 100644 ticket-booking-platform/app/utils/__init__.py create mode 100644 ticket-booking-platform/app/utils/redis_utils.py create mode 100644 ticket-booking-platform/docker-compose.yaml create mode 100644 ticket-booking-platform/migrations/README create mode 100644 ticket-booking-platform/migrations/env.py create mode 100644 ticket-booking-platform/migrations/script.py.mako create mode 100644 ticket-booking-platform/migrations/versions/1d29b99395fd_initial_migration.py create mode 100644 ticket-booking-platform/requirements.txt create mode 100644 ticket-booking-platform/tests/__init__.py diff --git a/Master_Readme.md b/Master_Readme.md deleted file mode 100644 index e31ed41..0000000 --- a/Master_Readme.md +++ /dev/null @@ -1,183 +0,0 @@ - - ---- -Below is the list that defines which file is for which role: - -1. **Readme_Backend_Leader_Architect.md** - This file is for the role: **Backend Developer + Leader + Architect** -2. **Readme_Frontend_Dev.md** - This file is for the role: **Frontend Developer** -3. **Readme_Devops_Fullstack.md** - This file is for the role: **Fullstack Developer + DevOps** -4. 
**Readme_Generalist.md** - This is for the role: **Generalist Developer** - -Please make sure you are submitting the correct file for the role you are applying for. If you are applying for multiple roles, please submit the corresponding files. Note that we will not provide reenumeration for multiple submissions. And only completed submissions will be considered for the evaluation and reenumeration as discussed. - ---- - -# **Congratulations and Welcome!** - -Dear Candidate, - -First of all, congratulations and thank you for accepting this challenge. Please read this document to its entirety before starting any project. - -You are among a select few who have been given this opportunity, and we are truly excited to see what you will build. - -We understand that the scope of any of above project within the given time frame is demanding. However, we want to clarify that this is not because we do not value your time or effort; rather, it is because we are looking for someone extraordinary—someone who thrives under pressure, thinks critically, and delivers well-thought-out solutions even in challenging circumstances. - -This project is not just about technical skills; it is a reflection of qualities like determination, problem-solving, time management, and the ability to persevere. These are the attributes we believe are essential for success in the role we are hiring for. - ---- - -## **Implementation Guidelines** - -We want to assure you that we are flexible with the architecture you choose to implement(for applicable roles) as long as you follow the **MVC architecture** (optinal, for backend tasks, if it makes you slow but come up with reasoning why you opt for something else). For frontend, we do want you to use NextJS. 
- -To make it easier, you can start by cloning one of the following boilerplate repositories: - -### **Node.js/Express/Next.js** -- Clone this repository: [Node.js/Express/Next.js Boilerplate](https://github.com/nishanprime/NextJSNodeJSDockerNginxBoilerPlate.git) - -### **Flask** -- Clone this repository: [Flask MVC Boilerplate](https://github.com/nishanprime/FlaskMVCSetup.git) - -### **Preference**: -- For backend roles, while we would prefer the **Node.js/Express/Next.js** setup, we are open to you using **Flask** if you believe the implementation would be better suited for the project. If you choose Flask, provide reasoning for your decision—it will help us understand your thought process. - -- Additionally, if you need parts of the **Next.js/Node.js** boilerplate while working with Flask, feel free to clone both repositories and utilize whatever is necessary. - -- For Frontend roles, we would prefer you to use **Next.js**. - -- For DevOps roles, we would want you to make use of: -1. Docker -2. Nginx -3. Github Actions for CI/CD -4. Any Ubuntu based server for deployment -5. Let's Encrypt for SSL -6. PM2 for process management or any other process manager -7. Any other tools you think are necessary - -You can use any of these VPS provider to get free VPS to get started: -1. https://gratisvps.net/cvps/ -2. https://vpswala.org/free-vps-no-credit-card.html - -Or any other VPS provider you are comfortable with. ---- - -## **A Unique Opportunity** - -Rest assured, this challenge is not just a test; it’s an introduction to the kind of work culture you can expect here—a culture like no other in Nepal. We are building something exceptional, a workplace that fosters innovation, collaboration, and growth at a level rarely seen before. 
If you manage to pull this off, you will not just have the opportunity to join a team—you will become part of a transformative journey, one where your contributions will have real impact and where you will thrive in an environment designed to bring out the best in you. - -We are rooting for your success, and we can’t wait to see your solution! - -## **Time Tracking Instructions** - -As part of this project, we kindly request you to **self-report the time you spend** on each task. This approach relies on **honesty and transparency**, which are qualities we highly value and will consider during the evaluation process and the interview. - ---- - -## **Why Time Tracking?** -We understand that different people work at different paces, and this is not about how fast you complete the project. Instead, this is about: -1. **Understanding your approach** to time management. -2. **Seeing how you prioritize tasks** within the given timeframe. -3. **Getting insight into your problem-solving process.** - ---- - -## **What to Track** -Please maintain a simple log that includes the following: -- **Date**: The day you worked on the project. -- **Time Spent**: How many hours you worked (approximately). -- **Task Description**: A brief description of what you worked on during that time. - ---- - -## **Example Time Log** -| Date | Time Spent (hrs) | Task Description | -|------------|------------------|------------------------------------------| -| 2024-11-28 | 3 | Implemented Ticket Locking | -| 2024-11-29 | 4 | Integrated Stripe Checkout | -| 2024-11-29 | 2 | Implemented Context API etc | - ---- - -## **Important Notes** -- **Honesty is key**: We trust you to report your time accurately. This will be considered during your evaluation and interview. -- **It's not about speed**: We are looking for thoughtful implementation and prioritization, not rushing through tasks just for the sake of completion. 
-- **Doesn't mean we don't value fast work**: While we appreciate thoughtful work, we also value efficiency. If you can complete the project quickly without compromising quality, that's a plus. -- **Document thoroughly**: The quality of your self-reporting reflects on your ability to work transparently and methodically. - ---- - -## **Project Timeline** - -- You will have a total of **1–3 days** (calendar days) to complete the project from the time you receive this email. -- For example: - - **Email Sent**: Thursday, November 28 - - **Submission Deadline**: Sunday, December 1, 11:59 PM (Nepal Time Zone) - ---- - -## **Why This Timeline?** - -We believe this timeline provides: -1. **Sufficient Time**: To research, implement, test, and document the project. -2. **A Realistic Challenge**: To showcase your ability to work under time constraints while delivering quality results. - ---- - -## **Need an Extension?** - -We understand unexpected situations can arise. If you believe you need an extension, please communicate this to us as soon as possible. We may extend the deadline by up to **2 additional days**, depending on the circumstances. -1. Contact us at **contact@cloudquish.com** to request an extension. Please provide a valid reason for the extension request and the details of yours: - - Your name in the application - - Role you applied for - - Email address - - Phone number - - -## **Submission** -- Include your time log as part of the final project submission, either: - - In the project `README.md`. - - As a separate file named `TimeLog.md`. -- Be prepared to discuss your time log and decisions during the interview. - ---- - -Thank you for your hard work and honesty! - - ---- - -**Best of luck! You’ve got this.** - -Warm regards, -**Cloudquish Team** -Cloudquish LLC - -**Project Deliverables** - -Apart from mentioned deliverables in the respective Readme files, below are the deliverables we are expecting from you: - -1. 
Make sure to have below details on top of the readme.md file - - Your Name (same name as in application submission) - - Your Email - - Your Phone Number - - Role you applied for (you can find this in the top of this readme file) - - Your Address - - Time log (in the format mentioned above in Readme.md or TimeLog.md) - - Your Github Profile - - Your LinkedIn Profile - - Your Resume (If updated) - - Any other links you want to share (Optional) -2. Fully functional system as per the requirements in each Readme file. -3. Note that you can just push it to the github repo that you cloned from github classroom and we will be able to see it. No need to submit it anywhere else. - - -**How to start the project?** - -1. Clone the repository that you have been given access to. -2. Read the Readme file of the role you are applying for. -3. Start working on the project. -4. Push the code to the repository. -5. Make sure to have the time log in the Readme.md or TimeLog.md file. -6. Make sure to have all the deliverables mentioned above and in the respective Readme files. -7. We will review the code and get back to you for the next steps. -8. Done \ No newline at end of file diff --git a/Readme.md b/Readme.md deleted file mode 100644 index 58af337..0000000 --- a/Readme.md +++ /dev/null @@ -1,183 +0,0 @@ -**This is Copy of Master_Readme.md file.** - ---- -Below is the list that defines which file is for which role: - -1. **Readme_Backend_Leader_Architect.md** - This file is for the role: **Backend Developer + Leader + Architect** -2. **Readme_Frontend_Dev.md** - This file is for the role: **Frontend Developer** -3. **Readme_Devops_Fullstack.md** - This file is for the role: **Fullstack Developer + DevOps** -4. **Readme_Generalist.md** - This is for the role: **Generalist Developer** - -Please make sure you are submitting the correct file for the role you are applying for. If you are applying for multiple roles, please submit the corresponding files. 
Note that we will not provide reenumeration for multiple submissions. And only completed submissions will be considered for the evaluation and reenumeration as discussed. - ---- - -# **Congratulations and Welcome!** - -Dear Candidate, - -First of all, congratulations and thank you for accepting this challenge. Please read this document to its entirety before starting any project. - -You are among a select few who have been given this opportunity, and we are truly excited to see what you will build. - -We understand that the scope of any of above project within the given time frame is demanding. However, we want to clarify that this is not because we do not value your time or effort; rather, it is because we are looking for someone extraordinary—someone who thrives under pressure, thinks critically, and delivers well-thought-out solutions even in challenging circumstances. - -This project is not just about technical skills; it is a reflection of qualities like determination, problem-solving, time management, and the ability to persevere. These are the attributes we believe are essential for success in the role we are hiring for. - ---- - -## **Implementation Guidelines** - -We want to assure you that we are flexible with the architecture you choose to implement(for applicable roles) as long as you follow the **MVC architecture** (optinal, for backend tasks, if it makes you slow but come up with reasoning why you opt for something else). For frontend, we do want you to use NextJS. 
- -To make it easier, you can start by cloning one of the following boilerplate repositories: - -### **Node.js/Express/Next.js** -- Clone this repository: [Node.js/Express/Next.js Boilerplate](https://github.com/nishanprime/NextJSNodeJSDockerNginxBoilerPlate.git) - -### **Flask** -- Clone this repository: [Flask MVC Boilerplate](https://github.com/nishanprime/FlaskMVCSetup.git) - -### **Preference**: -- For backend roles, while we would prefer the **Node.js/Express/Next.js** setup, we are open to you using **Flask** if you believe the implementation would be better suited for the project. If you choose Flask, provide reasoning for your decision—it will help us understand your thought process. - -- Additionally, if you need parts of the **Next.js/Node.js** boilerplate while working with Flask, feel free to clone both repositories and utilize whatever is necessary. - -- For Frontend roles, we would prefer you to use **Next.js**. - -- For DevOps roles, we would want you to make use of: -1. Docker -2. Nginx -3. Github Actions for CI/CD -4. Any Ubuntu based server for deployment -5. Let's Encrypt for SSL -6. PM2 for process management or any other process manager -7. Any other tools you think are necessary - -You can use any of these VPS provider to get free VPS to get started: -1. https://gratisvps.net/cvps/ -2. https://vpswala.org/free-vps-no-credit-card.html - -Or any other VPS provider you are comfortable with. ---- - -## **A Unique Opportunity** - -Rest assured, this challenge is not just a test; it’s an introduction to the kind of work culture you can expect here—a culture like no other in Nepal. We are building something exceptional, a workplace that fosters innovation, collaboration, and growth at a level rarely seen before. 
If you manage to pull this off, you will not just have the opportunity to join a team—you will become part of a transformative journey, one where your contributions will have real impact and where you will thrive in an environment designed to bring out the best in you. - -We are rooting for your success, and we can’t wait to see your solution! - -## **Time Tracking Instructions** - -As part of this project, we kindly request you to **self-report the time you spend** on each task. This approach relies on **honesty and transparency**, which are qualities we highly value and will consider during the evaluation process and the interview. - ---- - -## **Why Time Tracking?** -We understand that different people work at different paces, and this is not about how fast you complete the project. Instead, this is about: -1. **Understanding your approach** to time management. -2. **Seeing how you prioritize tasks** within the given timeframe. -3. **Getting insight into your problem-solving process.** - ---- - -## **What to Track** -Please maintain a simple log that includes the following: -- **Date**: The day you worked on the project. -- **Time Spent**: How many hours you worked (approximately). -- **Task Description**: A brief description of what you worked on during that time. - ---- - -## **Example Time Log** -| Date | Time Spent (hrs) | Task Description | -|------------|------------------|------------------------------------------| -| 2024-11-28 | 3 | Implemented Ticket Locking | -| 2024-11-29 | 4 | Integrated Stripe Checkout | -| 2024-11-29 | 2 | Implemented Context API etc | - ---- - -## **Important Notes** -- **Honesty is key**: We trust you to report your time accurately. This will be considered during your evaluation and interview. -- **It's not about speed**: We are looking for thoughtful implementation and prioritization, not rushing through tasks just for the sake of completion. 
-- **Doesn't mean we don't value fast work**: While we appreciate thoughtful work, we also value efficiency. If you can complete the project quickly without compromising quality, that's a plus. -- **Document thoroughly**: The quality of your self-reporting reflects on your ability to work transparently and methodically. - ---- - -## **Project Timeline** - -- You will have a total of **1–3 days** (calendar days) to complete the project from the time you receive this email. -- For example: - - **Email Sent**: Thursday, November 28 - - **Submission Deadline**: Sunday, December 1, 11:59 PM (Nepal Time Zone) - ---- - -## **Why This Timeline?** - -We believe this timeline provides: -1. **Sufficient Time**: To research, implement, test, and document the project. -2. **A Realistic Challenge**: To showcase your ability to work under time constraints while delivering quality results. - ---- - -## **Need an Extension?** - -We understand unexpected situations can arise. If you believe you need an extension, please communicate this to us as soon as possible. We may extend the deadline by up to **2 additional days**, depending on the circumstances. -1. Contact us at **contact@cloudquish.com** to request an extension. Please provide a valid reason for the extension request and the details of yours: - - Your name in the application - - Role you applied for - - Email address - - Phone number - - -## **Submission** -- Include your time log as part of the final project submission, either: - - In the project `README.md`. - - As a separate file named `TimeLog.md`. -- Be prepared to discuss your time log and decisions during the interview. - ---- - -Thank you for your hard work and honesty! - - ---- - -**Best of luck! You’ve got this.** - -Warm regards, -**Cloudquish Team** -Cloudquish LLC - -**Project Deliverables** - -Apart from mentioned deliverables in the respective Readme files, below are the deliverables we are expecting from you: - -1. 
Make sure to have below details on top of the readme.md file - - Your Name (same name as in application submission) - - Your Email - - Your Phone Number - - Role you applied for (you can find this in the top of this readme file) - - Your Address - - Time log (in the format mentioned above in Readme.md or TimeLog.md) - - Your Github Profile - - Your LinkedIn Profile - - Your Resume (If updated) - - Any other links you want to share (Optional) -2. Fully functional system as per the requirements in each Readme file. -3. Note that you can just push it to the github repo that you cloned from github classroom and we will be able to see it. No need to submit it anywhere else. - - -**How to start the project?** - -1. Clone the repository that you have been given access to. -2. Read the Readme file of the role you are applying for. -3. Start working on the project. -4. Push the code to the repository. -5. Make sure to have the time log in the Readme.md or TimeLog.md file. -6. Make sure to have all the deliverables mentioned above and in the respective Readme files. -7. We will review the code and get back to you for the next steps. -8. Done \ No newline at end of file diff --git a/Readme_Backend_Leader_Architect.md b/Readme_Backend_Leader_Architect.md deleted file mode 100644 index f9d1f80..0000000 --- a/Readme_Backend_Leader_Architect.md +++ /dev/null @@ -1,198 +0,0 @@ - -# **Please do not start before reading Master_Readme.md. and instructions provided below:** - -# **Ticket Booking Platform** - -## **Objective** - -Build a ticket booking backend system that handles: - -1. **Concurrency Management**: Prevent overselling tickets by implementing a locking mechanism. -2. **Stripe Integration**: Process payments securely. For now, use Stripe test mode's Checkout or Payment Intents. You should be able to read through the [Stripe API documentation](https://stripe.com/docs/api) to understand how to integrate it. -3. 
**Real-Time Updates**: Use Kafka for event-driven updates about ticket availability and payment status. For now, focus on publishing and consuming messages for ticket availability and payment notifications. However, feel free to add any other topics or consumers as needed. - ---- - -## **Key Features** - -### **1. Ticket Booking** - -#### **Booking Tickets** - -- **Endpoint**: `POST /events/:id/book` (or similar that makes sense) -- **Functionality**: - - Lock tickets when a user starts the booking process to prevent overselling. - - Return a Stripe payment session link or client secret for completing the payment. - - Store booking details (user, event, tickets, status) in the database. - -#### **Concurrency Management** - -- Use **Redis** or a similar mechanism to: - - Lock tickets for a user during the booking process. - - Release the lock: - - Immediately if payment fails. - - After a timeout (e.g., 5 minutes) if the user does not complete the payment. - - Prevent other users from booking the same tickets until the lock is released. - -#### **Order Expiration** - -- Pending orders expire automatically after the timeout. -- Expired orders should release their associated tickets. - ---- - -### **2. Payment Processing** - -#### **Stripe Checkout Integration** - -- **Endpoint**: Use Stripe Checkout or Payment Intents for payments. -- **Flow**: - 1. Create a Stripe Checkout session or Payment Intent when the user starts the booking. - 2. Redirect the user to Stripe to complete the payment. - -#### **Webhook Validation** - -- **Endpoint**: `POST /webhook` (or similar) -- **Functionality**: - - Listen for Stripe webhook events: - - `payment_intent.succeeded` or `checkout.session.completed` to confirm successful payments. - - Mark the order as "Confirmed" upon payment success. - - Release ticket locks and mark the order as "Failed" if the payment fails. - - Validate the webhook signature to ensure authenticity. - ---- - -### **3. 
Ticket Management** - -#### **Event Management** - -- **Endpoint**: `POST /events` -- **Functionality**: - - Admins can create or update events with details like: - - Event name, description, date, venue, ticket price, and total tickets available. - - Store event details in the database. - -#### **Ticket Status** - -- **Endpoint**: `GET /orders/:id` -- **Functionality**: - - Allow users to check the status of their bookings (Pending, Confirmed, Failed). - -#### **Real-Time Ticket Availability** - -- Use **Kafka** to publish messages about ticket availability: - - Notify when tickets are locked, unlocked, or sold. - - Consumers can listen to these updates for real-time UI updates or waitlist processing. - ---- - -### **4. Real-Time Messaging** - -#### **Kafka Topics** - -- **`ticket_availability`**: - - Messages about ticket locks, unlocks, and sales. -- **`payment_notifications`**: - - Messages about payment statuses (success or failure). - -#### **Consumers** - -- **Notification Service**: - - Send booking confirmations via email or SMS when payment succeeds. -- **Waitlist Processor**: - - Notify users on the waitlist when tickets become available. - ---- - -## **Stretch Features** - -1. **Waitlist Management** - - - Add a waitlist feature for sold-out events. - - Notify waitlisted users via Kafka when tickets are unlocked. - -2. **Refund System** - - - Allow users to cancel bookings within a certain timeframe. - - Process refunds through Stripe and release tickets back to inventory. - -3. **Admin Dashboard** - - Add a real-time admin dashboard to display: - - Tickets sold, revenue, and waitlist metrics. - ---- - -## **Database Schema** (Decide based on your implementation, Given below is just a suggestion) - -### Suggested Tables: - -1. **Users** - - - `id`: Unique identifier. - - `name`: User name. - - `email`: User email. - -2. **Events** - - - `id`: Unique identifier. - - `name`: Event name. - - `date`: Event date. - - `venue`: Event venue. 
- - `price`: Ticket price. - - `total_tickets`: Total tickets available. - -3. **Tickets** - - - `id`: Unique identifier. - - `event_id`: Associated event ID. - - `status`: Ticket status (Available, Locked, Sold). - -4. **Orders** - - `id`: Unique identifier. - - `user_id`: Associated user ID. - - `event_id`: Associated event ID. - - `status`: Order status (Pending, Confirmed, Failed). - - `stripe_session_id`: Stripe session or payment intent ID. - ---- - -## **Evaluation Criteria** - -1. **Concurrency Management** - - - Locks are implemented to prevent ticket overselling. - - Locks are released correctly on payment failure or timeout. - -2. **Stripe Integration** - - - Payments are processed securely in test mode. - - Stripe webhook events are validated and handled properly. - -3. **Real-Time Messaging** - - - Kafka topics and consumers are correctly implemented for ticket updates and notifications. - -4. **Code Quality** - - - Modular and maintainable code. - - Clear documentation for API endpoints and project setup. - -5. **Scalability** - - The system can handle high-concurrency scenarios during ticket sales. - ---- - -## **Project Deliverables** -1. Required stuffs from master readme -2. Fully functional backend system adhering to the requirements. -3. Postman or cURL examples for all API endpoints. (optional but recommended) -4. Documentation explaining: (optional but recommended, but we atleast need the readme.md explaining the project setup and how to run the project) - - Locking mechanism. - - Stripe integration details. - - Kafka messaging implementation. - ---- - - - - diff --git a/Readme_Devops_Fullstack.md b/Readme_Devops_Fullstack.md deleted file mode 100644 index e557c8d..0000000 --- a/Readme_Devops_Fullstack.md +++ /dev/null @@ -1,187 +0,0 @@ -# **Please do not start before reading Master_Readme.md. 
and instructions provided below:** - - -# **Project: Sticky Notes Whiteboard with Dependencies** - ---- - -## **Objective** - -This project evaluates your **fullstack development** and **DevOps expertise**(Focus on DevOps). Your primary focus will be on setting up a **robust, secure, and automated DevOps pipeline** for deployment while building a functional application. - -You will: -1. Build a **sticky notes whiteboard application** to demonstrate your frontend and backend development skills (Nothing too serious). -2. Set up and maintain a **CI/CD pipeline** to automate testing, building, and deployment processes. -3. Ensure secure handling of credentials, containerization, SSL configuration, and monitoring of the deployed application. - ---- - -## **Project Overview** - -### **Sticky Notes Whiteboard Application** -- Users can: - 1. Add, edit, and delete sticky notes on a canvas (Canvas of React Flow iteself). - 2. Connect notes to define dependencies using **React Flow**. - 3. Save and load the whiteboard as a reusable flow. - ---- - -## **Primary DevOps Requirements** - -### **1. CI/CD Pipeline** -Set up an automated pipeline using **GitHub Actions** that includes: -- **Pre-Commit and Pre-Push Hooks**: - - Use **Husky** to implement hooks for: - - Running linting tools (e.g., ESLint, Prettier). - - Running basic unit tests. - - Preventing commits or pushes if checks fail. - - Setup custom actions runners to your ubuntu servers as non-root user -- **Pipeline Steps**: - - Trigger the pipeline on every push and pull request to the `main` branch. - - Steps: - 1. **Run Tests**: - - Execute both frontend and backend unit tests. - 2. **Build**: - - Build the application. - 3. **Security Checks**: - - Include a step for scanning vulnerabilities (e.g., **npm audit** or **Trivy**). - 4. **Deploy**: - - Deploy the application to the server (To know what servers/vps to use, refer to the master file). - -### **2. 
Containerization** -- Use **Docker** to containerize the application: - Separate containers for: - **Frontend**: Serve the React application. - **Backend**: Host the APIs. - **Database**: Use SQLite or PostgreSQL in a container or use free postgres trial available online if available. -- Use **Docker Compose** to orchestrate the containers for local development and production. -- Apply best practices for container security: - Use lightweight images (e.g., `node:alpine`). - Avoid running containers as the root user. - Limit container privileges. - -### **3. Credentials Management** -- Use **GitHub Secrets** to securely store: - Database credentials. - JWT secret keys. - Any other sensitive environment variables. -- **Local Setup**: - Use a `.env` file for local development but ensure it’s added to `.gitignore`. - -### **4. Secure Deployment** -- Deploy the application to a **Ubuntu server** (e.g., AWS EC2, DigitalOcean or the one mentioned in the master read me). -- Use **Nginx** as a reverse proxy to serve the application. -- Enforce HTTPS: - Configure **Let’s Encrypt** for free SSL/TLS certificates. - Redirect all HTTP traffic to HTTPS. - -### **5. Branch Protection** -- Protect the `main` branch: - Require CI checks to pass before merging pull requests. - Enable code review for all pull requests. - -### **6. Monitoring and Logging** -- Implement basic logging for: - API requests (e.g., request time, status codes). - Errors for debugging. -- Monitoring (Optional): - Integrate **Prometheus** and **Grafana** or use cloud-native monitoring tools. - ---- - -## **Development Requirements** - -### **Frontend (Sticky Notes Whiteboard)** -1. **Whiteboard Interface**: - Use **React Flow** to: - Add sticky notes with customizable content. - Connect notes to define dependencies visually. - Enable drag-and-drop positioning for notes. - -2. 
**Save and Load**: - - Save the whiteboard (notes and connections) to the backend as a flow. - - Fetch and render saved flows on the canvas. - -3. **Styling**: - - Use **Tailwind CSS** or any modern UI framework to create a clean and responsive(not mandatory) design. - -### **Backend** -1. **API Endpoints**: - - `POST /flows`: Save a new whiteboard flow. - - `GET /flows`: Fetch all saved flows. - - `GET /flows/:id`: Fetch a specific flow by ID. - - `DELETE /flows/:id`: Delete a specific flow. - -2. **Authentication**: - - Implement token-based authentication using **JWT**. - - Protect the endpoints to ensure only authenticated users can manage flows. - -3. **Database**: - - Use **SQLite** or **PostgreSQL** to store: - - Flows: A JSON representation of notes and connections. - - User authentication data. - ---- - -## **Stretch Goals** (optional) -1. **Real-Time Collaboration**: - - Use WebSockets to enable real-time updates for multiple users on the same whiteboard. -2. **Role-Based Access Control**: - - Allow admins to manage all flows while users can manage only their own. -3. **Error Reporting**: - - Integrate tools like **Sentry** for error tracking and reporting. - ---- - -## **Deliverables** - -1. **GitHub Repository**: - - Include the entire codebase with separate folders for frontend and backend. - - Add a `readme.md` file with: - - CI/CD pipeline details. - - Steps to set up and deploy the application locally and in production. - - Explanation of any challenges and solutions implemented. - -2. **Live Demo**: - - Provide a public URL for the deployed application with HTTPS enabled. - -3. **CI/CD Pipeline**: - - Ensure the pipeline automates testing, building, and deployment. - -4. **Documentation**: - - Include detailed instructions in the `README.md` for: - - Local setup. - - API usage and endpoints. - - DevOps practices followed. - -5. **Time Log**: - - Refer to master readme for timelog details - ---- - -## **Evaluation Criteria** - -### **1. 
Fullstack Development** (Less priority) -- Is the sticky notes whiteboard functional and intuitive? -- Are the APIs robust, secure, and well-documented? - -### **2. DevOps Practices** (High priority) -- Is the CI/CD pipeline automated and reliable? -- Are containers well-organized and secure? -- Are SSL and credential management implemented effectively? -- Is the deployment process clearly documented and reproducible? - -### **3. Code Quality** -- Is the code modular, maintainable, and readable? -- Are pre-commit hooks implemented effectively? - -### **4. Creativity** -- Does the candidate go beyond basic requirements with enhancements? -- Is the UI polished and user-friendly? - ---- - - - -## **Good Luck!** - diff --git a/Readme_Frontend_Dev.md b/Readme_Frontend_Dev.md deleted file mode 100644 index b7fc273..0000000 --- a/Readme_Frontend_Dev.md +++ /dev/null @@ -1,146 +0,0 @@ -# **Please do not start before reading Master_Readme.md. and instructions provided below:** - - -# **Frontend Developer Assessment** - -Welcome to the assessment for the Frontend Developer role! This project is designed to test your **technical skills**, **research ability**, and **UI/UX implementation expertise**. You will work on a single project that evaluates both your ability to use new libraries effectively and your skill in creating pixel-perfect, responsive user interfaces. - ---- - -## **Objective** - -The goal of this assessment is to: -1. **Test your ability to research and implement new libraries**, such as **React Flow** and **React DnD**, for building complex, functional components. -2. **Evaluate your UI/UX development skills** by recreating a provided Figma design with responsiveness and interactivity. -3. Understand your approach to **state management**, **context handling**, and **integrating mock data**. - ---- - -## **Project Overview** - -You will build a **Task and Relationship Management Tool** that has two key components: -1. 
**Database Diagramming Tool**: - - Allows users to create, connect, and manage database tables visually using **React Flow**. - - Implements global state using **Context API**. -2. **Pixel-Perfect UI Clone**: - - Recreate a responsive UI based on a provided Figma design to demonstrate your **attention to detail** and **UI development skills**. - ---- - -## **Key Features** - -### **1. Database Diagramming Tool** - -#### **Core Features** -- **Diagram Builder**: - - Users can: - - Add new tables (nodes) to a canvas. - - Connect tables with lines to define relationships. - - Edit table properties such as column names, data types, and constraints. - -- **State Management**: - - Use **Context API** to manage: - - The list of tables and their properties. - - Connections between tables. - - Diagram state (e.g., zoom, pan). - -- **Export and Save**: - - Save the diagram as a JSON file. - - Reload saved diagrams to resume editing. - -#### **Custom UI Requirements** -- Add a **Sidebar**: - - Manage table properties (e.g., rename, add/remove columns). - - List all connections and allow users to edit/delete them. -- Add a **Toolbar**: - - Include options like "Undo," "Redo," and "Export." - - A button to switch between the diagram and table list views. - -#### **Libraries**: -- Use **React Flow** for diagramming functionality. -- Use **Tailwind CSS** or any UI kit (e.g., Chakra UI, ShadCN) for styling. - ---- - -### **2. Pixel-Perfect UI Clone** - -#### **Core Features** -- Recreate the provided [Figma Design](https://www.figma.com/proto/PpWBQopL2PrIsAhDvOxCRa/HonestValue-Main-File?page-id=24%3A90&node-id=102-1405&node-type=frame&viewport=2518%2C2147%2C0.19&t=iHGEZAiMrFpuxZbu-1&scaling=min-zoom&content-scaling=fixed) using **Next.js or Vite**. 
-(Figma Link: https://www.figma.com/proto/PpWBQopL2PrIsAhDvOxCRa/HonestValue-Main-File?page-id=24%3A90&node-id=102-1405&node-type=frame&viewport=2518%2C2147%2C0.19&t=fLDHkbsal80rr9VM-1&scaling=min-zoom&content-scaling=fixed) -- Ensure **responsiveness**: - - The UI should work seamlessly on mobile, tablet, and desktop screens (since figma does not have smaller screen design, we want to see your creativity on how you think it should be. Nothing too serious, but we want you to be independent thinker.). - -#### **Mock Data Integration** -- Use JSON files to populate dynamic sections of the UI. -- Example: Display property details, comparisons, or user data dynamically. - -#### **Customizations** -- Add subtle animations and transitions (e.g., hover effects, loading states) to enhance the user experience where you deem fit. -- Ensure **accessibility**: - - Use semantic HTML, ARIA roles, and keyboard navigation where necessary. - -#### **Styling** -- Use **Tailwind CSS** for styling or integrate with Chakra UI/ShadCN as needed. -- Match colors, fonts, and layout details exactly as provided in the design. - ---- - -## **Deliverables** - -1. **GitHub Repository**: - - Push your code to a GitHub repository with clear and meaningful commit messages. - - Include a `README.md` file with: - - Project setup instructions. - - An explanation of the libraries/tools you used and why. - - Challenges faced and how you overcame them. - -2. **Live Demo**: - - Deploy the project to a hosting platform (e.g., Vercel, Netlify). or let us know how to run it locally. - - Provide a live link to the hosted app (if not possible, provide a video demo). - -3. **Screenshots**: - - Include screenshots of the completed UI in your repository. - -4. **Time Tracking**: - - Maintain a log of the time spent on each task (e.g., research, development, testing). - - Include this log in your repository as mentioned in the Master Readme. - ---- - -## **Evaluation Criteria** - -1. 
**Research and Implementation**: - - How effectively did you use **React Flow** and **React DnD** to implement the diagramming tool? - - Is the Context API used appropriately for state management? - -2. **UI Development**: - - Is the recreated Figma design pixel-perfect and responsive? - - Are animations and transitions smooth and polished? - -3. **Code Quality**: - - Is your code modular, readable, and well-documented? - - Did you follow best practices for React and state management? - -4. **Creativity and Initiative**: - - Have you added enhancements or features beyond the requirements? - - Does your app look polished and professional? - -5. **Functionality**: - - Are all features working as expected? - - Is the app bug-free? - ---- - -## **Bonus Points** (not mandatory) - -- Add real-time collaboration features using WebSockets. -- Implement advanced accessibility features (e.g., screen reader support). -- Add animations or creative visual effects to enhance the overall experience. - ---- - -## **Good Luck!** - -We’re excited to see your creativity and skills shine in this project. Feel free to use any libraries, tools, or frameworks you think will make your project stand out. - -If you have any questions, feel free to reach out. Good luck, and happy coding! 🚀 diff --git a/Readme_Generalist.md b/Readme_Generalist.md deleted file mode 100644 index f3528d8..0000000 --- a/Readme_Generalist.md +++ /dev/null @@ -1,134 +0,0 @@ -# **Please do not start before reading Master_Readme.md. and instructions provided below:** - - -# **Generalist Developer Assessment: Research, Build, and Document** - ---- - -## **Objective** - -This challenge evaluates your ability to: -1. **Research and implement a combination of new technologies** from the provided list. -2. **Build a functional application or tool** using these technologies. -3. **Document your process and implementation in detail**, focusing on your thought process, challenges, and learnings. 
- ---- - -## **Project Overview** - -### **What You Need to Do** -1. Choose a **combination of technologies** from the provided list below. -2. Build **a functional application (any, of your choice but with tech mentioned below)** showcasing the integration of these technologies. -3. Provide **comprehensive documentation**: - - Why did you choose these technologies? - - How did you integrate them? - - Challenges faced and how you overcame them. - - Step-by-step instructions for running the project locally and in production. - -### **Focus on Documentation** -The most important part of this challenge is **clear and detailed documentation**. Your documentation should enable someone to understand your project, your choices, and how to replicate your setup. - ---- - -## **Suggested Combinations** - -### **Frontend + Backend** -1. **React Flow + GraphQL Apollo Server**: - - Build an interactive flowchart where data is fetched and updated via GraphQL. -2. **React DND + tRPC**: - - Create a drag-and-drop task manager with real-time updates using tRPC. - -### **Fullstack** -3. **React Flow + Redis Streams**: - - Build a real-time workflow diagram with live updates powered by Redis Streams. -4. **Supabase + Framer Motion**: - - Build a user authentication system with Supabase and add polished animations using Framer Motion. - -### **AI/ML Integration** -5. **LangChain + Kafka**: - - Build a document processing pipeline where Kafka manages document ingestion, and LangChain summarizes them. -6. **Autogen + React Flow**: - - Create an automated workflow generator where users define workflows, and Autogen executes them with AI agents. - -### **DevOps + Fullstack** -7. **Kubernetes + Supabase**: - - Deploy a scalable Supabase-backed application on Kubernetes. -8. **Terraform + Docker + Redis Streams**: - - Use Terraform to manage infrastructure for a Dockerized application that streams real-time data via Redis. 
- - - -### **Note**: Above mentioned suggestions are not the only exact things you should build. Make use of tech mentioned above and feel free to build anything you wish. Just make sure to document it well and be able to justify it as mentioned above. - ---- - -## **Deliverables** - -### **1. GitHub Repository** -- Push your project code to a **public GitHub repository** with: - - A clear `README.md` file. - - Well-structured and documented code. - -### **2. Documentation** -Your documentation is critical. Include: -1. **Project Overview**: - - What the project does and why you built it. - - The combination of technologies you chose and why. -2. **Research Process**: - - Key learnings about each technology. - - Challenges faced and how you solved them. -3. **Implementation**: - - A step-by-step walkthrough of how you integrated the chosen technologies. - - Include code snippets where helpful. -4. **Setup Instructions**: - - **Local Setup**: Detailed steps for running the project locally. - - **Production Setup**: How you deployed the application. -5. **API Documentation** (if applicable): - - Include endpoints, request/response examples, and any necessary setup instructions. -6. **Future Improvements**: - - Ideas for extending the project. - -### **3. Live Demo** -- Deploy your project to a cloud platform (e.g., Vercel, AWS, Netlify). -- Provide a **public URL** to access the live demo. -- Also note, we still need to know how to replicate it locally. - ---- - -## **Evaluation Criteria** - -1. **Documentation**: - - Is your documentation clear, detailed, and professional? - - Does it provide a thorough walkthrough of your research and implementation? - -2. **Research and Understanding**: - - How well did you understand and implement the chosen technologies? - - Are your design and technology choices justified? - -3. **Functionality**: - - Is the application/tool working as intended? - - Are APIs functional and well-documented? - -4. 
**Creativity**: - - Is the project unique or innovative? - - Does it showcase your ability to think creatively? - -5. **Code Quality**: - - Is the code clean, modular, and maintainable? - - Are best practices followed? - ---- - ---- - -## **Stretch Goals** (Optional) -- Add **real-time features** or **advanced UI/UX enhancements**. -- Set up a **CI/CD pipeline** to automate testing and deployment. -- Use **monitoring tools** (e.g., Grafana, Prometheus) for application performance. - ---- - -## **Good Luck!** - -We’re excited to see how you combine technologies, research, and document your journey. Remember, this challenge is about showcasing your **ability to learn, adapt, and deliver**. Focus on creating a well-documented and functional project. - -Happy coding! 🚀 diff --git a/ticket-booking-platform/.gitignore b/ticket-booking-platform/.gitignore new file mode 100644 index 0000000..869f928 --- /dev/null +++ b/ticket-booking-platform/.gitignore @@ -0,0 +1,7 @@ +venv/ +__pycache__/ +*.pyc +.env +.pytest_cache/ +logs/ +*.log diff --git a/ticket-booking-platform/Dockerfile b/ticket-booking-platform/Dockerfile new file mode 100644 index 0000000..0bee37b --- /dev/null +++ b/ticket-booking-platform/Dockerfile @@ -0,0 +1,15 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY requirements.txt . + + +RUN pip install --no-cache-dir --upgrade pip \ + && pip install --no-cache-dir -r requirements.txt + +COPY . . + +EXPOSE 8000 + +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/ticket-booking-platform/README.md b/ticket-booking-platform/README.md new file mode 100644 index 0000000..c5474e7 --- /dev/null +++ b/ticket-booking-platform/README.md @@ -0,0 +1,87 @@ +Here’s the setup-focused `README.md` with project information and technologies used: + +--- + +# Ticket Booking Platform + +## Project Information + +The **Ticket Booking Platform** is a backend system designed to facilitate secure and real-time ticket booking for events. 
It handles high-concurrency scenarios, processes payments securely, and provides real-time updates using an event-driven architecture. + +### Key Objectives: +- **Concurrency Management**: Prevent overselling tickets with a locking mechanism. +- **Secure Payment Processing**: Integrate with Stripe for handling payments. +- **Real-Time Notifications**: Use Kafka to manage updates about ticket availability and payment status. + +--- + +## Technologies Used + +- **FastAPI**: For creating a high-performance and scalable REST API. +- **Redis**: As an in-memory store for ticket locks and concurrency control. +- **PostgreSQL**: For persistent data storage. +- **Stripe API**: For secure payment processing using test mode. +- **Kafka**: For event-driven real-time messaging. +- **Docker**: To containerize the application and its dependencies. +- **Alembic**: For database migrations. +- **Python**: The primary programming language for backend logic. +- **Zookeeper**: To manage Kafka services. + +--- + +## Setup Instructions + +### Prerequisites + +1. Install **Docker** and **Docker Compose**. +2. Clone the repository: + + ```bash + git clone + cd ticket-booking-platform + ``` + +3. Create a `.env` file in the root directory with the following variables: + + ```env + POSTGRES_DB=ticket_booking + POSTGRES_USER=postgres + POSTGRES_PASSWORD=yourpassword + REDIS_PORT=6379 + STRIPE_SECRET_KEY=your_stripe_secret + STRIPE_WEBHOOK_SECRET=your_webhook_secret + ``` + +### Running the Application + +1. Build and start the services using Docker Compose: + + ```bash + docker-compose up --build + ``` + +2. Once started: + - The API is available at: `http://localhost:8000` + - API documentation is accessible at: `http://localhost:8000/docs` + +3. Apply database migrations: + + ```bash + docker exec -it backend alembic upgrade head + ``` + +4. Verify Kafka and Zookeeper are running for message handling. 
+ +--- + +## Stopping the Application + +To stop all running services: + +```bash +docker-compose down +``` + +--- + +This `README.md` provides all necessary information to start the project and includes the technologies used. Let me know if you need more details or additional sections! \ No newline at end of file diff --git a/ticket-booking-platform/__init__.py b/ticket-booking-platform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/alembic.ini b/ticket-booking-platform/alembic.ini new file mode 100644 index 0000000..45a4df6 --- /dev/null +++ b/ticket-booking-platform/alembic.ini @@ -0,0 +1,116 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = postgresql://postgres:ticketpass@db/ticket_booking + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/ticket-booking-platform/app/__init__.py b/ticket-booking-platform/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/api/__init__.py b/ticket-booking-platform/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/api/route_handlers.py b/ticket-booking-platform/app/api/route_handlers.py new file mode 100644 index 0000000..7a5150b --- /dev/null +++ b/ticket-booking-platform/app/api/route_handlers.py @@ -0,0 +1,297 @@ +import uuid +from typing import List +from pydantic import BaseModel +from fastapi import HTTPException, Request, Depends, Query +from fastapi.responses import RedirectResponse + +from app.repositories.redis import get_redis_client +from app.models import ( + ticket as ticket_orm, + order as order_orm, + event as event_orm, + user as user_orm, +) +from sqlalchemy.orm import Session +from app.core.database import get_db +from 
import uuid
from typing import List
from pydantic import BaseModel
from fastapi import HTTPException, Request, Depends, Query
from fastapi.responses import RedirectResponse

from sqlalchemy.orm import Session

from app.core.database import get_db
from app.models import (
    ticket as ticket_orm,
    order as order_orm,
    event as event_orm,
    user as user_orm,
)
from app.models.order import Order
from app.models.ticket import Ticket, TicketStatus
from app.repositories.redis import get_redis_client
from app.repositories.event import EventSqlRepository
from app.repositories.user import UserSqlRepository
from app.repositories.ticket import TicketRepository
from app.repositories.order import OrderRepository
from app.schemas.event_schema import EventCreate, EventUpdate
from app.schemas.user_schema import UserCreate, UserUpdate
from app.services.kafka_producer import KafkaEventProducer
from app.services.stripe import StripeWebhookService, create_stripe_checkout_session
import logging

logger = logging.getLogger(__name__)

# Seconds a per-ticket Redis lock is held while the buyer completes payment.
TICKET_LOCK_TIMEOUT_SECONDS = 300


class EventView:
    """Request handlers for event CRUD endpoints."""

    @classmethod
    def create_event(
        cls, request: Request, event: EventCreate, db: Session = Depends(get_db)
    ):
        """Create an event and pre-create one ticket row per available seat.

        Returns the new event id; ticket rows are what the booking flow
        later locks and sells.
        """
        event_repository = EventSqlRepository(db)
        created_event = event_repository.create_event(event.dict())

        # One ticket row per seat so each seat can be locked independently.
        ticket_repo = TicketRepository(db)
        for _ in range(event.total_tickets):
            ticket_repo.create_ticket(event_id=created_event.id)

        return {"event_id": created_event.id, "message": "Event created successfully."}

    @classmethod
    def update_event(
        cls, event_id: uuid.UUID, event: EventUpdate, db: Session = Depends(get_db)
    ):
        """Update an existing event's details."""
        event_repository = EventSqlRepository(db)
        updated_event = event_repository.update_event(event_id, event.dict())
        return {"event_id": updated_event.id, "message": "Event updated successfully."}

    @classmethod
    def delete_event(cls, event_id: uuid.UUID, db: Session = Depends(get_db)):
        """Delete an event by id."""
        event_repository = EventSqlRepository(db)
        event_repository.delete_event(event_id)

    @classmethod
    async def list_events(
        cls,
        request: Request,
        page: int = Query(1, ge=1),  # 1-based page number
        size: int = Query(10, ge=1, le=100),  # page size, capped at 100
        db: Session = Depends(get_db),
    ):
        """Return a paginated list of events plus the total count."""
        event_repo = EventSqlRepository(db)
        events, total_events = event_repo.get_events(page, size)
        return {
            "page": page,
            "size": size,
            "total_events": total_events,
            "events": events,
        }


class UserView:
    """Request handlers for user management endpoints."""

    @classmethod
    async def list_users(
        cls,
        request: Request,
        page: int = Query(1, ge=1),  # 1-based page number
        size: int = Query(10, ge=1, le=100),  # page size, capped at 100
        db: Session = Depends(get_db),
    ):
        """Return a paginated list of users plus the total count."""
        user_repo = UserSqlRepository(db)
        users, total_users = user_repo.get_users(page, size)
        return {
            "page": page,
            "size": size,
            "total_users": total_users,
            "users": users,
        }

    @classmethod
    async def create_user(cls, user: UserCreate, db: Session = Depends(get_db)):
        """Create a user, rejecting duplicate email addresses with 400."""
        user_repository = UserSqlRepository(db)

        if user_repository.get_user_by_email(user.email):
            raise HTTPException(status_code=400, detail="Email already registered.")

        return user_repository.create_user(user)

    @classmethod
    async def update_user(
        cls, user_id: uuid.UUID, user: UserUpdate, db: Session = Depends(get_db)
    ):
        """Update a user's details; 404 if the user does not exist."""
        user_repository = UserSqlRepository(db)

        updated_user = user_repository.update_user(user_id, user)
        if not updated_user:
            raise HTTPException(status_code=404, detail="User not found.")

        return updated_user


class BookingView:
    """Request handlers for the ticket-booking flow."""

    @classmethod
    def book_ticket(
        cls,
        request: Request,
        event_id: uuid.UUID,
        user_id: uuid.UUID,
        quantity: int,
        db: Session = Depends(get_db),
    ):
        """Reserve `quantity` tickets for an event and start a Stripe checkout.

        Concurrency: each ticket is guarded by a per-ticket Redis lock so two
        buyers cannot reserve the same seat. Locks auto-expire after
        TICKET_LOCK_TIMEOUT_SECONDS; on ANY failure after acquisition they are
        released explicitly (BUGFIX: previously the HTTPException path re-raised
        without releasing, leaving seats blocked until the Redis timeout).

        Returns the pending order id and the Stripe checkout session URL.
        """
        ticket_repo = TicketRepository(db)
        event_repo = EventSqlRepository(db)
        order_repo = OrderRepository(db)
        redis_client = get_redis_client()

        tickets = ticket_repo.get_available_tickets(event_id)
        logger.info("Fetched %d available tickets for event %s", len(tickets), event_id)

        if len(tickets) < quantity:
            raise HTTPException(status_code=400, detail="Not enough tickets available.")

        locked_tickets = []
        try:
            # Acquire a Redis lock per ticket; fail fast on contention.
            for ticket in tickets[:quantity]:
                lock_key = f"ticket_{ticket.id}_lock"
                if redis_client.acquire_lock(
                    lock_key, timeout=TICKET_LOCK_TIMEOUT_SECONDS
                ):
                    locked_tickets.append(ticket)
                else:
                    logger.warning("Lock failed for ticket ID: %s", ticket.id)
                    raise HTTPException(
                        status_code=423,
                        detail="Some tickets are currently locked. Try again later.",
                    )

            # Mirror the reservation in the database.
            ticket_repo.lock_tickets([ticket.id for ticket in locked_tickets])

            event = event_repo.get_event_by_id(event_id)

            stripe_session, stripe_tracking_id = create_stripe_checkout_session(
                amount=event.price * quantity,  # total charge for the booking
                quantity=quantity,
                product_name=event.name,
                currency="usd",
            )
            logger.info("Stripe session created: %s", stripe_tracking_id)

            # Pending order keyed by the Stripe tracking id so the webhook
            # can find it later.
            order = order_repo.create_order(
                user_id=user_id,
                event_id=event_id,
                stripe_session_id=stripe_tracking_id,
            )
            logger.info("Creating order %s for tickets %s", order.id, locked_tickets)
            order_repo.create_tickets_for_order(
                order_id=order.id,
                ticket_ids=[ticket.id for ticket in locked_tickets],
            )

            # Broadcast the availability change to consumers.
            kafka_producer = KafkaEventProducer()
            kafka_producer.publish_ticket_availability(
                event_id=event_id, status="locked", tickets=quantity
            )

            return {
                "order_id": str(order.id),
                "message": "Order created. Complete payment to confirm booking.",
                "url": stripe_session.url,  # checkout link for the client
            }
        except HTTPException as http_exc:
            # BUGFIX: release locks already acquired before re-raising.
            cls._release_ticket_locks(redis_client, locked_tickets)
            logger.error("HTTP Exception occurred: %s", http_exc.detail)
            raise
        except Exception as e:
            logger.error("An error occurred: %s", e)
            cls._release_ticket_locks(redis_client, locked_tickets)
            raise HTTPException(
                status_code=500,
                detail="An unexpected error occurred. Please try again.",
            )

    @staticmethod
    def _release_ticket_locks(redis_client, tickets):
        """Best-effort release of the per-ticket Redis locks."""
        for ticket in tickets:
            redis_client.release_lock(f"ticket_{ticket.id}_lock")


class StripeWebhookView:
    """Request handler for Stripe payment webhooks."""

    @classmethod
    async def handle_payment_webhook(
        cls, request: Request, db: Session = Depends(get_db)
    ):
        """Confirm or fail an order based on a Stripe payment_intent event.

        On success: mark tickets SOLD and notify via Kafka.
        On failure/cancel: release the tickets and their Redis locks, then
        notify via Kafka.

        SECURITY NOTE(review): the payload is currently trusted as-is; the
        Stripe-Signature header should be verified (stripe.Webhook.
        construct_event with STRIPE_WEBHOOK_SECRET) before acting on it.
        """
        payload = await request.json()
        event_type = payload.get("type")
        data = payload.get("data", {}).get("object", {})

        # tracking_id is set in metadata when the checkout session is created.
        stripe_tracking_id = data.get("metadata", {}).get("tracking_id")
        total_amount = data.get("amount")

        logger.info(
            "Received webhook event %s for tracking id %s",
            event_type,
            stripe_tracking_id,
        )

        redis_client = get_redis_client()
        service = StripeWebhookService(db)
        order_repo = OrderRepository(db)
        ticket_repo = TicketRepository(db)
        kafka_producer = KafkaEventProducer()

        order = order_repo.get_order_by_stripe_session_id(stripe_tracking_id)
        if not order:
            raise HTTPException(status_code=404, detail="Order not found.")

        ticket_ids = order_repo.get_tickets_for_order(order.id)
        if not ticket_ids:
            raise HTTPException(status_code=404, detail="Tickets not found.")

        if event_type == "payment_intent.succeeded":
            logger.info("Payment succeeded for order: %s", order.id)
            ticket_repo.update_ticket_status(ticket_ids, ticket_orm.TicketStatus.SOLD)
            service.handle_payment_intent_succeeded(stripe_tracking_id)
            kafka_producer.publish_payment_notification(
                order_id=order.id,
                status="success",
                amount=total_amount,
                user_id=order.user_id,
            )
        elif event_type in (
            "payment_intent.payment_failed",
            "payment_intent.cancelled",
        ):
            # Free the seats and their Redis locks so they can be re-sold.
            ticket_repo.release_tickets(ticket_ids)
            logger.info("Releasing locks for tickets: %s", ticket_ids)
            for ticket_id in ticket_ids:
                redis_client.release_lock(f"ticket_{ticket_id}_lock")
                logger.info("Lock released for ticket: %s", ticket_id)

            logger.info("Payment failed for order: %s", order.id)
            service.handle_payment_intent_failed(stripe_tracking_id)
            kafka_producer.publish_payment_notification(
                order_id=order.id,
                status="failed",
                amount=total_amount,
                user_id=order.user_id,
            )
        else:
            raise HTTPException(status_code=400, detail="Unsupported event type.")
        return {"message": "Webhook handled successfully."}
available.", +) + +router.add_api_route( + path="/events/{event_id}/book/", + endpoint=BookingView.book_ticket, + methods=["POST"], + tags=["Ticket Booking"], + summary="Book tickets for an event", + description=( + "Locks tickets to prevent overselling and returns a Stripe payment session " + "link or client secret for completing the payment." + ), +) + + +# User Management Routes + +# Route for listing users with pagination +router.add_api_route( + path="/users/", + endpoint=UserView.list_users, + methods=["GET"], + tags=["User Management"], + summary="List all users with pagination", + description="Retrieve a paginated list of users, providing page number and size.", +) + + +router.add_api_route( + path="/users/", + endpoint=UserView.create_user, + methods=["POST"], + tags=["User Management"], + summary="Create a new user", + description="Create a new user with a username, email, and password.", +) + +router.add_api_route( + path="/users/{user_id}/", + endpoint=UserView.update_user, + methods=["PUT"], + tags=["User Management"], + summary="Update user details", + description="Update an existing user's username, email, or password.", +) + + +router.add_api_route( + path="/webhook/payment/", + endpoint=StripeWebhookView.handle_payment_webhook, + methods=["POST"], + tags=["Payment Processing"], + summary="Stripe webhook handler", + description="Handle Stripe webhook events to confirm or fail payment.", +) diff --git a/ticket-booking-platform/app/core/__init__.py b/ticket-booking-platform/app/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/core/config.py b/ticket-booking-platform/app/core/config.py new file mode 100644 index 0000000..3f85b31 --- /dev/null +++ b/ticket-booking-platform/app/core/config.py @@ -0,0 +1,53 @@ +from pydantic_settings import BaseSettings + + +class Settings(BaseSettings): + PROJECT_NAME: str = "Ticket Booking Platform" + + POSTGRES_DB: str + POSTGRES_HOST: str + POSTGRES_PORT: str + 
POSTGRES_USER: str + POSTGRES_PASSWORD: str + + SECRET_KEY: str + ALGORITHM: str = "HS256" + ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 + + EMAIL_HOST: str + EMAIL_PORT: str + EMAIL_HOST_USER: str + EMAIL_HOST_PASSWORD: str + DEFAULT_FROM_EMAIL: str + + class Config: + env_file = ".env" + extra = "allow" # This allows extra inputs without validation errors + + @property + def DATABASE_URL(self) -> str: + return f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}" + + +settings = Settings() + + +class KafkaSettings(BaseSettings): + KAFKA_HOST: str = "kafka" + KAFKA_PORT: str = "29092" + ticket_availability_topic: str = "ticket_availability" + payment_notifications_topic: str = "payment_notifications" + notification_service_group: str = "notification_service" + waitlist_service_group: str = "waitlist_service" + + class Config: + env_file = ".env" + env_file_encoding = "utf-8" + extra = "allow" + + @property + def bootstrap_servers(self) -> str: + return f"{self.KAFKA_HOST}:{self.KAFKA_PORT}" + + +kafka_settings = KafkaSettings() diff --git a/ticket-booking-platform/app/core/database.py b/ticket-booking-platform/app/core/database.py new file mode 100644 index 0000000..8e4255d --- /dev/null +++ b/ticket-booking-platform/app/core/database.py @@ -0,0 +1,17 @@ +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +from app.core.config import settings + +engine = create_engine(settings.DATABASE_URL) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +Base = declarative_base() + + +def get_db(): + db = SessionLocal() + try: + yield db + finally: + db.close() diff --git a/ticket-booking-platform/app/main.py b/ticket-booking-platform/app/main.py new file mode 100644 index 0000000..74077c6 --- /dev/null +++ b/ticket-booking-platform/app/main.py @@ -0,0 +1,55 @@ +import threading +from fastapi import 
import threading
from fastapi import FastAPI
from app.core.database import engine, Base
from app.api.routes import router as api_router
from app.services.kafka_consumer import WaitlistProcessor, NotificationService


app = FastAPI()

# Create database tables (no-op for tables that already exist).
Base.metadata.create_all(bind=engine)

# Include routers
app.include_router(api_router)


notification_service = NotificationService()
waitlist_processor = WaitlistProcessor()

# Kafka consumer threads, tracked so shutdown can join them.
threads = []


@app.on_event("startup")
def startup_event():
    """Start one daemon thread per Kafka consumer."""
    notification_thread = threading.Thread(
        target=notification_service.start_consuming, daemon=True
    )
    threads.append(notification_thread)
    notification_thread.start()

    waitlist_thread = threading.Thread(
        target=waitlist_processor.start_consuming, daemon=True
    )
    threads.append(waitlist_thread)
    waitlist_thread.start()

    print("Kafka consumers started.")


@app.on_event("shutdown")
def shutdown_event():
    """Close Kafka consumers and wait briefly for their threads to exit."""
    print("Shutting down Kafka consumers...")
    notification_service.consumer.close()
    waitlist_processor.consumer.close()
    for thread in threads:
        # Bounded join: a consumer thread may be blocked inside poll(); an
        # unbounded join() here could hang application shutdown indefinitely.
        # The threads are daemons, so the process still exits cleanly.
        thread.join(timeout=5.0)


@app.get("/")
def read_root():
    return {"message": "Ticket Booking Platform"}
__tablename__ = "events" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True) + name = Column(String, nullable=False) + description = Column(String, nullable=True) + date = Column(DateTime, nullable=False) + venue = Column(String, nullable=False) + price = Column(Float, nullable=False) + total_tickets = Column(Integer, nullable=False) + + tickets = relationship("Ticket", back_populates="event") + orders = relationship("Order", back_populates="event") diff --git a/ticket-booking-platform/app/models/order.py b/ticket-booking-platform/app/models/order.py new file mode 100644 index 0000000..69a7ebd --- /dev/null +++ b/ticket-booking-platform/app/models/order.py @@ -0,0 +1,39 @@ +from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Enum +from sqlalchemy.orm import relationship +from app.core.database import Base +import enum +from sqlalchemy.dialects.postgresql import UUID +import uuid +from sqlalchemy import Index + + +class OrderStatus(enum.Enum): + PENDING = "PENDING" + COMPLETED = "COMPLETED" + CANCELLED = "CANCELLED" + + +class Order(Base): + __tablename__ = "orders" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True) + user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False) + event_id = Column(UUID(as_uuid=True), ForeignKey("events.id"), nullable=False) + status = Column(Enum(OrderStatus), default=OrderStatus.PENDING) + stripe_session_id = Column(String, nullable=True) + user = relationship("User", back_populates="orders") + event = relationship("Event", back_populates="orders") + order_tickets = relationship("OrderTicket", back_populates="order") + created_at = Column(DateTime) + + +class OrderTicket(Base): + __tablename__ = "order_tickets" + + order_id = Column(UUID, ForeignKey("orders.id"), primary_key=True) + ticket_id = Column(UUID, ForeignKey("tickets.id"), primary_key=True) + order = relationship("Order", back_populates="order_tickets") + ticket = 
relationship("Ticket", back_populates="order_tickets") + + # Add index for performance + __table_args__ = (Index("idx_order_ticket", "order_id", "ticket_id"),) diff --git a/ticket-booking-platform/app/models/ticket.py b/ticket-booking-platform/app/models/ticket.py new file mode 100644 index 0000000..0fbb8d8 --- /dev/null +++ b/ticket-booking-platform/app/models/ticket.py @@ -0,0 +1,24 @@ +from sqlalchemy import Column, Integer, String, ForeignKey, Enum +from sqlalchemy.orm import relationship +from app.core.database import Base +import enum +from sqlalchemy.dialects.postgresql import UUID +import uuid + + +class TicketStatus(enum.Enum): + AVAILABLE = "AVAILABLE" + LOCKED = "LOCKED" + SOLD = "SOLD" + + +class Ticket(Base): + __tablename__ = "tickets" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True) + event_id = Column(UUID(as_uuid=True), ForeignKey("events.id"), nullable=False) + status = Column(Enum(TicketStatus), default=TicketStatus.AVAILABLE) + + event = relationship("Event", back_populates="tickets") + + order_tickets = relationship("OrderTicket", back_populates="ticket") diff --git a/ticket-booking-platform/app/models/user.py b/ticket-booking-platform/app/models/user.py new file mode 100644 index 0000000..82ea380 --- /dev/null +++ b/ticket-booking-platform/app/models/user.py @@ -0,0 +1,17 @@ +from sqlalchemy import Column, Integer, String, DateTime +from sqlalchemy.orm import relationship +from app.core.database import Base +from sqlalchemy.dialects.postgresql import UUID +import uuid + + +class User(Base): + __tablename__ = "users" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True) + + username = Column(String, unique=True, index=True) + email = Column(String, unique=True, index=True) + hashed_password = Column(String) + created_at = Column(DateTime) + orders = relationship("Order", back_populates="user") diff --git a/ticket-booking-platform/app/repositories/__init__.py 
b/ticket-booking-platform/app/repositories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/repositories/event.py b/ticket-booking-platform/app/repositories/event.py new file mode 100644 index 0000000..c1731c5 --- /dev/null +++ b/ticket-booking-platform/app/repositories/event.py @@ -0,0 +1,64 @@ +from sqlalchemy.orm import Session +from sqlalchemy.exc import IntegrityError +from app.models import event as event_orm +from fastapi import HTTPException +import uuid + + +class EventSqlRepository: + def __init__(self, session: Session): + self.session = session + + def create_event(self, event_data: dict) -> event_orm.Event: + event = event_orm.Event( + id=uuid.uuid4(), # Generate a new UUID for the event + name=event_data["name"], + description=event_data.get("description"), + date=event_data["date"], + venue=event_data["venue"], + price=event_data["price"], + total_tickets=event_data["total_tickets"], + ) + self.session.add(event) + try: + self.session.commit() + self.session.refresh( + event + ) # Refresh the instance to get the latest data from the database + return event + except IntegrityError: + self.session.rollback() # Rollback the session on error + raise HTTPException( + status_code=400, detail="Event with this name already exists." 
from sqlalchemy.orm import Session
from sqlalchemy import select, update, insert, delete
from uuid import UUID
from datetime import datetime, timezone
import logging
from app.models.order import Order, OrderStatus, OrderTicket
from sqlalchemy.dialects.postgresql import insert

logger = logging.getLogger(__name__)


class OrderRepository:
    """Data-access helpers for Order and OrderTicket rows."""

    def __init__(self, db: Session):
        self.db = db

    def create_order(
        self, user_id: UUID, event_id: UUID, stripe_session_id: str = None
    ) -> Order:
        """Create a new order (status defaults to PENDING on the model).

        Sets ``created_at`` explicitly: the column has no default, so it was
        previously persisted as NULL.
        """
        order = Order(
            user_id=user_id,
            event_id=event_id,
            stripe_session_id=stripe_session_id,
            created_at=datetime.now(timezone.utc),
        )
        self.db.add(order)
        self.db.commit()
        self.db.refresh(order)
        logger.info(
            f"Order created: {order.id} for user {user_id} and event {event_id}."
        )
        return order

    def get_order_by_id(self, order_id: UUID) -> Order:
        """Retrieve an order by its ID; returns None when absent."""
        query = select(Order).where(Order.id == order_id)
        order = self.db.execute(query).scalar_one_or_none()
        if order:
            logger.info(f"Order retrieved: {order.id}.")
        else:
            logger.warning(f"Order not found: {order_id}.")
        return order

    def update_order_status(self, order_id: UUID, status) -> Order:
        """Update the status of an order (accepts an OrderStatus value)."""
        query = update(Order).where(Order.id == order_id).values(status=status)
        self.db.execute(query)
        self.db.commit()
        logger.info(f"Order status updated: {order_id} to {status}.")
        return self.get_order_by_id(order_id)  # Return the updated order

    def get_orders_by_user_id(self, user_id: UUID):
        """Retrieve all orders for a specific user."""
        query = select(Order).where(Order.user_id == user_id)
        orders = self.db.execute(query).scalars().all()
        logger.info(f"Retrieved {len(orders)} orders for user {user_id}.")
        return orders

    def get_orders_by_event_id(self, event_id: UUID):
        """Retrieve all orders for a specific event."""
        query = select(Order).where(Order.event_id == event_id)
        orders = self.db.execute(query).scalars().all()
        logger.info(f"Retrieved {len(orders)} orders for event {event_id}.")
        return orders

    def get_order_by_stripe_session_id(self, stripe_session_id: str) -> Order:
        """Look up an order by the tracking id stored at checkout time."""
        return (
            self.db.query(Order)
            .filter(Order.stripe_session_id == stripe_session_id)
            .first()
        )

    def create_tickets_for_order(self, order_id: UUID, ticket_ids: list[UUID]):
        """Create order-ticket associations in one bulk INSERT."""
        if not ticket_ids:
            # Nothing to insert; avoid an INSERT with an empty VALUES list.
            return True
        order_tickets = [
            {"order_id": order_id, "ticket_id": ticket_id} for ticket_id in ticket_ids
        ]
        self.db.execute(insert(OrderTicket).values(order_tickets))
        self.db.commit()
        logger.info(
            f"Created {len(ticket_ids)} order-ticket associations for order {order_id}."
        )
        return True

    def get_tickets_for_order(self, order_id: UUID):
        """Retrieve ticket IDs directly to avoid loading full rows."""
        query = select(OrderTicket.ticket_id).where(OrderTicket.order_id == order_id)
        tickets = [row[0] for row in self.db.execute(query).fetchall()]
        logger.info(f"Retrieved {len(tickets)} tickets for order {order_id}.")
        return tickets
from sqlalchemy.orm import Session
from sqlalchemy import select, update, insert, and_, or_
from uuid import UUID
from app.models.ticket import Ticket, TicketStatus
from app.models.order import Order, OrderStatus
from typing import List
import logging


class TicketRepository:
    """Data-access helpers for Ticket rows."""

    def __init__(self, db: Session):
        self.db = db

    def get_available_tickets(self, event_id: UUID) -> List[Ticket]:
        """Fetch tickets that can still be offered for an event.

        NOTE(review): LOCKED tickets are included alongside AVAILABLE ones —
        presumably because the Redis lock (with its TTL) is the authoritative
        guard during checkout; confirm against the booking flow.

        Args:
            event_id (UUID): The ID of the event.

        Returns:
            List[Ticket]: Matching tickets, or [] on query error.
        """
        try:
            query = select(Ticket).where(
                and_(
                    Ticket.event_id == event_id,
                    or_(
                        Ticket.status == TicketStatus.AVAILABLE,
                        Ticket.status == TicketStatus.LOCKED,
                    ),
                )
            )
            tickets = self.db.execute(query).scalars().all()
            logging.info(
                f"Retrieved {len(tickets)} available tickets for event {event_id}."
            )
            return tickets
        except Exception as e:
            logging.exception(
                f"Error fetching available tickets for event {event_id}: {e}"
            )
            return []  # Degrade to "nothing available" on error

    def update_ticket_status(self, ticket_ids: list[UUID], status: TicketStatus):
        """Set ``status`` on all given tickets with a single bulk UPDATE.

        Added because the Stripe webhook handler calls this method; it did
        not previously exist on the repository (AttributeError at runtime).
        """
        if not ticket_ids:
            return
        self.db.execute(
            update(Ticket).where(Ticket.id.in_(ticket_ids)).values(status=status)
        )
        self.db.commit()

    def lock_tickets(self, ticket_ids: list[UUID]):
        """Mark tickets LOCKED (one statement instead of one per ticket)."""
        self.update_ticket_status(ticket_ids, TicketStatus.LOCKED)

    def release_tickets(self, ticket_ids: list[UUID]):
        """Mark tickets AVAILABLE again (single bulk statement)."""
        self.update_ticket_status(ticket_ids, TicketStatus.AVAILABLE)

    def create_ticket(
        self,
        event_id: UUID,
    ):
        """Insert one new AVAILABLE ticket for the event."""
        query = insert(Ticket).values(
            event_id=event_id,
            status=TicketStatus.AVAILABLE,  # New tickets are available by default
        )
        self.db.execute(query)
        self.db.commit()

    def get_ticket_status(self, ticket_id: UUID):
        """Return the Ticket row (or None); callers read ``.status`` off it."""
        result = self.db.execute(select(Ticket).where(Ticket.id == ticket_id))
        return result.scalar_one_or_none()

    def update_ticket_status_by_event(self, event_id: UUID, status: TicketStatus):
        """Set ``status`` on EVERY ticket belonging to the event."""
        self.db.query(Ticket).filter(Ticket.event_id == event_id).update(
            {"status": status}
        )
        self.db.commit()

    def get_ticket_ids_by_order(self, order_id: UUID):
        """Return ticket IDs for the order's event.

        NOTE(review): this resolves the order's event and returns ALL ticket
        ids of that event, not only the tickets attached to the order — if
        callers expect the order's own tickets, use
        ``OrderRepository.get_tickets_for_order`` instead.
        """
        try:
            event_id = self.db.execute(
                select(Order.event_id).where(Order.id == order_id)
            ).scalar_one_or_none()
            if not event_id:
                return []  # Order doesn't exist

            rows = self.db.execute(
                select(Ticket.id).where(Ticket.event_id == event_id)
            ).fetchall()
            return [ticket_id for (ticket_id,) in rows]
        except Exception as e:
            # Log via the logging framework instead of print() for consistency.
            logging.exception(f"Error fetching ticket IDs for order {order_id}: {e}")
            return []
self.db.commit() # Commit the transaction + self.db.refresh(db_user) # Refresh to get updated data + return db_user + return None + + def get_user_by_email(self, email: str) -> User: + stmt = select(User).where(User.email == email) + result = self.db.execute(stmt) + return result.scalar_one_or_none() + + def get_user(self, user_id: UUID) -> User: + return self.db.get(User, user_id) + + def get_users(self, page: int, size: int): + query = self.db.query(User) + total_users = query.count() + users = query.offset((page - 1) * size).limit(size).all() # Pagination logic + return users, total_users diff --git a/ticket-booking-platform/app/schemas/__init__.py b/ticket-booking-platform/app/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/schemas/event_schema.py b/ticket-booking-platform/app/schemas/event_schema.py new file mode 100644 index 0000000..631a913 --- /dev/null +++ b/ticket-booking-platform/app/schemas/event_schema.py @@ -0,0 +1,27 @@ +from pydantic import BaseModel +from datetime import datetime +from typing import Optional + + +class EventUpdate(BaseModel): + name: Optional[str] + description: Optional[str] + date: Optional[datetime] + venue: Optional[str] + price: Optional[float] + total_tickets: Optional[int] # Total tickets is optional for updates + + class Config: + orm_mode = True # Allows compatibility with ORM models + + +class EventCreate(BaseModel): + name: str + description: str = None + date: datetime + venue: str + price: float + total_tickets: int + + class Config: + orm_mode = True # Allows compatibility with ORM models diff --git a/ticket-booking-platform/app/schemas/user_schema.py b/ticket-booking-platform/app/schemas/user_schema.py new file mode 100644 index 0000000..83ce8ee --- /dev/null +++ b/ticket-booking-platform/app/schemas/user_schema.py @@ -0,0 +1,20 @@ +from pydantic import BaseModel, EmailStr +from datetime import datetime + + +class UserCreate(BaseModel): + username: str + email: 
EmailStr + password: str + + class Config: + orm_mode = True + + +class UserUpdate(BaseModel): + username: str = None + email: EmailStr = None + password: str = None + + class Config: + orm_mode = True diff --git a/ticket-booking-platform/app/services/__init__.py b/ticket-booking-platform/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/services/kafka_consumer.py b/ticket-booking-platform/app/services/kafka_consumer.py new file mode 100644 index 0000000..3570c9a --- /dev/null +++ b/ticket-booking-platform/app/services/kafka_consumer.py @@ -0,0 +1,100 @@ +from kafka import KafkaConsumer +import json +import logging +from app.core.config import kafka_settings +from app.services.mail import MailService + + +class NotificationService: + def __init__(self): + self.consumer = KafkaConsumer( + kafka_settings.payment_notifications_topic, + bootstrap_servers=kafka_settings.bootstrap_servers, + auto_offset_reset="earliest", + group_id=kafka_settings.notification_service_group, + value_deserializer=lambda x: json.loads( + x.decode("utf-8") + ), # Ensure decoding + enable_auto_commit=True, + max_poll_interval_ms=300000, + max_poll_records=10, + fetch_max_bytes=1048576, + ) + self.mail_service = MailService() + logging.info("NotificationService initialized and consumer started.") + + def start_consuming(self): + logging.info("NotificationService: Consumer started.") + + try: + for message in self.consumer: + logging.info(f"NotificationService received: {message.value}") + + order_id = message.value["order_id"] + status = message.value["status"] + amount = message.value["amount"] + + if status == "success": + self.send_booking_confirmation_email(order_id, amount) + elif status == "failed": + self.send_payment_failure_notification(order_id) + + except Exception as e: + raise e + finally: + self.consumer.close() # Close the consumer gracefully + + def send_booking_confirmation_email(self, order_id, amount): + subject = 
f"Booking Confirmation - Order {order_id}" + body = f"Thank you for your payment of ${amount:.2f}. Your booking (Order ID: {order_id}) has been confirmed." + self.mail_service.send_email( + recipient="thapa.qw12@gmail.com", # Replace with actual recipient + subject=subject, + body=body, + ) + + def send_payment_failure_notification(self, order_id): + logging.info(f"Sending payment failure notification for Order ID ") + subject = f"Payment Failure - Order {order_id}" + body = f"We regret to inform you that your payment for Order ID {order_id} has failed. Please try again." + self.mail_service.send_email( + recipient="thapa.qw12@gmail.com", # Replace with actual recipient + subject=subject, + body=body, + ) + + +class WaitlistProcessor: + def __init__(self): + self.consumer = KafkaConsumer( + kafka_settings.ticket_availability_topic, + bootstrap_servers=kafka_settings.bootstrap_servers, + auto_offset_reset="earliest", + group_id=kafka_settings.waitlist_service_group, + value_deserializer=lambda x: json.loads(x.decode("utf-8")), + ) + logging.info("WaitlistProcessor initialized and ready to consume messages.") + + def start_consuming(self): + logging.info("WaitlistProcessor: Consumer started.") + try: + for message in self.consumer: + logging.info(f"WaitlistProcessor received: {message.value}") + + event_id = message.value["event_id"] + status = message.value["status"] + tickets_count = message.value["tickets_count"] + + if status == "available" and tickets_count > 0: + logging.info(f"Notifying waitlist users for event {event_id}.") + self.notify_waitlist_users(event_id, tickets_count) + except Exception as e: + logging.error(f"Error in WaitlistProcessor: {e}") + + def notify_waitlist_users(self, event_id, available_tickets): + logging.info( + f"Notifying waitlist users for event {event_id}. {available_tickets} tickets now available." + ) + print( + f"Notifying waitlist users for event {event_id}. {available_tickets} tickets now available." 
import logging
from kafka import KafkaProducer
import json
import uuid
from datetime import datetime
from app.core.config import kafka_settings

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class KafkaEventProducer:
    """Publishes ticket-availability and payment-notification events."""

    def __init__(self):
        # value_serializer JSON-encodes every payload, so all message fields
        # must be JSON-serializable (hence the str() conversions below).
        self.producer = KafkaProducer(
            bootstrap_servers=kafka_settings.bootstrap_servers,
            value_serializer=lambda v: json.dumps(v).encode("utf-8"),
        )
        logger.info(
            "KafkaEventProducer initialized with bootstrap servers: %s",
            kafka_settings.bootstrap_servers,
        )

    def publish_ticket_availability(
        self, event_id: uuid.UUID, status: str, tickets: int
    ):
        """Publish a ticket-availability change for an event."""
        message = {
            "event_id": str(event_id),
            "status": status,
            "tickets_count": tickets,
            # NOTE(review): naive local time — consider UTC for consistency.
            "timestamp": str(datetime.now()),
        }
        topic = kafka_settings.ticket_availability_topic
        self.producer.send(topic, message)
        self.producer.flush()
        logger.info(
            "Published ticket availability message to topic '%s': %s", topic, message
        )

    def publish_payment_notification(
        self, order_id: uuid.UUID, status: str, amount: float, user_id: uuid.UUID
    ):
        """Publish a payment success/failure notification for an order."""
        message = {
            "order_id": str(order_id),
            "status": status,
            "amount": amount,
            # BUG FIX: user_id is a uuid.UUID; json.dumps raised TypeError
            # when serializing the raw UUID. Stringify like the other ids.
            "user_id": str(user_id),
            "timestamp": str(datetime.now()),
        }
        topic = kafka_settings.payment_notifications_topic
        self.producer.send(topic, message)
        self.producer.flush()
        logger.info(
            "Published payment notification message to topic '%s': %s", topic, message
        )
import EmailMessage +from app.core.config import settings +import logging + + +class MailService: + def __init__(self): + self.smtp_host = settings.EMAIL_HOST + self.smtp_port = settings.EMAIL_PORT + self.smtp_username = settings.EMAIL_HOST_USER + self.smtp_password = settings.EMAIL_HOST_PASSWORD + self.smtp_from_email = settings.DEFAULT_FROM_EMAIL + + def send_email(self, recipient, subject, body): + try: + msg = EmailMessage() + msg["From"] = self.smtp_from_email + msg["To"] = recipient + msg["Subject"] = subject + msg.set_content(body) + + # Connect to the SMTP server and send the email + with smtplib.SMTP(self.smtp_host, self.smtp_port) as server: + server.starttls() # Upgrade connection to secure + server.login(self.smtp_username, self.smtp_password) + server.send_message(msg) + + logging.info(f"Email sent to {recipient} with subject: {subject}") + except Exception as e: + print(f"Failed to send email to {recipient}. Error: {e}") diff --git a/ticket-booking-platform/app/services/stripe.py b/ticket-booking-platform/app/services/stripe.py new file mode 100644 index 0000000..0981a4c --- /dev/null +++ b/ticket-booking-platform/app/services/stripe.py @@ -0,0 +1,90 @@ +import os +import uuid +import stripe +from app.models.ticket import TicketStatus +from app.repositories.order import OrderStatus +from app.repositories.order import OrderRepository +from app.repositories.ticket import TicketRepository +from sqlalchemy.orm import Session + + +stripe.api_key = os.getenv( + "STRIPE_SECRET_KEY", + "", +) + + +def create_stripe_checkout_session( + amount, + quantity, + product_name, + currency, + success_url="https://www.booking-ticket.com/success", + cancel_url="https://www.booking-ticket.com/cancel", +): + try: + # Create a checkout session + price_in_cents = int(amount * 100) + + tracking_id = uuid.uuid4() + + session = stripe.checkout.Session.create( +
payment_method_types=["card"], # Specify the payment methods + line_items=[ + { + "price_data": { + "currency": currency, + "product_data": { + "name": product_name, + }, + "unit_amount": price_in_cents, # Amount in cents + }, + "quantity": quantity, + } + ], + mode="payment", + success_url=success_url, + cancel_url=cancel_url, + metadata={ + "tracking_id": str(tracking_id), + "this_is_a_test_key": "this_is_a_test_value", + }, + payment_intent_data={ + "metadata": { + "tracking_id": str(tracking_id), + } + }, + ) + retrieved_session = stripe.checkout.Session.retrieve(session.id) + payment_intent_id = retrieved_session.payment_intent + + print( + f"Stripe Checkout session created: {session.id}, {payment_intent_id}", + flush=True, + ) + + return session, tracking_id + except Exception as e: + raise e + + +class StripeWebhookService: + def __init__(self, db: Session): + self.order_repository = OrderRepository(db) + self.ticket_repository = TicketRepository(db) + + def handle_payment_intent_succeeded(self, payment_intent_id: str): + order = self.order_repository.get_order_by_stripe_session_id(payment_intent_id) + if order: + self.order_repository.update_order_status(order.id, OrderStatus.COMPLETED) + self.ticket_repository.update_ticket_status_by_event( + order.event_id, TicketStatus.SOLD + ) + + def handle_payment_intent_failed(self, payment_intent_id: str): + order = self.order_repository.get_order_by_stripe_session_id(payment_intent_id) + if order: + self.order_repository.update_order_status(order.id, OrderStatus.CANCELLED) + self.ticket_repository.update_ticket_status_by_event( + order.event_id, TicketStatus.AVAILABLE + ) diff --git a/ticket-booking-platform/app/services/utils.py b/ticket-booking-platform/app/services/utils.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/utils/__init__.py b/ticket-booking-platform/app/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/ticket-booking-platform/app/utils/redis_utils.py b/ticket-booking-platform/app/utils/redis_utils.py new file mode 100644 index 0000000..b3ab991 --- /dev/null +++ b/ticket-booking-platform/app/utils/redis_utils.py @@ -0,0 +1,48 @@ +import redis +import time +import uuid + + +class RedisClient: + def __init__(self, host="redis", port=6379, db=0): + self.redis_client = redis.Redis( + host=host, port=port, db=db, decode_responses=True + ) + + def acquire_lock(self, lock_name, timeout=10): + """ + Acquire a distributed lock using Redis + """ + # Generate a unique lock identifier + lock_value = str(uuid.uuid4()) + + # Try to set the lock with an expiration + if self.redis_client.set(lock_name, lock_value, nx=True, ex=timeout): + return lock_value + return None + + def release_lock(self, lock_name, lock_value=None): + """ + Release a distributed lock + """ + # If no specific lock_value is provided, just delete the lock + if lock_value is None: + return self.redis_client.delete(lock_name) + + # Ensure we only release the lock if it matches the original lock value + lua_script = """ + if redis.call('get', KEYS[1]) == ARGV[1] then + return redis.call('del', KEYS[1]) + else + return 0 + end + """ + return self.redis_client.eval(lua_script, 1, lock_name, lock_value) + + +# Global Redis client instance +redis_client = RedisClient() + + +def get_redis_client(): + return redis_client diff --git a/ticket-booking-platform/docker-compose.yaml b/ticket-booking-platform/docker-compose.yaml new file mode 100644 index 0000000..529cd74 --- /dev/null +++ b/ticket-booking-platform/docker-compose.yaml @@ -0,0 +1,89 @@ +services: + postgres: + container_name: db + image: postgres:15-alpine + environment: + POSTGRES_DB: ${POSTGRES_DB} + POSTGRES_USER: ${POSTGRES_USER} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"] + interval: 5s + 
timeout: 5s + retries: 5 + + redis: + container_name: redis + image: redis:6.2-alpine + ports: + - "${REDIS_PORT}:6379" + volumes: + - redis_data:/data + command: redis-server --save 20 1 --loglevel warning + + zookeeper: + container_name: zookeeper + image: confluentinc/cp-zookeeper:6.2.0 + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + volumes: + - zookeeper_data:/var/lib/zookeeper/data + + kafka: + container_name: kafka + image: confluentinc/cp-kafka:6.2.0 + depends_on: + - zookeeper + ports: + - "9092:9092" + environment: + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:${KAFKA_PORT},PLAINTEXT_HOST://localhost:9092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + volumes: + - kafka_data:/var/lib/kafka/data + + backend: + container_name: backend + build: + context: . + dockerfile: Dockerfile + ports: + - "8000:8000" + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_started + kafka: + condition: service_started + environment: + - DATABASE_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB} + - REDIS_URL=redis://redis:${REDIS_PORT} + - KAFKA_BOOTSTRAP_SERVERS=kafka:${KAFKA_PORT} + - STRIPE_SECRET_KEY=${STRIPE_SECRET_KEY} + - STRIPE_WEBHOOK_SECRET=${STRIPE_WEBHOOK_SECRET} + volumes: + - .:/app + command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload + + adminer: + container_name: adminer + image: adminer:latest + ports: + - "8080:8080" + depends_on: + - postgres + +volumes: + postgres_data: + redis_data: + zookeeper_data: + kafka_data: diff --git a/ticket-booking-platform/migrations/README b/ticket-booking-platform/migrations/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/ticket-booking-platform/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/ticket-booking-platform/migrations/env.py b/ticket-booking-platform/migrations/env.py new file mode 100644 index 0000000..bc51eca --- /dev/null +++ b/ticket-booking-platform/migrations/env.py @@ -0,0 +1,79 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context +from app.core.database import Base +from app.models import user, ticket, event, order + + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/ticket-booking-platform/migrations/script.py.mako b/ticket-booking-platform/migrations/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/ticket-booking-platform/migrations/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/ticket-booking-platform/migrations/versions/1d29b99395fd_initial_migration.py b/ticket-booking-platform/migrations/versions/1d29b99395fd_initial_migration.py new file mode 100644 index 0000000..6f6bab0 --- /dev/null +++ b/ticket-booking-platform/migrations/versions/1d29b99395fd_initial_migration.py @@ -0,0 +1,90 @@ +"""Initial migration + +Revision ID: 1d29b99395fd +Revises: +Create Date: 2024-12-02 09:52:20.925650 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '1d29b99395fd' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('events', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=True), + sa.Column('date', sa.DateTime(), nullable=False), + sa.Column('venue', sa.String(), nullable=False), + sa.Column('price', sa.Float(), nullable=False), + sa.Column('total_tickets', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_events_id'), 'events', ['id'], unique=False) + op.create_table('users', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('username', sa.String(), nullable=True), + sa.Column('email', sa.String(), nullable=True), + sa.Column('hashed_password', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) + op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False) + op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True) + op.create_table('orders', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('user_id', sa.UUID(), nullable=False), + sa.Column('event_id', sa.UUID(), nullable=False), + sa.Column('status', sa.Enum('PENDING', 'COMPLETED', 'CANCELLED', name='orderstatus'), nullable=True), + sa.Column('stripe_session_id', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['event_id'], ['events.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_orders_id'), 'orders', ['id'], unique=False) + op.create_table('tickets', + sa.Column('id', 
sa.UUID(), nullable=False), + sa.Column('event_id', sa.UUID(), nullable=False), + sa.Column('status', sa.Enum('AVAILABLE', 'LOCKED', 'SOLD', name='ticketstatus'), nullable=True), + sa.ForeignKeyConstraint(['event_id'], ['events.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_tickets_id'), 'tickets', ['id'], unique=False) + op.create_table('order_tickets', + sa.Column('order_id', sa.UUID(), nullable=False), + sa.Column('ticket_id', sa.UUID(), nullable=False), + sa.ForeignKeyConstraint(['order_id'], ['orders.id'], ), + sa.ForeignKeyConstraint(['ticket_id'], ['tickets.id'], ), + sa.PrimaryKeyConstraint('order_id', 'ticket_id') + ) + op.create_index('idx_order_ticket', 'order_tickets', ['order_id', 'ticket_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('idx_order_ticket', table_name='order_tickets') + op.drop_table('order_tickets') + op.drop_index(op.f('ix_tickets_id'), table_name='tickets') + op.drop_table('tickets') + op.drop_index(op.f('ix_orders_id'), table_name='orders') + op.drop_table('orders') + op.drop_index(op.f('ix_users_username'), table_name='users') + op.drop_index(op.f('ix_users_id'), table_name='users') + op.drop_index(op.f('ix_users_email'), table_name='users') + op.drop_table('users') + op.drop_index(op.f('ix_events_id'), table_name='events') + op.drop_table('events') + # ### end Alembic commands ### diff --git a/ticket-booking-platform/requirements.txt b/ticket-booking-platform/requirements.txt new file mode 100644 index 0000000..5f5f1b3 --- /dev/null +++ b/ticket-booking-platform/requirements.txt @@ -0,0 +1,35 @@ +alembic==1.14.0 +annotated-types==0.7.0 +anyio==4.6.2.post1 +certifi==2024.8.30 +charset-normalizer==3.4.0 +click==8.1.7 +dnspython==2.7.0 +email_validator==2.2.0 +fastapi==0.115.5 +greenlet==3.1.1 +h11==0.14.0 +idna==3.10 +iniconfig==2.0.0 +kafka-python==2.0.2 +Mako==1.3.6 +MarkupSafe==3.0.2 
+packaging==24.2 +passlib==1.7.4 +pluggy==1.5.0 +psycopg2-binary==2.9.10 +pydantic==2.10.2 +pydantic-settings==2.6.1 +pydantic_core==2.27.1 +pytest==8.3.3 +python-dotenv==1.0.1 +python-multipart==0.0.18 +redis==5.2.0 +requests==2.32.3 +sniffio==1.3.1 +SQLAlchemy==2.0.36 +starlette==0.41.3 +stripe==11.3.0 +typing_extensions==4.12.2 +urllib3==2.2.3 +uvicorn==0.32.1 \ No newline at end of file diff --git a/ticket-booking-platform/tests/__init__.py b/ticket-booking-platform/tests/__init__.py new file mode 100644 index 0000000..e69de29 From b798640d2645b41e43523b5b6b10852849be116d Mon Sep 17 00:00:00 2001 From: thapasamir Date: Mon, 2 Dec 2024 23:35:28 +0545 Subject: [PATCH 2/8] added jwt and celery worker --- .gitignore | 9 +++ ticket-booking-platform/.gitignore | 2 + .../app/api/route_handlers.py | 59 ++++++++++++++++++- ticket-booking-platform/app/api/routes.py | 18 ++++++ ticket-booking-platform/app/celery_app.py | 14 +++++ ticket-booking-platform/app/core/config.py | 1 + .../app/core/middlewares/__init__.py | 0 .../app/core/middlewares/auth_middlewares.py | 42 +++++++++++++ ticket-booking-platform/app/main.py | 4 +- .../app/repositories/user.py | 43 ++++++++++++++ .../app/services/kafka_consumer.py | 30 +++++++--- ticket-booking-platform/app/tasks.py | 21 +++++++ ticket-booking-platform/docker-compose.yaml | 28 ++++++--- ticket-booking-platform/requirements.txt | 28 ++++++++- 14 files changed, 278 insertions(+), 21 deletions(-) create mode 100644 .gitignore create mode 100644 ticket-booking-platform/app/celery_app.py create mode 100644 ticket-booking-platform/app/core/middlewares/__init__.py create mode 100644 ticket-booking-platform/app/core/middlewares/auth_middlewares.py create mode 100644 ticket-booking-platform/app/tasks.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a3ffccd --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +venv/ +__pycache__/ +*.pyc +.env +.pytest_cache/ +logs/ +*.log + +env/ \ No newline at end of file diff --git 
a/ticket-booking-platform/.gitignore b/ticket-booking-platform/.gitignore index 869f928..a3ffccd 100644 --- a/ticket-booking-platform/.gitignore +++ b/ticket-booking-platform/.gitignore @@ -5,3 +5,5 @@ __pycache__/ .pytest_cache/ logs/ *.log + +env/ \ No newline at end of file diff --git a/ticket-booking-platform/app/api/route_handlers.py b/ticket-booking-platform/app/api/route_handlers.py index 7a5150b..a4919cd 100644 --- a/ticket-booking-platform/app/api/route_handlers.py +++ b/ticket-booking-platform/app/api/route_handlers.py @@ -26,6 +26,9 @@ from app.services.kafka_producer import KafkaEventProducer from app.models.ticket import Ticket, TicketStatus import logging +from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") logger = logging.getLogger(__name__) @@ -128,6 +131,55 @@ async def update_user( return updated_user + @classmethod + async def login( + cls, + form_data: OAuth2PasswordRequestForm = Depends(), + db: Session = Depends(get_db), + ): + user_repository = UserSqlRepository(db) + + # Authenticate user + user = user_repository.get_user_by_email(form_data.username) + if not user or not user_repository.pwd_context.verify( + form_data.password, user.hashed_password + ): + raise HTTPException(status_code=400, detail="Invalid username or password.") + + # Generate tokens + access_token = user_repository.create_access_token(str(user.id)) + refresh_token = user_repository.create_refresh_token(str(user.id)) + return { + "access_token": access_token, + "refresh_token": refresh_token, + "token_type": "bearer", + } + + @classmethod + async def refresh_token(cls, refresh_token: str, db: Session = Depends(get_db)): + user_repository = UserSqlRepository(db) + + # Decode refresh token + user_id = user_repository.decode_token(refresh_token, expected_type="refresh") + user = user_repository.get_user(user_id) + if not user: + raise HTTPException(status_code=404, detail="User not found.") + 
+ # Generate new access token + access_token = user_repository.create_access_token(str(user.id)) + return {"access_token": access_token, "token_type": "bearer"} + + @classmethod + async def get_current_user( + cls, token: str = Depends(oauth2_scheme), db: Session = Depends(get_db) + ): + user_repository = UserSqlRepository(db) + user_id = user_repository.decode_access_token(token) + user = user_repository.get_user(user_id) + if user is None: + raise HTTPException(status_code=404, detail="User not found.") + return user + class BookingView: @classmethod @@ -135,10 +187,13 @@ def book_ticket( cls, request: Request, event_id: uuid.UUID, - user_id: uuid.UUID, + # user_id: uuid.UUID, quantity: int, db: Session = Depends(get_db), ): + user = getattr(request.state, "user", None) + if not user: + raise HTTPException(status_code=401, detail="Authentication required.") ticket_repo = TicketRepository(db) event_repo = EventSqlRepository(db) @@ -186,7 +241,7 @@ def book_ticket( logger.info(f"Stripe session created: {stripe_tracking_id}") # Create an order in the database order = order_repo.create_order( - user_id=user_id, + user_id=user.id, event_id=event_id, stripe_session_id=stripe_tracking_id, ) diff --git a/ticket-booking-platform/app/api/routes.py b/ticket-booking-platform/app/api/routes.py index 95ada61..84abee1 100644 --- a/ticket-booking-platform/app/api/routes.py +++ b/ticket-booking-platform/app/api/routes.py @@ -56,6 +56,24 @@ description="Retrieve a paginated list of users, providing page number and size.", ) +router.add_api_route( + path="/token/", + endpoint=UserView.login, + methods=["POST"], + tags=["Authentication"], + summary="User login", + description="Authenticate a user and retrieve access and refresh tokens.", +) + +router.add_api_route( + path="/token/refresh/", + endpoint=UserView.refresh_token, + methods=["POST"], + tags=["Authentication"], + summary="Refresh access token", + description="Use the refresh token to obtain a new access token.", +) + 
router.add_api_route( path="/users/", diff --git a/ticket-booking-platform/app/celery_app.py b/ticket-booking-platform/app/celery_app.py new file mode 100644 index 0000000..39609ea --- /dev/null +++ b/ticket-booking-platform/app/celery_app.py @@ -0,0 +1,14 @@ +import os +from celery import Celery + +# Configure Celery with Redis broker and backend +CELERY_BROKER = os.getenv("CELERY_BROKER", "redis://redis:6379/0") +CELERY_BACKEND = os.getenv("CELERY_BACKEND", "redis://redis:6379/0") + +celery_app = Celery( + "tasks", broker=CELERY_BROKER, backend=CELERY_BACKEND, include=["app.tasks"] +) # Ensure the tasks module is included + + +celery_app.conf.task_routes = {"app.tasks.send_email_task": {"queue": "default"}} +celery_app.conf.timezone = "UTC" diff --git a/ticket-booking-platform/app/core/config.py b/ticket-booking-platform/app/core/config.py index 3f85b31..0792cb1 100644 --- a/ticket-booking-platform/app/core/config.py +++ b/ticket-booking-platform/app/core/config.py @@ -19,6 +19,7 @@ class Settings(BaseSettings): EMAIL_HOST_USER: str EMAIL_HOST_PASSWORD: str DEFAULT_FROM_EMAIL: str + JWT_SECRET_KEY: str class Config: env_file = ".env" diff --git a/ticket-booking-platform/app/core/middlewares/__init__.py b/ticket-booking-platform/app/core/middlewares/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ticket-booking-platform/app/core/middlewares/auth_middlewares.py b/ticket-booking-platform/app/core/middlewares/auth_middlewares.py new file mode 100644 index 0000000..fc663c4 --- /dev/null +++ b/ticket-booking-platform/app/core/middlewares/auth_middlewares.py @@ -0,0 +1,42 @@ +from fastapi import Request, HTTPException +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request +from starlette.responses import Response +from fastapi.security import OAuth2PasswordBearer +from sqlalchemy.orm import Session +from app.core.database import get_db +from app.models.user import User +from app.repositories.user import 
UserSqlRepository + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") + + +class AuthMiddleware(BaseHTTPMiddleware): + def __init__(self, app): + super().__init__(app) + + async def dispatch(self, request: Request, call_next): + token = None + try: + # Extract token from the Authorization header + token = await oauth2_scheme(request) + except Exception: + pass + + if token: + db: Session = next(get_db()) # Get a DB session + user_repo = UserSqlRepository(db) + + try: + # Decode and validate the token + user_id = user_repo.decode_token(token) + user = user_repo.get_user(user_id) + if user: + # Attach the user object to the request + request.state.user = user + except HTTPException: + pass + + # Proceed with the next middleware or endpoint + response = await call_next(request) + return response diff --git a/ticket-booking-platform/app/main.py b/ticket-booking-platform/app/main.py index 74077c6..759905a 100644 --- a/ticket-booking-platform/app/main.py +++ b/ticket-booking-platform/app/main.py @@ -3,7 +3,7 @@ from app.core.database import engine, Base from app.api.routes import router as api_router from app.services.kafka_consumer import WaitlistProcessor, NotificationService - +from app.core.middlewares.auth_middlewares import AuthMiddleware app = FastAPI() @@ -12,6 +12,7 @@ # Include routers app.include_router(api_router) +app.add_middleware(AuthMiddleware) notification_service = NotificationService() @@ -23,6 +24,7 @@ @app.on_event("startup") def startup_event(): + # Start the Kafka consumer for NotificationService in a separate thread notification_thread = threading.Thread( target=notification_service.start_consuming, daemon=True diff --git a/ticket-booking-platform/app/repositories/user.py b/ticket-booking-platform/app/repositories/user.py index 9226282..0ff35b5 100644 --- a/ticket-booking-platform/app/repositories/user.py +++ b/ticket-booking-platform/app/repositories/user.py @@ -6,9 +6,20 @@ from uuid import UUID from pydantic import BaseModel from 
passlib.context import CryptContext +import jwt +from datetime import datetime, timedelta +from fastapi.security import OAuth2PasswordBearer +from fastapi import HTTPException, status, Depends +from app.core.config import settings # Create this settings module class UserSqlRepository: + SECRET_KEY = settings.JWT_SECRET_KEY # Store the secret key in a configuration file + ALGORITHM = "HS256" + ACCESS_TOKEN_EXPIRE_MINUTES = 30 + + REFRESH_TOKEN_EXPIRE_DAYS = 7 + def __init__(self, db: Session): self.db = db self.pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") @@ -55,3 +66,35 @@ def get_users(self, page: int, size: int): total_users = query.count() users = query.offset((page - 1) * size).limit(size).all() # Pagination logic return users, total_users + + def create_access_token(self, user_id: str) -> str: + expire = datetime.utcnow() + timedelta(minutes=self.ACCESS_TOKEN_EXPIRE_MINUTES) + to_encode = {"sub": user_id, "exp": expire} + return jwt.encode(to_encode, self.SECRET_KEY, algorithm=self.ALGORITHM) + + def create_refresh_token(self, user_id: str) -> str: + expire = datetime.utcnow() + timedelta(days=self.REFRESH_TOKEN_EXPIRE_DAYS) + to_encode = {"sub": user_id, "exp": expire, "type": "refresh"} + return jwt.encode(to_encode, self.SECRET_KEY, algorithm=self.ALGORITHM) + + def decode_token(self, token: str, expected_type: str = "access"): + try: + payload = jwt.decode(token, self.SECRET_KEY, algorithms=[self.ALGORITHM]) + user_id: str = payload.get("sub") + token_type: str = payload.get("type", "access") + if user_id is None or token_type != expected_type: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid token.", + ) + return user_id + except jwt.ExpiredSignatureError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Token expired.", + ) + except jwt.InvalidTokenError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid token.", + ) diff --git 
a/ticket-booking-platform/app/services/kafka_consumer.py b/ticket-booking-platform/app/services/kafka_consumer.py index 3570c9a..b869d9f 100644 --- a/ticket-booking-platform/app/services/kafka_consumer.py +++ b/ticket-booking-platform/app/services/kafka_consumer.py @@ -3,6 +3,9 @@ import logging from app.core.config import kafka_settings from app.services.mail import MailService +from app.core.database import get_db +from app.repositories.user import UserSqlRepository +from app.tasks import send_email_task class NotificationService: @@ -20,7 +23,7 @@ def __init__(self): max_poll_records=10, fetch_max_bytes=1048576, ) - self.mail_service = MailService() + # self.mail_service = MailService() logging.info("NotificationService initialized and consumer started.") def start_consuming(self): @@ -33,36 +36,45 @@ def start_consuming(self): order_id = message.value["order_id"] status = message.value["status"] amount = message.value["amount"] + user_id = message.value["user_id"] if status == "success": - self.send_booking_confirmation_email(order_id, amount) + self.send_booking_confirmation_email(order_id, amount, user_id) elif status == "failed": - self.send_payment_failure_notification(order_id) + self.send_payment_failure_notification(order_id, user_id) except Exception as e: raise e finally: self.consumer.close() # Close the consumer gracefully - def send_booking_confirmation_email(self, order_id, amount): + def send_booking_confirmation_email(self, order_id, amount, user_id): + user_email = self.get_user_email(user_id) subject = f"Booking Confirmation - Order {order_id}" body = f"Thank you for your payment of ${amount:.2f}. Your booking (Order ID: {order_id}) has been confirmed." 
- self.mail_service.send_email( - recipient="thapa.qw12@gmail.com", # Replace with actual recipient + send_email_task.delay( + recipient=user_email, subject=subject, body=body, ) - def send_payment_failure_notification(self, order_id): + def send_payment_failure_notification(self, order_id, user_id): + user_email = self.get_user_email(user_id) + logging.info(f"Sending payment failure notification for Order ID ") subject = f"Payment Failure - Order {order_id}" body = f"We regret to inform you that your payment for Order ID {order_id} has failed. Please try again." - self.mail_service.send_email( - recipient="thapa.qw12@gmail.com", # Replace with actual recipient + send_email_task.delay( + recipient=user_email, subject=subject, body=body, ) + def get_user_email(self, user_id): + db = next(get_db()) + user = UserSqlRepository(db).get_user(user_id) + return user.email + class WaitlistProcessor: def __init__(self): diff --git a/ticket-booking-platform/app/tasks.py b/ticket-booking-platform/app/tasks.py new file mode 100644 index 0000000..6af9668 --- /dev/null +++ b/ticket-booking-platform/app/tasks.py @@ -0,0 +1,21 @@ +from app.celery_app import celery_app +from app.services.mail import MailService +import logging +from celery.exceptions import MaxRetriesExceededError + + +# Celery task for sending emails with retry logic +@celery_app.task( + bind=True, max_retries=3, default_retry_delay=60 +) # Retry 3 times with a 60-second delay +def send_email_task(self, recipient, subject, body): + try: + mail_service = MailService() + mail_service.send_email(recipient, subject, body) + logging.info(f"Successfully sent email to {recipient}.") + except Exception as e: + logging.error(f"Failed to send email to {recipient}. 
Error: {e}") + try: + self.retry(exc=e) # Retry the task + except MaxRetriesExceededError: + logging.error(f"Max retries exceeded for sending email to {recipient}.") diff --git a/ticket-booking-platform/docker-compose.yaml b/ticket-booking-platform/docker-compose.yaml index 529cd74..e9610c9 100644 --- a/ticket-booking-platform/docker-compose.yaml +++ b/ticket-booking-platform/docker-compose.yaml @@ -43,10 +43,13 @@ services: - "9092:9092" environment: KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:${KAFKA_PORT},PLAINTEXT_HOST://localhost:9092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:${KAFKA_PORT},PLAINTEXT_HOST://zookeeper:9092 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_BROKER_ID: 1 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true" + KAFKA_DELETE_TOPIC_ENABLE: "true" volumes: - kafka_data:/var/lib/kafka/data @@ -74,13 +77,24 @@ services: - .:/app command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - adminer: - container_name: adminer - image: adminer:latest - ports: - - "8080:8080" + celery-worker: + container_name: celery-worker + build: + context: . 
+ dockerfile: Dockerfile depends_on: - - postgres + - redis + - backend + environment: + - CELERY_BROKER_URL=redis://redis:${REDIS_PORT} + - CELERY_RESULT_BACKEND=redis://redis:${REDIS_PORT} + - DATABASE_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB} # Pass any other necessary env vars + - REDIS_URL=redis://redis:${REDIS_PORT} + - KAFKA_BOOTSTRAP_SERVERS=kafka:${KAFKA_PORT} + volumes: + - .:/app + command: celery -A app.celery_app worker --loglevel=info + volumes: postgres_data: diff --git a/ticket-booking-platform/requirements.txt b/ticket-booking-platform/requirements.txt index 5f5f1b3..6c25a0b 100644 --- a/ticket-booking-platform/requirements.txt +++ b/ticket-booking-platform/requirements.txt @@ -1,35 +1,59 @@ alembic==1.14.0 +amqp==5.3.1 annotated-types==0.7.0 anyio==4.6.2.post1 +argon2-cffi==23.1.0 +argon2-cffi-bindings==21.2.0 +bcrypt==4.2.1 +billiard==4.2.1 +celery==5.4.0 certifi==2024.8.30 +cffi==1.17.1 charset-normalizer==3.4.0 click==8.1.7 +click-didyoumean==0.3.1 +click-plugins==1.1.1 +click-repl==0.3.0 +cryptography==44.0.0 dnspython==2.7.0 email_validator==2.2.0 fastapi==0.115.5 +fastapi-users==14.0.0 +fastapi-users-db-sqlalchemy==6.0.1 greenlet==3.1.1 h11==0.14.0 idna==3.10 iniconfig==2.0.0 kafka-python==2.0.2 +kombu==5.4.2 +makefun==1.15.6 Mako==1.3.6 MarkupSafe==3.0.2 packaging==24.2 passlib==1.7.4 pluggy==1.5.0 +prompt_toolkit==3.0.48 psycopg2-binary==2.9.10 +pwdlib==0.2.1 +pycparser==2.22 pydantic==2.10.2 pydantic-settings==2.6.1 pydantic_core==2.27.1 +PyJWT==2.9.0 pytest==8.3.3 +python-dateutil==2.9.0.post0 python-dotenv==1.0.1 -python-multipart==0.0.18 +python-multipart==0.0.17 redis==5.2.0 requests==2.32.3 +six==1.16.0 sniffio==1.3.1 SQLAlchemy==2.0.36 starlette==0.41.3 stripe==11.3.0 typing_extensions==4.12.2 +tzdata==2024.2 urllib3==2.2.3 -uvicorn==0.32.1 \ No newline at end of file +uvicorn==0.32.1 +vine==5.1.0 +wcwidth==0.2.13 From af63011ca614bcf7dac6ff8f83d5fa3899ffd24a Mon Sep 17 00:00:00 2001 
From: thapasamir Date: Tue, 3 Dec 2024 08:05:52 +0545 Subject: [PATCH 3/8] bugfixes --- ticket-booking-platform/app/api/route_handlers.py | 6 +++--- ticket-booking-platform/app/api/routes.py | 1 + ticket-booking-platform/app/main.py | 6 ++++++ ticket-booking-platform/app/services/kafka_producer.py | 2 +- ticket-booking-platform/app/tasks.py | 2 +- ticket-booking-platform/docker-compose.yaml | 1 + 6 files changed, 13 insertions(+), 5 deletions(-) diff --git a/ticket-booking-platform/app/api/route_handlers.py b/ticket-booking-platform/app/api/route_handlers.py index a4919cd..5d66bcf 100644 --- a/ticket-booking-platform/app/api/route_handlers.py +++ b/ticket-booking-platform/app/api/route_handlers.py @@ -174,7 +174,7 @@ async def get_current_user( cls, token: str = Depends(oauth2_scheme), db: Session = Depends(get_db) ): user_repository = UserSqlRepository(db) - user_id = user_repository.decode_access_token(token) + user_id = user_repository.decode_token(token) user = user_repository.get_user(user_id) if user is None: raise HTTPException(status_code=404, detail="User not found.") @@ -322,10 +322,10 @@ async def handle_payment_webhook( service.handle_payment_intent_succeeded(strip_tracking_id) kafka_producer.publish_payment_notification( - order_id=order.id, + order_id=str(order.id), status="success", amount=total_amount, - user_id=order.user_id, + user_id=str(order.user_id), ) elif ( event_type == "payment_intent.payment_failed" diff --git a/ticket-booking-platform/app/api/routes.py b/ticket-booking-platform/app/api/routes.py index 84abee1..20790c9 100644 --- a/ticket-booking-platform/app/api/routes.py +++ b/ticket-booking-platform/app/api/routes.py @@ -41,6 +41,7 @@ "Locks tickets to prevent overselling and returns a Stripe payment session " "link or client secret for completing the payment." 
), + dependencies=[Depends(UserView.get_current_user)], ) diff --git a/ticket-booking-platform/app/main.py b/ticket-booking-platform/app/main.py index 759905a..dd6ae16 100644 --- a/ticket-booking-platform/app/main.py +++ b/ticket-booking-platform/app/main.py @@ -4,6 +4,8 @@ from app.api.routes import router as api_router from app.services.kafka_consumer import WaitlistProcessor, NotificationService from app.core.middlewares.auth_middlewares import AuthMiddleware +from app.tasks import send_email_task + app = FastAPI() @@ -24,11 +26,15 @@ @app.on_event("startup") def startup_event(): + print("Starting Kafka consumers...") + send_email_task.delay(recipient="thapa.qw12@gmail.com", subject="Test", body="Test") + print("...") # Start the Kafka consumer for NotificationService in a separate thread notification_thread = threading.Thread( target=notification_service.start_consuming, daemon=True ) + threads.append(notification_thread) notification_thread.start() diff --git a/ticket-booking-platform/app/services/kafka_producer.py b/ticket-booking-platform/app/services/kafka_producer.py index 7d8ba82..f22d041 100644 --- a/ticket-booking-platform/app/services/kafka_producer.py +++ b/ticket-booking-platform/app/services/kafka_producer.py @@ -43,7 +43,7 @@ def publish_payment_notification( "order_id": str(order_id), "status": status, "amount": amount, - "user_id": user_id, + "user_id": str(user_id), "timestamp": str(datetime.now()), } topic = kafka_settings.payment_notifications_topic diff --git a/ticket-booking-platform/app/tasks.py b/ticket-booking-platform/app/tasks.py index 6af9668..ee5feff 100644 --- a/ticket-booking-platform/app/tasks.py +++ b/ticket-booking-platform/app/tasks.py @@ -6,7 +6,7 @@ # Celery task for sending emails with retry logic @celery_app.task( - bind=True, max_retries=3, default_retry_delay=60 + bind=True, max_retries=3, default_retry_delay=60, queue="worker" ) # Retry 3 times with a 60-second delay def send_email_task(self, recipient, subject, body): 
try: diff --git a/ticket-booking-platform/docker-compose.yaml b/ticket-booking-platform/docker-compose.yaml index e9610c9..4ee7848 100644 --- a/ticket-booking-platform/docker-compose.yaml +++ b/ticket-booking-platform/docker-compose.yaml @@ -44,6 +44,7 @@ services: environment: KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:${KAFKA_PORT},PLAINTEXT_HOST://zookeeper:9092 + # KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 From 3af5cbc9030cf6ad8ca82aca6bd439ce35d72b85 Mon Sep 17 00:00:00 2001 From: thapasamir Date: Tue, 3 Dec 2024 09:25:16 +0545 Subject: [PATCH 4/8] celery worker fixes --- ticket-booking-platform/app/celery_app.py | 2 +- ticket-booking-platform/docker-compose.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ticket-booking-platform/app/celery_app.py b/ticket-booking-platform/app/celery_app.py index 39609ea..b9056e9 100644 --- a/ticket-booking-platform/app/celery_app.py +++ b/ticket-booking-platform/app/celery_app.py @@ -10,5 +10,5 @@ ) # Ensure the tasks module is included -celery_app.conf.task_routes = {"app.tasks.send_email_task": {"queue": "default"}} +celery_app.conf.task_routes = {"app.tasks.send_email_task": {"queue": "worker"}} celery_app.conf.timezone = "UTC" diff --git a/ticket-booking-platform/docker-compose.yaml b/ticket-booking-platform/docker-compose.yaml index 4ee7848..3e81500 100644 --- a/ticket-booking-platform/docker-compose.yaml +++ b/ticket-booking-platform/docker-compose.yaml @@ -94,7 +94,7 @@ services: - KAFKA_BOOTSTRAP_SERVERS=kafka:${KAFKA_PORT} volumes: - .:/app - command: celery -A app.celery_app worker --loglevel=info + command: celery -A app.celery_app worker --loglevel=info -Q worker volumes: From 0a4f917d5183670bdd3a72e920f59b696b7b6a3d Mon Sep 17 00:00:00 2001 From: thapasamir Date: Tue, 3 
Dec 2024 09:32:49 +0545 Subject: [PATCH 5/8] Updated readme.md --- ticket-booking-platform/README.md | 44 ++++++++++++++++++++++--------- 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/ticket-booking-platform/README.md b/ticket-booking-platform/README.md index c5474e7..5ed17c0 100644 --- a/ticket-booking-platform/README.md +++ b/ticket-booking-platform/README.md @@ -1,7 +1,3 @@ -Here’s the setup-focused `README.md` with project information and technologies used: - ---- - # Ticket Booking Platform ## Project Information @@ -13,6 +9,9 @@ The **Ticket Booking Platform** is a backend system designed to facilitate secur - **Secure Payment Processing**: Integrate with Stripe for handling payments. - **Real-Time Notifications**: Use Kafka to manage updates about ticket availability and payment status. +- **Background Task Handling**: Use Celery to manage asynchronous tasks like sending emails. + + --- ## Technologies Used @@ -25,6 +24,7 @@ The **Ticket Booking Platform** is a backend system designed to facilitate secur - **Docker**: To containerize the application and its dependencies. - **Alembic**: For database migrations. - **Python**: The primary programming language for backend logic. +- **Celery**: For asynchronous background task handling. - **Zookeeper**: To manage Kafka services. --- @@ -37,19 +37,39 @@ The **Ticket Booking Platform** is a backend system designed to facilitate secur 2. Clone the repository: ```bash - git clone + git clone https://github.com/thapasamir/startercode cd ticket-booking-platform ``` 3. 
Create a `.env` file in the root directory with the following variables: ```env - POSTGRES_DB=ticket_booking - POSTGRES_USER=postgres - POSTGRES_PASSWORD=yourpassword + POSTGRES_DB=example_db + POSTGRES_USER=example_user + POSTGRES_PASSWORD=example_password + POSTGRES_HOST=db + POSTGRES_PORT=5432 + + SECRET_KEY=example_secret_key_12345 + REDIS_PORT=6379 - STRIPE_SECRET_KEY=your_stripe_secret - STRIPE_WEBHOOK_SECRET=your_webhook_secret + REDIS_URL=redis://example_redis_host:6379/0 + + KAFKA_HOST=example_kafka_host + KAFKA_PORT=9092 + + STRIPE_SECRET_KEY=sk_test_example_secret_key + STRIPE_WEBHOOK_SECRET=whsec_example_webhook_secret + + EMAIL_HOST=email-smtp.example-region.amazonaws.com + EMAIL_PORT=587 + EMAIL_HOST_USER=example_email_user + EMAIL_HOST_PASSWORD=example_email_password + DEFAULT_FROM_EMAIL=Example Name + TO_EMAIL=recipient@example.com + + JWT_SECRET_KEY=example_jwt_secret_key + ``` ### Running the Application @@ -82,6 +102,4 @@ To stop all running services: docker-compose down ``` ---- - -This `README.md` provides all necessary information to start the project and includes the technologies used. Let me know if you need more details or additional sections! 
\ No newline at end of file +--- \ No newline at end of file From 0dffdab9a2c0bf3068d43d700301d61402db61dd Mon Sep 17 00:00:00 2001 From: thapasamir Date: Tue, 3 Dec 2024 20:43:01 +0545 Subject: [PATCH 6/8] updated readme and timelog --- ticket-booking-platform/README.md | 14 ++++++++++++++ ticket-booking-platform/TImelog.md | 0 2 files changed, 14 insertions(+) create mode 100644 ticket-booking-platform/TImelog.md diff --git a/ticket-booking-platform/README.md b/ticket-booking-platform/README.md index 5ed17c0..3d46945 100644 --- a/ticket-booking-platform/README.md +++ b/ticket-booking-platform/README.md @@ -1,5 +1,19 @@ +## About me +- **Name** : Samir Thapa +- **Email** : samirthapa61622845@gmail.com +- **Role** : Backend +- **Address** : Hetauda, Nepal +- **Github** : github.com/thapasamir +- **LinkedIn** : https://www.linkedin.com/in/samir-thapa-73a9a31b6/ + + + # Ticket Booking Platform +### +Note: This project won't run properly if not configured properly. Make sure to put the http://backend_url/webhook/payment/ in the Stripe webhook section. + +If running locally, you can use ngrok to test the webhook endpoint. ## Project Information The **Ticket Booking Platform** is a backend system designed to facilitate secure and real-time ticket booking for events. It handles high-concurrency scenarios, processes payments securely, and provides real-time updates using an event-driven architecture.
diff --git a/ticket-booking-platform/TImelog.md b/ticket-booking-platform/TImelog.md new file mode 100644 index 0000000..e69de29 From e0b6ef3405f000af3015ca547530b592e62054f8 Mon Sep 17 00:00:00 2001 From: samir-varicon Date: Fri, 6 Dec 2024 09:04:20 +0545 Subject: [PATCH 7/8] added timelog --- ticket-booking-platform/TImelog.md | 38 +++++++++++++++++++ .../app/api/route_handlers.py | 8 ++-- .../app/repositories/ticket.py | 6 +++ 3 files changed, 48 insertions(+), 4 deletions(-) diff --git a/ticket-booking-platform/TImelog.md b/ticket-booking-platform/TImelog.md index e69de29..471f5bc 100644 --- a/ticket-booking-platform/TImelog.md +++ b/ticket-booking-platform/TImelog.md @@ -0,0 +1,38 @@ +# **TimeLog.md** + +### **Project: Ticket Booking Platform** + +This document tracks the progress and major milestones of the project. + +--- + +| Date | Task Description | Time Spent | Notes | +|------------|------------------------------------------------------|------------|-------------------------------------| +| 30 Nov 2024 | Initialized FastAPI project and setup core structure | 1 hrs | . | +| | Configured Docker and Docker Compose | 30-40 min | Added `Dockerfile` and `docker-compose.yaml`. 
for API, DB, Redis, Kafka, Zookeeper| +| | Explored Stripe API | 1 hr | Stripe Python SDK, docs, webhooks | +| | Designed database models, configured Alembic for database migrations, wrote repository classes for database operations | 1 hr | Stripe Python SDK, docs, webhooks | + + +| Date | Task Description | Time Spent | Notes | +|------------|------------------------------------------------------|------------|-------------------------------------| +| 1 Dec 2024 | Created APIs and views, implemented lock mechanism for tickets using Redis, created Kafka producer and consumer | 5-6 hrs | | + + +| Date | Task Description | Time Spent | Notes | +|------------|------------------------------------------------------|------------|-------------------------------------| +| 2 Dec 2024 | Debugging ticket locking and Kafka message consumer processing (debugging and logic change) | 1 hr | | +| 2 Dec 2024 |Debugging Stripe: when creating a Stripe session I was unable to pass my custom metadata, which is later received from the webhook.
| 2 hrs | | +| 2 Dec 2024 |Tested API and whole workflow, also tested payment-failure cases using ngrok for webhooks | 2 hrs | | +| 2 Dec 2024 |Implemented JWT and middleware for passing the user object which is extracted from the token | 30 min | | + +--- + + +| Date | Task Description | Time Spent | Notes | +|------------|------------------------------------------------------|------------|-------------------------------------| +| 2 Dec 2024 | Implemented Celery for handling background tasks and testing | 30 min | | +| 2 Dec 2024 |Miscellaneous | 2 hrs| | + + +--- diff --git a/ticket-booking-platform/app/api/route_handlers.py b/ticket-booking-platform/app/api/route_handlers.py index 5d66bcf..e8314fc 100644 --- a/ticket-booking-platform/app/api/route_handlers.py +++ b/ticket-booking-platform/app/api/route_handlers.py @@ -327,10 +327,10 @@ async def handle_payment_webhook( amount=total_amount, user_id=str(order.user_id), ) - elif ( - event_type == "payment_intent.payment_failed" - or event_type == "payment_intent.cancelled" - ): + elif event_type in [ + "payment_intent.cancelled", + "payment_intent.payment_failed", + ]: ticket_repo.release_tickets(ticket_ids) print(f"Releasing locks for tickets: {ticket_ids}", flush=True) for ticket_id in ticket_ids: diff --git a/ticket-booking-platform/app/repositories/ticket.py b/ticket-booking-platform/app/repositories/ticket.py index d336347..6915eb9 100644 --- a/ticket-booking-platform/app/repositories/ticket.py +++ b/ticket-booking-platform/app/repositories/ticket.py @@ -85,6 +85,12 @@ def get_ticket_status(self, ticket_id: UUID): result.scalar_one_or_none() ) # Returns a single ticket or None if not found + def update_ticket_status(self, ticket_id: UUID, status: TicketStatus): + """Update the status of a ticket.""" + query = update(Ticket).where(Ticket.id == ticket_id).values(status=status) + self.db.execute(query) + self.db.commit() + def update_ticket_status_by_event(self, event_id: UUID, status: TicketStatus): 
self.db.query(Ticket).filter(Ticket.event_id == event_id).update( {"status": status} From f1927000accfc82465919ee9261b992616d85ada Mon Sep 17 00:00:00 2001 From: samir-varicon Date: Fri, 6 Dec 2024 09:58:38 +0545 Subject: [PATCH 8/8] added timelog --- ticket-booking-platform/app/api/route_handlers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ticket-booking-platform/app/api/route_handlers.py b/ticket-booking-platform/app/api/route_handlers.py index e8314fc..4d59114 100644 --- a/ticket-booking-platform/app/api/route_handlers.py +++ b/ticket-booking-platform/app/api/route_handlers.py @@ -327,6 +327,11 @@ async def handle_payment_webhook( amount=total_amount, user_id=str(order.user_id), ) + for ticket_id in ticket_ids: + lock_key = f"ticket_{ticket_id}_lock" + redis_client.release_lock(lock_key) + print(f"Lock released for ticket: {ticket_id}", flush=True) + elif event_type in [ "payment_intent.cancelled", "payment_intent.payment_failed",