OpenAI Chat App with Astro #47

Draft · wants to merge 2 commits into `main`
29 changes: 29 additions & 0 deletions examples/openai-chat/.gitignore
@@ -0,0 +1,29 @@
# Dependencies
node_modules/
yarn-debug.log*
yarn-error.log*

# Environment variables
.env
.env.local
.env.*

# Build output
dist/
build/

# OS files
.DS_Store
Thumbs.db

# IDE and editor files
.idea/
.vscode/
*.swp
*.swo

# Astro
.astro/

# Netlify
.netlify/
1 change: 1 addition & 0 deletions examples/openai-chat/.nvmrc
@@ -0,0 +1 @@
v20
63 changes: 63 additions & 0 deletions examples/openai-chat/README.md
@@ -0,0 +1,63 @@
# AI Chat App

A simple AI chat application built with Astro, React, and OpenAI, deployed on Netlify.

## Features

- Real-time chat interface
- Streaming responses from OpenAI's GPT-3.5 Turbo
- Modern, responsive design
- Serverless architecture using Netlify Functions

## Prerequisites

- Node.js v18 or later (the included `.nvmrc` targets v20)
- npm
- OpenAI API key
- Netlify account (for deployment)

## Setup

1. Clone the repository:

```bash
git clone <repository-url>
cd kaibanjs-ai-chat
```

2. Install dependencies:

```bash
npm install
```

3. Create a `.env` file in the root directory and add your OpenAI API key:

```
OPENAI_API_KEY=your_openai_api_key_here
```

## Development

To run the development server:

```bash
npm run dev
```

The application will be available at `http://localhost:4321`.
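
To exercise the chat endpoint directly, a request along these lines should work once the function is being served (for example with `netlify dev`; the port below is that tool's usual default and is an assumption, so adjust it to wherever the function is actually exposed):

```bash
# Assumes the function is reachable locally, e.g. via `netlify dev` (default port 8888).
# -N disables buffering so the streamed chunks print as they arrive.
curl -N \
  -X POST \
  -H "Content-Type: application/json" \
  -d '{"message": "Hello!"}' \
  http://localhost:8888/.netlify/functions/chat
```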

## Deployment

1. Push your code to GitHub

2. Connect your repository to Netlify

3. Configure the environment variable in Netlify:

- Add `OPENAI_API_KEY` in your Netlify environment variables (or set it from the CLI, as sketched after this list)

4. Deploy! Netlify will automatically build and deploy your application.
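
If you manage the site from the command line, the Netlify CLI can set the variable as well. A minimal sketch, assuming the project has already been linked with `netlify link` and you are logged in:

```bash
# Assumes the site is linked (netlify link) and you are authenticated (netlify login).
netlify env:set OPENAI_API_KEY your_openai_api_key_here
```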

## License

MIT
9 changes: 9 additions & 0 deletions examples/openai-chat/astro.config.mjs
@@ -0,0 +1,9 @@
import { defineConfig } from "astro/config";
import netlify from "@astrojs/netlify";
import react from "@astrojs/react";

export default defineConfig({
output: "server",
adapter: netlify(),
integrations: [react()],
});
54 changes: 54 additions & 0 deletions examples/openai-chat/netlify/functions/chat.ts
@@ -0,0 +1,54 @@
import type { Context } from "@netlify/functions";
import OpenAI from "openai";

const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});

export default async (req: Request, context: Context) => {
if (req.method !== "POST") {
return new Response("Method Not Allowed", { status: 405 });
}

try {
const text = await req.text();
const { message } = JSON.parse(text || "{}");

if (!message) {
return new Response("Message is required", { status: 400 });
}

const stream = await openai.chat.completions.create({
model: "gpt-3.5-turbo",
messages: [{ role: "user", content: message }],
stream: true,
});

    // Pipe the OpenAI stream into a ReadableStream of UTF-8 encoded text chunks
    const encoder = new TextEncoder();
    const readableStream = new ReadableStream({
      async start(controller) {
        for await (const chunk of stream) {
          const text = chunk.choices[0]?.delta?.content || "";
          if (text) {
            controller.enqueue(encoder.encode(text));
          }
}
controller.close();
},
});

    // Return the stream. The body is raw text chunks rather than SSE-framed
    // events, so it is labeled as plain text for the client to read with
    // response.body.getReader().
    return new Response(readableStream, {
      headers: {
        "Content-Type": "text/plain; charset=utf-8",
        "Cache-Control": "no-cache",
        Connection: "keep-alive",
      },
});
} catch (error) {
console.error("Error:", error);
return new Response(JSON.stringify({ error: "Internal Server Error" }), {
status: 500,
});
}
};
27 changes: 27 additions & 0 deletions examples/openai-chat/package.json
@@ -0,0 +1,27 @@
{
"name": "kaibanjs-ai-chat",
"version": "1.0.0",
"type": "module",
"scripts": {
"dev": "astro dev",
"start": "astro dev",
"build": "astro build",
"preview": "astro preview",
"astro": "astro"
},
"dependencies": {
"@astrojs/netlify": "6.1.0",
"@astrojs/react": "4.2.0",
"@netlify/functions": "^2.4.1",
"@types/node": "^20.11.16",
"@types/react": "^18.2.48",
"@types/react-dom": "^18.2.18",
"astro": "5.2.5",
"openai": "^4.26.0",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"typescript": "^5.3.3"
}
}
158 changes: 158 additions & 0 deletions examples/openai-chat/src/components/Chat.tsx
@@ -0,0 +1,158 @@
import { useState, useRef, useEffect } from "react";

interface Message {
role: "user" | "assistant";
content: string;
}

export default function Chat() {
const [messages, setMessages] = useState<Message[]>([]);
const [input, setInput] = useState("");
const [isLoading, setIsLoading] = useState(false);
const messagesEndRef = useRef<HTMLDivElement>(null);

const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};

useEffect(() => {
scrollToBottom();
}, [messages]);

const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault();
if (!input.trim() || isLoading) return;

const userMessage = { role: "user" as const, content: input.trim() };
setMessages((prev) => [...prev, userMessage]);
setInput("");
setIsLoading(true);

try {
const response = await fetch("/.netlify/functions/chat", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ message: userMessage.content }),
});

if (!response.ok) throw new Error("Network response was not ok");

const reader = response.body?.getReader();
if (!reader) throw new Error("No reader available");

let assistantMessage = "";
setMessages((prev) => [...prev, { role: "assistant", content: "" }]);

while (true) {
const { done, value } = await reader.read();
if (done) break;

const text = new TextDecoder().decode(value);
assistantMessage += text;
setMessages((prev) => [
...prev.slice(0, -1),
{ role: "assistant", content: assistantMessage },
]);
}
} catch (error) {
console.error("Error:", error);
setMessages((prev) => [
...prev,
{
role: "assistant",
content: "Sorry, there was an error processing your request.",
},
]);
} finally {
setIsLoading(false);
}
};

return (
<div className="chat-container" style={styles.container}>
<div className="messages" style={styles.messages}>
{messages.map((message, index) => (
<div
key={index}
style={{
...styles.message,
...(message.role === "user"
? styles.userMessage
: styles.assistantMessage),
}}>
<strong>{message.role === "user" ? "You: " : "AI: "}</strong>
<span>{message.content}</span>
</div>
))}
<div ref={messagesEndRef} />
</div>
<form onSubmit={handleSubmit} style={styles.form}>
<input
type="text"
value={input}
onChange={(e) => setInput(e.target.value)}
placeholder="Type your message..."
style={styles.input}
disabled={isLoading}
/>
<button type="submit" disabled={isLoading} style={styles.button}>
{isLoading ? "Sending..." : "Send"}
</button>
</form>
</div>
);
}

const styles = {
container: {
display: "flex",
flexDirection: "column" as const,
height: "600px",
border: "1px solid #ddd",
borderRadius: "8px",
background: "#fff",
},
messages: {
flex: 1,
overflowY: "auto" as const,
padding: "1rem",
},
message: {
marginBottom: "1rem",
padding: "0.8rem",
borderRadius: "8px",
maxWidth: "80%",
},
userMessage: {
marginLeft: "auto",
background: "#007bff",
color: "#fff",
},
assistantMessage: {
marginRight: "auto",
background: "#f1f1f1",
color: "#333",
},
form: {
display: "flex",
padding: "1rem",
borderTop: "1px solid #ddd",
gap: "0.5rem",
},
input: {
flex: 1,
padding: "0.5rem",
border: "1px solid #ddd",
borderRadius: "4px",
fontSize: "1rem",
},
button: {
padding: "0.5rem 1rem",
background: "#007bff",
color: "#fff",
border: "none",
borderRadius: "4px",
cursor: "pointer",
fontSize: "1rem",
},
};
1 change: 1 addition & 0 deletions examples/openai-chat/src/env.d.ts
@@ -0,0 +1 @@
/// <reference path="../.astro/types.d.ts" />
44 changes: 44 additions & 0 deletions examples/openai-chat/src/layouts/Layout.astro
@@ -0,0 +1,44 @@
---
interface Props {
title: string;
}

const { title } = Astro.props;
---

<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>{title}</title>
<style>
:root {
--font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI",
Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;
}
* {
box-sizing: border-box;
margin: 0;
padding: 0;
}
body {
font-family: var(--font-family);
background: #f5f5f5;
color: #333;
line-height: 1.6;
}
main {
max-width: 800px;
margin: 0 auto;
padding: 2rem;
min-height: 100vh;
}
</style>
</head>
<body>
<main>
<slot />
</main>
</body>
</html>