Commit ef92436

chat: Add SSE streaming for Llama.cpp based models

Dhruvgera committed May 13, 2023
1 parent 6369c75 commit ef92436
Showing 10 changed files with 239 additions and 225 deletions.
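For orientation (this note is not part of the commit): with `stream: true`, an OpenAI-compatible endpoint such as LocalAI's `/v1/chat/completions` answers with Server-Sent Events instead of a single JSON body. Each event is a `data:` line whose partial completion carries the next token in `choices[0].delta.content`, and many servers finish with a literal `data: [DONE]` sentinel. The payloads below are hypothetical and only illustrate the shape the new parser expects; exact fields vary by server and version.

    data: {"choices":[{"delta":{"content":"Hel"},"index":0}]}
    data: {"choices":[{"delta":{"content":"lo"},"index":0}]}
    data: [DONE]

The reworked `handleSubmit` in `src/ChatGptInterface.js` below accumulates these deltas into `assistantResponse` and mirrors the partial text into `currentAssistantMessage` so the UI can render the reply while it streams.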
Binary file removed public/favicon.ico
109 changes: 0 additions & 109 deletions src/App.css

This file was deleted.

13 changes: 0 additions & 13 deletions src/App.js

This file was deleted.

8 changes: 0 additions & 8 deletions src/App.test.js

This file was deleted.

178 changes: 117 additions & 61 deletions src/ChatGptInterface.js
@@ -1,96 +1,142 @@
-import React, { useState, useRef, useEffect } from "react";
-import "./App.css"; // Import custom CSS for styling
+import React, { useState, useRef, useEffect, Fragment } from "react";
+import "./index.css";
+
+const host = "http://localhost:8080";
+const temperature = 0.7;
 
 const ChatGptInterface = () => {
   const [messages, setMessages] = useState([]);
   const [input, setInput] = useState("");
   const [isLoading, setIsLoading] = useState(false);
   const [error, setError] = useState(null);
-  const [models, setModels] = useState([]); // Added state for models
+  const [models, setModels] = useState([]);
+  const [currentAssistantMessage, setCurrentAssistantMessage] = useState("");
   const chatContainerRef = useRef(null);
 
   const handleInputChange = (e) => {
     setInput(e.target.value);
   };
 
   const handleSubmit = async () => {
+    // Add user input to messages
+    setMessages((prevMessages) => [
+      ...prevMessages,
+      { role: "user", content: input },
+    ]);
+
     // Reset error state and set loading state
     setError(null);
     setIsLoading(true);
 
     try {
       const requestOptions = {
         method: "POST",
         headers: { "Content-Type": "application/json" },
         body: JSON.stringify({
-          model: selectedModel, // Use selectedModel from state as model name
+          model: selectedModel,
           messages: [
             ...messages,
             {
               role: "user",
               content: input,
             },
           ],
-          temperature: 0.7,
+          temperature,
+          stream: true,
         }),
       };
 
-      const response = await fetch(
-        "http://localhost:8080/v1/chat/completions",
-        requestOptions
-      );
-
-      const data = await response.json();
-      const assistantResponse =
-        data?.choices?.[0]?.message?.content || "No response found";
-
-      // Add user input to messages
-      setMessages((prevMessages) => [
-        ...prevMessages,
-        { role: "user", content: input }, // Append user input message
-        { role: "assistant", content: assistantResponse },
-      ]);
-
-      // Clear input field
-      setInput("");
-    } catch (error) {
-      console.error("Error:", error);
-      setError("Failed to fetch response. Please try again: " + error.message); // Update error message
-    } finally {
-      // Set loading state to false after response or error is received
-      setIsLoading(false);
-    }
-  };
-
-  // Scroll to the bottom of the chat container whenever a new message is added
-  useEffect(() => {
-    if (chatContainerRef.current) {
-      chatContainerRef.current.scrollTop = chatContainerRef.current.scrollHeight;
-    }
-  }, [messages]);
+      const response = await fetch(`${host}/v1/chat/completions`, requestOptions);
+
+      let data = "";
+      const reader = response.body.getReader();
+      let partialData = "";
+      let done = false;
+      let assistantResponse = "";
+
+      while (!done) {
+        const { value, done: readerDone } = await reader.read();
+
+        done = readerDone;
+
+        if (value) {
+          const chunk = new TextDecoder().decode(value);
+          partialData += chunk;
+          const lines = partialData.split("\n");
+
+          for (let i = 0; i < lines.length - 1; i++) {
+            const line = lines[i];
+            if (line.startsWith("data: ")) {
+              const jsonStr = line.substring("data: ".length);
+              const json = JSON.parse(jsonStr);
+
+              // Check if the response contains choices and delta fields
+              if (json.choices && json.choices.length > 0 && json.choices[0].delta) {
+                const token = json.choices[0].delta.content;
+                if (token !== undefined) {
+                  assistantResponse += token;
+                  setCurrentAssistantMessage(assistantResponse);
+                }
+              }
+            }
+          }
+
+          partialData = lines[lines.length - 1];
+        }
+      }
+
+      // Add assistant response to messages
+      setMessages((prevMessages) => [
+        ...prevMessages,
+        { role: "assistant", content: assistantResponse },
+      ]);
+
+      // Clear input field and currentAssistantMessage
+      setInput("");
+      setCurrentAssistantMessage("");
+    } catch (error) {
+      console.error("Error:", error);
+      setError("Failed to fetch response. Please try again: " + error.message);
+    } finally {
+      setIsLoading(false);
+    }
+  };
 
   useEffect(() => {
     // Fetch models on component mount
     const fetchModels = async () => {
       try {
-        const response = await fetch(
-          "http://localhost:8080/v1/models"
-        );
+        const response = await fetch(`${host}/v1/models`);
         const data = await response.json();
         setModels(data?.data || []);
       } catch (error) {
         console.error("Error:", error);
       }
     };
     fetchModels();
-  }, []); // Empty dependency array to fetch models only on mount
+  }, []);
 
   const handleModelChange = (e) => {
     setSelectedModel(e.target.value);
   };
 
-  const [selectedModel, setSelectedModel] = useState(""); // Added state for selected model
+  const [selectedModel, setSelectedModel] = useState("");
+
+  useEffect(() => {
+    if (chatContainerRef.current) {
+      chatContainerRef.current.scrollTop =
+        chatContainerRef.current.scrollHeight;
+    }
+  }, [messages, currentAssistantMessage]);
+
+  const renderMessageContent = (content) => {
+    const parts = content.split("\n");
+    return parts.map((part, index) => (
+      <Fragment key={index}>
+        {part}
+        {index < parts.length - 1 && <br />}
+      </Fragment>
+    ));
+  };
 
   return (
     <div className="chat-page">
@@ -122,9 +168,19 @@
             <span className="message-role">
               {message.role === "user" ? "You" : "LocalAI"}:
             </span>
-            <span className="message-content">{message.content}</span>
+            <span className="message-content">
+              {renderMessageContent(message.content)}
+            </span>
           </div>
         ))}
+        {isLoading && (
+          <div className="chat-message assistant-message">
+            <span className="message-role">LocalAI:</span>
+            <span className="message-content">
+              {renderMessageContent(currentAssistantMessage)}
+            </span>
+          </div>
+        )}
       </div>
     </div>
     <div className="chat-input">
@@ -151,4 +207,4 @@
   );
 };
 
-export default ChatGptInterface;
+export default ChatGptInterface;
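A note on the parser as committed (an observation, not a change in this commit): if the backend terminates the stream with a literal `data: [DONE]` line, as OpenAI-compatible servers commonly do, `JSON.parse` will throw on it, the loop will abort into the `catch` block, and the streamed reply is never appended to `messages`. A minimal defensive variant of the inner loop body, assuming the same `assistantResponse` and `setCurrentAssistantMessage` names used in the diff above, might look like this:

            if (line.startsWith("data: ")) {
              const payload = line.substring("data: ".length).trim();
              // Skip the end-of-stream sentinel; it is not JSON.
              if (payload === "[DONE]") continue;
              let json;
              try {
                json = JSON.parse(payload);
              } catch {
                // Tolerate malformed or partial lines instead of failing the whole request.
                continue;
              }
              const token = json?.choices?.[0]?.delta?.content;
              if (token !== undefined) {
                assistantResponse += token;
                setCurrentAssistantMessage(assistantResponse);
              }
            }

This is only a sketch; whether the Llama.cpp backend in use actually emits the `[DONE]` sentinel should be checked against the server version before relying on it.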

0 comments on commit ef92436
