
Commit

Merge pull request #45 from Barqawiz/add-streamin-to-chatgpt
Add streaming functionality to the chatbot
Barqawiz authored Sep 1, 2023
2 parents 6aa5cad + f53ec51 commit 73a63ca
Showing 11 changed files with 397 additions and 220 deletions.
1 change: 1 addition & 0 deletions IntelliNode/README.md
@@ -25,6 +25,7 @@ IntelliNode is the ultimate tool to integrate with the latest language models an
- Add support for hugging face inference.
- Generate prompt using LLM.
- Add support for huge data memory semantic search using `SemanticSearchPaging`.
- Update the chatbot with `stream` function.

Join the [discord server](https://discord.gg/VYgCh2p3Ww) for the latest updates and community support.
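The README entry above references the new `stream` function added to `Chatbot.js` below. A minimal consumption sketch (not part of this commit), assuming the `intellinode` package is installed, an `OPENAI_API_KEY` environment variable is set, and `new Chatbot(apiKey)` defaults to the OpenAI provider as in the integration tests:

// Minimal usage sketch of the new stream() generator; not part of this commit.
// Assumes OPENAI_API_KEY is set and that new Chatbot(apiKey) defaults to the OpenAI provider.
const { Chatbot, ChatGPTInput } = require('intellinode');

async function main() {
  const bot = new Chatbot(process.env.OPENAI_API_KEY);

  const input = new ChatGPTInput('You are a helpful assistant.');
  input.addUserMessage('Explain streaming responses in one sentence.');

  // stream() yields text fragments as they arrive instead of one final string.
  let fullText = '';
  for await (const chunk of bot.stream(input)) {
    fullText += chunk;
    process.stdout.write(chunk);
  }
  console.log('\nfull text:', fullText);
}

main().catch(console.error);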

38 changes: 36 additions & 2 deletions IntelliNode/function/Chatbot.js
@@ -8,6 +8,8 @@ Copyright 2023 Github.com/Barqawiz/IntelliNode
const OpenAIWrapper = require("../wrappers/OpenAIWrapper");
const ReplicateWrapper = require('../wrappers/ReplicateWrapper');
const AWSEndpointWrapper = require('../wrappers/AWSEndpointWrapper');
const GPTStreamParser = require('../utils/StreamParser');

const {
ChatGPTInput,
ChatModelInput,
@@ -78,6 +80,38 @@ class Chatbot {
}
}

async *stream(modelInput) {
if (this.provider === SupportedChatModels.OPENAI) {
yield* this._chatGPTStream(modelInput);
} else {
throw new Error("The stream function support only chatGPT, for other providers use chat function.");
}
}

async *_chatGPTStream(modelInput) {
let params;

if (modelInput instanceof ChatModelInput) {
params = modelInput.getChatInput();
params.stream = true;
} else if (typeof modelInput === "object") {
params = modelInput;
params.stream = true;
} else {
throw new Error("Invalid input: Must be an instance of ChatGPTInput or a dictionary");
}

const streamParser = new GPTStreamParser();

const stream = await this.openaiWrapper.generateChatText(params);

// Collect data from the stream
for await (const chunk of stream) {
const chunkText = chunk.toString('utf8');
yield* streamParser.feed(chunkText);
}
}

async _chatGPT(modelInput, functions = null, function_call = null) {
let params;

@@ -105,8 +139,8 @@

async _chatReplicateLLama(modelInput, debugMode) {
let params;
const waitTime = 2000,
maxIterate = 100;
const waitTime = 2500,
maxIterate = 200;
let iteration = 0;

if (modelInput instanceof ChatModelInput) {
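`GPTStreamParser` comes from the new `IntelliNode/utils/StreamParser.js`, one of the 11 changed files that is not expanded in this view, so its exact implementation is not shown here. A rough sketch of the interface `_chatGPTStream` relies on, a `feed(text)` generator that buffers the raw response and yields the `delta.content` pieces found in OpenAI's `data: ...` event-stream lines, might look like this (an illustration under that assumption, not the committed code):

// Hypothetical sketch of the StreamParser interface used by _chatGPTStream above.
// The real utils/StreamParser.js in this commit may differ.
class GPTStreamParser {
  constructor() {
    this.buffer = '';
  }

  // Accept a raw chunk of the HTTP response and yield any complete
  // text deltas found in buffered "data: {...}" lines.
  *feed(chunkText) {
    this.buffer += chunkText;
    const lines = this.buffer.split('\n');
    this.buffer = lines.pop(); // keep any trailing partial line for the next call

    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed.startsWith('data:')) continue;
      const payload = trimmed.slice('data:'.length).trim();
      if (payload === '[DONE]') return;
      try {
        const content = JSON.parse(payload).choices?.[0]?.delta?.content;
        if (content) yield content;
      } catch (err) {
        // Skip fragments that are not valid JSON.
      }
    }
  }
}

module.exports = GPTStreamParser;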
4 changes: 3 additions & 1 deletion IntelliNode/index.js
@@ -34,6 +34,7 @@ const MatchHelpers = require('./utils/MatchHelpers');
const SystemHelper = require('./utils/SystemHelper');
const Prompt = require('./utils/Prompt');
const ProxyHelper = require('./utils/ProxyHelper');
const GPTStreamParser = require('./utils/StreamParser');

module.exports = {
RemoteLanguageModel,
@@ -74,5 +75,6 @@ module.exports = {
Prompt,
LLamaSageInput,
LLMEvaluation,
SemanticSearchPaging
SemanticSearchPaging,
GPTStreamParser
};
2 changes: 1 addition & 1 deletion IntelliNode/package.json
@@ -1,6 +1,6 @@
{
"name": "intellinode",
"version": "1.3.7",
"version": "1.3.8",
"description": "Access various AI models, such as ChatGPT, Llama, Diffusion, Cohere, WaveNet, and others",
"main": "index.js",
"keywords": [
46 changes: 32 additions & 14 deletions IntelliNode/test/integration/Chatbot.test.js
@@ -121,6 +121,23 @@ async function testReplicateLLamaCase1() {
}
}

async function testReplicateLLamaCase2() {
try {
console.log('\nLLama test case 2: \n')
const input = new LLamaReplicateInput("you are helpful coding assistant!",
{model: '34b-code'});
input.addUserMessage("how to develop micro service using node js");

const responses = await replicateBot.chat(input);

responses.forEach((response) => console.log("- " + response));

assert(responses.length > 0, "testReplicateLLamaCase2 response length should be greater than 0");
} catch (error) {
console.error("Test case failed with exception:", error.message);
}
}

async function testSageMakerLLamaCase() {
try {
console.log('\nLLama sagemaker test case 1: \n')
@@ -142,33 +159,34 @@ async function testSageMakerLLamaCase() {

}

async function testReplicateLLamaCase3() {
try {
console.log('\nLLama test case 3: \n')
const input = new LLamaReplicateInput("you are helpful coding assistant!",
{model: '34b-code'});
input.addUserMessage("how to develop micro service using node js");

const responses = await replicateBot.chat(input);

responses.forEach((response) => console.log("- " + response));

assert(responses.length > 0, "testReplicateLLamaCase1 response length should be greater than 0");
} catch (error) {
console.error("Test case failed with exception:", error.message);
}
}

async function testStreamOpenaiChatGPTCase1() {
console.log('\nchat test case 1: \n')
const mode = "You are a helpful astronomy assistant.";
const input = new ChatGPTInput(mode);
input.addUserMessage("what is the story of batman the dark night with less than 10 words");

let fullText = '';
for await (const contentText of bot.stream(input)) {
fullText += contentText;
console.log('Received chunk:', contentText);
}

console.log('full stream text: ', fullText)
assert(fullText.length > 0, "testStreamOpenaiChatGPTCase1 response length should be greater than 0");
}

(async () => {

console.log('### Openai model ###')
await testOpenaiChatGPTCase1();
await testOpenaiChatGPTCase2();
await testOpenaiChatGPTCase3();
// streaming
await testStreamOpenaiChatGPTCase1();

console.log('### Replicate llama model ###')
await testReplicateLLamaCase1();
await testReplicateLLamaCase2();
await testReplicateLLamaCase3();

console.log('### SageMaker llama model ###')
//await testSageMakerLLamaCase();
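The integration test above consumes `bot.stream(input)` in a `for await` loop and prints each chunk to the console. In a server setting, the same generator could be forwarded to the client as chunks arrive; the Express route and server-sent-events framing below are illustrative assumptions, not part of this commit:

// Illustrative only: forwarding bot.stream() over server-sent events.
// Express, the route shape, and the Chatbot constructor defaults are assumptions.
const express = require('express');
const { Chatbot, ChatGPTInput } = require('intellinode');

const app = express();
app.use(express.json());

app.post('/chat/stream', async (req, res) => {
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');

  const bot = new Chatbot(process.env.OPENAI_API_KEY);
  const input = new ChatGPTInput('You are a helpful assistant.');
  input.addUserMessage(req.body.message);

  try {
    for await (const chunk of bot.stream(input)) {
      res.write(`data: ${JSON.stringify(chunk)}\n\n`);
    }
  } finally {
    res.end();
  }
});

app.listen(3000);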
