Commit: refactored ai backend code for composability
Showing 6 changed files with 192 additions and 141 deletions.
@@ -0,0 +1,7 @@
interface Env {
	VECTORIZE_INDEX: VectorizeIndex;
	AI: Fetcher;
	SECURITY_KEY: string;
	OPENAI_API_KEY: string;
	GOOGLE_AI_API_KEY: string;
}
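The VECTORIZE_INDEX and AI entries are Workers bindings, while the key fields are plain strings; in a typical Workers setup the API keys would be provided as secrets (for example with `wrangler secret put OPENAI_API_KEY`) rather than committed. That is a general Workers convention and an assumption here, not something shown in this diff.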
@@ -0,0 +1,29 @@
import { CloudflareVectorizeStore } from '@langchain/cloudflare';
import * as apiAdd from './routes/add';
import * as apiQuery from './routes/query';
import * as apiAsk from './routes/ask';
import { OpenAIEmbeddings } from './OpenAIEmbedder';
import { GenerativeModel } from '@google/generative-ai';
import { Request } from '@cloudflare/workers-types';

type RouteHandler = (request: Request, store: CloudflareVectorizeStore, embeddings: OpenAIEmbeddings, model: GenerativeModel, env: Env, ctx?: ExecutionContext) => Promise<Response>;

const routeMap = new Map<string, Record<string, RouteHandler>>();

routeMap.set('/add', {
	POST: apiAdd.POST,
});

routeMap.set('/query', {
	GET: apiQuery.GET,
});

routeMap.set('/ask', {
	POST: apiAsk.POST,
});

// Add more route mappings as needed
// routeMap.set('/api/otherRoute', { ... });

export default routeMap;
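To see how this map makes the backend composable, here is a minimal dispatch sketch (not part of this commit): the worker entry point builds the shared store, embeddings, and model, then looks up the handler by pathname and HTTP method. The './routes' import path, the OpenAIEmbeddings constructor arguments, and the Gemini model name are assumptions.

import { CloudflareVectorizeStore } from '@langchain/cloudflare';
import { GoogleGenerativeAI } from '@google/generative-ai';
import { OpenAIEmbeddings } from './OpenAIEmbedder';
import routeMap from './routes'; // assumed path to the route map above

export default {
	async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
		// Shared dependencies handed to every route handler.
		const embeddings = new OpenAIEmbeddings({ apiKey: env.OPENAI_API_KEY }); // assumed constructor shape
		const store = new CloudflareVectorizeStore(embeddings, { index: env.VECTORIZE_INDEX });
		const model = new GoogleGenerativeAI(env.GOOGLE_AI_API_KEY).getGenerativeModel({ model: 'gemini-pro' }); // model name is an assumption

		// Look up the handler for this path and HTTP method; 404 otherwise.
		const handler = routeMap.get(new URL(request.url).pathname)?.[request.method];
		if (!handler) {
			return new Response(JSON.stringify({ message: 'Not Found' }), { status: 404 });
		}
		return handler(request, store, embeddings, model, env, ctx);
	},
};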
@@ -0,0 +1,36 @@
import { Request } from "@cloudflare/workers-types";
import { type CloudflareVectorizeStore } from "@langchain/cloudflare";

export async function POST(request: Request, store: CloudflareVectorizeStore) {
	const body = await request.json() as {
		pageContent: string,
		title?: string,
		description?: string,
		category?: string,
		url: string,
		user: string
	};

	if (!body.pageContent || !body.url) {
		return new Response(JSON.stringify({ message: "Invalid Page Content" }), { status: 400 });
	}

	const newPageContent = `Title: ${body.title}\nDescription: ${body.description}\nURL: ${body.url}\nContent: ${body.pageContent}`;

	await store.addDocuments([
		{
			pageContent: newPageContent,
			metadata: {
				title: body.title ?? "",
				description: body.description ?? "",
				category: body.category ?? "",
				url: body.url,
				user: body.user,
			},
		},
	], {
		ids: [`${body.url}`]
	});

	return new Response(JSON.stringify({ message: "Document Added" }), { status: 200 });
}
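For reference, a request to this route might look like the following sketch; the worker hostname is a placeholder, and any authentication tied to SECURITY_KEY is not shown in this diff.

await fetch('https://<your-worker>.workers.dev/add', {
	method: 'POST',
	headers: { 'Content-Type': 'application/json' },
	body: JSON.stringify({
		pageContent: 'Full text of the page...',
		title: 'Example page',
		description: 'Short summary of the page',
		category: 'docs',
		url: 'https://example.com/page',
		user: 'user-123',
	}),
});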
@@ -0,0 +1,35 @@
import { GenerativeModel } from "@google/generative-ai";
import { OpenAIEmbeddings } from "../OpenAIEmbedder";
import { CloudflareVectorizeStore } from "@langchain/cloudflare";
import { Request } from "@cloudflare/workers-types";

export async function POST(request: Request, _: CloudflareVectorizeStore, embeddings: OpenAIEmbeddings, model: GenerativeModel, env?: Env) {
	const body = await request.json() as {
		query: string
	};

	if (!body.query) {
		return new Response(JSON.stringify({ message: "Invalid Query" }), { status: 400 });
	}

	const prompt = `You are an agent that answers a question based on the query. don't say 'based on the context'.\n\n Context:\n${body.query} \nAnswer this question based on the context. Question: ${body.query}\nAnswer:`;
	const output = await model.generateContentStream(prompt);

	const response = new Response(
		new ReadableStream({
			async start(controller) {
				const converter = new TextEncoder();
				for await (const chunk of output.stream) {
					const chunkText = await chunk.text();
					console.log(chunkText);
					const encodedChunk = converter.encode("data: " + JSON.stringify({ "response": chunkText }) + "\n\n");
					controller.enqueue(encodedChunk);
				}
				const doneChunk = converter.encode("data: [DONE]");
				controller.enqueue(doneChunk);
				controller.close();
			}
		})
	);
	return response;
}
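The handler streams the answer back in an SSE-style framing (`data: {"response": "..."}\n\n`, ending with `data: [DONE]`). A minimal client-side read might look like this sketch; the worker hostname and query text are placeholders.

const res = await fetch('https://<your-worker>.workers.dev/ask', {
	method: 'POST',
	headers: { 'Content-Type': 'application/json' },
	body: JSON.stringify({ query: 'What does this page say about pricing?' }),
});
const reader = res.body!.getReader();
const decoder = new TextDecoder();
while (true) {
	const { value, done } = await reader.read();
	if (done) break;
	// Each decoded chunk carries one or more `data: {"response": "..."}` events.
	console.log(decoder.decode(value));
}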
@@ -0,0 +1,72 @@
import { GenerativeModel } from "@google/generative-ai";
import { OpenAIEmbeddings } from "../OpenAIEmbedder";
import { CloudflareVectorizeStore } from "@langchain/cloudflare";
import { Request } from "@cloudflare/workers-types";

export async function GET(request: Request, _: CloudflareVectorizeStore, embeddings: OpenAIEmbeddings, model: GenerativeModel, env?: Env) {
	const queryparams = new URL(request.url).searchParams;
	const query = queryparams.get("q");
	const topK = parseInt(queryparams.get("topK") ?? "5");
	const user = queryparams.get("user");

	const sourcesOnly = (queryparams.get("sourcesOnly") ?? "false");

	if (!user) {
		return new Response(JSON.stringify({ message: "Invalid User" }), { status: 400 });
	}

	if (!query) {
		return new Response(JSON.stringify({ message: "Invalid Query" }), { status: 400 });
	}

	const filter: VectorizeVectorMetadataFilter = {
		user: {
			$eq: user
		}
	};

	const queryAsVector = await embeddings.embedQuery(query);

	const resp = await env!.VECTORIZE_INDEX.query(queryAsVector, {
		topK,
		filter
	});

	if (resp.count === 0) {
		return new Response(JSON.stringify({ message: "No Results Found" }), { status: 400 });
	}

	const highScoreIds = resp.matches.filter(({ score }) => score > 0.3).map(({ id }) => id);

	if (sourcesOnly === "true") {
		return new Response(JSON.stringify({ ids: highScoreIds }), { status: 200 });
	}

	const vec = await env!.VECTORIZE_INDEX.getByIds(highScoreIds);

	if (vec.length === 0 || !vec[0].metadata) {
		return new Response(JSON.stringify({ message: "No Results Found" }), { status: 400 });
	}

	const preparedContext = vec.slice(0, 3).map(({ metadata }) => `Website title: ${metadata!.title}\nDescription: ${metadata!.description}\nURL: ${metadata!.url}\nContent: ${metadata!.text}`).join("\n\n");

	const prompt = `You are an agent that summarizes a page based on the query. Be direct and concise, don't say 'based on the context'.\n\n Context:\n${preparedContext} \nAnswer this question based on the context. Question: ${query}\nAnswer:`;
	const output = await model.generateContentStream(prompt);

	const response = new Response(
		new ReadableStream({
			async start(controller) {
				const converter = new TextEncoder();
				for await (const chunk of output.stream) {
					const chunkText = await chunk.text();
					const encodedChunk = converter.encode("data: " + JSON.stringify({ "response": chunkText }) + "\n\n");
					controller.enqueue(encodedChunk);
				}
				const doneChunk = converter.encode("data: [DONE]");
				controller.enqueue(doneChunk);
				controller.close();
			}
		})
	);
	return response;
}
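For reference, a query to this route could look like `GET /query?q=vector+databases&user=user-123&topK=5`; adding `sourcesOnly=true` returns only the ids of the matching documents instead of a streamed summary.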