about summary refs log tree commit diff
path: root/apps/cf-ai-backend/src/routes/chat.ts
diff options
context:
space:
mode:
Diffstat (limited to 'apps/cf-ai-backend/src/routes/chat.ts')
-rw-r--r--  apps/cf-ai-backend/src/routes/chat.ts  77
1 file changed, 43 insertions, 34 deletions
diff --git a/apps/cf-ai-backend/src/routes/chat.ts b/apps/cf-ai-backend/src/routes/chat.ts
index 95788d03..75e298b8 100644
--- a/apps/cf-ai-backend/src/routes/chat.ts
+++ b/apps/cf-ai-backend/src/routes/chat.ts
@@ -1,28 +1,28 @@
-import { Content, GenerativeModel } from "@google/generative-ai";
-import { OpenAIEmbeddings } from "../OpenAIEmbedder";
-import { CloudflareVectorizeStore } from "@langchain/cloudflare";
-import { Request } from "@cloudflare/workers-types";
+import { Content, GenerativeModel } from '@google/generative-ai';
+import { OpenAIEmbeddings } from '../OpenAIEmbedder';
+import { CloudflareVectorizeStore } from '@langchain/cloudflare';
+import { Request } from '@cloudflare/workers-types';
export async function POST(request: Request, _: CloudflareVectorizeStore, embeddings: OpenAIEmbeddings, model: GenerativeModel, env?: Env) {
const queryparams = new URL(request.url).searchParams;
- const query = queryparams.get("q");
- const topK = parseInt(queryparams.get("topK") ?? "5");
- const user = queryparams.get("user")
- const spaces = queryparams.get("spaces")
- const spacesArray = spaces ? spaces.split(",") : undefined
+ const query = queryparams.get('q');
+ const topK = parseInt(queryparams.get('topK') ?? '5');
+ const user = queryparams.get('user');
+ const spaces = queryparams.get('spaces');
+ const spacesArray = spaces ? spaces.split(',') : undefined;
- const sourcesOnly = (queryparams.get("sourcesOnly") ?? "false")
+ const sourcesOnly = queryparams.get('sourcesOnly') ?? 'false';
if (!user) {
- return new Response(JSON.stringify({ message: "Invalid User" }), { status: 400 });
+ return new Response(JSON.stringify({ message: 'Invalid User' }), { status: 400 });
}
if (!query) {
- return new Response(JSON.stringify({ message: "Invalid Query" }), { status: 400 });
+ return new Response(JSON.stringify({ message: 'Invalid Query' }), { status: 400 });
}
const filter: VectorizeVectorMetadataFilter = {
- user
- }
+ user,
+ };
const responses: VectorizeMatches = { matches: [], count: 0 };
@@ -34,12 +34,12 @@ export async function POST(request: Request, _: CloudflareVectorizeStore, embedd
const resp = await env!.VECTORIZE_INDEX.query(queryAsVector, {
topK,
- filter
+ filter,
});
if (resp.count > 0) {
- responses.matches.push(...resp.matches)
- responses.count += resp.count
+ responses.matches.push(...resp.matches);
+ responses.count += resp.count;
}
}
} else {
@@ -47,13 +47,13 @@ export async function POST(request: Request, _: CloudflareVectorizeStore, embedd
const resp = await env!.VECTORIZE_INDEX.query(queryAsVector, {
topK,
filter: {
- user
- }
+ user,
+ },
});
if (resp.count > 0) {
- responses.matches.push(...resp.matches)
- responses.count += resp.count
+ responses.matches.push(...resp.matches);
+ responses.count += resp.count;
}
}
@@ -61,27 +61,36 @@ export async function POST(request: Request, _: CloudflareVectorizeStore, embedd
// return new Response(JSON.stringify({ message: "No Results Found" }), { status: 404 });
// }
- const highScoreIds = responses.matches.filter(({ score }) => score > 0.35).map(({ id }) => id)
+ const highScoreIds = responses.matches.filter(({ score }) => score > 0.35).map(({ id }) => id);
- if (sourcesOnly === "true") {
+ if (sourcesOnly === 'true') {
return new Response(JSON.stringify({ ids: highScoreIds }), { status: 200 });
}
- const vec = await env!.VECTORIZE_INDEX.getByIds(highScoreIds)
+ const vec = await env!.VECTORIZE_INDEX.getByIds(highScoreIds);
- const preparedContext = vec.map(({ metadata }) => `Website title: ${metadata!.title}\nDescription: ${metadata!.description}\nURL: ${metadata!.url}\nContent: ${metadata!.text}`).join("\n\n");
+ const preparedContext = vec
+ .map(
+ ({ metadata }) =>
+ `Website title: ${metadata!.title}\nDescription: ${metadata!.description}\nURL: ${metadata!.url}\nContent: ${metadata!.text}`,
+ )
+ .join('\n\n');
- const body = await request.json() as {
- chatHistory?: Content[]
+ const body = (await request.json()) as {
+ chatHistory?: Content[];
};
const defaultHistory = [
{
- role: "user",
- parts: [{ text: `You are an agent that summarizes a page based on the query. don't say 'based on the context'. I expect you to be like a 'Second Brain'. you will be provided with the context (old saved posts) and questions. Answer accordingly. Answer in markdown format` }],
+ role: 'user',
+ parts: [
+ {
+ text: `You are an agent that summarizes a page based on the query. don't say 'based on the context'. I expect you to be like a 'Second Brain'. you will be provided with the context (old saved posts) and questions. Answer accordingly. Answer in markdown format`,
+ },
+ ],
},
{
- role: "model",
+ role: 'model',
parts: [{ text: "Ok, I am a personal assistant, and will act as a second brain to help with user's queries." }],
},
] as Content[];
@@ -100,14 +109,14 @@ export async function POST(request: Request, _: CloudflareVectorizeStore, embedd
const converter = new TextEncoder();
for await (const chunk of output.stream) {
const chunkText = await chunk.text();
- const encodedChunk = converter.encode("data: " + JSON.stringify({ "response": chunkText }) + "\n\n");
+ const encodedChunk = converter.encode('data: ' + JSON.stringify({ response: chunkText }) + '\n\n');
controller.enqueue(encodedChunk);
}
- const doneChunk = converter.encode("data: [DONE]");
+ const doneChunk = converter.encode('data: [DONE]');
controller.enqueue(doneChunk);
controller.close();
- }
- })
+ },
+ }),
);
return response;
}