aboutsummaryrefslogtreecommitdiff
path: root/apps
diff options
context:
space:
mode:
authorDhravya Shah <[email protected]>2024-07-09 14:00:04 -0500
committerGitHub <[email protected]>2024-07-09 14:00:04 -0500
commitb487eea712ba4f4f7934dff16a603af9febbdf3f (patch)
tree51821d4814a534813c14f23061ad030be95b38ea /apps
parentMerge pull request #109 from meetpateltech/terms (diff)
parentupdate Error messages and handling (diff)
downloadsupermemory-b487eea712ba4f4f7934dff16a603af9febbdf3f.tar.xz
supermemory-b487eea712ba4f4f7934dff16a603af9febbdf3f.zip
Merge pull request #111 from Dhravya/kush/misc-be-improvements
show chunks separations in database, proper error handling and prettier config
Diffstat (limited to 'apps')
-rw-r--r--apps/cf-ai-backend/src/helper.ts10
-rw-r--r--apps/cf-ai-backend/src/index.ts11
-rw-r--r--apps/web/app/actions/doers.ts67
3 files changed, 51 insertions, 37 deletions
diff --git a/apps/cf-ai-backend/src/helper.ts b/apps/cf-ai-backend/src/helper.ts
index 98e38ce8..8502ca35 100644
--- a/apps/cf-ai-backend/src/helper.ts
+++ b/apps/cf-ai-backend/src/helper.ts
@@ -150,6 +150,7 @@ export async function batchCreateChunksAndEmbeddings({
const allIds = await context.env.KV.list({ prefix: uuid });
+ let pageContent = "";
// If some chunks for that content already exist, we'll just update the metadata to include
// the user.
if (allIds.keys.length > 0) {
@@ -168,12 +169,15 @@ export async function batchCreateChunksAndEmbeddings({
return acc;
}, {}),
};
-
+ const content =
+ vector.metadata.content.toString().split("Content: ")[1] ||
+ vector.metadata.content;
+ pageContent += `<---chunkId: ${vector.id}\n${content}\n---->`;
return vector;
});
await context.env.VECTORIZE_INDEX.upsert(newVectors);
- return;
+ return pageContent; //Return the page content that goes to d1 db
}
for (let i = 0; i < chunks.length; i++) {
@@ -209,5 +213,7 @@ export async function batchCreateChunksAndEmbeddings({
console.log("Docs added: ", docs);
await context.env.KV.put(chunkId, ourID);
+ pageContent += `<---chunkId: ${chunkId}\n${chunk}\n---->`;
}
+ return pageContent; // Return the pageContent that goes to the d1 db
}
diff --git a/apps/cf-ai-backend/src/index.ts b/apps/cf-ai-backend/src/index.ts
index 04f80f0d..0844c22e 100644
--- a/apps/cf-ai-backend/src/index.ts
+++ b/apps/cf-ai-backend/src/index.ts
@@ -15,7 +15,6 @@ import { zValidator } from "@hono/zod-validator";
import chunkText from "./utils/chonker";
import { systemPrompt, template } from "./prompts/prompt1";
import { swaggerUI } from "@hono/swagger-ui";
-import { createOpenAI } from "@ai-sdk/openai";
const app = new Hono<{ Bindings: Env }>();
@@ -65,14 +64,18 @@ app.post("/api/add", zValidator("json", vectorObj), async (c) => {
const { store } = await initQuery(c);
console.log(body.spaces);
- await batchCreateChunksAndEmbeddings({
+ const chunks = chunkText(body.pageContent, 1536);
+ if (chunks.length > 20) {
+ return c.json({ status: "error", message: "We are unable to process documents this size just yet, try something smaller" });
+ }
+ const chunkedInput = await batchCreateChunksAndEmbeddings({
store,
body,
- chunks: chunkText(body.pageContent, 1536),
+ chunks: chunks,
context: c,
});
- return c.json({ status: "ok" });
+ return c.json({ status: "ok", chunkedInput });
});
app.post(
diff --git a/apps/web/app/actions/doers.ts b/apps/web/app/actions/doers.ts
index 280ee244..fcbab0f8 100644
--- a/apps/web/app/actions/doers.ts
+++ b/apps/web/app/actions/doers.ts
@@ -7,7 +7,6 @@ import {
chatHistory,
chatThreads,
contentToSpace,
- sessions,
space,
spacesAccess,
storedContent,
@@ -19,11 +18,9 @@ import { Tweet } from "react-tweet/api";
import { getMetaData } from "@/lib/get-metadata";
import { and, eq, inArray, sql } from "drizzle-orm";
import { LIMITS } from "@/lib/constants";
-import { z } from "zod";
-import { AddFromAPIType, ChatHistory } from "@repo/shared-types";
+import { ChatHistory } from "@repo/shared-types";
import { decipher } from "@/server/encrypt";
import { redirect } from "next/navigation";
-import { ensureAuth } from "../api/ensureAuth";
import { tweetToMd } from "@repo/shared-types/utils";
export const createSpace = async (
@@ -265,6 +262,36 @@ export const createMemory = async (input: {
let contentId: number | undefined;
+ const response = (await vectorSaveResponse.json()) as {
+ status: string;
+ chunkedInput: string;
+ message?: string;
+ };
+
+ try {
+ if (response.status !== "ok") {
+ if (response.status === "error") {
+ return {
+ success: false,
+ data: 0,
+ error: response.message,
+ };
+ } else {
+ return {
+ success: false,
+ data: 0,
+ error: `Failed to save to vector store. Backend returned error: ${response.message}`,
+ };
+ }
+ }
+ } catch (e) {
+ return {
+ success: false,
+ data: 0,
+ error: `Failed to save to vector store. Backend returned error: ${e}`,
+ };
+ }
+
const saveToDbUrl =
(metadata.baseUrl.split("#supermemory-user-")[0] ?? metadata.baseUrl) +
"#supermemory-user-" +
@@ -275,7 +302,7 @@ export const createMemory = async (input: {
const insertResponse = await db
.insert(storedContent)
.values({
- content: pageContent,
+ content: response.chunkedInput,
title: metadata.title,
description: metadata.description,
url: saveToDbUrl,
@@ -349,32 +376,10 @@ export const createMemory = async (input: {
);
}
- try {
- const response = await vectorSaveResponse.json();
-
- const expectedResponse = z.object({ status: z.literal("ok") });
-
- const parsedResponse = expectedResponse.safeParse(response);
-
- if (!parsedResponse.success) {
- return {
- success: false,
- data: 0,
- error: `Failed to save to vector store. Backend returned error: ${parsedResponse.error.message}`,
- };
- }
-
- return {
- success: true,
- data: 1,
- };
- } catch (e) {
- return {
- success: false,
- data: 0,
- error: `Failed to save to vector store. Backend returned error: ${e}`,
- };
- }
+ return {
+ success: true,
+ data: 1,
+ };
};
export const createChatThread = async (