about summary refs log tree commit diff
path: root/apps/backend/src/workflow
diff options
context:
space:
mode:
authorDhravya Shah <[email protected]>2025-01-26 12:39:31 -0700
committerDhravya Shah <[email protected]>2025-01-26 12:39:31 -0700
commit119280aeb6e1fcb9a3ecce112f95904306daf93c (patch)
tree7372f496c531b7161aa9be6baaa98434cbffd445 /apps/backend/src/workflow
parentimport tools: CSV and markdown (obsidian) (diff)
downloadsupermemory-119280aeb6e1fcb9a3ecce112f95904306daf93c.tar.xz
supermemory-119280aeb6e1fcb9a3ecce112f95904306daf93c.zip
change embedding model
Diffstat (limited to 'apps/backend/src/workflow')
-rw-r--r--apps/backend/src/workflow/index.ts32
1 file changed, 4 insertions, 28 deletions
diff --git a/apps/backend/src/workflow/index.ts b/apps/backend/src/workflow/index.ts
index 41c73015..24a1ff3e 100644
--- a/apps/backend/src/workflow/index.ts
+++ b/apps/backend/src/workflow/index.ts
@@ -142,36 +142,12 @@ export class ContentWorkflow extends WorkflowEntrypoint<Env, WorkflowParams> {
);
}
- const model = openai(this.env, this.env.OPEN_AI_API_KEY).embedding(
- "text-embedding-3-large",
- {
- dimensions: 1536,
- }
- );
-
- // Step 3: Create chunks from the content.
- const embeddings = await step.do(
- "create embeddings",
- {
- retries: {
- backoff: "constant",
- delay: "10 seconds",
- limit: 7,
- },
- timeout: "2 minutes",
- },
- async () => {
- const { embeddings }: { embeddings: Array<number>[] } = await embedMany(
- {
- model,
- values: chunked,
- }
- );
- return embeddings;
- }
- );
+ const {data: embeddings} = await this.env.AI.run("@cf/baai/bge-base-en-v1.5", {
+ text: chunked,
+ });
+
// Step 4: Prepare chunk data
const chunkInsertData: ChunkInsert[] = await step.do(
"prepare chunk data",