aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMahesh Sanikommu <[email protected]>2025-10-11 08:03:06 -0700
committerGitHub <[email protected]>2025-10-11 08:03:06 -0700
commit6a6a65a56af9fed0ba9c4e47201bf2068ab43ad2 (patch)
tree340e02b087ccb4222adcf29aae37d26cfce873cf
parentcreate memory adding option in vercel sdk (#484) (diff)
parentfix: side effect removal (diff)
downloadsupermemory-6a6a65a56af9fed0ba9c4e47201bf2068ab43ad2.tar.xz
supermemory-6a6a65a56af9fed0ba9c4e47201bf2068ab43ad2.zip
Merge pull request #485 from supermemoryai/10-10-fix_add_memory_code_params_and_documentation_in_readme
fix: add memory code params and documentation in readme
-rw-r--r--packages/tools/README.md45
-rw-r--r--packages/tools/src/vercel/index.ts1
-rw-r--r--packages/tools/src/vercel/middleware.ts26
-rw-r--r--packages/tools/test/ai-sdk-test.ts3
4 files changed, 53 insertions, 22 deletions
diff --git a/packages/tools/README.md b/packages/tools/README.md
index cdc594b6..2b3f51d9 100644
--- a/packages/tools/README.md
+++ b/packages/tools/README.md
@@ -170,10 +170,37 @@ const result = await generateText({
})
```
-**Combined Options** - Use verbose logging with specific modes:
+#### Automatic Memory Capture
+
+The middleware can automatically save user messages as memories:
+
+**Always Save Memories** - Automatically stores every user message as a memory:
+```typescript
+import { generateText } from "ai"
+import { withSupermemory } from "@supermemory/tools/ai-sdk"
+import { openai } from "@ai-sdk/openai"
+
+const modelWithAutoSave = withSupermemory(openai("gpt-4"), "user-123", {
+ addMemory: "always"
+})
+
+const result = await generateText({
+ model: modelWithAutoSave,
+ messages: [{ role: "user", content: "I prefer React with TypeScript for my projects" }],
+})
+// This message will be automatically saved as a memory
+```
+
+**Never Save Memories (Default)** - Only retrieves memories without storing new ones:
+```typescript
+const modelWithNoSave = withSupermemory(openai("gpt-4"), "user-123")
+```
+
+**Combined Options** - Use verbose logging with specific modes and memory storage:
```typescript
const modelWithOptions = withSupermemory(openai("gpt-4"), "user-123", {
mode: "profile",
+ addMemory: "always",
verbose: true
})
```
@@ -247,6 +274,22 @@ interface SupermemoryToolsConfig {
- **containerTags**: Array of custom container tags (mutually exclusive with projectId)
- **projectId**: Project ID which gets converted to container tag format (mutually exclusive with containerTags)
+### withSupermemory Middleware Options
+
+The `withSupermemory` middleware accepts additional configuration options:
+
+```typescript
+interface WithSupermemoryOptions {
+ verbose?: boolean
+ mode?: "profile" | "query" | "full"
+ addMemory?: "always" | "never"
+}
+```
+
+- **verbose**: Enable detailed logging of memory search and injection process (default: false)
+- **mode**: Memory search mode - "profile" (default), "query", or "full"
+- **addMemory**: Automatic memory storage mode - "always" or "never" (default: "never")
+
## Available Tools
### Search Memories
diff --git a/packages/tools/src/vercel/index.ts b/packages/tools/src/vercel/index.ts
index f145a060..b6871009 100644
--- a/packages/tools/src/vercel/index.ts
+++ b/packages/tools/src/vercel/index.ts
@@ -15,6 +15,7 @@ import { createSupermemoryMiddleware } from "./middleware"
* @param options - Optional configuration options for the middleware
* @param options.verbose - Optional flag to enable detailed logging of memory search and injection process (default: false)
* @param options.mode - Optional mode for memory search: "profile" (default), "query", or "full"
+ * @param options.addMemory - Optional mode for automatic memory storage: "always" or "never" (default: "never")
*
* @returns A wrapped language model that automatically includes relevant memories in prompts
*
diff --git a/packages/tools/src/vercel/middleware.ts b/packages/tools/src/vercel/middleware.ts
index 0caa33ed..86ec7b88 100644
--- a/packages/tools/src/vercel/middleware.ts
+++ b/packages/tools/src/vercel/middleware.ts
@@ -22,12 +22,6 @@ const supermemoryprofilesearch = async (
containerTag: string,
queryText: string,
): Promise<ProfileStructure> => {
- const SUPERMEMORY_API_KEY = process.env.SUPERMEMORY_API_KEY
-
- if (!SUPERMEMORY_API_KEY) {
- throw new Error("SUPERMEMORY_API_KEY is not set")
- }
-
const payload = queryText
? JSON.stringify({
q: queryText,
@@ -42,7 +36,7 @@ const supermemoryprofilesearch = async (
method: "POST",
headers: {
"Content-Type": "application/json",
- Authorization: `Bearer ${SUPERMEMORY_API_KEY}`,
+ Authorization: `Bearer ${process.env.SUPERMEMORY_API_KEY}`,
},
body: payload,
})
@@ -118,7 +112,7 @@ const addSystemPrompt = async (
}
if (systemPromptExists) {
- logger.debug("Appending memories to existing system prompt")
+ logger.debug("Added memories to existing system prompt")
return {
...params,
prompt: params.prompt.map((prompt) =>
@@ -130,7 +124,7 @@ const addSystemPrompt = async (
}
logger.debug(
- "System prompt does not exist, creating system prompt with memories",
+ "System prompt does not exist, created system prompt with memories",
)
return {
...params,
@@ -169,25 +163,17 @@ export const createSupermemoryMiddleware = (
addMemory: "always" | "never" = "never"
): LanguageModelV2Middleware => {
const logger = createLogger(verbose)
-
- const SUPERMEMORY_API_KEY = process.env.SUPERMEMORY_API_KEY
- if (!SUPERMEMORY_API_KEY) {
- throw new Error("SUPERMEMORY_API_KEY is not set")
- }
-
+
const client = new Supermemory({
- apiKey: SUPERMEMORY_API_KEY,
+ apiKey: process.env.SUPERMEMORY_API_KEY,
})
return {
transformParams: async ({ params }) => {
const userMessage = getLastUserMessage(params)
- // Add userMessage to memories based on addMemory setting
if (addMemory === "always" && userMessage && userMessage.trim()) {
- addMemoryTool(client, containerTag, userMessage, logger).catch((error) => {
- logger.error("Failed to create memories", { error })
- })
+ addMemoryTool(client, containerTag, userMessage, logger)
}
if (mode !== "profile") {
diff --git a/packages/tools/test/ai-sdk-test.ts b/packages/tools/test/ai-sdk-test.ts
index 2fafdfc6..99a94618 100644
--- a/packages/tools/test/ai-sdk-test.ts
+++ b/packages/tools/test/ai-sdk-test.ts
@@ -4,7 +4,8 @@ import { openai } from "@ai-sdk/openai"
const modelWithMemory = withSupermemory(openai("gpt-5"), "user_id_life", {
verbose: true,
- mode: "query", // options are profile, query, full
+ mode: "query", // options are profile, query, full (default is profile)
+ addMemory: "always", // options are always, never (default is never)
})
const result = await generateText({