aboutsummaryrefslogtreecommitdiff
path: root/apps/docs/integrations/ai-sdk.mdx
blob: 4f70d916327377f346420bb8732b227f2f75126f (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
---
title: "Vercel AI SDK"
sidebarTitle: "Vercel AI SDK"
description: "Use Supermemory with Vercel AI SDK for seamless memory management"
icon: "triangle"
---

The Supermemory AI SDK provides native integration with Vercel's AI SDK through two approaches: **User Profiles** for automatic personalization and **Memory Tools** for agent-based interactions.

<Card title="@supermemory/tools on npm" icon="npm" href="https://www.npmjs.com/package/@supermemory/tools">
    Check out the NPM page for more details
</Card>

## Installation

```bash
npm install @supermemory/tools
```

## Quick Comparison

| Approach | Use Case | Setup |
|----------|----------|-------|
| User Profiles | Personalized LLM responses with automatic user context | Simple middleware |
| Memory Tools | AI agents that need explicit memory control | Tool definitions |

---

## User Profiles with Middleware

Automatically inject user profiles into every LLM call for instant personalization.

```typescript
import { generateText } from "ai"
import { withSupermemory } from "@supermemory/tools/ai-sdk"
import { openai } from "@ai-sdk/openai"

const modelWithMemory = withSupermemory(openai("gpt-5"), "user-123")

const result = await generateText({
  model: modelWithMemory,
  messages: [{ role: "user", content: "What do you know about me?" }]
})
```

<Note>
  **Memory saving is disabled by default.** The middleware only retrieves existing memories. To automatically save new memories:

  ```typescript
  const modelWithMemory = withSupermemory(openai("gpt-5"), "user-123", {
    addMemory: "always"
  })
  ```
</Note>

### Memory Search Modes

**Profile Mode (Default)** - Retrieves the user's complete profile:

```typescript
const model = withSupermemory(openai("gpt-5"), "user-123", { mode: "profile" })
```

**Query Mode** - Searches memories based on the user's message:

```typescript
const model = withSupermemory(openai("gpt-5"), "user-123", { mode: "query" })
```

**Full Mode** - Combines profile AND query-based search:

```typescript
const model = withSupermemory(openai("gpt-5"), "user-123", { mode: "full" })
```

### Custom Prompt Templates

Customize how memories are formatted:

```typescript
import { withSupermemory, type MemoryPromptData } from "@supermemory/tools/ai-sdk"

const claudePrompt = (data: MemoryPromptData) => `
<context>
  <user_profile>
    ${data.userMemories}
  </user_profile>
  <relevant_memories>
    ${data.generalSearchMemories}
  </relevant_memories>
</context>
`.trim()

const model = withSupermemory(anthropic("claude-3-sonnet"), "user-123", {
  mode: "full",
  promptTemplate: claudePrompt
})
```

### Verbose Logging

```typescript
const model = withSupermemory(openai("gpt-5"), "user-123", {
  verbose: true
})
// Console output shows memory retrieval details
```

---

## Memory Tools

Add memory capabilities to AI agents with search, add, and fetch operations.

```typescript
import { streamText } from "ai"
import { createAnthropic } from "@ai-sdk/anthropic"
import { supermemoryTools } from "@supermemory/tools/ai-sdk"

const anthropic = createAnthropic({ apiKey: "YOUR_ANTHROPIC_KEY" })

const result = await streamText({
  model: anthropic("claude-3-sonnet"),
  prompt: "Remember that my name is Alice",
  tools: supermemoryTools("YOUR_SUPERMEMORY_KEY")
})
```

### Available Tools

**Search Memories** - Semantic search through user memories:

```typescript
const result = await streamText({
  model: openai("gpt-5"),
  prompt: "What are my dietary preferences?",
  tools: supermemoryTools("API_KEY")
})
// AI will call: searchMemories({ informationToGet: "dietary preferences" })
```

**Add Memory** - Store new information:

```typescript
const result = await streamText({
  model: anthropic("claude-3-sonnet"),
  prompt: "Remember that I'm allergic to peanuts",
  tools: supermemoryTools("API_KEY")
})
// AI will call: addMemory({ memory: "User is allergic to peanuts" })
```

### Using Individual Tools

For more control, import tools separately and mix them with your own custom tools:

```typescript
import {
  searchMemoriesTool,
  addMemoryTool
} from "@supermemory/tools/ai-sdk"

const result = await streamText({
  model: openai("gpt-5"),
  prompt: "What do you know about me?",
  tools: {
    searchMemories: searchMemoriesTool("API_KEY", { projectId: "personal" }),
    addMemory: addMemoryTool("API_KEY", { projectId: "personal" }),
    createEvent: yourCustomTool, // your own tools work alongside memory tools
  }
})
```

### Tool Results

```typescript
// searchMemories result
{ success: true, results: [...], count: 5 }

// addMemory result
{ success: true, memory: { id: "mem_123", ... } }
```