"use client"
import { useState, useEffect, useCallback, useRef } from "react"
import type { UIMessage } from "@ai-sdk/react"
import { motion, AnimatePresence } from "motion/react"
import { useChat } from "@ai-sdk/react"
import { DefaultChatTransport } from "ai"
import NovaOrb from "@/components/nova/nova-orb"
import { Button } from "@ui/components/button"
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "@ui/components/dialog"
import { ScrollArea } from "@ui/components/scroll-area"
import {
Check,
ChevronDownIcon,
HistoryIcon,
PanelRightCloseIcon,
Plus,
SearchIcon,
SquarePenIcon,
Trash2,
XIcon,
} from "lucide-react"
import { formatDistanceToNow } from "date-fns"
import { cn } from "@lib/utils"
import { dmSansClassName } from "@/lib/fonts"
import ChatInput from "./input"
import ChatModelSelector from "./model-selector"
import { GradientLogo, LogoBgGradient } from "@ui/assets/Logo"
import { useProject } from "@/stores"
import type { ModelId } from "@/lib/models"
import { SuperLoader } from "../../superloader"
import { UserMessage } from "./message/user-message"
import { AgentMessage } from "./message/agent-message"
import { ChainOfThought } from "./input/chain-of-thought"
import { useIsMobile } from "@hooks/use-mobile"
import { analytics } from "@/lib/analytics"
import { generateId } from "@lib/generate-id"
// Fallback prompts shown as clickable suggestions in the empty chat state
// when the caller does not pass `emptyStateSuggestions` to ChatSidebar.
const DEFAULT_SUGGESTIONS = [
"Show me all content related to Supermemory.",
"Summarize the key ideas from My Gita.",
"Which memories connect design and AI?",
"What are the main themes across my memories?",
]
/**
 * Empty-state placeholder rendered before the first message: a prompt line
 * plus one clickable chip per suggestion.
 *
 * NOTE(review): the JSX in this copy of the file is incomplete — element
 * tags appear to have been stripped by whatever produced this copy (the
 * `.map` emits nothing per suggestion). Recover the original markup from
 * version control before editing this render body.
 *
 * @param onSuggestionClick invoked with the suggestion text when a chip is clicked
 * @param suggestions optional prompt list; defaults to DEFAULT_SUGGESTIONS
 */
function ChatEmptyStatePlaceholder({
onSuggestionClick,
suggestions = DEFAULT_SUGGESTIONS,
}: {
onSuggestionClick: (suggestion: string) => void
suggestions?: string[]
}) {
return (
Ask me anything about your memories...
{suggestions.map((suggestion) => (
))}
)
}
/**
 * Nova chat sidebar: a collapsible AI-chat panel backed by @ai-sdk/react's
 * useChat hook. Handles message streaming, auto-generated follow-up
 * questions, thread history (list / load / delete), scroll-position
 * tracking, a scroll-linked panel height on desktop, and a global "t"
 * keyboard shortcut that starts a new chat.
 *
 * NOTE(review): generic type arguments appear to have been stripped from
 * this copy of the file (e.g. `useState(null)`, bare `Record`,
 * `useRef>(new Set())`), and the JSX in the return statement is incomplete.
 * Recover the originals from version control rather than re-inferring them
 * by hand.
 *
 * @param isChatOpen whether the panel is expanded
 * @param setIsChatOpen caller-owned open/close setter
 * @param queuedMessage optional message to auto-send once the chat opens
 * @param onConsumeQueuedMessage called after the queued message is sent
 * @param emptyStateSuggestions optional suggestion prompts for the empty state
 */
export function ChatSidebar({
isChatOpen,
setIsChatOpen,
queuedMessage,
onConsumeQueuedMessage,
emptyStateSuggestions,
}: {
isChatOpen: boolean
setIsChatOpen: (open: boolean) => void
queuedMessage?: string | null
onConsumeQueuedMessage?: () => void
emptyStateSuggestions?: string[]
}) {
const isMobile = useIsMobile()
// Draft text in the chat input box.
const [input, setInput] = useState("")
const [selectedModel, setSelectedModel] = useState("gemini-2.5-pro")
// Id of the message whose copy action fired within the last 2s (drives a
// transient check-mark). Presumably string | null — generic stripped; TODO confirm.
const [copiedMessageId, setCopiedMessageId] = useState(null)
// Assistant message currently hovered, for showing message actions.
const [hoveredMessageId, setHoveredMessageId] = useState(null)
// Per-message like/dislike state, keyed by message id.
const [messageFeedback, setMessageFeedback] = useState<
Record
>({})
// Id of the single message whose memory list is expanded (null = none open).
const [expandedMemories, setExpandedMemories] = useState(null)
// Generated follow-up questions per assistant message id.
const [followUpQuestions, setFollowUpQuestions] = useState<
Record
>({})
// In-flight follow-up generation flags per assistant message id.
const [loadingFollowUps, setLoadingFollowUps] = useState<
Record
>({})
const [isInputExpanded, setIsInputExpanded] = useState(false)
const [isScrolledToBottom, setIsScrolledToBottom] = useState(true)
// Panel height offset; interpolates from 95 down to 15 as the window
// scrolls (desktop only — see the effect below).
const [heightOffset, setHeightOffset] = useState(95)
const [isHistoryOpen, setIsHistoryOpen] = useState(false)
// Thread list for the history dialog.
const [threads, setThreads] = useState<
Array<{ id: string; title: string; createdAt: string; updatedAt: string }>
>([])
const [isLoadingThreads, setIsLoadingThreads] = useState(false)
// Thread id awaiting delete confirmation (two-step delete UX).
const [confirmingDeleteId, setConfirmingDeleteId] = useState(
null,
)
// Assistant message ids whose follow-up questions still need generating.
// NOTE(review): stray ">" suggests a stripped generic (likely useRef<Set<string>>).
const pendingFollowUpGenerations = useRef>(new Set())
const messagesContainerRef = useRef(null)
const { selectedProject } = useProject()
// Chat/thread id; regenerated on "new chat", replaced when loading a thread.
const [currentChatId, setCurrentChatId] = useState(() => generateId())
// Messages fetched for a thread, held until useChat has re-keyed to that
// thread id (see the flush effect below) so setMessages hits the right chat.
const [pendingThreadLoad, setPendingThreadLoad] = useState<{
id: string
messages: UIMessage[]
} | null>(null)
// Adjust chat height based on scroll position (desktop only)
useEffect(() => {
if (isMobile) return
const handleWindowScroll = () => {
const scrollThreshold = 80
const scrollY = window.scrollY
// Linear interpolation from 95 to 15 over the first 80px of scroll.
const progress = Math.min(scrollY / scrollThreshold, 1)
const newOffset = 95 - progress * (95 - 15)
setHeightOffset(newOffset)
}
window.addEventListener("scroll", handleWindowScroll, { passive: true })
handleWindowScroll()
return () => window.removeEventListener("scroll", handleWindowScroll)
}, [isMobile])
const { messages, sendMessage, status, setMessages, stop } = useChat({
id: currentChatId ?? undefined,
transport: new DefaultChatTransport({
api: `${process.env.NEXT_PUBLIC_BACKEND_URL}/chat/v2`,
credentials: "include",
body: {
metadata: {
projectId: selectedProject,
model: selectedModel,
chatId: currentChatId,
},
},
}),
onFinish: async (result) => {
if (result.message.role !== "assistant") return
// Mark this message as needing follow-up generation
// We'll generate it after the message is fully in the messages array
if (result.message.id) {
pendingFollowUpGenerations.current.add(result.message.id)
}
},
})
// Flush a deferred thread load once useChat is keyed to the loaded thread id.
useEffect(() => {
if (pendingThreadLoad && currentChatId === pendingThreadLoad.id) {
setMessages(pendingThreadLoad.messages)
setPendingThreadLoad(null)
}
}, [currentChatId, pendingThreadLoad, setMessages])
// Generate follow-up questions after assistant messages are complete
useEffect(() => {
const generateFollowUps = async () => {
// Find assistant messages that need follow-up generation
const messagesToProcess = messages.filter(
(msg) =>
msg.role === "assistant" &&
pendingFollowUpGenerations.current.has(msg.id) &&
!followUpQuestions[msg.id] &&
!loadingFollowUps[msg.id],
)
for (const message of messagesToProcess) {
// Get complete text from the message
const assistantText = message.parts
.filter((p) => p.type === "text")
.map((p) => p.text)
.join(" ")
.trim()
// Only generate if we have substantial text (at least 50 chars)
// This ensures the message is complete, not just the first chunk
// Also check if status is idle to ensure streaming is complete
if (
assistantText.length < 50 ||
status === "streaming" ||
status === "submitted"
) {
continue
}
// Mark as processing
pendingFollowUpGenerations.current.delete(message.id)
setLoadingFollowUps((prev) => ({
...prev,
[message.id]: true,
}))
try {
// Get recent messages for context
const recentMessages = messages.slice(-5).map((msg) => ({
role: msg.role,
content: msg.parts
.filter((p) => p.type === "text")
.map((p) => p.text)
.join(" "),
}))
const response = await fetch(
`${process.env.NEXT_PUBLIC_BACKEND_URL}/chat/follow-ups`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
},
credentials: "include",
body: JSON.stringify({
messages: recentMessages,
assistantResponse: assistantText,
}),
},
)
if (response.ok) {
const data = await response.json()
if (data.questions && Array.isArray(data.questions)) {
setFollowUpQuestions((prev) => ({
...prev,
[message.id]: data.questions,
}))
}
}
} catch (error) {
console.error("Failed to generate follow-up questions:", error)
} finally {
setLoadingFollowUps((prev) => ({
...prev,
[message.id]: false,
}))
}
}
}
// Only generate if not currently streaming or submitted
// Small delay to ensure message is fully processed
if (status !== "streaming" && status !== "submitted") {
const timeoutId = setTimeout(() => {
generateFollowUps()
}, 300)
return () => clearTimeout(timeoutId)
}
}, [messages, followUpQuestions, loadingFollowUps, status])
// The list counts as "at bottom" when within 20px of the scroll end.
const checkIfScrolledToBottom = useCallback(() => {
if (!messagesContainerRef.current) return
const container = messagesContainerRef.current
const { scrollTop, scrollHeight, clientHeight } = container
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
const isAtBottom = distanceFromBottom <= 20
setIsScrolledToBottom(isAtBottom)
}, [])
const scrollToBottom = useCallback(() => {
if (messagesContainerRef.current) {
messagesContainerRef.current.scrollTop =
messagesContainerRef.current.scrollHeight
setIsScrolledToBottom(true)
}
}, [])
// Send the drafted input unless it is empty or a response is in flight.
const handleSend = () => {
if (!input.trim() || status === "submitted" || status === "streaming")
return
analytics.chatMessageSent({ source: "typed" })
sendMessage({ text: input })
setInput("")
scrollToBottom()
}
// Enter sends; Shift+Enter falls through (newline in the input).
const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault()
handleSend()
}
}
const toggleChat = () => {
setIsChatOpen(!isChatOpen)
}
// Copy message text to the clipboard and show a check-mark for 2 seconds.
const handleCopyMessage = useCallback((messageId: string, text: string) => {
analytics.chatMessageCopied({ message_id: messageId })
navigator.clipboard.writeText(text)
setCopiedMessageId(messageId)
setTimeout(() => setCopiedMessageId(null), 2000)
}, [])
// Toggle like on a message; analytics fires only on the off -> on transition.
const handleLikeMessage = useCallback(
(messageId: string) => {
const wasLiked = messageFeedback[messageId] === "like"
setMessageFeedback((prev) => ({
...prev,
[messageId]: prev[messageId] === "like" ? null : "like",
}))
if (!wasLiked) {
analytics.chatMessageLiked({ message_id: messageId })
}
},
[messageFeedback],
)
// Toggle dislike; analytics fires only on the off -> on transition.
const handleDislikeMessage = useCallback(
(messageId: string) => {
const wasDisliked = messageFeedback[messageId] === "dislike"
setMessageFeedback((prev) => ({
...prev,
[messageId]: prev[messageId] === "dislike" ? null : "dislike",
}))
if (!wasDisliked) {
analytics.chatMessageDisliked({ message_id: messageId })
}
},
[messageFeedback],
)
// Expand/collapse a message's memory list; only one may be open at a time.
const handleToggleMemories = useCallback((messageId: string) => {
setExpandedMemories((prev) => {
const isExpanding = prev !== messageId
if (isExpanding) {
analytics.chatMemoryExpanded({ message_id: messageId })
} else {
analytics.chatMemoryCollapsed({ message_id: messageId })
}
return prev === messageId ? null : messageId
})
}, [])
// Start a fresh chat: a new id re-keys useChat, then clear messages/input.
const handleNewChat = useCallback(() => {
analytics.newChatCreated()
const newId = generateId()
setCurrentChatId(newId)
setMessages([])
setInput("")
}, [setMessages])
// Load the thread list for the history dialog, scoped to the selected project.
const fetchThreads = useCallback(async () => {
setIsLoadingThreads(true)
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_BACKEND_URL}/chat/threads?projectId=${selectedProject}`,
{ credentials: "include" },
)
if (response.ok) {
const data = await response.json()
setThreads(data.threads || [])
}
} catch (error) {
console.error("Failed to fetch threads:", error)
} finally {
setIsLoadingThreads(false)
}
}, [selectedProject])
// Fetch a thread's messages and defer applying them until useChat has
// re-keyed to the thread id (see the pendingThreadLoad flush effect above).
const loadThread = useCallback(async (threadId: string) => {
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_BACKEND_URL}/chat/threads/${threadId}`,
{ credentials: "include" },
)
if (response.ok) {
const data = await response.json()
const uiMessages = data.messages.map(
(m: {
id: string
role: string
parts: unknown
createdAt: string
}) => ({
id: m.id,
role: m.role,
parts: m.parts || [],
createdAt: new Date(m.createdAt),
}),
)
setCurrentChatId(threadId)
setPendingThreadLoad({ id: threadId, messages: uiMessages })
analytics.chatThreadLoaded({ thread_id: threadId })
setIsHistoryOpen(false)
setConfirmingDeleteId(null)
}
} catch (error) {
console.error("Failed to load thread:", error)
}
}, [])
// Delete a thread; if it is the active chat, fall back to a new chat.
const deleteThread = useCallback(
async (threadId: string) => {
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_BACKEND_URL}/chat/threads/${threadId}`,
{ method: "DELETE", credentials: "include" },
)
if (response.ok) {
analytics.chatThreadDeleted({ thread_id: threadId })
setThreads((prev) => prev.filter((t) => t.id !== threadId))
if (currentChatId === threadId) {
handleNewChat()
}
}
} catch (error) {
console.error("Failed to delete thread:", error)
} finally {
setConfirmingDeleteId(null)
}
},
[currentChatId, handleNewChat],
)
// e.g. "3 hours ago", for thread timestamps in the history list.
const formatRelativeTime = (isoString: string): string => {
return formatDistanceToNow(new Date(isoString), { addSuffix: true })
}
// Global "t" shortcut starts a new chat while the panel is open, unless an
// editable element (input/textarea/contenteditable) has focus.
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
const activeElement = document.activeElement as HTMLElement | null
const isInEditableContext =
activeElement?.tagName === "INPUT" ||
activeElement?.tagName === "TEXTAREA" ||
activeElement?.isContentEditable ||
activeElement?.closest('[contenteditable="true"]')
if (
e.key.toLowerCase() === "t" &&
!e.metaKey &&
!e.ctrlKey &&
!e.altKey &&
isChatOpen &&
!isInEditableContext
) {
e.preventDefault()
handleNewChat()
}
}
window.addEventListener("keydown", handleKeyDown)
return () => window.removeEventListener("keydown", handleKeyDown)
}, [isChatOpen, handleNewChat])
// Send queued message when chat opens
useEffect(() => {
if (
isChatOpen &&
queuedMessage &&
status !== "submitted" &&
status !== "streaming"
) {
analytics.chatMessageSent({ source: "highlight" })
sendMessage({ text: queuedMessage })
onConsumeQueuedMessage?.()
}
}, [isChatOpen, queuedMessage, status, sendMessage, onConsumeQueuedMessage])
// Scroll to bottom when a new user message is added
useEffect(() => {
const lastMessage = messages[messages.length - 1]
if (lastMessage?.role === "user" && messagesContainerRef.current) {
messagesContainerRef.current.scrollTop =
messagesContainerRef.current.scrollHeight
setIsScrolledToBottom(true)
}
// Always check scroll position when messages change
checkIfScrolledToBottom()
}, [messages, checkIfScrolledToBottom])
// Add scroll event listener to track scroll position
useEffect(() => {
const container = messagesContainerRef.current
if (!container) return
const handleScroll = () => {
requestAnimationFrame(() => {
checkIfScrolledToBottom()
})
}
container.addEventListener("scroll", handleScroll, { passive: true })
// Initial check with a small delay to ensure DOM is ready
setTimeout(() => {
checkIfScrolledToBottom()
}, 100)
// Also observe resize to detect content height changes
const resizeObserver = new ResizeObserver(() => {
requestAnimationFrame(() => {
checkIfScrolledToBottom()
})
})
resizeObserver.observe(container)
return () => {
container.removeEventListener("scroll", handleScroll)
resizeObserver.disconnect()
}
}, [checkIfScrolledToBottom])
// NOTE(review): the JSX below is incomplete in this copy of the file —
// element tags and attributes appear to have been stripped. Recover the
// original markup from version control before editing this render body.
return (
{!isChatOpen ? (
Chat with Nova
) : (
{!isMobile && (
)}
{!isMobile && (
T
)}
{isMobile ? (
) : (
)}
{isInputExpanded && (
)}
{messages.length === 0 && (
{
analytics.chatSuggestedQuestionClicked()
analytics.chatMessageSent({ source: "suggested" })
sendMessage({ text: suggestion })
}}
suggestions={emptyStateSuggestions}
/>
)}
0
? "flex flex-col space-y-3 min-h-full justify-end pt-14"
: "",
)}
>
{messages.map((message, index) => (
// biome-ignore lint/a11y/noStaticElementInteractions: Hover detection for message actions
message.role === "assistant" &&
setHoveredMessageId(message.id)
}
onMouseLeave={() =>
message.role === "assistant" && setHoveredMessageId(null)
}
>
{message.role === "user" ? (
) : (
{
analytics.chatFollowUpClicked({
thread_id: currentChatId || undefined,
})
analytics.chatMessageSent({ source: "follow_up" })
setInput(question)
}}
/>
)}
))}
{(status === "submitted" || status === "streaming") &&
messages[messages.length - 1]?.role === "user" && (
)}
{!isScrolledToBottom && messages.length > 0 && (
)}
setInput(e.target.value)}
onSend={handleSend}
onStop={stop}
onKeyDown={handleKeyDown}
isResponding={status === "submitted" || status === "streaming"}
activeStatus={
status === "submitted"
? "Thinking..."
: status === "streaming"
? "Structuring response..."
: "Waiting for input..."
}
onExpandedChange={setIsInputExpanded}
chainOfThoughtComponent={
messages.length > 0 ? (
) : null
}
/>
)}
)
}