feat: moved to streamText, preventing flickering

NathanBrodin committed Dec 1, 2024
1 parent fa9fc99 commit b5cc4d8
Showing 7 changed files with 48 additions and 63 deletions.
5 changes: 4 additions & 1 deletion app/(chat)/conversations/[conversationId]/page.tsx
@@ -10,7 +10,10 @@ export default async function ConversationPage({ params }: { params: Promise<{ c
return (
<div className="p-4">
{messages.map((message) => (
- <Message message={{ ...message, display: <Content content={message.display} /> }} key={message.id} />
+ <Message
+   message={{ ...message, display: <Content content={message.display} duration={0} /> }}
+   key={message.id}
+ />
))}
{messages.length === 0 && (
<div className="flex h-full w-full flex-1 items-center justify-center">
20 changes: 12 additions & 8 deletions components/chat.tsx
@@ -2,6 +2,7 @@

import { Geo } from "@vercel/edge"
import { generateId } from "ai"
+ import { readStreamableValue } from "ai/rsc"
import { AnimatePresence } from "framer-motion"
import { useState } from "react"
import { Conversation } from "@/components/conversation"
@@ -11,6 +12,7 @@ import { Separator } from "@/components/ui/separator"
import { useActions, useUIState } from "@/hooks/use-ai"
import { UIState } from "@/lib/chat/types"
import { Question } from "@/lib/questions/types"
+ import { Content } from "./content"
import InfoDialog from "./info-dialog"
import { Loader } from "./loader"

@@ -49,14 +51,16 @@ export default function Chat({ questions, location }: ChatProps) {
// Get the assistant's response
const result = await continueConversation(value, location)

- setMessages([
-   ...newMessages,
-   {
-     id: assistantMessageId,
-     role: "assistant",
-     display: result,
-   },
- ])
+ let textContent = ""
+
+ for await (const delta of readStreamableValue(result)) {
+   textContent = `${textContent}${delta}`
+
+   setMessages([
+     ...newMessages,
+     { id: assistantMessageId, role: "assistant", display: <Content content={textContent} /> },
+   ])
+ }
} catch (error) {
setMessages([
...newMessages,
10 changes: 5 additions & 5 deletions components/content.tsx
@@ -1,17 +1,17 @@
"use client"

- import { StreamableValue } from "ai/rsc"
import remarkGfm from "remark-gfm"
import { CodeBlock, Pre } from "@/components/ui/code"
- import { useStreamableText } from "@/hooks/use-streamable-text"
+ import { useAnimatedText } from "@/hooks/use-animated-text"
import { MemoizedReactMarkdown } from "./markdown"

type ContentProps = {
-   content: string | StreamableValue<string>
+   content: string
+   duration?: number
}

- export function Content({ content }: ContentProps) {
-   const text = useStreamableText(content)
+ export function Content({ content, duration }: ContentProps) {
+   const text = useAnimatedText(content, duration)

return (
<MemoizedReactMarkdown
6 changes: 3 additions & 3 deletions hooks/use-animated-text.tsx
@@ -5,7 +5,7 @@ import { useEffect, useState } from "react"

const delimiter = "" // or " " to split by word

- export function useAnimatedText(text: string) {
+ export function useAnimatedText(text: string, duration = 2) {
const [cursor, setCursor] = useState(0)
const [startingCursor, setStartingCursor] = useState(0)
const [prevText, setPrevText] = useState(text)
@@ -18,15 +18,15 @@ export function useAnimatedText(text: string) {
useEffect(() => {
const controls = animate(startingCursor, text.split(delimiter).length, {
// Tweak the animation here
-     duration: 4,
+     duration: duration,
ease: "easeOut",
onUpdate(latest) {
setCursor(Math.floor(latest))
},
})

return () => controls.stop()
- }, [startingCursor, text])
+ }, [startingCursor, text, duration])

return text.split(delimiter).slice(0, cursor).join(delimiter)
}
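
For reference, a minimal sketch of how the reworked hook might be consumed in a client component (the component and prop names below are hypothetical, not part of this commit):

"use client"

import { useAnimatedText } from "@/hooks/use-animated-text"

// Hypothetical consumer: types out whatever text it receives over ~1.5 seconds.
// Passing duration={0} (as the conversations page now does via <Content duration={0} />)
// effectively skips the typewriter effect for messages that are already complete.
export function AnimatedParagraph({ text }: { text: string }) {
  const animated = useAnimatedText(text, 1.5)
  return <p>{animated}</p>
}
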
22 changes: 0 additions & 22 deletions hooks/use-streamable-text.tsx

This file was deleted.

45 changes: 22 additions & 23 deletions lib/chat/actions.tsx
@@ -3,21 +3,19 @@ import "server-only"
import { anthropic } from "@ai-sdk/anthropic"
import { captureException } from "@sentry/nextjs"
import { Geo } from "@vercel/edge"
import { generateId } from "ai"
import { createAI, createStreamableValue, getMutableAIState, streamUI } from "ai/rsc"
import { generateId, streamText } from "ai"
import { createAI, createStreamableValue, getMutableAIState, StreamableValue } from "ai/rsc"
import { headers } from "next/headers"
- import { ReactNode } from "react"
- import { Content } from "@/components/content"
import { systemPrompt } from "./prompt"
import { AIActions, AIState, ServerMessage, UIState } from "./types"
import { saveChat } from "../db/actions"
import { rateLimit } from "../rate-limit"

- export async function continueConversation(input: string, location: Geo): Promise<ReactNode> {
+ export async function continueConversation(input: string, location: Geo): Promise<StreamableValue<any, any>> {
"use server"

// Implement rate limit based on the request's IP
- const header = await headers()
+ const header = headers()
const ip = (header.get("x-forwarded-for") ?? "127.0.0.2").split(",")[0]

const { success } = await rateLimit(ip)
@@ -30,26 +28,27 @@ export async function continueConversation(input: string, location: Geo): Promis
// Update the AI state with the new user message.
history.update([...(history.get() as ServerMessage[]), { role: "user", content: input }])

const stream = createStreamableValue("")
const node = <Content content={stream.value} />
const stream = createStreamableValue()

try {
- const result = await streamUI({
-   model: anthropic("claude-3-5-haiku-latest"),
-   system: systemPrompt(location),
-   messages: history.get() as ServerMessage[],
-   text: ({ content, done }) => {
-     if (done) {
-       stream.done()
-       history.done([...(history.get() as ServerMessage[]), { role: "assistant", content }])
-     } else {
-       stream.update(content)
-     }
+ ;(async () => {
+   const { textStream } = streamText({
+     model: anthropic("claude-3-5-haiku-latest"),
+     system: systemPrompt(location),
+     messages: history.get() as ServerMessage[],
+     onFinish(event) {
+       history.done([...(history.get() as ServerMessage[]), { role: "assistant", content: event.text }])
+     },
+   })

-     return node
-   },
- })
- return result.value
+   for await (const text of textStream) {
+     stream.update(text)
+   }
+
+   stream.done()
+ })()
+
+ return stream.value
} catch (error) {
stream.done()
captureException(error)
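
Taken together, the server now streams plain text into a streamable value instead of streaming UI nodes. Below is a condensed sketch of that pattern, not the literal file: conversation history, rate limiting, and error handling from the hunks above are omitted, and the function name streamAnswer and its prompt-only call are assumptions for illustration.

"use server"

import { anthropic } from "@ai-sdk/anthropic"
import { streamText } from "ai"
import { createStreamableValue, StreamableValue } from "ai/rsc"

// Sketch: push raw text deltas to the client as they arrive. The client
// concatenates them and re-renders a single <Content> component, avoiding
// the per-token node replacement that caused the flickering.
export async function streamAnswer(prompt: string): Promise<StreamableValue<string>> {
  const stream = createStreamableValue("")

  ;(async () => {
    const { textStream } = streamText({
      model: anthropic("claude-3-5-haiku-latest"),
      prompt,
    })

    // Each update is yielded to the client by readStreamableValue.
    for await (const delta of textStream) {
      stream.update(delta)
    }

    stream.done()
  })()

  return stream.value
}

On the client (components/chat.tsx above), the returned value is read with readStreamableValue and accumulated into the string passed to <Content />.
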
3 changes: 2 additions & 1 deletion lib/chat/types.ts
@@ -1,4 +1,5 @@
import { Geo } from "@vercel/edge"
+ import { StreamableValue } from "ai/rsc"
import { ReactNode } from "react"

// Define the AI state and UI state types
@@ -18,5 +19,5 @@ export type UIState = ChatMessage[]

// Define the actions type
export type AIActions = {
-   continueConversation: (input: string, location: Geo) => Promise<ReactNode>
+   continueConversation: (input: string, location: Geo) => Promise<StreamableValue<any, any>>
}
