refactor(ui): add request loading indicator to chat playground (TabbyML#1762)

* scroll to bottom

* feat: chat playground loading and scroll

* [autofix.ci] apply automated fixes

* rename

* skeleton

* [autofix.ci] apply automated fixes

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
liangfung and autofix-ci[bot] authored Apr 4, 2024
1 parent 1e0d0e3 commit ece03ce
Showing 4 changed files with 72 additions and 14 deletions.
15 changes: 13 additions & 2 deletions ee/tabby-ui/components/chat-list.tsx

@@ -2,14 +2,19 @@ import { type Message } from 'ai'
 
 import { MessageActionType } from '@/lib/types'
 import { Separator } from '@/components/ui/separator'
-import { ChatMessage } from '@/components/chat-message'
+import { ChatMessage, MessagePendingIndicator } from '@/components/chat-message'
 
 export interface ChatList {
   messages: Message[]
   handleMessageAction: (messageId: string, action: MessageActionType) => void
+  isStreamResponsePending?: boolean
 }
 
-export function ChatList({ messages, handleMessageAction }: ChatList) {
+export function ChatList({
+  messages,
+  handleMessageAction,
+  isStreamResponsePending
+}: ChatList) {
   if (!messages.length) {
     return null
   }
@@ -27,6 +32,12 @@ export function ChatList({ messages, handleMessageAction }: ChatList) {
           )}
         </div>
       ))}
+      {isStreamResponsePending && (
+        <>
+          <Separator className="my-4 md:my-8" />
+          <MessagePendingIndicator />
+        </>
+      )}
     </div>
   )
 }
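
The new prop fills the visual gap between submitting a request and receiving the first streamed token, which `isLoading` alone cannot distinguish. An illustrative timeline (not part of this commit):

* request sent, nothing received yet: `isStreamResponsePending` is true, so a separator and `MessagePendingIndicator` render below the last message
* first token arrives: the flag flips to false and the streaming assistant message replaces the indicator
* stream finishes: `isLoading` also returns to false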
21 changes: 16 additions & 5 deletions ee/tabby-ui/components/chat-message.tsx

@@ -13,6 +13,7 @@ import { CodeBlock } from '@/components/ui/codeblock'
 import { ChatMessageActions } from '@/components/chat-message-actions'
 import { MemoizedReactMarkdown } from '@/components/markdown'
 
+import { Skeleton } from './ui/skeleton'
 import { UserAvatar } from './user-avatar'
 
 export interface ChatMessageProps {
@@ -30,11 +31,7 @@ export function ChatMessage({
       className={cn('group relative mb-4 flex items-start md:-ml-12')}
       {...props}
     >
-      <div
-        className={cn(
-          'shrink-0 select-none rounded-full border bg-background shadow'
-        )}
-      >
+      <div className="shrink-0 select-none rounded-full border bg-background shadow">
         {message.role === 'user' ? (
           <UserAvatar className="h-8 w-8" />
         ) : (
@@ -92,6 +89,20 @@
   )
 }
 
+export function MessagePendingIndicator() {
+  return (
+    <div className="mb-4 flex items-start md:-ml-12">
+      <div className="shrink-0 select-none rounded-full border bg-background shadow">
+        <IconTabby className="h-8 w-8" />
+      </div>
+      <div className="ml-4 flex-1 space-y-2 px-1">
+        <Skeleton className="h-3 w-full" />
+        <Skeleton className="h-3 w-full" />
+      </div>
+    </div>
+  )
+}
+
 function IconTabby({ className }: { className?: string }) {
   return (
     <Image
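
`MessagePendingIndicator` mirrors the `ChatMessage` layout (avatar column plus content column), so the placeholder rows line up with the assistant reply that will replace them. The `Skeleton` primitive it imports from `./ui/skeleton` is not part of this diff; assuming tabby-ui follows the stock shadcn/ui pattern that its `components/ui` layout suggests, it is roughly:

```tsx
import * as React from 'react'

import { cn } from '@/lib/utils'

// A pulsing placeholder block; size and shape come from className
function Skeleton({
  className,
  ...props
}: React.HTMLAttributes<HTMLDivElement>) {
  return (
    <div
      className={cn('animate-pulse rounded-md bg-muted', className)}
      {...props}
    />
  )
}

export { Skeleton }
```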
44 changes: 40 additions & 4 deletions ee/tabby-ui/components/chat.tsx

@@ -6,6 +6,7 @@ import type { Message, UseChatHelpers } from 'ai/react'
 import { find, findIndex } from 'lodash-es'
 import { toast } from 'sonner'
 
+import { useLatest } from '@/lib/hooks/use-latest'
 import { usePatchFetch } from '@/lib/hooks/use-patch-fetch'
 import { useStore } from '@/lib/hooks/use-store'
 import { addChat, updateMessages } from '@/lib/stores/chat-actions'
@@ -28,8 +29,16 @@ function ChatRenderer(
   { id, initialMessages, className }: ChatProps,
   ref: React.ForwardedRef<ChatRef>
 ) {
-  usePatchFetch()
   const chats = useStore(useChatStore, state => state.chats)
+  // When the response status text is 200, the variable should be false
+  const [isStreamResponsePending, setIsStreamResponsePending] =
+    React.useState(false)
+
+  const onStreamToken = useLatest(() => {
+    if (isStreamResponsePending) {
+      setIsStreamResponsePending(false)
+    }
+  })
 
   const useChatHelpers = useChat({
     initialMessages,
@@ -44,6 +53,15 @@
     }
   })
 
+  usePatchFetch({
+    onStart: () => {
+      setIsStreamResponsePending(true)
+    },
+    onToken: () => {
+      onStreamToken.current()
+    }
+  })
+
   const {
     messages,
     append,
@@ -57,6 +75,11 @@
 
   const [selectedMessageId, setSelectedMessageId] = React.useState<string>()
 
+  const onStop = () => {
+    setIsStreamResponsePending(false)
+    stop()
+  }
+
   const onRegenerateResponse = (messageId: string) => {
     const messageIndex = findIndex(messages, { id: messageId })
     const prevMessage = messages?.[messageIndex - 1]
@@ -100,6 +123,13 @@
     }
   }
 
+  const scrollToBottom = (behavior?: ScrollBehavior) => {
+    window.scrollTo({
+      top: document.body.offsetHeight,
+      behavior
+    })
+  }
+
   const handleSubmit = async (value: string) => {
     if (findIndex(chats, { id }) === -1) {
       addChat(id, truncateText(value))
@@ -122,12 +152,17 @@
   }, [messages])
 
   React.useEffect(() => {
-    const scrollHeight = document.documentElement.scrollHeight
-    window.scrollTo(0, scrollHeight)
+    scrollToBottom()
 
     return () => stop()
   }, [])
 
+  React.useLayoutEffect(() => {
+    if (isStreamResponsePending) {
+      scrollToBottom('smooth')
+    }
+  }, [isStreamResponsePending])
+
   React.useImperativeHandle(
     ref,
     () => {
@@ -145,6 +180,7 @@
       <ChatList
         messages={messages}
         handleMessageAction={handleMessageAction}
+        isStreamResponsePending={isStreamResponsePending}
       />
       <ChatScrollAnchor trackVisibility={isLoading} />
     </>
@@ -157,7 +193,7 @@
       className="fixed inset-x-0 bottom-0 lg:ml-[280px]"
      id={id}
       isLoading={isLoading}
-      stop={stop}
+      stop={onStop}
       append={append}
       reload={reload}
       messages={messages}
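
The `useLatest` hook imported from `@/lib/hooks/use-latest` is not shown in this diff. The name matches a common "always-fresh ref" pattern; a minimal sketch under that assumption:

```tsx
import { useRef } from 'react'

// Keeps a ref pointing at the most recent value so that long-lived
// callbacks can read fresh state without being re-registered.
export function useLatest<T>(value: T) {
  const ref = useRef<T>(value)
  ref.current = value
  return ref
}
```

The indirection matters because `usePatchFetch` installs its callbacks inside a patched `window.fetch`: invoking `onStreamToken.current()` on each token reads the latest closure, so the handler sees the current `isStreamResponsePending` rather than a value captured at mount.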
6 changes: 3 additions & 3 deletions ee/tabby-ui/lib/hooks/use-patch-fetch.ts

@@ -1,9 +1,9 @@
 import { useEffect } from 'react'
-import { OpenAIStream, StreamingTextResponse } from 'ai'
+import { OpenAIStream, OpenAIStreamCallbacks, StreamingTextResponse } from 'ai'
 
 import fetcher from '../tabby/fetcher'
 
-export function usePatchFetch() {
+export function usePatchFetch(callbacks?: OpenAIStreamCallbacks) {
   useEffect(() => {
     if (!window._originFetch) {
       window._originFetch = window.fetch
@@ -26,7 +26,7 @@ export function usePatchFetch() {
       headers,
       customFetch: fetch,
       responseFormatter(response) {
-        const stream = OpenAIStream(response, undefined)
+        const stream = OpenAIStream(response, callbacks)
         return new StreamingTextResponse(stream)
       }
     })
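
With this change, `usePatchFetch` forwards the caller's callbacks into `OpenAIStream`, whose `OpenAIStreamCallbacks` include `onStart` (run as the stream starts) and `onToken` (run for each decoded token). A standalone sketch of that contract, independent of the Tabby wiring:

```ts
import { OpenAIStream, StreamingTextResponse } from 'ai'
import type { OpenAIStreamCallbacks } from 'ai'

const callbacks: OpenAIStreamCallbacks = {
  onStart: () => {
    // stream opened: a UI could show a pending indicator here
  },
  onToken: token => {
    // tokens are arriving: a UI could hide the indicator on the first one
  }
}

// response: a fetch Response from an OpenAI-compatible completions endpoint
function formatResponse(response: Response) {
  const stream = OpenAIStream(response, callbacks)
  return new StreamingTextResponse(stream)
}
```

In the commit, `onStart` sets `isStreamResponsePending` to true and the first `onToken` clears it, which bounds the lifetime of the skeleton indicator.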
