import { Paper, Stack } from "@mantine/core";
import type { ChatMessage } from "gpt-tokenizer/GptEncoding";
import { lazy, memo, Suspense } from "react";

// Lazy-load the markdown renderer so it is split out of the initial bundle.
const FormattedMarkdown = lazy(() => import("./FormattedMarkdown"));

interface MessageListProps {
  messages: ChatMessage[];
}

interface MessageProps {
  message: ChatMessage;
  index: number;
}

const Message = memo(
  function Message({ message, index }: MessageProps) {
    return (
      <Paper
        key={`${message.role}-${index}`}
        shadow="xs"
        radius="xl"
        p="sm"
        maw="90%"
        style={{
          // Right-align the user's messages, left-align everything else.
          alignSelf: message.role === "user" ? "flex-end" : "flex-start",
        }}
      >
        {/* Show the raw text while the markdown renderer is still loading. */}
        <Suspense fallback={message.content}>
          <FormattedMarkdown>{message.content}</FormattedMarkdown>
        </Suspense>
      </Paper>
    );
  },
  // Re-render a message only when its role or content changes, even if the
  // parent recreates the message objects on every render.
  (prevProps, nextProps) => {
    return (
      prevProps.message.content === nextProps.message.content &&
      prevProps.message.role === nextProps.message.role
    );
  },
);

const MessageList = memo(function MessageList({ messages }: MessageListProps) {
  // The first two messages are never displayed; render nothing until the
  // conversation has gone past them.
  if (messages.length <= 2) return null;
  return (
    <Stack gap="md">
      {messages
        .slice(2)
        // Skip messages that have no content yet.
        .filter((message) => message.content.length > 0)
        .map((message, index) => (
          <Message
            key={`${message.role}-${index}`}
            message={message}
            index={index}
          />
        ))}
    </Stack>
  );
});

export default MessageList;
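
// Usage sketch (illustrative only): a parent view that already holds the
// running ChatMessage[] history could render the list like this. `ChatView`
// and `history` are placeholder names, not part of this module.
//
//   import MessageList from "./MessageList";
//
//   function ChatView({ history }: { history: ChatMessage[] }) {
//     return <MessageList messages={history} />;
//   }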