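// Home page for the Web LLM Embed chat UI: wires conversation state through
// HomeContext and renders the Chat component.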
import { useEffect, useRef } from 'react';
import { GetServerSideProps } from 'next';
import Head from 'next/head';
import {
  DEFAULT_SYSTEM_PROMPT,
  DEFAULT_TEMPERATURE,
  saveConversation,
  saveConversations,
  updateConversation,
  useCreateReducer,
} from '@/utils';
// Assumed exports, following the chatbot-ui convention of keeping model
// metadata and the fallback model id in '@/types/openai'.
import { OpenAIModels, fallbackModelID } from '@/types/openai';
import { Chat } from '@/components/Chat/Chat';
import HomeContext from './home.context';
import { HomeInitialState, initialState } from './home.state';
import { v4 as uuidv4 } from 'uuid';
interface Props {
  // Not currently populated by getServerSideProps below, hence optional.
  serverSideApiKeyIsSet?: boolean;
  serverSidePluginKeysSet?: boolean;
  defaultModelId: string;
}
const Home = ({
defaultModelId,
}: Props) => {
const contextValue = useCreateReducer<HomeInitialState>({
initialState,
});
const {
state: {
lightMode,
conversations,
selectedConversation
},
dispatch,
} = contextValue;
const stopConversationRef = useRef<boolean>(false);
// CONVERSATION SELECTION --------------------------------------------
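  // Make the clicked conversation active and persist the selection.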
const handleSelectConversation = (conversation: any) => {
dispatch({
field: 'selectedConversation',
value: conversation,
});
saveConversation(conversation);
};
// CONVERSATION OPERATIONS --------------------------------------------
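  // Create a new conversation, inheriting the model and temperature from the
  // most recent conversation when one exists.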
const handleNewConversation = () => {
const lastConversation = conversations[conversations.length - 1];
const newConversation: any = {
id: uuidv4(),
name: 'New Conversation',
messages: [],
      // The original quoted these lookups as string literals; they should read
      // the default model's metadata (OpenAIModels shape assumed from chatbot-ui).
      model:
        lastConversation?.model ||
        OpenAIModels[defaultModelId as keyof typeof OpenAIModels],
prompt: DEFAULT_SYSTEM_PROMPT,
temperature: lastConversation?.temperature ?? DEFAULT_TEMPERATURE,
folderId: null,
};
const updatedConversations = [...conversations, newConversation];
dispatch({ field: 'selectedConversation', value: newConversation });
dispatch({ field: 'conversations', value: updatedConversations });
saveConversation(newConversation);
saveConversations(updatedConversations);
dispatch({ field: 'loading', value: false });
};
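  // Apply a single { key, value } patch to a conversation, then update both
  // the selected conversation and the full list in state.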
const handleUpdateConversation = (
conversation: any,
data: any,
) => {
const updatedConversation = {
...conversation,
[data.key]: data.value,
};
const { single, all } = updateConversation(
updatedConversation,
conversations,
);
dispatch({ field: 'selectedConversation', value: single });
dispatch({ field: 'conversations', value: all });
};
// EFFECTS --------------------------------------------
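  // Hide the chatbar on small (mobile) viewports when a conversation is selected.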
useEffect(() => {
if (window.innerWidth < 640) {
dispatch({ field: 'showChatbar', value: false });
}
}, [selectedConversation]);
// RENDER --------------------------------------------
return (
<HomeContext.Provider
value={{
...contextValue,
handleNewConversation,
handleSelectConversation,
handleUpdateConversation,
}}
>
<Head>
<title>Web LLM Embed</title>
<meta name="description" content="Web LLM Embed" />
<meta
name="viewport"
          content="height=device-height, width=device-width, initial-scale=1, user-scalable=no"
/>
<link rel="icon" href="/favicon.ico" />
</Head>
<main
className={`flex h-screen w-screen flex-col text-sm text-white dark:text-white ${lightMode}`}
>
<div className="flex h-full w-full pt-[48px] sm:pt-0">
<div className="flex flex-1">
<Chat stopConversationRef={stopConversationRef} />
</div>
</div>
</main>
</HomeContext.Provider>
);
};
export default Home;
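// Server-side props: currently only supplies the default model id.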
export const getServerSideProps: GetServerSideProps = async () => {
  // fallbackModelID is assumed to be exported from '@/types/openai'; the
  // original hard-coded the identifier's name as a string literal.
  const defaultModelId = fallbackModelID;
  return {
    props: {
      defaultModelId,
    },
  };
};