import { useEffect, useRef } from 'react';
import { GetServerSideProps } from 'next';
import Head from 'next/head';
import {
DEFAULT_SYSTEM_PROMPT,
DEFAULT_TEMPERATURE,
saveConversation,
saveConversations,
updateConversation,
useCreateReducer
} from '@/utils';
import { Chat } from '@/components/Chat/Chat';
import HomeContext from './home.context';
import { HomeInitialState, initialState } from './home.state';
import { v4 as uuidv4 } from 'uuid';
/**
 * Server-rendered props for the Home page.
 * Supplied by {@link getServerSideProps}.
 */
interface Props {
  serverSideApiKeyIsSet: boolean;
  serverSidePluginKeysSet: boolean;
  /** Model identifier resolved server-side; getServerSideProps returns a string. */
  defaultModelId: string;
}
/**
 * Home page: hosts the chat UI and owns conversation state.
 * State lives in a reducer (via useCreateReducer) and is shared with children
 * through HomeContext along with the conversation handlers defined here.
 */
const Home = ({
  defaultModelId,
}: Props) => {
  const contextValue = useCreateReducer<HomeInitialState>({
    initialState,
  });
  const {
    state: {
      lightMode,
      conversations,
      selectedConversation
    },
    dispatch,
  } = contextValue;

  // Flipped by child components to request that an in-flight response stop.
  const stopConversationRef = useRef<boolean>(false);

  // CONVERSATION OPERATIONS --------------------------------------------

  // Selects a conversation and persists the selection.
  const handleSelectConversation = (conversation: any) => {
    dispatch({
      field: 'selectedConversation',
      value: conversation,
    });
    saveConversation(conversation);
  };

  // Creates an empty conversation, inheriting model and temperature from the
  // most recent conversation when one exists, then selects and persists it.
  const handleNewConversation = () => {
    const lastConversation = conversations[conversations.length - 1];
    const newConversation: any = {
      id: uuidv4(),
      name: 'New Conversation',
      messages: [],
      // NOTE(review): the quoted values below look like placeholders for
      // OpenAIModels[defaultModelId].* — OpenAIModels is not imported in this
      // file, so the literal strings are preserved as-is; confirm intent and
      // wire up the real model lookup.
      model: lastConversation?.model || {
        id: "OpenAIModels[defaultModelId].id",
        name: "OpenAIModels[defaultModelId].name",
        maxLength: "OpenAIModels[defaultModelId].maxLength",
        tokenLimit: "OpenAIModels[defaultModelId].tokenLimit",
      },
      prompt: DEFAULT_SYSTEM_PROMPT,
      temperature: lastConversation?.temperature ?? DEFAULT_TEMPERATURE,
      folderId: null,
    };
    const updatedConversations = [...conversations, newConversation];
    dispatch({ field: 'selectedConversation', value: newConversation });
    dispatch({ field: 'conversations', value: updatedConversations });
    saveConversation(newConversation);
    saveConversations(updatedConversations);
    dispatch({ field: 'loading', value: false });
  };

  // Applies a single { key, value } patch to `conversation`, then syncs both
  // the selected conversation and the full conversation list.
  const handleUpdateConversation = (
    conversation: any,
    data: any,
  ) => {
    const updatedConversation = {
      ...conversation,
      [data.key]: data.value,
    };
    const { single, all } = updateConversation(
      updatedConversation,
      conversations,
    );
    dispatch({ field: 'selectedConversation', value: single });
    dispatch({ field: 'conversations', value: all });
  };

  // EFFECTS --------------------------------------------

  // Hide the chatbar on narrow (mobile) viewports when the selection changes.
  // `dispatch` added to the deps for exhaustive-deps correctness; reducer
  // dispatchers are presumably stable across renders — confirm useCreateReducer.
  useEffect(() => {
    if (window.innerWidth < 640) {
      dispatch({ field: 'showChatbar', value: false });
    }
  }, [selectedConversation, dispatch]);

  return (
    <HomeContext.Provider
      value={{
        ...contextValue,
        handleNewConversation,
        handleSelectConversation,
        handleUpdateConversation,
      }}
    >
      <Head>
        <title>Web LLM Embed</title>
        <meta name="description" content="Web LLM Embed" />
        {/* Fixed malformed content value (stray space before the first comma). */}
        <meta
          name="viewport"
          content="height=device-height, width=device-width, initial-scale=1, user-scalable=no"
        />
        <link rel="icon" href="/favicon.ico" />
      </Head>
      <main
        className={`flex h-screen w-screen flex-col text-sm text-white dark:text-white ${lightMode}`}
      >
        <div className="flex h-full w-full pt-[48px] sm:pt-0">
          <div className="flex flex-1">
            <Chat stopConversationRef={stopConversationRef} />
          </div>
        </div>
      </main>
    </HomeContext.Provider>
  );
};
export default Home;
/**
 * Supplies server-side props for this page. The model id is currently a
 * hard-coded fallback; the incoming context (locale etc.) is not consulted.
 */
export const getServerSideProps: GetServerSideProps = async ({ locale }) => {
  const defaultModelId = 'fallbackModelID';
  return { props: { defaultModelId } };
};