import { usePubSub } from "create-pubsub/react";
import { Suspense, lazy, useMemo } from "react";
import {
modelLoadingProgressPubSub,
modelSizeInMegabytesPubSub,
queryPubSub,
responsePubSub,
settingsPubSub,
textGenerationStatePubSub,
} from "../../modules/pubSub";
// Code-split each piece of the response UI: a chunk is only fetched the first
// time its corresponding generation state is rendered, keeping the initial
// bundle small. Each lazy component is wrapped in <Suspense> at its use site.
const AiResponseContent = lazy(() => import("./AiResponseContent"));
const PreparingContent = lazy(() => import("./PreparingContent"));
const LoadingModelContent = lazy(() => import("./LoadingModelContent"));
const ChatInterface = lazy(() => import("./ChatInterface"));
const AiModelDownloadAllowanceContent = lazy(
  () => import("./AiModelDownloadAllowanceContent"),
);
export default function AiResponseSection() {
const [query] = usePubSub(queryPubSub);
const [response] = usePubSub(responsePubSub);
const [textGenerationState, setTextGenerationState] = usePubSub(
textGenerationStatePubSub,
);
const [modelLoadingProgress] = usePubSub(modelLoadingProgressPubSub);
const [settings] = usePubSub(settingsPubSub);
const [modelSizeInMegabytes] = usePubSub(modelSizeInMegabytesPubSub);
return useMemo(() => {
if (!settings.enableAiResponse || textGenerationState === "idle") {
return null;
}
const generatingStates = [
"generating",
"interrupted",
"completed",
"failed",
];
if (generatingStates.includes(textGenerationState)) {
return (
<>
<Suspense>
<AiResponseContent
textGenerationState={textGenerationState}
response={response}
setTextGenerationState={setTextGenerationState}
/>
</Suspense>
{textGenerationState === "completed" && (
<Suspense>
<ChatInterface initialQuery={query} initialResponse={response} />
</Suspense>
)}
</>
);
}
if (textGenerationState === "loadingModel") {
return (
<Suspense>
<LoadingModelContent
modelLoadingProgress={modelLoadingProgress}
modelSizeInMegabytes={modelSizeInMegabytes}
/>
</Suspense>
);
}
if (textGenerationState === "preparingToGenerate") {
return (
<Suspense>
<PreparingContent textGenerationState={textGenerationState} />
</Suspense>
);
}
if (textGenerationState === "awaitingSearchResults") {
return (
<Suspense>
<PreparingContent textGenerationState={textGenerationState} />
</Suspense>
);
}
if (textGenerationState === "awaitingModelDownloadAllowance") {
return (
<Suspense>
<AiModelDownloadAllowanceContent />
</Suspense>
);
}
return null;
}, [
settings.enableAiResponse,
textGenerationState,
response,
query,
modelLoadingProgress,
modelSizeInMegabytes,
setTextGenerationState,
]);
}