File size: 2,925 Bytes
136f9cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5c221d7
136f9cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import React from "react";
import { useLiveQuery } from "dexie-react-hooks";
import { ChatHistoryDB } from "@/lib/chat/memory";
import { HumanMessage, AIMessageChunk } from "@langchain/core/messages";
import { ChatManager } from "@/lib/chat/manager";
import { IDocument } from "@/lib/document/types";
import { toast } from "sonner";
import { IConfig } from "@/lib/config/types";

// Get the singleton instance of ChatHistoryDB
const chatHistoryDB = ChatHistoryDB.getInstance();

/**
 * Live-updating hook resolving a chat session record by id.
 *
 * Yields `null` for a missing id or the sentinel id "new" (an unsaved
 * session); otherwise the session row from the ChatHistoryDB singleton.
 * Re-queries automatically whenever `id` changes.
 */
export const useChatSession = (id: string | undefined) => {
  return useLiveQuery(async () => {
    const isUnsaved = !id || id === "new";
    if (isUnsaved) {
      return null;
    }
    return chatHistoryDB.sessions.get(id);
  }, [id]);
};

/**
 * Configuration bundle for a chat session hook.
 * NOTE(review): not referenced in the visible portion of this file —
 * presumably consumed by callers elsewhere; confirm before changing.
 */
export interface ChatSessionConfig {
  /** Application config; may be absent while still loading. */
  config?: IConfig;
  /** Session id; absent for a not-yet-created chat. */
  id?: string;
}

/**
 * Tracks the model selected for a chat session as React state.
 *
 * Resolution: a missing id or the "new" sentinel uses the configured
 * default chat model; otherwise the model stored on the session row wins,
 * falling back to the default when the row has none. Runs inside a live
 * query so the state follows database updates.
 *
 * @returns a readonly tuple `[selectedModel, setSelectedModel, chatHistoryDB]`.
 */
export const useSelectedModel = (id: string | undefined, config: IConfig | undefined) => {
  const [selectedModel, setSelectedModel] = React.useState<string | null>(null);

  useLiveQuery(async () => {
    // Nothing sensible to resolve until the config has loaded.
    if (!config) return;

    let resolved: string;
    if (!id || id === "new") {
      resolved = config.default_chat_model;
    } else {
      const session = await chatHistoryDB.sessions.get(id);
      resolved = session?.model ?? config.default_chat_model;
    }
    setSelectedModel(resolved);
  }, [id, config]);

  return [selectedModel, setSelectedModel, chatHistoryDB] as const;
};

/**
 * Returns the process-wide ChatManager singleton, memoized so the exact
 * same reference is handed back across re-renders.
 */
export const useChatManager = () => {
  const manager = React.useMemo(() => ChatManager.getInstance(), []);
  return manager;
};

/**
 * Sends a user message to the chat manager and streams the AI response
 * into React state.
 *
 * Flow: guard against missing id / in-flight generation / empty input,
 * snapshot and clear the composer, show the outgoing message optimistically,
 * then append each streamed chunk as it arrives. All streaming UI state is
 * reset in `finally` so the UI recovers on success, error, or a stream that
 * ends without an explicit "end" event.
 *
 * @param chatId              target session id; no-op when undefined
 * @param input               raw composer text
 * @param attachments         documents attached to the message
 * @param isGenerating        current in-flight flag (re-entrancy guard)
 * @param setIsGenerating     setter for the in-flight flag
 * @param setStreamingHumanMessage setter for the optimistic human message
 * @param setStreamingAIMessageChunks setter for accumulated AI chunks
 * @param chatManager         manager performing the actual chat call
 * @param setInput            clears the composer text
 * @param setAttachments      clears the composer attachments
 */
export const generateMessage = async (
  chatId: string | undefined, 
  input: string, 
  attachments: IDocument[], 
  isGenerating: boolean, 
  setIsGenerating: (isGenerating: boolean) => void, 
  setStreamingHumanMessage: (streamingHumanMessage: HumanMessage | null) => void, 
  setStreamingAIMessageChunks: React.Dispatch<React.SetStateAction<AIMessageChunk[]>>, 
  chatManager: ChatManager,
  setInput: (input: string) => void,
  setAttachments: (attachments: IDocument[]) => void
) => {
  if (!chatId || isGenerating) return;
  if (!input.trim() && !attachments.length) {
    return;
  }

  try {
    setIsGenerating(true);

    // Snapshot before clearing so the composer resets immediately.
    const chatInput = input;
    const chatAttachments = attachments;

    setInput("");
    setAttachments([]);
    setStreamingHumanMessage(new HumanMessage(chatInput));
    setStreamingAIMessageChunks([]);

    // Note: The ChatManager.chat method retrieves the reasoningEffort from the chat session in the database
    const messageIterator = chatManager.chat(chatId, chatInput, chatAttachments);

    for await (const event of messageIterator) {
      if (event.type === "stream") {
        setStreamingAIMessageChunks(prev => [...prev, event.content as AIMessageChunk]);
      }
      // "end" needs no handling here: cleanup happens in `finally`, which
      // also covers iterators that finish without emitting an "end" event
      // (previously that path left isGenerating stuck at true).
    }
  } catch (error) {
    console.error(error);
    // Narrow before interpolating — `${error}` on a non-Error object
    // would render as "[object Object]".
    const detail = error instanceof Error ? error.message : String(error);
    toast.error(`Failed to send message: ${detail}`);
  } finally {
    // Always reset streaming UI state. The original cleared it only on the
    // "end" event, so an error left a stale optimistic message on screen.
    setIsGenerating(false);
    setStreamingHumanMessage(null);
    setStreamingAIMessageChunks([]);
  }
};