File size: 2,155 Bytes
beffe84
 
 
 
 
 
 
 
 
 
 
 
 
 
7908478
 
 
 
 
 
 
 
 
 
 
 
beffe84
 
 
 
 
 
 
 
 
 
 
 
 
 
770ee32
beffe84
 
 
 
 
770ee32
 
 
beffe84
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
import os

import chainlit as cl
from langgraph_sdk import get_client
from langchain_core.messages import HumanMessage

LANGGRAPH_DEPLOYMENT = os.environ.get("LANGGRAPH_DEPLOYMENT")

@cl.on_chat_start
async def on_start():
    """Initialize per-session LangGraph state for a new Chainlit chat.

    Creates a LangGraph client against ``LANGGRAPH_DEPLOYMENT``, looks up the
    system-provisioned "simple_rag" assistant, creates a fresh thread, and
    stashes all three in the Chainlit user session for the message handler.

    Raises:
        RuntimeError: if no matching assistant is found.
        Exception: re-raised after logging if the search or thread creation fails.
    """
    # NOTE(review): LANGGRAPH_DEPLOYMENT may be None if the env var is unset —
    # get_client would then fail; confirm deployment config guarantees it.
    langraph_client = get_client(url=LANGGRAPH_DEPLOYMENT)

    try:
        # Find the pre-provisioned assistant rather than creating a new one.
        assistants = await langraph_client.assistants.search(
            graph_id="simple_rag", metadata={"created_by": "system"}
        )
        thread = await langraph_client.threads.create()
    except Exception as e:
        print(f"Error occurred while creating assistant or thread: {str(e)}")
        raise

    if not assistants:
        # Guard: assistants[0] below would otherwise raise an opaque IndexError.
        raise RuntimeError(
            'No assistant found for graph_id="simple_rag" '
            'with metadata {"created_by": "system"}'
        )

    # Persist session state for use by the on_message handler.
    cl.user_session.set("langraph_client", langraph_client)
    cl.user_session.set("assistant_id", assistants[0]["assistant_id"])
    cl.user_session.set("thread_id", thread["thread_id"])


@cl.on_message
async def main(message: cl.Message):
    """Stream the assistant's reply to an incoming user message.

    Forwards the user's text to the LangGraph run stream and relays chat-model
    tokens into a Chainlit message as they arrive; tool/chain input and output
    from other events are surfaced in a "Scanning documentation" step.

    Args:
        message: the incoming Chainlit user message.
    """
    msg = cl.Message(content="")

    # Session state populated by the on_chat_start handler.
    langraph_client = cl.user_session.get("langraph_client")
    assistant_id = cl.user_session.get("assistant_id")
    thread_id = cl.user_session.get("thread_id")

    async with cl.Step(name="Scanning documentation") as step:
        async for chunk in langraph_client.runs.stream(
            thread_id=thread_id,
            assistant_id=assistant_id,
            input={"messages": [HumanMessage(content=message.content)]},
            stream_mode="events",
        ):
            # Only "events"-mode chunks carry the nested LangChain event payload.
            if chunk.event != "events":
                continue

            if chunk.data["event"] == "on_chat_model_stream":
                # Model token: relay it to the UI immediately.
                await msg.stream_token(chunk.data["data"]["chunk"]["content"])
            else:
                # Non-token event: look up the payload once (the original
                # tested `"data" in chunk.data` twice) and record any
                # tool/chain input/output on the step widget.
                payload = chunk.data.get("data", {})
                if "input" in payload:
                    step.input = payload["input"]
                if "output" in payload:
                    step.output = payload["output"]

    await msg.send()