antoineandrieu committed on
Commit da67151 · 1 Parent(s): 65d7b9c

Remove history management as it's handled by the backend

Files changed (1): app.py (+9 -16)
app.py CHANGED
@@ -9,32 +9,25 @@ client = get_client(url=LANGGRAPH_DEPLOYMENT)

 async def respond(
     message,
-    history: list[tuple[str, str]],
-    system_message,
 ):
-    messages = [SystemMessage(content=system_message)]
-
-    for user_msg, ai_msg in history:
-        if user_msg:
-            messages.append(HumanMessage(content=user_msg))
-        if ai_msg:
-            messages.append(AIMessage(content=ai_msg))
+    thread = await client.threads.create()

-    messages.append(HumanMessage(content=message))
+    await client.messages.create(
+        thread_id=thread.id,
+        content=message,
+        role="user"
+    )

     assistants = await client.assistants.search(
         graph_id="retrieval_graph", metadata={"created_by": "system"}
     )
-    thread = await client.threads.create()

     response = ""

     async for chunk in client.runs.stream(
-        thread_id=thread["thread_id"],
-        assistant_id=assistants[0]["assistant_id"],
-        input={
-            "messages": messages
-        },
+        thread_id=thread.id,
+        assistant_id=assistants[0].id,
+        input={},
         stream_mode="events",
     ):
         if chunk.event == "events":
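
For readers following along, the idea behind the change is that conversation history now lives server-side on a LangGraph thread instead of being rebuilt from the per-call (user_msg, ai_msg) tuples on every request. The sketch below illustrates that pattern by reusing one thread across two turns. It sticks to the client calls visible in the diff (get_client, assistants.search, threads.create, runs.stream) and uses the dict-style accessors from the removed lines; the deployment URL, the input payload shape, and the assumption that the deployed retrieval_graph persists messages on the thread between runs are all unverified here, so treat it as a sketch rather than the app's actual wiring.

import asyncio

from langgraph_sdk import get_client

# Assumption: placeholder deployment URL; the real value comes from the app's config, not this diff.
LANGGRAPH_DEPLOYMENT = "http://localhost:8123"

client = get_client(url=LANGGRAPH_DEPLOYMENT)


async def send_turn(thread_id: str, assistant_id: str, message: str) -> str:
    """Run one turn on an existing thread, streaming the assistant's reply.

    Only the newest user message is sent; earlier turns are expected to be
    retained server-side on the thread (the point of the commit), which
    depends on the deployed graph's persistence and is assumed here.
    """
    response = ""
    async for chunk in client.runs.stream(
        thread_id=thread_id,
        assistant_id=assistant_id,
        # Payload shape is an assumption; the new code in the diff sends an empty input.
        input={"messages": [{"role": "user", "content": message}]},
        stream_mode="events",
    ):
        if chunk.event == "events":
            # Event payloads are kept raw; their exact shape is not shown in the diff.
            response += str(chunk.data)
    return response


async def main() -> None:
    assistants = await client.assistants.search(
        graph_id="retrieval_graph", metadata={"created_by": "system"}
    )
    thread = await client.threads.create()  # one thread per conversation

    # Two turns against the same thread: the second question can rely on the
    # first exchange without the client replaying it, which is what makes the
    # removed history-rebuilding loop unnecessary.
    assistant_id = assistants[0]["assistant_id"]
    thread_id = thread["thread_id"]
    print(await send_turn(thread_id, assistant_id, "What does the retrieval graph index?"))
    print(await send_turn(thread_id, assistant_id, "Can you expand on that?"))


if __name__ == "__main__":
    asyncio.run(main())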