mencraft committed on
Commit
2a7b2e9
·
1 Parent(s): c8c44e2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -1
app.py CHANGED
@@ -38,6 +38,8 @@ index = VectorStoreIndex.from_documents(documents)
38
 
39
  @cl.on_chat_start
40
  async def factory():
 
 
41
  # llm_predictor = LLMPredictor(
42
  # llm=ChatOpenAI(
43
  # temperature=0,
@@ -61,6 +63,9 @@ async def factory():
61
  service_context=gpt_35_context
62
  )
63
 
 
 
 
64
  cl.user_session.set("query_engine", query_engine)
65
 
66
 
@@ -70,4 +75,15 @@ async def main(message):
70
  response = await cl.make_async(query_engine.query)(message)
71
  print(response)
72
 
73
- await cl.Message(content=response).send()
 
 
 
 
 
 
 
 
 
 
 
 
38
 
39
  @cl.on_chat_start
40
  async def factory():
41
+ msg = cl.Message(content=f"Building Index...")
42
+ await msg.send()
43
  # llm_predictor = LLMPredictor(
44
  # llm=ChatOpenAI(
45
  # temperature=0,
 
63
  service_context=gpt_35_context
64
  )
65
 
66
+ msg.content = f"Index built!"
67
+ await msg.send()
68
+
69
  cl.user_session.set("query_engine", query_engine)
70
 
71
 
 
75
  response = await cl.make_async(query_engine.query)(message)
76
  print(response)
77
 
78
+ # await cl.Message(content=response).send()
79
+ response_message = cl.Message(content="")
80
+ response_message.content = response
81
+ print(response_message)
82
+ await response_message.send()
83
+ # # for token in response.response_gen:
84
+ # # await response_message.stream_token(token=token)
85
+
86
+ # # if response.response_txt:
87
+ # response_message.content = response
88
+
89
+ # await response_message.send()