Aiswarya Sankar committed on
Commit
a1d349c
·
1 Parent(s): ea2c2f5

update file

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -424,7 +424,7 @@ def solveGithubIssue(ticket, history) -> Response:
424
  flat_list = [item for item in flat_list if item is not None]
425
 
426
  print(flat_list)
427
- for char in qa({"question": question, "chat_history": flat_list})["answer"]:
428
  history[-1][1] += char
429
  yield history
430
 
@@ -463,7 +463,7 @@ def bot(history, **kwargs):
463
  [StreamingGradioCallbackHandler(q)]
464
  ),
465
  )
466
- qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever, max_tokens_limit=16000)
467
  chat_history = []
468
 
469
  except Exception as e:
@@ -476,7 +476,7 @@ def bot(history, **kwargs):
476
  flat_list = [item for item in flat_list if item is not None]
477
  print(flat_list)
478
 
479
- for char in qa({"question": user_message, "chat_history": flat_list})["answer"]:
480
  history[-1][1] += char
481
  yield history
482
 
 
424
  flat_list = [item for item in flat_list if item is not None]
425
 
426
  print(flat_list)
427
+ for char in qa({"question": question, "chat_history": history})["answer"]:
428
  history[-1][1] += char
429
  yield history
430
 
 
463
  [StreamingGradioCallbackHandler(q)]
464
  ),
465
  )
466
+ qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever, max_tokens_limit=16000, return_source_documents=True, get_chat_history=lambda h : h)
467
  chat_history = []
468
 
469
  except Exception as e:
 
476
  flat_list = [item for item in flat_list if item is not None]
477
  print(flat_list)
478
 
479
+ for char in qa({"question": user_message, "chat_history": history})["answer"]:
480
  history[-1][1] += char
481
  yield history
482