Namitg02 committed
Commit 11a5cb0 · verified · 1 Parent(s): c76678b

Update app.py

Files changed (1)
  1. app.py +22 -9
app.py CHANGED
@@ -60,22 +60,35 @@ retriever = vectordb.as_retriever(
 
 from langchain.chains import create_retrieval_chain
 from langchain.chains.combine_documents import create_stuff_documents_chain
-from langchain import hub
+#from langchain import hub
+
 
 READER_MODEL="HuggingFaceH4/zephyr-7b-beta"
 #HuggingFaceH4/zephyr-7b-beta
 #qa = ConversationalRetrievalChain.from_llm(llm=READER_MODEL,retriever=retriever,memory=memory)
 #qa = RetrievalQA.from_chain_type(llm=READER_MODEL,chain_type="map_reduce",retriever=retriever,verbose=True)
 
-retrieval_qa_chat_prompt = hub.pull("langchain-ai/retrieval-qa-chat")
-combine_docs_chain = create_stuff_documents_chain(
-    READER_MODEL, retrieval_qa_chat_prompt
+#retrieval_qa_chat_prompt = hub.pull("langchain-ai/retrieval-qa-chat")
+
+qa_chat_prompt = ChatPromptTemplate.from_template("""Answer the following question based only on the provided context:
+
+<context>
+{Tou are a doctor}
+</context>
+
+Question: {input}""")
+
+docs_chain = create_stuff_documents_chain(
+    READER_MODEL, qa_chat_prompt
 )
-qa = create_retrieval_chain(retriever, combine_docs_chain)
+retrieval_chain = create_retrieval_chain(retriever, docs_chain)
+response = retrieval_chain.invoke({"input": "how can I reverse diabetes?"})
+print(response["answer"])
+
 
-result = qa(question)
-import gradio as gr
-gr.load("READER_MODEL").launch()
+#result = qa(question)
+#import gradio as gr
+#gr.load("READER_MODEL").launch()
 
 #result = ({"query": question})
-print("qa")
+#print("qa")