Namitg02 committed
Commit f4c65b4 · verified · Parent: aa46ac9

Update app.py

Files changed (1):
  app.py (+5 -7)
app.py CHANGED

@@ -30,9 +30,6 @@ vectordb = Chroma.from_documents(
 
 retriever = vectordb.as_retriever()
 
-import gradio as gr
-gr.load("models/HuggingFaceH4/zephyr-7b-beta").launch()
-
 #docs_ss = vectordb.similarity_search(question,k=3)
 
 
@@ -51,10 +48,6 @@ Helpful Answer:"""
 QA_CHAIN_PROMPT = PromptTemplate.from_template(template)
 
 
-from langchain.chains import ConversationalRetrievalChain
-#qa_chain = RetrievalQA.from_chain_type(models/HuggingFaceH4/zephyr-7b-beta,retriever=vectordb.as_retriever(),chain_type_kwargs={"prompt": QA_CHAIN_PROMPT})
-
-
 from langchain.memory import ConversationBufferMemory
 memory = ConversationBufferMemory(
 memory_key="chat_history",
@@ -64,9 +57,14 @@ memory = ConversationBufferMemory(
 question = "Can I reverse Diabetes?"
 print("template")
 
+from langchain.chains import ConversationalRetrievalChain
+
 retriever=vectordb.as_retriever()
 READER_MODEL = "HuggingFaceH4/zephyr-7b-beta"
 qa = ConversationalRetrievalChain.from_llm(llm=READER_MODEL,retriever=retriever,memory=memory,chain_type_kwargs={"prompt": QA_CHAIN_PROMPT})
 
+import gradio as gr
+gr.load("READER_MODEL").launch()
+
 #result = ({"query": question})
 print("qa")