suriya7 committed
Commit d4989ea · verified · 1 Parent(s): 47c1d3d

Update app.py

Files changed (1)
  1. app.py +7 -7
app.py CHANGED
@@ -14,9 +14,9 @@ llm = HuggingFaceHub(repo_id="suriya7/MaxMini-Instruct-248M",
     })
 
 
-template = """Please Answer the User Question and use the previous chat to know about the past conversation: previous chat: {chat_history}\nUser:{question}\nChatbot:"""
+template = """Please Answer the Question:{question}"""
 
-prompt = PromptTemplate(template=template,input_variables=['question','chat_history'])
+prompt = PromptTemplate(template=template,input_variables=['question'])
 
 llm_chain = LLMChain(
     llm=llm,
@@ -24,14 +24,14 @@ llm_chain = LLMChain(
     verbose=True,
 )
 
-previous_response = ""
+# previous_response = ""
 def conversational_chat(user_query):
-    global previous_response
-    previous_response = "".join([f"User: {i[0]}\nChatbot: {i[1]}" for i in st.session_state['history'] if i is not None])
-    print(f"this is my previous {previous_response}")
+    # global previous_response
+    # previous_response = "".join([f"User: {i[0]}\nChatbot: {i[1]}" for i in st.session_state['history'] if i is not None])
+    # print(f"this is my previous {previous_response}")
     result = llm_chain.predict(
         question=user_query,
-        chat_history = previous_response
+        # chat_history = previous_response
     )
     st.session_state['history'].append((user_query, result))
     return result
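
With {chat_history} dropped from the template and the history-building lines commented out, conversational_chat now passes only the current question to the chain; earlier turns are still appended to st.session_state['history'] but no longer reach the model. For reference, a minimal usage sketch of how the function might be wired into the Streamlit UI after this commit (the text-input widget and the history initialisation are assumptions, not part of the diff):

# Usage sketch (assumed wiring, not part of this commit): initialise the
# chat history and call conversational_chat() from a Streamlit input box.
import streamlit as st

if 'history' not in st.session_state:
    st.session_state['history'] = []  # list of (user_query, answer) tuples

user_query = st.text_input("Ask a question")   # hypothetical widget label
if user_query:
    answer = conversational_chat(user_query)   # runs llm_chain.predict(question=...)
    st.write(answer)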