import streamlit as st
from queue import Queue

from langchain import HuggingFaceHub, LLMChain, PromptTemplate

st.title("Falcon QA Bot")

# API token for the Hugging Face Hub, read from Streamlit secrets
huggingfacehub_api_token = st.secrets["hf_token"]

# Falcon-7B-Instruct served through the Hugging Face Inference API
repo_id = "tiiuae/falcon-7b-instruct"
llm = HuggingFaceHub(
    huggingfacehub_api_token=huggingfacehub_api_token,
    repo_id=repo_id,
    model_kwargs={"temperature": 0.2, "max_new_tokens": 2000},
)
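# model_kwargs are forwarded to the hosted inference endpoint: a low
# temperature (0.2) keeps answers focused, while max_new_tokens caps the
# length of the generated reply.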
template = """
You are an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
{question}
"""
queue = Queue()
def chat(query):
    prompt = PromptTemplate(template=template, input_variables=["question"])
    llm_chain = LLMChain(prompt=prompt, verbose=True, llm=llm)
    result = llm_chain.predict(question=query)
    return result
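# Note: chat() rebuilds the PromptTemplate and LLMChain on every question.
# A minimal sketch of reusing one chain across reruns instead, assuming a
# Streamlit version that provides st.cache_resource (>= 1.18); this helper
# is illustrative and not wired into main() below:
@st.cache_resource
def get_chain():
    prompt = PromptTemplate(template=template, input_variables=["question"])
    return LLMChain(prompt=prompt, verbose=True, llm=llm)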
def main():
    # "user_input" avoids shadowing the built-in input()
    user_input = st.text_input("What do you want to ask about", placeholder="Input your question here")
    if user_input:
        # Add the user's question to the queue
        queue.put(user_input)
        # Check if there are any waiting users
        if not queue.empty():
            # Get the next user's question from the queue
            query = queue.get()
            # Generate a response to the user's question
            result = chat(query)
            # Display the response to the user
            st.write(result, unsafe_allow_html=True)
if __name__ == '__main__':
    main()
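# To run locally (assuming streamlit, langchain, and huggingface_hub are
# installed):
#   streamlit run app.py
# st.secrets expects the token in .streamlit/secrets.toml, e.g.:
#   hf_token = "<your Hugging Face API token>"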