|
import streamlit as st |
|
from transformers import pipeline |
|
from collections import deque |
|
|
|
|
|
# Instructions fed to the model as the extractive-QA "context" for every
# query (see chat()). NOTE(review): a question-answering pipeline extracts
# answer spans FROM its context, so answers can only ever be substrings of
# this prompt — confirm this is the intended behavior.
system_prompt = "You are an AI assistant that specializes in helping with code-based questions and tasks. Feel free to ask anything related to coding!"



# Page heading, rendered at the top on every Streamlit rerun.
st.title("Falcon QA Bot")
|
|
|
@st.cache(allow_output_mutation=True)
def get_qa_pipeline():
    """Build and cache the Hugging Face pipeline used to answer queries.

    Cached with ``st.cache`` so the 7B model is loaded once per session
    instead of on every Streamlit rerun.

    Returns:
        A transformers ``question-answering`` pipeline backed by
        ``tiiuae/falcon-7b-instruct``.

    NOTE(review): ``tiiuae/falcon-7b-instruct`` is a causal text-generation
    model; wrapping it in an extractive "question-answering" pipeline is
    unusual — confirm the intended task ("text-generation" is the likely
    fit, but switching it would change the result schema consumed by
    main()). Also, ``st.cache`` is deprecated in recent Streamlit releases
    in favor of ``st.cache_resource``.
    """
    # transformers requires torch for this model, so a local import is safe.
    import torch

    # Use the first CUDA GPU when available, otherwise fall back to CPU
    # (device=-1). The previous hard-coded device=0 crashed on CPU-only hosts.
    device = 0 if torch.cuda.is_available() else -1
    return pipeline("question-answering", model="tiiuae/falcon-7b-instruct", device=device)
|
|
|
def chat(query):
    """Answer *query* with the cached QA pipeline.

    Args:
        query: The user's question text.

    Returns:
        The pipeline's result dict; main() reads its ``"answer"`` key.
    """
    # Renamed from ``pipeline`` — the original local shadowed the
    # ``pipeline`` factory imported from transformers at module level.
    qa_pipeline = get_qa_pipeline()
    result = qa_pipeline(question=query, max_length=2000, context=system_prompt)
    return result
|
|
|
def main():
    """Render the Streamlit UI: collect a question, answer it, show the result."""
    # Per-run FIFO of pending questions. NOTE(review): Streamlit re-executes
    # the whole script on every interaction, so this deque is rebuilt empty
    # each rerun and holds at most the question just submitted — it does not
    # persist a backlog across reruns (st.session_state would be needed for
    # that); confirm whether queueing was actually intended.
    user_queue = deque()

    # Lay out radio-button groups horizontally.
    st.markdown('<style>div.row-widget.stRadio > div{flex-direction:row;}</style>', unsafe_allow_html=True)

    # Renamed from ``input`` to stop shadowing the builtin. The widget key
    # stays "input" so existing session state is unaffected.
    user_input = st.text_area("What do you want to ask about", value="", height=150, max_chars=500, key="input")
    if st.button("Ask"):
        if user_input:
            user_queue.append(user_input)

    if user_queue:
        current_user = user_queue[0]
        # Echo the prompt and the question read-only, then generate.
        st.text_area("System Prompt", value=system_prompt, height=150, disabled=True)
        st.text_area("User Input", value=current_user, height=150, disabled=True)
        with st.spinner("Generating response..."):
            output = chat(current_user)
        st.text_area("Falcon's Answer", value=output["answer"], height=150, disabled=True)
        # Drop the processed question from the front of the queue.
        user_queue.popleft()
|
|
|
# Entry point when executed directly (e.g. via ``streamlit run``).
if __name__ == '__main__':

    main()