import streamlit as st
from teapotai import TeapotAI, TeapotAISettings
import hashlib
import os
import requests
default_documents = []

# Brave Search API key, read from the environment (e.g. a Space secret)
API_KEY = os.environ.get("brave_api_key")

def brave_search(query, count=3):
    # Query the Brave Web Search API and return (title, description, url) tuples
    url = "https://api.search.brave.com/res/v1/web/search"
    headers = {"Accept": "application/json", "X-Subscription-Token": API_KEY}
    params = {
        "q": query,
        "count": count,
        "extra_snippets": True
    }

    response = requests.get(url, headers=headers, params=params)

    if response.status_code == 200:
        results = response.json().get("web", {}).get("results", [])
        print(results)
        return [(res["title"], res["description"], res["url"]) for res in results]
    else:
        print(f"Error: {response.status_code}, {response.text}")
        return []

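# Illustrative usage (assumes a valid `brave_api_key` is set):
#   brave_search("Eiffel Tower height") -> up to `count` tuples of
#   (title, snippet, url), or [] if the request fails.
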
# Function to handle the chat with TeapotAI
def handle_chat(user_input, teapot_ai):
    results = brave_search(user_input)

    # Use the Brave result snippets (minus the <strong> highlight tags) as RAG context
    documents = []
    for title, description, url in results:
        documents.append(description.replace('<strong>', '').replace('</strong>', ''))
    print(documents)

    context = "\n".join(documents)

    response = teapot_ai.query(
        context=context,
        query=user_input
    )

    # response = teapot_ai.chat([
    #     {
    #         "role": "system",
    #         "content": "You are Teapot, an open-source AI assistant optimized for running efficiently on low-end devices. You provide short, accurate responses without hallucinating and excel at extracting information and summarizing text."
    #     },
    #     {
    #         "role": "user",
    #         "content": user_input
    #     }
    # ])

    return response

def suggestion_button(suggestion_text, teapot_ai):
    # On click, run the suggested query and store the exchange in the chat history
    if st.button(suggestion_text):
        st.session_state.messages.append({"role": "user", "content": suggestion_text})
        response = handle_chat(suggestion_text, teapot_ai)
        st.session_state.messages.append({"role": "assistant", "content": response})

# Function to hash documents
def hash_documents(documents):
    return hashlib.sha256("\n".join(documents).encode("utf-8")).hexdigest()

# Streamlit app
def main():
    st.set_page_config(page_title="TeapotAI Chat", page_icon=":robot_face:", layout="wide")

    # Sidebar for document input
    st.sidebar.header("Document Input (for RAG)")
    user_documents = st.sidebar.text_area(
        "Enter documents, each on a new line",
        value="\n".join(default_documents)
    )

    # Parse the user input to get the documents (split by newline)
    documents = user_documents.split("\n")

    # Ensure non-empty documents
    documents = [doc for doc in documents if doc.strip()]

    # Check if documents have changed
    new_documents_hash = hash_documents(documents)

    # Load model if documents have changed, otherwise reuse the model from session_state
    if "documents_hash" not in st.session_state or st.session_state.documents_hash != new_documents_hash:
        with st.spinner('Loading Model and Embeddings...'):
            teapot_ai = TeapotAI(documents=documents or default_documents, settings=TeapotAISettings(rag_num_results=3))

        # Store the new hash and model in session state
        st.session_state.documents_hash = new_documents_hash
        st.session_state.teapot_ai = teapot_ai
    else:
        # Reuse the existing model
        teapot_ai = st.session_state.teapot_ai
    # Initialize session state and display the welcome message
    if "messages" not in st.session_state:
        st.session_state.messages = [{"role": "assistant", "content": "Hi, I am Teapot AI, how can I help you?"}]

    # Display previous messages from chat history
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Accept user input
    user_input = st.chat_input("Ask about famous landmarks")
    # Suggested questions rendered as buttons
    st.markdown("### Suggested Questions")
    s1, s2, s3 = st.columns([1, 2, 3])
    with s1:
        suggestion_button("How tall is the Eiffel Tower?", teapot_ai)
    with s2:
        suggestion_button("Extract the year the Eiffel Tower was constructed.", teapot_ai)
    with s3:
        suggestion_button("How large is the Death Star?", teapot_ai)
    if user_input:
        # Display user message in chat message container
        with st.chat_message("user"):
            st.markdown(user_input)

        # Add user message to session state
        st.session_state.messages.append({"role": "user", "content": user_input})

        with st.spinner('Generating Response...'):
            # Get the answer from TeapotAI using chat functionality
            response = handle_chat(user_input, teapot_ai)

        # Display assistant response in chat message container
        with st.chat_message("assistant"):
            st.markdown(response)

        # Add assistant response to session state
        st.session_state.messages.append({"role": "assistant", "content": response})

st.markdown("### Suggested Questions")
# Run the app
if __name__ == "__main__":
    main()
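# To try this app locally (a sketch; assumes the file is saved as app.py, the pip
# package name matches the `teapotai` module, and the `brave_api_key` environment
# variable holds a valid Brave Search API key):
#   pip install streamlit teapotai requests
#   streamlit run app.py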