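"""Streamlit health-assistant chat app.

Streams answers from meta-llama/Llama-3.2-3B-Instruct through the Hugging Face
Inference API and can fold the text of an uploaded PDF into the prompt.
"""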
import streamlit as st
import PyPDF2
from huggingface_hub import InferenceClient
# Initialize the Inference Client
client = InferenceClient("meta-llama/Llama-3.2-3B-Instruct")
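# Note: gated checkpoints such as the Llama models generally require a Hugging Face
# access token (e.g. via the HF_TOKEN environment variable or `huggingface-cli login`).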


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    uploaded_pdf=None,
):
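    """Build the chat prompt from the system message, prior turns, the new user
    message, and any uploaded PDF text, then stream the model's reply, yielding
    the accumulated response text after each chunk."""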
    messages = [{"role": "system", "content": system_message}]

    # Add previous conversation history to the messages
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    # Add the current user message to the message list
    messages.append({"role": "user", "content": message})

    # If a PDF was uploaded, append its extracted text as extra context
    if uploaded_pdf is not None:
        file_content = extract_pdf_text(uploaded_pdf)
        if file_content:
            messages.append({"role": "user", "content": f"Document Content: {file_content}"})
    # Stream the model's reply, accumulating the text as chunks arrive
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final/empty delta may carry no content
            response += token
        yield response


def extract_pdf_text(file):
    """Extract text from a PDF file."""
    try:
        reader = PyPDF2.PdfReader(file)
        text = ""
        for page in reader.pages:
            # extract_text() can return None for pages without extractable text
            text += (page.extract_text() or "") + "\n"
        return text.strip()
    except Exception as e:
        return f"Error extracting text from PDF: {str(e)}"
# Streamlit UI
st.set_page_config(page_title="Health Assistant", layout="wide")
# Custom CSS for the Streamlit app
st.markdown(
    """
    <style>
    body {
        background-color: #1e2a38; /* Dark blue background */
        color: #ffffff; /* White text for readability */
        font-family: 'Arial', sans-serif; /* Clean and modern font */
    }
    .stButton button {
        background-color: #42B3CE !important; /* Light blue button */
        color: #2e3b4e !important; /* Dark text for contrast */
        border: none !important;
        padding: 10px 20px !important;
        border-radius: 8px !important;
        font-size: 16px;
        font-weight: bold;
        transition: background-color 0.3s ease, transform 0.2s ease;
    }
    .stButton button:hover {
        background-color: #3189A2 !important; /* Darker blue on hover */
        transform: scale(1.05);
    }
    .stTextInput input {
        background-color: #2f3b4d;
        color: white;
        border: 2px solid #42B3CE;
        padding: 12px;
        border-radius: 8px;
        font-size: 16px;
        transition: border 0.3s ease;
    }
    .stTextInput input:focus {
        border-color: #3189A2;
    }
    </style>
    """,
    unsafe_allow_html=True,
)
# Title and description
st.title("Health Assistant Chat")
st.subheader("Chat with your health assistant and upload a document for analysis")
# System message for health-related responses
system_message = (
    "You are a virtual health assistant designed to provide accurate and reliable information "
    "related to health, wellness, and medical topics. Your primary goal is to assist users with "
    "their health-related queries, offer general guidance, and suggest when to consult a licensed "
    "medical professional. If a user asks a question that is unrelated to health, wellness, or medical "
    "topics, respond politely but firmly with: 'I'm sorry, I can't help with that because I am a virtual "
    "health assistant designed to assist with health-related needs. Please let me know if you have any "
    "health-related questions.'"
)
# Upload a PDF file
uploaded_pdf = st.file_uploader("Upload a PDF file (Optional)", type="pdf")
# User input message
message = st.text_input("Type your health-related question:")
# History for conversation tracking
if 'history' not in st.session_state:
    st.session_state['history'] = []
# Collect and display previous conversation history
history = st.session_state['history']
for user_message, assistant_message in history:
    st.markdown(f"**You:** {user_message}")
    st.markdown(f"**Assistant:** {assistant_message}")
# Max tokens, temperature, and top-p sliders
max_tokens = st.slider("Max new tokens", min_value=1, max_value=2048, value=512)
temperature = st.slider("Temperature", min_value=0.1, max_value=4.0, value=0.7, step=0.1)
top_p = st.slider("Top-p (nucleus sampling)", min_value=0.1, max_value=1.0, value=0.95, step=0.05)
# Button to generate response
if st.button("Generate Response"):
    if message:
        # Generate the response from the user's input and any uploaded document.
        # The existing history is passed as context; respond() adds the new
        # message itself, so the turn is only appended to session history afterwards.
        response_stream = respond(
            message,
            st.session_state.history,
            system_message,
            max_tokens,
            temperature,
            top_p,
            uploaded_pdf,
        )

        # Stream the reply into a single placeholder that is updated in place
        placeholder = st.empty()
        final_response = ""
        for partial in response_stream:
            final_response = partial
            placeholder.markdown(f"**Assistant:** {final_response}")

        # Record the completed turn in the conversation history
        st.session_state.history.append((message, final_response))
    else:
        st.error("Please enter a question to proceed.")