import streamlit as st
import PyPDF2
from huggingface_hub import InferenceClient

# Hosted inference client for the chat model
client = InferenceClient("meta-llama/Llama-3.2-3B-Instruct")


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    uploaded_pdf=None,
):
    """Stream a chat completion, optionally grounding the reply in an uploaded PDF."""
    messages = [{"role": "system", "content": system_message}]
    # Replay previous turns so the model keeps conversational context
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    messages.append({"role": "user", "content": message})

    # If a PDF was uploaded, append its extracted text as additional context
    if uploaded_pdf is not None:
        file_content = extract_pdf_text(uploaded_pdf)
        if file_content:
            messages.append({"role": "user", "content": f"Document Content: {file_content}"})

    # Stream the completion, yielding the growing partial response
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # some streamed chunks (e.g. the final one) carry no content
            response += token
            yield response


def extract_pdf_text(file):
    """Extract text from a PDF file, returning an error message string on failure."""
    try:
        reader = PyPDF2.PdfReader(file)
        text = ""
        for page in reader.pages:
            # extract_text() may return None for pages with no extractable text
            text += page.extract_text() or ""
        return text
    except Exception as e:
        return f"Error extracting text from PDF: {str(e)}"


st.set_page_config(page_title="Health Assistant", layout="wide")
st.markdown(
    """
    <style>
    .stApp {
        background-color: #1e2a38; /* Dark blue background for Streamlit's app container */
        color: #ffffff; /* White text for readability */
        font-family: 'Arial', sans-serif; /* Clean and modern font */
    }
    .stButton button {
        background-color: #42B3CE !important; /* Light blue button */
        color: #2e3b4e !important; /* Dark text for contrast */
        border: none !important;
        padding: 10px 20px !important;
        border-radius: 8px !important;
        font-size: 16px;
        font-weight: bold;
        transition: background-color 0.3s ease, transform 0.2s ease;
    }
    .stButton button:hover {
        background-color: #3189A2 !important; /* Darker blue on hover */
        transform: scale(1.05);
    }
    .stTextInput input {
        background-color: #2f3b4d;
        color: white;
        border: 2px solid #42B3CE;
        padding: 12px;
        border-radius: 8px;
        font-size: 16px;
        transition: border 0.3s ease;
    }
    .stTextInput input:focus {
        border-color: #3189A2;
    }
    </style>
    """,
    unsafe_allow_html=True,
)
st.title("Health Assistant Chat")
st.subheader("Chat with your health assistant and upload a document for analysis")

system_message = (
    "You are a virtual health assistant designed to provide accurate and reliable information "
    "related to health, wellness, and medical topics. Your primary goal is to assist users with "
    "their health-related queries, offer general guidance, and suggest when to consult a licensed "
    "medical professional. If a user asks a question that is unrelated to health, wellness, or medical "
    "topics, respond politely but firmly with: 'I'm sorry, I can't help with that because I am a virtual "
    "health assistant designed to assist with health-related needs. Please let me know if you have any health-related questions.'"
)
uploaded_pdf = st.file_uploader("Upload a PDF file (Optional)", type="pdf")

message = st.text_input("Type your health-related question:")

if "history" not in st.session_state:
    st.session_state["history"] = []

for user_message, assistant_message in st.session_state["history"]:
    st.markdown(f"**You:** {user_message}")
    st.markdown(f"**Assistant:** {assistant_message}")

max_tokens = st.slider("Max new tokens", min_value=1, max_value=2048, value=512)
temperature = st.slider("Temperature", min_value=0.1, max_value=4.0, value=0.7, step=0.1)
top_p = st.slider("Top-p (nucleus sampling)", min_value=0.1, max_value=1.0, value=0.95, step=0.05)

if st.button("Generate Response"):
    if message:
        # Stream the reply into a single placeholder that updates in place
        placeholder = st.empty()
        final_response = ""
        for partial_response in respond(
            message,
            st.session_state["history"],  # previous turns only; the new message is passed separately
            system_message,
            max_tokens,
            temperature,
            top_p,
            uploaded_pdf,
        ):
            final_response = partial_response
            placeholder.markdown(f"**Assistant:** {final_response}")

        # Save the completed turn so it appears in the chat history on the next rerun
        st.session_state["history"].append((message, final_response))
    else:
        st.error("Please enter a question to proceed.")