File size: 2,297 Bytes
5251091
 
 
 
fe2c3a0
 
 
5251091
 
 
 
 
 
 
 
 
 
 
fe2c3a0
5eeaec9
5251091
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
08af185
 
 
 
 
 
5251091
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import streamlit as st
import os
import google.generativeai as genai

# Configure the Gemini client. The key is read from the environment
# (e.g. Hugging Face Spaces' Secrets).
# NOTE(review): os.getenv returns None when GOOGLE_KEY is unset, so
# genai.configure would receive api_key=None — confirm the secret is
# defined in the deployment environment.
genai.configure(api_key=os.getenv("GOOGLE_KEY"))  # Make sure to set this in the Spaces secrets

# Module-level model and chat session.
# NOTE(review): Streamlit re-executes the whole script on every interaction,
# so this chat session is presumably re-created (and its history reset) on
# each rerun — which is likely why the conversation history is re-injected
# into the prompt manually below. Verify against Streamlit's rerun model.
model = genai.GenerativeModel("gemini-pro")
chat = model.start_chat(history=[])

def get_gemini_response(prompt):
    """Send ``prompt`` to the shared Gemini chat session and return the reply text.

    The upstream call streams the answer in chunks; they are consumed here
    and joined into one string so callers always receive plain text.
    (Previously the raw streaming response object was returned; the caller's
    ``isinstance(response, list)`` check is never true for a
    ``GenerateContentResponse``, so the unconsumed object was rendered as-is.)

    Args:
        prompt: Full prompt text, including any conversation context.

    Returns:
        str: The model's complete reply, or a human-readable error message
        if the request fails (kept as a string rather than raising, for
        backward compatibility with the caller's rendering code).
    """
    try:
        response = chat.send_message(prompt, stream=True)
        # Each streamed chunk is a contiguous piece of the same answer,
        # so join with no separator.
        return "".join(chunk.text for chunk in response)
    except Exception as e:
        # Broad catch is deliberate: any API/network failure is surfaced
        # to the UI as text instead of crashing the Streamlit script.
        return f"An error occurred: {str(e)}"

# Streamlit app configuration: browser tab title and on-page header.
st.set_page_config(page_title="Med ChatBot")
st.header("Medical ChatBot")

# Initialize session state for chat history.
# st.session_state persists across script reruns; the list accumulates
# (role, text) tuples ("You"/"Bot") appended by the submit handler below.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []

# Input box and submission button; `submit` is True only on the rerun
# triggered by the button click.
input_text = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

if submit and input_text:
    # Fold the accumulated conversation into the system context so the model
    # keeps continuity across turns (history is passed manually in the prompt
    # rather than relying on the module-level chat session's own history).
    chat_history_text = " ".join(f"{role}: {text}" for role, text in st.session_state["chat_history"])

    context = (
        "You are a medical chatbot designed to assist users in understanding their symptoms. "
        "Provide clear, concise, and informative responses based on NHS guidelines. "
        "Avoid technical jargon and code snippets. If asked a question unrelated to medical topics, "
        "respond with: 'I am a medical bot and I don't have that knowledge.' "
        f"Previous conversation: {chat_history_text} "
    )

    prompt = f"{context} User's latest input: {input_text}"  # Include the latest user input
    response = get_gemini_response(prompt)

    # Record the user's turn only after the prompt is built, so the
    # "Previous conversation" section excludes the current question.
    st.session_state['chat_history'].append(("You", input_text))

    st.subheader("The Response is")
    if isinstance(response, str):
        # Already plain text (e.g. an error message from get_gemini_response,
        # or a pre-joined reply).
        full_response = response
    else:
        # A streamed response is an iterable of partial-text chunks — it is
        # never a `list`, which is what the previous check tested for, so the
        # raw object used to be rendered unconsumed. Join with no separator
        # since chunks are contiguous pieces of one answer.
        full_response = "".join(chunk.text for chunk in response)
    st.write(full_response)
    st.session_state['chat_history'].append(("Bot", full_response))

# Render the full conversation so far, one "role: text" line per turn.
st.subheader("The Chat History is")
for speaker, message in st.session_state['chat_history']:
    st.write(f"{speaker}: {message}")