# MediGuide / app.py
# NOTE(review): the following Hugging Face Space page chrome was captured
# with the file and is not Python ("treasuremars's picture", "Update app.py",
# commit 94ce4e1, "raw / history / blame", "4.47 kB"); it is preserved here
# as a comment so the module remains executable.
import pandas as pd

# Load the drugs/side-effects dataset and keep only the columns that the
# RAG context needs.
df = pd.read_csv('./drugs_side_effects_drugs_com.csv')
df = df[['drug_name', 'medical_condition', 'side_effects']]
df.dropna(inplace=True)

# One "col: value | col: value | ..." context string per row.
# BUG FIX: the original loop was `for i in range(2)`, which indexed only the
# first two rows — the vector store then held almost none of the dataset.
# Index every remaining row instead.
context_data = [
    " | ".join(f"{col}: {row[col]}" for col in df.columns)
    for _, row in df.iterrows()
]
import os
# Get the secret key from the environment.
# NOTE(review): the environment variable is spelled 'gloq_key', not
# 'groq_key' — this looks like a typo, but it must match the secret name
# configured in the deployment (HF Space) settings; confirm before renaming.
# os.environ.get returns None if the variable is unset.
groq_key = os.environ.get('gloq_key')
## LLM used for RAG
from langchain_groq import ChatGroq
# Groq-hosted Llama 3.1 70B chat model, authenticated with the secret read
# above. NOTE(review): confirm this model id is still served by Groq; hosted
# model catalogs change and a retired id fails at request time, not here.
llm = ChatGroq(model="llama-3.1-70b-versatile",api_key=groq_key)
## Embedding model!
from langchain_huggingface import HuggingFaceEmbeddings
# Sentence-embedding model used to vectorize both the indexed context rows
# and incoming queries; downloaded from the Hugging Face hub on first use.
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
# Create the Chroma vector store.
from langchain_chroma import Chroma
# NOTE(review): persist_directory="./" writes the index files next to
# app.py; a dedicated subdirectory (e.g. "./chroma_db") would be tidier.
vectorstore = Chroma(
collection_name="medical_dataset_store",
embedding_function=embed_model,
persist_directory="./",
)
# Add the prepared context strings to the vector store (each string is
# embedded with embed_model) and expose it as a retriever for the chain.
vectorstore.add_texts(context_data)
retriever = vectorstore.as_retriever()
from langchain_core.prompts import PromptTemplate
# RAG prompt: the chain fills {context} with retrieved documents and
# {question} with the raw user input. The template text is part of runtime
# behavior and is kept verbatim.
template = ("""You are a pharmacist and medical expert.
Use the provided context to answer the question.
If the question is related to medical condition, drug name
and side effects that are not in the context, look online and answer them.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.
Context: {context}
Question: {question}
Answer:""")
rag_prompt = PromptTemplate.from_template(template)
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
# LCEL pipeline: the retriever supplies {context}, the user input passes
# through unchanged as {question}, the prompt renders both into the LLM,
# and StrOutputParser reduces the model response to a plain string.
rag_chain = (
{"context": retriever, "question": RunnablePassthrough()}
| rag_prompt
| llm
| StrOutputParser()
)
import gradio as gr
# # Function to stream responses
# def rag_memory_stream(message, history):
# partial_text = ""
# for new_text in rag_chain.stream(message): # Assuming rag_chain is pre-defined
# partial_text += new_text
# yield partial_text
# Example rag_memory_stream function with history handling
def rag_memory_stream(messages, history=None):
    """
    Stream the RAG chain's answer for the latest user message.

    Parameters
    ----------
    messages : str | list[str]
        Either the raw user message (what gradio's ChatInterface passes to
        ``fn``) or a non-empty list of message strings, in which case the
        last entry is treated as the current user message.
    history : list[dict] | None
        Optional running conversation log of ``{"user": ..., "bot": ...}``
        entries. A fresh list is created per call when omitted — this fixes
        the original mutable-default-argument bug, where one shared list
        silently accumulated entries across every call.

    Yields
    ------
    str
        The partial answer text, growing as tokens stream in.

    Raises
    ------
    ValueError
        If ``messages`` is neither a string nor a list of strings.
    """
    if history is None:
        history = []

    # Accept both a bare string and a list of strings. The original only
    # accepted lists and raised on plain strings — which is exactly what
    # gr.ChatInterface sends — so every chat turn failed with ValueError.
    if isinstance(messages, str):
        user_message = messages
    elif (
        isinstance(messages, list)
        and messages
        and all(isinstance(msg, str) for msg in messages)
    ):
        user_message = messages[-1]  # latest user message
    else:
        raise ValueError("Expected messages to be a list of strings.")

    partial_text = ""
    history.append({"user": user_message, "bot": ""})

    # Stream tokens from the RAG chain, updating the history entry in place
    # so any caller holding `history` sees the growing answer.
    for new_text in rag_chain.stream(user_message):
        partial_text += new_text
        history[-1]["bot"] = partial_text
        yield partial_text
# Clickable example prompts shown beneath the chat input.
examples = [
"What are the side effects of aspirin?",
"Can ibuprofen cause dizziness?"
]
# Title and description for the app (rendered above the chat UI).
title = "CareBot: AI Medical Assistant for Drug Information and Side Effects"
description = """
This AI-powered chatbot provides reliable information about drugs, their side effects, and related medical conditions.
Powered by the Groq API and LangChain, it delivers real-time, accurate responses.
Example Questions:
- What are the side effects of aspirin?
- Can ibuprofen cause dizziness?
Disclaimer: This chatbot is for informational purposes only and not a substitute for professional medical advice.
"""
# Customizing Gradio interface for a better look
# demo = gr.Interface(
# fn=rag_memory_stream,
# inputs=gr.Textbox(
# lines=5,
# placeholder="Type your medical question here...",
# label="Your Medical Question"
# ),
# outputs=gr.Textbox(
# lines=15, # Reduced line count for better layout
# label="AI Response"
# ),
# title=title,
# description=description,
# allow_flagging="never"
# )
# Chat UI wiring: ChatInterface calls rag_memory_stream(message, history)
# and streams the yielded partial answers into the chat window.
# NOTE(review): type="list" is not a documented gr.ChatInterface value
# ("messages"/"tuples" in recent gradio releases) — confirm against the
# gradio version pinned for this Space.
demo = gr.ChatInterface(fn=rag_memory_stream,
type="list",
title=title,
description=description,
fill_height=True,
examples=examples,
theme="glass",
)
if __name__ == "__main__":
    demo.launch()