# Load the drug side-effects dataset
import pandas as pd

context_data = pd.read_csv("./drugs_side_effects_drugs_com.csv")
# LLM served via Groq
import os
from langchain_groq import ChatGroq

llm = ChatGroq(model="llama-3.1-70b-versatile", api_key=os.environ.get("GROQ_API_KEY"))
## Embedding model
from langchain_huggingface import HuggingFaceEmbeddings

embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
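# Optional sanity check (an assumed snippet, not part of the original app): embed a
# short query and print the vector length to confirm the embedding model loads and
# runs locally. DEBUG_EMBEDDINGS is a hypothetical flag used here only for illustration.
if os.environ.get("DEBUG_EMBEDDINGS"):
    print(len(embed_model.embed_query("aspirin")))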
# Create the vector store
from langchain_chroma import Chroma

vectorstore = Chroma(
    collection_name="medical_dataset_store",
    embedding_function=embed_model,
)
# Add data to the vector store. add_texts expects an iterable of strings, so each
# DataFrame row is flattened into one text string first (a simple serialization choice).
documents = context_data.astype(str).apply(lambda row: " ".join(row), axis=1).tolist()
vectorstore.add_texts(documents)

retriever = vectorstore.as_retriever()
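# Illustrative check (assumed, not in the original app): a retriever is a LangChain
# Runnable, so it can be queried directly to verify that relevant rows come back
# before it is wired into the chain. DEBUG_RETRIEVER is a hypothetical flag.
if os.environ.get("DEBUG_RETRIEVER"):
    for doc in retriever.invoke("side effects of doxycycline")[:2]:
        print(doc.page_content)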
from langchain_core.prompts import PromptTemplate

template = """You are a medical expert.
Use the provided context to answer the question.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.
Context: {context}
Question: {question}
Answer:"""

rag_prompt = PromptTemplate.from_template(template)
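# Illustrative only (assumed, not in the original app): PromptTemplate.format shows the
# exact text the LLM will receive once the retrieved context and the user's question are
# substituted into the template. DEBUG_PROMPT is a hypothetical flag and the strings below
# are placeholder examples.
if os.environ.get("DEBUG_PROMPT"):
    print(rag_prompt.format(context="Aspirin is an NSAID used to reduce pain and fever.",
                            question="What is Aspirin?"))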
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# Compose the RAG chain: retrieve context, fill the prompt, call the LLM, parse to text
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)
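# End-to-end smoke test (assumed, not part of the original app): invoking the chain with
# a plain string runs retrieval, prompt filling, the Groq LLM call, and output parsing in
# one step. DEBUG_CHAIN is a hypothetical flag; the question is just an example.
if os.environ.get("DEBUG_CHAIN"):
    print(rag_chain.invoke("What is Aspirin used for?"))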
import gradio as gr


def rag_memory_stream(message, history):
    # Define possible greeting messages and their responses
    greetings = {
        "hello": "Hello! How can I assist you today?",
        "hi": "Hi there! How can I help you?",
        "good morning": "Good morning! How can I assist you?",
        "good afternoon": "Good afternoon! What can I help you with?",
        "good evening": "Good evening! Do you have any questions for me?",
    }

    # Normalize the input message to lowercase for comparison
    normalized_message = message.strip().lower()

    # Check if the message is a greeting
    if normalized_message in greetings:
        yield greetings[normalized_message]
        return  # End early as the greeting is handled

    # Default behavior for non-greeting messages
    partial_text = ""
    for new_text in rag_chain.stream(message):
        partial_text += new_text
        yield partial_text
examples = [
    "What is Aspirin?",
    "Can Doxycycline treat acne?",
]

description = (
    "Hello! Welcome to MediGuide ChatBot, an AI-powered assistant designed to help "
    "healthcare providers make informed decisions by providing reliable information "
    "about medical drugs, including their uses, side effects, contraindications, "
    "and classification."
)

title = "MediGuide ChatBot"
demo = gr.ChatInterface(
    fn=rag_memory_stream,
    type="messages",
    title=title,
    description=description,
    fill_height=True,
    examples=examples,
    theme=gr.themes.Soft(),
)

if __name__ == "__main__":
    demo.launch()