import os

import pdfplumber
import streamlit as st
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_community.chat_message_histories import StreamlitChatMessageHistory
from langchain_together import Together
from langserve import RemoteRunnable
from more_itertools import chunked
# Set the Together API key (replace the placeholder with your own key;
# avoid committing real secrets to source control)
os.environ['TOGETHER_API_KEY'] = "YOUR_TOGETHER_API_KEY"
# Extract text from the first `max_pages` pages of the source PDF
text = ""
max_pages = 16
with pdfplumber.open("/content/AI Engineer Test.pdf") as pdf:
    for i, page in enumerate(pdf.pages):
        if i >= max_pages:
            break
        # extract_text() returns None for pages without a text layer
        text += (page.extract_text() or "") + "\n"
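# `text` now holds the concatenated contents of the first `max_pages` pages
# and is used as the context passed to Bot() below.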
def Bot(Questions):
    chat_template = """
    Based on the provided context: {text}
    Please answer the following question: {Questions}
    Only provide answers that are directly related to the context. If the question is unrelated, respond with "I don't know".
    """
    prompt = PromptTemplate(
        input_variables=['text', 'Questions'],
        template=chat_template
    )
    llama3 = Together(model="meta-llama/Llama-3-70b-chat-hf", max_tokens=250)
    Generated_chat = LLMChain(llm=llama3, prompt=prompt)
    try:
        response = Generated_chat.invoke({
            "text": text,
            "Questions": Questions
        })
        response_text = response['text']
        # Strip the bare role token the model sometimes echoes
        response_text = response_text.replace("assistant", "")
        # Post-processing: drop any word already seen (case-insensitive) to
        # suppress the model's repetition loops. Note this is a blunt filter
        # that also removes legitimate repeats of common words.
        words = response_text.split()
        seen = set()
        filtered_words = [
            word for word in words
            if word.lower() not in seen and not seen.add(word.lower())
        ]
        response_text = ' '.join(filtered_words)
        response_text = response_text.strip()  # ensure no extra spaces at the ends
        if not response_text.endswith('.'):
            response_text += '.'
        return response_text
    except Exception as e:
        return f"Error in generating response: {e}"
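# Example usage (a minimal sketch; the question string is hypothetical and
# assumes the PDF loaded above and a valid TOGETHER_API_KEY is set):
#   answer = Bot("What is this document about?")
#   print(answer)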
def ChatBot(Questions):
    greetings = ["hi", "hello", "hey", "greetings", "what's up", "howdy"]
    # Treat the input as a greeting only if it equals a greeting outright or
    # starts with one followed by a space (so e.g. "history" is not matched)
    question_lower = Questions.lower().strip()
    if question_lower in greetings or any(question_lower.startswith(greeting + " ") for greeting in greetings):
        return "Hello! How can I assist you with the document today?"
    else:
        response = Bot(Questions)
        # Strip newlines so the answer renders as a single line
        return response.translate(str.maketrans('', '', '\n'))
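# Example usage (hypothetical inputs):
#   ChatBot("hello")                    -> canned greeting, no LLM call
#   ChatBot("Summarize the document.")  -> delegated to Bot() above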
# --- Page setup & sidebar ---
st.set_page_config(
    page_title="AI Engineer Test Chatbot",
    page_icon="/content/Insight Therapy Solutions.png",
    layout="wide",
)
st.sidebar.image("/content/Insight Therapy Solutions.png", width=200)
st.sidebar.title("Navigation")
st.sidebar.write("Reclaim Your Mental Health")
st.sidebar.markdown("[Visit us at](https://www.insighttherapysolutions.com/)")
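# Client for the remotely hosted LangServe RAG chain; it is called below with
# {"question": ..., "chat_history": ...} and streams the answer incrementally.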
rag_chain = RemoteRunnable("http://69.61.24.171:8000/rag_chain/")
msgs = StreamlitChatMessageHistory(key="langchain_messages")

# --- Main Content ---
st.markdown("## Chatbot For AI Engineer Test:")
if len(msgs.messages) == 0:
    msgs.add_ai_message("Hi! How can I assist you today?")
for msg in msgs.messages:
    st.chat_message(msg.type).write(msg.content)
if prompt := st.chat_input():
    st.chat_message("human").write(prompt)
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        full_response = ""
        try:
            # Take up to the first 40 stored messages (skipping the initial
            # AI greeting) and pair them into (human, ai) tuples
            _chat_history = st.session_state.langchain_messages[1:40]
            _chat_history_transform = list(
                chunked([msg.content for msg in _chat_history], n=2)
            )
            response = rag_chain.stream(
                {"question": prompt, "chat_history": _chat_history_transform}
            )
            # Render the streamed answer incrementally with a cursor marker
            for res in response:
                full_response += res or ""
                message_placeholder.markdown(full_response + "|")
            message_placeholder.markdown(full_response)
            msgs.add_user_message(prompt)
            msgs.add_ai_message(full_response)
        except Exception as e:
            st.error(f"An error occurred. {e}")
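# To launch the app (assuming this script is saved as app.py):
#   streamlit run app.py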