# AMC Bot — Streamlit chat application (Hugging Face Space).
import streamlit as st | |
from app_config import SYSTEM_PROMPT | |
from langchain_groq import ChatGroq | |
from dotenv import load_dotenv | |
from pathlib import Path | |
import os | |
import session_manager | |
from langchain_community.utilities import GoogleSerperAPIWrapper | |
# Load environment variables (GROQ_API / SERPER_API keys) from the local .env file.
load_dotenv(dotenv_path=Path('.') / '.env')
# Inject CSS overrides: mirror the user's chat bubble (icon on the right,
# text right-aligned) and hide one unwanted Streamlit element.
# NOTE(review): .st-emotion-cache-* class names are auto-generated and
# change between Streamlit releases — re-verify these selectors after any
# Streamlit upgrade.
st.markdown(
    """
    <style>
    .st-emotion-cache-janbn0 {
        flex-direction: row-reverse;
        text-align: right;
    }
    .st-emotion-cache-1ec2a3d{
        display: none;
    }
    </style>
    """,
    unsafe_allow_html=True,
)
# Initialize chat history with the system prompt on first run.
print("SYSTEM MESSAGE")
if "messages" not in st.session_state:
    # Carry a "type" key like every user/assistant message appended later,
    # so all entries share the same schema (the render loop skips "system"
    # messages, but reading message["type"] must never KeyError).
    st.session_state.messages = [
        {"role": "system", "content": SYSTEM_PROMPT, "type": "string"}]
print("SYSTEM MODEL")
if "llm" not in st.session_state:
    # Fail fast on a missing key: the old str(os.getenv('GROQ_API'))
    # silently converted an unset variable into the literal string "None",
    # which only surfaced later as a confusing authentication error.
    groq_api_key = os.getenv('GROQ_API')
    if not groq_api_key:
        st.error("GROQ_API environment variable is not set.")
        st.stop()
    st.session_state.llm = ChatGroq(
        model="llama-3.3-70b-versatile",
        temperature=0,       # deterministic answers for a support bot
        max_tokens=None,     # no output-length cap
        timeout=None,
        max_retries=2,
        api_key=groq_api_key,
    )
if "search_tool" not in st.session_state:
    # Same fail-fast guard as the LLM key: str(os.getenv(...)) would have
    # passed the literal string "None" when the variable is unset.
    serper_api_key = os.getenv('SERPER_API')
    if not serper_api_key:
        st.error("SERPER_API environment variable is not set.")
        st.stop()
    st.session_state.search_tool = GoogleSerperAPIWrapper(
        serper_api_key=serper_api_key)
def get_answer(query):
    """Answer an AMC query via translate → site-restricted search → grounded reply.

    Args:
        query: The user's raw input (may contain the whole prior conversation
            transcript, as built by the chat handler).

    Returns:
        The LLM's answer text, grounded only in search results from the
        official AMC website, in the language of the original query.
    """
    # Step 1: translate/normalise the query to English so the site search works.
    new_search_query = st.session_state.llm.invoke(
        f"Convert below query to English for Ahmedabad Municipal Corporation (AMC). You just need to give the translated query. Don't add any additional details.\n Query: {query}").content
    # Step 2: restrict the web search to the official AMC site.
    search_result = st.session_state.search_tool.run(
        f"{new_search_query} site:https://ahmedabadcity.gov.in/")
    # Step 3: answer strictly from the retrieved context, replying in the
    # user's original language. (Typos in the old prompt — "assistance",
    # "asnwer" — were fixed; prompt wording is part of the instruction.)
    system_prompt = """You are a helpful assistant for The Ahmedabad Municipal Corporation (AMC), which answers the user query from the given context only. Output language should be the same as `original_query_from_user`.
context: {context}
original_query_from_user: {original_query}
query: {query}"""
    return st.session_state.llm.invoke(system_prompt.format(
        context=search_result,
        query=new_search_query,
        original_query=query)).content
# Expose this Streamlit session's state to the session_manager module.
session_manager.set_session_state(st.session_state)
print("container")  # debug marker: page layout starts rendering below
# Display chat messages from history (the system prompt is never shown).
st.markdown("<h1 style='text-align: center;'>AMC Bot</h1>", unsafe_allow_html=True)
container = st.container(height=700)
for message in st.session_state.messages:
    if message["role"] != "system":
        with container.chat_message(message["role"]):
            # Use .get so a message without a "type" key renders as plain
            # text instead of raising KeyError.
            msg_type = message.get("type")
            if msg_type == "table":
                # "table" messages carry a DataFrame; first column is the index.
                st.dataframe(message['content'].set_index(
                    message['content'].columns[0]))
            elif msg_type == "html":
                st.markdown(message['content'], unsafe_allow_html=True)
            else:
                st.write(message["content"])
# When the user submits a query.
if prompt := st.chat_input("Enter your query here... "):
    with container.chat_message("user"):
        st.write(prompt)
    st.session_state.messages.append(
        {"role": "user", "content": prompt, "type": "string"})
    st.session_state.last_query = prompt
    with container.chat_message("assistant"):
        # Rebuild the conversation transcript for the LLM. The new prompt
        # was already appended to st.session_state.messages above, so the
        # loop below includes it — the old code then appended it a second
        # time, duplicating the latest user turn in the transcript.
        current_conversation = ""
        for message in st.session_state.messages:
            if message['role'] == 'user':
                current_conversation += f"user: {message['content']}\n"
            if message['role'] == 'assistant':
                current_conversation += f"ai: {message['content']}\n"
        print("****************************************** Messages ******************************************")
        print("messages", current_conversation)
        print()
        print()
        response = get_answer(current_conversation)
        print("******************************************************** Response ********************************************************")
        print("MY RESPONSE IS:", response)
        st.write(response)
        st.session_state.messages.append(
            {"role": "assistant", "content": response, "type": "string"})