import streamlit as st
from app_config import SYSTEM_PROMPT
from langchain_groq import ChatGroq
from dotenv import load_dotenv
from pathlib import Path
import os
import session_manager

from langchain_community.utilities import GoogleSerperAPIWrapper
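# Load API keys (GROQ_API, SERPER_API) from a local .env file.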
env_path = Path('.') / '.env'
load_dotenv(dotenv_path=env_path)


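# Custom CSS: right-align the user's chat bubbles (row-reverse) and hide one generated element.
# Note: the st-emotion-cache-* class names are auto-generated by Streamlit and may change between versions.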
st.markdown(
    """
<style>
    .st-emotion-cache-janbn0 {
        flex-direction: row-reverse;
        text-align: right;
    }
    .st-emotion-cache-1ec2a3d{
        display: none;
    }
</style>
""",
    unsafe_allow_html=True,
)

# Initialize chat history
print("SYSTEM MESSAGE")
if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "system", "content": SYSTEM_PROMPT}]

print("SYSTEM MODEL")
if "llm" not in st.session_state:
    st.session_state.llm = ChatGroq(
        model="llama-3.3-70b-versatile",
        temperature=0,
        max_tokens=None,
        timeout=None,
        max_retries=2,
        api_key=os.getenv('GROQ_API')
    )

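# Serper (Google Search API) wrapper, used below to search the AMC website.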
if "search_tool" not in st.session_state:
    st.session_state.search_tool = GoogleSerperAPIWrapper(
        serper_api_key=os.getenv('SERPER_API'))


def get_answer(query):
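    """Translate the incoming query/conversation to English, search ahmedabadcity.gov.in
    via Serper, and answer from the retrieved context in the user's original language."""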
    new_search_query = st.session_state.llm.invoke(
        f"Convert the query below to English for the Ahmedabad Municipal Corporation (AMC). Return only the translated query; do not add any additional details.\n Query: {query}").content
    search_result = st.session_state.search_tool.run(
        f"{new_search_query} site:https://ahmedabadcity.gov.in/")

    system_prompt = """You are a helpful assistance for The Ahmedabad Municipal Corporation (AMC). which asnwer user query from given context only. Output language should be as same as `original_query_from_user`.
  context: {context}
  original_query_from_user: {original_query}
  query: {query}"""

    return st.session_state.llm.invoke(system_prompt.format(context=search_result, query=new_search_query, original_query=query)).content


session_manager.set_session_state(st.session_state)

print("container")
# Display chat messages from history
st.markdown("<h1 style='text-align: center;'>AMC Bot</h1>", unsafe_allow_html=True)
container = st.container(height=700)
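# Each stored message carries a 'type' field ("table", "html", or "string") that decides how it is rendered.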
for message in st.session_state.messages:
    if message["role"] != "system":
        with container.chat_message(message["role"]):
            if message['type'] == "table":
                st.dataframe(message['content'].set_index(
                    message['content'].columns[0]))
            elif message['type'] == "html":
                st.markdown(message['content'], unsafe_allow_html=True)
            else:
                st.write(message["content"])

# When the user submits a new query
if prompt := st.chat_input("Enter your query here... "):
    with container.chat_message("user"):
        st.write(prompt)
    st.session_state.messages.append(
        {"role": "user", "content": prompt, "type": "string"})
    st.session_state.last_query = prompt

    with container.chat_message("assistant"):
        current_conversation = """"""

        # if st.session_state.next_agent != "general_agent" and st.session_state.next_agent in st.session_state.agent_history:
        for message in st.session_state.messages:
            if message['role'] == 'user':
                current_conversation += f"""user: {message['content']}\n"""
            if message['role'] == 'assistant':
                current_conversation += f"""ai: {message['content']}\n"""

        current_conversation += f"""user: {prompt}\n"""

        print("****************************************** Messages ******************************************")
        print("messages", current_conversation)
        print()
        print()
        response = get_answer(current_conversation)
        print("******************************************************** Response ********************************************************")
        print("MY RESPONSE IS:", response)

        st.write(response)
        st.session_state.messages.append(
            {"role": "assistant", "content": response, "type": "string"})