# -*- coding: utf-8 -*-
"""
Created on Fri Aug 18 08:01:41 2023

@author: Shamim Ahamed, RE AIMS Lab
"""

import streamlit as st
import requests


def get_user_data(api, parameters):
    """POST the chat payload to the LLM endpoint and return the parsed JSON, or None on failure."""
    try:
        response = requests.post(api, json=parameters)
    except requests.RequestException as e:
        print(f"ERROR: request failed: {e}")
        return None
    if response.status_code == 200:
        return response.json()
    print(f"ERROR: {response.status_code}")
    return None



st.set_page_config(page_title="SuSastho.AI Chatbot", page_icon="🚀", layout='wide')

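# Inline CSS tweaks for the chat UI. Note: the .css-* class names below are
# auto-generated by Streamlit and may change between Streamlit versions.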
st.markdown("""
<style>
p {
    font-size:0.8rem !important;
}
textarea {
    font-size: 0.8rem !important;
    padding: 0.8rem 1rem 0.75rem 0.8rem !important;
}
button {
    padding: 0.65rem !important;
}

.css-1lr5yb2 {
    background-color: rgb(105 197 180) !important;
}


.css-1c7y2kd {
    background-color: Transparent !important;
}
.css-4oy321 {
    background-color: rgba(240, 242, 246, 0.5) !important;
}

</style>
""", unsafe_allow_html=True)

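# Hide Streamlit's default main menu and footer.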
st.markdown("""
<style>
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
</style>
""",unsafe_allow_html=True)


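# Display name -> model identifier sent to the backend.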
model_names = {
    'BLOOM 7B': 'bloom-7b',
}



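# Sidebar controls: model picker plus two sensitivity values (likely temperature-like
# settings) that are forwarded to the backend in the request config.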
with st.sidebar:
    st.title("SuSastho.AI - ChatBot 🚀")
    model_name = model_names[st.selectbox('Model', list(model_names.keys()), 0)]

    ctx_checker_tmp = st.slider('Context Checker Sensitivity', min_value=0.001, max_value=1.0, value=0.008, step=0.001)
    lm_tmp = st.slider('Language Model Sensitivity', min_value=0.001, max_value=1.0, value=0.1, step=0.001)


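# LLM API endpoint, read from Streamlit secrets (e.g. .streamlit/secrets.toml).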
endpoint = st.secrets["LLMEndpoint"]


def main():
    if model_name == 'None':
        st.markdown('##### Please select a model.')
        return
    
    # Initialize chat history
    if "messages" not in st.session_state:
        st.session_state.messages = [{"role": 'assistant', "content": 'হ্যালো! আমি একটি এআই অ্যাসিস্ট্যান্ট। কীভাবে সাহায্য করতে পারি? 😊'}]
    
    # Display chat messages from history on app rerun
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])


    # Accept user input (placeholder is Bengali for "Type a message here")
    if prompt := st.chat_input("এখানে মেসেজ লিখুন"):
        # Display user message in chat message container
        with st.chat_message("user"):
            st.markdown(prompt)
        # Add user message to chat history
        st.session_state.messages.append({"role": "user", "content": prompt})
        
        
        ## Get context
        params = {
            "chat_history": [
                {"content": prompt}
            ],
            "model": "bloom-7b",
            "mode": "specific",
            "config": {
                "ctx_checker_tmp": ctx_checker_tmp,
                "lm_tmp": lm_tmp,
            }
        }
        resp = get_user_data(endpoint, params)
        if resp is None:
            st.markdown('#### INTERNAL ERROR')
            return
        
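        # Expected response shape (inferred from the fields accessed below; not an
        # official schema):
        # {
        #   "data": {
        #     "responses": [{"content": "<generated answer>"}],
        #     "logs": {"content": {"retrival_model": {"matched_doc": ["<doc>", ...]}}}
        #   }
        # }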
        response = resp['data']['responses'][0]['content']
        context = resp['data']['logs']['content']['retrival_model']['matched_doc']
        
        clen = len(context)
        context = '\n\n===============================\n\n'.join(context)
        
        response = f'###### Config: Context Checker Value: {ctx_checker_tmp}, LM Value: {lm_tmp}\n\n##### Matched Context: {clen}\n{context}\n\n##### Response:\n{response}'
        
        # Display assistant response in chat message container
        with st.chat_message("assistant", avatar=None):
            st.markdown(response)
            
        # Add assistant response to chat history
        st.session_state.messages.append({"role": "assistant", "content": response})


main()