import time

import requests
import streamlit as st

st.title("Omdena Chatbot Interface")

# Base URL of the deployed chatbot model (Rasa-style REST webhook server)
url = 'https://omdena-lc-omdena-ng-lagos-chatbot-model.hf.space'

# Initialize the chat history in session state
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the chat history on every rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Accept user input from the chat box
if user_input := st.chat_input("What is up?"):
    # Record and display the user message
    st.session_state.messages.append({"role": "user", "content": user_input})
    with st.chat_message("user"):
        st.markdown(user_input)

    payload = {"sender": "user", "message": user_input}
    response = requests.post(url + '/webhooks/rest/webhook', json=payload)
    bot_reply = response.json()

    # Use the first bot reply if one came back, otherwise fall back to a notice
    if bot_reply:
        assistant_response = bot_reply[0]["text"]
    else:
        assistant_response = 'The API request returned an empty list []. Please continue with a different question.'

    # Display the assistant response with a simulated typing effect
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        full_response = ""

        for chunk in assistant_response.split():
            full_response += chunk + " "
            time.sleep(0.05)
            # Add a blinking cursor to simulate typing
            message_placeholder.markdown(full_response + "▌")
        message_placeholder.markdown(full_response)

    # Add the assistant response to the chat history
    st.session_state.messages.append({"role": "assistant", "content": full_response})

    # Log the conversation turn to a Google Apps Script webhook
    webhook_url = "https://script.google.com/macros/s/AKfycbzhikyq7IduuEPGmrvcmJV9YlziiVyBysQ_oYf7lOzF8w9zg--BI2S_5cLuftp0pKqy/exec"
    action = "?action=addData"

    data = {
        "user": user_input,
        "bot": assistant_response
    }
    try:
        response = requests.post(webhook_url + action, json=data)
    except requests.exceptions.RequestException:
        # Logging failures should not interrupt the chat
        pass

# Debug panel: query the model server's status and version endpoints
with st.expander("Debug"):
    if st.button("Show Debug Info"):
        request_ids = ['/status', '/version']
        results = [requests.get(url + request_id).json() for request_id in request_ids]
        st.write(results)