import streamlit as st
import json
import urllib.error
import urllib.request
from menu import menu


menu()

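# The secrets lookups below (st.secrets["url"] and st.secrets["ms_api_key"]) assume a
# Streamlit secrets file. A minimal sketch of the expected .streamlit/secrets.toml, with
# placeholder values only (the real endpoint URL and key are not part of this file):
#
#   url = "https://<your-endpoint-url>"
#   ms_api_key = "<your-endpoint-key>"
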
# Retrieve Answer through API -----------------------------------------------
def chat_api(history, question):
    """Send the chat history and current question to the model endpoint and return its answer."""
    data = {"chat_history": history, "question": question}
    body = json.dumps(data).encode("utf-8")

    url = st.secrets["url"]
    api_key = st.secrets["ms_api_key"]
    # The key can be a primary/secondary key, an AMLToken, or a Microsoft Entra ID token for the endpoint.
    if not api_key:
        raise Exception("A key must be provided to invoke the endpoint")

    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json",
        "Authorization": "Bearer " + api_key,
    }
    req = urllib.request.Request(url, body, headers)
    try:
        response = urllib.request.urlopen(req)
        result = response.read()
    except urllib.error.HTTPError as error:
        # Surface the endpoint's error body instead of letting the raw traceback reach the UI.
        raise Exception(
            f"The request failed with status code {error.code}: "
            + error.read().decode("utf8", "ignore")
        )
    answer = json.loads(result)["answer"]

    return answer

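# Request/response shapes assumed by chat_api, inferred from how the payload is built and
# parsed above (the actual endpoint contract may differ):
#
#   request body:  {"chat_history": [{"role": "user", "content": "..."}, ...],
#                   "question": "..."}
#   response body: {"answer": "..."}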

# Header UI ------------------------------------------------------------------
# st.image('./Cognizant_Logo.jpg', width=300)
st.header("RCM Copilot", divider = 'blue')

# Chat UI --------------------------------------------------------------------
st.write("This chat as access to search the web to answer your quesitons.")
st.write("Model: gpt-4o-mini")
st.write("Version: V0.1")
st.header("", divider = 'blue')

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Accept user input
if prompt := st.chat_input("What is your question?"):
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})
    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)

    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        full_response = ""
        with st.status("Getting the answer...") as status:
            #assistant_response = get_completion(prompt, ft_model)
            # Note: st.session_state.messages already includes the latest user prompt.
            chat_history = st.session_state.messages
            assistant_response = chat_api(history = chat_history, question = prompt)
            full_response = assistant_response
            status.update(label="Done.", state = "complete")
        message_placeholder.markdown(full_response)
    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": full_response})