File size: 3,643 Bytes
6daf863 9b38b1c 6daf863 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 |
import time
import os
import streamlit as st
from together import Together
# Set the Streamlit page configuration and theme.
st.set_page_config(page_title="AURA", layout="centered")

# Display the logo image, centered by giving the middle column most of the width.
col1, col2, col3 = st.columns([1, 30, 1])
with col2:
    st.image("img/AURA.png", use_column_width=True)  # Adjusted the path to use the uploaded image

# Greeting banner shown above the chat.
# NOTE(review): the "π" characters below look like mojibake'd emoji from an
# encoding round-trip — confirm the intended emoji and re-save the file as UTF-8.
st.write("""
π **Greetings!** π I am **AURA**: your **Artificial Understanding and Responsive Assistant**.
Whether you're navigating stormy seas or dancing under starlit skies, I'm here to lend an empathetic ear,
offer thoughtful guidance, and illuminate your digital journey. Let's explore life's pathways together! π€β¨
""")
def hide_hamburger_menu():
    """Inject CSS that hides Streamlit's default hamburger menu and footer."""
    css = """
    <style>
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
    </style>
    """
    st.markdown(css, unsafe_allow_html=True)
# Strip Streamlit's default chrome before rendering anything else.
hide_hamburger_menu()

# Initialize Together client with API key.
# NOTE(review): the key comes from the 'together_api' environment variable;
# os.getenv returns None when it is unset, so Together would be constructed
# with api_key=None — confirm the desired failure mode for a missing key.
API_KEY = os.getenv('together_api')
client = Together(api_key=API_KEY)
def generate_response(messages):
    """Request a chat completion from the Together API.

    Parameters
    ----------
    messages : list[dict]
        Chat history as OpenAI-style ``{"role": ..., "content": ...}`` dicts.

    Returns
    -------
    str | None
        The assistant's reply text, or ``None`` when the API call fails.
    """
    try:
        completion = client.chat.completions.create(
            model="meta-llama/Llama-3-70b-chat-hf",
            messages=messages
        )
    except Exception as e:
        # Best-effort: log the failure and let the caller show a fallback reply.
        print(f"Error generating response: {e}")
        return None
    return completion.choices[0].message.content
# Initialize session state for messages.
# st.session_state persists across script reruns, so the history is seeded
# exactly once per browser session, starting with the hidden system prompt.
if "messages" not in st.session_state:
    st.session_state.messages = [
        {"role": "system",
         "content": "You are an empathetic companion named AURA. Provide concise responses and use emojis to enhance the conversation."},
    ]
def reset_conversation():
    """Drop all chat turns, keeping only the initial system prompt."""
    system_message = {
        "role": "system",
        "content": "You are an empathetic companion named AURA. Provide concise responses and use emojis to enhance the conversation.",
    }
    st.session_state.messages = [system_message]
# Replay every stored turn in the chat transcript.
for msg in st.session_state.messages:
    if msg["role"] == "system":
        continue  # the system prompt is never shown to the user
    with st.chat_message(msg["role"]):
        st.write(msg["content"])
# ---- User input and assistant response ----
input_prompt = st.chat_input("Say something...")
if input_prompt:
    # Echo the user's message and record it in the history.
    with st.chat_message("user"):
        st.markdown(f"**You:** {input_prompt}")
    st.session_state.messages.append({"role": "user", "content": input_prompt})

    with st.chat_message("assistant"):
        with st.spinner("Thinking π‘..."):
            # Include system message in the messages sent to the API but not
            # in the displayed chat: stored system entries are filtered out
            # and a fresh copy is inserted so the prompt is always first.
            messages_for_api = [msg for msg in st.session_state.messages if msg["role"] != "system"]
            messages_for_api.insert(0, {"role": "system",
                                        "content": "You are an empathetic companion named AURA. Provide concise responses and use emojis to enhance the conversation."})
            response = generate_response(messages_for_api)

            message_placeholder = st.empty()
            answer = response or "Sorry, I couldn't generate a response."

            # Simulate typing: reveal the answer one character at a time,
            # showing a trailing "|" cursor while typing is in progress.
            full_response = ""
            for chunk in answer:
                full_response += chunk
                time.sleep(0.02)  # controls the "typing" speed
                message_placeholder.markdown(full_response + " |")
            # BUG FIX: the " |" cursor used to remain on screen after typing
            # finished; render the final text once more without it.
            # SECURITY FIX: unsafe_allow_html removed — the model's reply is
            # untrusted text, so let Streamlit escape any HTML it contains.
            message_placeholder.markdown(full_response)

    st.session_state.messages.append({"role": "assistant", "content": answer})
# Reset button: on_click clears the history before the script reruns, then the
# explicit rerun refreshes the transcript immediately.
# NOTE(review): st.experimental_rerun is deprecated (replaced by st.rerun in
# newer Streamlit releases) — confirm the pinned Streamlit version still has it.
# The button label also contains mojibake'd emoji ("ποΈ") — see encoding note above.
if st.button('ποΈ Reset All Chat', on_click=reset_conversation):
    st.experimental_rerun()
|