|
import time |
|
import os |
|
import streamlit as st |
|
from together import Together |
|
|
|
|
|
# --- Page configuration and header ----------------------------------------
st.set_page_config(page_title="AURA", layout="centered")

# Center the logo: a very wide middle column flanked by two thin spacers.
col1, col2, col3 = st.columns([1, 30, 1])
with col2:
    # use_container_width replaces the deprecated use_column_width parameter
    # (removed in recent Streamlit releases).
    st.image("img/AURA.png", use_container_width=True)

# Intro blurb shown above the chat. NOTE(review): the "π" runs look like
# mojibake of the original emojis — confirm against the intended copy.
st.write("""
π **Greetings!** π I am **AURA**: your **Artificial Understanding and Responsive Assistant**.
Whether you're navigating stormy seas or dancing under starlit skies, I'm here to lend an empathetic ear,
offer thoughtful guidance, and illuminate your digital journey. Let's explore life's pathways together! π€β¨
""")
|
|
|
|
|
def hide_hamburger_menu():
    """Inject CSS that hides Streamlit's hamburger menu and default footer."""
    chrome_css = """
    <style>
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
    </style>
    """
    # unsafe_allow_html is required for Streamlit to render raw <style> tags.
    st.markdown(chrome_css, unsafe_allow_html=True)
|
|
|
|
|
hide_hamburger_menu()

# Credentials come from the environment so no key is committed to source.
# NOTE(review): the env var name 'together_api' is unconventional (the SDK's
# default is TOGETHER_API_KEY) — confirm the deployment sets this exact name.
# If the variable is unset, API_KEY is None and requests will fail at call time.
API_KEY = os.getenv('together_api')
client = Together(api_key=API_KEY)
|
|
|
|
|
def generate_response(messages):
    """Request a single chat completion from the Together AI API.

    Parameters
    ----------
    messages : list[dict]
        Conversation history as {"role": ..., "content": ...} dicts,
        system prompt first.

    Returns
    -------
    str | None
        The assistant's reply text, or None when the API call fails
        (the caller substitutes a fallback message).
    """
    try:
        completion = client.chat.completions.create(
            model="meta-llama/Llama-3-70b-chat-hf",
            messages=messages,
        )
    except Exception as e:
        # Boundary handler: report to the server console and signal failure
        # so the UI can degrade gracefully instead of crashing the script run.
        print(f"Error generating response: {e}")
        return None
    return completion.choices[0].message.content
|
|
|
|
|
|
|
# Seed the conversation once per browser session. The system prompt fixes
# AURA's persona; it stays at index 0 and is hidden from the rendered chat.
if "messages" not in st.session_state:

    st.session_state.messages = [

        {"role": "system",

         "content": "You are an empathetic companion named AURA. Provide concise responses and use emojis to enhance the conversation."},

    ]
|
|
|
|
|
def reset_conversation():
    """Button callback: discard the chat history and restore the initial
    system prompt so AURA starts over with a clean persona."""
    persona_prompt = {
        "role": "system",
        "content": "You are an empathetic companion named AURA. Provide concise responses and use emojis to enhance the conversation.",
    }
    st.session_state.messages = [persona_prompt]
|
|
|
|
|
|
|
# Replay the stored conversation on every rerun, hiding the system prompt.
visible_messages = (m for m in st.session_state.messages if m["role"] != "system")
for msg in visible_messages:
    with st.chat_message(msg["role"]):
        st.write(msg["content"])
|
|
|
|
|
# --- Chat turn: read user input, call the model, stream the reply ----------
input_prompt = st.chat_input("Say something...")
if input_prompt:
    # Echo the user's message immediately so the UI feels responsive.
    with st.chat_message("user"):
        st.markdown(f"**You:** {input_prompt}")

    st.session_state.messages.append({"role": "user", "content": input_prompt})

    with st.chat_message("assistant"):
        with st.spinner("Thinking π‘..."):
            # session_state.messages already begins with the system prompt
            # (both the initializer and reset_conversation put it at index 0),
            # so the previous filter-then-reinsert dance was redundant.
            response = generate_response(st.session_state.messages)
            answer = response or "Sorry, I couldn't generate a response."

            # Simulated typing: stream word by word. The old char-by-char
            # loop at 20 ms/char took ~20 s per 1000 characters.
            message_placeholder = st.empty()
            streamed = ""
            for word in answer.split(" "):
                streamed += word + " "
                time.sleep(0.02)
                # Cursor is shown only while streaming. unsafe_allow_html was
                # removed here: the model's output is untrusted and must not
                # be rendered as raw HTML.
                message_placeholder.markdown(streamed + " |")
            # Final render WITHOUT the cursor — previously the trailing " |"
            # artifact was left on screen after streaming finished.
            message_placeholder.markdown(answer)

    st.session_state.messages.append({"role": "assistant", "content": answer})
|
|
|
# Reset control. The on_click callback clears the history before the script
# reruns; st.rerun() replaces st.experimental_rerun(), which was deprecated
# and subsequently removed from Streamlit.
if st.button('ποΈ Reset All Chat', on_click=reset_conversation):
    st.rerun()
|
|