import streamlit as st
import os
from groq import Groq
from dotenv import load_dotenv
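# Usage sketch (assumptions: the script is saved as app.py and a .env file sits next to it):
#   pip install streamlit groq python-dotenv
#   streamlit run app.py
# The .env file is expected to contain a line such as:
#   GROQ_API_KEY=your_key_here   # placeholder; substitute your own Groq API key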
# Load environment variables from .env file
load_dotenv()
# Fetch API key from environment
api_key = os.getenv("GROQ_API_KEY")
# App Title
st.title("Simple AI Agent with LLaMA 3.1")
# App Description
st.markdown("""
This AI agent uses the LLaMA 3.1 model and Groq API to answer your queries.
Please provide a question below to interact with the model.
""")
# Warn if API key is missing
if not api_key:
    st.error("API key not found. Please ensure you have added a `.env` file with `GROQ_API_KEY`.")
# Input box for user query
user_input = st.text_input("Enter your question:", placeholder="Ask something...")
# Button to fetch response
if st.button("Get Response"):
    if user_input.strip():
        try:
            # Initialize Groq client
            client = Groq(api_key=api_key)

            # Query the AI model (LLaMA 3.1 8B as served by Groq)
            chat_completion = client.chat.completions.create(
                messages=[{"role": "user", "content": user_input}],
                model="llama-3.1-8b-instant",
            )

            # Display the response
            st.success("AI Response:")
            st.write(chat_completion.choices[0].message.content)
        except Exception as e:
            st.error(f"Error: {e}")
    else:
        st.warning("Please enter a valid question!")
# Footer
st.markdown("---")
st.markdown("Powered by **LLaMA 3.1** and **Groq API**.")