import streamlit as st
import os
from groq import Groq
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# Fetch API key from environment
api_key = os.getenv("GROQ_API_KEY")

# App Title
st.title("Simple AI Agent with LLaMA 3.1")

# App Description
st.markdown("""
This AI agent uses the LLaMA 3.1 model and the Groq API to answer your queries.
Please provide a question below to interact with the model.
""")

# Warn if API key is missing
if not api_key:
    st.error("API key not found. Please ensure you have added a `.env` file with `GROQ_API_KEY`.")

# Input box for user query
user_input = st.text_input("Enter your question:", placeholder="Ask something...")

# Button to fetch response
if st.button("Get Response"):
    if user_input.strip():
        try:
            # Initialize Groq client
            client = Groq(api_key=api_key)

            # Query the AI model (LLaMA 3.1 8B on Groq)
            chat_completion = client.chat.completions.create(
                messages=[{"role": "user", "content": user_input}],
                model="llama-3.1-8b-instant",
            )

            # Display the response
            st.success("AI Response:")
            st.write(chat_completion.choices[0].message.content)
        except Exception as e:
            st.error(f"Error: {e}")
    else:
        st.warning("Please enter a valid question!")

# Footer
st.markdown("---")
st.markdown("Powered by **LLaMA 3.1** and **Groq API**.")
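# A minimal sketch of how this app could be set up and launched, assuming the
# file is saved as app.py (the filename is an assumption, not stated above):
#
#   # .env (placed in the same directory as the script)
#   GROQ_API_KEY=your-groq-api-key-here
#
#   # Install the imported dependencies and run the Streamlit app
#   pip install streamlit groq python-dotenv
#   streamlit run app.py
#
# Streamlit then serves the UI locally (by default at http://localhost:8501),
# where the text input and "Get Response" button defined above become interactive.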