Waseem7711 committed on
Commit 39a4d0f · verified · 1 Parent(s): 1a4127d

Update app.py

Files changed (1)
  1. app.py +30 -17
app.py CHANGED
@@ -6,33 +6,46 @@ from dotenv import load_dotenv
 # Load environment variables from .env file
 load_dotenv()
 
-# Title of the app
+# Fetch API key from environment
+api_key = os.getenv("GROQ_API_KEY")
+
+# App Title
 st.title("Simple AI Agent with LLaMA 3.1")
 
-# Description
-st.markdown("This is an AI agent powered by the LLaMA 3.1 model and Groq API.")
-
-# Input for user queries
-user_input = st.text_input("Ask something:")
-
-# Display response area
-if st.button("Get Response"):
-    # Fetch API key from .env file
-    api_key = os.getenv("GROQ_API_KEY")
-
-    if api_key and user_input:
-        # Set up Groq client
-        client = Groq(api_key=api_key)
+# App Description
+st.markdown("""
+This AI agent uses the LLaMA 3.1 model and Groq API to answer your queries.
+Please provide a question below to interact with the model.
+""")
 
+# Warn if API key is missing
+if not api_key:
+    st.error("API key not found. Please ensure you have added a `.env` file with `GROQ_API_KEY`.")
 
+# Input box for user query
+user_input = st.text_input("Enter your question:", placeholder="Ask something...")
 
+# Button to fetch response
+if st.button("Get Response"):
+    if user_input.strip():
         try:
-            # Send query to LLaMA model
+            # Initialize Groq client
+            client = Groq(api_key=api_key)
+
+            # Query the AI model
             chat_completion = client.chat.completions.create(
                 messages=[{"role": "user", "content": user_input}],
                 model="llama3-8b-8192",
             )
-            # Display response
-            st.success(chat_completion.choices[0].message.content)
+
+            # Display the response
+            st.success("AI Response:")
+            st.write(chat_completion.choices[0].message.content)
         except Exception as e:
             st.error(f"Error: {e}")
     else:
-        st.warning("Please ensure the API key is set in the .env file and ask a valid question.")
+        st.warning("Please enter a valid question!")
+
+# Footer
+st.markdown("---")
+st.markdown("Powered by **LLaMA 3.1** and **Groq API**.")