import os
import gradio as gr
from groq import Groq

# Set up the Groq API client (the key is read from the "apikey" environment variable;
# set it locally or as a Hugging Face secret named "apikey" for deployment)
apikey = os.getenv("apikey")
print(f"API Key: {apikey}")  # Debugging line; remove before pushing to Hugging Face

client = Groq(api_key=apikey)

# Function to interact with the LLM using Groq's API
def chatbot(messages):
    try:
        # messages is a list of (user_message, bot_response) tuples (Gradio tuple-style history).
        # If the conversation is empty, return a greeting instead of calling the API.
        if not messages:
            return [("System", "Hello! How can I assist you today?")]

        user_input = messages[-1][0]  # Most recent user message

        if not user_input.strip():  # Guard against empty input
            messages.append(("System", "It looks like you sent an empty message. Please enter a message and try again."))
            return messages

        # Send the request to the Groq API
        chat_completion = client.chat.completions.create(
            messages=[{"role": "user", "content": user_input}],
            model="llama3-8b-8192",  # Groq-hosted Llama 3 8B; swap in another Groq model if needed
        )

        response = chat_completion.choices[0].message.content
        messages.append((user_input, response))  # Append user input and bot response as a tuple
        return messages
    except Exception as e:
        # Capture the specific error message for debugging
        print(f"Error: {e}")