# Import libraries
import os

import gradio as gr
from groq import Groq

# Set up Groq API client (ensure GROQ_API_KEY is set in your environment
# or as a Hugging Face secret for deployment)
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

# Function to interact with the LLM using Groq's API
def chatbot(user_input):
    try:
        # Send user input to the LLM and get a response
        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "user",
                    "content": user_input,
                }
            ],
            model="llama3-8b-8192",  # Replace with the desired model
        )
        response = chat_completion.choices[0].message.content
        return response
    except Exception as e:
        # Handle errors gracefully and surface the message in the UI
        return f"An error occurred: {str(e)}"

# Create a Gradio interface for real-time interaction
iface = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(label="Your Message", placeholder="Type a message here..."),
    outputs=gr.Textbox(label="Response"),
    title="Real-Time Text-to-Text Chatbot - by ATIF MEHMOOD",
    description="Chat with the LLM in real time using Groq's API.",
)

# Launch the Gradio app
iface.launch()