import gradio as gr

# Ensure you have the necessary packages installed (pip install gradio).
# If you're using Groq, import the client and initialize it with your API key:
# from groq import Groq
# client = Groq(api_key="YOUR_API_KEY")

with gr.Blocks() as iface:
    gr.Markdown("# Mr AI")
    gr.Markdown("Hi there! I'm your friendly assistant. Ask me anything, and I'll do my best to help! (By Thirumoorthi.A)")

    chat_history_output = gr.Chatbot(label="Chat History", height=400)

    with gr.Row():
        message_input = gr.Textbox(label="Your Question", placeholder="Type your question here...", lines=2)
        submit_btn = gr.Button("Submit")
        clear_btn = gr.Button("Clear Chat")

    def respond(message, chat_history):
        # Replace this placeholder with your model's response logic.
        # If using Groq, see the sketch at the end of this file.
        response = "This is a placeholder response."
        # gr.Chatbot expects the history as a list of (user_message, assistant_message) pairs,
        # so each turn is appended as one pair rather than two separate role-labelled entries.
        chat_history.append((message, response))
        return "", chat_history

    def clear_chat():
        # Returning an empty list resets the Chatbot component.
        return []

    submit_btn.click(respond, inputs=[message_input, chat_history_output], outputs=[message_input, chat_history_output])
    clear_btn.click(clear_chat, outputs=chat_history_output)

iface.launch()
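
# A minimal sketch of what respond() could look like once Groq is wired in,
# kept commented out so this file still runs as-is. It assumes a GROQ_API_KEY
# environment variable and a model name such as "llama-3.1-8b-instant"; both
# are placeholders, so check Groq's documentation for client setup and the
# currently available models. To use it, replace the placeholder respond()
# above with this version (defined before iface.launch()).
#
# import os
# from groq import Groq
#
# client = Groq(api_key=os.environ["GROQ_API_KEY"])
#
# def respond(message, chat_history):
#     # Replay the prior (user, assistant) pairs so the model has conversational context.
#     messages = []
#     for user_msg, assistant_msg in chat_history:
#         messages.append({"role": "user", "content": user_msg})
#         messages.append({"role": "assistant", "content": assistant_msg})
#     messages.append({"role": "user", "content": message})
#
#     completion = client.chat.completions.create(
#         model="llama-3.1-8b-instant",  # placeholder model name
#         messages=messages,
#     )
#     response = completion.choices[0].message.content
#
#     chat_history.append((message, response))
#     return "", chat_history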