import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load your fine-tuned model
model_name = "SupermanRX/therapistAi"  # Replace with your model's Hugging Face path
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def chatbot(message, history):
    # Generate a response to the latest user message
    # (history is supplied by Gradio's chat interface but not used here)
    inputs = tokenizer(message, return_tensors="pt")
    outputs = model.generate(
        inputs["input_ids"],
        max_length=200,
        pad_token_id=tokenizer.eos_token_id,
    )
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response

# Use the default chatbot interface
demo = gr.ChatInterface(
    fn=chatbot,
    chatbot=gr.Chatbot(height=600),
    textbox=gr.Textbox(placeholder="Type your message here...", show_label=False),
)

demo.launch()
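One caveat worth noting: with a causal language model, generate() returns the prompt tokens followed by the completion, so decoding outputs[0] directly will echo the user's message back into the chat window. Below is a minimal sketch of a variant of the chatbot function that strips the prompt before decoding; it assumes the same tokenizer and model loaded above.

def chatbot(message, history):
    inputs = tokenizer(message, return_tensors="pt")
    outputs = model.generate(
        inputs["input_ids"],
        max_new_tokens=200,              # cap on newly generated tokens only
        pad_token_id=tokenizer.eos_token_id,
    )
    # Drop the prompt tokens so only the model's reply appears in the chat
    new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)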