#gr.Interface.load("models/hipnologo/gpt2-imdb-finetune").launch() import gradio as gr from gradio import inputs, outputs from transformers import AutoTokenizer, AutoModelForSequenceClassification def predict_review(text): # Specify the model name or path model_name = "hipnologo/gpt2-imdb-finetune" # Replace with your model name on the Hugging Face model hub # Load your model and tokenizer tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModelForSequenceClassification.from_pretrained(model_name) # encoding the input text input_ids = tokenizer.encode(text, return_tensors="pt") # getting the logits output = model(input_ids) logits = output.logits # getting the predicted class predicted_class = logits.argmax(-1).item() sentiment = 'Positive' if predicted_class == 1 else 'Negative' # Create a Markdown string for the output result_md = f"Sentiment: {sentiment}" return result_md iface = gr.Interface( fn=predict_review, inputs=inputs.Textbox(lines=7, placeholder="Enter text here..."), outputs=outputs.Text(), title="Sentiment Analysis", description="This application predicts the sentiment (Positive/Negative) of the input text using a fine-tuned GPT-2 model.", theme="compact" # change this to the theme you prefer: 'huggingface', 'default' ) iface.launch()