### 1. Imports and examples setup ###
import gradio as gr
import numpy as np
import torch
from transformers import BertForSequenceClassification, BertTokenizer
from timeit import default_timer as timer
examples = [
["Basically there's a family where a little boy (Jake) thinks there's a zombie in his closet & his parents are fighting all the time.<br /><br />This movie is slower than a soap opera... and suddenly, Jake decides to become Rambo and kill the zombie.<br /><br />OK, first of all when you're going to make a film you must Decide if its a thriller or a drama! As a drama the movie is watchable. Parents are divorcing & arguing like in real life. And then we have Jake with his closet which totally ruins all the film! I expected to see a BOOGEYMAN similar movie, and instead i watched a drama with some meaningless thriller spots.<br /><br />3 out of 10 just for the well playing parents & descent dialogs. As for the shots with Jake: just ignore them."],
["One of the other reviewers has mentioned that after watching just 1 Oz episode you'll be hooked. They are right, as this is exactly what happened with me.<br /><br />The first thing that struck me about Oz was its brutality and unflinching scenes of violence, which set in right from the word GO. Trust me, this is not a show for the faint hearted or timid. This show pulls no punches with regards to drugs, sex or violence. Its is hardcore, in the classic use of the word.<br /><br />It is called OZ as that is the nickname given to the Oswald Maximum Security State Penitentary. It focuses mainly on Emerald City, an experimental section of the prison where all the cells have glass fronts and face inwards, so privacy is not high on the agenda. Em City is home to many..Aryans, Muslims, gangstas, Latinos, Christians, Italians, Irish and more....so scuffles, death stares, dodgy dealings and shady agreements are never far away.<br /><br />I would say the main appeal of the show is due to the fact that it goes where other shows wouldn't dare. Forget pretty pictures painted for mainstream audiences, forget charm, forget romance...OZ doesn't mess around. The first episode I ever saw struck me as so nasty it was surreal, I couldn't say I was ready for it, but as I watched more, I developed a taste for Oz, and got accustomed to the high levels of graphic violence. Not just violence, but injustice (crooked guards who'll be sold out for a nickel, inmates who'll kill on order and get away with it, well mannered, middle class inmates being turned into prison bitches due to their lack of street skills or prison experience) Watching Oz, you may become comfortable with what is uncomfortable viewing....thats if you can get in touch with your darker side."]
]
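# The example reviews above are raw IMDB-style texts; the <br /> tags they contain are
# part of the original data and are passed to the model unchanged.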
# Label mapping assumed by predict() below: 0 -> positive, 1 -> negative
### 2. Model and tokenizer preparation ###
# Load the BERT tokenizer
tokenizer = BertTokenizer.from_pretrained('bert-base-uncased',
                                          do_lower_case=True)

# Create the BERT sequence-classification model (2 labels)
model = BertForSequenceClassification.from_pretrained("bert-base-uncased",
                                                      num_labels=2,
                                                      output_attentions=False,
                                                      output_hidden_states=False)

# Load the fine-tuned weights onto the CPU and switch to evaluation mode
model.load_state_dict(torch.load(f='finetuned_BERT_epoch_10.model', map_location=torch.device('cpu')))
model.eval()
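# Note: inference in this app runs entirely on the CPU — the weights are loaded with
# map_location='cpu' and no tensors are moved to a GPU in predict() below.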
### 3. Predict function ###
# Create predict function
def predict(text):
    """Tokenizes the input text and returns the predicted sentiment and the prediction time."""
    # Start the timer
    start_time = timer()

    # Tokenize and encode the review (pad/truncate to 256 tokens)
    encoding = tokenizer.encode_plus(
        text,
        add_special_tokens=True,
        max_length=256,
        padding="max_length",
        truncation=True,
        return_token_type_ids=True,
        return_tensors='pt'
    )

    inputs = {'input_ids': encoding["input_ids"],
              'attention_mask': encoding["attention_mask"],
              }

    # Forward pass without gradient tracking
    with torch.no_grad():
        outputs = model(**inputs)

    logits = outputs[0].detach().cpu().numpy()

    # Argmax over the two logits; index 0 -> positive, 1 -> negative
    preds_flat = np.argmax(logits, axis=1).flatten()
    if preds_flat[0] == 0:
        prediction = "positive"
    else:
        prediction = "negative"

    # Calculate the prediction time
    pred_time = round(timer() - start_time, 5)

    # Return the predicted label and the prediction time
    return prediction, pred_time
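# A minimal sketch of a probability-returning variant, not wired into the Gradio app.
# It assumes the `tokenizer` and `model` objects defined above and the same label order
# as predict() (index 0 -> positive, 1 -> negative); `predict_proba` is a hypothetical
# helper name used only for illustration.
def predict_proba(text):
    """Return a {label: probability} dict for a single review."""
    encoding = tokenizer(
        text,
        add_special_tokens=True,
        max_length=256,
        padding="max_length",
        truncation=True,
        return_tensors='pt'
    )
    with torch.no_grad():
        outputs = model(input_ids=encoding["input_ids"],
                        attention_mask=encoding["attention_mask"])
    # Softmax over the two logits gives a confidence for each class
    probs = torch.softmax(outputs[0], dim=1).squeeze(0)
    return {"positive": float(probs[0]), "negative": float(probs[1])}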
### 4. Gradio app ###
# Create title and description strings
title = "Sentiment Analysis"
description = "Uses a fine-tuned BERT model to classify a movie review as positive or negative."

# Create the Gradio demo
demo = gr.Interface(fn=predict,  # mapping function from input to output
                    inputs=gr.Textbox(lines=5, max_lines=6, label="Input Text"),
                    outputs=["text",
                             gr.Number(label="Prediction time (s)")],
                    examples=examples,  # example reviews defined above
                    title=title,
                    description=description)
# Launch the demo!
demo.launch()
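# demo.launch() uses Gradio's defaults; on Hugging Face Spaces the hosting is managed by
# the platform, while locally the optional share=True argument can expose a temporary
# public URL.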