Sushrut98 committed on
Commit 72c7316 · verified · 1 Parent(s): 0205e8e

Delete app.py

Files changed (1)
  1. app.py +0 -102
app.py DELETED
@@ -1,102 +0,0 @@
- ### 1. Imports and class names setup ###
- from timeit import default_timer as timer
-
- import gradio as gr
- import numpy as np
- import torch
- from transformers import BertTokenizer
- from transformers import BertForSequenceClassification
-
- # Setup class names (index 0 = positive, index 1 = negative)
- class_names = ["positive", "negative"]
-
- ### 2. Model and tokenizer preparation ###
-
- # Create the tokenizer
- tokenizer = BertTokenizer.from_pretrained('bert-base-uncased',
-                                           do_lower_case=True)
-
- # Create a BERT model with a two-class classification head
- model = BertForSequenceClassification.from_pretrained("bert-base-uncased",
-                                                        num_labels=2,
-                                                        output_attentions=False,
-                                                        output_hidden_states=False)
-
- # Load the fine-tuned weights on CPU
- model.load_state_dict(torch.load(f='finetuned_BERT_epoch_10.model',
-                                  map_location=torch.device('cpu')))
- model.eval()
-
- ### 3. Predict function ###
-
- # Create predict function
- def predict(text):
-     """Tokenizes the input text and returns the predicted sentiment and prediction time."""
-     # Start the timer
-     start_time = timer()
-
-     # Tokenize, pad and truncate the input to a fixed length
-     encoding = tokenizer.encode_plus(
-         text,
-         add_special_tokens=True,
-         max_length=256,
-         padding='max_length',
-         truncation=True,
-         return_token_type_ids=True,
-         return_tensors='pt'
-     )
-
-     # Tensors the model expects
-     inputs = {'input_ids': encoding["input_ids"],
-               'attention_mask': encoding["attention_mask"]}
-
-     # Forward pass without gradient tracking
-     with torch.no_grad():
-         outputs = model(**inputs)
-
-     # Logits for the two sentiment classes
-     logits = outputs[0]
-     logits = logits.detach().cpu().numpy()
-
-     # Map the highest-scoring class index to its label
-     pred_idx = int(np.argmax(logits, axis=1).flatten()[0])
-     prediction = class_names[pred_idx]
-
-     # Calculate the prediction time
-     pred_time = round(timer() - start_time, 5)
-
-     # Return the prediction and prediction time
-     return prediction, pred_time
-
-
- ### 4. Gradio app ###
-
- # Create title and description strings
- title = "Sentiment Analysis"
- description = "A fine-tuned bert-base-uncased model that classifies text as positive or negative."
-
- # Create the Gradio demo
- demo = gr.Interface(fn=predict,  # mapping function from input to output
-                     inputs="text",
-                     outputs=["text",
-                              gr.Number(label="Prediction time (s)")],
-                     title=title,
-                     description=description)
-
- # Launch the demo!
- demo.launch()