niknikita committed
Commit a331b68 · 1 Parent(s): af9102e

Update app.py

Files changed (1)
  1. app.py +21 -2
app.py CHANGED
@@ -33,8 +33,27 @@ def get_top95(y_predict, convert_target):
         break
     return lst_labels
 #
-# model = MyModel()
-model = torch.load("model.pt", map_location='cpu')
+# Customized model: a dropout and a dense classification layer on top of DistilBERT produce the final output.
+from transformers import DistilBertModel, DistilBertTokenizer
+
+class DistillBERTClass(torch.nn.Module):
+    def __init__(self):
+        super(DistillBERTClass, self).__init__()
+        self.l1 = DistilBertModel.from_pretrained("distilbert-base-uncased")
+        self.pre_classifier = torch.nn.Linear(768, 768)
+        self.dropout = torch.nn.Dropout(0.3)
+        self.classifier = torch.nn.Linear(768, 8)
+
+    def forward(self, input_ids, attention_mask):
+        output_1 = self.l1(input_ids=input_ids, attention_mask=attention_mask)
+        hidden_state = output_1[0]            # last hidden states: [batch, seq_len, 768]
+        pooler = hidden_state[:, 0]           # representation of the [CLS] token
+        pooler = self.pre_classifier(pooler)
+        pooler = torch.nn.ReLU()(pooler)
+        pooler = self.dropout(pooler)
+        output = self.classifier(pooler)      # logits for the 8 classes
+        return output
+model = torch.load("model.pt", map_location='cpu').eval()

 # print(model)
 # model = DistilBertForSequenceClassification.from_pretrained("model/distilbert-model1.pt", local_files_only=True)
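
Context for the change: torch.load on a fully pickled module can only unpickle the checkpoint if DistillBERTClass is defined at load time, which is presumably why this commit adds the class definition above the load call. A common alternative, sketched below under the assumption of a hypothetical "model_state.pt" checkpoint (not what this Space does), is to persist only the state_dict:

import torch

# Saving side (hypothetical):
# torch.save(model.state_dict(), "model_state.pt")

# Loading side: the class must still be defined, but the checkpoint no
# longer depends on the pickled object's class/module layout.
model = DistillBERTClass()
model.load_state_dict(torch.load("model_state.pt", map_location="cpu"))
model.eval()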
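
For completeness, a minimal inference sketch of how the app might call the loaded model. The tokenizer checkpoint is assumed to match the "distilbert-base-uncased" backbone above; the predict_probs helper and the commented hand-off to get_top95 are illustrative assumptions, not code from this commit.

import torch
from transformers import DistilBertTokenizer

tokenizer = DistilBertTokenizer.from_pretrained("distilbert-base-uncased")

def predict_probs(text, model):
    # Tokenize one string into input_ids / attention_mask tensors.
    enc = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():  # inference only, no gradient tracking
        logits = model(input_ids=enc["input_ids"], attention_mask=enc["attention_mask"])
    # The head emits 8 logits; softmax converts them to class probabilities.
    return torch.softmax(logits, dim=1).squeeze(0)

# Hypothetical hand-off to the existing helper:
# probs = predict_probs("some input text", model)
# lst_labels = get_top95(probs, convert_target)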