Update app.py
app.py CHANGED
@@ -3,10 +3,26 @@ import pandas as pd
 import torch
 from torch import nn
 from torch.utils.data import Dataset, DataLoader
-from transformers import AutoTokenizer, BertForSequenceClassification
+from transformers import AutoTokenizer, BertModel, BertForSequenceClassification
 from sklearn import metrics
 import streamlit as st
 
+# Have data for BertClass ready for our tuned model.
+class BertClass(torch.nn.Module):
+    def __init__(self):
+        super(BertClass, self).__init__()
+        self.l1 = BertModel.from_pretrained(model_path)
+        self.dropout = torch.nn.Dropout(HEAD_DROP_OUT)
+        self.classifier = torch.nn.Linear(768, 6)
+
+    def forward(self, input_ids, attention_mask, token_type_ids):
+        output_1 = self.l1(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids)
+        hidden_state = output_1[0]
+        pooler = hidden_state[:, 0]
+        pooler = self.dropout(pooler)
+        output = self.classifier(pooler)
+        return output
+
 # Define models to be used
 bert_path = "bert-base-uncased"
 bert_tokenizer = AutoTokenizer.from_pretrained(bert_path)
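For reference, below is a minimal sketch of how the new BertClass head could be instantiated and queried in app.py. It is an illustration under stated assumptions, not the Space's actual code: model_path, HEAD_DROP_OUT, the checkpoint file name, and the sigmoid multi-label readout are placeholders that this diff does not define.

import torch
from transformers import AutoTokenizer, BertModel

model_path = "bert-base-uncased"  # assumed; the Space may instead point at a fine-tuned checkpoint
HEAD_DROP_OUT = 0.3               # assumed dropout rate for the classification head

class BertClass(torch.nn.Module):  # same architecture as in the diff above
    def __init__(self):
        super(BertClass, self).__init__()
        self.l1 = BertModel.from_pretrained(model_path)   # BERT encoder backbone
        self.dropout = torch.nn.Dropout(HEAD_DROP_OUT)
        self.classifier = torch.nn.Linear(768, 6)         # 6-way output head

    def forward(self, input_ids, attention_mask, token_type_ids):
        output_1 = self.l1(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids)
        hidden_state = output_1[0]      # last hidden states: (batch, seq_len, 768)
        pooler = hidden_state[:, 0]     # [CLS] token representation
        pooler = self.dropout(pooler)
        return self.classifier(pooler)  # raw logits for the 6 labels

tokenizer = AutoTokenizer.from_pretrained(model_path)
model = BertClass()
# model.load_state_dict(torch.load("model_weights.pt", map_location="cpu"))  # hypothetical fine-tuned weights
model.eval()

enc = tokenizer("example input text", return_tensors="pt", truncation=True, padding=True)
with torch.no_grad():
    logits = model(enc["input_ids"], enc["attention_mask"], enc["token_type_ids"])
probs = torch.sigmoid(logits)  # treat the 6 outputs as independent (multi-label) probabilities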