Djacon committed on
Commit a9218a7 · 1 Parent(s): cdf1ae0

Update README.md

Files changed (1)
  1. README.md +80 -0

README.md CHANGED
@@ -1,3 +1,83 @@
---

license: mit
language: ["ru"]
tags:
- russian
- classification
- emotion
- emotion-detection
- emotion-recognition
- multiclass
widget:
- text: "Как дела?"
- text: "Дурак твой дед"
- text: "Только попробуй!!!"
- text: "Не хочу в школу("
- text: "Сейчас ровно час дня"
- text: "А ты уверен, что эти полоски снизу не врут? Точно уверен? Вот прям 100 процентов?"
datasets:
- Djacon/ru_goemotions

---

# First, prepare a few functions to talk to the model

```python
import torch
from transformers import BertForSequenceClassification, AutoTokenizer

LABELS = ['радость', 'интерес', 'удивление', 'печаль', 'гнев', 'отвращение', 'страх', 'вина', 'нейтрально']
tokenizer = AutoTokenizer.from_pretrained('Djacon/rubert-tiny2-russian-emotion-detection')
model = BertForSequenceClassification.from_pretrained('Djacon/rubert-tiny2-russian-emotion-detection')

# Predict the single most likely emotion in a text
@torch.no_grad()
def predict_emotion(text: str) -> str:
    inputs = tokenizer(text, truncation=True, return_tensors='pt')
    inputs = inputs.to(model.device)

    outputs = model(**inputs)

    pred = torch.nn.functional.softmax(outputs.logits, dim=1)
    pred = pred.argmax(dim=1)

    return LABELS[pred[0]]

# Probabilistic prediction: a score for every emotion in a text
@torch.no_grad()
def predict_emotions(text: str) -> dict:
    inputs = tokenizer(text, truncation=True, return_tensors='pt')
    inputs = inputs.to(model.device)

    outputs = model(**inputs)

    pred = torch.nn.functional.softmax(outputs.logits, dim=1)

    emotions = {}
    for i, score in enumerate(pred[0].tolist()):
        emotions[LABELS[i]] = round(score, 4)
    return emotions
```
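
Both helpers send their inputs to `model.device`, so if a GPU is available it is enough to move the model there once. A minimal, optional sketch; nothing else in the snippets above needs to change:

```python
# Optional: run on GPU when one is present. The helpers above already call
# inputs.to(model.device), so they pick up the new device automatically.
device = 'cuda' if torch.cuda.is_available() else 'cpu'
model.to(device)
```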

# And then, just gently ask the model to predict your emotion

```python
simple_prediction = predict_emotion("Какой же сегодня прекрасный день, братья")
not_simple_prediction = predict_emotions("Какой же сегодня прекрасный день, братья")

print(simple_prediction)
print(not_simple_prediction)
# happiness
# {'neutral': 0.0004941817605867982, 'happiness': 0.9979524612426758, 'sadness': 0.0002536600804887712, 'enthusiasm': 0.0005498139653354883, 'fear': 0.00025326196919195354, 'anger': 0.0003583927755244076, 'disgust': 0.00013807788491249084}
```
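
The examples above score one text at a time. Several texts can also be scored in a single forward pass by letting the tokenizer pad them to a common length. A minimal sketch building on the helpers above; `predict_emotions_batch` is a hypothetical name, not part of this model card:

```python
# Hypothetical batch helper (illustration only): pad a list of texts,
# run one forward pass, and map each row's argmax back to LABELS.
@torch.no_grad()
def predict_emotions_batch(texts: list) -> list:
    inputs = tokenizer(texts, padding=True, truncation=True, return_tensors='pt')
    inputs = inputs.to(model.device)
    probs = torch.nn.functional.softmax(model(**inputs).logits, dim=1)
    return [LABELS[i] for i in probs.argmax(dim=1).tolist()]

print(predict_emotions_batch(["Как дела?", "Не хочу в школу("]))
# Prints one label from LABELS per input text
```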

# Citations

```
@misc{Djacon,
  author = {Djacon},
  year = {2023},
  publisher = {Hugging Face},
  journal = {Hugging Face Hub},
}
```