Delete modules
Browse files
- modules/dataset.py +0 -16
- modules/inference.py +0 -11
modules/dataset.py
DELETED
@@ -1,16 +0,0 @@
|
|
1 |
-
from datasets import load_dataset

# Load the GoEmotions training split once at import time; query_emotion
# slices into this shared dataset on every call.
dataset = load_dataset("go_emotions", split="train")

# Human-readable emotion names, indexed by the integer ids stored in the
# dataset's "labels" feature.
emotions = dataset.info.features["labels"].feature.names


def query_emotion(start, end):
    """Return rows [start:end) as dicts pairing each text with its first emotion.

    Each result is {"text": ..., "emotion": ...}, where "emotion" is the
    name of the FIRST label id attached to the row — any additional label
    ids on the same row are ignored, matching the original behavior.
    """
    rows = dataset[start:end]
    observations = []
    for text, label_ids in zip(rows["text"], rows["labels"]):
        # label_ids is the row's list of integer label ids; map only the
        # first one to its human-readable name.
        observations.append({"text": text, "emotion": emotions[label_ids[0]]})
    return observations
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
modules/inference.py
DELETED
@@ -1,11 +0,0 @@
|
|
1 |
-
from transformers import T5Tokenizer, T5ForConditionalGeneration

# Tokenizer and model are downloaded/loaded once at import time and shared
# by every infer_t5 call.
tokenizer = T5Tokenizer.from_pretrained("t5-small")
model = T5ForConditionalGeneration.from_pretrained("t5-small")


def infer_t5(input):
    """Run t5-small on *input* and return the decoded generated text.

    NOTE(review): the parameter shadows the builtin ``input``; it is kept
    as-is so keyword callers (``infer_t5(input=...)``) are not broken.
    """
    encoded = tokenizer(input, return_tensors="pt")
    generated = model.generate(encoded.input_ids)
    # generate() returns a batch of sequences; decode the single sequence,
    # dropping special tokens such as the </s> terminator.
    first_sequence = generated[0]
    return tokenizer.decode(first_sequence, skip_special_tokens=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|