Update modules/abstractive.py
modules/abstractive.py  CHANGED  (+6 -2)
@@ -1,10 +1,9 @@
 import torch
 from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
-
+from transformers import PegasusTokenizer, PegasusForConditionalGeneration
 
 def load_summarizers():
     models = {
-        "Pegasus": "google/pegasus-large",
         "T5": "Overglitch/t5-small-cnn-dailymail",
         "BART": "facebook/bart-large-cnn",
     }
@@ -15,6 +14,11 @@ def load_summarizers():
         summarizers[model_name] = (model, tokenizer)
     return summarizers
 
+def load_pegasus_model_and_tokenizer(model_name: str):
+    model = PegasusForConditionalGeneration.from_pretrained(model_name)
+    tokenizer = PegasusTokenizer.from_pretrained(model_name)
+    return model, tokenizer
+
 
 def abstractive_summary(summarizers, model_name, text, max_length, num_beams):
     model, tokenizer = summarizers[model_name]
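
The net effect of this commit is that Pegasus is no longer loaded through the shared AutoModelForSeq2SeqLM path in load_summarizers(); instead, the new load_pegasus_model_and_tokenizer() helper uses the Pegasus-specific classes. Below is a minimal usage sketch, not part of the commit: it assumes the helper is called with the google/pegasus-large checkpoint that was removed from the models dict, that the import path works from the Space's root directory, and that max_length/num_beams values mirror the parameters of abstractive_summary() purely for illustration.

# Minimal usage sketch for the new helper. Checkpoint name and generation
# arguments are assumptions based on values visible in this diff.
from modules.abstractive import load_pegasus_model_and_tokenizer

model, tokenizer = load_pegasus_model_and_tokenizer("google/pegasus-large")

article = "Long input text to summarize ..."
inputs = tokenizer(article, truncation=True, return_tensors="pt")
summary_ids = model.generate(**inputs, max_length=128, num_beams=4)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))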