zhenyundeng committed · c052247
1 Parent(s): 7156297
update app.py
app.py CHANGED
@@ -79,14 +79,14 @@ LABEL = [
 veracity_tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
 bert_model = BertForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=4, problem_type="single_label_classification")
 veracity_checkpoint_path = os.getcwd() + "/averitec/pretrained_models/bert_veracity.ckpt"
-veracity_model = SequenceClassificationModule.load_from_checkpoint(veracity_checkpoint_path,tokenizer=veracity_tokenizer, model=bert_model)
+veracity_model = SequenceClassificationModule.load_from_checkpoint(veracity_checkpoint_path,tokenizer=veracity_tokenizer, model=bert_model).to('cuda')
 # veracity_model = SequenceClassificationModule.load_from_checkpoint(veracity_checkpoint_path,tokenizer=veracity_tokenizer, model=bert_model).to(device)
 
 # Justification
 justification_tokenizer = BartTokenizer.from_pretrained('facebook/bart-large', add_prefix_space=True)
 bart_model = BartForConditionalGeneration.from_pretrained("facebook/bart-large")
 best_checkpoint = os.getcwd()+ '/averitec/pretrained_models/bart_justifications_verdict-epoch=13-val_loss=2.03-val_meteor=0.28.ckpt'
-justification_model = JustificationGenerationModule.load_from_checkpoint(best_checkpoint, tokenizer=justification_tokenizer, model=bart_model)
+justification_model = JustificationGenerationModule.load_from_checkpoint(best_checkpoint, tokenizer=justification_tokenizer, model=bart_model).to('cuda')
 # justification_model = JustificationGenerationModule.load_from_checkpoint(best_checkpoint, tokenizer=justification_tokenizer, model=bart_model).to(device)
 
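Note: the commit hard-codes .to('cuda'), which raises an error on hardware without a GPU. The commented-out .to(device) lines in the same file hint at a device-agnostic variant; the sketch below shows one way to do that. It is illustrative only: load_to_device is a hypothetical helper, not code from this repository.

import torch

# Choose CUDA when available, otherwise fall back to CPU, mirroring the
# commented-out ".to(device)" lines in app.py.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

def load_to_device(module_cls, checkpoint_path, **kwargs):
    """Load a PyTorch Lightning module from a checkpoint and move it to `device`."""
    # map_location keeps checkpoint tensors off the GPU when none is present.
    module = module_cls.load_from_checkpoint(checkpoint_path, map_location=device, **kwargs)
    return module.to(device).eval()

# Usage with the names from the diff above (assumes they are defined in app.py):
# veracity_model = load_to_device(
#     SequenceClassificationModule, veracity_checkpoint_path,
#     tokenizer=veracity_tokenizer, model=bert_model,
# )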