Update modeling_hf_nomic_bert.py
Browse files
modeling_hf_nomic_bert.py
CHANGED
@@ -16,7 +16,7 @@ from einops import rearrange, repeat
 from transformers import GPT2Config, PreTrainedModel
 from transformers.models.bert.modeling_bert import (
     BaseModelOutputWithPoolingAndCrossAttentions,
-
+    MaskedLMOutput,
     SequenceClassifierOutput
 )

@@ -1147,9 +1147,11 @@ class NomicBertForPreTraining(NomicBertPreTrainedModel):
         )
         total_loss = masked_lm_loss.float()

-        return
+        return MaskedLMOutput(
             loss=total_loss,
-
+            logits=prediction_scores,
+            hidden_states=outputs.hidden_states,
+            attentions=None,
         )

|
|
Resulting code after the change:

Lines 16-22 (imports):

    from transformers import GPT2Config, PreTrainedModel
    from transformers.models.bert.modeling_bert import (
        BaseModelOutputWithPoolingAndCrossAttentions,
        MaskedLMOutput,
        SequenceClassifierOutput
    )

Lines 1147-1155 (in NomicBertForPreTraining.forward):

            )
            total_loss = masked_lm_loss.float()

            return MaskedLMOutput(
                loss=total_loss,
                logits=prediction_scores,
                hidden_states=outputs.hidden_states,
                attentions=None,
            )