Fill-Mask · Transformers · PyTorch · Safetensors · English · nomic_bert · custom_code
Commit e75bfa2 · 1 Parent(s): f3ce33a · committed by zpn

Update modeling_hf_nomic_bert.py

Files changed (1): modeling_hf_nomic_bert.py (+5 −3)
modeling_hf_nomic_bert.py CHANGED

@@ -16,7 +16,7 @@ from einops import rearrange, repeat
 from transformers import GPT2Config, PreTrainedModel
 from transformers.models.bert.modeling_bert import (
     BaseModelOutputWithPoolingAndCrossAttentions,
-    BertForPreTrainingOutput,
+    MaskedLMOutput,
     SequenceClassifierOutput
 )
 
@@ -1147,9 +1147,11 @@ class NomicBertForPreTraining(NomicBertPreTrainedModel):
         )
         total_loss = masked_lm_loss.float()
 
-        return BertForPreTrainingOutput(
+        return MaskedLMOutput(
             loss=total_loss,
-            prediction_logits=prediction_scores,
+            logits=prediction_scores,
+            hidden_states=outputs.hidden_states,
+            attentions=None,
         )
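
For readers tracking the change: after this commit, NomicBertForPreTraining.forward returns a standard transformers MaskedLMOutput, so downstream code should read the fill-mask scores from .logits rather than the old BertForPreTrainingOutput.prediction_logits. Below is a minimal sketch of how that surfaces to callers; the checkpoint id "nomic-ai/nomic-bert-2048" is an assumption (any repo that ships this modeling_hf_nomic_bert.py and maps AutoModelForMaskedLM to NomicBertForPreTraining would work), and trust_remote_code=True is needed because of the custom_code tag.

# A minimal fill-mask sketch against the updated return type.
# Assumption: "nomic-ai/nomic-bert-2048" is an illustrative checkpoint id.
import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer

model_id = "nomic-ai/nomic-bert-2048"  # assumed checkpoint id
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(model_id, trust_remote_code=True)
model.eval()

inputs = tokenizer("Paris is the [MASK] of France.", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# After this commit the forward pass returns MaskedLMOutput, so the scores
# live in outputs.logits (the old BertForPreTrainingOutput exposed them as
# prediction_logits). outputs.loss is None here because no labels were passed.
mask_positions = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)
predicted_id = outputs.logits[mask_positions].argmax(dim=-1)
print(tokenizer.decode(predicted_id))

Because MaskedLMOutput is what the transformers fill-mask pipeline expects (a .logits field), this change is what lets the checkpoint work with the Fill-Mask task tag shown above.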