Fill-Mask
Transformers
PyTorch
Safetensors
English
nomic_bert
custom_code
zpn committed on
Commit
4bb68f6
1 Parent(s): b672c72

Update modeling_hf_nomic_bert.py

Browse files
Files changed (1) hide show
  1. modeling_hf_nomic_bert.py +1 -1
modeling_hf_nomic_bert.py CHANGED
@@ -391,7 +391,7 @@ class NomicBertPreTrainedModel(PreTrainedModel):
391
  num_labels = kwargs.pop("num_labels", None)
392
  rotary_scaling_factor = kwargs.pop("rotary_scaling_factor", None)
393
  strict = kwargs.pop("strict", True)
394
- dtype = kwargs.pop("dtype", None)
395
  if rotary_scaling_factor:
396
  config.rotary_scaling_factor = rotary_scaling_factor
397
 
 
391
  num_labels = kwargs.pop("num_labels", None)
392
  rotary_scaling_factor = kwargs.pop("rotary_scaling_factor", None)
393
  strict = kwargs.pop("strict", True)
394
+ dtype = kwargs.pop("torch_dtype", None)
395
  if rotary_scaling_factor:
396
  config.rotary_scaling_factor = rotary_scaling_factor
397