nihalnayak committed
Commit ce0a909 · verified · parent c50d55f

Update modeling_nvembed.py


Removing MISTRAL_INPUTS_DOCSTRING from the imports.

Files changed (1)
modeling_nvembed.py: +0 −2
modeling_nvembed.py CHANGED
@@ -8,7 +8,6 @@ from contextlib import nullcontext
 from transformers import AutoModel, PreTrainedTokenizerFast, BatchEncoding, DataCollatorWithPadding
 from transformers.modeling_utils import PreTrainedModel
 from transformers.models.auto import AutoTokenizer
-from transformers.models.mistral.modeling_mistral import MISTRAL_INPUTS_DOCSTRING
 from transformers.modeling_outputs import BaseModelOutputWithPast
 from transformers.modeling_attn_mask_utils import _prepare_4d_attention_mask, _prepare_4d_attention_mask_for_sdpa
 from transformers import MistralModel, MistralConfig
@@ -39,7 +38,6 @@ class BidirectionalMistralModel(MistralModel):
             layer.self_attn.is_causal = False
         self._attn_implementation = "eager"

-    @add_start_docstrings_to_model_forward(MISTRAL_INPUTS_DOCSTRING)
     def forward(
         self,
         input_ids: torch.LongTensor = None,
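
The import was presumably removed because recent transformers releases no longer export MISTRAL_INPUTS_DOCSTRING, so the import itself raises an ImportError before the model can load. A minimal sketch of a version-tolerant alternative (not what this commit does; the commit simply deletes the import and the decorator that consumed it), assuming the constant still exists in older releases:

    # Hedged sketch, not part of this commit: keep the docstring decorator
    # usable across transformers versions that may or may not export the constant.
    try:
        from transformers.models.mistral.modeling_mistral import MISTRAL_INPUTS_DOCSTRING
    except ImportError:
        # Newer transformers releases removed the per-model docstring constants;
        # an empty string makes the decorator effectively a no-op.
        MISTRAL_INPUTS_DOCSTRING = ""

Deleting both the import and the decorator, as the commit does, is the simpler fix, since the decorator only affects the generated docstring and not runtime behavior.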
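
For context, the lines visible in the second hunk come from the constructor that makes the underlying Mistral stack bidirectional. A self-contained sketch of that pattern, reconstructed from the diff context above (any detail beyond those lines is illustrative; the real file also defines its own config class and a forward override):

    # Sketch of the bidirectional wrapper this file defines, based on the
    # diff context above; details beyond those lines are illustrative.
    from transformers import MistralConfig, MistralModel

    class BidirectionalMistralModel(MistralModel):
        def __init__(self, config: MistralConfig):
            super().__init__(config)
            # Disable the causal mask in every decoder layer so each token
            # can attend to tokens on both sides, as an embedding model needs.
            for layer in self.layers:
                layer.self_attn.is_causal = False
            # Use the eager attention path; fused SDPA/flash kernels can
            # apply causal masking independently of the is_causal flag.
            self._attn_implementation = "eager"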