Update modeling_hf_nomic_bert.py
modeling_hf_nomic_bert.py: +2 additions, -13 deletions
@@ -1694,7 +1694,6 @@ class NomicBertModel(NomicBertPreTrainedModel):
         return_dict=None,
         matryoshka_dim=None,
         inputs_embeds=None,
-        head_mask=None,
     ):
         if input_ids is not None and inputs_embeds is not None:
             raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
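This hunk removes the `head_mask` parameter from `NomicBertModel.forward`. A minimal sketch of the trimmed signature, assuming the surrounding arguments (only the names visible in the hunk are confirmed; the real signature has more parameters):

def forward(
    input_ids=None,
    return_dict=None,
    matryoshka_dim=None,
    inputs_embeds=None,
    # head_mask removed by this commit; callers may no longer pass it
):
    if input_ids is not None and inputs_embeds is not None:
        raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")

forward(input_ids=[101, 102])  # OK: the mutual-exclusion check passes
# forward(head_mask=None)  ->  TypeError: forward() got an unexpected keyword argument 'head_mask'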
@@ -1918,10 +1917,6 @@ class NomicBertForMultipleChoice(NomicBertPreTrainedModel):
             position_ids=position_ids,
             head_mask=head_mask,
             inputs_embeds=inputs_embeds,
-            output_attentions=output_attentions,
-            output_hidden_states=output_hidden_states,
-            return_dict=return_dict,
-            unpad_inputs=unpad_inputs,
         )

         pooled_output = outputs[1]
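The multiple-choice head stops forwarding `output_attentions`, `output_hidden_states`, `return_dict`, and `unpad_inputs` to the backbone; the token-classification and question-answering hunks below get the identical trim. A plausible reading (an inference, not stated in the diff) is that the backbone's `forward` never accepted these keywords, so passing them raised a `TypeError`. A toy reproduction with hypothetical names:

class ToyBackbone:
    # Stand-in for the NomicBert backbone: it accepts only this keyword set.
    def forward(self, input_ids=None, position_ids=None, head_mask=None, inputs_embeds=None):
        # Mimics the (sequence_output, pooled_output) tuple indexed as outputs[0]/outputs[1].
        return ("sequence_output", "pooled_output")

backbone = ToyBackbone()
outputs = backbone.forward(input_ids=[1, 2, 3], head_mask=None)
pooled_output = outputs[1]  # what the multiple-choice head keeps
print(pooled_output)

# The pre-patch call would blow up on the extra keyword:
# backbone.forward(input_ids=[1], return_dict=True)
# TypeError: forward() got an unexpected keyword argument 'return_dict'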
@@ -1987,9 +1982,6 @@ class NomicBertForTokenClassification(NomicBertPreTrainedModel):
             position_ids=position_ids,
             head_mask=head_mask,
             inputs_embeds=inputs_embeds,
-            output_attentions=output_attentions,
-            output_hidden_states=output_hidden_states,
-            return_dict=return_dict,
         )

         sequence_output = outputs[0]
@@ -1999,7 +1991,7 @@ class NomicBertForTokenClassification(NomicBertPreTrainedModel):

         loss = None
         if labels is not None:
-            loss_fct = CrossEntropyLoss()
+            loss_fct = nn.CrossEntropyLoss()
             loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))

         if not return_dict:
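Besides the argument trims, the commit qualifies `CrossEntropyLoss` as `nn.CrossEntropyLoss`. The bare name only resolves when the file does `from torch.nn import CrossEntropyLoss`; the `nn.` prefix suggests the module instead imports `torch.nn as nn` (an inference from the diff, in which case the unqualified call would have raised a `NameError`). A runnable toy of the patched token-classification loss:

import torch
import torch.nn as nn

batch, seq_len, num_labels = 2, 8, 5
logits = torch.randn(batch, seq_len, num_labels)         # per-token class scores
labels = torch.randint(0, num_labels, (batch, seq_len))  # gold label per token

# Same shape handling as the patched line: flatten all tokens into one axis.
loss_fct = nn.CrossEntropyLoss()
loss = loss_fct(logits.view(-1, num_labels), labels.view(-1))
print(loss.item())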
@@ -2057,9 +2049,6 @@ class NomicBertForQuestionAnswering(NomicBertPreTrainedModel):
             position_ids=position_ids,
             head_mask=head_mask,
             inputs_embeds=inputs_embeds,
-            output_attentions=output_attentions,
-            output_hidden_states=output_hidden_states,
-            return_dict=return_dict,
         )

         sequence_output = outputs[0]
@@ -2081,7 +2070,7 @@ class NomicBertForQuestionAnswering(NomicBertPreTrainedModel):
             start_positions = start_positions.clamp(0, ignored_index)
             end_positions = end_positions.clamp(0, ignored_index)

-            loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
+            loss_fct = nn.CrossEntropyLoss(ignore_index=ignored_index)
             start_loss = loss_fct(start_logits, start_positions)
             end_loss = loss_fct(end_logits, end_positions)
             total_loss = (start_loss + end_loss) / 2
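The question-answering head gets the same `nn.` qualification. A self-contained toy of the clamp-then-ignore pattern in this hunk (shapes and positions invented for illustration): out-of-range answer positions are clamped to `ignored_index`, one past the last valid position, and `ignore_index` then drops them from the loss.

import torch
import torch.nn as nn

batch, seq_len = 2, 16
start_logits = torch.randn(batch, seq_len)
end_logits = torch.randn(batch, seq_len)
start_positions = torch.tensor([3, 50])  # 50 is deliberately out of range
end_positions = torch.tensor([5, 60])

ignored_index = start_logits.size(1)                         # == seq_len, one past the last valid index
start_positions = start_positions.clamp(0, ignored_index)    # -> [3, 16]
end_positions = end_positions.clamp(0, ignored_index)        # -> [5, 16]

loss_fct = nn.CrossEntropyLoss(ignore_index=ignored_index)   # clamped targets are skipped
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2                     # average over span endpoints
print(total_loss.item())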