Nicolas-BZRD committed on
Commit
552f645
·
verified ·
1 Parent(s): d426046

Fix import

Browse files
Files changed (1) hide show
  1. modeling_eurobert.py +9 -9
modeling_eurobert.py CHANGED
@@ -26,15 +26,15 @@ import torch
26
  from torch import nn
27
  from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
28
 
29
- from ...activations import ACT2FN
30
- from ...cache_utils import Cache, StaticCache
31
- from ...modeling_attn_mask_utils import AttentionMaskConverter
32
- from ...modeling_flash_attention_utils import FlashAttentionKwargs
33
- from ...modeling_outputs import BaseModelOutput, BaseModelOutputWithPast, MaskedLMOutput, SequenceClassifierOutput
34
- from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS
35
- from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
36
- from ...processing_utils import Unpack
37
- from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
38
  from .configuration_eurobert import EuroBertConfig
39
 
40
 
 
26
  from torch import nn
27
  from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
28
 
29
+ from transformers.activations import ACT2FN
30
+ from transformers.cache_utils import Cache, StaticCache
31
+ from transformers.modeling_attn_mask_utils import AttentionMaskConverter
32
+ from transformers.modeling_flash_attention_utils import FlashAttentionKwargs
33
+ from transformers.modeling_outputs import BaseModelOutput, BaseModelOutputWithPast, MaskedLMOutput, SequenceClassifierOutput
34
+ from transformers.modeling_rope_utils import ROPE_INIT_FUNCTIONS
35
+ from transformers.modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
36
+ from transformers.processing_utils import Unpack
37
+ from transformers.utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
38
  from .configuration_eurobert import EuroBertConfig
39
 
40