Tags: PyTorch · English · Chinese · plm · custom_code
jjw0126 committed · Commit 66680d2 · verified · 1 Parent(s): be0bde5

Upload folder using huggingface_hub
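The commit message indicates the files were pushed with the huggingface_hub client. A minimal sketch of that upload flow follows; the folder path and repo_id are illustrative placeholders, not values taken from this page:

from huggingface_hub import HfApi

api = HfApi()
# Push every file in a local folder to the Hub as a single commit.
# folder_path and repo_id below are hypothetical placeholders.
api.upload_folder(
    folder_path="./plm",
    repo_id="your-username/plm",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)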

Files changed (2):
  1. modeling_plm.py +0 -5
  2. tokenizer.json +0 -0
modeling_plm.py CHANGED
@@ -691,11 +691,6 @@ class PLMDecoderLayer(nn.Module):
         super().__init__()
         self.hidden_size = config.hidden_size
 
-        if config.use_sliding_window and config._attn_implementation != "flash_attention_2":
-            logger.warning_once(
-                f"Sliding Window Attention is enabled but not implemented for `{config._attn_implementation}`; "
-                "unexpected results may be encountered."
-            )
         self.self_attn = PLM_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx)
         self.mlp = PLMMLP(config)
         self.input_layernorm = PLMRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
 
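For context on the deleted lines: they emitted a one-time warning whenever sliding-window attention was configured without the flash_attention_2 backend, just before the attention class is selected from PLM_ATTENTION_CLASSES. A minimal, self-contained sketch of that pattern follows; the stand-in classes and config values are assumptions for illustration, not the real definitions from modeling_plm.py, and stdlib logging is used in place of transformers' warning_once:

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

# Hypothetical stand-ins for the attention backends defined in
# modeling_plm.py; only the dispatch pattern is illustrated here.
class PLMAttention:
    def __init__(self, config, layer_idx):
        self.config, self.layer_idx = config, layer_idx

class PLMFlashAttention2(PLMAttention):
    pass

class PLMSdpaAttention(PLMAttention):
    pass

# Dispatch table keyed by config._attn_implementation, mirroring how
# PLM_ATTENTION_CLASSES is indexed in the diff above.
PLM_ATTENTION_CLASSES = {
    "eager": PLMAttention,
    "flash_attention_2": PLMFlashAttention2,
    "sdpa": PLMSdpaAttention,
}

class Config:  # illustrative config, not the real PLM config class
    use_sliding_window = True
    _attn_implementation = "sdpa"

config = Config()

# The deleted guard: warn when sliding-window attention is requested on
# a backend other than flash_attention_2, which does not implement it.
if config.use_sliding_window and config._attn_implementation != "flash_attention_2":
    logger.warning(
        "Sliding Window Attention is enabled but not implemented for "
        f"`{config._attn_implementation}`; unexpected results may be encountered."
    )

self_attn = PLM_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=0)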
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff