bys0318 committed on
Commit
16e280f
·
verified ·
1 Parent(s): fdfab97

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -0
config.json CHANGED
@@ -9,6 +9,7 @@
9
  ],
10
  "attention_dropout": 0.0,
11
  "attention_softmax_in_fp32": true,
 
12
  "auto_map": {
13
  "AutoConfig": "configuration_chatglm.ChatGLMConfig",
14
  "AutoModel": "modeling_chatglm.ChatGLMForConditionalGeneration",
 
9
  ],
10
  "attention_dropout": 0.0,
11
  "attention_softmax_in_fp32": true,
12
+ "attn_implementation": "sdpa",
13
  "auto_map": {
14
  "AutoConfig": "configuration_chatglm.ChatGLMConfig",
15
  "AutoModel": "modeling_chatglm.ChatGLMForConditionalGeneration",