farpluto committed
Commit 740bbaf
1 Parent(s): 1c4e805

Update config.json

Files changed (1)
config.json +2 -2
config.json CHANGED
@@ -17,7 +17,7 @@
   "architectures": [
     "Phi3ForCausalLM"
   ],
-  "attn_implementation": "flash_attention_2",
+  "attn_implementation": "eager",
   "attention_dropout": 0.0,
   "auto_map": {
     "AutoConfig": "configuration_phi3.Phi3Config",
@@ -240,6 +240,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.37.2",
   "use_bfloat16": true,
-  "use_flash_attn": true
+  "use_flash_attn": false
   }
 }
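The net effect of this commit is to switch the checkpoint's default attention backend from FlashAttention 2 to the standard eager implementation (and to flip the matching use_flash_attn flag), so the model can be loaded on machines without flash-attn installed. Below is a minimal sketch of how a caller interacts with this setting, assuming the attn_implementation keyword of transformers from_pretrained (available in the 4.37.x series pinned by this config) and a placeholder repo id that is not taken from the commit:

# Minimal sketch, not the repository's own usage example.
import torch
from transformers import AutoModelForCausalLM

model_id = "your-org/phi3-checkpoint"  # placeholder repo id, replace with the real one

# With this commit, config.json defaults to eager attention, so loading
# no longer requires flash-attn to be installed.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
    trust_remote_code=True,      # config.json registers custom code via auto_map
)

# Callers who do have flash-attn installed can still opt back in explicitly:
# model = AutoModelForCausalLM.from_pretrained(
#     model_id,
#     attn_implementation="flash_attention_2",
#     torch_dtype=torch.bfloat16,
#     trust_remote_code=True,
# )

Eager attention is the conservative default here; FlashAttention 2 remains an opt-in for environments that support it.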