TheBloke committed on
Commit 3925783 · 1 Parent(s): 57137bc

Fix config.json

Files changed (1)
  1. config.json +2 -3
config.json CHANGED
@@ -8,7 +8,7 @@
     "AutoConfig": "configuration_qwen.QWenConfig",
     "AutoModelForCausalLM": "modeling_qwen.QWenLMHeadModel"
   },
-  "bf16": true,
+  "bf16": false,
   "emb_dropout_prob": 0.0,
   "fp16": false,
   "fp32": false,
@@ -48,7 +48,6 @@
     "sym": true,
     "tokenizer": null,
     "true_sequential": true,
-    "use_cuda_fp16": false,
     "use_exllama": true
   },
   "rotary_emb_base": 10000,
@@ -64,7 +63,7 @@
   "use_cache_kernel": false,
   "use_cache_quantization": false,
   "use_dynamic_ntk": true,
-  "use_flash_attn": true,
+  "use_flash_attn": "auto",
   "use_logn_attn": true,
   "vocab_size": 151936
 }
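For anyone verifying the change after pulling it, the updated flags can be read back with transformers' AutoConfig. This is a minimal sketch, not part of the commit: the model path below is a placeholder for wherever this repository is checked out, and trust_remote_code=True is needed because "auto_map" routes the config to the custom configuration_qwen.QWenConfig class shipped alongside config.json.

```python
from transformers import AutoConfig

# Placeholder: point this at a local checkout of the model repository.
model_path = "./qwen-gptq-model"

# trust_remote_code=True lets transformers load the custom QWenConfig class
# referenced by the "auto_map" entry in config.json.
config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)

# Values expected after this commit.
print(config.bf16)            # False
print(config.use_flash_attn)  # "auto"
```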