bnjmnmarie committed on
Commit
a724bb7
·
verified ·
1 Parent(s): 2e8d8a7

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +18 -17
config.json CHANGED
@@ -4,23 +4,24 @@
4
  "Qwen2ForCausalLM"
5
  ],
6
  "quantization_config": {
7
- "bits": 8,
8
- "dynamic": null,
9
- "group_size": 128,
10
- "desc_act": true,
11
- "sym": true,
12
- "lm_head": false,
13
- "quant_method": "gptq",
14
- "checkpoint_format": "gptq",
15
- "meta": {
16
- "quantizer": [
17
- "gptqmodel:1.7.4"
18
- ],
19
- "uri": "https://github.com/modelcloud/gptqmodel",
20
- "damp_percent": 0.01,
21
- "damp_auto_increment": 0.0025,
22
- "static_groups": false,
23
- "true_sequential": true,
 
24
  },
25
  "attention_dropout": 0.0,
26
  "bos_token_id": 151643,
 
4
  "Qwen2ForCausalLM"
5
  ],
6
  "quantization_config": {
7
+ "bits": 4,
8
+ "dynamic": null,
9
+ "group_size": 128,
10
+ "desc_act": true,
11
+ "sym": true,
12
+ "lm_head": false,
13
+ "quant_method": "gptq",
14
+ "checkpoint_format": "gptq",
15
+ "meta": {
16
+ "quantizer": [
17
+ "gptqmodel:1.7.4"
18
+ ],
19
+ "uri": "https://github.com/modelcloud/gptqmodel",
20
+ "damp_percent": 0.01,
21
+ "damp_auto_increment": 0.0025,
22
+ "static_groups": false,
23
+ "true_sequential": true
24
+ }
25
  },
26
  "attention_dropout": 0.0,
27
  "bos_token_id": 151643,