userzhu committed · Commit 203b358 · verified · 1 Parent(s): 7e1fa41

Update config.json

Files changed (1):
  1. config.json +5 -2
config.json CHANGED
@@ -18,6 +18,8 @@
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
   "quantization_config": {
+    "_load_in_4bit": true,
+    "_load_in_8bit": false,
     "bnb_4bit_compute_dtype": "float16",
     "bnb_4bit_quant_storage": "uint8",
     "bnb_4bit_quant_type": "nf4",
@@ -27,7 +29,8 @@
     "llm_int8_skip_modules": null,
     "llm_int8_threshold": 6.0,
     "load_in_4bit": true,
-    "load_in_8bit": false
+    "load_in_8bit": false,
+    "quant_method": "bitsandbytes"
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
@@ -37,4 +40,4 @@
   "transformers_version": "4.40.1",
   "use_cache": false,
   "vocab_size": 128256
-}
+}
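For reference, the keys this commit adds (`_load_in_4bit`, `_load_in_8bit`, `quant_method`) correspond to the serialized form that recent transformers versions write out for a bitsandbytes 4-bit quantization config. A minimal sketch of building the equivalent config with `BitsAndBytesConfig`, assuming transformers 4.40.1 as recorded in this config.json:

# Minimal sketch (not from this commit): the 4-bit quantization settings in
# the diff above, expressed as a transformers BitsAndBytesConfig.
import torch
from transformers import BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # -> "load_in_4bit": true
    bnb_4bit_quant_type="nf4",             # -> "bnb_4bit_quant_type": "nf4"
    bnb_4bit_compute_dtype=torch.float16,  # -> "bnb_4bit_compute_dtype": "float16"
    bnb_4bit_quant_storage="uint8",        # -> "bnb_4bit_quant_storage": "uint8"
)

# The serialized dict includes the "_load_in_4bit"/"_load_in_8bit" backing
# fields and "quant_method": "bitsandbytes" added by this commit.
print(bnb_config.to_dict())

Because the quantization_config block is stored in config.json itself, `AutoModelForCausalLM.from_pretrained(...)` on this repo should pick up these settings automatically; the explicit `BitsAndBytesConfig` above is only needed when quantizing a non-quantized checkpoint at load time.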