jaewoo park committed
Commit 4ee7dbe · 1 Parent(s): e4c3457

Upload model

Files changed (2)
  1. README.md +5 -5
  2. adapter_config.json +1 -1
README.md CHANGED
@@ -203,15 +203,15 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
 
  The following `bitsandbytes` quantization config was used during training:
  - quant_method: bitsandbytes
- - load_in_8bit: True
- - load_in_4bit: False
+ - load_in_8bit: False
+ - load_in_4bit: True
  - llm_int8_threshold: 6.0
  - llm_int8_skip_modules: None
  - llm_int8_enable_fp32_cpu_offload: False
  - llm_int8_has_fp16_weight: False
- - bnb_4bit_quant_type: fp4
- - bnb_4bit_use_double_quant: False
- - bnb_4bit_compute_dtype: float32
+ - bnb_4bit_quant_type: nf4
+ - bnb_4bit_use_double_quant: True
+ - bnb_4bit_compute_dtype: bfloat16
 
  ### Framework versions

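For context, the updated README values describe a 4-bit NF4 quantization setup rather than the previous 8-bit one. Below is a minimal sketch of how such a config could be built with `BitsAndBytesConfig` from `transformers`; the base model id is a placeholder, since this diff does not name it.

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# 4-bit NF4 quantization with double quantization and bfloat16 compute,
# matching the values listed in the updated README section.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

# "base-model-id" is a placeholder; the base model is not shown in this commit.
model = AutoModelForCausalLM.from_pretrained(
    "base-model-id",
    quantization_config=bnb_config,
    device_map="auto",
)
```
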
adapter_config.json CHANGED
@@ -9,7 +9,7 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "lora_alpha": 16,
- "lora_dropout": 0.05,
+ "lora_dropout": 0.1,
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 8,