twhoool02 committed
Commit b301059
1 Parent(s): fd97b1d

Upload LlamaForCausalLM

Files changed (2):
  1. config.json (+3 -2)
  2. generation_config.json (+1 -1)
config.json CHANGED
@@ -21,6 +21,7 @@
   "_load_in_4bit": true,
   "_load_in_8bit": false,
   "bnb_4bit_compute_dtype": "float16",
+  "bnb_4bit_quant_storage": "uint8",
   "bnb_4bit_quant_type": "nf4",
   "bnb_4bit_use_double_quant": true,
   "llm_int8_enable_fp32_cpu_offload": false,
@@ -36,7 +37,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.38.2",
-  "use_cache": false,
+  "transformers_version": "4.39.1",
+  "use_cache": true,
   "vocab_size": 32000
 }
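For context, these quantization keys correspond to a bitsandbytes 4-bit setup in transformers; the newly added bnb_4bit_quant_storage key is what the 4.38.2 → 4.39.1 version bump introduces (it first appeared in transformers 4.39). Below is a minimal sketch of how a config like this is typically produced, not the uploader's actual script; the base checkpoint id is a hypothetical placeholder, since the diff does not name it.

import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Quantization settings matching the values in this config.json.
quant_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_storage=torch.uint8,  # serialized as "uint8"; new in transformers 4.39
)

# "meta-llama/Llama-2-7b-hf" is a hypothetical placeholder, not taken from the diff.
model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf",
    quantization_config=quant_config,
    torch_dtype=torch.float16,
)
model.config.use_cache = True  # matches the use_cache flip in this commit

Re-enabling use_cache turns the KV cache back on for inference; it is typically set to false only during training with gradient checkpointing.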
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.38.2"
+  "transformers_version": "4.39.1"
 }
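The sampling parameters shown in this hunk map onto a transformers GenerationConfig along the lines of the sketch below. This assumes do_sample is enabled elsewhere in the file (the hunk only shows lines 6-10), and "model_dir" is a hypothetical output path.

from transformers import GenerationConfig

gen_config = GenerationConfig(
    pad_token_id=0,
    temperature=0.6,
    top_p=0.9,
    do_sample=True,  # assumption: temperature/top_p only take effect when sampling
)
gen_config.save_pretrained("model_dir")  # writes generation_config.json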