Upload folder using huggingface_hub
#3
by sharpenb · opened
- config.json +2 -2
- generation_config.json +1 -2
- smash_config.json +1 -1
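As the commit title says, these files were pushed with the huggingface_hub library. A minimal sketch of how such an upload is typically done with its upload_folder API; the repo id, local path, and authentication setup below are assumptions for illustration, not details taken from this PR:

# Sketch: pushing a local folder of model/config files with huggingface_hub.
# Repo id and folder path are hypothetical; authentication is assumed to be
# configured via `huggingface-cli login` or the HF_TOKEN environment variable.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="path/to/local/model/folder",        # contains config.json etc.
    repo_id="sharpenb/some-smashed-llama-model",     # hypothetical target repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)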
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/covalent/.cache/models/
+  "_name_or_path": "/covalent/.cache/models/tmptk1nuvyhzxxjxk51",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -50,7 +50,7 @@
   "tokenizer_class": "SPTokenizer",
   "torch_dtype": "float16",
   "transformers_version": "4.46.2",
-  "use_cache":
+  "use_cache": true,
   "vocab_size": 250680,
   "api_key": null
 }
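This change fills in the temporary cache path used as _name_or_path and sets the model-level use_cache default to true. A small sketch of inspecting those fields after loading; the repo id is hypothetical:

# Sketch: config.json values are what transformers exposes on the loaded config.
# Repo id is hypothetical.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("sharpenb/some-smashed-llama-model")
print(config.use_cache)    # True after this change
print(config.torch_dtype)  # float16, as recorded in config.json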
generation_config.json
CHANGED
@@ -3,6 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 3,
-  "transformers_version": "4.46.2",
-  "use_cache": false
+  "transformers_version": "4.46.2"
 }
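With "use_cache": false removed from generation_config.json, generation no longer forces the KV cache off and falls back to the model config, which now sets use_cache to true. A hedged sketch of reading the generation config; the repo id is hypothetical:

# Sketch: use_cache is no longer pinned in generation_config.json, so the
# model-level default (true, from config.json) applies during generate().
# Repo id is hypothetical.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("sharpenb/some-smashed-llama-model")
print(gen_config.bos_token_id, gen_config.eos_token_id, gen_config.pad_token_id)  # 1 2 3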
smash_config.json
CHANGED
@@ -28,7 +28,7 @@
   "quant_llm-int8_weight_bits": 8,
   "max_batch_size": 1,
   "device": "cuda",
-  "cache_dir": "/covalent/.cache/models/
+  "cache_dir": "/covalent/.cache/models/tmptk1nuvyh",
   "task": "",
   "save_load_fn": "bitsandbytes",
   "save_load_fn_args": {}
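smash_config.json records the compression settings: 8-bit LLM.int8 weights saved and loaded through bitsandbytes on a CUDA device. A minimal sketch of what loading such a checkpoint usually looks like; this is an assumption inferred from those fields, not a recipe documented in this repo, and the repo id is hypothetical:

# Sketch: the smash config points at bitsandbytes 8-bit (LLM.int8) weights,
# which are typically loaded through BitsAndBytesConfig on a CUDA device.
# Repo id is hypothetical.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

model = AutoModelForCausalLM.from_pretrained(
    "sharpenb/some-smashed-llama-model",
    quantization_config=BitsAndBytesConfig(load_in_8bit=True),
    device_map="cuda",
    torch_dtype=torch.float16,
)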