Upload folder using huggingface_hub (#4)
- 5b935628bc2d94ea952ace903929d1a9a3f92f7ee8790fc35af61f27d6fb77d9 (24896d257878fb4cb1e1ef20257cb1964007b424)
- 7f592f5e72da2956591c42252259999f70f6a8cd03ab47a7ebdb5908538ea2b3 (b29eca999191f624b523a19f175aa68f37ef5326)
- 10635252fb85e1f5cc25b0281f0235b52ea6d0acecae3d258e9458f8beee7663 (09c21bdb4d7cab9cb414f31cc52cce3ed3d176d6)
- 0dd7112ff1723da48c310c7764dcf955eedf9246c3ec75932006e4584d35f8db (ae03d0a8cebc6e697381d9566fec2c59604284dc)
- 456085e0dd131097fdf8c41862de0c934f914dc81631b017e1ce42c67f5a64d6 (66485936db287345ca685f9d27ac951c36f766e0)
- config.json +4 -2
- generation_config.json +1 -1
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +0 -0
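Commits titled this way are typically produced with huggingface_hub's HfApi.upload_folder helper. A minimal sketch of such a call, assuming a hypothetical local folder path and repo id (neither is named on this page):

# Sketch only: folder path and repo id below are placeholders, not from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_folder(
    folder_path="./quantized-model",          # hypothetical local folder
    repo_id="your-username/your-model-repo",  # hypothetical target repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)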
config.json
CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -26,14 +27,15 @@
     "model_version": 1,
     "outlier_channel_split": false,
     "packsz": 4,
-    "rescale_WH": false
+    "rescale_WH": false,
+    "resid_scale_override": -1
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
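These hunks add a top-level "attention_dropout" field, append "resid_scale_override" to the nested quantization block, and pin "transformers_version" to 4.36.2. A small sketch for inspecting the updated fields after the commit, assuming a placeholder repo id:

# Sketch only: the repo id is a placeholder.
import json
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="your-username/your-model-repo", filename="config.json")
with open(path) as f:
    cfg = json.load(f)

print(cfg["attention_dropout"])     # 0.0, newly added at the top level
print(cfg["transformers_version"])  # "4.36.2" after this commit
# "rescale_WH" and "resid_scale_override" sit inside a nested block whose key
# is not visible in this hunk, so it is not accessed here.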
generation_config.json
CHANGED
@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.
+  "transformers_version": "4.36.2"
 }
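Only the transformers_version field changes here; the sampling defaults (temperature 0.6, top_p 0.9) are untouched. A sketch of reading them back with transformers, again with a placeholder repo id:

# Sketch only: the repo id is a placeholder.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("your-username/your-model-repo")
print(gen_cfg.temperature, gen_cfg.top_p)  # 0.6, 0.9 per the file above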
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:614ed7f1a5c12d21f2d8149e2738e4bb26d6f2410ede66dd84138fdf57ac40ee
+size 4964068056

model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b523cabe0f6eebc29885c2a594f7da8ef59e7839c4c7ee04efff781e585039b
+size 4985874080

model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a5081b94724d83118bfeaeed307280ed31acf405091e4a67c78be1df3b7147d
+size 4964866176

model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:31efb0f29baadadb0057e7158e3d7ce64d53f429742824be3f89767d3e8a9b88
+size 3271337328
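The four shards are stored through Git LFS, so the diff only shows pointer files: a spec version, the sha256 oid of the actual payload, and its size in bytes. A sketch of checking a downloaded shard against its pointer, using a placeholder repo id and the oid shown for the first shard above:

# Sketch only: the repo id is a placeholder; the digest is copied from the pointer above.
import hashlib
from huggingface_hub import hf_hub_download

expected = "614ed7f1a5c12d21f2d8149e2738e4bb26d6f2410ede66dd84138fdf57ac40ee"
path = hf_hub_download(
    repo_id="your-username/your-model-repo",
    filename="model-00001-of-00004.safetensors",
)

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected, "shard does not match the LFS pointer oid"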
model.safetensors.index.json
CHANGED
The diff for this file is too large to render. See raw diff.
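The index maps every tensor name to one of the four shards, which is why its diff is too large to render inline. A sketch of inspecting it, assuming a placeholder repo id and the standard "weight_map" layout that transformers writes for sharded checkpoints:

# Sketch only: the repo id is a placeholder.
import json
from collections import Counter
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="your-username/your-model-repo",
    filename="model.safetensors.index.json",
)
with open(path) as f:
    index = json.load(f)

# Count how many tensors each of the four shards carries.
print(Counter(index["weight_map"].values()))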