wanzin committed
Commit 70497f9
Parent(s): f7f1a0e

adding bloom 1b1 model

Files changed (3)
  1. config.json +4 -4
  2. model.safetensors +3 -0
  3. pytorch_model.bin +3 -0
config.json CHANGED
@@ -1,12 +1,12 @@
 {
   "apply_residual_connection_post_layernorm": false,
   "attention_dropout": 0.0,
-  "architectures": [
-    "BloomForCausalLM"
-  ],
   "attention_softmax_in_fp32": true,
   "bias_dropout_fusion": true,
   "bos_token_id": 1,
+  "architectures": [
+    "BloomForCausalLM"
+  ],
   "eos_token_id": 2,
   "pad_token_id": 3,
   "unk_token_id": 0,
@@ -15,7 +15,7 @@
   "layer_norm_epsilon": 1e-05,
   "masked_softmax_fusion": true,
   "model_type": "bloom",
-  "n_embed": 1024,
+  "n_embed": 1536,
   "n_inner": null,
   "n_layer": 24,
   "num_attention_heads": 16,
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9758ab2f72cca8764473e05c02f74c11c0f8f9472d02138a5993c1a103c07bb9
+size 2130659086
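
The three added lines are a Git LFS pointer: the weight file itself lives in LFS storage and is identified by its SHA-256 digest and byte size. A minimal sketch, using only the Python standard library, to check a downloaded copy against the pointer (the local path is a placeholder):

# A minimal sketch using only the standard library; the path is a placeholder.
import hashlib
from pathlib import Path

def verify_lfs_pointer(path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a downloaded file against the oid/size in its LFS pointer."""
    p = Path(path)
    if p.stat().st_size != expected_size:
        return False
    h = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected_oid

print(verify_lfs_pointer(
    "model.safetensors",
    "9758ab2f72cca8764473e05c02f74c11c0f8f9472d02138a5993c1a103c07bb9",
    2130659086,
))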
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ae458039e2615e851d4e4e77f36869e1c6daf8ac00f6cfbe8a4c0252dc80de97
+size 2130731319
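
The commit ships the same weights in two formats, so a loader can prefer the safetensors copy. At roughly 2.13 GB each, the files are consistent with the ~1.07B parameters of bloom-1b1 stored at 2 bytes per parameter (fp16). A minimal loading sketch, again with a placeholder repo id and assuming a transformers release recent enough to accept use_safetensors:

# A minimal sketch, assuming `transformers` and `safetensors` are installed.
# "wanzin/bloom-1b1" is a placeholder repo id, not confirmed by this commit.
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "wanzin/bloom-1b1",
    use_safetensors=True,  # prefer model.safetensors over pytorch_model.bin
)
print(sum(p.numel() for p in model.parameters()))  # ~1.07e9 for the 1b1 variant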