yzhangcs committed on
Commit
2f413dd
·
verified ·
1 Parent(s): 02d7df9

Upload HGRN2ForCausalLM

Browse files
Files changed (4) hide show
  1. README.md +1 -1
  2. config.json +5 -1
  3. generation_config.json +1 -1
  4. model.safetensors +2 -2
README.md CHANGED
@@ -8,4 +8,4 @@ license: mit
8
  datasets:
9
  - cerebras/SlimPajama-627B
10
  library_name: fla
11
- ---
 
8
  datasets:
9
  - cerebras/SlimPajama-627B
10
  library_name: fla
11
+ ---
config.json CHANGED
@@ -3,13 +3,16 @@
3
  "architectures": [
4
  "HGRN2ForCausalLM"
5
  ],
 
6
  "attn_mode": "fused_chunk",
7
  "bos_token_id": 1,
8
  "clamp_min": null,
9
  "conv_size": 4,
 
10
  "eos_token_id": 2,
11
  "expand_ratio": 128,
12
  "fuse_cross_entropy": true,
 
13
  "hidden_act": "swish",
14
  "hidden_ratio": 4,
15
  "hidden_size": 2048,
@@ -17,13 +20,14 @@
17
  "intermediate_size": null,
18
  "max_position_embeddings": 2048,
19
  "model_type": "hgrn2",
 
20
  "num_heads": null,
21
  "num_hidden_layers": 24,
22
  "rms_norm_eps": 1e-06,
23
  "share_conv_kernel": true,
24
  "tie_word_embeddings": false,
25
  "torch_dtype": "bfloat16",
26
- "transformers_version": "4.40.0.dev0",
27
  "use_cache": true,
28
  "use_lower_bound": true,
29
  "use_short_conv": false,
 
3
  "architectures": [
4
  "HGRN2ForCausalLM"
5
  ],
6
+ "attn": null,
7
  "attn_mode": "fused_chunk",
8
  "bos_token_id": 1,
9
  "clamp_min": null,
10
  "conv_size": 4,
11
+ "elementwise_affine": true,
12
  "eos_token_id": 2,
13
  "expand_ratio": 128,
14
  "fuse_cross_entropy": true,
15
+ "fuse_norm": true,
16
  "hidden_act": "swish",
17
  "hidden_ratio": 4,
18
  "hidden_size": 2048,
 
20
  "intermediate_size": null,
21
  "max_position_embeddings": 2048,
22
  "model_type": "hgrn2",
23
+ "norm_eps": 1e-06,
24
  "num_heads": null,
25
  "num_hidden_layers": 24,
26
  "rms_norm_eps": 1e-06,
27
  "share_conv_kernel": true,
28
  "tie_word_embeddings": false,
29
  "torch_dtype": "bfloat16",
30
+ "transformers_version": "4.48.2",
31
  "use_cache": true,
32
  "use_lower_bound": true,
33
  "use_short_conv": false,
generation_config.json CHANGED
@@ -2,5 +2,5 @@
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
- "transformers_version": "4.40.0.dev0"
6
  }
 
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
+ "transformers_version": "4.48.2"
6
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f395b111991637a1e613dc3486067d562eb96a70a0a524b30310c97ee776ec46
3
- size 2728816304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:92ea939b56018e51e696d1cb38970ed2797fad97bfe1eacff144bdc0397cdc7c
3
+ size 2728818968