yzhangcs committed
Commit 6e04029 · verified · 1 Parent(s): bcc58e7

Upload GLAForCausalLM
Files changed (4)

1. README.md +1 -1
2. config.json +7 -1
3. generation_config.json +1 -1
4. model.safetensors +2 -2
README.md CHANGED

```diff
@@ -8,4 +8,4 @@ license: mit
 datasets:
 - cerebras/SlimPajama-627B
 library_name: fla
----
+---
```
config.json CHANGED

```diff
@@ -3,13 +3,16 @@
   "architectures": [
     "GLAForCausalLM"
   ],
+  "attn": null,
   "attn_mode": "chunk",
   "bos_token_id": 1,
   "clamp_min": null,
   "conv_size": 4,
+  "elementwise_affine": true,
   "eos_token_id": 2,
   "expand_k": 0.5,
   "expand_v": 1,
+  "feature_map": null,
   "fuse_cross_entropy": true,
   "fuse_norm": true,
   "hidden_act": "swish",
@@ -19,16 +22,19 @@
   "intermediate_size": null,
   "max_position_embeddings": 2048,
   "model_type": "gla",
+  "norm_eps": 1e-06,
   "num_heads": 4,
   "num_hidden_layers": 24,
+  "num_kv_heads": null,
   "rms_norm_eps": 1e-06,
   "share_conv_kernel": true,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.39.1",
+  "transformers_version": "4.48.2",
   "use_cache": true,
   "use_gk": true,
   "use_gv": false,
+  "use_output_gate": true,
   "use_short_conv": false,
   "vocab_size": 32000
 }
```
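The bumped `transformers_version` and the new keys (`attn`, `elementwise_affine`, `feature_map`, `norm_eps`, `num_kv_heads`, `use_output_gate`) reflect a newer `fla` config schema. Below is a minimal sketch of loading and inspecting the updated config; it assumes that importing `fla` registers the `gla` model type with the transformers `Auto*` classes, and the repo id `fla-hub/gla-340M-15B` is a placeholder — substitute this model's actual Hub path.

```python
# Sketch only: repo id is a placeholder, and we assume `fla`
# (flash-linear-attention) registers GLA with transformers on import.
import fla  # noqa: F401  (side effect: registers the "gla" model type)
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "fla-hub/gla-340M-15B"  # hypothetical repo id

config = AutoConfig.from_pretrained(repo_id)
# The keys added in this commit should now be present on the config:
for key in ("attn", "elementwise_affine", "feature_map",
            "norm_eps", "num_kv_heads", "use_output_gate"):
    print(key, "=", getattr(config, key, "<missing>"))

# Loading the weights picks up the updated config automatically.
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)
```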
generation_config.json CHANGED

```diff
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.39.1"
+  "transformers_version": "4.48.2"
 }
```
model.safetensors CHANGED

```diff
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:da7b708c66f1b9d1ef7d86684a97f4605f014deac8962d2b91dba8cef907f597
-size 683449944
+oid sha256:07637f325a73991dcc31936e43ab9da2eb5c1a79135c477c3013bf3118a05a4a
+size 683452592
```
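Since model.safetensors is stored via Git LFS, the pointer above pins the new blob by its SHA-256 digest and byte size. A quick sketch for verifying a downloaded copy against this commit (the local file path is an assumption):

```python
import hashlib
from pathlib import Path

# Expected values, taken from the LFS pointer in this commit.
EXPECTED_OID = "07637f325a73991dcc31936e43ab9da2eb5c1a79135c477c3013bf3118a05a4a"
EXPECTED_SIZE = 683_452_592

path = Path("model.safetensors")  # assumed local download location

# Cheap check first: the pointer also records the exact byte size.
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

# Hash the file incrementally to avoid loading ~680 MB into memory.
h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches the LFS pointer for commit 6e04029")
```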