UICHEOL-HWANG committed · verified
Commit f8cf0f7 · 1 Parent(s): 6497d36

Adding ONNX file of this model (#1)

- Adding ONNX file of this model (daaf76c6d8ffda9fcb61b8cffd8f9c0399e78628)
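For context, a minimal sketch of how this export could be consumed once the commit lands, assuming the Hugging Face Optimum package and the `onnx/` subfolder layout added below; the `subfolder`/`file_name` arguments and the prompt are illustrative, not something this commit documents:

```python
# Minimal sketch (assumes optimum is installed and the onnx/ layout
# added in this commit; subfolder/file_name are illustrative).
from optimum.onnxruntime import ORTModelForCausalLM
from transformers import AutoTokenizer

model_id = "UICHEOL-HWANG/EcomGen-0.0.1v"  # _name_or_path from onnx/config.json
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = ORTModelForCausalLM.from_pretrained(
    model_id, subfolder="onnx", file_name="model.onnx"
)

inputs = tokenizer("A sample prompt", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```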

.gitattributes CHANGED
@@ -33,3 +33,7 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ onnx/_gpt_neox_layers.0_attention_rotary_emb_Constant_attr__value filter=lfs diff=lfs merge=lfs -text
+ onnx/_gpt_neox_layers.0_attention_rotary_emb_Constant_5_attr__value filter=lfs diff=lfs merge=lfs -text
+ onnx/Constant_205_attr__value filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data filter=lfs diff=lfs merge=lfs -text
onnx/Constant_205_attr__value ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83dd5f1a5a07912d2e7dbb73b75edeb2ce4c1f1d6ec48144a887bd4a4f72325b
+ size 4194304
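The three lines above are a standard git-lfs pointer (this and the following `ADDED` hunks all follow the same format: `version` / `oid sha256:<hex>` / `size <bytes>`). A minimal stdlib sketch for checking a fetched blob against such a pointer; the file paths are illustrative, and normally `git lfs pull` does this for you:

```python
# Sketch: verify a downloaded blob against a git-lfs pointer file.
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Split each 'key value' line of a git-lfs pointer into a dict."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify_blob(pointer_path: str, blob_path: str) -> bool:
    """Stream-hash the blob and compare oid and size with the pointer."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].split(":", 1)[1]
    expected_size = int(fields["size"])
    digest = hashlib.sha256()
    size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return size == expected_size and digest.hexdigest() == expected_oid
```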
onnx/_gpt_neox_layers.0_attention_rotary_emb_Constant_5_attr__value ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d078cc99f779ceff367f4bef2518b4954067e43a8153c83a46321c819d43738c
+ size 524288
onnx/_gpt_neox_layers.0_attention_rotary_emb_Constant_attr__value ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d50a5df57529925b572ba0190fc9b1f94d72b26e08c42b2af896eb572e8c8948
+ size 524288
onnx/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "_name_or_path": "UICHEOL-HWANG/EcomGen-0.0.1v",
+   "architectures": [
+     "GPTNeoXForCausalLM"
+   ],
+   "attention_bias": true,
+   "attention_dropout": 0.0,
+   "bos_token_id": 0,
+   "classifier_dropout": 0.1,
+   "eos_token_id": 2,
+   "hidden_act": "gelu",
+   "hidden_dropout": 0.0,
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 2048,
+   "model_type": "gpt_neox",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "partial_rotary_factor": 0.5,
+   "rope_scaling": null,
+   "rope_theta": 10000,
+   "rotary_emb_base": 10000,
+   "rotary_pct": 0.5,
+   "tie_word_embeddings": false,
+   "transformers_version": "4.37.2",
+   "use_cache": true,
+   "use_parallel_residual": true,
+   "vocab_size": 30080
+ }
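A hedged sanity check relating this config to the LFS blob sizes elsewhere in the commit. Reading the two rotary constants as full fp32 cos/sin position caches, and Constant_205 as a causal-mask constant, is an inference from the file names and sizes, not something the commit states:

```python
# Consistency arithmetic for this commit (interpretations flagged inline).
hidden_size = 2048
num_attention_heads = 16
rotary_pct = 0.5
max_position_embeddings = 2048

head_dim = hidden_size // num_attention_heads   # 128
rotary_ndims = int(head_dim * rotary_pct)       # 64, per rotary_pct above

# Each rotary constant blob is 524288 bytes, which matches a full fp32
# position cache: 2048 positions x 64 rotary dims x 4 bytes.
assert max_position_embeddings * rotary_ndims * 4 == 524288

# Constant_205 is 4194304 bytes = 2048 x 2048, plausibly a causal-mask
# constant at one byte per element (an assumption, not documented here).
assert max_position_embeddings ** 2 == 4194304

# model.onnx_data is 5327052800 bytes; at 4 bytes per weight that is
# ~1.33B parameters, suggesting an fp32 export.
assert 5327052800 // 4 == 1_331_763_200
```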
onnx/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 0,
+   "eos_token_id": 2,
+   "transformers_version": "4.37.2"
+ }
onnx/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19d5ed0392f138673755b78495d2a502c9a8e9a97232405df2391d7710f5ce48
+ size 6520210
onnx/model.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1698bd0c5b8043e14f7488b100f1e946aea47dd77cafdc56e38047ec09ceb093
+ size 5327052800
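model.onnx above is only ~6.5 MB because it holds the graph; the ~5.3 GB of weights live in model.onnx_data via ONNX's external-data mechanism, which onnxruntime resolves relative to the model file. A minimal sketch, assuming both files sit together under `onnx/`:

```python
# Sketch: loading the graph plus external weights with onnxruntime.
# model.onnx references model.onnx_data by relative path, so the two
# files must stay in the same directory (here assumed to be onnx/).
import onnxruntime as ort

session = ort.InferenceSession("onnx/model.onnx")
print([inp.name for inp in session.get_inputs()])  # e.g. input_ids, attention_mask
```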