Synchronizing local compiler cache.
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- .gitattributes +24 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/model.neff +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/model.neff +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/model.neff +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_b81ae95f77d4c1accaa5+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_b81ae95f77d4c1accaa5+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_b81ae95f77d4c1accaa5+39f12043/model.log +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/model.neff +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_ec079d43c779ced7e047+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_ec079d43c779ced7e047+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_ec079d43c779ced7e047+39f12043/model.log +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/11cebcde130c1f5b8a5d.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/481519fdaa82a5e9ca3c.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/5276a012f2eefa9af6da.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/gpt2/aa40852fa5208b294329.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/4b98629fe202b2140a00.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/af13e5d873398d00de9e.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/d0a6fa1b6956a4d680fa.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Llama-3.1-8B-Instruct/d4873490f07d6364f226.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/4811debc00fce09ac124.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/70df192a162dc76a8ba2.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/b6900574c407686bc850.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/b708ec372a7fc766d289.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/d8df4e1c37e866f90233.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/e852034e61abed3bc0ba.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/0dd9463a56ca665e0009.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/21ab9e45cd41b65fb4e0.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ec3b5393c3977096c001.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/63d71adb49398ef365fe.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/84e0c54d67bb0c2ff590.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/dfa5935d58cd292c0422.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/compile_flags.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/model.done +0 -0
- neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/model.hlo_module.pb +3 -0
- neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/model.neff +3 -0
- neuronxcc-2.16.303.0+d9f03cda/MODULE_0912ea9cf6fe9e1af454+613edded/compile_flags.json +1 -0
- neuronxcc-2.16.303.0+d9f03cda/MODULE_0912ea9cf6fe9e1af454+613edded/model.done +0 -0
- neuronxcc-2.16.303.0+d9f03cda/MODULE_0912ea9cf6fe9e1af454+613edded/model.hlo_module.pb +3 -0
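The listing above reflects the cache layout: one directory per compiler version (`neuronxcc-<version>`), a `MODULE_<hash>` subdirectory per compiled graph holding `compile_flags.json`, the HLO protobuf, the resulting `model.neff` (or a `model.log` when compilation failed), plus a `0_REGISTRY` tree of JSON export configurations. Below is a minimal sketch for inventorying such a cache locally; the `cache_root` path is an assumption, not something stated in this diff.

```python
# Inventory a synchronized compiler cache laid out as in the listing above:
# <cache_root>/neuronxcc-<compiler_version>/MODULE_<hash>/...
from pathlib import Path

cache_root = Path("/var/tmp/neuron-compile-cache")  # assumed local cache location

for compiler_dir in sorted(cache_root.glob("neuronxcc-*")):
    modules = sorted(compiler_dir.glob("MODULE_*"))
    print(f"{compiler_dir.name}: {len(modules)} cached modules")
    for module_dir in modules:
        has_neff = (module_dir / "model.neff").exists()
        has_log = (module_dir / "model.log").exists()
        # Modules in this diff that carry only a model.log recorded a failed compilation.
        status = "compiled" if has_neff else ("failed" if has_log else "pending")
        print(f"  {module_dir.name}: {status}")
```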
.gitattributes
CHANGED
@@ -6450,3 +6450,27 @@ neuronxcc-2.15.143.0+e39249ad/MODULE_fbfe999e7918c5e3c314+39f12043/model.neff fi
neuronxcc-2.15.128.0+56dc5a86/MODULE_1384aee4e828bcd1c687/model.neuron filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.15.128.0+56dc5a86/MODULE_ad62c560530518a4523f/model.neuron filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.15.128.0+56dc5a86/MODULE_fb5aa8e68ce47badf8db/model.neuron filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_0b08b19b4706a50917ba+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_10ff4d6a928b94472f16+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_1b62264e9a4966bfa0e4+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_21a5e6094f799eaf9413+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_3c7f47a5211f45696b45+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_3dbb2404de8d122cfb72+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_452de3ba481f27cd766e+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_48a5a5886c35331e113d+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_499ca495a71d3ecda4ec+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_67f2ec151ef567c93a66+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_686de18f172d024b3d29+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_6d4285f70d46d16f0d98+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_742fbf1379b1b37ffccc+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_8896ee5f087b4718eea3+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_a51bc33669478de21a96+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_ab26fd71fa3f5e9fa45b+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_adcba825ed1523ba8d12+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_bc4eef25d6686ad21908+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_bdab92f819266df89445+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_cb3ec814cbdbd1a9df1b+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_e1e1e3a85cd7a83e0dd1+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_e503bb548c30caa02b1a+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.16.303.0+d9f03cda/MODULE_ea4f2290b2fcba35e5fb+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
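Each `compile_flags.json` stores the extra arguments passed to `neuronx-cc` for that module. The sketch below shows how these flags combine with the cached HLO module into a compile command, following the shape of the command recorded in the `model.log` entries further down; the paths, `--target` and `--verbose` values here are copied from those logs for illustration, not read from this file.

```python
import json
from pathlib import Path

# Module directory taken from this diff; adjust to your own cache layout.
module_dir = Path("neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043")

# The cached flags: ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
flags = json.loads((module_dir / "compile_flags.json").read_text())

cmd = [
    "neuronx-cc", "compile",
    "--target=trn1",                         # as seen in the logged command
    "--framework=XLA",
    str(module_dir / "model.hlo_module.pb"),
    "--output", str(module_dir / "model.neff"),
    *flags,
    "--verbose=35",                          # as seen in the logged command
]
print(" ".join(cmd))
```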
neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ae1d087bbf16c0740359a6b0f5087438a47e8485ee208ce22084f2617b62144a
size 57038
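The `model.hlo_module.pb` entries (and the larger `model.neff` artifacts) are stored through Git LFS, so the diff shows a three-line pointer rather than the binary itself. A small sketch for reading such a pointer with the standard library; fetching the actual object still requires git-lfs or the Hub client.

```python
from pathlib import Path

def read_lfs_pointer(path: Path) -> dict:
    """Parse a git-lfs pointer of the form: version / oid sha256:<hex> / size <bytes>."""
    fields = {}
    for line in path.read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = read_lfs_pointer(Path(
    "neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/model.hlo_module.pb"
))
print(pointer["oid"], pointer["size"])  # e.g. sha256:ae1d08... 57038
```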
neuronxcc-2.15.143.0+e39249ad/MODULE_159b8e04162fff0b5e0b+39f12043/model.neff
ADDED
Binary file (144 kB).
neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1d488fce94d4a0e71d453fc1559922da327af9cb8c37f3b31c59a12e2e86533b
size 69301
neuronxcc-2.15.143.0+e39249ad/MODULE_1c17384445f02bb1c520+39f12043/model.neff
ADDED
Binary file (134 kB).
neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5f17585322c240dc8bd2253c69e65bad5d8bb2f8c871ed8c6f1cd9a4fbab9a27
size 57056
neuronxcc-2.15.143.0+e39249ad/MODULE_a7a030e002b72284d2f7+39f12043/model.neff
ADDED
Binary file (134 kB).
neuronxcc-2.15.143.0+e39249ad/MODULE_b81ae95f77d4c1accaa5+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_b81ae95f77d4c1accaa5+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e0d30aaaa7c85357560266abb1cfe544de4c41cc57a341419477b5869776ed28
size 66209
neuronxcc-2.15.143.0+e39249ad/MODULE_b81ae95f77d4c1accaa5+39f12043/model.log
ADDED
@@ -0,0 +1 @@
Failed compilation with ['neuronx-cc', 'compile', '--target=trn1', '--framework=XLA', '/tmp/ubuntu/neuroncc_compile_workdir/e8af53ca-9784-4616-946c-44f1f821c144/model.MODULE_b81ae95f77d4c1accaa5+39f12043.hlo_module.pb', '--output', '/tmp/ubuntu/neuroncc_compile_workdir/e8af53ca-9784-4616-946c-44f1f821c144/model.MODULE_b81ae95f77d4c1accaa5+39f12043.neff', '--model-type=transformer', '--auto-cast=none', '--execute-repetition=1', '--verbose=35']: 2024-12-16T14:15:25Z [TEN404] (_dot.952) Internal tensorizer error: SundaISel:Verifier::No store before first load inst 3990 for tensor t3598 (inst: float32<1 x 1> $3990 = tensorscalarptr_add(float32<1 x 1> TongaPSum partitions[0] float32 [1, 1] %3840[0,0], float32<1 x 1> TongaSB partitions[0] float32 [1, 1] %3598[0,0]) # id=3990, , src_id=None, instances=1 # dl = tensor_op_name: _add.179 | hlo_id: 1356 | [[];[]] -> [[];[]] ) - Please open a support ticket at https://github.com/aws-neuron/aws-neuron-sdk/issues/new. You may also be able to obtain more information using the 'XLA_IR_DEBUG' and 'XLA_HLO_DEBUG' environment variables.
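Modules that carry a `model.log` instead of a `model.neff` recorded a failed compilation; here the compiler aborted with a `[TEN404]` internal tensorizer error and points to the aws-neuron-sdk issue tracker. A hedged sketch for surfacing such failures from a local cache; the `cache_root` path is an assumption.

```python
import re
from pathlib import Path

cache_root = Path("/var/tmp/neuron-compile-cache")  # assumed local cache location

# Collect failed compilations by scanning for model.log files and pulling out
# the bracketed Neuron error code (e.g. TEN404).
for log_path in sorted(cache_root.glob("neuronxcc-*/MODULE_*/model.log")):
    text = log_path.read_text()
    code = re.search(r"\[([A-Z]+\d+)\]", text)
    print(log_path.parent.name, code.group(1) if code else "unknown error")
```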
neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5d52fd2f202ce379c427a8b52fbf13f84733b0d6f4af34f2a5980f887fc72bda
size 55679
neuronxcc-2.15.143.0+e39249ad/MODULE_d029fb66a724c1553f2f+39f12043/model.neff
ADDED
Binary file (134 kB).
neuronxcc-2.15.143.0+e39249ad/MODULE_ec079d43c779ced7e047+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_ec079d43c779ced7e047+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:16502c8a2a9e35806ce35afb05df98a44d15f6f9ec86d9ac005c34f65a22bf0b
size 69041
neuronxcc-2.15.143.0+e39249ad/MODULE_ec079d43c779ced7e047+39f12043/model.log
ADDED
@@ -0,0 +1 @@
Failed compilation with ['neuronx-cc', 'compile', '--target=trn1', '--framework=XLA', '/tmp/ubuntu/neuroncc_compile_workdir/ca2f2bf9-26a9-4c69-97f9-36019967036b/model.MODULE_ec079d43c779ced7e047+39f12043.hlo_module.pb', '--output', '/tmp/ubuntu/neuroncc_compile_workdir/ca2f2bf9-26a9-4c69-97f9-36019967036b/model.MODULE_ec079d43c779ced7e047+39f12043.neff', '--model-type=transformer', '--auto-cast=none', '--execute-repetition=1', '--verbose=35']: 2024-12-16T14:15:32Z [TEN404] (_dot.1004) Internal tensorizer error: SundaISel:Verifier::No store before first load inst 4353 for tensor t3956 (inst: float32<1 x 1> $4353 = tensorscalarptr_add(float32<1 x 1> $4352, float32<1 x 1> TongaSB partitions[0] float32 [1, 1] %3956[0,0]) # id=4353, , src_id=None, instances=1 # dl = tensor_op_name: _add.182 | hlo_id: 1778 | [[];[]] -> [[];[]] ) - Please open a support ticket at https://github.com/aws-neuron/aws-neuron-sdk/issues/new. You may also be able to obtain more information using the 'XLA_IR_DEBUG' and 'XLA_HLO_DEBUG' environment variables.
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/11cebcde130c1f5b8a5d.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
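The `0_REGISTRY` entries are plain JSON: the model's configuration plus a `neuron` block that pins the export parameters (batch size, sequence length, auto-cast type, core count) and the compiler version used. A minimal sketch for inspecting one entry with the standard library:

```python
import json
from pathlib import Path

entry = Path(
    "neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/bloom/"
    "hf-internal-testing/tiny-random-BloomForCausalLM/11cebcde130c1f5b8a5d.json"
)
config = json.loads(entry.read_text())
neuron = config["neuron"]  # export parameters recorded alongside the model config
print(config["model_type"], neuron["checkpoint_id"])
print(
    f"batch_size={neuron['batch_size']} sequence_length={neuron['sequence_length']} "
    f"auto_cast_type={neuron['auto_cast_type']} num_cores={neuron['num_cores']} "
    f"compiler={neuron['compiler_type']} {neuron['compiler_version']}"
)
```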
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/481519fdaa82a5e9ca3c.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/5276a012f2eefa9af6da.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/gpt2/aa40852fa5208b294329.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/4b98629fe202b2140a00.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/af13e5d873398d00de9e.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/d0a6fa1b6956a4d680fa.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Llama-3.1-8B-Instruct/d4873490f07d6364f226.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "meta-llama/Llama-3.1-8B-Instruct", "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 1024, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/4811debc00fce09ac124.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 16, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/70df192a162dc76a8ba2.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/b6900574c407686bc850.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/b708ec372a7fc766d289.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 128, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/d8df4e1c37e866f90233.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/llama/meta-llama/Meta-Llama-3.1-8B/e852034e61abed3bc0ba.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/0dd9463a56ca665e0009.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/21ab9e45cd41b65fb4e0.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ec3b5393c3977096c001.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/63d71adb49398ef365fe.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/84e0c54d67bb0c2ff590.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.303.0+d9f03cda/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/dfa5935d58cd292c0422.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.303.0+d9f03cda", "num_cores": 8, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--target=trn2", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
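Starting with the 2.16.303.0 entries, the compile flags also record the hardware target (`--target=trn2` here, `--target=trn1` for the module further down), whereas the 2.15.143.0 flag files leave it out. A small sketch that groups cached modules by target; the `cache_root` path is again an assumption.

```python
import json
from collections import Counter
from pathlib import Path

cache_root = Path("/var/tmp/neuron-compile-cache")  # assumed local cache location
targets = Counter()

for flags_path in cache_root.glob("neuronxcc-*/MODULE_*/compile_flags.json"):
    flags = json.loads(flags_path.read_text())
    # Older flag files without --target fall into the "unspecified" bucket.
    target = next((f.split("=", 1)[1] for f in flags if f.startswith("--target=")), "unspecified")
    targets[target] += 1

print(dict(targets))  # e.g. {"trn2": ..., "trn1": ..., "unspecified": ...}
```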
neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/model.done
ADDED
File without changes
neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fae6bba5e76799bce81338669eab39b6f67989cebf3e30d0ed3bff2a98afc556
size 331842
neuronxcc-2.16.303.0+d9f03cda/MODULE_01c540090ff8bced389b+7ac4dbae/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ae241934369b4c671b6a1e3b5286933f850c24642730687b8c06a0bf6bda1ad1
size 3206144
neuronxcc-2.16.303.0+d9f03cda/MODULE_0912ea9cf6fe9e1af454+613edded/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.303.0+d9f03cda/MODULE_0912ea9cf6fe9e1af454+613edded/model.done
ADDED
File without changes
neuronxcc-2.16.303.0+d9f03cda/MODULE_0912ea9cf6fe9e1af454+613edded/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec9373f814ab03ecf1f250e2f08fa06127361ee356b1d62b3b34d4fc688b5261
size 52661