Synchronizing local compiler cache.
This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full list.
- .gitattributes +60 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/043b440bccea34d291fb.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/155654dd5a1bb456560b.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/ece51ad6b8284fbc396b.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f0c720d3b9ddf052a333.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/gpt2/3af48830f5f011356907.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/5201027a68de1e69bc8e.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/60efebcadae8a26b86b7.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/84e5771468d2f23ad8e5.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/aa607c4b73bfa5f2e0c4.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/ecb672d572323f8b285d.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/HuggingFaceTB/cosmo-1b/33a8cb17f93fe3fc9dd0.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/5a4b2f45c18a4bc9c83a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/8302a801fe562845883a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/89e270031bd45d93e39a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/99d4c970c17dcefa3f28.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/6e6daa6abc2844413369.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/8410ddf2de3dafde4131.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/8fa4a7481ec62ba032dc.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/ab86a7aa89828ed0a3c4.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/f1b0daa4b0bcee7fc566.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/29b98126099fa1f8bda7.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/697789aa059b99a37b94.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/c7219bf61b6c82837681.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ecdab458fc6dde78ac77.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/f70944e5962fa4f9ec08.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/optimum/mistral-1.1b-testing/b00d6d0d7b1f7077de3f.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/37ebededdb70b6343673.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/40801c00b86f8a57bd61.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/62c3001284dec02a2f5a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/dc2ecabf6779efae1cbc.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/062012f53ad75e74ec59.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/44f9e3972c172ca7db97.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b66a6c8e252945ece8d0.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/d334f8877ad5f8e74e78.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.neff +0 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.neff +0 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_146077388d26b5450bf4+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_146077388d26b5450bf4+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_146077388d26b5450bf4+2c2d707e/model.neff +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15540a4ca7f66130a69f+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15540a4ca7f66130a69f+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15540a4ca7f66130a69f+2c2d707e/model.neff +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.neff +0 -0
.gitattributes
CHANGED
@@ -3193,3 +3193,63 @@ neuronxcc-2.13.66.0+6dfecc895/MODULE_af97e15eb5b056af300b+2c2d707e/model.neff fi
neuronxcc-2.13.66.0+6dfecc895/MODULE_b59f0df073c8fe387670+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.13.66.0+6dfecc895/MODULE_ce6191179705bea7c918+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.13.66.0+6dfecc895/MODULE_ed14118cab9ea5e3aa38+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_146077388d26b5450bf4+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_15540a4ca7f66130a69f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_207c4a429ddcb02a831a+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_285a036b91e916e8eea0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_2a4f91813d1835a0f6a7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_2f46e3e3a9cc37a9f386+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_356e1ac33244f70871ed+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_3672bd56e9ce89cfe2cc+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_39c8487f1889958182f7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_413da2df6912e4f61f74+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_47682bd9c9d5912d6688+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_4efa9050e49cf0fd52ea+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_4f822640d7fcf1269c2a+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_5471fadab3e3ee0bc1bb+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_5605d83d11484be2e579+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_5729cbb8a8a9e0ae1c68+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_65d66b03fc7cad11b905+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_66a59c8c9a1bb7923ad5+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_6c94588ed1d389f2a56e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_712f16088b92ce1552a7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_73d160cbd5af37c439b3+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_87a4f3f0d2c8a431a001+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_88bb293cbec6b542e9b1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_8ae58871e7bd6db4281f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_8d5648ed256bffb4f75e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_92c1b5ae8a448b1de108+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_98f4541c7fef2c0443a5+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_9b6bd87db65507f39a41+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_a1b103ca76fc3468bea0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_a41365d178e5e6c22cc2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_a7ffdfb2ef584c7ff858+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_b010847cb8c1194b6858+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_b4616482089c6d7404d1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_baa48b588d2301d09876+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_bb04e202902bf226b34b+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_bc9b64a5de7b4f5eae3a+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_bf90eb72da914cee62e1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_c1f986ff637dbeda5b25+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_c5afb930fa944fd003f8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_c64d295e29c30cd6bcb8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_cc06eead49bacd4f3b97+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_cebc0d79dffdb4b073d7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_d11a14fe0d09008357f2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_d37372bb65f76ac89b24+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_d3b0c2d106d7295dbdb9+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_d99c3f549b97f8547b37+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_dbfed0b02649def5bfd0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_ddff37e03914d1e4cc1a+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_e38e264182409d3ea8bb+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_ea9dbc326da4c03455ea+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_eae40350bbf4ea1569ad+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_ec338419f643491eb02f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_ed23e4b120bf2c0cc579+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_eed7705dab350e38b2f8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_f183a302833064c6610c+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_f4206425e3ee5646ec34+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_f5819d12264aac7692e2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_f5f2ff1474db87dc8ad6+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_fb1a63421e5ced8eff9e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/043b440bccea34d291fb.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
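Each registry entry above is a one-line JSON file that records the original checkpoint configuration plus, under its "neuron" key, the export parameters the cached artifacts were compiled with (auto_cast_type, batch_size, num_cores, sequence_length, compiler_version). As an illustration only, here is a minimal Python sketch, assuming a local copy of the cache next to the script, that reads the entry added above and prints those parameters; the path and helper name exist only for this example.

import json
from pathlib import Path

# Assumed local path to the registry entry shown above.
ENTRY = Path(
    "neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/"
    "hf-internal-testing/tiny-random-BloomForCausalLM/043b440bccea34d291fb.json"
)

def neuron_export_params(entry_path: Path) -> dict:
    """Return the 'neuron' section of a cache registry entry."""
    config = json.loads(entry_path.read_text())
    return config["neuron"]

if __name__ == "__main__":
    params = neuron_export_params(ENTRY)
    # e.g. auto_cast_type=fp16, batch_size=1, num_cores=2, sequence_length=100
    for key in ("auto_cast_type", "batch_size", "num_cores", "sequence_length", "compiler_version"):
        print(f"{key}: {params[key]}")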
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/155654dd5a1bb456560b.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/ece51ad6b8284fbc396b.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f0c720d3b9ddf052a333.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/gpt2/3af48830f5f011356907.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/5201027a68de1e69bc8e.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/60efebcadae8a26b86b7.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/84e5771468d2f23ad8e5.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/aa607c4b73bfa5f2e0c4.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/ecb672d572323f8b285d.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/HuggingFaceTB/cosmo-1b/33a8cb17f93fe3fc9dd0.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/5a4b2f45c18a4bc9c83a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/8302a801fe562845883a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/89e270031bd45d93e39a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/99d4c970c17dcefa3f28.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/6e6daa6abc2844413369.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 8, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/8410ddf2de3dafde4131.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/8fa4a7481ec62ba032dc.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/ab86a7aa89828ed0a3c4.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/f1b0daa4b0bcee7fc566.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/29b98126099fa1f8bda7.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5504, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 4, "checkpoint_id": "princeton-nlp/Sheared-LLaMA-1.3B", "checkpoint_revision": "a4b76938edbf571ea7d7d9904861cbdca08809b4", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/697789aa059b99a37b94.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/c7219bf61b6c82837681.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ecdab458fc6dde78ac77.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/f70944e5962fa4f9ec08.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/optimum/mistral-1.1b-testing/b00d6d0d7b1f7077de3f.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "optimum/mistral-1.1b-testing", "checkpoint_revision": "ce03bc8d47dbd2c173ff65f3a8de1325ba724195", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 22, "num_key_value_heads": 4, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/37ebededdb70b6343673.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/40801c00b86f8a57bd61.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/62c3001284dec02a2f5a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/dc2ecabf6779efae1cbc.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/062012f53ad75e74ec59.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/44f9e3972c172ca7db97.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b66a6c8e252945ece8d0.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/d334f8877ad5f8e74e78.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7e2961a1715dcb12e3efaf810c456d039e576a02034d4339eb643f419eab7f58
size 13090
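The model.hlo_module.pb and larger model.neff entries are stored as Git LFS pointer files like the one above (version, sha256 oid, size). As a hedged illustration only, here is a small Python sketch that checks a locally downloaded artifact against such a pointer by recomputing its SHA-256 digest and size; the file paths in the example are placeholders, not files in this commit.

import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_text: str) -> dict:
    """Parse the key/value lines of a Git LFS pointer file."""
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def matches_pointer(artifact: Path, pointer: Path) -> bool:
    """True if the downloaded artifact has the size and SHA-256 recorded in the pointer."""
    expected = parse_lfs_pointer(pointer.read_text())
    data = artifact.read_bytes()
    return len(data) == expected["size"] and hashlib.sha256(data).hexdigest() == expected["oid"]

# Placeholder usage: compare a fetched artifact against its checked-in pointer.
# print(matches_pointer(Path("model.hlo_module.pb"), Path("model.hlo_module.pb.pointer")))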
neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.neff
ADDED
Binary file (134 kB).
neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:80ea3542d6650f894d256d75865e01942f51635a93b7f52c866319e38c54ae61
size 50129
neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.neff
ADDED
Binary file (226 kB).
neuronxcc-2.14.182.0+a56cbff7/MODULE_146077388d26b5450bf4+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_146077388d26b5450bf4+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:069b6db549c78b0645211b900002a69c5a791edfeac2f850bb6b4544d10f6535
size 323888
neuronxcc-2.14.182.0+a56cbff7/MODULE_146077388d26b5450bf4+2c2d707e/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b4fdaecf59466dba8eb73245dcc70467d88d407cf9a1c9cde341d40b7fb55720
size 2653184
neuronxcc-2.14.182.0+a56cbff7/MODULE_15540a4ca7f66130a69f+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_15540a4ca7f66130a69f+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e4e011639bfab6b2562101c1e1b3dd9b5e8d44d5a57dc03dd684c9ed11ea744a
size 511615
neuronxcc-2.14.182.0+a56cbff7/MODULE_15540a4ca7f66130a69f+2c2d707e/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e200c3d7f12511d87a386fb94b5ed87c597545fb6428db04e5ec1f3944af001
size 32195584
neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8aa31805a207894289c809131f705d15f03e364e21bcfc6ec4af79ea3291129f
size 56659
neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.neff
ADDED
Binary file (155 kB).