optimum-internal-testing-user
committed on
Synchronizing local compiler cache.
This view is limited to 50 files because it contains too many changes.
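The commit adds two kinds of artifacts: registry entries under 0_REGISTRY/<optimum-neuron version>/inference/<architecture>/<checkpoint>/ (one small JSON per compiled configuration, holding the model config plus a "neuron" section with the export parameters), and the compiled MODULE_*/model.neff binaries, which the .gitattributes changes below route through Git LFS. A local compiler cache like this is typically pushed to its Hub repository with optimum-cli (for example `optimum-cli neuron cache synchronize` in current optimum-neuron tooling; treat the exact command as an assumption). Below is a minimal sketch of how one of the registry entries added here could be inspected; the cache repo id is a placeholder, since this view does not name the repository:

```python
import json

from huggingface_hub import hf_hub_download

# Placeholder: the actual cache repository id is not shown in this diff view.
CACHE_REPO = "<org>/<neuron-cache-repo>"

# Fetch one registry entry added by this commit and look at its "neuron" section,
# which records the parameters the checkpoint was compiled with.
path = hf_hub_download(
    repo_id=CACHE_REPO,
    filename="neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json",
)
with open(path) as f:
    entry = json.load(f)

print(entry["model_type"])  # "gpt2"
print(entry["neuron"])      # batch_size, sequence_length, num_cores, auto_cast_type, compiler_version, ...
```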
- .gitattributes +114 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/26575c75a97054312245.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/48cfba5e4ff5369b6e51.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/638108a35a53ccc460bd.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/77bf56a610a467c3b01c.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/97351cfb2a7ab3fe894d.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/d8d190c62c0b163c34d7.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/e8097cf453d786f97d26.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/mistral/optimum/mistral-1.1b-testing/3f196a5a5beadf2af838.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22/inference/llama/meta-llama/Meta-Llama-3-8B/c2f28b3cdba011f81417.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22/inference/mistral/mistralai/Mistral-7B-Instruct-v0.3/1ccf5513f809a1c9b8e8.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22/inference/mistral/mistralai/Mistral-7B-Instruct-v0.3/d9d23b589ca43e531fa2.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4efe4ec09e4490308cc0.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/9d5f94b71f6ac3455612.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/b9db5e6da88e9f07ee3e.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f9a5439ee67f962284df.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/gpt2/593328a77bb235f2a8c4.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/gpt2/797982a596fe5330c2fb.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2713954a7f357d88b849.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/7e574905d10c185da6d3.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/8e370463308e6d06a809.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/b99d0747aa4e9af275fb.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/d8ff5659ffc4c4f4120b.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/e62dd30300cbb6a57da5.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/openai-community/gpt2/864bc63eecc707804124.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/openai-community/gpt2/cc7cda3941881c035cb2.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/openai-community/gpt2/e31ae96d70b5049c0036.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/75ff34e7d936ca49b6c2.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/9b7d5605b2dff8357fec.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/c16f42c4d7e1ac059eaf.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/NousResearch/Meta-Llama-3-8B-Instruct/e874499a8a7dcf0e3617.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/1ff321163e68478a1ce2.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/311a29bf60fb4f6082c6.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/60954a11dd58b586c979.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/fb4b47ea0eea68e736b2.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/meta-Llama/Llama-2-7b-chat-hf/413432928afdb7aad6db.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/meta-Llama/Meta-Llama-3-8B/2b4c60684eea0835c2f6.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/c2f28b3cdba011f81417.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/4ada30f8bc0a03d8ee0f.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/c7ea17c8aba530d53b0b.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/52fbcfd7fedd024371f9.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/61da37a594d2621c84f3.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/8ab25062c2316baf852b.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/8adb053370d3399fa36d.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/optimum/mistral-1.1b-testing/3f196a5a5beadf2af838.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mixtral/dacorvo/Mixtral-tiny/43944cbea36886b7c858.json +1 -0
.gitattributes
CHANGED
@@ -33,3 +33,117 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_007503d4c04a14f9a185+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_007ddbaadfdea74c4db5+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_029109cc3729b56e3989+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_04d8bfb8c1cb9a11144b+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_06fde6a8f78a452b6d07+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_09f121a0ce582da08d38+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_0a1c2a25c4af8cd643f0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_0e121f3f89913112e427+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_14b9e7a133b5771c2a99+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_178af04b5b08c3578620+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_18083b8b4fbd2e658735+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_1c4be4f9d056547195c3+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_1e240c1c706ae08c64fe+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_1f5a72270467461fd8ed+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_250ee69b911da3bdc8d2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_2745e2a360eb982e261d+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_290493cd80b7ed55b503+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_2920d83bb25b02e4cc51+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_29cca2b907e9f3fdb2f8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_2bbf8ec5e16249e923c7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_2c4d5c0dcf0dbb708b70+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_2d6b5f55a1f4f693790c+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_301a41e1f343ba3bec71+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_31fac282cca1d97df50f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_35a5e2c0699bcf01dfe7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_3601833f49e690f13c63+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_361449db4a7b8ecbe0be+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_373d2bd4a3e7bc287ea8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_38611d3db210d2b5925b+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_3aa14f7f2c5ba85f8a71+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_3ad368cca4ed8f043ee1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_3c8fc1da83313ac638a0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_40c61269bb576a56cab1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_40e36c782678b0e16897+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_4143702e707b541adb55+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_4171d2d7e0b61f4d1b13+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_42df9e1a521f5732af09+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_432252e94a4dc13ef3b6+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_4444187f1406cf78d349+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_44846e9c6f250ca8cfb3+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_44d97f852aed156dc887+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_47947d22492bd9fb429d+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_47991e7bed6610af26d7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_48ee15b0a738f243158d+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_49abf957a79769689511+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_4c0119427a64a6390346+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_4cc7cdbf96054f333830+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_50a1588f6cf69a2eec93+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_5207f7054fee18b4a110+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_568a54fb45a63f6158dc+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_588d8cf9d8af8c4272bf+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_6266ec9aa24a214ad7d6+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_67a4d1fe61d13931853b+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_67e8e7cfb953c7108269+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_6b094a2ea2d3f54879b4+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_6b1e240f57e09b1d4d62+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_72a15ce66c87f97aed47+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_72a51ff433ca11ac031b+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_73085727dd8ddb3ddb8a+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_78af80b2cb4468b1d2c2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_7b5a0cd7677790397b55+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_80b37e060185910ca232+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_80cfbe7a1668a2c0aae2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_82e897b5734412186a71+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_830f95f98361da5d8382+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_85487114e7f6969a71f7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_86a6ad64bbcf0e8eda18+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_88d23e4ee569c6ff0ea4+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_8d30f35f445bd39100c0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_8f7e5719c01964db300f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_93182cd0704193a7e77e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_94ba1628e6c982bab9f5+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_984c1478d915ad14199e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_984ee9c6f0d654f0c0c3+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_99dde6c040cad2ae1788+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_99f4d3c2bed47f4f1a46+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_9ab5721edb372331817f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_9c2ba8d97fa166a6a084+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_9d5ee9d103b12a28f89d+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_9de38ed5d4f3f67e66ab+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_9f530c9922606ff59465+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_a06c2a9fc81c978515e9+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_aa83097151469405fa53+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_ab57a27fbc39f3dfb229+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_ad7cde6fb89e436f4944+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_adf5b762ba5f48a9160e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_af1bf6364bd2ed9743fa+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b1733b83c7a15804aadf+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b182a5ccab0c760915ab+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b34e819f9371decdc13f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b3ca64867673612ad42c+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b5831444f8ddd1f99157+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b872e414ac799a1ec2b6+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b99ac5e19f9eff20761d+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_ba8f813c7efe61e647b3+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_bca01d99adbcb3d4c9c2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_bda66213bdc97718c04e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_bf49580423129e50f96d+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_c129045bda9bf853b93e+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_c4028bced80092acdc41+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_c5fae4eaeff8c4b088ce+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_ca64b706bacd8b456f58+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_ccdbae5eeb030e72d5cf+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_d1ba6c68e170415611b1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_d508d68ca9af9d78e9ce+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e1fd2cfc93d9d8d1cfc8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e322a40c75dbf0c1aa78+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e622f61d060e8ff0a0fa+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e6330f79a1b82c78a9aa+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e68c66014ae30ec51f71+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e805a82c8d8a8e7ebfe1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e8c6961fde56ab09a360+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_f217aeeb232d7d4568c8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_f4f63b2510413deb054c+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/26575c75a97054312245.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 48, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 12, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/48cfba5e4ff5369b6e51.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/638108a35a53ccc460bd.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/77bf56a610a467c3b01c.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/97351cfb2a7ab3fe894d.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 8, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/d8d190c62c0b163c34d7.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 48, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/e8097cf453d786f97d26.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "1460c22666392e470910ce3d44ffeb2ab7dbd4df", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/mistral/optimum/mistral-1.1b-testing/3f196a5a5beadf2af838.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "optimum/mistral-1.1b-testing", "checkpoint_revision": "ce03bc8d47dbd2c173ff65f3a8de1325ba724195", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 22, "num_key_value_heads": 4, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22/inference/llama/meta-llama/Meta-Llama-3-8B/c2f28b3cdba011f81417.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22/inference/mistral/mistralai/Mistral-7B-Instruct-v0.3/1ccf5513f809a1c9b8e8.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "mistralai/Mistral-7B-Instruct-v0.3", "checkpoint_revision": "83e9aa141f2e28c82232fea5325f54edf17c43de", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32768}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22/inference/mistral/mistralai/Mistral-7B-Instruct-v0.3/d9d23b589ca43e531fa2.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "mistralai/Mistral-7B-Instruct-v0.3", "checkpoint_revision": "83e9aa141f2e28c82232fea5325f54edf17c43de", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32768}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4efe4ec09e4490308cc0.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/9d5f94b71f6ac3455612.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/b9db5e6da88e9f07ee3e.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f9a5439ee67f962284df.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/gpt2/593328a77bb235f2a8c4.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/gpt2/797982a596fe5330c2fb.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 128, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2713954a7f357d88b849.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/7e574905d10c185da6d3.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/8e370463308e6d06a809.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 16, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "91c0fe31d692dd8448d9bc06e8d1877345009e3b", "compiler_type": "neuronx-cc", "compiler_version": "2.12.54.0+f631c2365", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/b99d0747aa4e9af275fb.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/d8ff5659ffc4c4f4120b.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 24, "sequence_length": 128, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/e62dd30300cbb6a57da5.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/openai-community/gpt2/864bc63eecc707804124.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "openai-community/gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/openai-community/gpt2/cc7cda3941881c035cb2.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "openai-community/gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/gpt2/openai-community/gpt2/e31ae96d70b5049c0036.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "openai-community/gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/75ff34e7d936ca49b6c2.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/9b7d5605b2dff8357fec.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/HuggingFaceTB/cosmo-1b/c16f42c4d7e1ac059eaf.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/NousResearch/Meta-Llama-3-8B-Instruct/e874499a8a7dcf0e3617.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 4, "checkpoint_id": "NousResearch/Meta-Llama-3-8B-Instruct", "checkpoint_revision": "3cf58932fb9b7257157a1a7e4b4cf0a469b069ba", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/1ff321163e68478a1ce2.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/311a29bf60fb4f6082c6.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/60954a11dd58b586c979.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/dacorvo/tiny-random-llama/fb4b47ea0eea68e736b2.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/meta-Llama/Llama-2-7b-chat-hf/413432928afdb7aad6db.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-Llama/Llama-2-7b-chat-hf", "checkpoint_revision": "f5db02db724555f92da89c216ac04704f23d4590", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/meta-Llama/Meta-Llama-3-8B/2b4c60684eea0835c2f6.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-Llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/c2f28b3cdba011f81417.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/4ada30f8bc0a03d8ee0f.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5504, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "princeton-nlp/Sheared-LLaMA-1.3B", "checkpoint_revision": "a4b76938edbf571ea7d7d9904861cbdca08809b4", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/c7ea17c8aba530d53b0b.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5504, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "princeton-nlp/Sheared-LLaMA-1.3B", "checkpoint_revision": "a4b76938edbf571ea7d7d9904861cbdca08809b4", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/52fbcfd7fedd024371f9.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/61da37a594d2621c84f3.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/8ab25062c2316baf852b.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/8adb053370d3399fa36d.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mistral/optimum/mistral-1.1b-testing/3f196a5a5beadf2af838.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "optimum/mistral-1.1b-testing", "checkpoint_revision": "ce03bc8d47dbd2c173ff65f3a8de1325ba724195", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 22, "num_key_value_heads": 4, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.23.dev0/inference/mixtral/dacorvo/Mixtral-tiny/43944cbea36886b7c858.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
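Each registry entry above records, in its "neuron" block, the export parameters (auto_cast_type, batch_size, sequence_length, num_cores, checkpoint_id/revision, compiler version) that produced a cached compilation artifact. As a minimal sketch only (assuming the optimum-neuron NeuronModelForCausalLM export API; the model id and output directory below are illustrative), an entry like the gpt2 one corresponds to an export call with matching parameters:

    # Hedged sketch, not part of the cache itself: exporting a decoder model with
    # optimum-neuron compiles it with neuronx-cc and records the matching
    # configuration in the local compiler cache registry.
    from optimum.neuron import NeuronModelForCausalLM

    model = NeuronModelForCausalLM.from_pretrained(
        "openai-community/gpt2",   # checkpoint_id from the registry entry
        export=True,               # trigger compilation with neuronx-cc
        batch_size=1,
        sequence_length=1024,
        num_cores=2,
        auto_cast_type="fp16",
    )
    model.save_pretrained("gpt2-neuron")  # hypothetical output directory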