Synchronizing local compiler cache.
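This commit pushes a local Neuron compilation cache (registry JSON entries under 0_REGISTRY plus compiled MODULE artifacts) to this Hub repository. As a rough, hedged illustration only: a one-way sync like this could be done with the huggingface_hub client as sketched below; the repository id and local path are hypothetical placeholders, and the actual synchronization is normally driven by the Optimum Neuron cache tooling rather than a hand-rolled script.

# Hedged sketch: mirror a local Neuron compiler cache directory to a Hub repo.
# The repo id and local path are hypothetical placeholders, not values from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="/var/tmp/neuron-compile-cache",   # local cache root (placeholder)
    repo_id="my-org/neuron-cache-mirror",          # target repo (placeholder)
    repo_type="dataset",                           # assumption: the cache lives in a dataset repo
    commit_message="Synchronizing local compiler cache.",
    allow_patterns=["neuronxcc-*/**"],             # only the compiler-versioned cache directories
)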
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete list.
- .gitattributes +30 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/043b440bccea34d291fb.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/155654dd5a1bb456560b.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/ece51ad6b8284fbc396b.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f0c720d3b9ddf052a333.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/gpt2/3af48830f5f011356907.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/5201027a68de1e69bc8e.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/60efebcadae8a26b86b7.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/84e5771468d2f23ad8e5.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/aa607c4b73bfa5f2e0c4.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/ecb672d572323f8b285d.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/5a4b2f45c18a4bc9c83a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/8302a801fe562845883a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/89e270031bd45d93e39a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/99d4c970c17dcefa3f28.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/8fa4a7481ec62ba032dc.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/f1b0daa4b0bcee7fc566.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/29b98126099fa1f8bda7.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/697789aa059b99a37b94.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/c7219bf61b6c82837681.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ecdab458fc6dde78ac77.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/f70944e5962fa4f9ec08.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/37ebededdb70b6343673.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/40801c00b86f8a57bd61.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/62c3001284dec02a2f5a.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/dc2ecabf6779efae1cbc.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/062012f53ad75e74ec59.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/44f9e3972c172ca7db97.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b66a6c8e252945ece8d0.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/d334f8877ad5f8e74e78.json +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.neff +0 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.neff +0 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.neff +0 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/model.neff +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1ddfe7bb03e6b5141bca+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1ddfe7bb03e6b5141bca+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1ddfe7bb03e6b5141bca+2c2d707e/model.neff +0 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1f3d02b2530b83c4abf7+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1f3d02b2530b83c4abf7+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_1f3d02b2530b83c4abf7+2c2d707e/model.neff +0 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_207c4a429ddcb02a831a+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.14.182.0+a56cbff7/MODULE_207c4a429ddcb02a831a+2c2d707e/model.hlo_module.pb +3 -0
.gitattributes
CHANGED
@@ -151,3 +151,33 @@ neuronxcc-2.13.66.0+6dfecc895/MODULE_00fe98d615fb072392ce+2c2d707e/model.neff fi
 neuronxcc-2.13.66.0+6dfecc895/MODULE_05ffcb959c9e232f6071+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.13.66.0+6dfecc895/MODULE_161d550e91fe728b06bd+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.13.66.0+6dfecc895/MODULE_af97e15eb5b056af300b+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_207c4a429ddcb02a831a+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_2a4f91813d1835a0f6a7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_356e1ac33244f70871ed+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_5471fadab3e3ee0bc1bb+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_5729cbb8a8a9e0ae1c68+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_66a59c8c9a1bb7923ad5+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_712f16088b92ce1552a7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_73d160cbd5af37c439b3+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_88bb293cbec6b542e9b1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_8ae58871e7bd6db4281f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_92c1b5ae8a448b1de108+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_9b6bd87db65507f39a41+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_a1b103ca76fc3468bea0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_b4616482089c6d7404d1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_baa48b588d2301d09876+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_bf90eb72da914cee62e1+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_c5afb930fa944fd003f8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_cebc0d79dffdb4b073d7+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_d11a14fe0d09008357f2+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_d3b0c2d106d7295dbdb9+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_d99c3f549b97f8547b37+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_dbfed0b02649def5bfd0+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_ddff37e03914d1e4cc1a+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_e38e264182409d3ea8bb+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_eae40350bbf4ea1569ad+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_ec338419f643491eb02f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_ed23e4b120bf2c0cc579+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_eed7705dab350e38b2f8+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.14.182.0+a56cbff7/MODULE_f183a302833064c6610c+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/043b440bccea34d291fb.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/155654dd5a1bb456560b.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/ece51ad6b8284fbc396b.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/f0c720d3b9ddf052a333.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/gpt2/3af48830f5f011356907.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/5201027a68de1e69bc8e.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/60efebcadae8a26b86b7.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/84e5771468d2f23ad8e5.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/aa607c4b73bfa5f2e0c4.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/ecb672d572323f8b285d.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/5a4b2f45c18a4bc9c83a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/8302a801fe562845883a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/89e270031bd45d93e39a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/dacorvo/tiny-random-llama/99d4c970c17dcefa3f28.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/8fa4a7481ec62ba032dc.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/meta-llama/Meta-Llama-3-8B/f1b0daa4b0bcee7fc566.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "62bd457b6fe961a42a631306577e622c83876cb6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/29b98126099fa1f8bda7.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5504, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 4, "checkpoint_id": "princeton-nlp/Sheared-LLaMA-1.3B", "checkpoint_revision": "a4b76938edbf571ea7d7d9904861cbdca08809b4", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/697789aa059b99a37b94.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/c7219bf61b6c82837681.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ecdab458fc6dde78ac77.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/f70944e5962fa4f9ec08.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/37ebededdb70b6343673.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/40801c00b86f8a57bd61.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/62c3001284dec02a2f5a.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/mixtral/dacorvo/Mixtral-tiny/dc2ecabf6779efae1cbc.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/062012f53ad75e74ec59.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/44f9e3972c172ca7db97.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b66a6c8e252945ece8d0.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/0_REGISTRY/0.0.24.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/d334f8877ad5f8e74e78.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.14.182.0+a56cbff7", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7e2961a1715dcb12e3efaf810c456d039e576a02034d4339eb643f419eab7f58
size 13090
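The model.hlo_module.pb blobs (and some model.neff files) are stored as Git LFS pointer files: three text lines giving the LFS spec version, the SHA-256 of the real object, and its size in bytes. A small sketch with a hypothetical helper (not part of the cache tooling) that parses such a pointer:

# Hypothetical helper: parse a Git LFS pointer file into its fields.
# Pointer files contain "version", "oid sha256:<hex>" and "size <bytes>" lines.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

# The pointer above, verbatim:
pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:7e2961a1715dcb12e3efaf810c456d039e576a02034d4339eb643f419eab7f58
size 13090
"""
print(parse_lfs_pointer(pointer))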
neuronxcc-2.14.182.0+a56cbff7/MODULE_08ad0cd4f5c5a5415a09+2c2d707e/model.neff
ADDED
Binary file (134 kB).
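Each MODULE_<hash> directory in the cache bundles the compile flags, the serialized HLO module, and the compiled NEFF binary for one compiled graph. A minimal sketch, assuming the repository has been downloaded locally, that inventories those artifacts:

# Minimal sketch: walk a local copy of the cache and list each MODULE's artifacts.
# Assumes the repository content is available in the current working directory.
from pathlib import Path

cache_root = Path("neuronxcc-2.14.182.0+a56cbff7")
for module_dir in sorted(cache_root.glob("MODULE_*")):
    flags = (module_dir / "compile_flags.txt").read_text().strip()
    hlo_size = (module_dir / "model.hlo_module.pb").stat().st_size
    neff_size = (module_dir / "model.neff").stat().st_size
    print(f"{module_dir.name}: flags={flags!r} hlo={hlo_size}B neff={neff_size}B")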
neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:80ea3542d6650f894d256d75865e01942f51635a93b7f52c866319e38c54ae61
size 50129
neuronxcc-2.14.182.0+a56cbff7/MODULE_0e5fb538f93c1f3fd8d8+2c2d707e/model.neff
ADDED
Binary file (226 kB).
neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8aa31805a207894289c809131f705d15f03e364e21bcfc6ec4af79ea3291129f
size 56659
neuronxcc-2.14.182.0+a56cbff7/MODULE_15d42db652fa62735c68+2c2d707e/model.neff
ADDED
Binary file (155 kB).
neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:75ddd0fa36baa1ffccb172c1a5e0c2af3a9aec90cf1a79ecbef7a9e747040e4a
size 333872
neuronxcc-2.14.182.0+a56cbff7/MODULE_1d5c76706b604a934e43+2c2d707e/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4afd66e272f2729ac84a5f63cc8121e639c422aa0e991428a39e10dbcbec087e
size 2386944
neuronxcc-2.14.182.0+a56cbff7/MODULE_1ddfe7bb03e6b5141bca+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_1ddfe7bb03e6b5141bca+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0beb3bc81714d1f70e77131f347a87578e245d93457b5aa875e18761c434e459
size 1900
neuronxcc-2.14.182.0+a56cbff7/MODULE_1ddfe7bb03e6b5141bca+2c2d707e/model.neff
ADDED
Binary file (42 kB).
neuronxcc-2.14.182.0+a56cbff7/MODULE_1f3d02b2530b83c4abf7+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_1f3d02b2530b83c4abf7+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c016cb9348980e5ceadfeb94168eab02c144bb5693c61751c2d3367d09ef5b5f
size 57433
neuronxcc-2.14.182.0+a56cbff7/MODULE_1f3d02b2530b83c4abf7+2c2d707e/model.neff
ADDED
Binary file (155 kB).
neuronxcc-2.14.182.0+a56cbff7/MODULE_207c4a429ddcb02a831a+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.14.182.0+a56cbff7/MODULE_207c4a429ddcb02a831a+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e23a38ae6830eabad688b5fd949542ebdcdcb1b050d748d5362a1716867e7847
size 422786