Removing all cached artifacts prior to 0.3.0
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/gpt2/780a42b2f8414c10d661.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/openai-community/gpt2/08c4ca22e4b7154e6b9f.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/openai-community/gpt2/babd32e9533067e7a2b4.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/openai-community/gpt2/f173c509660868474329.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/granite/ibm-granite/granite-3.1-2b-instruct/0211503669c9e68fbbde.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/granite/ibm-granite/granite-3.1-2b-instruct/7cf1a953f534c2649a7d.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/7bae97d51948b959db6b.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/2596378c026ba9520cff.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/299ac0af4dd23524b311.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/33f9ba3f192c33894d11.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/3819d6acabe8a087e721.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/5be4c42d5d5ccd3ee909.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/6f53d3dc8123c011f979.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/8aa7f4dbe49a823b65aa.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/8eac270969750c79ed43.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/93909955bee644cea760.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/a152541d678f561d8e5d.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/b6fe802ef136e664d9c4.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/b7374e7b99aa7227e591.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/f54906e314fbdb1f9711.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/f88c36c56550c35c2c21.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-2-13b-hf/409469dd48ea340e2031.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-2-7b-hf/678b8395e743df567a53.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.1-70B-Instruct/20b51ff77994bb4c1cc6.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.1-70B-Instruct/491a48ca8c62ac951be1.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.1-8B-Instruct/25395edbfe4cb40ed4e9.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B-Instruct/7a87208395f3db5ba7be.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B-Instruct/8cd73c74d69aa6440798.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B-Instruct/ebbf84c3779811da8d2c.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B/06646cc5b14dfa510412.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-3B/60cfa39a4231a8e3e393.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3-8B/c779aa50e0bee568e4f9.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B-Instruct/3b6b0065e24ca9d2eeb0.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/1e202b999f7f54120fb6.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/663199108a75cad3f73b.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/6753026c17aa56168b84.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/7f1cfbb1aa86ae8f2d64.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/c485b9da6e019edad2e2.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/e51e4db5bc6b4214ad42.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/fd4509240ba482254c74.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/5dc9f8288bce68812153.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/8f3a6f200be91cc32a57.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/1114d34726992c649698.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/2de9fcd20f86d046ac39.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/5560ab3d8247072045ba.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/9f6aa27cba2cbc22e78c.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/b4ba7de14335ba8730a5.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/bce0bf86a51ac631df0a.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/Intel/neural-chat-7b-v3-3/3a43907cb16dccf9b601.json +0 -1
- neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/Intel/neural-chat-7b-v3-3/b8070b9bb31f874b18b3.json +0 -1
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/gpt2/780a42b2f8414c10d661.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/openai-community/gpt2/08c4ca22e4b7154e6b9f.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 16, "checkpoint_id": "openai-community/gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/openai-community/gpt2/babd32e9533067e7a2b4.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "openai-community/gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/gpt2/openai-community/gpt2/f173c509660868474329.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "openai-community/gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/granite/ibm-granite/granite-3.1-2b-instruct/0211503669c9e68fbbde.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["GraniteForCausalLM"], "attention_bias": false, "attention_dropout": 0.1, "attention_multiplier": 0.015625, "bos_token_id": 0, "embedding_multiplier": 12.0, "eos_token_id": 0, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "logits_scaling": 8.0, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "granite", "neuron": {"auto_cast_type": "bf16", "batch_size": 2, "checkpoint_id": "ibm-granite/granite-3.1-2b-instruct", "checkpoint_revision": "374ef54e020a3ce208c65e96d6213922a87d8952", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 40, "num_key_value_heads": 8, "pad_token_id": 0, "residual_multiplier": 0.22, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 5000000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 49155}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/granite/ibm-granite/granite-3.1-2b-instruct/7cf1a953f534c2649a7d.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["GraniteForCausalLM"], "attention_bias": false, "attention_dropout": 0.1, "attention_multiplier": 0.015625, "bos_token_id": 0, "embedding_multiplier": 12.0, "eos_token_id": 0, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "logits_scaling": 8.0, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "granite", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "ibm-granite/granite-3.1-2b-instruct", "checkpoint_revision": "07eae41278b4cd918dbe32da5d77c27e1379f751", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 40, "num_key_value_heads": 8, "pad_token_id": 0, "residual_multiplier": 0.22, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 5000000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 49155}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/7bae97d51948b959db6b.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Hermes-2-Theta-Llama-3-8B", "checkpoint_revision": "57a73110702e7b05ba3f39fef36297454c680725", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/2596378c026ba9520cff.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "74fbf131a939963dd1e244389bb61ad0d0440a4d", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/299ac0af4dd23524b311.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "6a6f4aa4197940add57724a7707d069478df56b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/33f9ba3f192c33894d11.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "6a6f4aa4197940add57724a7707d069478df56b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/3819d6acabe8a087e721.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "74fbf131a939963dd1e244389bb61ad0d0440a4d", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/5be4c42d5d5ccd3ee909.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "74fbf131a939963dd1e244389bb61ad0d0440a4d", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/6f53d3dc8123c011f979.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 16, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "74fbf131a939963dd1e244389bb61ad0d0440a4d", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/8aa7f4dbe49a823b65aa.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "6a6f4aa4197940add57724a7707d069478df56b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/8eac270969750c79ed43.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 16, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "6a6f4aa4197940add57724a7707d069478df56b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/93909955bee644cea760.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "6a6f4aa4197940add57724a7707d069478df56b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/a152541d678f561d8e5d.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "6a6f4aa4197940add57724a7707d069478df56b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/b6fe802ef136e664d9c4.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "74fbf131a939963dd1e244389bb61ad0d0440a4d", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/b7374e7b99aa7227e591.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "74fbf131a939963dd1e244389bb61ad0d0440a4d", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/f54906e314fbdb1f9711.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "74fbf131a939963dd1e244389bb61ad0d0440a4d", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/deepseek-ai/DeepSeek-R1-Distill-Llama-8B/f88c36c56550c35c2c21.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "checkpoint_revision": "6a6f4aa4197940add57724a7707d069478df56b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-2-13b-hf/409469dd48ea340e2031.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-2-13b-hf", "checkpoint_revision": "5c31dfb671ce7cfe2d7bb7c04375e44c55e815b1", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 40, "num_key_value_heads": 40, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-2-7b-hf/678b8395e743df567a53.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-2-7b-hf", "checkpoint_revision": "01c7f73d771dfac7d292323805ebc428287df4f9", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.1-70B-Instruct/20b51ff77994bb4c1cc6.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 128, "hidden_act": "silu", "hidden_size": 8192, "initializer_range": 0.02, "intermediate_size": 28672, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-3.1-70B-Instruct", "checkpoint_revision": "1605565b47bb9346c5515c34102e054115b4f98b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 64, "num_hidden_layers": 80, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.1-70B-Instruct/491a48ca8c62ac951be1.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 128, "hidden_act": "silu", "hidden_size": 8192, "initializer_range": 0.02, "intermediate_size": 28672, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-3.1-70B-Instruct", "checkpoint_revision": "1605565b47bb9346c5515c34102e054115b4f98b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 24, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 64, "num_hidden_layers": 80, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.1-8B-Instruct/25395edbfe4cb40ed4e9.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-3.1-8B-Instruct", "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B-Instruct/7a87208395f3db5ba7be.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 64, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Llama-3.2-1B-Instruct", "checkpoint_revision": "9213176726f574b556790deb65791e0c5aa438b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 16, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 32.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B-Instruct/8cd73c74d69aa6440798.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 64, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 2, "checkpoint_id": "meta-llama/Llama-3.2-1B-Instruct", "checkpoint_revision": "9213176726f574b556790deb65791e0c5aa438b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 16, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 32.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B-Instruct/ebbf84c3779811da8d2c.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 64, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-3.2-1B-Instruct", "checkpoint_revision": "9213176726f574b556790deb65791e0c5aa438b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 16, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 32.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-1B/06646cc5b14dfa510412.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 64, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-3.2-1B", "checkpoint_revision": "4e20de362430cd3b72f300e6b0f18e50e7166e08", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 16, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 32.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Llama-3.2-3B/60cfa39a4231a8e3e393.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 3072, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-3.2-3B", "checkpoint_revision": "13afe5124825b4f3751f836b40dafda64c1ed062", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 24, "num_hidden_layers": 28, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 32.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3-8B/c779aa50e0bee568e4f9.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3-8B", "checkpoint_revision": "8cde5ca8380496c9a6cc7ef3a8b46a0372a1d920", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B-Instruct/3b6b0065e24ca9d2eeb0.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B-Instruct", "checkpoint_revision": "0e9e39f249a16976918f6564b8830bc894c89659", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/1e202b999f7f54120fb6.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/663199108a75cad3f73b.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 16, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/6753026c17aa56168b84.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/7f1cfbb1aa86ae8f2d64.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/c485b9da6e019edad2e2.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/e51e4db5bc6b4214ad42.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/meta-llama/Meta-Llama-3.1-8B/fd4509240ba482254c74.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128001, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "meta-llama/Meta-Llama-3.1-8B", "checkpoint_revision": "d04e592bb4f6aa9cfee91e2e20afa771667e1d4b", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/5dc9f8288bce68812153.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5504, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "princeton-nlp/Sheared-LLaMA-1.3B", "checkpoint_revision": "a4b76938edbf571ea7d7d9904861cbdca08809b4", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/llama/princeton-nlp/Sheared-LLaMA-1.3B/8f3a6f200be91cc32a57.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5504, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "princeton-nlp/Sheared-LLaMA-1.3B", "checkpoint_revision": "a4b76938edbf571ea7d7d9904861cbdca08809b4", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/1114d34726992c649698.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "892b3d7a7b1cf10c7a701c60881cd93df615734c", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/2de9fcd20f86d046ac39.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "892b3d7a7b1cf10c7a701c60881cd93df615734c", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/5560ab3d8247072045ba.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "892b3d7a7b1cf10c7a701c60881cd93df615734c", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/9f6aa27cba2cbc22e78c.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "892b3d7a7b1cf10c7a701c60881cd93df615734c", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/b4ba7de14335ba8730a5.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 16, "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "892b3d7a7b1cf10c7a701c60881cd93df615734c", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/HuggingFaceH4/zephyr-7b-beta/bce0bf86a51ac631df0a.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "892b3d7a7b1cf10c7a701c60881cd93df615734c", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/Intel/neural-chat-7b-v3-3/3a43907cb16dccf9b601.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "Intel/neural-chat-7b-v3-3", "checkpoint_revision": "7506dfc5fb325a8a8e0c4f9a6a001671833e5b8e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev1/inference/mistral/Intel/neural-chat-7b-v3-3/b8070b9bb31f874b18b3.json
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "Intel/neural-chat-7b-v3-3", "checkpoint_revision": "7506dfc5fb325a8a8e0c4f9a6a001671833e5b8e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
|
|
|
|
|
|