{
  "conversion_tool": "create_final_eagle3_config.py",
  "source_checkpoint": "nvidia/Llama-4-Maverick-17B-128E-Eagle3",
  "format": "speculators-eagle3",
  "architecture": "Llama3-based Eagle3 head",
  "verifier": "Llama4 Maverick",
  "notes": [
    "Eagle3 head based on Llama3 architecture (rope_type: llama3)",
    "Targets Llama4 Maverick verifier (Llama4ForConditionalGeneration)",
    "Large vocabulary of 202,048 tokens",
    "Uses auxiliary hidden states from layers 1, 23, 44",
    "NVIDIA-specific fields preserved as extra configuration",
    "May require Eagle3Speculator implementation extensions"
  ]
}