DEFAULT_stage:
  DEFAULT_modifiers:
    QuantizationModifier:
      # Leave the output head and any module whose name ends in "mlp.gate"
      # (e.g. MoE router gates) unquantized
      ignore: [lm_head, 're:.*mlp.gate$']
      # Quantize all remaining Linear layers with the FP8 preset scheme
      targets: Linear
      scheme: FP8
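# A minimal sketch (an assumption, not shipped with this recipe) of applying
# an equivalent configuration through llm-compressor's Python API in a recent
# release; the model path is a placeholder:
#
#   from llmcompressor import oneshot
#   from llmcompressor.modifiers.quantization import QuantizationModifier
#
#   recipe = QuantizationModifier(
#       targets="Linear",
#       scheme="FP8",
#       ignore=["lm_head", "re:.*mlp.gate$"],
#   )
#   # The static FP8 scheme calibrates activation scales, so a small
#   # calibration dataset is typically passed to oneshot as well.
#   oneshot(model="path/to/model", recipe=recipe)
#
# Running oneshot with this modifier typically serializes a recipe equivalent
# to the YAML above alongside the compressed checkpoint.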