llava-gemma-7b / generation_config.json
{
  "_from_model_config": true,
  "attn_softmax_bf16": null,
  "bos_token_id": 2,
  "bucket_internal": null,
  "bucket_size": -1,
  "eos_token_id": 1,
  "flash_attention_recompute": null,
  "ignore_eos": null,
  "kv_cache_fp8": null,
  "limit_hpu_graphs": null,
  "pad_token_id": 0,
  "reduce_recompile": null,
  "reuse_cache": null,
  "static_shapes": null,
  "transformers_version": "4.39.0.dev0",
  "trim_logits": null,
  "use_flash_attention": null
}
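
As a minimal sketch of how a generation_config.json like this is typically consumed, the standard fields (bos_token_id, eos_token_id, pad_token_id) can be read with the transformers GenerationConfig class. The repo id "Intel/llava-gemma-7b" below is an assumption taken from the folder name, and the non-standard keys (attn_softmax_bf16, limit_hpu_graphs, reuse_cache, etc.) appear to be Habana/Gaudi-oriented extensions handled by optimum-habana rather than by core transformers.

# Minimal sketch; repo id is assumed from the folder name and may differ.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("Intel/llava-gemma-7b")

# Standard fields from the JSON above map directly onto attributes.
print(gen_config.bos_token_id)  # 2
print(gen_config.eos_token_id)  # 1
print(gen_config.pad_token_id)  # 0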