{
  "name": "Biggie SmoLlm Q8_0",
  "model_path": "biggie_groked_int8_q8_0.gguf",
  "load_params": {
    "n_ctx": 2048,
    "n_batch": 512,
    "n_gpu_layers": 0,
    "use_mlock": true,
    "rope_freq_base": 10000,
    "rope_freq_scale": 1.0,
    "f16_kv": false,
    "cache_type_k": "q8_0",
    "cache_type_v": "q8_0"
  },
  "inference_params": {
    "n_threads": 1,
    "n_predict": 1024,
    "top_k": 40,
    "top_p": 0.85,
    "temperature": 1.5,
    "repeat_penalty": 1.1,
    "min_p": 0.3,
    "mirostat": 0,
    "mirostat_tau": 5,
    "mirostat_eta": 0.1,
    "tfs_z": 1,
    "typical_p": 1,
    "presence_penalty": 0,
    "frequency_penalty": 0,
    "pre_prompt": "You are a NASA JPL Scientist.",
    "pre_prompt_suffix": "\n",
    "pre_prompt_prefix": "",
    "input_prefix": "<|im_start|>Human: ",
    "input_suffix": "\n",
    "antiprompt": ["Human:"],
    "stop_sequences": []
  }
}
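The "cache_type_k" / "cache_type_v" entries quantize the KV cache to Q8_0, which is the equivalent of llama.cpp's -ctk q8_0 / -ctv q8_0 flags (the original file noted this in an inline comment, dropped above so the JSON parses strictly). As a rough illustration of how these parameters map onto code, here is a minimal sketch using the llama-cpp-python bindings; the type_k / type_v ggml type ids and the example Human prompt are assumptions for illustration, not part of the preset.

# Minimal sketch, assuming recent llama-cpp-python (pip install llama-cpp-python).
# Constructor arguments mirror load_params; type_k / type_v take ggml type ids,
# and 8 corresponds to GGML_TYPE_Q8_0 (the -ctk q8_0 / -ctv q8_0 equivalent).
from llama_cpp import Llama

llm = Llama(
    model_path="biggie_groked_int8_q8_0.gguf",
    n_ctx=2048,
    n_batch=512,
    n_gpu_layers=0,
    use_mlock=True,
    rope_freq_base=10000,
    rope_freq_scale=1.0,
    type_k=8,   # q8_0 key cache (assumption: build with KV-cache quantization)
    type_v=8,   # q8_0 value cache
    n_threads=1,
)

# Prompt assembled from pre_prompt, input_prefix and input_suffix;
# the Human question itself is a hypothetical example.
prompt = (
    "You are a NASA JPL Scientist.\n"
    "<|im_start|>Human: How far away is Voyager 1 today?\n"
)

# Sampling arguments mirror inference_params (max_tokens == n_predict,
# stop == antiprompt).
out = llm(
    prompt,
    max_tokens=1024,
    top_k=40,
    top_p=0.85,
    min_p=0.3,
    temperature=1.5,
    repeat_penalty=1.1,
    mirostat_mode=0,
    mirostat_tau=5.0,
    mirostat_eta=0.1,
    tfs_z=1.0,
    typical_p=1.0,
    presence_penalty=0.0,
    frequency_penalty=0.0,
    stop=["Human:"],
)
print(out["choices"][0]["text"])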