models:
  - model: clibrain/Llama-2-13b-ft-instruct-es
  - model: LeoLM/leo-hessianai-13b
  - model: daekeun-ml/Llama-2-ko-DPO-13B
  - model: pleisto/yuren-13b-chatml
  - model: bofenghuang/vigogne-2-13b-instruct
  - model: OpenBuddy/openbuddy-llama2-13b-v8.1-fp16
merge_method: dare_ties
base_model: TheBloke/Llama-2-13B-fp16
dtype: float16
parameters:
  density: 0.3
  weight: 1.0
  normalize: true
  int8_mask: true
tokenizer_source: base
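# Usage sketch (assumes mergekit is installed and this config is saved as
# config.yml; both the file name and output path below are illustrative):
#   pip install mergekit
#   mergekit-yaml config.yml ./merged-model --cuda
# With dare_ties, each fine-tune's delta from the base model is randomly pruned
# to the given density (here 30% of delta weights are kept) and rescaled, then
# sign conflicts are resolved TIES-style before merging onto the base model.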