open-llm-bot
committed on
Add axolotl-ai-co/romulus-mistral-nemo-12b-simpo to eval queue
axolotl-ai-co/romulus-mistral-nemo-12b-simpo_eval_request_False_bfloat16_Original.json
CHANGED
@@ -1,16 +1 @@
-{
-    "model": "axolotl-ai-co/romulus-mistral-nemo-12b-simpo",
-    "base_model": "",
-    "revision": "15fd3ffa46c1ea51aa5d26a1da24214e324d7cf2",
-    "precision": "bfloat16",
-    "params": 12.248,
-    "architectures": "MistralForCausalLM",
-    "weight_type": "Original",
-    "status": "FINISHED",
-    "submitted_time": "2024-07-25T02:59:39Z",
-    "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
-    "job_id": "7813135",
-    "job_start_time": "2024-07-26T14:05:55.733010",
-    "use_chat_template": true,
-    "sender": "user3542384468"
-}
+{"model": "axolotl-ai-co/romulus-mistral-nemo-12b-simpo", "base_model": "", "revision": "19e2c43d9eaa3b98dc074eff60cbfc78e588beb4", "precision": "bfloat16", "params": 12.248, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-09-21T04:00:40Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true, "sender": "CombinHorizon"}
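The file is a single JSON object describing the eval request. As a minimal sketch (assuming a local copy of the file under the name shown above; field names taken from the diff), this is how one might load it and inspect the fields this commit changes:

```python
import json

# Hypothetical local path; in practice the file lives in the eval-queue dataset repo.
path = "axolotl-ai-co/romulus-mistral-nemo-12b-simpo_eval_request_False_bfloat16_Original.json"

with open(path) as f:
    request = json.load(f)

# Fields touched by this commit: revision, status, submitted_time,
# model_type, job_id, job_start_time, and sender.
print(request["model"])
print(request["revision"], request["status"], request["submitted_time"])
```

With the updated file, this prints the new revision hash alongside a "PENDING" status, i.e. the request has been reset to be re-run rather than left in its earlier "FINISHED" state.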