{"model": "3rd-Degree-Burn/L-3.1-Science-Writer-8B", "base_model": "", "revision": "d9bb11fb02f8eca3aec408912278e513377115da", "precision": "float16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-11-19T05:11:13Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": false, "sender": "3rd-Degree-Burn"} |