{
"architectures": [
"MultiTaskModel"
],
"base_model_name": "HuggingFaceTB/SmolLM2-135M-Instruct",
"debug": false,
"hidden_size": 576,
"model_type": "multi-task-model",
"num_choices": 5,
"num_classes": 49152,
"sequence_lengths": {
"formula": 128,
"prompt": 256,
"rationale": 384
},
"torch_dtype": "float32",
"transformers_version": "4.35.2"
}