{
  "_name_or_path": "facebook/dinov2-large",
  "apply_layernorm": true,
  "architectures": [
    "Dinov2ForImageClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "drop_path_rate": 0.0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "id2label": {
    "0": "Acropore_branched",
    "1": "Acropore_digitised",
    "2": "Acropore_tabular",
    "3": "Algae",
    "4": "Dead_coral",
    "5": "Millepore",
    "6": "No_acropore_encrusting",
    "7": "No_acropore_massive",
    "8": "No_acropore_sub_massive",
    "9": "Rock",
    "10": "Rubble",
    "11": "Sand"
  },
  "image_size": 140,
  "initializer_range": 0.02,
  "label2id": {
    "Acropore_branched": 0,
    "Acropore_digitised": 1,
    "Acropore_tabular": 2,
    "Algae": 3,
    "Dead_coral": 4,
    "Millepore": 5,
    "No_acropore_encrusting": 6,
    "No_acropore_massive": 7,
    "No_acropore_sub_massive": 8,
    "Rock": 9,
    "Rubble": 10,
    "Sand": 11
  },
  "layer_norm_eps": 1e-06,
  "layerscale_value": 1.0,
  "mlp_ratio": 4,
  "model_type": "dinov2",
  "num_attention_heads": 16,
  "num_channels": 3,
  "num_hidden_layers": 24,
  "out_features": [
    "stage24"
  ],
  "out_indices": [
    24
  ],
  "patch_size": 14,
  "problem_type": "multi_label_classification",
  "qkv_bias": true,
  "reshape_hidden_states": true,
  "stage_names": [
    "stem",
    "stage1",
    "stage2",
    "stage3",
    "stage4",
    "stage5",
    "stage6",
    "stage7",
    "stage8",
    "stage9",
    "stage10",
    "stage11",
    "stage12",
    "stage13",
    "stage14",
    "stage15",
    "stage16",
    "stage17",
    "stage18",
    "stage19",
    "stage20",
    "stage21",
    "stage22",
    "stage23",
    "stage24"
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.41.0",
  "use_swiglu_ffn": false,
  "initial_learning_rate": 0.001,
  "train_batch_size": 64,
  "eval_batch_size": 64,
  "optimizer": {
    "type": "Adam"
  },
  "lr_scheduler_type": {
    "type": "ReduceLROnPlateau"
  },
  "patience_lr_scheduler": 5,
  "factor_lr_scheduler": 0.1,
  "weight_decay": 0.0001,
  "early_stopping_patience": 10,
  "freeze_encoder": true,
  "data_augmentation": true,
  "num_epochs": 150
}