{
  "_name_or_path": "jonglet/mbvit_small",
  "architectures": [
    "MobileViTForImageClassification"
  ],
  "aspp_dropout_prob": 0.1,
  "aspp_out_channels": 256,
  "atrous_rates": [
    6,
    12,
    18
  ],
  "attention_probs_dropout_prob": 0.0,
  "classifier_dropout_prob": 0.1,
  "conv_kernel_size": 3,
  "expand_ratio": 4.0,
  "hidden_act": "silu",
  "hidden_dropout_prob": 0.1,
  "hidden_sizes": [
    144,
    192,
    240
  ],
  "id2label": {
    "0": "1",
    "1": "B",
    "10": "p",
    "11": "q",
    "12": "r",
    "2": "K",
    "3": "N",
    "4": "P",
    "5": "Q",
    "6": "R",
    "7": "b",
    "8": "k",
    "9": "n"
  },
  "image_size": 256,
  "initializer_range": 0.02,
  "label2id": {
    "1": "0",
    "B": "1",
    "K": "2",
    "N": "3",
    "P": "4",
    "Q": "5",
    "R": "6",
    "b": "7",
    "k": "8",
    "n": "9",
    "p": "10",
    "q": "11",
    "r": "12"
  },
  "layer_norm_eps": 1e-05,
  "mlp_ratio": 2.0,
  "model_type": "mobilevit",
  "neck_hidden_sizes": [
    16,
    32,
    64,
    96,
    128,
    160,
    640
  ],
  "num_attention_heads": 4,
  "num_channels": 3,
  "output_stride": 32,
  "patch_size": 2,
  "problem_type": "single_label_classification",
  "qkv_bias": true,
  "semantic_loss_ignore_index": 255,
  "torch_dtype": "float32",
  "transformers_version": "4.29.2"
}
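
For reference, a minimal sketch of loading a checkpoint that uses this configuration with the transformers library. The repository id "jonglet/mbvit_small" is taken from the "_name_or_path" field above and is an assumption about where the checkpoint actually lives; the input filename is purely illustrative.

from PIL import Image
from transformers import AutoImageProcessor, MobileViTForImageClassification

# Assumed Hub repository id, copied from "_name_or_path"; adjust if the checkpoint is hosted elsewhere.
model_id = "jonglet/mbvit_small"

# The image processor resizes and normalizes inputs to the 256x256 resolution this config expects.
processor = AutoImageProcessor.from_pretrained(model_id)
model = MobileViTForImageClassification.from_pretrained(model_id)

# Classify one image into the 13 classes listed in id2label above ("example.png" is a hypothetical file).
image = Image.open("example.png").convert("RGB")
inputs = processor(images=image, return_tensors="pt")
logits = model(**inputs).logits
predicted_label = model.config.id2label[logits.argmax(-1).item()]
print(predicted_label)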