{
"module": "keras_hub.src.models.efficientnet.efficientnet_image_classifier",
"class_name": "EfficientNetImageClassifier",
"config": {
"backbone": {
"module": "keras_hub.src.models.efficientnet.efficientnet_backbone",
"class_name": "EfficientNetBackbone",
"config": {
"name": "efficient_net_backbone",
"trainable": true,
"width_coefficient": 1.0,
"depth_coefficient": 1.0,
"dropout": 0,
"depth_divisor": 8,
"min_depth": null,
"activation": "swish",
"input_shape": [
null,
null,
3
],
"stackwise_kernel_sizes": [
3,
3,
5,
3,
5,
5,
3
],
"stackwise_num_repeats": [
1,
2,
2,
3,
3,
4,
1
],
"stackwise_input_filters": [
32,
16,
24,
40,
80,
112,
192
],
"stackwise_output_filters": [
16,
24,
40,
80,
112,
192,
320
],
"stackwise_expansion_ratios": [
1,
6,
6,
6,
6,
6,
6
],
"stackwise_squeeze_and_excite_ratios": [
0.25,
0.25,
0.25,
0.25,
0.25,
0.25,
0.25
],
"stackwise_strides": [
1,
2,
2,
2,
1,
2,
1
],
"stackwise_block_types": [
"v1",
"v1",
"v1",
"v1",
"v1",
"v1",
"v1"
],
"include_stem_padding": true,
"use_depth_divisor_as_min_depth": true,
"cap_round_filter_decrease": true,
"stem_conv_padding": "valid",
"batch_norm_momentum": 0.9,
"batch_norm_epsilon": 1e-05
},
"registered_name": "keras_hub>EfficientNetBackbone"
},
"preprocessor": {
"module": "keras_hub.src.models.efficientnet.efficientnet_image_classifier_preprocessor",
"class_name": "EfficientNetImageClassifierPreprocessor",
"config": {
"name": "efficient_net_image_classifier_preprocessor",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"image_converter": {
"module": "keras_hub.src.models.efficientnet.efficientnet_image_converter",
"class_name": "EfficientNetImageConverter",
"config": {
"name": "efficient_net_image_converter",
"trainable": true,
"dtype": {
"module": "keras",
"class_name": "DTypePolicy",
"config": {
"name": "float32"
},
"registered_name": null
},
"image_size": [
224,
224
],
"scale": [
0.017124753831663668,
0.01750700280112045,
0.017429193899782133
],
"offset": [
-2.1179039301310043,
-2.0357142857142856,
-1.8044444444444445
],
"interpolation": "bicubic",
"crop_to_aspect_ratio": true
},
"registered_name": "keras_hub>EfficientNetImageConverter"
},
"config_file": "preprocessor.json"
},
"registered_name": "keras_hub>EfficientNetImageClassifierPreprocessor"
},
"name": "efficient_net_image_classifier",
"num_classes": 1000,
"pooling": "avg",
"activation": null,
"dropout": 0.0
},
"registered_name": "keras_hub>EfficientNetImageClassifier"
}
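
How a config like this is typically consumed (a minimal sketch, not part of the config itself): it assumes keras_hub is installed and that this config.json sits in a preset directory alongside preprocessor.json and the weight files; the local path below is hypothetical, and num_classes simply mirrors the "num_classes" field above.

import keras_hub

# Rebuild the EfficientNetImageClassifier described by config.json from its
# preset directory ("./efficientnet_b0_preset" is a stand-in path).
classifier = keras_hub.models.EfficientNetImageClassifier.from_preset(
    "./efficientnet_b0_preset",
    num_classes=1000,  # matches "num_classes" in the config above
)

# The attached preprocessor resizes inputs to 224x224 (bicubic, cropped to
# aspect ratio) and applies the per-channel "scale"/"offset" normalization
# before the backbone runs.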