emaeon committed on
Commit e4517f3
Parent: 139f638

Training in progress, epoch 0

Files changed (4)
  1. config.json +166 -0
  2. preprocessor_config.json +22 -0
  3. pytorch_model.bin +3 -0
  4. training_args.bin +3 -0
config.json ADDED
@@ -0,0 +1,166 @@
+ {
+   "_name_or_path": "google/vit-base-patch16-224-in21k",
+   "architectures": [
+     "ViTForImageClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "encoder_stride": 16,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "10_SD22-0010",
+     "1": "11_SD22-0011",
+     "2": "12_SD22-0012",
+     "3": "13_SD21-0013",
+     "4": "14_SD21-0014",
+     "5": "15_SD21-0015",
+     "6": "16_SD21-0016",
+     "7": "17_SD22-0017",
+     "8": "18_SD22-0018",
+     "9": "19_SD22-0019",
+     "10": "1_SD18-0001",
+     "11": "20_SD22-0020",
+     "12": "21_SD22-0021",
+     "13": "22_SD22-0022",
+     "14": "23_SD22-0023",
+     "15": "24_SD22-0024",
+     "16": "25_SD22-0025",
+     "17": "26_SD22-0026",
+     "18": "27_SD22-0027",
+     "19": "28_SD22-0028",
+     "20": "29_SD22-0029",
+     "21": "2_SD22-0002",
+     "22": "30_SD22-0030",
+     "23": "31_SD22-0031",
+     "24": "32_SD22-0032",
+     "25": "33_SD22-0033",
+     "26": "34_SD21-0034",
+     "27": "35_SD21-0035",
+     "28": "36_SD22-0036",
+     "29": "37_AX21-0037",
+     "30": "38_AX22-0038",
+     "31": "39_HAX22-0039",
+     "32": "3_SD22-0003",
+     "33": "40_HLAX22-0020",
+     "34": "41_AX22-0041",
+     "35": "42_AX22-0042",
+     "36": "43_LAX22-0043",
+     "37": "44_HAX22-0044",
+     "38": "45_SC22-0045",
+     "39": "46_CAL22-0046",
+     "40": "47_CAL22-0047",
+     "41": "48_SC22-0048",
+     "42": "49_SC23-0049",
+     "43": "4_SD22-0004",
+     "44": "50_SC23-0050",
+     "45": "51_SAB22-0051",
+     "46": "52_LW21-0052",
+     "47": "53_NOR22-0053",
+     "48": "54_NOR22-0054",
+     "49": "55_NOR22-0055",
+     "50": "5_SD22-0005",
+     "51": "6_SD22-0006",
+     "52": "7_SD22-0007",
+     "53": "80_LW",
+     "54": "81_LW",
+     "55": "82_NOR",
+     "56": "83_NOR",
+     "57": "84_NOR",
+     "58": "85_NOR",
+     "59": "86_NOR",
+     "60": "87_NOR",
+     "61": "88_NOR",
+     "62": "89_NOR(HAX)",
+     "63": "8_SD22-0008",
+     "64": "90_LW_B",
+     "65": "91_LW_B",
+     "66": "92_NOR_B",
+     "67": "93_NOR_B",
+     "68": "9_SD22-0009"
+   },
+   "image_size": 224,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "10_SD22-0010": 0,
+     "11_SD22-0011": 1,
+     "12_SD22-0012": 2,
+     "13_SD21-0013": 3,
+     "14_SD21-0014": 4,
+     "15_SD21-0015": 5,
+     "16_SD21-0016": 6,
+     "17_SD22-0017": 7,
+     "18_SD22-0018": 8,
+     "19_SD22-0019": 9,
+     "1_SD18-0001": 10,
+     "20_SD22-0020": 11,
+     "21_SD22-0021": 12,
+     "22_SD22-0022": 13,
+     "23_SD22-0023": 14,
+     "24_SD22-0024": 15,
+     "25_SD22-0025": 16,
+     "26_SD22-0026": 17,
+     "27_SD22-0027": 18,
+     "28_SD22-0028": 19,
+     "29_SD22-0029": 20,
+     "2_SD22-0002": 21,
+     "30_SD22-0030": 22,
+     "31_SD22-0031": 23,
+     "32_SD22-0032": 24,
+     "33_SD22-0033": 25,
+     "34_SD21-0034": 26,
+     "35_SD21-0035": 27,
+     "36_SD22-0036": 28,
+     "37_AX21-0037": 29,
+     "38_AX22-0038": 30,
+     "39_HAX22-0039": 31,
+     "3_SD22-0003": 32,
+     "40_HLAX22-0020": 33,
+     "41_AX22-0041": 34,
+     "42_AX22-0042": 35,
+     "43_LAX22-0043": 36,
+     "44_HAX22-0044": 37,
+     "45_SC22-0045": 38,
+     "46_CAL22-0046": 39,
+     "47_CAL22-0047": 40,
+     "48_SC22-0048": 41,
+     "49_SC23-0049": 42,
+     "4_SD22-0004": 43,
+     "50_SC23-0050": 44,
+     "51_SAB22-0051": 45,
+     "52_LW21-0052": 46,
+     "53_NOR22-0053": 47,
+     "54_NOR22-0054": 48,
+     "55_NOR22-0055": 49,
+     "5_SD22-0005": 50,
+     "6_SD22-0006": 51,
+     "7_SD22-0007": 52,
+     "80_LW": 53,
+     "81_LW": 54,
+     "82_NOR": 55,
+     "83_NOR": 56,
+     "84_NOR": 57,
+     "85_NOR": 58,
+     "86_NOR": 59,
+     "87_NOR": 60,
+     "88_NOR": 61,
+     "89_NOR(HAX)": 62,
+     "8_SD22-0008": 63,
+     "90_LW_B": 64,
+     "91_LW_B": 65,
+     "92_NOR_B": 66,
+     "93_NOR_B": 67,
+     "9_SD22-0009": 68
+   },
+   "layer_norm_eps": 1e-12,
+   "model_type": "vit",
+   "num_attention_heads": 12,
+   "num_channels": 3,
+   "num_hidden_layers": 12,
+   "patch_size": 16,
+   "problem_type": "single_label_classification",
+   "qkv_bias": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.34.1"
+ }
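This config fine-tunes google/vit-base-patch16-224-in21k into a 69-class single-label image classifier, with id2label/label2id carrying the class names. A minimal inference sketch under assumptions not stated in the commit: the path "./" stands in for a local clone of this repo (substitute the real repo id), and "example.jpg" is a placeholder image.

import torch
from PIL import Image
from transformers import ViTForImageClassification, ViTImageProcessor

checkpoint = "./"  # hypothetical local checkout; replace with the actual repo id or path
model = ViTForImageClassification.from_pretrained(checkpoint)
processor = ViTImageProcessor.from_pretrained(checkpoint)

image = Image.open("example.jpg").convert("RGB")  # any RGB input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 69), one logit per class

pred = logits.argmax(-1).item()
print(model.config.id2label[pred])  # maps the winning index back to a label, e.g. "10_SD22-0010"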
preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+   "do_normalize": true,
+   "do_rescale": true,
+   "do_resize": true,
+   "image_mean": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "image_processor_type": "ViTImageProcessor",
+   "image_std": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "resample": 2,
+   "rescale_factor": 0.00392156862745098,
+   "size": {
+     "height": 224,
+     "width": 224
+   }
+ }
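This preprocessor resizes to 224×224 with bilinear resampling (resample 2 is PIL's BILINEAR), rescales pixels by 1/255 (0.00392156862745098), and normalizes each channel with mean and std 0.5, mapping values into [-1, 1]. A sketch that reproduces the pipeline by hand and checks it against ViTImageProcessor; the "./" and "example.jpg" paths are placeholders.

import numpy as np
from PIL import Image
from transformers import ViTImageProcessor

processor = ViTImageProcessor.from_pretrained("./")  # hypothetical local checkout
image = Image.open("example.jpg").convert("RGB")

# Manual pipeline mirroring the config above:
resized = image.resize((224, 224), resample=Image.BILINEAR)    # do_resize, resample=2
arr = np.asarray(resized).astype(np.float32) * (1.0 / 255.0)   # do_rescale, rescale_factor
arr = (arr - 0.5) / 0.5                                        # do_normalize -> [-1, 1]
manual = arr.transpose(2, 0, 1)                                # HWC -> CHW

auto = processor(images=image, return_tensors="np")["pixel_values"][0]
print(np.allclose(manual, auto, atol=1e-5))  # expected: True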
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0cf32e0f4a382e521d97b7b174175d6336bcb7750797c0bc6c36f6ef48a74a73
+ size 343478253
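The weights are stored via Git LFS, so the repo itself tracks only this pointer: the blob's sha256 (oid) and byte size. A sketch that verifies a downloaded pytorch_model.bin against the pointer above; the local path is an assumption.

import hashlib
import os

path = "pytorch_model.bin"  # local download of the LFS blob
expected_oid = "0cf32e0f4a382e521d97b7b174175d6336bcb7750797c0bc6c36f6ef48a74a73"
expected_size = 343478253

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

print(h.hexdigest() == expected_oid and os.path.getsize(path) == expected_size)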
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8593abd01efef2c9dbf6d76e7b5408ed62a826588f09f6bea022dd97a4457773
+ size 4155
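training_args.bin is the pickled TrainingArguments object that transformers' Trainer saves alongside checkpoints. A sketch of inspecting it, assuming a compatible transformers version is installed; recent torch releases require weights_only=False to unpickle non-tensor objects.

import torch

# The pickle references transformers.TrainingArguments, so transformers must be importable.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)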