amaye15 committed
Commit c1de723
1 Parent(s): 7f77415

Upload BeitForImageClassification

Files changed (2):
  1. config.json +42 -36
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,71 +1,77 @@
 {
-  "_name_or_path": "microsoft/swinv2-base-patch4-window16-256",
+  "_name_or_path": "/Users/andrewmayes/Dev/rotate-net/results/checkpoint-9560",
+  "add_fpn": false,
   "architectures": [
-    "Swinv2ForImageClassification"
+    "BeitForImageClassification"
   ],
   "attention_probs_dropout_prob": 0.0,
-  "depths": [
-    2,
-    2,
-    18,
-    2
-  ],
+  "auxiliary_channels": 256,
+  "auxiliary_concat_input": false,
+  "auxiliary_loss_weight": 0.4,
+  "auxiliary_num_convs": 1,
   "drop_path_rate": 0.1,
-  "embed_dim": 128,
-  "encoder_stride": 32,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
-  "hidden_size": 1024,
+  "hidden_size": 768,
   "id2label": {
     "0": "0",
     "1": "90",
     "2": "180",
     "3": "270"
   },
-  "image_size": 256,
+  "image_size": 384,
   "initializer_range": 0.02,
+  "intermediate_size": 3072,
   "label2id": {
     "0": 0,
     "180": 2,
     "270": 3,
     "90": 1
   },
-  "layer_norm_eps": 1e-05,
-  "mlp_ratio": 4.0,
-  "model_type": "swinv2",
+  "layer_norm_eps": 1e-12,
+  "layer_scale_init_value": 0.1,
+  "model_type": "beit",
+  "num_attention_heads": 12,
   "num_channels": 3,
-  "num_heads": [
-    4,
-    8,
-    16,
-    32
-  ],
-  "num_layers": 4,
+  "num_hidden_layers": 12,
   "out_features": [
-    "stage4"
+    "stage12"
   ],
   "out_indices": [
-    4
+    12
   ],
-  "patch_size": 4,
-  "path_norm": true,
-  "pretrained_window_sizes": [
-    0,
-    0,
-    0,
-    0
+  "patch_size": 16,
+  "pool_scales": [
+    1,
+    2,
+    3,
+    6
   ],
   "problem_type": "single_label_classification",
-  "qkv_bias": true,
+  "reshape_hidden_states": true,
+  "semantic_loss_ignore_index": 255,
   "stage_names": [
     "stem",
     "stage1",
     "stage2",
     "stage3",
-    "stage4"
+    "stage4",
+    "stage5",
+    "stage6",
+    "stage7",
+    "stage8",
+    "stage9",
+    "stage10",
+    "stage11",
+    "stage12"
   ],
   "torch_dtype": "float32",
-  "transformers_version": "4.44.1",
-  "use_absolute_embeddings": false,
-  "window_size": 16
+  "transformers_version": "4.45.1",
+  "use_absolute_position_embeddings": false,
+  "use_auxiliary_head": true,
+  "use_mask_token": false,
+  "use_mean_pooling": true,
+  "use_relative_position_bias": true,
+  "use_shared_relative_position_bias": false,
+  "vocab_size": 8192
 }
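The new config describes a BEiT image classifier whose id2label maps four classes to rotation angles (0, 90, 180, 270 degrees) at a 384-pixel input size. A minimal usage sketch follows, assuming the checkpoint is pulled from a Hugging Face repo and that an image processor config is also present there; the repo id and image path are placeholders, not taken from this commit page.

```python
# Sketch: load the BEiT rotation classifier and predict an image's rotation.
# "amaye15/<repo-name>" and "example.jpg" are hypothetical placeholders.
from transformers import AutoImageProcessor, AutoModelForImageClassification
from PIL import Image

repo_id = "amaye15/<repo-name>"  # placeholder repo id

processor = AutoImageProcessor.from_pretrained(repo_id)
model = AutoModelForImageClassification.from_pretrained(repo_id)

image = Image.open("example.jpg")  # placeholder input image
inputs = processor(images=image, return_tensors="pt")
logits = model(**inputs).logits

# id2label in the config maps class indices to rotation angles as strings.
predicted = model.config.id2label[logits.argmax(-1).item()]
print(f"Predicted rotation: {predicted} degrees")
```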
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b272769585309fe151efb8d6cbe6ecb50b85b8f3e3dda661cedc2cac33a873ce
-size 347653688
+oid sha256:214914a8d51ae11d305b70a4b7d708b66cf652367af9d0e784ecd38098855a55
+size 343938968
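The model.safetensors entry is a Git LFS pointer: the oid is the SHA-256 of the weight file and size is its byte count. A small sketch, assuming the file has already been downloaded to a local path (the path below is a placeholder), shows how the download can be checked against the new pointer values.

```python
# Sketch: verify a downloaded model.safetensors against the LFS pointer above.
# "model.safetensors" is a placeholder local path.
import hashlib
import os

path = "model.safetensors"
expected_oid = "214914a8d51ae11d305b70a4b7d708b66cf652367af9d0e784ecd38098855a55"
expected_size = 343938968

h = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks to avoid loading the whole file into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```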