{
  "_name_or_path": "/Users/andrewmayes/Dev/rotate-net/results/checkpoint-9560",
  "add_fpn": false,
  "architectures": [
    "BeitForImageClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "auxiliary_channels": 256,
  "auxiliary_concat_input": false,
  "auxiliary_loss_weight": 0.4,
  "auxiliary_num_convs": 1,
  "drop_path_rate": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 768,
  "id2label": {
    "0": "0",
    "1": "90",
    "2": "180",
    "3": "270"
  },
  "image_size": 384,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "0": 0,
    "180": 2,
    "270": 3,
    "90": 1
  },
  "layer_norm_eps": 1e-12,
  "layer_scale_init_value": 0.1,
  "model_type": "beit",
  "num_attention_heads": 12,
  "num_channels": 3,
  "num_hidden_layers": 12,
  "out_features": [
    "stage12"
  ],
  "out_indices": [
    12
  ],
  "patch_size": 16,
  "pool_scales": [
    1,
    2,
    3,
    6
  ],
  "problem_type": "single_label_classification",
  "reshape_hidden_states": true,
  "semantic_loss_ignore_index": 255,
  "stage_names": [
    "stem",
    "stage1",
    "stage2",
    "stage3",
    "stage4",
    "stage5",
    "stage6",
    "stage7",
    "stage8",
    "stage9",
    "stage10",
    "stage11",
    "stage12"
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.45.1",
  "use_absolute_position_embeddings": false,
  "use_auxiliary_head": true,
  "use_mask_token": false,
  "use_mean_pooling": true,
  "use_relative_position_bias": true,
  "use_shared_relative_position_bias": false,
  "vocab_size": 8192
}
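This configuration describes a BEiT image-classification model (`BeitForImageClassification`, 384x384 input, patch size 16) whose four labels are rotation angles: 0, 90, 180, and 270 degrees. Below is a minimal sketch of how such a checkpoint could be loaded and queried with the Hugging Face `transformers` library. The checkpoint path and input image name are placeholders, and the sketch assumes an image-processor config was saved alongside this `config.json`; substitute the actual checkpoint directory or Hub repo id.

import torch
from PIL import Image
from transformers import AutoImageProcessor, BeitForImageClassification

# Hypothetical path: any directory (or Hub repo id) containing this config.json,
# the model weights, and a preprocessor_config.json.
model_path = "path/to/checkpoint"

processor = AutoImageProcessor.from_pretrained(model_path)
model = BeitForImageClassification.from_pretrained(model_path)
model.eval()

# Hypothetical input image; the processor resizes/normalizes it to 384x384 as per image_size.
image = Image.open("example.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 4): one score per rotation class

predicted_id = logits.argmax(-1).item()
# id2label maps class indices to rotation angles: "0", "90", "180", "270"
print(model.config.id2label[predicted_id])

Because `problem_type` is `single_label_classification`, taking the argmax over the four logits (or a softmax for per-class probabilities) is the standard way to read off the predicted rotation.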