{
"_name_or_path": "bert-base-multilingual-uncased",
"architectures": [
"BertForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": null,
"directionality": "bidi",
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "5RDO",
"1": "S0Z5",
"10": "S6MS",
"11": "JB2M",
"12": "1G29",
"13": "A97B",
"14": "GJL1",
"15": "QMUM",
"16": "AXS5",
"17": "JTV5",
"18": "IT6N",
"19": "956I",
"2": "DP3Q",
"20": "7U8O",
"21": "9FPZ",
"22": "1QU8",
"23": "TUHS",
"24": "I2WU",
"25": "A0J6",
"26": "S6X7",
"27": "4SJR",
"28": "CUIH",
"29": "SS0L",
"3": "FH4R",
"30": "IAS6",
"31": "ARDP",
"32": "B0V5",
"33": "1SL4",
"34": "9999",
"35": "1ZHJ",
"36": "TDD5",
"37": "R2L8",
"38": "4S57",
"39": "AJ9U",
"4": "R6UT",
"40": "DDES",
"41": "XYGP",
"5": "UJ35",
"6": "MDOL",
"7": "8888",
"8": "8EHB",
"9": "K0RI"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"1G29": 12,
"1QU8": 22,
"1SL4": 33,
"1ZHJ": 35,
"4S57": 38,
"4SJR": 27,
"5RDO": 0,
"7U8O": 20,
"8888": 7,
"8EHB": 8,
"956I": 19,
"9999": 34,
"9FPZ": 21,
"A0J6": 25,
"A97B": 13,
"AJ9U": 39,
"ARDP": 31,
"AXS5": 16,
"B0V5": 32,
"CUIH": 28,
"DDES": 40,
"DP3Q": 2,
"FH4R": 3,
"GJL1": 14,
"I2WU": 24,
"IAS6": 30,
"IT6N": 18,
"JB2M": 11,
"JTV5": 17,
"K0RI": 9,
"MDOL": 6,
"QMUM": 15,
"R2L8": 37,
"R6UT": 4,
"S0Z5": 1,
"S6MS": 10,
"S6X7": 26,
"SS0L": 29,
"TDD5": 36,
"TUHS": 23,
"UJ35": 5,
"XYGP": 41
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"pooler_fc_size": 768,
"pooler_num_attention_heads": 12,
"pooler_num_fc_layers": 3,
"pooler_size_per_head": 128,
"pooler_type": "first_token_transform",
"position_embedding_type": "absolute",
"problem_type": "single_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.24.0",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 105879
}