{
  "_name_or_path": "distilbert-base-multilingual-cased",
  "activation": "gelu",
  "architectures": [
    "CustomDistilBertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "ar",
    "1": "bg",
    "2": "de",
    "3": "el",
    "4": "en",
    "5": "es",
    "6": "fr",
    "7": "hi",
    "8": "it",
    "9": "ja",
    "10": "nl",
    "11": "pl",
    "12": "pt",
    "13": "ru",
    "14": "sw",
    "15": "th",
    "16": "tr",
    "17": "ur",
    "18": "vi",
    "19": "zh"
  },
  "initializer_range": 0.02,
  "label2id": {
    "ar": 0,
    "bg": 1,
    "de": 2,
    "el": 3,
    "en": 4,
    "es": 5,
    "fr": 6,
    "hi": 7,
    "it": 8,
    "ja": 9,
    "nl": 10,
    "pl": 11,
    "pt": 12,
    "ru": 13,
    "sw": 14,
    "th": 15,
    "tr": 16,
    "ur": 17,
    "vi": 18,
    "zh": 19
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "output_past": true,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.46.1",
  "vocab_size": 119547
}
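This config describes a DistilBERT sequence classifier fine-tuned from distilbert-base-multilingual-cased to predict one of 20 language labels (see id2label). Below is a minimal sketch of running inference with this config, assuming the checkpoint is hosted in a Hugging Face repo; "your-username/your-model-repo" is a hypothetical placeholder, not the actual repo id. Note that the listed architecture is a custom class (CustomDistilBertForSequenceClassification), so loading may require the repo's own model code rather than the stock auto class used here.

```python
# Hedged sketch: language identification with this config's label mapping.
# Assumptions: repo id is a placeholder; the checkpoint loads through the
# stock AutoModelForSequenceClassification despite the custom class name.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "your-username/your-model-repo"  # hypothetical placeholder

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

# Tokenize within the config's 512-token position limit.
inputs = tokenizer(
    "Bonjour tout le monde",
    return_tensors="pt",
    truncation=True,
    max_length=512,
)

with torch.no_grad():
    logits = model(**inputs).logits

# Map the highest-scoring class index back to its language code
# via the id2label table defined in this config.
pred = logits.argmax(dim=-1).item()
print(model.config.id2label[pred])  # e.g. "fr" for French input
```

The seq_classif_dropout of 0.2 applies only to the classification head during training; at inference time, model.eval() disables all dropout.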