{ "_name_or_path": "dandelin/vilt-b32-mlm", "architectures": [ "ViltForQuestionAnswering" ], "attention_probs_dropout_prob": 0.0, "hidden_act": "gelu", "hidden_dropout_prob": 0.0, "hidden_size": 768, "id2label": { "0": "10:00-17:00", "1": "hajo", "2": "06:00-18:00", "3": "08:30-19:17", "4": "assam_state_museum", "5": "hampton_inn", "6": "mayflower_hotel", "7": "kamakhya_temple", "8": "1751", "9": "20-250_rupees", "10": "14:30-17:30", "11": "04:00-21:00", "12": "free", "13": "8th-9th_century", "14": "sivasagar", "15": "gruham_sojourn_homestay", "16": "1583", "17": "1994", "18": "shreemoyee_inn", "19": "06:00-21:00", "20": "navagraha_temple", "21": "click_hotel", "22": "sachika_hotels", "23": "1725", "24": "1998", "25": "yes", "26": "guwahati", "27": "ugratara_devalaya", "28": "5_rupees", "29": "tirupati_balaji_temple ", "30": "12:00-16:00", "31": "1751_to_1769", "32": "guwahati_planetarium", "33": "30_rupees", "34": "05:30-20:00", "35": "rang_ghar", "36": "1940", "37": "temple_of_hajo ", "38": "sona_hotel" }, "image_size": 384, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "04:00-21:00": 11, "05:30-20:00": 34, "06:00-18:00": 2, "06:00-21:00": 19, "08:30-19:17": 3, "10:00-17:00": 0, "12:00-16:00": 30, "14:30-17:30": 10, "1583": 16, "1725": 23, "1751": 8, "1751_to_1769": 31, "1940": 36, "1994": 17, "1998": 24, "20-250_rupees": 9, "30_rupees": 33, "5_rupees": 28, "8th-9th_century": 13, "assam_state_museum": 4, "click_hotel": 21, "free": 12, "gruham_sojourn_homestay": 15, "guwahati": 26, "guwahati_planetarium": 32, "hajo": 1, "hampton_inn": 5, "kamakhya_temple": 7, "mayflower_hotel": 6, "navagraha_temple": 20, "rang_ghar": 35, "sachika_hotels": 22, "shreemoyee_inn": 18, "sivasagar": 14, "sona_hotel": 38, "temple_of_hajo ": 37, "tirupati_balaji_temple ": 29, "ugratara_devalaya": 27, "yes": 25 }, "layer_norm_eps": 1e-12, "max_image_length": -1, "max_position_embeddings": 40, "modality_type_vocab_size": 2, "model_type": "vilt", "num_attention_heads": 12, "num_channels": 3, "num_hidden_layers": 12, "num_images": -1, "patch_size": 32, "qkv_bias": true, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.35.0", "type_vocab_size": 2, "vocab_size": 30522 }