{ "_name_or_path": "roberta-base", "architectures": [ "RobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "edges": [ 9.088523711398011e-08, 0.4886343777179718, 0.8423471450805664, 0.9731593132019043, 0.9975177049636841, 0.9998431205749512, 0.9999911785125732, 1.0 ], "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "LABEL_0" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "LABEL_0": 0 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "number_of_bins": 8, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.41.1", "type_vocab_size": 1, "use_cache": true, "vocab_size": 50273 }