{
  "_name_or_path": "AIMH/mental-roberta-large",
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "Feeling-bad-about-yourself-or-that-you-are-a-failure-or-have-let-yourself-or-your-family-down",
    "1": "Feeling-down-depressed-or-hopeless",
    "2": "Feeling-tired-or-having-little-energy",
    "3": "Little-interest-or-pleasure-in-doing",
    "4": "Moving-or-speaking-so-slowly-that-other-people-could-have-noticed-Or-the-opposite-being-so-fidgety-or-restless-that-you-have-been-moving-around-a-lot-more-than-usual",
    "5": "Poor-appetite-or-overeating",
    "6": "Thoughts-that-you-would-be-better-off-dead-or-of-hurting-yourself-in-some-way",
    "7": "Trouble-concentrating-on-things-such-as-reading-the-newspaper-or-watching-television",
    "8": "Trouble-falling-or-staying-asleep-or-sleeping-too-much"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Feeling-bad-about-yourself-or-that-you-are-a-failure-or-have-let-yourself-or-your-family-down": 0,
    "Feeling-down-depressed-or-hopeless": 1,
    "Feeling-tired-or-having-little-energy": 2,
    "Little-interest-or-pleasure-in-doing": 3,
    "Moving-or-speaking-so-slowly-that-other-people-could-have-noticed-Or-the-opposite-being-so-fidgety-or-restless-that-you-have-been-moving-around-a-lot-more-than-usual": 4,
    "Poor-appetite-or-overeating": 5,
    "Thoughts-that-you-would-be-better-off-dead-or-of-hurting-yourself-in-some-way": 6,
    "Trouble-concentrating-on-things-such-as-reading-the-newspaper-or-watching-television": 7,
    "Trouble-falling-or-staying-asleep-or-sleeping-too-much": 8
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "multi_label_classification",
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}