---
library_name: transformers
tags: []
---
# Model Card for Model ID
<!-- Provide a quick summary of what the model is/does. -->
![image/png](https://cdn-uploads.huggingface.co/production/uploads/66790f78c411cf3d84e7ceb0/60cuWN1vE3e6UTBCt7S2p.png)
## Training hyperparameters
The following settings were used for fine-tuning:

- checkpoint: `AIMH/mental-roberta-large`
- learning_rate: 2e-5
- batch_size: 4
- num_train_epochs: 5
- weight_decay: 0.01
- metric_name: `accuracy`
- optimizer: AdamW
- test_size: 0.2
- logging_steps: 25
- column_item: `labels_array`
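
Below is a minimal sketch of how these hyperparameters could be wired into a `transformers` `Trainer` run. It is not the exact training script behind this model: the CSV file name, the `text` column, the number of labels, and the assumption that `labels_array` holds integer class ids are all placeholders.

```python
# Illustrative fine-tuning sketch using the hyperparameters listed above.
# Dataset path, column names, and num_labels are assumptions, not from the card.
import numpy as np
import evaluate
from datasets import load_dataset
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

checkpoint = "AIMH/mental-roberta-large"
learning_rate = 2e-5
batch_size = 4
num_train_epochs = 5
weight_decay = 0.01
metric_name = "accuracy"
test_size = 0.2
logging_steps = 25

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(
    checkpoint, num_labels=2  # num_labels is an assumption
)

# Placeholder data file; the card's column_item suggests the label column is
# "labels_array", assumed here to hold integer class ids.
dataset = load_dataset("csv", data_files="data.csv")["train"]
dataset = dataset.rename_column("labels_array", "label")
dataset = dataset.train_test_split(test_size=test_size)

def tokenize(batch):
    # "text" column name is an assumption
    return tokenizer(batch["text"], truncation=True)

tokenized = dataset.map(tokenize, batched=True)

metric = evaluate.load(metric_name)

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    predictions = np.argmax(logits, axis=-1)
    return metric.compute(predictions=predictions, references=labels)

args = TrainingArguments(
    output_dir="mental-roberta-large-finetuned",
    learning_rate=learning_rate,
    per_device_train_batch_size=batch_size,
    per_device_eval_batch_size=batch_size,
    num_train_epochs=num_train_epochs,
    weight_decay=weight_decay,
    logging_steps=logging_steps,
    optim="adamw_torch",  # AdamW, as listed above
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=tokenized["train"],
    eval_dataset=tokenized["test"],
    tokenizer=tokenizer,
    compute_metrics=compute_metrics,
)

trainer.train()
```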