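# Training configuration: fine-tune roberta-large on the FaithCritic task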
accumulate_grad_batches: 1
adafactor: false
adam_epsilon: 1.0e-08
attention_dropout: null
cache_dir: null
config_name: ''
deterministic: false
do_eval: false
do_test: false
do_train: true
dropout_rate: null
eval_batch_size: 16
eval_dataset_path: ''
force_reinit: false
fp16: true  # mixed-precision (16-bit) training
gpus: -1  # -1 = use all available GPUs
gradient_clip_val: 1.0
learning_rate: 1.0e-05
lr_scheduler: linear
max_epochs: 10
max_history: 1
max_seq_length: 0
min_delta: 0.0
model_name_or_path: roberta-large
num_workers: 8
output_dir: ./checkpoints/roberta-large-faithcritic
overwrite_output_dir: false
pad_to_multiple_of: 8  # pad sequences to a multiple of 8 for efficient fp16 kernels
patience: 5  # early-stopping patience
predict_dataset_path: null
save_last: false
seed: 42
test_dataset_path: null
test_task: FaithCritic
tokenizer_name: null
train_batch_size: 16
train_dataset_path: ''
train_task: FaithCritic
val_check_interval: 0.5  # run validation every half epoch
warmup_ratio: 0.08  # fraction of training steps used for learning-rate warmup
warmup_steps: 0
weight_decay: 0.1