Pieter Delobelle committed on
Commit b9aa40c
1 Parent(s): 27b9c63

removed params

Files changed (1)
  1. parameters.json +0 -51
parameters.json DELETED
@@ -1,51 +0,0 @@
- {
- "force": false,
- "dump_path": "serialization_dir/distilrobbert-v2-mini",
- "data_file": "data/binarized_text_mini.pdelobelle-robbert-v2-dutch-base.pickle",
- "student_type": "roberta",
- "student_config": "training_configs/distilrobbert-base.json",
- "student_pretrained_weights": null,
- "teacher_type": "roberta",
- "teacher_name": "pdelobelle/robbert-v2-dutch-base",
- "temperature": 2.0,
- "alpha_ce": 5.0,
- "alpha_mlm": 2.0,
- "alpha_clm": 0.0,
- "alpha_mse": 0.0,
- "alpha_cos": 1.0,
- "mlm": true,
- "mlm_mask_prop": 0.15,
- "word_mask": 0.8,
- "word_keep": 0.1,
- "word_rand": 0.1,
- "mlm_smoothing": 0.7,
- "token_counts": "data/token_counts_mini.pdelobelle-robbert-v2-dutch-base.pickle",
- "restrict_ce_to_mask": false,
- "freeze_pos_embs": true,
- "freeze_token_type_embds": false,
- "n_epoch": 3,
- "batch_size": 5,
- "group_by_size": true,
- "gradient_accumulation_steps": 100,
- "warmup_prop": 0.05,
- "weight_decay": 0.01,
- "learning_rate": 0.0005,
- "adam_epsilon": 1e-06,
- "max_grad_norm": 5.0,
- "initializer_range": 0.02,
- "fp16": false,
- "fp16_opt_level": "O1",
- "gpus": 1,
- "local_rank": 0,
- "seed": 56,
- "log_interval": 500,
- "checkpoint_interval": 4000,
- "n_nodes": 1,
- "node_id": 0,
- "global_rank": 0,
- "world_size": 1,
- "n_gpu_per_node": 1,
- "multi_gpu": false,
- "is_master": true,
- "multi_node": false
- }
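The deleted file is the hyperparameter dump of a DistilBERT-style distillation run: the `alpha_*` fields weight the individual loss terms and `temperature` softens the teacher's output distribution. Below is a minimal sketch of how such a config is typically consumed to build the combined loss; the function name and tensor shapes are illustrative assumptions, not code from this repository.

```python
import json

import torch
import torch.nn.functional as F

# Load the distillation hyperparameters (the file removed in this commit).
with open("parameters.json") as f:
    params = json.load(f)


def combined_distillation_loss(s_logits, t_logits, labels, s_hidden, t_hidden):
    """Weighted sum of distillation losses using the alpha_* weights and
    temperature from parameters.json (hypothetical helper for illustration)."""
    T = params["temperature"]

    # Soft-target loss: KL divergence between temperature-scaled student
    # and teacher distributions, scaled by T^2 as in the DistilBERT recipe.
    loss_ce = F.kl_div(
        F.log_softmax(s_logits / T, dim=-1),
        F.softmax(t_logits / T, dim=-1),
        reduction="batchmean",
    ) * (T ** 2)

    # Standard masked-language-modelling loss on the masked positions.
    loss_mlm = F.cross_entropy(
        s_logits.view(-1, s_logits.size(-1)), labels.view(-1), ignore_index=-100
    )

    # Cosine alignment between student and teacher hidden states.
    target = torch.ones(s_hidden.size(0) * s_hidden.size(1), device=s_hidden.device)
    loss_cos = F.cosine_embedding_loss(
        s_hidden.view(-1, s_hidden.size(-1)),
        t_hidden.view(-1, t_hidden.size(-1)),
        target,
    )

    return (
        params["alpha_ce"] * loss_ce
        + params["alpha_mlm"] * loss_mlm
        + params["alpha_cos"] * loss_cos
    )
```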