Update config.json
config.json CHANGED (+6 -3)
@@ -17,14 +17,16 @@
   "div_val": 4,
   "dropatt": 0.0,
   "dropout": 0.1,
+  "eos_token_id": 0,
   "ext_len": 0,
   "init": "normal",
   "init_range": 0.01,
   "init_std": 0.02,
+  "layer_norm_epsilon": 1e-05,
   "mem_len": 1600,
+  "model_type": "transfo-xl",
   "n_head": 16,
   "n_layer": 18,
-  "n_token": 267735,
   "pre_lnorm": false,
   "proj_init_std": 0.01,
   "same_length": true,
@@ -42,5 +44,6 @@
     true
   ],
   "tie_weight": true,
-  "untie_r": true
-}
+  "untie_r": true,
+  "vocab_size": 267735
+}
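For context, a minimal sketch of how the added keys would be read back, assuming this config is consumed by the Hugging Face `transformers` library (suggested by the new `"model_type": "transfo-xl"` entry); the local path `config.json` below is illustrative, not part of this commit:

```python
# Minimal sketch: load the updated config locally and check the newly added keys.
# Assumes the file sits in the working directory and targets transformers'
# Transformer-XL implementation.
from transformers import TransfoXLConfig

config = TransfoXLConfig.from_json_file("config.json")

print(config.model_type)          # "transfo-xl"
print(config.vocab_size)          # 267735, replacing the removed "n_token" key
print(config.eos_token_id)        # 0
print(config.layer_norm_epsilon)  # 1e-05
```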