Update config.json
config.json · CHANGED · +6 -7
@@ -15,7 +15,9 @@
     512,
     512
   ],
-  "
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
@@ -24,14 +26,11 @@
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 4098,
+  "model_type": "longformer",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
-  "
-  "
-  "output_hidden_states": false,
-  "pruned_heads": {},
-  "torchscript": false,
+  "pad_token_id": 1,
+  "sep_token_id": 2,
   "type_vocab_size": 1,
-  "use_bfloat16": false,
   "vocab_size": 50265
 }
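For context, a minimal sketch of how the updated fields are picked up by the transformers library, assuming the edited config.json is available locally (the local path is an assumption, not part of this commit). The added "model_type": "longformer" is the key AutoConfig uses to resolve the config class, and the added token-id keys become attributes on the loaded config object.

# Minimal sketch, assuming the updated config.json sits in the current directory.
from transformers import LongformerConfig

config = LongformerConfig.from_json_file("config.json")

# "model_type": "longformer" identifies the config class (used by AutoConfig dispatch).
print(config.model_type)       # longformer

# The newly added special-token ids are exposed as attributes on the config.
print(config.bos_token_id)     # 0
print(config.eos_token_id)     # 2
print(config.pad_token_id)     # 1
print(config.sep_token_id)     # 2

# Keys removed in this commit (output_hidden_states, pruned_heads, torchscript,
# use_bfloat16) fall back to the PretrainedConfig defaults, so dropping them from
# config.json does not change behavior.
print(config.output_hidden_states)  # False (default)
print(config.torchscript)           # False (default)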