Gunulhona committed
Commit 1763efa
1 Parent(s): d882dc5
Files changed (2)
  1. config.json +3 -3
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -11,13 +11,13 @@
   "decoder_attention_heads": 12,
   "decoder_ffn_dim": 3072,
   "decoder_layerdrop": 0.0,
-  "decoder_layers": 9,
+  "decoder_layers": 12,
   "decoder_start_token_id": 2,
   "dropout": 0.1,
   "encoder_attention_heads": 12,
   "encoder_ffn_dim": 3072,
   "encoder_layerdrop": 0.0,
-  "encoder_layers": 6,
+  "encoder_layers": 3,
   "eos_token_id": 2,
   "forced_eos_token_id": 2,
   "id2label": {
@@ -35,7 +35,7 @@
   },
   "max_position_embeddings": 1026,
   "model_type": "bart",
-  "num_hidden_layers": 6,
+  "num_hidden_layers": 3,
   "pad_token_id": 3,
   "scale_embedding": false,
   "torch_dtype": "float32",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6f28b2c677434bb2b4f8fe0bd703a8ee4170287ded078b001fe1bfdcb848e541
-size 435663781
+oid sha256:df852d5a5f43969e22b9b5dfd1f77d555ea711d9d1b596dad321edb022650727
+size 549110281
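
pytorch_model.bin is tracked with Git LFS, so the diff only touches the three-line pointer file (spec version, content SHA-256, byte size); the actual weights live in LFS storage. A minimal verification sketch, assuming the real file has already been fetched locally (e.g. via git lfs pull):

# Check a local pytorch_model.bin against the new LFS pointer above.
import hashlib
from pathlib import Path

EXPECTED_OID = "df852d5a5f43969e22b9b5dfd1f77d555ea711d9d1b596dad321edb022650727"
EXPECTED_SIZE = 549110281

path = Path("pytorch_model.bin")
assert path.stat().st_size == EXPECTED_SIZE, "size does not match the pointer"

sha256 = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)
assert sha256.hexdigest() == EXPECTED_OID, "sha256 does not match the pointer"
print("pytorch_model.bin matches the LFS pointer")
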