oweller2 committed · Commit 0c3adcf · Parent(s): ecbaeab

switch back to pad

Files changed:
- config.json (+3 -3)
- pytorch_model.bin (+1 -1)
config.json CHANGED
@@ -69,9 +69,9 @@
   "num_attention_heads": 12,
   "num_hidden_layers": 22,
   "num_initial_layers": 1,
-  "pad_logits":
+  "pad_logits": true,
   "pad_token_id": null,
-  "padding": "
+  "padding": "unpadded",
   "pooling_type": "cls",
   "position_embedding_type": "absolute",
   "rotary_emb_base": 10000.0,
@@ -82,7 +82,7 @@
   "sliding_window": 128,
   "transformers_version": "4.44.1",
   "type_vocab_size": 2,
-  "unpad_embeddings":
+  "unpad_embeddings": true,
   "use_cache": true,
   "use_fa2": true,
   "use_sdpa_attn_mask": false,
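Not part of the commit itself: a minimal sketch of how one might confirm the three padding-related fields after checking out this revision. The local path "config.json" is an assumption; point it at your copy of the file from this repo.

import json

# Assumption: "config.json" is the file from this repository at commit 0c3adcf.
with open("config.json") as f:
    cfg = json.load(f)

# The three fields touched by commit 0c3adcf ("switch back to pad").
for key in ("pad_logits", "padding", "unpad_embeddings"):
    print(f"{key} = {cfg.get(key)!r}")

# Expected output at this revision:
#   pad_logits = True
#   padding = 'unpadded'
#   unpad_embeddings = True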
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8609b713f7a977d8e827f99e7f283e62cd1f08e24d639ea6b16a5eac25316093
 size 598685038
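Also not from the repo: a small sketch that checks a fully materialized pytorch_model.bin (i.e. after a git lfs pull or Hub download, which is assumed here) against the oid and size recorded in this LFS pointer.

import hashlib

EXPECTED_OID = "8609b713f7a977d8e827f99e7f283e62cd1f08e24d639ea6b16a5eac25316093"
EXPECTED_SIZE = 598685038

sha = hashlib.sha256()
size = 0
# Assumption: "pytorch_model.bin" is the downloaded weights file, not the LFS pointer.
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)
        size += len(chunk)

print("sha256 matches pointer:", sha.hexdigest() == EXPECTED_OID)
print("size matches pointer:  ", size == EXPECTED_SIZE)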