oweller2 committed on
Commit
27bdfe2
1 Parent(s): 11a83af
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -69,9 +69,9 @@
69
  "num_attention_heads": 12,
70
  "num_hidden_layers": 22,
71
  "num_initial_layers": 1,
72
- "pad_logits": true,
73
  "pad_token_id": 50283,
74
- "padding": "unpadded",
75
  "pooling_type": "cls",
76
  "position_embedding_type": "absolute",
77
  "rotary_emb_base": 10000.0,
@@ -82,7 +82,7 @@
82
  "sliding_window": 128,
83
  "transformers_version": "4.44.1",
84
  "type_vocab_size": 2,
85
- "unpad_embeddings": true,
86
  "use_cache": true,
87
  "use_fa2": true,
88
  "use_sdpa_attn_mask": false,
 
69
  "num_attention_heads": 12,
70
  "num_hidden_layers": 22,
71
  "num_initial_layers": 1,
72
+ "pad_logits": false,
73
  "pad_token_id": 50283,
74
+ "padding": "padded",
75
  "pooling_type": "cls",
76
  "position_embedding_type": "absolute",
77
  "rotary_emb_base": 10000.0,
 
82
  "sliding_window": 128,
83
  "transformers_version": "4.44.1",
84
  "type_vocab_size": 2,
85
+ "unpad_embeddings": false,
86
  "use_cache": true,
87
  "use_fa2": true,
88
  "use_sdpa_attn_mask": false,