leobitz committed
Commit 4d15a0b
1 Parent(s): ba820ee

Upload folder using huggingface_hub

Files changed (1):
  config.json +3 -4
config.json CHANGED

@@ -1,13 +1,12 @@
 {
   "_name_or_path": "microsoft/deberta-v3-base",
   "architectures": [
-    "TsgDebertaV2ForSequenceClassification"
+    "DebertaV2ForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "backbone_output_dropout": 0.5,
   "chunk_overlap_size": 64,
   "classifier_dropout": 0.5,
-  "classifier_num_layers": 1,
   "classifier_prob": 0.5,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
@@ -27,8 +26,8 @@
   "max_len": 512,
   "max_position_embeddings": 512,
   "max_relative_positions": -1,
-  "model_name": "tsg-deberta",
-  "model_type": "tsg-deberta",
+  "model_name": "microsoft/deberta-v3-base",
+  "model_type": "deberta-v2",
   "norm_rel_ebd": "layer_norm",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,