Model save

Files changed:

- README.md +161 -0
- config.json +539 -0
- model.safetensors +3 -0
- special_tokens_map.json +7 -0
- tokenizer.json +0 -0
- tokenizer_config.json +57 -0
- training_args.bin +3 -0
- vocab.txt +0 -0
README.md
ADDED
@@ -0,0 +1,161 @@
---
license: apache-2.0
base_model: distilbert/distilbert-base-uncased-finetuned-sst-2-english
tags:
- generated_from_trainer
metrics:
- accuracy
- f1
model-index:
- name: distilbert-base-uncased-finetuned-sst-2-english-finetuned-abstract_classification
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# distilbert-base-uncased-finetuned-sst-2-english-finetuned-abstract_classification

This model is a fine-tuned version of [distilbert/distilbert-base-uncased-finetuned-sst-2-english](https://huggingface.co/distilbert/distilbert-base-uncased-finetuned-sst-2-english) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3820
- Accuracy: 0.9803
- F1: 0.9709

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 1e-05
- train_batch_size: 64
- eval_batch_size: 64
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 100

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1     |
|:-------------:|:-----:|:----:|:---------------:|:--------:|:------:|
| 1.4187 | 1.0 | 4 | 1.1950 | 0.8071 | 0.7275 |
| 1.215 | 2.0 | 8 | 1.1708 | 0.8150 | 0.7405 |
| 1.2073 | 3.0 | 12 | 1.1419 | 0.8110 | 0.7359 |
| 1.2722 | 4.0 | 16 | 1.1119 | 0.8110 | 0.7360 |
| 1.1215 | 5.0 | 20 | 1.0880 | 0.8189 | 0.7488 |
| 1.1604 | 6.0 | 24 | 1.0609 | 0.8268 | 0.7587 |
| 1.1658 | 7.0 | 28 | 1.0354 | 0.8346 | 0.7683 |
| 1.1585 | 8.0 | 32 | 1.0155 | 0.8307 | 0.7639 |
| 1.1995 | 9.0 | 36 | 0.9936 | 0.8268 | 0.7596 |
| 1.084 | 10.0 | 40 | 0.9698 | 0.8268 | 0.7598 |
| 1.208 | 11.0 | 44 | 0.9477 | 0.8386 | 0.7755 |
| 1.0951 | 12.0 | 48 | 0.9297 | 0.8583 | 0.7979 |
| 1.042 | 13.0 | 52 | 0.9119 | 0.8543 | 0.7924 |
| 1.0197 | 14.0 | 56 | 0.8913 | 0.8543 | 0.7924 |
| 1.0083 | 15.0 | 60 | 0.8761 | 0.8583 | 0.7979 |
| 0.9577 | 16.0 | 64 | 0.8606 | 0.8543 | 0.7927 |
| 0.9542 | 17.0 | 68 | 0.8418 | 0.8543 | 0.7929 |
| 0.9632 | 18.0 | 72 | 0.8262 | 0.8543 | 0.7925 |
| 0.9265 | 19.0 | 76 | 0.8122 | 0.8543 | 0.7924 |
| 0.978 | 20.0 | 80 | 0.7951 | 0.8622 | 0.8070 |
| 0.8984 | 21.0 | 84 | 0.7810 | 0.8661 | 0.8124 |
| 0.8813 | 22.0 | 88 | 0.7684 | 0.8740 | 0.8227 |
| 0.8821 | 23.0 | 92 | 0.7550 | 0.8819 | 0.8328 |
| 0.8303 | 24.0 | 96 | 0.7419 | 0.8819 | 0.8341 |
| 0.833 | 25.0 | 100 | 0.7327 | 0.8898 | 0.8456 |
| 0.9008 | 26.0 | 104 | 0.7151 | 0.8976 | 0.8559 |
| 0.838 | 27.0 | 108 | 0.7035 | 0.9016 | 0.8592 |
| 0.7202 | 28.0 | 112 | 0.6964 | 0.9055 | 0.8641 |
| 0.7998 | 29.0 | 116 | 0.6803 | 0.9094 | 0.8711 |
| 0.7539 | 30.0 | 120 | 0.6693 | 0.9055 | 0.8656 |
| 0.7137 | 31.0 | 124 | 0.6625 | 0.9134 | 0.8766 |
| 0.8068 | 32.0 | 128 | 0.6536 | 0.9173 | 0.8824 |
| 0.7688 | 33.0 | 132 | 0.6393 | 0.9173 | 0.8806 |
| 0.7516 | 34.0 | 136 | 0.6308 | 0.9134 | 0.8777 |
| 0.7908 | 35.0 | 140 | 0.6251 | 0.9134 | 0.8764 |
| 0.6659 | 36.0 | 144 | 0.6141 | 0.9134 | 0.8761 |
| 0.7202 | 37.0 | 148 | 0.6043 | 0.9291 | 0.8986 |
| 0.6657 | 38.0 | 152 | 0.5966 | 0.9370 | 0.9099 |
| 0.6988 | 39.0 | 156 | 0.5886 | 0.9409 | 0.9142 |
| 0.7726 | 40.0 | 160 | 0.5799 | 0.9370 | 0.9100 |
| 0.5252 | 41.0 | 164 | 0.5716 | 0.9409 | 0.9141 |
| 0.6311 | 42.0 | 168 | 0.5650 | 0.9409 | 0.9142 |
| 0.6402 | 43.0 | 172 | 0.5583 | 0.9409 | 0.9147 |
| 0.6468 | 44.0 | 176 | 0.5513 | 0.9409 | 0.9147 |
| 0.6197 | 45.0 | 180 | 0.5437 | 0.9449 | 0.9200 |
| 0.6282 | 46.0 | 184 | 0.5371 | 0.9449 | 0.9200 |
| 0.6579 | 47.0 | 188 | 0.5313 | 0.9409 | 0.9142 |
| 0.6682 | 48.0 | 192 | 0.5237 | 0.9409 | 0.9142 |
| 0.6592 | 49.0 | 196 | 0.5168 | 0.9488 | 0.9258 |
| 0.547 | 50.0 | 200 | 0.5104 | 0.9488 | 0.9257 |
| 0.5069 | 51.0 | 204 | 0.5042 | 0.9488 | 0.9257 |
| 0.6015 | 52.0 | 208 | 0.4995 | 0.9567 | 0.9367 |
| 0.549 | 53.0 | 212 | 0.4935 | 0.9606 | 0.9425 |
| 0.6206 | 54.0 | 216 | 0.4870 | 0.9646 | 0.9482 |
| 0.5396 | 55.0 | 220 | 0.4821 | 0.9685 | 0.9541 |
| 0.5753 | 56.0 | 224 | 0.4773 | 0.9646 | 0.9482 |
| 0.5867 | 57.0 | 228 | 0.4732 | 0.9685 | 0.9542 |
| 0.5553 | 58.0 | 232 | 0.4685 | 0.9724 | 0.9596 |
| 0.4751 | 59.0 | 236 | 0.4641 | 0.9724 | 0.9596 |
| 0.5857 | 60.0 | 240 | 0.4588 | 0.9685 | 0.9538 |
| 0.5199 | 61.0 | 244 | 0.4563 | 0.9724 | 0.9596 |
| 0.5616 | 62.0 | 248 | 0.4535 | 0.9685 | 0.9538 |
| 0.5698 | 63.0 | 252 | 0.4481 | 0.9685 | 0.9538 |
| 0.5302 | 64.0 | 256 | 0.4435 | 0.9646 | 0.9479 |
| 0.5311 | 65.0 | 260 | 0.4405 | 0.9685 | 0.9537 |
| 0.5204 | 66.0 | 264 | 0.4385 | 0.9685 | 0.9537 |
| 0.4678 | 67.0 | 268 | 0.4334 | 0.9764 | 0.9653 |
| 0.5635 | 68.0 | 272 | 0.4297 | 0.9724 | 0.9595 |
| 0.5404 | 69.0 | 276 | 0.4275 | 0.9764 | 0.9653 |
| 0.5246 | 70.0 | 280 | 0.4256 | 0.9764 | 0.9653 |
| 0.4557 | 71.0 | 284 | 0.4236 | 0.9764 | 0.9653 |
| 0.5924 | 72.0 | 288 | 0.4215 | 0.9764 | 0.9653 |
| 0.5166 | 73.0 | 292 | 0.4178 | 0.9764 | 0.9653 |
| 0.375 | 74.0 | 296 | 0.4141 | 0.9764 | 0.9653 |
| 0.5337 | 75.0 | 300 | 0.4111 | 0.9764 | 0.9653 |
| 0.4728 | 76.0 | 304 | 0.4088 | 0.9764 | 0.9653 |
| 0.516 | 77.0 | 308 | 0.4070 | 0.9764 | 0.9653 |
| 0.4553 | 78.0 | 312 | 0.4051 | 0.9764 | 0.9653 |
| 0.4761 | 79.0 | 316 | 0.4034 | 0.9764 | 0.9653 |
| 0.4672 | 80.0 | 320 | 0.4011 | 0.9724 | 0.9595 |
| 0.5029 | 81.0 | 324 | 0.3990 | 0.9764 | 0.9653 |
| 0.4754 | 82.0 | 328 | 0.3973 | 0.9764 | 0.9653 |
| 0.4678 | 83.0 | 332 | 0.3962 | 0.9764 | 0.9653 |
| 0.4717 | 84.0 | 336 | 0.3950 | 0.9803 | 0.9708 |
| 0.4518 | 85.0 | 340 | 0.3935 | 0.9803 | 0.9709 |
| 0.5682 | 86.0 | 344 | 0.3916 | 0.9803 | 0.9709 |
| 0.4313 | 87.0 | 348 | 0.3900 | 0.9803 | 0.9709 |
| 0.4528 | 88.0 | 352 | 0.3883 | 0.9803 | 0.9709 |
| 0.5075 | 89.0 | 356 | 0.3871 | 0.9803 | 0.9709 |
| 0.4255 | 90.0 | 360 | 0.3865 | 0.9803 | 0.9709 |
| 0.4278 | 91.0 | 364 | 0.3860 | 0.9803 | 0.9709 |
| 0.5074 | 92.0 | 368 | 0.3855 | 0.9803 | 0.9709 |
| 0.5244 | 93.0 | 372 | 0.3848 | 0.9803 | 0.9709 |
| 0.4806 | 94.0 | 376 | 0.3839 | 0.9803 | 0.9709 |
| 0.4271 | 95.0 | 380 | 0.3832 | 0.9803 | 0.9709 |
| 0.4829 | 96.0 | 384 | 0.3827 | 0.9803 | 0.9709 |
| 0.4356 | 97.0 | 388 | 0.3823 | 0.9803 | 0.9709 |
| 0.5412 | 98.0 | 392 | 0.3821 | 0.9803 | 0.9709 |
| 0.4539 | 99.0 | 396 | 0.3820 | 0.9803 | 0.9709 |
| 0.4462 | 100.0 | 400 | 0.3820 | 0.9803 | 0.9709 |

### Framework versions

- Transformers 4.41.2
- Pytorch 2.0.1+cu117
- Datasets 2.19.1
- Tokenizers 0.19.1
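For context, the table logs 4 optimizer steps per epoch at batch size 64, which suggests a small training set (at most 256 examples per device). Below is a minimal sketch of how the reported hyperparameters map onto the Hugging Face `Trainer` API; the `output_dir`, the dataset wiring (omitted), and the F1 averaging mode are assumptions, since the commit does not include them.

```python
import numpy as np
from sklearn.metrics import accuracy_score, f1_score
from transformers import TrainingArguments

# Hyperparameters as reported in the model card above.
args = TrainingArguments(
    output_dir="out",                # assumed; not stated in the card
    learning_rate=1e-5,
    per_device_train_batch_size=64,
    per_device_eval_batch_size=64,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=100,
    evaluation_strategy="epoch",     # the card logs metrics once per epoch
)

# Adam with betas=(0.9, 0.999) and epsilon=1e-08 is the Trainer default,
# so the card's optimizer line reflects defaults rather than a custom choice.

def compute_metrics(eval_pred):
    """Accuracy and F1 as in the card; the averaging mode is an assumption."""
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    return {
        "accuracy": accuracy_score(labels, preds),
        "f1": f1_score(labels, preds, average="weighted"),
    }
```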
config.json
ADDED
@@ -0,0 +1,539 @@
{
  "_name_or_path": "distilbert/distilbert-base-uncased-finetuned-sst-2-english",
  "activation": "gelu",
  "architectures": [
    "DistilBertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "finetuning_task": "sst-2",
  "hidden_dim": 3072,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2",
    "3": "LABEL_3",
    "4": "LABEL_4",
    "5": "LABEL_5",
    "6": "LABEL_6",
    "7": "LABEL_7",
    "8": "LABEL_8",
    "9": "LABEL_9",
    "10": "LABEL_10",
    "11": "LABEL_11",
    "12": "LABEL_12",
    "13": "LABEL_13",
    "14": "LABEL_14",
    "15": "LABEL_15",
    "16": "LABEL_16",
    "17": "LABEL_17",
    "18": "LABEL_18",
    "19": "LABEL_19",
    "20": "LABEL_20",
    "21": "LABEL_21",
    "22": "LABEL_22",
    "23": "LABEL_23",
    "24": "LABEL_24",
    "25": "LABEL_25",
    "26": "LABEL_26",
    "27": "LABEL_27",
    "28": "LABEL_28",
    "29": "LABEL_29",
    "30": "LABEL_30",
    "31": "LABEL_31",
    "32": "LABEL_32",
    "33": "LABEL_33",
    "34": "LABEL_34",
    "35": "LABEL_35",
    "36": "LABEL_36",
    "37": "LABEL_37",
    "38": "LABEL_38",
    "39": "LABEL_39",
    "40": "LABEL_40",
    "41": "LABEL_41",
    "42": "LABEL_42",
    "43": "LABEL_43",
    "44": "LABEL_44",
    "45": "LABEL_45",
    "46": "LABEL_46",
    "47": "LABEL_47",
    "48": "LABEL_48",
    "49": "LABEL_49",
    "50": "LABEL_50",
    "51": "LABEL_51",
    "52": "LABEL_52",
    "53": "LABEL_53",
    "54": "LABEL_54",
    "55": "LABEL_55",
    "56": "LABEL_56",
    "57": "LABEL_57",
    "58": "LABEL_58",
    "59": "LABEL_59",
    "60": "LABEL_60",
    "61": "LABEL_61",
    "62": "LABEL_62",
    "63": "LABEL_63",
    "64": "LABEL_64",
    "65": "LABEL_65",
    "66": "LABEL_66",
    "67": "LABEL_67",
    "68": "LABEL_68",
    "69": "LABEL_69",
    "70": "LABEL_70",
    "71": "LABEL_71",
    "72": "LABEL_72",
    "73": "LABEL_73",
    "74": "LABEL_74",
    "75": "LABEL_75",
    "76": "LABEL_76",
    "77": "LABEL_77",
    "78": "LABEL_78",
    "79": "LABEL_79",
    "80": "LABEL_80",
    "81": "LABEL_81",
    "82": "LABEL_82",
    "83": "LABEL_83",
    "84": "LABEL_84",
    "85": "LABEL_85",
    "86": "LABEL_86",
    "87": "LABEL_87",
    "88": "LABEL_88",
    "89": "LABEL_89",
    "90": "LABEL_90",
    "91": "LABEL_91",
    "92": "LABEL_92",
    "93": "LABEL_93",
    "94": "LABEL_94",
    "95": "LABEL_95",
    "96": "LABEL_96",
    "97": "LABEL_97",
    "98": "LABEL_98",
    "99": "LABEL_99",
    "100": "LABEL_100",
    "101": "LABEL_101",
    "102": "LABEL_102",
    "103": "LABEL_103",
    "104": "LABEL_104",
    "105": "LABEL_105",
    "106": "LABEL_106",
    "107": "LABEL_107",
    "108": "LABEL_108",
    "109": "LABEL_109",
    "110": "LABEL_110",
    "111": "LABEL_111",
    "112": "LABEL_112",
    "113": "LABEL_113",
    "114": "LABEL_114",
    "115": "LABEL_115",
    "116": "LABEL_116",
    "117": "LABEL_117",
    "118": "LABEL_118",
    "119": "LABEL_119",
    "120": "LABEL_120",
    "121": "LABEL_121",
    "122": "LABEL_122",
    "123": "LABEL_123",
    "124": "LABEL_124",
    "125": "LABEL_125",
    "126": "LABEL_126",
    "127": "LABEL_127",
    "128": "LABEL_128",
    "129": "LABEL_129",
    "130": "LABEL_130",
    "131": "LABEL_131",
    "132": "LABEL_132",
    "133": "LABEL_133",
    "134": "LABEL_134",
    "135": "LABEL_135",
    "136": "LABEL_136",
    "137": "LABEL_137",
    "138": "LABEL_138",
    "139": "LABEL_139",
    "140": "LABEL_140",
    "141": "LABEL_141",
    "142": "LABEL_142",
    "143": "LABEL_143",
    "144": "LABEL_144",
    "145": "LABEL_145",
    "146": "LABEL_146",
    "147": "LABEL_147",
    "148": "LABEL_148",
    "149": "LABEL_149",
    "150": "LABEL_150",
    "151": "LABEL_151",
    "152": "LABEL_152",
    "153": "LABEL_153",
    "154": "LABEL_154",
    "155": "LABEL_155",
    "156": "LABEL_156",
    "157": "LABEL_157",
    "158": "LABEL_158",
    "159": "LABEL_159",
    "160": "LABEL_160",
    "161": "LABEL_161",
    "162": "LABEL_162",
    "163": "LABEL_163",
    "164": "LABEL_164",
    "165": "LABEL_165",
    "166": "LABEL_166",
    "167": "LABEL_167",
    "168": "LABEL_168",
    "169": "LABEL_169",
    "170": "LABEL_170",
    "171": "LABEL_171",
    "172": "LABEL_172",
    "173": "LABEL_173",
    "174": "LABEL_174",
    "175": "LABEL_175",
    "176": "LABEL_176",
    "177": "LABEL_177",
    "178": "LABEL_178",
    "179": "LABEL_179",
    "180": "LABEL_180",
    "181": "LABEL_181",
    "182": "LABEL_182",
    "183": "LABEL_183",
    "184": "LABEL_184",
    "185": "LABEL_185",
    "186": "LABEL_186",
    "187": "LABEL_187",
    "188": "LABEL_188",
    "189": "LABEL_189",
    "190": "LABEL_190",
    "191": "LABEL_191",
    "192": "LABEL_192",
    "193": "LABEL_193",
    "194": "LABEL_194",
    "195": "LABEL_195",
    "196": "LABEL_196",
    "197": "LABEL_197",
    "198": "LABEL_198",
    "199": "LABEL_199",
    "200": "LABEL_200",
    "201": "LABEL_201",
    "202": "LABEL_202",
    "203": "LABEL_203",
    "204": "LABEL_204",
    "205": "LABEL_205",
    "206": "LABEL_206",
    "207": "LABEL_207",
    "208": "LABEL_208",
    "209": "LABEL_209",
    "210": "LABEL_210",
    "211": "LABEL_211",
    "212": "LABEL_212",
    "213": "LABEL_213",
    "214": "LABEL_214",
    "215": "LABEL_215",
    "216": "LABEL_216",
    "217": "LABEL_217",
    "218": "LABEL_218",
    "219": "LABEL_219",
    "220": "LABEL_220",
    "221": "LABEL_221",
    "222": "LABEL_222",
    "223": "LABEL_223",
    "224": "LABEL_224",
    "225": "LABEL_225",
    "226": "LABEL_226",
    "227": "LABEL_227",
    "228": "LABEL_228",
    "229": "LABEL_229",
    "230": "LABEL_230",
    "231": "LABEL_231",
    "232": "LABEL_232",
    "233": "LABEL_233",
    "234": "LABEL_234",
    "235": "LABEL_235",
    "236": "LABEL_236",
    "237": "LABEL_237",
    "238": "LABEL_238",
    "239": "LABEL_239",
    "240": "LABEL_240",
    "241": "LABEL_241",
    "242": "LABEL_242",
    "243": "LABEL_243",
    "244": "LABEL_244",
    "245": "LABEL_245",
    "246": "LABEL_246",
    "247": "LABEL_247",
    "248": "LABEL_248",
    "249": "LABEL_249",
    "250": "LABEL_250",
    "251": "LABEL_251",
    "252": "LABEL_252",
    "253": "LABEL_253"
  },
  "initializer_range": 0.02,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_10": 10,
    "LABEL_100": 100,
    "LABEL_101": 101,
    "LABEL_102": 102,
    "LABEL_103": 103,
    "LABEL_104": 104,
    "LABEL_105": 105,
    "LABEL_106": 106,
    "LABEL_107": 107,
    "LABEL_108": 108,
    "LABEL_109": 109,
    "LABEL_11": 11,
    "LABEL_110": 110,
    "LABEL_111": 111,
    "LABEL_112": 112,
    "LABEL_113": 113,
    "LABEL_114": 114,
    "LABEL_115": 115,
    "LABEL_116": 116,
    "LABEL_117": 117,
    "LABEL_118": 118,
    "LABEL_119": 119,
    "LABEL_12": 12,
    "LABEL_120": 120,
    "LABEL_121": 121,
    "LABEL_122": 122,
    "LABEL_123": 123,
    "LABEL_124": 124,
    "LABEL_125": 125,
    "LABEL_126": 126,
    "LABEL_127": 127,
    "LABEL_128": 128,
    "LABEL_129": 129,
    "LABEL_13": 13,
    "LABEL_130": 130,
    "LABEL_131": 131,
    "LABEL_132": 132,
    "LABEL_133": 133,
    "LABEL_134": 134,
    "LABEL_135": 135,
    "LABEL_136": 136,
    "LABEL_137": 137,
    "LABEL_138": 138,
    "LABEL_139": 139,
    "LABEL_14": 14,
    "LABEL_140": 140,
    "LABEL_141": 141,
    "LABEL_142": 142,
    "LABEL_143": 143,
    "LABEL_144": 144,
    "LABEL_145": 145,
    "LABEL_146": 146,
    "LABEL_147": 147,
    "LABEL_148": 148,
    "LABEL_149": 149,
    "LABEL_15": 15,
    "LABEL_150": 150,
    "LABEL_151": 151,
    "LABEL_152": 152,
    "LABEL_153": 153,
    "LABEL_154": 154,
    "LABEL_155": 155,
    "LABEL_156": 156,
    "LABEL_157": 157,
    "LABEL_158": 158,
    "LABEL_159": 159,
    "LABEL_16": 16,
    "LABEL_160": 160,
    "LABEL_161": 161,
    "LABEL_162": 162,
    "LABEL_163": 163,
    "LABEL_164": 164,
    "LABEL_165": 165,
    "LABEL_166": 166,
    "LABEL_167": 167,
    "LABEL_168": 168,
    "LABEL_169": 169,
    "LABEL_17": 17,
    "LABEL_170": 170,
    "LABEL_171": 171,
    "LABEL_172": 172,
    "LABEL_173": 173,
    "LABEL_174": 174,
    "LABEL_175": 175,
    "LABEL_176": 176,
    "LABEL_177": 177,
    "LABEL_178": 178,
    "LABEL_179": 179,
    "LABEL_18": 18,
    "LABEL_180": 180,
    "LABEL_181": 181,
    "LABEL_182": 182,
    "LABEL_183": 183,
    "LABEL_184": 184,
    "LABEL_185": 185,
    "LABEL_186": 186,
    "LABEL_187": 187,
    "LABEL_188": 188,
    "LABEL_189": 189,
    "LABEL_19": 19,
    "LABEL_190": 190,
    "LABEL_191": 191,
    "LABEL_192": 192,
    "LABEL_193": 193,
    "LABEL_194": 194,
    "LABEL_195": 195,
    "LABEL_196": 196,
    "LABEL_197": 197,
    "LABEL_198": 198,
    "LABEL_199": 199,
    "LABEL_2": 2,
    "LABEL_20": 20,
    "LABEL_200": 200,
    "LABEL_201": 201,
    "LABEL_202": 202,
    "LABEL_203": 203,
    "LABEL_204": 204,
    "LABEL_205": 205,
    "LABEL_206": 206,
    "LABEL_207": 207,
    "LABEL_208": 208,
    "LABEL_209": 209,
    "LABEL_21": 21,
    "LABEL_210": 210,
    "LABEL_211": 211,
    "LABEL_212": 212,
    "LABEL_213": 213,
    "LABEL_214": 214,
    "LABEL_215": 215,
    "LABEL_216": 216,
    "LABEL_217": 217,
    "LABEL_218": 218,
    "LABEL_219": 219,
    "LABEL_22": 22,
    "LABEL_220": 220,
    "LABEL_221": 221,
    "LABEL_222": 222,
    "LABEL_223": 223,
    "LABEL_224": 224,
    "LABEL_225": 225,
    "LABEL_226": 226,
    "LABEL_227": 227,
    "LABEL_228": 228,
    "LABEL_229": 229,
    "LABEL_23": 23,
    "LABEL_230": 230,
    "LABEL_231": 231,
    "LABEL_232": 232,
    "LABEL_233": 233,
    "LABEL_234": 234,
    "LABEL_235": 235,
    "LABEL_236": 236,
    "LABEL_237": 237,
    "LABEL_238": 238,
    "LABEL_239": 239,
    "LABEL_24": 24,
    "LABEL_240": 240,
    "LABEL_241": 241,
    "LABEL_242": 242,
    "LABEL_243": 243,
    "LABEL_244": 244,
    "LABEL_245": 245,
    "LABEL_246": 246,
    "LABEL_247": 247,
    "LABEL_248": 248,
    "LABEL_249": 249,
    "LABEL_25": 25,
    "LABEL_250": 250,
    "LABEL_251": 251,
    "LABEL_252": 252,
    "LABEL_253": 253,
    "LABEL_26": 26,
    "LABEL_27": 27,
    "LABEL_28": 28,
    "LABEL_29": 29,
    "LABEL_3": 3,
    "LABEL_30": 30,
    "LABEL_31": 31,
    "LABEL_32": 32,
    "LABEL_33": 33,
    "LABEL_34": 34,
    "LABEL_35": 35,
    "LABEL_36": 36,
    "LABEL_37": 37,
    "LABEL_38": 38,
    "LABEL_39": 39,
    "LABEL_4": 4,
    "LABEL_40": 40,
    "LABEL_41": 41,
    "LABEL_42": 42,
    "LABEL_43": 43,
    "LABEL_44": 44,
    "LABEL_45": 45,
    "LABEL_46": 46,
    "LABEL_47": 47,
    "LABEL_48": 48,
    "LABEL_49": 49,
    "LABEL_5": 5,
    "LABEL_50": 50,
    "LABEL_51": 51,
    "LABEL_52": 52,
    "LABEL_53": 53,
    "LABEL_54": 54,
    "LABEL_55": 55,
    "LABEL_56": 56,
    "LABEL_57": 57,
    "LABEL_58": 58,
    "LABEL_59": 59,
    "LABEL_6": 6,
    "LABEL_60": 60,
    "LABEL_61": 61,
    "LABEL_62": 62,
    "LABEL_63": 63,
    "LABEL_64": 64,
    "LABEL_65": 65,
    "LABEL_66": 66,
    "LABEL_67": 67,
    "LABEL_68": 68,
    "LABEL_69": 69,
    "LABEL_7": 7,
    "LABEL_70": 70,
    "LABEL_71": 71,
    "LABEL_72": 72,
    "LABEL_73": 73,
    "LABEL_74": 74,
    "LABEL_75": 75,
    "LABEL_76": 76,
    "LABEL_77": 77,
    "LABEL_78": 78,
    "LABEL_79": 79,
    "LABEL_8": 8,
    "LABEL_80": 80,
    "LABEL_81": 81,
    "LABEL_82": 82,
    "LABEL_83": 83,
    "LABEL_84": 84,
    "LABEL_85": 85,
    "LABEL_86": 86,
    "LABEL_87": 87,
    "LABEL_88": 88,
    "LABEL_89": 89,
    "LABEL_9": 9,
    "LABEL_90": 90,
    "LABEL_91": 91,
    "LABEL_92": 92,
    "LABEL_93": 93,
    "LABEL_94": 94,
    "LABEL_95": 95,
    "LABEL_96": 96,
    "LABEL_97": 97,
    "LABEL_98": 98,
    "LABEL_99": 99
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "output_past": true,
  "pad_token_id": 0,
  "problem_type": "single_label_classification",
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "vocab_size": 30522
}
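Note that `id2label` holds only generic `LABEL_0` through `LABEL_253` placeholders, so predictions from this checkpoint decode to opaque indices unless the original label encoding is kept alongside the model. A minimal inference sketch, assuming a local path or Hub repo id for this checkpoint (the path below is a placeholder):

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

checkpoint = "path/to/this/checkpoint"  # placeholder: local dir or Hub repo id
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)

inputs = tokenizer("An example abstract to classify.",
                   truncation=True, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

pred = logits.argmax(dim=-1).item()
# Only a generic name comes back (e.g. "LABEL_42"); mapping it to a real
# class requires the label encoder used when the dataset was prepared.
print(model.config.id2label[pred])
```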
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f592541796455765d838f1d3d2f26832e955f87e496edab0d0fa7334c54bd6d9
size 268607736
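Both binary files in this commit are Git LFS pointers: the repository stores only the metadata above, and the roughly 268 MB of weights are fetched on checkout. A small sketch for checking a pulled file against the pointer's sha256 (the local filename is an assumption):

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so the full 268 MB never sits in memory at once."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            digest.update(block)
    return digest.hexdigest()

expected = "f592541796455765d838f1d3d2f26832e955f87e496edab0d0fa7334c54bd6d9"
assert sha256_of("model.safetensors") == expected, "LFS object mismatch"
```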
special_tokens_map.json
ADDED
@@ -0,0 +1,7 @@
{
  "cls_token": "[CLS]",
  "mask_token": "[MASK]",
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
  "unk_token": "[UNK]"
}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1,57 @@
{
  "added_tokens_decoder": {
    "0": {
      "content": "[PAD]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100": {
      "content": "[UNK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "101": {
      "content": "[CLS]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "102": {
      "content": "[SEP]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "103": {
      "content": "[MASK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "clean_up_tokenization_spaces": true,
  "cls_token": "[CLS]",
  "do_basic_tokenize": true,
  "do_lower_case": true,
  "mask_token": "[MASK]",
  "model_max_length": 512,
  "never_split": null,
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
  "strip_accents": null,
  "tokenize_chinese_chars": true,
  "tokenizer_class": "DistilBertTokenizer",
  "unk_token": "[UNK]"
}
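These settings mirror the base uncased DistilBERT tokenizer: lowercasing on, the WordPiece special tokens at the usual BERT vocabulary ids, and inputs capped at 512 tokens. A quick sketch of the resulting encoding behavior (the checkpoint path is a placeholder, and the printed tokens are what the uncased vocabulary would typically produce):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/checkpoint")  # placeholder

enc = tok("An Example Abstract.", truncation=True)  # capped at model_max_length=512
print(tok.convert_ids_to_tokens(enc["input_ids"]))
# Typically: ['[CLS]', 'an', 'example', 'abstract', '.', '[SEP]']
# lowercased per do_lower_case, with [CLS]/[SEP] added automatically.
```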
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4714f499c57a3b3321f79eb516239afdda9d3a4c0132eca962b7f674a1e52150
size 4795
vocab.txt
ADDED
The diff for this file is too large to render.