End of training
Browse files
- README.md +12 -102
- model.safetensors +1 -1
- runs/Jan24_15-12-38_2348aa0e5e1b/events.out.tfevents.1706109158.2348aa0e5e1b.18236.2 +3 -0
- tokenizer.json +2 -11
- training_args.bin +1 -1
README.md
CHANGED
@@ -15,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss:
+- Loss: 1.7210
 
 ## Model description
 
@@ -40,112 +40,22 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 10
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:----:|:---------------:|
-| No log | 1.0 | 3 |
-| No log | 2.0 | 6 |
-| No log | 3.0 | 9 |
-| No log | 4.0 | 12 |
-| No log | 5.0 | 15 |
-| No log | 6.0 | 18 |
-| No log | 7.0 | 21 |
-| No log | 8.0 | 24 |
-| No log | 9.0 | 27 |
-| No log | 10.0 | 30 |
-| No log | 11.0 | 33 | 0.3226 |
-| No log | 12.0 | 36 | 0.2853 |
-| No log | 13.0 | 39 | 0.2620 |
-| No log | 14.0 | 42 | 0.2338 |
-| No log | 15.0 | 45 | 0.1985 |
-| No log | 16.0 | 48 | 0.1481 |
-| No log | 17.0 | 51 | 0.0294 |
-| No log | 18.0 | 54 | 0.0184 |
-| No log | 19.0 | 57 | 0.0778 |
-| No log | 20.0 | 60 | 0.0157 |
-| No log | 21.0 | 63 | 0.0101 |
-| No log | 22.0 | 66 | 0.0104 |
-| No log | 23.0 | 69 | 0.0136 |
-| No log | 24.0 | 72 | 0.0094 |
-| No log | 25.0 | 75 | 0.0081 |
-| No log | 26.0 | 78 | 0.0066 |
-| No log | 27.0 | 81 | 0.0057 |
-| No log | 28.0 | 84 | 0.0053 |
-| No log | 29.0 | 87 | 0.0048 |
-| No log | 30.0 | 90 | 0.0043 |
-| No log | 31.0 | 93 | 0.0039 |
-| No log | 32.0 | 96 | 0.0036 |
-| No log | 33.0 | 99 | 0.0038 |
-| No log | 34.0 | 102 | 0.0036 |
-| No log | 35.0 | 105 | 0.0031 |
-| No log | 36.0 | 108 | 0.0029 |
-| No log | 37.0 | 111 | 0.0028 |
-| No log | 38.0 | 114 | 0.0026 |
-| No log | 39.0 | 117 | 0.0025 |
-| No log | 40.0 | 120 | 0.0024 |
-| No log | 41.0 | 123 | 0.0023 |
-| No log | 42.0 | 126 | 0.0022 |
-| No log | 43.0 | 129 | 0.0022 |
-| No log | 44.0 | 132 | 0.0021 |
-| No log | 45.0 | 135 | 0.0020 |
-| No log | 46.0 | 138 | 0.0019 |
-| No log | 47.0 | 141 | 0.0018 |
-| No log | 48.0 | 144 | 0.0017 |
-| No log | 49.0 | 147 | 0.0016 |
-| No log | 50.0 | 150 | 0.0016 |
-| No log | 51.0 | 153 | 0.0015 |
-| No log | 52.0 | 156 | 0.0015 |
-| No log | 53.0 | 159 | 0.0015 |
-| No log | 54.0 | 162 | 0.0014 |
-| No log | 55.0 | 165 | 0.0014 |
-| No log | 56.0 | 168 | 0.0013 |
-| No log | 57.0 | 171 | 0.0013 |
-| No log | 58.0 | 174 | 0.0013 |
-| No log | 59.0 | 177 | 0.0012 |
-| No log | 60.0 | 180 | 0.0012 |
-| No log | 61.0 | 183 | 0.0012 |
-| No log | 62.0 | 186 | 0.0012 |
-| No log | 63.0 | 189 | 0.0011 |
-| No log | 64.0 | 192 | 0.0011 |
-| No log | 65.0 | 195 | 0.0011 |
-| No log | 66.0 | 198 | 0.0011 |
-| No log | 67.0 | 201 | 0.0011 |
-| No log | 68.0 | 204 | 0.0010 |
-| No log | 69.0 | 207 | 0.0010 |
-| No log | 70.0 | 210 | 0.0010 |
-| No log | 71.0 | 213 | 0.0010 |
-| No log | 72.0 | 216 | 0.0010 |
-| No log | 73.0 | 219 | 0.0010 |
-| No log | 74.0 | 222 | 0.0010 |
-| No log | 75.0 | 225 | 0.0010 |
-| No log | 76.0 | 228 | 0.0010 |
-| No log | 77.0 | 231 | 0.0009 |
-| No log | 78.0 | 234 | 0.0009 |
-| No log | 79.0 | 237 | 0.0009 |
-| No log | 80.0 | 240 | 0.0009 |
-| No log | 81.0 | 243 | 0.0009 |
-| No log | 82.0 | 246 | 0.0009 |
-| No log | 83.0 | 249 | 0.0009 |
-| No log | 84.0 | 252 | 0.0009 |
-| No log | 85.0 | 255 | 0.0009 |
-| No log | 86.0 | 258 | 0.0009 |
-| No log | 87.0 | 261 | 0.0009 |
-| No log | 88.0 | 264 | 0.0009 |
-| No log | 89.0 | 267 | 0.0009 |
-| No log | 90.0 | 270 | 0.0009 |
-| No log | 91.0 | 273 | 0.0009 |
-| No log | 92.0 | 276 | 0.0009 |
-| No log | 93.0 | 279 | 0.0009 |
-| No log | 94.0 | 282 | 0.0009 |
-| No log | 95.0 | 285 | 0.0009 |
-| No log | 96.0 | 288 | 0.0009 |
-| No log | 97.0 | 291 | 0.0009 |
-| No log | 98.0 | 294 | 0.0009 |
-| No log | 99.0 | 297 | 0.0009 |
-| No log | 100.0 | 300 | 0.0009 |
+| No log | 1.0 | 3 | 5.2372 |
+| No log | 2.0 | 6 | 4.5456 |
+| No log | 3.0 | 9 | 3.9316 |
+| No log | 4.0 | 12 | 3.3406 |
+| No log | 5.0 | 15 | 2.8055 |
+| No log | 6.0 | 18 | 2.4044 |
+| No log | 7.0 | 21 | 2.1014 |
+| No log | 8.0 | 24 | 1.8914 |
+| No log | 9.0 | 27 | 1.7682 |
+| No log | 10.0 | 30 | 1.7210 |
 
 
 ### Framework versions
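The hyperparameters in this hunk map one-to-one onto `transformers.TrainingArguments`. A minimal sketch follows, taking only the values the diff shows (seed, the Adam betas and epsilon, the linear scheduler, and the newly recorded num_epochs: 10); output_dir and the per-epoch evaluation strategy are assumptions, and everything else is left at library defaults:

```python
from transformers import TrainingArguments

# Only the values shown in the card's diff are taken from the source;
# output_dir and evaluation_strategy are assumptions for illustration.
args = TrainingArguments(
    output_dir="distilbert-finetune",  # assumption: not shown in the diff
    seed=42,
    num_train_epochs=10,               # filled in by this commit
    lr_scheduler_type="linear",
    adam_beta1=0.9,                    # Adam with betas=(0.9,0.999)
    adam_beta2=0.999,
    adam_epsilon=1e-8,                 # and epsilon=1e-08
    evaluation_strategy="epoch",       # assumption: the table logs one eval per epoch
)
print(args.num_train_epochs, args.lr_scheduler_type)
```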
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5819b144962df126a6807a504e15ad700d2ad629211b2009b69d53c761f9f497
 size 265470032
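Both sides of this hunk are Git LFS pointer files: the repository tracks only the sha256 digest and byte size, while the ~265 MB weight file itself lives in LFS storage. Loading the checkpoint goes through transformers as usual. In the sketch below the repo id is a placeholder, and the question-answering head is only an assumption inferred from the only-second truncation settings in the tokenizer.json change further down; the card never names the task:

```python
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

repo_id = "user/distilbert-finetune"  # placeholder: the actual repo id is not shown
tokenizer = AutoTokenizer.from_pretrained(repo_id)
# The head is an assumption; swap in the AutoModelFor* class matching the real task.
model = AutoModelForQuestionAnswering.from_pretrained(repo_id)
```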
runs/Jan24_15-12-38_2348aa0e5e1b/events.out.tfevents.1706109158.2348aa0e5e1b.18236.2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:483facc7d71734b739d6e6d6eab1f1913b221cef4f3880ef250436e8a4f85099
+size 7085
tokenizer.json
CHANGED
@@ -4,18 +4,9 @@
     "direction": "Right",
     "max_length": 384,
     "strategy": "OnlySecond",
-    "stride":
-  },
-  "padding": {
-    "strategy": {
-      "Fixed": 384
-    },
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 0,
-    "pad_type_id": 0,
-    "pad_token": "[PAD]"
+    "stride": 128
   },
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
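This change sets a truncation stride and drops the fixed-to-384 padding block ("padding": null), so sequences are no longer padded inside the tokenizer and callers pad at batch time instead. With "strategy": "OnlySecond" and "stride": 128, only the second sequence of a pair is truncated, and overflowing tokens become extra windows that overlap by 128 tokens. A small sketch of the equivalent call, with made-up input strings:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("distilbert-base-uncased")

enc = tok(
    "a short first sequence",              # never truncated ("OnlySecond")
    "a very long second sequence " * 200,  # split into overlapping windows
    truncation="only_second",
    max_length=384,                        # from tokenizer.json
    stride=128,                            # added by this commit
    return_overflowing_tokens=True,
)
print(len(enc["input_ids"]))  # number of 384-token windows, adjacent ones sharing 128 tokens
```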
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b68d5147a45de62c2825a2630d591b0bd02b568d10ca22398a934f7226af1439
 size 4536
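training_args.bin is the TrainingArguments object that Trainer writes out with torch.save, so the exact settings behind this run can be inspected directly. A quick sketch, assuming PyTorch and transformers are installed (newer PyTorch versions need weights_only=False because the file stores a full Python object, not just tensors):

```python
import torch

# Unpickle the 4.5 kB TrainingArguments saved alongside the checkpoint.
args = torch.load("training_args.bin", weights_only=False)
print(args.seed, args.num_train_epochs, args.lr_scheduler_type)
```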