Julien Chaumond committed
Commit: 1e3e88c
Parent(s): 843babf
initial import
Files changed:
- .gitattributes +1 -0
- config.yaml +34 -0
- loss.tsv +113 -0
- pytorch_model.bin +3 -0
- test.tsv +0 -0
- training.log +0 -0
- weights.txt +3 -0
.gitattributes
CHANGED
@@ -6,3 +6,4 @@
 *.tar.gz filter=lfs diff=lfs merge=lfs -text
 *.ot filter=lfs diff=lfs merge=lfs -text
 *.onnx filter=lfs diff=lfs merge=lfs -text
+weights.txt filter=lfs diff=lfs merge=lfs -text
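The added .gitattributes rule routes weights.txt through Git LFS, alongside the archive and model patterns already tracked. As a purely illustrative sketch (standard library only, not part of this commit), the LFS-tracked patterns can be listed from the file like this:

def lfs_patterns(path=".gitattributes"):
    # Collect the patterns whose attributes include the LFS filter.
    patterns = []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if parts and "filter=lfs" in parts[1:]:
                patterns.append(parts[0])
    return patterns

print(lfs_patterns())  # expected to include '*.tar.gz', '*.ot', '*.onnx', 'weights.txt'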
config.yaml
ADDED
@@ -0,0 +1,34 @@
+experiment_name: 'NER'
+run: >
+  python -u local_execute_flair.py {checkpoint}
+setup: slurm_virtual_env
+type_: gpu
+
+model_params:
+  nlayers: 1
+  hidden_size: 256
+
+task:
+  tag_type: 'NER'
+  data_set: 'conll03'
+
+embeddings:
+  char_lm: 'data/embeddings/news-forward--h2048-l1-d0.05-grow/epoch_8.pt+data/embeddings/news-backward--h2048-l1-d0.05-grow/epoch_8.pt'
+  charlm_type: 'normal'
+  word: 'glove'
+  char: False
+  elmo: ''
+
+network:
+  name: 'epoch_8'
+  mode: 'nodrop'
+
+learning_params:
+  learning_rate: 0.1
+  mini_batch_size: 32
+  anneal_factor: 0.5
+  patience: 3
+  anneal_with_restarts: False
+  use_dropout: 0.0
+  use_word_dropout: 0.05
+  use_locked_dropout: 0.5
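config.yaml is a Flair-style NER training configuration. A minimal sketch of reading it in Python, assuming PyYAML is available; the helper name load_config and the inspected keys are illustrative, not code shipped with this commit:

import yaml  # PyYAML, assumed installed

def load_config(path="config.yaml"):
    # Parse the YAML training configuration into a plain dict.
    with open(path) as f:
        return yaml.safe_load(f)

cfg = load_config()
print(cfg["learning_params"]["learning_rate"])    # 0.1
print(cfg["learning_params"]["mini_batch_size"])  # 32
print(cfg["embeddings"]["word"])                  # 'glove'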
loss.tsv
ADDED
@@ -0,0 +1,113 @@
+EPOCH TIMESTAMP BAD_EPOCHS LEARNING_RATE TRAIN_LOSS TRAIN_PRECISION TRAIN_RECALL TRAIN_ACCURACY TRAIN_F-SCORE DEV_LOSS DEV_PRECISION DEV_RECALL DEV_ACCURACY DEV_F-SCORE TEST_LOSS TEST_PRECISION TEST_RECALL TEST_ACCURACY TEST_F-SCORE
+0 11:54:34 0 0.1000 2.73538844317839 _ _ _ _ _ _ _ _ _ _ 0.8821 0.8849 0.8835 0.8835
+1 11:56:11 0 0.1000 1.118676934314344 _ _ _ _ _ _ _ _ _ _ 0.9033 0.9033 0.9033 0.9033
+2 11:57:48 0 0.1000 0.8625876535434152 _ _ _ _ _ _ _ _ _ _ 0.9033 0.906 0.9046 0.9046
+3 11:59:25 0 0.1000 0.7509978762139655 _ _ _ _ _ _ _ _ _ _ 0.9033 0.9117 0.9075 0.9075
+4 12:01:02 0 0.1000 0.6714545077168508 _ _ _ _ _ _ _ _ _ _ 0.9169 0.9129 0.9149 0.9149
+5 12:02:41 0 0.1000 0.6137293781658204 _ _ _ _ _ _ _ _ _ _ 0.9075 0.9157 0.9116 0.9116
+6 12:04:18 0 0.1000 0.5599488914352068 _ _ _ _ _ _ _ _ _ _ 0.9142 0.923 0.9186 0.9186
+7 12:05:53 0 0.1000 0.5161069346102171 _ _ _ _ _ _ _ _ _ _ 0.9163 0.9193 0.9178 0.9178
+8 12:07:29 0 0.1000 0.5244319860007404 _ _ _ _ _ _ _ _ _ _ 0.9247 0.9263 0.9255 0.9255
+9 12:09:06 1 0.1000 0.4650024102062725 _ _ _ _ _ _ _ _ _ _ 0.9156 0.9201 0.9179 0.9178
+10 12:10:43 0 0.1000 0.45536044239190343 _ _ _ _ _ _ _ _ _ _ 0.9147 0.924 0.9193 0.9193
+11 12:12:20 0 0.1000 0.4311140479611516 _ _ _ _ _ _ _ _ _ _ 0.9076 0.9221 0.9148 0.9148
+12 12:13:57 0 0.1000 0.41773630422427716 _ _ _ _ _ _ _ _ _ _ 0.9135 0.9232 0.9183 0.9183
+13 12:15:34 0 0.1000 0.39989252508555506 _ _ _ _ _ _ _ _ _ _ 0.9127 0.924 0.9184 0.9183
+14 12:17:11 0 0.1000 0.41186320149258326 _ _ _ _ _ _ _ _ _ _ 0.917 0.9235 0.9203 0.9202
+15 12:18:46 1 0.1000 0.39109092455649835 _ _ _ _ _ _ _ _ _ _ 0.9237 0.9278 0.9257 0.9257
+16 12:20:22 0 0.1000 0.36358819198835796 _ _ _ _ _ _ _ _ _ _ 0.9223 0.9253 0.9238 0.9238
+17 12:21:58 0 0.1000 0.3543564788127071 _ _ _ _ _ _ _ _ _ _ 0.923 0.9338 0.9284 0.9284
+18 12:23:34 0 0.1000 0.3434745372229977 _ _ _ _ _ _ _ _ _ _ 0.9254 0.9306 0.928 0.928
+19 12:25:09 0 0.1000 0.33744065085704467 _ _ _ _ _ _ _ _ _ _ 0.9228 0.9297 0.9263 0.9262
+20 12:26:46 0 0.1000 0.33369330760877464 _ _ _ _ _ _ _ _ _ _ 0.9226 0.9283 0.9254 0.9254
+21 12:28:23 0 0.1000 0.3400332434277014 _ _ _ _ _ _ _ _ _ _ 0.9217 0.9249 0.9233 0.9233
+22 12:29:59 1 0.1000 0.3198289048708545 _ _ _ _ _ _ _ _ _ _ 0.9199 0.9297 0.9248 0.9248
+23 12:31:37 0 0.1000 0.319529091976577 _ _ _ _ _ _ _ _ _ _ 0.9218 0.9302 0.926 0.926
+24 12:33:12 0 0.1000 0.3116783418222823 _ _ _ _ _ _ _ _ _ _ 0.9233 0.9267 0.925 0.925
+25 12:34:49 0 0.1000 0.29937481850471265 _ _ _ _ _ _ _ _ _ _ 0.9233 0.9295 0.9264 0.9264
+26 12:36:25 0 0.1000 0.29989841146752844 _ _ _ _ _ _ _ _ _ _ 0.9201 0.9281 0.9241 0.9241
+27 12:38:00 1 0.1000 0.29239999280869716 _ _ _ _ _ _ _ _ _ _ 0.9173 0.9285 0.9228 0.9229
+28 12:39:36 0 0.1000 0.28970547210283215 _ _ _ _ _ _ _ _ _ _ 0.9251 0.9313 0.9282 0.9282
+29 12:41:12 0 0.1000 0.29258880794568354 _ _ _ _ _ _ _ _ _ _ 0.926 0.9304 0.9282 0.9282
+30 12:42:47 1 0.1000 0.2881326142581234 _ _ _ _ _ _ _ _ _ _ 0.9233 0.9292 0.9262 0.9262
+31 12:44:24 0 0.1000 0.2810851916358289 _ _ _ _ _ _ _ _ _ _ 0.926 0.9285 0.9272 0.9272
+32 12:46:02 0 0.1000 0.2719097940707177 _ _ _ _ _ _ _ _ _ _ 0.9236 0.9295 0.9266 0.9265
+33 12:47:37 0 0.1000 0.2713488189016313 _ _ _ _ _ _ _ _ _ _ 0.9221 0.9327 0.9274 0.9274
+34 12:49:14 0 0.1000 0.2809630265140549 _ _ _ _ _ _ _ _ _ _ 0.9258 0.9304 0.9281 0.9281
+35 12:50:52 1 0.1000 0.26241666142846076 _ _ _ _ _ _ _ _ _ _ 0.9227 0.9294 0.926 0.926
+36 12:52:28 0 0.1000 0.24546798556075666 _ _ _ _ _ _ _ _ _ _ 0.9239 0.9304 0.9271 0.9271
+37 12:54:05 0 0.1000 0.252687362127625 _ _ _ _ _ _ _ _ _ _ 0.9199 0.9297 0.9248 0.9248
+38 12:55:40 1 0.1000 0.25465273592250237 _ _ _ _ _ _ _ _ _ _ 0.9212 0.929 0.9251 0.9251
+39 12:57:16 2 0.1000 0.25537284292813234 _ _ _ _ _ _ _ _ _ _ 0.9227 0.9294 0.926 0.926
+40 12:58:52 3 0.1000 0.25918467180622834 _ _ _ _ _ _ _ _ _ _ 0.9238 0.9338 0.9288 0.9288
+41 13:00:27 0 0.0500 0.2266597393010875 _ _ _ _ _ _ _ _ _ _ 0.9241 0.9313 0.9277 0.9277
+42 13:02:04 0 0.0500 0.20563673338696012 _ _ _ _ _ _ _ _ _ _ 0.9249 0.9309 0.9279 0.9279
+43 13:03:47 0 0.0500 0.2099513485329213 _ _ _ _ _ _ _ _ _ _ 0.9242 0.9302 0.9272 0.9272
+44 13:05:28 1 0.0500 0.20067912960016313 _ _ _ _ _ _ _ _ _ _ 0.9267 0.9329 0.9298 0.9298
+45 13:07:09 0 0.0500 0.18854872132422626 _ _ _ _ _ _ _ _ _ _ 0.9244 0.9327 0.9285 0.9285
+46 13:08:49 0 0.0500 0.19183702602726538 _ _ _ _ _ _ _ _ _ _ 0.9249 0.9336 0.9292 0.9292
+47 13:10:29 1 0.0500 0.18874652515044027 _ _ _ _ _ _ _ _ _ _ 0.926 0.9308 0.9284 0.9284
+48 13:12:07 2 0.0500 0.18504279042132687 _ _ _ _ _ _ _ _ _ _ 0.9255 0.9322 0.9288 0.9288
+49 13:13:44 0 0.0500 0.18426746800876997 _ _ _ _ _ _ _ _ _ _ 0.9226 0.9304 0.9265 0.9265
+50 13:15:22 0 0.0500 0.18763158002636435 _ _ _ _ _ _ _ _ _ _ 0.9223 0.9295 0.9259 0.9259
+51 13:17:00 1 0.0500 0.1793311693310085 _ _ _ _ _ _ _ _ _ _ 0.9251 0.9317 0.9284 0.9284
+52 13:18:38 0 0.0500 0.17924908532725758 _ _ _ _ _ _ _ _ _ _ 0.9282 0.9322 0.9302 0.9302
+53 13:20:16 0 0.0500 0.16567363577768138 _ _ _ _ _ _ _ _ _ _ 0.9251 0.9313 0.9282 0.9282
+54 13:21:53 0 0.0500 0.1709139728354922 _ _ _ _ _ _ _ _ _ _ 0.9259 0.9313 0.9286 0.9286
+55 13:23:30 1 0.0500 0.1622482556505513 _ _ _ _ _ _ _ _ _ _ 0.9266 0.9317 0.9291 0.9291
+56 13:25:07 0 0.0500 0.1750158140835436 _ _ _ _ _ _ _ _ _ _ 0.9263 0.9304 0.9284 0.9283
+57 13:26:43 1 0.0500 0.16699462409059007 _ _ _ _ _ _ _ _ _ _ 0.9252 0.9313 0.9283 0.9282
+58 13:28:21 2 0.0500 0.17495543460745686 _ _ _ _ _ _ _ _ _ _ 0.9304 0.9343 0.9323 0.9323
+59 13:29:58 3 0.0500 0.15922732715754498 _ _ _ _ _ _ _ _ _ _ 0.9255 0.9327 0.9291 0.9291
+60 13:31:34 0 0.0500 0.1611535432493371 _ _ _ _ _ _ _ _ _ _ 0.9238 0.9299 0.9269 0.9268
+61 13:33:12 1 0.0500 0.16398702251525493 _ _ _ _ _ _ _ _ _ _ 0.9255 0.9322 0.9288 0.9288
+62 13:34:47 2 0.0500 0.1708997069468235 _ _ _ _ _ _ _ _ _ _ 0.926 0.9309 0.9285 0.9284
+63 13:36:22 3 0.0500 0.15964372034144714 _ _ _ _ _ _ _ _ _ _ 0.9287 0.9313 0.93 0.93
+64 13:37:57 0 0.0250 0.15631725272208263 _ _ _ _ _ _ _ _ _ _ 0.9262 0.9327 0.9294 0.9294
+65 13:39:35 0 0.0250 0.14626933712691323 _ _ _ _ _ _ _ _ _ _ 0.9247 0.9327 0.9287 0.9287
+66 13:41:11 0 0.0250 0.1442983325810857 _ _ _ _ _ _ _ _ _ _ 0.9247 0.9325 0.9286 0.9286
+67 13:42:48 0 0.0250 0.13470685914209696 _ _ _ _ _ _ _ _ _ _ 0.9266 0.932 0.9293 0.9293
+68 13:44:25 0 0.0250 0.12936415235853993 _ _ _ _ _ _ _ _ _ _ 0.9243 0.9333 0.9287 0.9288
+69 13:46:01 0 0.0250 0.14095035943275402 _ _ _ _ _ _ _ _ _ _ 0.9272 0.9329 0.93 0.93
+70 13:47:37 1 0.0250 0.1347877641412318 _ _ _ _ _ _ _ _ _ _ 0.9278 0.9327 0.9302 0.9302
+71 13:49:11 2 0.0250 0.13894263496310033 _ _ _ _ _ _ _ _ _ _ 0.9238 0.9317 0.9277 0.9277
+72 13:50:48 3 0.0250 0.1443860820139132 _ _ _ _ _ _ _ _ _ _ 0.9258 0.9318 0.9288 0.9288
+73 13:52:25 0 0.0125 0.12581411408310833 _ _ _ _ _ _ _ _ _ _ 0.9274 0.9334 0.9304 0.9304
+74 13:54:01 0 0.0125 0.12721009627335866 _ _ _ _ _ _ _ _ _ _ 0.9274 0.9334 0.9304 0.9304
+75 13:55:37 1 0.0125 0.1278715432384683 _ _ _ _ _ _ _ _ _ _ 0.9272 0.9334 0.9303 0.9303
+76 13:57:12 2 0.0125 0.12584064525297675 _ _ _ _ _ _ _ _ _ _ 0.9268 0.9324 0.9296 0.9296
+77 13:58:47 3 0.0125 0.1265358718140677 _ _ _ _ _ _ _ _ _ _ 0.9272 0.9329 0.93 0.93
+78 14:00:23 0 0.0063 0.12764276347302203 _ _ _ _ _ _ _ _ _ _ 0.9272 0.9329 0.93 0.93
+79 14:01:59 1 0.0063 0.12714154495622113 _ _ _ _ _ _ _ _ _ _ 0.9271 0.9322 0.9296 0.9296
+80 14:03:37 2 0.0063 0.1286786236311048 _ _ _ _ _ _ _ _ _ _ 0.9271 0.932 0.9295 0.9295
+81 14:05:14 3 0.0063 0.12327975417478088 _ _ _ _ _ _ _ _ _ _ 0.9277 0.932 0.9299 0.9298
+82 14:06:49 0 0.0063 0.12612910181023929 _ _ _ _ _ _ _ _ _ _ 0.9277 0.9334 0.9305 0.9305
+83 14:08:24 1 0.0063 0.11570722295915842 _ _ _ _ _ _ _ _ _ _ 0.9285 0.9336 0.931 0.931
+84 14:10:00 0 0.0063 0.11755319823354639 _ _ _ _ _ _ _ _ _ _ 0.9275 0.9329 0.9302 0.9302
+85 14:11:35 1 0.0063 0.11165953258741182 _ _ _ _ _ _ _ _ _ _ 0.9262 0.9327 0.9294 0.9294
+86 14:13:11 0 0.0063 0.10628769204318475 _ _ _ _ _ _ _ _ _ _ 0.9284 0.9341 0.9313 0.9312
+87 14:14:50 0 0.0063 0.11921017647874857 _ _ _ _ _ _ _ _ _ _ 0.9281 0.9343 0.9312 0.9312
+88 14:16:25 1 0.0063 0.11036880946718812 _ _ _ _ _ _ _ _ _ _ 0.9282 0.9334 0.9308 0.9308
+89 14:18:01 2 0.0063 0.11956419991534725 _ _ _ _ _ _ _ _ _ _ 0.9269 0.9338 0.9303 0.9303
+90 14:19:37 3 0.0063 0.11449584954218936 _ _ _ _ _ _ _ _ _ _ 0.9277 0.9343 0.931 0.931
+91 14:21:15 0 0.0031 0.11985806564015179 _ _ _ _ _ _ _ _ _ _ 0.9274 0.9338 0.9306 0.9306
+92 14:22:49 1 0.0031 0.11379744750351425 _ _ _ _ _ _ _ _ _ _ 0.9278 0.9333 0.9305 0.9305
+93 14:24:26 2 0.0031 0.12914000581817717 _ _ _ _ _ _ _ _ _ _ 0.9277 0.9334 0.9305 0.9305
+94 14:26:01 3 0.0031 0.11716058269358712 _ _ _ _ _ _ _ _ _ _ 0.9277 0.9338 0.9307 0.9307
+95 14:28:00 0 0.0016 0.1204525799710712 _ _ _ _ _ _ _ _ _ _ 0.9278 0.9334 0.9306 0.9306
+96 14:29:41 1 0.0016 0.11789309700052311 _ _ _ _ _ _ _ _ _ _ 0.9278 0.9333 0.9305 0.9305
+97 14:32:51 2 0.0016 0.11506359194608418 _ _ _ _ _ _ _ _ _ _ 0.9282 0.9334 0.9308 0.9308
+98 14:36:39 3 0.0016 0.11743626397896258 _ _ _ _ _ _ _ _ _ _ 0.9279 0.9336 0.9307 0.9307
+99 14:38:46 0 0.0008 0.11851828922484035 _ _ _ _ _ _ _ _ _ _ 0.9275 0.9334 0.9305 0.9304
+100 14:40:52 1 0.0008 0.11981437879804349 _ _ _ _ _ _ _ _ _ _ 0.9277 0.9334 0.9305 0.9305
+101 14:43:49 2 0.0008 0.11978041002280485 _ _ _ _ _ _ _ _ _ _ 0.9277 0.9334 0.9305 0.9305
+102 14:46:01 3 0.0008 0.1271665156879238 _ _ _ _ _ _ _ _ _ _ 0.928 0.9334 0.9307 0.9307
+103 14:48:22 0 0.0004 0.1229250064769851 _ _ _ _ _ _ _ _ _ _ 0.9278 0.9334 0.9306 0.9306
+104 14:50:40 1 0.0004 0.1170557794264453 _ _ _ _ _ _ _ _ _ _ 0.9278 0.9334 0.9306 0.9306
+105 14:52:46 2 0.0004 0.11941440940219807 _ _ _ _ _ _ _ _ _ _ 0.928 0.9334 0.9307 0.9307
+106 14:55:43 3 0.0004 0.10896147789105241 _ _ _ _ _ _ _ _ _ _ 0.9275 0.9333 0.9304 0.9304
+107 14:58:20 0 0.0002 0.10579175638492298 _ _ _ _ _ _ _ _ _ _ 0.9275 0.9333 0.9304 0.9304
+108 15:00:31 0 0.0002 0.12743578397477667 _ _ _ _ _ _ _ _ _ _ 0.9275 0.9333 0.9304 0.9304
+109 15:03:02 1 0.0002 0.11809778236757544 _ _ _ _ _ _ _ _ _ _ 0.9273 0.9331 0.9302 0.9302
+110 15:05:55 2 0.0002 0.1163037369672841 _ _ _ _ _ _ _ _ _ _ 0.9273 0.9331 0.9302 0.9302
+111 15:07:57 3 0.0002 0.1237109458744316 _ _ _ _ _ _ _ _ _ _ 0.9275 0.9331 0.9303 0.9303
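loss.tsv holds one row per training epoch; underscore fields mark metrics that were not computed for that split, and only the test columns are populated here. A small sketch for locating the strongest epoch by TEST_F-SCORE, assuming pandas is installed and the file is tab-separated as its extension suggests; this is an external convenience, not part of the training code:

import pandas as pd  # assumed installed

# Treat the underscore placeholders as missing values.
df = pd.read_csv("loss.tsv", sep="\t", na_values="_")

best = df.loc[df["TEST_F-SCORE"].idxmax()]
print(int(best["EPOCH"]), best["LEARNING_RATE"], best["TEST_F-SCORE"])
# With the table above this lands on epoch 58 (test F-score 0.9323).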
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:854d0ad0e7a0639e8268aa741a3a48a0c3a11336d34b79edf8d20496eff8c6b2
+size 432197603
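pytorch_model.bin is stored as a Git LFS pointer (the oid and size above), so a plain checkout without LFS yields only this small stub rather than the ~432 MB weights. One way to fetch the real file, assuming the repository is hosted on the Hugging Face Hub and huggingface_hub is installed; the repo_id below is a placeholder, not taken from this page:

from huggingface_hub import hf_hub_download  # assumed installed

# "user/model-name" is a hypothetical repo_id; substitute the actual repository.
local_path = hf_hub_download(repo_id="user/model-name", filename="pytorch_model.bin")
print(local_path)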
test.tsv
ADDED
The diff for this file is too large to render.
training.log
ADDED
The diff for this file is too large to render.
weights.txt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b99673af50e611831d14dcef36f32f47dd1e7150bbd68b1996541deb48540263
+size 20305307