salbatarni committed
Commit b086119
1 Parent(s): c1c35e4

Training in progress, step 160

Files changed (4)
  1. README.md +137 -0
  2. config.json +32 -0
  3. model.safetensors +3 -0
  4. training_args.bin +3 -0
README.md ADDED
---
base_model: aubmindlab/bert-base-arabertv02
tags:
- generated_from_trainer
model-index:
- name: arabert_cross_organization_task7_fold5
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# arabert_cross_organization_task7_fold5

This model is a fine-tuned version of [aubmindlab/bert-base-arabertv02](https://huggingface.co/aubmindlab/bert-base-arabertv02) on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5800
- Qwk: 0.7690
- Mse: 0.5800
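A minimal inference sketch, assuming the checkpoint is published under the repo id `salbatarni/arabert_cross_organization_task7_fold5` (inferred from the model name above, not confirmed by this card):

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Hypothetical repo id, inferred from the model name in this card.
repo_id = "salbatarni/arabert_cross_organization_task7_fold5"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)

# config.json (below) sets problem_type="regression" with a single label,
# so the classification head returns one continuous score per input.
inputs = tokenizer("نص عربي للتقييم", return_tensors="pt",
                   truncation=True, max_length=512)
with torch.no_grad():
    score = model(**inputs).logits.squeeze().item()
print(score)
```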
## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 64
- eval_batch_size: 64
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
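For reference, a `TrainingArguments` sketch matching the list above; the `output_dir` is a placeholder, everything else mirrors the reported hyperparameters:

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="arabert_cross_organization_task7_fold5",  # placeholder path
    learning_rate=2e-5,
    per_device_train_batch_size=64,
    per_device_eval_batch_size=64,
    seed=42,
    adam_beta1=0.9,        # Adam betas as reported above
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=10,
)
```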
### Training results

| Training Loss | Epoch | Step | Validation Loss | Qwk | Mse |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|
| No log | 0.125 | 2 | 3.1878 | 0.0032 | 3.1878 |
| No log | 0.25 | 4 | 1.6075 | 0.1098 | 1.6075 |
| No log | 0.375 | 6 | 1.1035 | 0.2670 | 1.1035 |
| No log | 0.5 | 8 | 1.4112 | 0.3086 | 1.4112 |
| No log | 0.625 | 10 | 1.3436 | 0.3880 | 1.3436 |
| No log | 0.75 | 12 | 1.1181 | 0.3654 | 1.1181 |
| No log | 0.875 | 14 | 0.8279 | 0.4164 | 0.8279 |
| No log | 1.0 | 16 | 0.6795 | 0.4918 | 0.6795 |
| No log | 1.125 | 18 | 0.6997 | 0.5370 | 0.6997 |
| No log | 1.25 | 20 | 0.8208 | 0.6120 | 0.8208 |
| No log | 1.375 | 22 | 0.7785 | 0.7348 | 0.7785 |
| No log | 1.5 | 24 | 0.6388 | 0.7210 | 0.6388 |
| No log | 1.625 | 26 | 0.6688 | 0.7365 | 0.6688 |
| No log | 1.75 | 28 | 0.6507 | 0.7403 | 0.6507 |
| No log | 1.875 | 30 | 0.5320 | 0.7356 | 0.5320 |
| No log | 2.0 | 32 | 0.5254 | 0.7454 | 0.5254 |
| No log | 2.125 | 34 | 0.5348 | 0.7325 | 0.5348 |
| No log | 2.25 | 36 | 0.6139 | 0.7376 | 0.6139 |
| No log | 2.375 | 38 | 0.6648 | 0.7474 | 0.6648 |
| No log | 2.5 | 40 | 0.5894 | 0.7707 | 0.5894 |
| No log | 2.625 | 42 | 0.5580 | 0.7530 | 0.5580 |
| No log | 2.75 | 44 | 0.5824 | 0.7698 | 0.5824 |
| No log | 2.875 | 46 | 0.6444 | 0.7641 | 0.6444 |
| No log | 3.0 | 48 | 0.5327 | 0.7206 | 0.5327 |
| No log | 3.125 | 50 | 0.5871 | 0.7517 | 0.5871 |
| No log | 3.25 | 52 | 0.5331 | 0.7366 | 0.5331 |
| No log | 3.375 | 54 | 0.6130 | 0.7665 | 0.6130 |
| No log | 3.5 | 56 | 0.5889 | 0.7650 | 0.5889 |
| No log | 3.625 | 58 | 0.5848 | 0.7758 | 0.5848 |
| No log | 3.75 | 60 | 0.7089 | 0.7737 | 0.7089 |
| No log | 3.875 | 62 | 0.7846 | 0.7865 | 0.7846 |
| No log | 4.0 | 64 | 0.6552 | 0.7793 | 0.6552 |
| No log | 4.125 | 66 | 0.5020 | 0.7284 | 0.5020 |
| No log | 4.25 | 68 | 0.5170 | 0.7322 | 0.5170 |
| No log | 4.375 | 70 | 0.5877 | 0.7481 | 0.5877 |
| No log | 4.5 | 72 | 0.5700 | 0.7494 | 0.5700 |
| No log | 4.625 | 74 | 0.5147 | 0.7380 | 0.5147 |
| No log | 4.75 | 76 | 0.5942 | 0.7664 | 0.5942 |
| No log | 4.875 | 78 | 0.6564 | 0.7710 | 0.6564 |
| No log | 5.0 | 80 | 0.6565 | 0.7710 | 0.6565 |
| No log | 5.125 | 82 | 0.6572 | 0.7802 | 0.6572 |
| No log | 5.25 | 84 | 0.6860 | 0.7836 | 0.6860 |
| No log | 5.375 | 86 | 0.6265 | 0.7687 | 0.6265 |
| No log | 5.5 | 88 | 0.5116 | 0.7530 | 0.5116 |
| No log | 5.625 | 90 | 0.5026 | 0.7603 | 0.5026 |
| No log | 5.75 | 92 | 0.5588 | 0.7542 | 0.5588 |
| No log | 5.875 | 94 | 0.6752 | 0.7902 | 0.6752 |
| No log | 6.0 | 96 | 0.7891 | 0.7984 | 0.7891 |
| No log | 6.125 | 98 | 0.7038 | 0.7947 | 0.7038 |
| No log | 6.25 | 100 | 0.5797 | 0.7519 | 0.5797 |
| No log | 6.375 | 102 | 0.5895 | 0.7634 | 0.5895 |
| No log | 6.5 | 104 | 0.6498 | 0.7782 | 0.6498 |
| No log | 6.625 | 106 | 0.5864 | 0.7623 | 0.5864 |
| No log | 6.75 | 108 | 0.5259 | 0.7227 | 0.5259 |
| No log | 6.875 | 110 | 0.5133 | 0.7040 | 0.5133 |
| No log | 7.0 | 112 | 0.5219 | 0.7120 | 0.5219 |
| No log | 7.125 | 114 | 0.5822 | 0.7464 | 0.5822 |
| No log | 7.25 | 116 | 0.6526 | 0.7676 | 0.6526 |
| No log | 7.375 | 118 | 0.6628 | 0.7818 | 0.6628 |
| No log | 7.5 | 120 | 0.6080 | 0.7726 | 0.6080 |
| No log | 7.625 | 122 | 0.5645 | 0.7416 | 0.5645 |
| No log | 7.75 | 124 | 0.5592 | 0.7409 | 0.5592 |
| No log | 7.875 | 126 | 0.5637 | 0.7527 | 0.5637 |
| No log | 8.0 | 128 | 0.5640 | 0.7522 | 0.5640 |
| No log | 8.125 | 130 | 0.5743 | 0.7522 | 0.5743 |
| No log | 8.25 | 132 | 0.6128 | 0.7551 | 0.6128 |
| No log | 8.375 | 134 | 0.6083 | 0.7551 | 0.6083 |
| No log | 8.5 | 136 | 0.5761 | 0.7661 | 0.5761 |
| No log | 8.625 | 138 | 0.5522 | 0.7596 | 0.5522 |
| No log | 8.75 | 140 | 0.5418 | 0.7580 | 0.5418 |
| No log | 8.875 | 142 | 0.5541 | 0.7626 | 0.5541 |
| No log | 9.0 | 144 | 0.5890 | 0.7591 | 0.5890 |
| No log | 9.125 | 146 | 0.6347 | 0.7704 | 0.6347 |
| No log | 9.25 | 148 | 0.6524 | 0.7643 | 0.6524 |
| No log | 9.375 | 150 | 0.6441 | 0.7643 | 0.6441 |
| No log | 9.5 | 152 | 0.6196 | 0.7597 | 0.6196 |
| No log | 9.625 | 154 | 0.5988 | 0.7575 | 0.5988 |
| No log | 9.75 | 156 | 0.5835 | 0.7591 | 0.5835 |
| No log | 9.875 | 158 | 0.5803 | 0.7672 | 0.5803 |
| No log | 10.0 | 160 | 0.5800 | 0.7690 | 0.5800 |
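Qwk above is presumably the quadratic weighted kappa between predicted and gold scores. A sketch of computing it with scikit-learn, assuming the regression outputs are rounded to the nearest integer score (the actual rounding scheme is not documented in this card):

```python
from sklearn.metrics import cohen_kappa_score

# Illustrative values only; the real evaluation data is not part of this card.
y_true = [3, 4, 2, 5, 4]            # gold scores
y_pred = [2.8, 4.3, 2.1, 4.9, 3.6]  # raw regression outputs
qwk = cohen_kappa_score(y_true, [round(p) for p in y_pred],
                        weights="quadratic")
print(qwk)
```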
### Framework versions

- Transformers 4.44.0
- PyTorch 2.4.0
- Datasets 2.21.0
- Tokenizers 0.19.1
config.json ADDED
{
  "_name_or_path": "aubmindlab/bert-base-arabertv02",
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "problem_type": "regression",
  "torch_dtype": "float32",
  "transformers_version": "4.44.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 64000
}
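With a single `LABEL_0` entry and `"problem_type": "regression"`, `BertForSequenceClassification` computes an MSE loss internally, which is consistent with the identical Loss and Mse columns in the results table above. A sketch of how such a config is typically produced (the exact call used here is an assumption):

```python
from transformers import AutoConfig, AutoModelForSequenceClassification

config = AutoConfig.from_pretrained(
    "aubmindlab/bert-base-arabertv02",
    num_labels=1,               # yields the single LABEL_0 id2label entry
    problem_type="regression",  # switches the head's loss to MSE
)
model = AutoModelForSequenceClassification.from_pretrained(
    "aubmindlab/bert-base-arabertv02", config=config
)
```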
model.safetensors ADDED
version https://git-lfs.github.com/spec/v1
oid sha256:b1ec2b9a8ab5cef569f85d7069ba586116449637fff76004befc0c9bbce097be
size 540799996
training_args.bin ADDED
version https://git-lfs.github.com/spec/v1
oid sha256:60469b2ae4fa815aeb2fb9bf654e8109d287196972358cc958544d502e521a5a
size 5240