Farouk committed on
Commit · 701ad04 · 1 Parent(s): 5650484

Training in progress, step 8400

- adapter_model.bin +1 -1
- checkpoint-6200/adapter_model/adapter_model/README.md +12 -0
- checkpoint-6200/adapter_model/adapter_model/adapter_model.bin +1 -1
- {checkpoint-6400 → checkpoint-8400}/README.md +0 -0
- {checkpoint-6400 → checkpoint-8400}/adapter_config.json +0 -0
- {checkpoint-6400 → checkpoint-8400}/adapter_model.bin +1 -1
- {checkpoint-6400 → checkpoint-8400}/added_tokens.json +0 -0
- {checkpoint-6400 → checkpoint-8400}/optimizer.pt +1 -1
- {checkpoint-6400 → checkpoint-8400}/rng_state.pth +1 -1
- {checkpoint-6400 → checkpoint-8400}/scheduler.pt +1 -1
- {checkpoint-6400 → checkpoint-8400}/special_tokens_map.json +0 -0
- {checkpoint-6400 → checkpoint-8400}/tokenizer.model +0 -0
- {checkpoint-6400 → checkpoint-8400}/tokenizer_config.json +0 -0
- {checkpoint-6400 → checkpoint-8400}/trainer_state.json +1913 -3
- {checkpoint-6400 → checkpoint-8400}/training_args.bin +0 -0
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4ced9e734f3fc12124fc782ca014e6dcf293347365a287b101ef9ffa2589d529
 size 319977229
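The binary files in this commit are stored as Git LFS pointers, so each of these diffs only touches the `oid sha256:` line of a three-line pointer (`version`, `oid`, `size`). A minimal sketch of reading such a pointer; the path and usage are illustrative, not code from this repo:

```python
# Sketch: parse a Git LFS pointer file ("version" / "oid" / "size" lines)
# into a dict. The path below is illustrative.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value  # e.g. fields["oid"] == "sha256:4ced9e73..."
    return fields

pointer = parse_lfs_pointer("adapter_model.bin")
print(pointer["oid"], pointer["size"])  # new oid, size still 319977229
```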
checkpoint-6200/adapter_model/adapter_model/README.md CHANGED
@@ -103,6 +103,17 @@ The following `bitsandbytes` quantization config was used during training:
 - bnb_4bit_use_double_quant: True
 - bnb_4bit_compute_dtype: bfloat16
 
+The following `bitsandbytes` quantization config was used during training:
+- load_in_8bit: False
+- load_in_4bit: True
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: nf4
+- bnb_4bit_use_double_quant: True
+- bnb_4bit_compute_dtype: bfloat16
+
 The following `bitsandbytes` quantization config was used during training:
 - load_in_8bit: False
 - load_in_4bit: True
@@ -124,5 +135,6 @@ The following `bitsandbytes` quantization config was used during training:
 - PEFT 0.4.0
 - PEFT 0.4.0
 - PEFT 0.4.0
+- PEFT 0.4.0
 
 - PEFT 0.4.0
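The README block added above records the quantization settings as a flat list. For reference, a reconstruction of the equivalent `transformers.BitsAndBytesConfig` (the argument names mirror the listed keys; this sketch is not taken from the repo's training script):

```python
import torch
from transformers import BitsAndBytesConfig

# Reconstruction of the config listed in the README diff above;
# illustrative only, not the repo's actual training code.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # load_in_4bit: True (load_in_8bit: False)
    bnb_4bit_quant_type="nf4",              # bnb_4bit_quant_type: nf4
    bnb_4bit_use_double_quant=True,         # bnb_4bit_use_double_quant: True
    bnb_4bit_compute_dtype=torch.bfloat16,  # bnb_4bit_compute_dtype: bfloat16
)
```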
checkpoint-6200/adapter_model/adapter_model/adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:41495ac2a6e599a28fbb089b85c94cdd1fee50a70344cebf6ef7b2136faa0701
 size 319977229
{checkpoint-6400 → checkpoint-8400}/README.md RENAMED
File without changes

{checkpoint-6400 → checkpoint-8400}/adapter_config.json RENAMED
File without changes
{checkpoint-6400 → checkpoint-8400}/adapter_model.bin RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4ced9e734f3fc12124fc782ca014e6dcf293347365a287b101ef9ffa2589d529
 size 319977229
{checkpoint-6400 → checkpoint-8400}/added_tokens.json RENAMED
File without changes
{checkpoint-6400 → checkpoint-8400}/optimizer.pt RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:587d3d40ca1e30f7d23e37b27f073ad8bbbdc87c164e0004794a273f2963c5cd
 size 1279539973
{checkpoint-6400 → checkpoint-8400}/rng_state.pth RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cbd521752719417393b3e92486f3c053728be709a0b43c20128cfca85d357957
 size 14511
{checkpoint-6400 → checkpoint-8400}/scheduler.pt RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ccea19161c9664b17438c6997dfcdb88c41a0367a09102cbb43f2917eaa7b7b2
 size 627
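Together, optimizer.pt, rng_state.pth, and scheduler.pt are the state that lets `transformers.Trainer` resume this run exactly at step 8400. A one-line sketch, assuming `trainer` is the Trainer instance for this run and that the checkpoint path follows the `best_model_checkpoint` layout seen below:

```python
# Sketch only: `trainer` and the exact checkpoint path
# ("experts/expert-16/checkpoint-8400") are assumptions here.
trainer.train(resume_from_checkpoint="experts/expert-16/checkpoint-8400")
```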
{checkpoint-6400 → checkpoint-8400}/special_tokens_map.json RENAMED
File without changes

{checkpoint-6400 → checkpoint-8400}/tokenizer.model RENAMED
File without changes

{checkpoint-6400 → checkpoint-8400}/tokenizer_config.json RENAMED
File without changes
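The trainer_state.json diff below carries the bulk of the commit: train loss is logged every 10 steps, and every 200 steps an eval_loss entry plus a per-subject MMLU accuracy block is appended to log_history. A minimal sketch of pulling the curves back out, assuming the standard `transformers` trainer_state layout; the path is illustrative:

```python
import json

# Sketch: read the logged curves out of a trainer_state.json written by
# transformers.Trainer (layout as in the diff below).
with open("checkpoint-8400/trainer_state.json") as f:
    state = json.load(f)

log = state["log_history"]
train = [(e["step"], e["loss"]) for e in log if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in log if "eval_loss" in e]
mmlu = [(e["step"], e["mmlu_eval_accuracy"]) for e in log if "mmlu_eval_accuracy" in e]
print(train[-1], evals[-1], mmlu[-1])
```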
{checkpoint-6400 → checkpoint-8400}/trainer_state.json RENAMED
@@ -1,8 +1,8 @@
 {
   "best_metric": 0.7293602228164673,
   "best_model_checkpoint": "experts/expert-16/checkpoint-6200",
-  "epoch": 2.
-  "global_step":
+  "epoch": 2.661596958174905,
+  "global_step": 8400,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -6118,11 +6118,1921 @@
       "mmlu_eval_accuracy_world_religions": 0.631578947368421,
       "mmlu_loss": 1.3004325469542422,
       "step": 6400
     }
   ],
   "max_steps": 10000,
   "num_train_epochs": 4,
-  "total_flos":
   "trial_name": null,
   "trial_params": null
 }
+    },
+    {
+      "epoch": 2.03,
+      "learning_rate": 0.0002,
+      "loss": 0.5702,
+      "step": 6410
+    },
+    {
+      "epoch": 2.03,
+      "learning_rate": 0.0002,
+      "loss": 0.5957,
+      "step": 6420
+    },
+    {
+      "epoch": 2.04,
+      "learning_rate": 0.0002,
+      "loss": 0.5994,
+      "step": 6430
+    },
+    {
+      "epoch": 2.04,
+      "learning_rate": 0.0002,
+      "loss": 0.5922,
+      "step": 6440
+    },
+    {
+      "epoch": 2.04,
+      "learning_rate": 0.0002,
+      "loss": 0.5626,
+      "step": 6450
+    },
+    {
+      "epoch": 2.05,
+      "learning_rate": 0.0002,
+      "loss": 0.5912,
+      "step": 6460
+    },
+    {
+      "epoch": 2.05,
+      "learning_rate": 0.0002,
+      "loss": 0.5877,
+      "step": 6470
+    },
+    {
+      "epoch": 2.05,
+      "learning_rate": 0.0002,
+      "loss": 0.578,
+      "step": 6480
+    },
+    {
+      "epoch": 2.06,
+      "learning_rate": 0.0002,
+      "loss": 0.6207,
+      "step": 6490
+    },
+    {
+      "epoch": 2.06,
+      "learning_rate": 0.0002,
+      "loss": 0.5606,
+      "step": 6500
+    },
+    {
+      "epoch": 2.06,
+      "learning_rate": 0.0002,
+      "loss": 0.553,
+      "step": 6510
+    },
+    {
+      "epoch": 2.07,
+      "learning_rate": 0.0002,
+      "loss": 0.6092,
+      "step": 6520
+    },
+    {
+      "epoch": 2.07,
+      "learning_rate": 0.0002,
+      "loss": 0.6183,
+      "step": 6530
+    },
+    {
+      "epoch": 2.07,
+      "learning_rate": 0.0002,
+      "loss": 0.5825,
+      "step": 6540
+    },
+    {
+      "epoch": 2.08,
+      "learning_rate": 0.0002,
+      "loss": 0.5674,
+      "step": 6550
+    },
+    {
+      "epoch": 2.08,
+      "learning_rate": 0.0002,
+      "loss": 0.5587,
+      "step": 6560
+    },
+    {
+      "epoch": 2.08,
+      "learning_rate": 0.0002,
+      "loss": 0.5317,
+      "step": 6570
+    },
+    {
+      "epoch": 2.08,
+      "learning_rate": 0.0002,
+      "loss": 0.6731,
+      "step": 6580
+    },
+    {
+      "epoch": 2.09,
+      "learning_rate": 0.0002,
+      "loss": 0.6242,
+      "step": 6590
+    },
+    {
+      "epoch": 2.09,
+      "learning_rate": 0.0002,
+      "loss": 0.6332,
+      "step": 6600
+    },
+    {
+      "epoch": 2.09,
+      "eval_loss": 0.7567528486251831,
+      "eval_runtime": 111.0264,
+      "eval_samples_per_second": 9.007,
+      "eval_steps_per_second": 4.503,
+      "step": 6600
+    },
+    {
+      "epoch": 2.09,
+      "mmlu_eval_accuracy": 0.47542707100737025,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.25,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
+      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
+      "mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.058823529411764705,
+      "mmlu_eval_accuracy_high_school_psychology": 0.85,
+      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.8,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
+      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
+      "mmlu_eval_accuracy_moral_scenarios": 0.26,
+      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
+      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
+      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
+      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
+      "mmlu_eval_accuracy_professional_law": 0.31176470588235294,
+      "mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
+      "mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
+      "mmlu_eval_accuracy_public_relations": 0.5,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.4275867019247448,
+      "step": 6600
+    },
+    {
+      "epoch": 2.09,
+      "learning_rate": 0.0002,
+      "loss": 0.5948,
+      "step": 6610
+    },
+    {
+      "epoch": 2.1,
+      "learning_rate": 0.0002,
+      "loss": 0.6068,
+      "step": 6620
+    },
+    {
+      "epoch": 2.1,
+      "learning_rate": 0.0002,
+      "loss": 0.5831,
+      "step": 6630
+    },
+    {
+      "epoch": 2.1,
+      "learning_rate": 0.0002,
+      "loss": 0.5664,
+      "step": 6640
+    },
+    {
+      "epoch": 2.11,
+      "learning_rate": 0.0002,
+      "loss": 0.622,
+      "step": 6650
+    },
+    {
+      "epoch": 2.11,
+      "learning_rate": 0.0002,
+      "loss": 0.5759,
+      "step": 6660
+    },
+    {
+      "epoch": 2.11,
+      "learning_rate": 0.0002,
+      "loss": 0.5841,
+      "step": 6670
+    },
+    {
+      "epoch": 2.12,
+      "learning_rate": 0.0002,
+      "loss": 0.6221,
+      "step": 6680
+    },
+    {
+      "epoch": 2.12,
+      "learning_rate": 0.0002,
+      "loss": 0.5904,
+      "step": 6690
+    },
+    {
+      "epoch": 2.12,
+      "learning_rate": 0.0002,
+      "loss": 0.6121,
+      "step": 6700
+    },
+    {
+      "epoch": 2.13,
+      "learning_rate": 0.0002,
+      "loss": 0.5526,
+      "step": 6710
+    },
+    {
+      "epoch": 2.13,
+      "learning_rate": 0.0002,
+      "loss": 0.6742,
+      "step": 6720
+    },
+    {
+      "epoch": 2.13,
+      "learning_rate": 0.0002,
+      "loss": 0.5705,
+      "step": 6730
+    },
+    {
+      "epoch": 2.14,
+      "learning_rate": 0.0002,
+      "loss": 0.6151,
+      "step": 6740
+    },
+    {
+      "epoch": 2.14,
+      "learning_rate": 0.0002,
+      "loss": 0.5902,
+      "step": 6750
+    },
+    {
+      "epoch": 2.14,
+      "learning_rate": 0.0002,
+      "loss": 0.6448,
+      "step": 6760
+    },
+    {
+      "epoch": 2.15,
+      "learning_rate": 0.0002,
+      "loss": 0.5395,
+      "step": 6770
+    },
+    {
+      "epoch": 2.15,
+      "learning_rate": 0.0002,
+      "loss": 0.5613,
+      "step": 6780
+    },
+    {
+      "epoch": 2.15,
+      "learning_rate": 0.0002,
+      "loss": 0.5802,
+      "step": 6790
+    },
+    {
+      "epoch": 2.15,
+      "learning_rate": 0.0002,
+      "loss": 0.6026,
+      "step": 6800
+    },
+    {
+      "epoch": 2.15,
+      "eval_loss": 0.7631368637084961,
+      "eval_runtime": 111.0583,
+      "eval_samples_per_second": 9.004,
+      "eval_steps_per_second": 4.502,
+      "step": 6800
+    },
+    {
+      "epoch": 2.15,
+      "mmlu_eval_accuracy": 0.47370240345715936,
+      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
+      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.375,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
+      "mmlu_eval_accuracy_college_physics": 0.2727272727272727,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
+      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.2727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
+      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.5116279069767442,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.0,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.6538461538461539,
+      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
+      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.72,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
+      "mmlu_eval_accuracy_moral_scenarios": 0.25,
+      "mmlu_eval_accuracy_nutrition": 0.6666666666666666,
+      "mmlu_eval_accuracy_philosophy": 0.5,
+      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
+      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
+      "mmlu_eval_accuracy_professional_law": 0.3235294117647059,
+      "mmlu_eval_accuracy_professional_medicine": 0.5483870967741935,
+      "mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
+      "mmlu_eval_accuracy_public_relations": 0.5,
+      "mmlu_eval_accuracy_security_studies": 0.4444444444444444,
+      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.295992794337223,
+      "step": 6800
+    },
+    {
+      "epoch": 2.16,
+      "learning_rate": 0.0002,
+      "loss": 0.5435,
+      "step": 6810
+    },
+    {
+      "epoch": 2.16,
+      "learning_rate": 0.0002,
+      "loss": 0.593,
+      "step": 6820
+    },
+    {
+      "epoch": 2.16,
+      "learning_rate": 0.0002,
+      "loss": 0.5898,
+      "step": 6830
+    },
+    {
+      "epoch": 2.17,
+      "learning_rate": 0.0002,
+      "loss": 0.5404,
+      "step": 6840
+    },
+    {
+      "epoch": 2.17,
+      "learning_rate": 0.0002,
+      "loss": 0.593,
+      "step": 6850
+    },
+    {
+      "epoch": 2.17,
+      "learning_rate": 0.0002,
+      "loss": 0.5832,
+      "step": 6860
+    },
+    {
+      "epoch": 2.18,
+      "learning_rate": 0.0002,
+      "loss": 0.6201,
+      "step": 6870
+    },
+    {
+      "epoch": 2.18,
+      "learning_rate": 0.0002,
+      "loss": 0.6147,
+      "step": 6880
+    },
+    {
+      "epoch": 2.18,
+      "learning_rate": 0.0002,
+      "loss": 0.6102,
+      "step": 6890
+    },
+    {
+      "epoch": 2.19,
+      "learning_rate": 0.0002,
+      "loss": 0.5885,
+      "step": 6900
+    },
+    {
+      "epoch": 2.19,
+      "learning_rate": 0.0002,
+      "loss": 0.5549,
+      "step": 6910
+    },
+    {
+      "epoch": 2.19,
+      "learning_rate": 0.0002,
+      "loss": 0.5973,
+      "step": 6920
+    },
+    {
+      "epoch": 2.2,
+      "learning_rate": 0.0002,
+      "loss": 0.589,
+      "step": 6930
+    },
+    {
+      "epoch": 2.2,
+      "learning_rate": 0.0002,
+      "loss": 0.6258,
+      "step": 6940
+    },
+    {
+      "epoch": 2.2,
+      "learning_rate": 0.0002,
+      "loss": 0.6038,
+      "step": 6950
+    },
+    {
+      "epoch": 2.21,
+      "learning_rate": 0.0002,
+      "loss": 0.5865,
+      "step": 6960
+    },
+    {
+      "epoch": 2.21,
+      "learning_rate": 0.0002,
+      "loss": 0.6355,
+      "step": 6970
+    },
+    {
+      "epoch": 2.21,
+      "learning_rate": 0.0002,
+      "loss": 0.6572,
+      "step": 6980
+    },
+    {
+      "epoch": 2.21,
+      "learning_rate": 0.0002,
+      "loss": 0.5367,
+      "step": 6990
+    },
+    {
+      "epoch": 2.22,
+      "learning_rate": 0.0002,
+      "loss": 0.5959,
+      "step": 7000
+    },
+    {
+      "epoch": 2.22,
+      "eval_loss": 0.7645158767700195,
+      "eval_runtime": 111.037,
+      "eval_samples_per_second": 9.006,
+      "eval_steps_per_second": 4.503,
+      "step": 7000
+    },
+    {
+      "epoch": 2.22,
+      "mmlu_eval_accuracy": 0.478166482161635,
+      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
+      "mmlu_eval_accuracy_anatomy": 0.5,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.375,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+      "mmlu_eval_accuracy_computer_security": 0.5454545454545454,
+      "mmlu_eval_accuracy_conceptual_physics": 0.46153846153846156,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
+      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
+      "mmlu_eval_accuracy_global_facts": 0.5,
+      "mmlu_eval_accuracy_high_school_biology": 0.375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
+      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
+      "mmlu_eval_accuracy_high_school_physics": 0.058823529411764705,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.782608695652174,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
+      "mmlu_eval_accuracy_management": 0.7272727272727273,
+      "mmlu_eval_accuracy_marketing": 0.84,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+      "mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
+      "mmlu_eval_accuracy_moral_scenarios": 0.23,
+      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
+      "mmlu_eval_accuracy_philosophy": 0.5,
+      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
+      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
+      "mmlu_eval_accuracy_professional_law": 0.35294117647058826,
+      "mmlu_eval_accuracy_professional_medicine": 0.5161290322580645,
+      "mmlu_eval_accuracy_professional_psychology": 0.463768115942029,
+      "mmlu_eval_accuracy_public_relations": 0.5,
+      "mmlu_eval_accuracy_security_studies": 0.4444444444444444,
+      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5,
+      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
+      "mmlu_loss": 1.506881151358079,
+      "step": 7000
+    },
+    {
+      "epoch": 2.22,
+      "learning_rate": 0.0002,
+      "loss": 0.6429,
+      "step": 7010
+    },
+    {
+      "epoch": 2.22,
+      "learning_rate": 0.0002,
+      "loss": 0.5899,
+      "step": 7020
+    },
+    {
+      "epoch": 2.23,
+      "learning_rate": 0.0002,
+      "loss": 0.5661,
+      "step": 7030
+    },
+    {
+      "epoch": 2.23,
+      "learning_rate": 0.0002,
+      "loss": 0.5747,
+      "step": 7040
+    },
+    {
+      "epoch": 2.23,
+      "learning_rate": 0.0002,
+      "loss": 0.603,
+      "step": 7050
+    },
+    {
+      "epoch": 2.24,
+      "learning_rate": 0.0002,
+      "loss": 0.5864,
+      "step": 7060
+    },
+    {
+      "epoch": 2.24,
+      "learning_rate": 0.0002,
+      "loss": 0.588,
+      "step": 7070
+    },
+    {
+      "epoch": 2.24,
+      "learning_rate": 0.0002,
+      "loss": 0.6275,
+      "step": 7080
+    },
+    {
+      "epoch": 2.25,
+      "learning_rate": 0.0002,
+      "loss": 0.6118,
+      "step": 7090
+    },
+    {
+      "epoch": 2.25,
+      "learning_rate": 0.0002,
+      "loss": 0.6475,
+      "step": 7100
+    },
+    {
+      "epoch": 2.25,
+      "learning_rate": 0.0002,
+      "loss": 0.6191,
+      "step": 7110
+    },
+    {
+      "epoch": 2.26,
+      "learning_rate": 0.0002,
+      "loss": 0.5623,
+      "step": 7120
+    },
+    {
+      "epoch": 2.26,
+      "learning_rate": 0.0002,
+      "loss": 0.6052,
+      "step": 7130
+    },
+    {
+      "epoch": 2.26,
+      "learning_rate": 0.0002,
+      "loss": 0.545,
+      "step": 7140
+    },
+    {
+      "epoch": 2.27,
+      "learning_rate": 0.0002,
+      "loss": 0.5975,
+      "step": 7150
+    },
+    {
+      "epoch": 2.27,
+      "learning_rate": 0.0002,
+      "loss": 0.6022,
+      "step": 7160
+    },
+    {
+      "epoch": 2.27,
+      "learning_rate": 0.0002,
+      "loss": 0.608,
+      "step": 7170
+    },
+    {
+      "epoch": 2.28,
+      "learning_rate": 0.0002,
+      "loss": 0.6401,
+      "step": 7180
+    },
+    {
+      "epoch": 2.28,
+      "learning_rate": 0.0002,
+      "loss": 0.6429,
+      "step": 7190
+    },
+    {
+      "epoch": 2.28,
+      "learning_rate": 0.0002,
+      "loss": 0.5495,
+      "step": 7200
+    },
+    {
+      "epoch": 2.28,
+      "eval_loss": 0.7578040361404419,
+      "eval_runtime": 111.0662,
+      "eval_samples_per_second": 9.004,
+      "eval_steps_per_second": 4.502,
+      "step": 7200
+    },
+    {
+      "epoch": 2.28,
+      "mmlu_eval_accuracy": 0.47051789661643223,
+      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.125,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
+      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.34375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.0,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
+      "mmlu_eval_accuracy_human_aging": 0.7391304347826086,
+      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
+      "mmlu_eval_accuracy_management": 0.7272727272727273,
+      "mmlu_eval_accuracy_marketing": 0.76,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
+      "mmlu_eval_accuracy_moral_disputes": 0.5,
+      "mmlu_eval_accuracy_moral_scenarios": 0.25,
+      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
+      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
+      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
+      "mmlu_eval_accuracy_professional_accounting": 0.3548387096774194,
+      "mmlu_eval_accuracy_professional_law": 0.3176470588235294,
+      "mmlu_eval_accuracy_professional_medicine": 0.6451612903225806,
+      "mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
+      "mmlu_eval_accuracy_public_relations": 0.5,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.5382918150944747,
+      "step": 7200
+    },
+    {
+      "epoch": 2.28,
+      "learning_rate": 0.0002,
+      "loss": 0.5606,
+      "step": 7210
+    },
+    {
+      "epoch": 2.29,
+      "learning_rate": 0.0002,
+      "loss": 0.5737,
+      "step": 7220
+    },
+    {
+      "epoch": 2.29,
+      "learning_rate": 0.0002,
+      "loss": 0.6112,
+      "step": 7230
+    },
+    {
+      "epoch": 2.29,
+      "learning_rate": 0.0002,
+      "loss": 0.626,
+      "step": 7240
+    },
+    {
+      "epoch": 2.3,
+      "learning_rate": 0.0002,
+      "loss": 0.608,
+      "step": 7250
+    },
+    {
+      "epoch": 2.3,
+      "learning_rate": 0.0002,
+      "loss": 0.6265,
+      "step": 7260
+    },
+    {
+      "epoch": 2.3,
+      "learning_rate": 0.0002,
+      "loss": 0.6053,
+      "step": 7270
+    },
+    {
+      "epoch": 2.31,
+      "learning_rate": 0.0002,
+      "loss": 0.6135,
+      "step": 7280
+    },
+    {
+      "epoch": 2.31,
+      "learning_rate": 0.0002,
+      "loss": 0.5217,
+      "step": 7290
+    },
+    {
+      "epoch": 2.31,
+      "learning_rate": 0.0002,
+      "loss": 0.6124,
+      "step": 7300
+    },
+    {
+      "epoch": 2.32,
+      "learning_rate": 0.0002,
+      "loss": 0.5506,
+      "step": 7310
+    },
+    {
+      "epoch": 2.32,
+      "learning_rate": 0.0002,
+      "loss": 0.6095,
+      "step": 7320
+    },
+    {
+      "epoch": 2.32,
+      "learning_rate": 0.0002,
+      "loss": 0.5972,
+      "step": 7330
+    },
+    {
+      "epoch": 2.33,
+      "learning_rate": 0.0002,
+      "loss": 0.6714,
+      "step": 7340
+    },
+    {
+      "epoch": 2.33,
+      "learning_rate": 0.0002,
+      "loss": 0.6083,
+      "step": 7350
+    },
+    {
+      "epoch": 2.33,
+      "learning_rate": 0.0002,
+      "loss": 0.6033,
+      "step": 7360
+    },
+    {
+      "epoch": 2.34,
+      "learning_rate": 0.0002,
+      "loss": 0.5881,
+      "step": 7370
+    },
+    {
+      "epoch": 2.34,
+      "learning_rate": 0.0002,
+      "loss": 0.5958,
+      "step": 7380
+    },
+    {
+      "epoch": 2.34,
+      "learning_rate": 0.0002,
+      "loss": 0.6009,
+      "step": 7390
+    },
+    {
+      "epoch": 2.34,
+      "learning_rate": 0.0002,
+      "loss": 0.5608,
+      "step": 7400
+    },
+    {
+      "epoch": 2.34,
+      "eval_loss": 0.767185628414154,
+      "eval_runtime": 111.2161,
+      "eval_samples_per_second": 8.992,
+      "eval_steps_per_second": 4.496,
+      "step": 7400
+    },
+    {
+      "epoch": 2.34,
+      "mmlu_eval_accuracy": 0.46046773240416866,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.125,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.09090909090909091,
+      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683,
+      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.3125,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.13636363636363635,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.0,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.782608695652174,
+      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.8,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.627906976744186,
+      "mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
+      "mmlu_eval_accuracy_moral_scenarios": 0.27,
+      "mmlu_eval_accuracy_nutrition": 0.5454545454545454,
+      "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
+      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
+      "mmlu_eval_accuracy_professional_accounting": 0.3548387096774194,
+      "mmlu_eval_accuracy_professional_law": 0.3,
+      "mmlu_eval_accuracy_professional_medicine": 0.6451612903225806,
+      "mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
+      "mmlu_eval_accuracy_public_relations": 0.5833333333333334,
+      "mmlu_eval_accuracy_security_studies": 0.4444444444444444,
+      "mmlu_eval_accuracy_sociology": 0.7272727272727273,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
+      "mmlu_eval_accuracy_virology": 0.5,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.5711101981040392,
+      "step": 7400
+    },
+    {
+      "epoch": 2.35,
+      "learning_rate": 0.0002,
+      "loss": 0.5974,
+      "step": 7410
+    },
+    {
+      "epoch": 2.35,
+      "learning_rate": 0.0002,
+      "loss": 0.5677,
+      "step": 7420
+    },
+    {
+      "epoch": 2.35,
+      "learning_rate": 0.0002,
+      "loss": 0.5592,
+      "step": 7430
+    },
+    {
+      "epoch": 2.36,
+      "learning_rate": 0.0002,
+      "loss": 0.5754,
+      "step": 7440
+    },
+    {
+      "epoch": 2.36,
+      "learning_rate": 0.0002,
+      "loss": 0.6117,
+      "step": 7450
+    },
+    {
+      "epoch": 2.36,
+      "learning_rate": 0.0002,
+      "loss": 0.5462,
+      "step": 7460
+    },
+    {
+      "epoch": 2.37,
+      "learning_rate": 0.0002,
+      "loss": 0.5888,
+      "step": 7470
+    },
+    {
+      "epoch": 2.37,
+      "learning_rate": 0.0002,
+      "loss": 0.5933,
+      "step": 7480
+    },
+    {
+      "epoch": 2.37,
+      "learning_rate": 0.0002,
+      "loss": 0.6329,
+      "step": 7490
+    },
+    {
+      "epoch": 2.38,
+      "learning_rate": 0.0002,
+      "loss": 0.6803,
+      "step": 7500
+    },
+    {
+      "epoch": 2.38,
+      "learning_rate": 0.0002,
+      "loss": 0.5907,
+      "step": 7510
+    },
+    {
+      "epoch": 2.38,
+      "learning_rate": 0.0002,
+      "loss": 0.5929,
+      "step": 7520
+    },
+    {
+      "epoch": 2.39,
+      "learning_rate": 0.0002,
+      "loss": 0.6288,
+      "step": 7530
+    },
+    {
+      "epoch": 2.39,
+      "learning_rate": 0.0002,
+      "loss": 0.5839,
+      "step": 7540
+    },
+    {
+      "epoch": 2.39,
+      "learning_rate": 0.0002,
+      "loss": 0.5886,
+      "step": 7550
+    },
+    {
+      "epoch": 2.4,
+      "learning_rate": 0.0002,
+      "loss": 0.6225,
+      "step": 7560
+    },
+    {
+      "epoch": 2.4,
+      "learning_rate": 0.0002,
+      "loss": 0.6009,
+      "step": 7570
+    },
+    {
+      "epoch": 2.4,
+      "learning_rate": 0.0002,
+      "loss": 0.5975,
+      "step": 7580
+    },
+    {
+      "epoch": 2.4,
+      "learning_rate": 0.0002,
+      "loss": 0.5581,
+      "step": 7590
+    },
+    {
+      "epoch": 2.41,
+      "learning_rate": 0.0002,
+      "loss": 0.612,
+      "step": 7600
+    },
+    {
+      "epoch": 2.41,
+      "eval_loss": 0.76031494140625,
+      "eval_runtime": 111.0399,
+      "eval_samples_per_second": 9.006,
+      "eval_steps_per_second": 4.503,
+      "step": 7600
+    },
+    {
+      "epoch": 2.41,
+      "mmlu_eval_accuracy": 0.47951118911559576,
+      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
+      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
+      "mmlu_eval_accuracy_astronomy": 0.5,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
+      "mmlu_eval_accuracy_college_biology": 0.4375,
+      "mmlu_eval_accuracy_college_chemistry": 0.375,
+      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.18181818181818182,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
+      "mmlu_eval_accuracy_formal_logic": 0.07142857142857142,
+      "mmlu_eval_accuracy_global_facts": 0.5,
+      "mmlu_eval_accuracy_high_school_biology": 0.40625,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
+      "mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.4418604651162791,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.0,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8833333333333333,
+      "mmlu_eval_accuracy_high_school_statistics": 0.21739130434782608,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
+      "mmlu_eval_accuracy_human_aging": 0.782608695652174,
+      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.8,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
+      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
+      "mmlu_eval_accuracy_moral_scenarios": 0.24,
+      "mmlu_eval_accuracy_nutrition": 0.6666666666666666,
+      "mmlu_eval_accuracy_philosophy": 0.5588235294117647,
+      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
+      "mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
+      "mmlu_eval_accuracy_professional_law": 0.3352941176470588,
+      "mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
+      "mmlu_eval_accuracy_professional_psychology": 0.5217391304347826,
+      "mmlu_eval_accuracy_public_relations": 0.5,
+      "mmlu_eval_accuracy_security_studies": 0.4444444444444444,
+      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5,
+      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
+      "mmlu_loss": 1.584926052947891,
+      "step": 7600
+    },
+    {
+      "epoch": 2.41,
+      "learning_rate": 0.0002,
+      "loss": 0.5914,
+      "step": 7610
+    },
+    {
+      "epoch": 2.41,
+      "learning_rate": 0.0002,
+      "loss": 0.59,
+      "step": 7620
+    },
+    {
+      "epoch": 2.42,
+      "learning_rate": 0.0002,
+      "loss": 0.6179,
+      "step": 7630
+    },
+    {
+      "epoch": 2.42,
+      "learning_rate": 0.0002,
+      "loss": 0.6203,
+      "step": 7640
+    },
+    {
+      "epoch": 2.42,
+      "learning_rate": 0.0002,
+      "loss": 0.6113,
+      "step": 7650
+    },
+    {
+      "epoch": 2.43,
+      "learning_rate": 0.0002,
+      "loss": 0.5505,
+      "step": 7660
+    },
+    {
+      "epoch": 2.43,
+      "learning_rate": 0.0002,
+      "loss": 0.5664,
+      "step": 7670
+    },
+    {
+      "epoch": 2.43,
+      "learning_rate": 0.0002,
+      "loss": 0.596,
+      "step": 7680
+    },
+    {
+      "epoch": 2.44,
+      "learning_rate": 0.0002,
+      "loss": 0.6125,
+      "step": 7690
+    },
+    {
+      "epoch": 2.44,
+      "learning_rate": 0.0002,
+      "loss": 0.607,
+      "step": 7700
+    },
+    {
+      "epoch": 2.44,
+      "learning_rate": 0.0002,
+      "loss": 0.5657,
+      "step": 7710
+    },
+    {
+      "epoch": 2.45,
+      "learning_rate": 0.0002,
+      "loss": 0.5419,
+      "step": 7720
+    },
+    {
+      "epoch": 2.45,
+      "learning_rate": 0.0002,
+      "loss": 0.614,
+      "step": 7730
+    },
+    {
+      "epoch": 2.45,
+      "learning_rate": 0.0002,
+      "loss": 0.6107,
+      "step": 7740
+    },
+    {
+      "epoch": 2.46,
+      "learning_rate": 0.0002,
+      "loss": 0.6099,
+      "step": 7750
+    },
+    {
+      "epoch": 2.46,
+      "learning_rate": 0.0002,
+      "loss": 0.5994,
+      "step": 7760
+    },
+    {
+      "epoch": 2.46,
+      "learning_rate": 0.0002,
+      "loss": 0.6274,
+      "step": 7770
+    },
+    {
+      "epoch": 2.47,
+      "learning_rate": 0.0002,
+      "loss": 0.5902,
+      "step": 7780
+    },
+    {
+      "epoch": 2.47,
+      "learning_rate": 0.0002,
+      "loss": 0.5902,
+      "step": 7790
+    },
+    {
+      "epoch": 2.47,
+      "learning_rate": 0.0002,
+      "loss": 0.599,
+      "step": 7800
+    },
+    {
+      "epoch": 2.47,
+      "eval_loss": 0.760485827922821,
+      "eval_runtime": 111.1916,
+      "eval_samples_per_second": 8.993,
+      "eval_steps_per_second": 4.497,
+      "step": 7800
+    },
+    {
+      "epoch": 2.47,
+      "mmlu_eval_accuracy": 0.48418694277386404,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.5,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.375,
+      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
+      "mmlu_eval_accuracy_college_medicine": 0.5,
+      "mmlu_eval_accuracy_college_physics": 0.2727272727272727,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
|
7414 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
|
7415 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
7416 |
+
"mmlu_eval_accuracy_global_facts": 0.5,
|
7417 |
+
"mmlu_eval_accuracy_high_school_biology": 0.40625,
|
7418 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
|
7419 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
7420 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
7421 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
7422 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
|
7423 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
|
7424 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
|
7425 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.5,
|
7426 |
+
"mmlu_eval_accuracy_high_school_physics": 0.0,
|
7427 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
|
7428 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
|
7429 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
7430 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
|
7431 |
+
"mmlu_eval_accuracy_human_aging": 0.7391304347826086,
|
7432 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
7433 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
7434 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
7435 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
7436 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
7437 |
+
"mmlu_eval_accuracy_management": 0.7272727272727273,
|
7438 |
+
"mmlu_eval_accuracy_marketing": 0.84,
|
7439 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
7440 |
+
"mmlu_eval_accuracy_miscellaneous": 0.627906976744186,
|
7441 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
|
7442 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.27,
|
7443 |
+
"mmlu_eval_accuracy_nutrition": 0.6666666666666666,
|
7444 |
+
"mmlu_eval_accuracy_philosophy": 0.5588235294117647,
|
7445 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
7446 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
|
7447 |
+
"mmlu_eval_accuracy_professional_law": 0.34705882352941175,
|
7448 |
+
"mmlu_eval_accuracy_professional_medicine": 0.6129032258064516,
|
7449 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
|
7450 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
7451 |
+
"mmlu_eval_accuracy_security_studies": 0.4074074074074074,
|
7452 |
+
"mmlu_eval_accuracy_sociology": 0.6363636363636364,
|
7453 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
7454 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
7455 |
+
"mmlu_eval_accuracy_world_religions": 0.6842105263157895,
|
7456 |
+
"mmlu_loss": 1.4828916400204128,
|
7457 |
+
"step": 7800
|
7458 |
+
},
|
7459 |
+
{
|
7460 |
+
"epoch": 2.47,
|
7461 |
+
"learning_rate": 0.0002,
|
7462 |
+
"loss": 0.6005,
|
7463 |
+
"step": 7810
|
7464 |
+
},
|
7465 |
+
{
|
7466 |
+
"epoch": 2.48,
|
7467 |
+
"learning_rate": 0.0002,
|
7468 |
+
"loss": 0.6662,
|
7469 |
+
"step": 7820
|
7470 |
+
},
|
7471 |
+
{
|
7472 |
+
"epoch": 2.48,
|
7473 |
+
"learning_rate": 0.0002,
|
7474 |
+
"loss": 0.5821,
|
7475 |
+
"step": 7830
|
7476 |
+
},
|
7477 |
+
{
|
7478 |
+
"epoch": 2.48,
|
7479 |
+
"learning_rate": 0.0002,
|
7480 |
+
"loss": 0.5826,
|
7481 |
+
"step": 7840
|
7482 |
+
},
|
7483 |
+
{
|
7484 |
+
"epoch": 2.49,
|
7485 |
+
"learning_rate": 0.0002,
|
7486 |
+
"loss": 0.5804,
|
7487 |
+
"step": 7850
|
7488 |
+
},
|
7489 |
+
{
|
7490 |
+
"epoch": 2.49,
|
7491 |
+
"learning_rate": 0.0002,
|
7492 |
+
"loss": 0.587,
|
7493 |
+
"step": 7860
|
7494 |
+
},
|
7495 |
+
{
|
7496 |
+
"epoch": 2.49,
|
7497 |
+
"learning_rate": 0.0002,
|
7498 |
+
"loss": 0.6062,
|
7499 |
+
"step": 7870
|
7500 |
+
},
|
7501 |
+
{
|
7502 |
+
"epoch": 2.5,
|
7503 |
+
"learning_rate": 0.0002,
|
7504 |
+
"loss": 0.5616,
|
7505 |
+
"step": 7880
|
7506 |
+
},
|
7507 |
+
{
|
7508 |
+
"epoch": 2.5,
|
7509 |
+
"learning_rate": 0.0002,
|
7510 |
+
"loss": 0.6351,
|
7511 |
+
"step": 7890
|
7512 |
+
},
|
7513 |
+
{
|
7514 |
+
"epoch": 2.5,
|
7515 |
+
"learning_rate": 0.0002,
|
7516 |
+
"loss": 0.5738,
|
7517 |
+
"step": 7900
|
7518 |
+
},
|
7519 |
+
{
|
7520 |
+
"epoch": 2.51,
|
7521 |
+
"learning_rate": 0.0002,
|
7522 |
+
"loss": 0.5564,
|
7523 |
+
"step": 7910
|
7524 |
+
},
|
7525 |
+
{
|
7526 |
+
"epoch": 2.51,
|
7527 |
+
"learning_rate": 0.0002,
|
7528 |
+
"loss": 0.5696,
|
7529 |
+
"step": 7920
|
7530 |
+
},
|
7531 |
+
{
|
7532 |
+
"epoch": 2.51,
|
7533 |
+
"learning_rate": 0.0002,
|
7534 |
+
"loss": 0.5812,
|
7535 |
+
"step": 7930
|
7536 |
+
},
|
7537 |
+
{
|
7538 |
+
"epoch": 2.52,
|
7539 |
+
"learning_rate": 0.0002,
|
7540 |
+
"loss": 0.5786,
|
7541 |
+
"step": 7940
|
7542 |
+
},
|
7543 |
+
{
|
7544 |
+
"epoch": 2.52,
|
7545 |
+
"learning_rate": 0.0002,
|
7546 |
+
"loss": 0.6053,
|
7547 |
+
"step": 7950
|
7548 |
+
},
|
7549 |
+
{
|
7550 |
+
"epoch": 2.52,
|
7551 |
+
"learning_rate": 0.0002,
|
7552 |
+
"loss": 0.5727,
|
7553 |
+
"step": 7960
|
7554 |
+
},
|
7555 |
+
{
|
7556 |
+
"epoch": 2.53,
|
7557 |
+
"learning_rate": 0.0002,
|
7558 |
+
"loss": 0.621,
|
7559 |
+
"step": 7970
|
7560 |
+
},
|
7561 |
+
{
|
7562 |
+
"epoch": 2.53,
|
7563 |
+
"learning_rate": 0.0002,
|
7564 |
+
"loss": 0.5679,
|
7565 |
+
"step": 7980
|
7566 |
+
},
|
7567 |
+
{
|
7568 |
+
"epoch": 2.53,
|
7569 |
+
"learning_rate": 0.0002,
|
7570 |
+
"loss": 0.6138,
|
7571 |
+
"step": 7990
|
7572 |
+
},
|
7573 |
+
{
|
7574 |
+
"epoch": 2.53,
|
7575 |
+
"learning_rate": 0.0002,
|
7576 |
+
"loss": 0.588,
|
7577 |
+
"step": 8000
|
7578 |
+
},
|
7579 |
+
{
|
7580 |
+
"epoch": 2.53,
|
7581 |
+
"eval_loss": 0.7585816979408264,
|
7582 |
+
"eval_runtime": 111.2835,
|
7583 |
+
"eval_samples_per_second": 8.986,
|
7584 |
+
"eval_steps_per_second": 4.493,
|
7585 |
+
"step": 8000
|
7586 |
+
},
|
7587 |
+
{
|
7588 |
+
"epoch": 2.53,
|
7589 |
+
"mmlu_eval_accuracy": 0.48589851563960756,
|
7590 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
7591 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
7592 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
7593 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
7594 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
|
7595 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
7596 |
+
"mmlu_eval_accuracy_college_chemistry": 0.375,
|
7597 |
+
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
|
7598 |
+
"mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
|
7599 |
+
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
|
7600 |
+
"mmlu_eval_accuracy_college_physics": 0.18181818181818182,
|
7601 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
7602 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
7603 |
+
"mmlu_eval_accuracy_econometrics": 0.25,
|
7604 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.375,
|
7605 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
7606 |
+
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
|
7607 |
+
"mmlu_eval_accuracy_global_facts": 0.5,
|
7608 |
+
"mmlu_eval_accuracy_high_school_biology": 0.40625,
|
7609 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
|
7610 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
7611 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
7612 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
7613 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
|
7614 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.4883720930232558,
|
7615 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
|
7616 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.5384615384615384,
|
7617 |
+
"mmlu_eval_accuracy_high_school_physics": 0.0,
|
7618 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.9,
|
7619 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
|
7620 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
7621 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
7622 |
+
"mmlu_eval_accuracy_human_aging": 0.7391304347826086,
|
7623 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
7624 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
7625 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
7626 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
|
7627 |
+
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
|
7628 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
7629 |
+
"mmlu_eval_accuracy_marketing": 0.68,
|
7630 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
7631 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
|
7632 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5526315789473685,
|
7633 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.25,
|
7634 |
+
"mmlu_eval_accuracy_nutrition": 0.48484848484848486,
|
7635 |
+
"mmlu_eval_accuracy_philosophy": 0.5,
|
7636 |
+
"mmlu_eval_accuracy_prehistory": 0.5428571428571428,
|
7637 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
|
7638 |
+
"mmlu_eval_accuracy_professional_law": 0.3588235294117647,
|
7639 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
|
7640 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4927536231884058,
|
7641 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
7642 |
+
"mmlu_eval_accuracy_security_studies": 0.4074074074074074,
|
7643 |
+
"mmlu_eval_accuracy_sociology": 0.7272727272727273,
|
7644 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
|
7645 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
7646 |
+
"mmlu_eval_accuracy_world_religions": 0.631578947368421,
|
7647 |
+
"mmlu_loss": 1.566373301120402,
|
7648 |
+
"step": 8000
|
7649 |
+
},
|
7650 |
+
{
|
7651 |
+
"epoch": 2.54,
|
7652 |
+
"learning_rate": 0.0002,
|
7653 |
+
"loss": 0.5624,
|
7654 |
+
"step": 8010
|
7655 |
+
},
|
7656 |
+
{
|
7657 |
+
"epoch": 2.54,
|
7658 |
+
"learning_rate": 0.0002,
|
7659 |
+
"loss": 0.6206,
|
7660 |
+
"step": 8020
|
7661 |
+
},
|
7662 |
+
{
|
7663 |
+
"epoch": 2.54,
|
7664 |
+
"learning_rate": 0.0002,
|
7665 |
+
"loss": 0.607,
|
7666 |
+
"step": 8030
|
7667 |
+
},
|
7668 |
+
{
|
7669 |
+
"epoch": 2.55,
|
7670 |
+
"learning_rate": 0.0002,
|
7671 |
+
"loss": 0.6344,
|
7672 |
+
"step": 8040
|
7673 |
+
},
|
7674 |
+
{
|
7675 |
+
"epoch": 2.55,
|
7676 |
+
"learning_rate": 0.0002,
|
7677 |
+
"loss": 0.6705,
|
7678 |
+
"step": 8050
|
7679 |
+
},
|
7680 |
+
{
|
7681 |
+
"epoch": 2.55,
|
7682 |
+
"learning_rate": 0.0002,
|
7683 |
+
"loss": 0.5679,
|
7684 |
+
"step": 8060
|
7685 |
+
},
|
7686 |
+
{
|
7687 |
+
"epoch": 2.56,
|
7688 |
+
"learning_rate": 0.0002,
|
7689 |
+
"loss": 0.6,
|
7690 |
+
"step": 8070
|
7691 |
+
},
|
7692 |
+
{
|
7693 |
+
"epoch": 2.56,
|
7694 |
+
"learning_rate": 0.0002,
|
7695 |
+
"loss": 0.6486,
|
7696 |
+
"step": 8080
|
7697 |
+
},
|
7698 |
+
{
|
7699 |
+
"epoch": 2.56,
|
7700 |
+
"learning_rate": 0.0002,
|
7701 |
+
"loss": 0.5959,
|
7702 |
+
"step": 8090
|
7703 |
+
},
|
7704 |
+
{
|
7705 |
+
"epoch": 2.57,
|
7706 |
+
"learning_rate": 0.0002,
|
7707 |
+
"loss": 0.6454,
|
7708 |
+
"step": 8100
|
7709 |
+
},
|
7710 |
+
{
|
7711 |
+
"epoch": 2.57,
|
7712 |
+
"learning_rate": 0.0002,
|
7713 |
+
"loss": 0.6085,
|
7714 |
+
"step": 8110
|
7715 |
+
},
|
7716 |
+
{
|
7717 |
+
"epoch": 2.57,
|
7718 |
+
"learning_rate": 0.0002,
|
7719 |
+
"loss": 0.5509,
|
7720 |
+
"step": 8120
|
7721 |
+
},
|
7722 |
+
{
|
7723 |
+
"epoch": 2.58,
|
7724 |
+
"learning_rate": 0.0002,
|
7725 |
+
"loss": 0.6267,
|
7726 |
+
"step": 8130
|
7727 |
+
},
|
7728 |
+
{
|
7729 |
+
"epoch": 2.58,
|
7730 |
+
"learning_rate": 0.0002,
|
7731 |
+
"loss": 0.5865,
|
7732 |
+
"step": 8140
|
7733 |
+
},
|
7734 |
+
{
|
7735 |
+
"epoch": 2.58,
|
7736 |
+
"learning_rate": 0.0002,
|
7737 |
+
"loss": 0.6002,
|
7738 |
+
"step": 8150
|
7739 |
+
},
|
7740 |
+
{
|
7741 |
+
"epoch": 2.59,
|
7742 |
+
"learning_rate": 0.0002,
|
7743 |
+
"loss": 0.6342,
|
7744 |
+
"step": 8160
|
7745 |
+
},
|
7746 |
+
{
|
7747 |
+
"epoch": 2.59,
|
7748 |
+
"learning_rate": 0.0002,
|
7749 |
+
"loss": 0.6312,
|
7750 |
+
"step": 8170
|
7751 |
+
},
|
7752 |
+
{
|
7753 |
+
"epoch": 2.59,
|
7754 |
+
"learning_rate": 0.0002,
|
7755 |
+
"loss": 0.6361,
|
7756 |
+
"step": 8180
|
7757 |
+
},
|
7758 |
+
{
|
7759 |
+
"epoch": 2.6,
|
7760 |
+
"learning_rate": 0.0002,
|
7761 |
+
"loss": 0.5676,
|
7762 |
+
"step": 8190
|
7763 |
+
},
|
7764 |
+
{
|
7765 |
+
"epoch": 2.6,
|
7766 |
+
"learning_rate": 0.0002,
|
7767 |
+
"loss": 0.6125,
|
7768 |
+
"step": 8200
|
7769 |
+
},
|
7770 |
+
{
|
7771 |
+
"epoch": 2.6,
|
7772 |
+
"eval_loss": 0.7568719387054443,
|
7773 |
+
"eval_runtime": 111.2374,
|
7774 |
+
"eval_samples_per_second": 8.99,
|
7775 |
+
"eval_steps_per_second": 4.495,
|
7776 |
+
"step": 8200
|
7777 |
+
},
|
7778 |
+
{
|
7779 |
+
"epoch": 2.6,
|
7780 |
+
"mmlu_eval_accuracy": 0.4699982014237092,
|
7781 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
7782 |
+
"mmlu_eval_accuracy_anatomy": 0.5714285714285714,
|
7783 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
7784 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
7785 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5517241379310345,
|
7786 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
7787 |
+
"mmlu_eval_accuracy_college_chemistry": 0.375,
|
7788 |
+
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
|
7789 |
+
"mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
|
7790 |
+
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
|
7791 |
+
"mmlu_eval_accuracy_college_physics": 0.2727272727272727,
|
7792 |
+
"mmlu_eval_accuracy_computer_security": 0.36363636363636365,
|
7793 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
|
7794 |
+
"mmlu_eval_accuracy_econometrics": 0.25,
|
7795 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
7796 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
7797 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
7798 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
7799 |
+
"mmlu_eval_accuracy_high_school_biology": 0.375,
|
7800 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.2727272727272727,
|
7801 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
7802 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
|
7803 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
7804 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
|
7805 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
|
7806 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
|
7807 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
7808 |
+
"mmlu_eval_accuracy_high_school_physics": 0.058823529411764705,
|
7809 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
|
7810 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
|
7811 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
7812 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
7813 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
7814 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
7815 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
7816 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
7817 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
|
7818 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
7819 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
7820 |
+
"mmlu_eval_accuracy_marketing": 0.68,
|
7821 |
+
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
|
7822 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
|
7823 |
+
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
|
7824 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.24,
|
7825 |
+
"mmlu_eval_accuracy_nutrition": 0.6060606060606061,
|
7826 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
7827 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
7828 |
+
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
|
7829 |
+
"mmlu_eval_accuracy_professional_law": 0.3588235294117647,
|
7830 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
|
7831 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
|
7832 |
+
"mmlu_eval_accuracy_public_relations": 0.5833333333333334,
|
7833 |
+
"mmlu_eval_accuracy_security_studies": 0.4444444444444444,
|
7834 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
7835 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
|
7836 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
7837 |
+
"mmlu_eval_accuracy_world_religions": 0.6842105263157895,
|
7838 |
+
"mmlu_loss": 1.3685555242527248,
|
7839 |
+
"step": 8200
|
7840 |
+
},
|
7841 |
+
{
|
7842 |
+
"epoch": 2.6,
|
7843 |
+
"learning_rate": 0.0002,
|
7844 |
+
"loss": 0.5992,
|
7845 |
+
"step": 8210
|
7846 |
+
},
|
7847 |
+
{
|
7848 |
+
"epoch": 2.6,
|
7849 |
+
"learning_rate": 0.0002,
|
7850 |
+
"loss": 0.6068,
|
7851 |
+
"step": 8220
|
7852 |
+
},
|
7853 |
+
{
|
7854 |
+
"epoch": 2.61,
|
7855 |
+
"learning_rate": 0.0002,
|
7856 |
+
"loss": 0.6986,
|
7857 |
+
"step": 8230
|
7858 |
+
},
|
7859 |
+
{
|
7860 |
+
"epoch": 2.61,
|
7861 |
+
"learning_rate": 0.0002,
|
7862 |
+
"loss": 0.5809,
|
7863 |
+
"step": 8240
|
7864 |
+
},
|
7865 |
+
{
|
7866 |
+
"epoch": 2.61,
|
7867 |
+
"learning_rate": 0.0002,
|
7868 |
+
"loss": 0.6368,
|
7869 |
+
"step": 8250
|
7870 |
+
},
|
7871 |
+
{
|
7872 |
+
"epoch": 2.62,
|
7873 |
+
"learning_rate": 0.0002,
|
7874 |
+
"loss": 0.5731,
|
7875 |
+
"step": 8260
|
7876 |
+
},
|
7877 |
+
{
|
7878 |
+
"epoch": 2.62,
|
7879 |
+
"learning_rate": 0.0002,
|
7880 |
+
"loss": 0.6439,
|
7881 |
+
"step": 8270
|
7882 |
+
},
|
7883 |
+
{
|
7884 |
+
"epoch": 2.62,
|
7885 |
+
"learning_rate": 0.0002,
|
7886 |
+
"loss": 0.5661,
|
7887 |
+
"step": 8280
|
7888 |
+
},
|
7889 |
+
{
|
7890 |
+
"epoch": 2.63,
|
7891 |
+
"learning_rate": 0.0002,
|
7892 |
+
"loss": 0.5816,
|
7893 |
+
"step": 8290
|
7894 |
+
},
|
7895 |
+
{
|
7896 |
+
"epoch": 2.63,
|
7897 |
+
"learning_rate": 0.0002,
|
7898 |
+
"loss": 0.5385,
|
7899 |
+
"step": 8300
|
7900 |
+
},
|
7901 |
+
{
|
7902 |
+
"epoch": 2.63,
|
7903 |
+
"learning_rate": 0.0002,
|
7904 |
+
"loss": 0.5913,
|
7905 |
+
"step": 8310
|
7906 |
+
},
|
7907 |
+
{
|
7908 |
+
"epoch": 2.64,
|
7909 |
+
"learning_rate": 0.0002,
|
7910 |
+
"loss": 0.5817,
|
7911 |
+
"step": 8320
|
7912 |
+
},
|
7913 |
+
{
|
7914 |
+
"epoch": 2.64,
|
7915 |
+
"learning_rate": 0.0002,
|
7916 |
+
"loss": 0.6098,
|
7917 |
+
"step": 8330
|
7918 |
+
},
|
7919 |
+
{
|
7920 |
+
"epoch": 2.64,
|
7921 |
+
"learning_rate": 0.0002,
|
7922 |
+
"loss": 0.558,
|
7923 |
+
"step": 8340
|
7924 |
+
},
|
7925 |
+
{
|
7926 |
+
"epoch": 2.65,
|
7927 |
+
"learning_rate": 0.0002,
|
7928 |
+
"loss": 0.6008,
|
7929 |
+
"step": 8350
|
7930 |
+
},
|
7931 |
+
{
|
7932 |
+
"epoch": 2.65,
|
7933 |
+
"learning_rate": 0.0002,
|
7934 |
+
"loss": 0.5921,
|
7935 |
+
"step": 8360
|
7936 |
+
},
|
7937 |
+
{
|
7938 |
+
"epoch": 2.65,
|
7939 |
+
"learning_rate": 0.0002,
|
7940 |
+
"loss": 0.6194,
|
7941 |
+
"step": 8370
|
7942 |
+
},
|
7943 |
+
{
|
7944 |
+
"epoch": 2.66,
|
7945 |
+
"learning_rate": 0.0002,
|
7946 |
+
"loss": 0.6849,
|
7947 |
+
"step": 8380
|
7948 |
+
},
|
7949 |
+
{
|
7950 |
+
"epoch": 2.66,
|
7951 |
+
"learning_rate": 0.0002,
|
7952 |
+
"loss": 0.5851,
|
7953 |
+
"step": 8390
|
7954 |
+
},
|
7955 |
+
{
|
7956 |
+
"epoch": 2.66,
|
7957 |
+
"learning_rate": 0.0002,
|
7958 |
+
"loss": 0.5574,
|
7959 |
+
"step": 8400
|
7960 |
+
},
|
7961 |
+
{
|
7962 |
+
"epoch": 2.66,
|
7963 |
+
"eval_loss": 0.7574586868286133,
|
7964 |
+
"eval_runtime": 111.0853,
|
7965 |
+
"eval_samples_per_second": 9.002,
|
7966 |
+
"eval_steps_per_second": 4.501,
|
7967 |
+
"step": 8400
|
7968 |
+
},
|
7969 |
+
{
|
7970 |
+
"epoch": 2.66,
|
7971 |
+
"mmlu_eval_accuracy": 0.47813110611906134,
|
7972 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
7973 |
+
"mmlu_eval_accuracy_anatomy": 0.5,
|
7974 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
7975 |
+
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
|
7976 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5517241379310345,
|
7977 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
7978 |
+
"mmlu_eval_accuracy_college_chemistry": 0.375,
|
7979 |
+
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
|
7980 |
+
"mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
|
7981 |
+
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
|
7982 |
+
"mmlu_eval_accuracy_college_physics": 0.18181818181818182,
|
7983 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
7984 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
7985 |
+
"mmlu_eval_accuracy_econometrics": 0.3333333333333333,
|
7986 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
7987 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
|
7988 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
7989 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
7990 |
+
"mmlu_eval_accuracy_high_school_biology": 0.375,
|
7991 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.2727272727272727,
|
7992 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
7993 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
7994 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
7995 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
|
7996 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.4883720930232558,
|
7997 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
|
7998 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.5384615384615384,
|
7999 |
+
"mmlu_eval_accuracy_high_school_physics": 0.058823529411764705,
|
8000 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8833333333333333,
|
8001 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
|
8002 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
8003 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
|
8004 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
8005 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
8006 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
8007 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
8008 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
8009 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
8010 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
8011 |
+
"mmlu_eval_accuracy_marketing": 0.76,
|
8012 |
+
"mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
|
8013 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
|
8014 |
+
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
|
8015 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.25,
|
8016 |
+
"mmlu_eval_accuracy_nutrition": 0.6363636363636364,
|
8017 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
8018 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
8019 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
|
8020 |
+
"mmlu_eval_accuracy_professional_law": 0.3588235294117647,
|
8021 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5483870967741935,
|
8022 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
|
8023 |
+
"mmlu_eval_accuracy_public_relations": 0.5833333333333334,
|
8024 |
+
"mmlu_eval_accuracy_security_studies": 0.4074074074074074,
|
8025 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
8026 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
8027 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
8028 |
+
"mmlu_eval_accuracy_world_religions": 0.6842105263157895,
|
8029 |
+
"mmlu_loss": 1.4071070384574622,
|
8030 |
+
"step": 8400
|
8031 |
}
|
8032 |
],
|
8033 |
"max_steps": 10000,
|
8034 |
"num_train_epochs": 4,
|
8035 |
+
"total_flos": 2.548339176788902e+18,
|
8036 |
"trial_name": null,
|
8037 |
"trial_params": null
|
8038 |
}
|
{checkpoint-6400 → checkpoint-8400}/training_args.bin
RENAMED
File without changes