Yhhhhhhhhh committed on
Commit 3fa5773
1 Parent(s): 7b236ee

Upload folder using huggingface_hub

README.md CHANGED
@@ -1,3 +1,60 @@
- ---
- license: apache-2.0
- ---
+ ---
+ library_name: transformers
+ license: other
+ base_model: deepseek-ai/deepseek-coder-1.3b-instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: one_merge_yhhhhhhh_deepseek-coder-1.3b-instruct
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # one_merge_yhhhhhhh_deepseek-coder-1.3b-instruct
+
+ This model is a fine-tuned version of [deepseek-ai/deepseek-coder-1.3b-instruct](https://huggingface.co/deepseek-ai/deepseek-coder-1.3b-instruct) on the output dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.1964
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-06
+ - train_batch_size: 8
+ - eval_batch_size: 1
+ - seed: 42
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 16
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.03
+ - num_epochs: 4.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.44.2
+ - Pytorch 2.5.0+cu121
+ - Datasets 2.21.0
+ - Tokenizers 0.19.1
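
The card above documents the run but not how to load the result. A minimal loading sketch follows; the repo id is an assumption inferred from the commit author and model name, and a local directory containing this commit's files works the same way.

```python
# Hedged sketch: load the fine-tuned checkpoint with transformers.
# The repo id is assumed, not confirmed by the commit; substitute the
# real path (or a local directory with these files) if it differs.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Yhhhhhhhhh/one_merge_yhhhhhhh_deepseek-coder-1.3b-instruct"  # assumed
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float32)

prompt = "def fibonacci(n):"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```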
all_results.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "epoch": 3.9975624619134673,
+   "eval_loss": 0.19639351963996887,
+   "eval_runtime": 0.8898,
+   "eval_samples_per_second": 1.124,
+   "eval_steps_per_second": 1.124,
+   "total_flos": 3.38221904554623e+17,
+   "train_loss": 0.21007875238613385,
+   "train_runtime": 5418.5603,
+   "train_samples_per_second": 9.688,
+   "train_steps_per_second": 0.605
+ }
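
As a quick consistency check, these throughput figures line up with the run length implied elsewhere in the commit (3280 total steps in trainer_log.jsonl, total_train_batch_size 16 in the README):

```python
# Sketch: train_runtime * train_samples_per_second should roughly equal
# total optimizer steps * effective batch size.
samples_seen = 5418.5603 * 9.688   # values from all_results.json
print(round(samples_seen))         # -> 52495
print(3280 * 16)                   # -> 52480, agreeing up to rounding
```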
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "deepseek-ai/deepseek-coder-1.3b-instruct",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 32013,
+   "eos_token_id": 32021,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 5504,
+   "max_position_embeddings": 16384,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "num_key_value_heads": 16,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "factor": 4.0,
+     "rope_type": "linear",
+     "type": "linear"
+   },
+   "rope_theta": 100000,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.44.2",
+   "use_cache": false,
+   "vocab_size": 32256
+ }
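
One detail worth noting: linear RoPE scaling with factor 4.0 over a 16384-token window implies a 4096-token native position range (16384 / 4.0). A small sketch of checking this, assuming the committed files are available locally:

```python
# Hedged sketch: read the committed config and verify the RoPE scaling math.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(".")  # "." assumes config.json is in the cwd
print(cfg.rope_scaling)                # {'factor': 4.0, 'rope_type': 'linear', 'type': 'linear'}
print(cfg.max_position_embeddings / cfg.rope_scaling["factor"])  # 4096.0
```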
eval_results.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "epoch": 3.9975624619134673,
+   "eval_loss": 0.19639351963996887,
+   "eval_runtime": 0.8898,
+   "eval_samples_per_second": 1.124,
+   "eval_steps_per_second": 1.124
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 32013,
+   "eos_token_id": 32021,
+   "transformers_version": "4.44.2"
+ }
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1dd16e90002e6b75461307550d37ec85c3dc77a3f3e9c50cc243dfa9e96e3f70
+ size 4986380064
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1156d11f6546b835964645fb59bb1487240ca066aaa18c0666b5f19a92038798
+ size 399532808
model.safetensors.index.json ADDED
@@ -0,0 +1,226 @@
+ {
+   "metadata": {
+     "total_size": 5385887744
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00002-of-00002.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+     "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+     "model.norm.weight": "model-00002-of-00002.safetensors"
+   }
+ }
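
The weight_map above is how sharded loading locates tensors: each parameter name maps to the shard file that stores it (note layer 23 is split, with its attention projections in shard 1 and the rest in shard 2). A minimal sketch of reading one tensor through the index, assuming the shards are local:

```python
# Sketch: resolve a tensor's shard via the index, then load only that
# tensor with safetensors' lazy reader.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.norm.weight"
shard = index["weight_map"][name]      # -> "model-00002-of-00002.safetensors"
with safe_open(shard, framework="pt") as reader:
    tensor = reader.get_tensor(name)
print(name, tuple(tensor.shape))       # expected (2048,), matching hidden_size
```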
runs/Nov09_10-04-36_5937a45575d7/events.out.tfevents.1731146687.5937a45575d7.28655.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8df825f766be98d142e8b2ca2e748363e70d1c4aa7319112e7b871ada273bb1d
+ size 74854
runs/Nov09_10-04-36_5937a45575d7/events.out.tfevents.1731152130.5937a45575d7.28655.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:adc40ffb0c8c69e275b40399ee8c4ab1f87a6d69d649a20688ad93c88f7e11e7
+ size 359
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|begin▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|EOT|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|end▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,196 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": null,
+   "added_tokens_decoder": {
+     "32000": {
+       "content": "õ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32001": {
+       "content": "÷",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32002": {
+       "content": "Á",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32003": {
+       "content": "ý",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32004": {
+       "content": "À",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32005": {
+       "content": "ÿ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32006": {
+       "content": "ø",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32007": {
+       "content": "ú",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32008": {
+       "content": "þ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32009": {
+       "content": "ü",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32010": {
+       "content": "ù",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32011": {
+       "content": "ö",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32012": {
+       "content": "û",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32013": {
+       "content": "<|begin▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32014": {
+       "content": "<|end▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32015": {
+       "content": "<|fim▁hole|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32016": {
+       "content": "<|fim▁begin|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32017": {
+       "content": "<|fim▁end|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32018": {
+       "content": "<pad>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32019": {
+       "content": "<|User|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32020": {
+       "content": "<|Assistant|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32021": {
+       "content": "<|EOT|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|begin▁of▁sentence|>",
+   "chat_template": "{{ '<|begin▁of▁sentence|>' }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Please continue to complete the function. You are not allowed to modify the given code and do the completion only. Please return all completed function in a codeblock. Here is the given code to do completion:\n```python\n' + content + '\n' }}{% elif message['role'] == 'assistant' %}{{ '\n' + content + '\n```<|EOT|>' + '\n' }}{% endif %}{% endfor %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|EOT|>",
+   "legacy": true,
+   "model_max_length": 16384,
+   "pad_token": "<|end▁of▁sentence|>",
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "split_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": null,
+   "use_default_system_prompt": false
+ }
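
The chat_template above hard-codes a code-completion instruction around each user turn, so every rendered prompt wraps the user's code in a fenced python block. A sketch of rendering it, assuming this commit's tokenizer files are available locally:

```python
# Sketch: render the committed chat template with apply_chat_template.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # "." assumes the tokenizer files are in the cwd
messages = [{"role": "user", "content": "def add(a, b):"}]
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)  # begins with <|begin▁of▁sentence|> followed by the completion instruction
```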
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 3.9975624619134673,
+   "total_flos": 3.38221904554623e+17,
+   "train_loss": 0.21007875238613385,
+   "train_runtime": 5418.5603,
+   "train_samples_per_second": 9.688,
+   "train_steps_per_second": 0.605
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,329 @@
+ {"current_steps": 10, "total_steps": 3280, "loss": 0.7319, "learning_rate": 5.05050505050505e-07, "epoch": 0.01218769043266301, "percentage": 0.3, "elapsed_time": "0:00:29", "remaining_time": "2:38:38"}
+ {"current_steps": 20, "total_steps": 3280, "loss": 0.6746, "learning_rate": 1.01010101010101e-06, "epoch": 0.02437538086532602, "percentage": 0.61, "elapsed_time": "0:00:55", "remaining_time": "2:29:37"}
+ {"current_steps": 30, "total_steps": 3280, "loss": 0.722, "learning_rate": 1.5151515151515152e-06, "epoch": 0.03656307129798903, "percentage": 0.91, "elapsed_time": "0:01:21", "remaining_time": "2:26:58"}
+ {"current_steps": 40, "total_steps": 3280, "loss": 0.6053, "learning_rate": 2.02020202020202e-06, "epoch": 0.04875076173065204, "percentage": 1.22, "elapsed_time": "0:01:45", "remaining_time": "2:22:44"}
+ {"current_steps": 50, "total_steps": 3280, "loss": 0.467, "learning_rate": 2.5252525252525258e-06, "epoch": 0.06093845216331505, "percentage": 1.52, "elapsed_time": "0:02:08", "remaining_time": "2:17:49"}
+ {"current_steps": 60, "total_steps": 3280, "loss": 0.4346, "learning_rate": 3.0303030303030305e-06, "epoch": 0.07312614259597806, "percentage": 1.83, "elapsed_time": "0:02:28", "remaining_time": "2:12:58"}
+ {"current_steps": 70, "total_steps": 3280, "loss": 0.4145, "learning_rate": 3.5353535353535356e-06, "epoch": 0.08531383302864107, "percentage": 2.13, "elapsed_time": "0:02:47", "remaining_time": "2:08:22"}
+ {"current_steps": 80, "total_steps": 3280, "loss": 0.4062, "learning_rate": 4.04040404040404e-06, "epoch": 0.09750152346130408, "percentage": 2.44, "elapsed_time": "0:03:05", "remaining_time": "2:03:38"}
+ {"current_steps": 90, "total_steps": 3280, "loss": 0.3627, "learning_rate": 4.5454545454545455e-06, "epoch": 0.10968921389396709, "percentage": 2.74, "elapsed_time": "0:03:23", "remaining_time": "2:00:08"}
+ {"current_steps": 100, "total_steps": 3280, "loss": 0.3336, "learning_rate": 4.99999878077914e-06, "epoch": 0.1218769043266301, "percentage": 3.05, "elapsed_time": "0:03:42", "remaining_time": "1:58:01"}
+ {"current_steps": 110, "total_steps": 3280, "loss": 0.3489, "learning_rate": 4.9998524757147466e-06, "epoch": 0.1340645947592931, "percentage": 3.35, "elapsed_time": "0:04:02", "remaining_time": "1:56:15"}
+ {"current_steps": 120, "total_steps": 3280, "loss": 0.3617, "learning_rate": 4.999462342829388e-06, "epoch": 0.14625228519195613, "percentage": 3.66, "elapsed_time": "0:04:18", "remaining_time": "1:53:33"}
+ {"current_steps": 130, "total_steps": 3280, "loss": 0.3894, "learning_rate": 4.9988284201754075e-06, "epoch": 0.15843997562461914, "percentage": 3.96, "elapsed_time": "0:04:36", "remaining_time": "1:51:34"}
+ {"current_steps": 140, "total_steps": 3280, "loss": 0.3448, "learning_rate": 4.99795076958365e-06, "epoch": 0.17062766605728213, "percentage": 4.27, "elapsed_time": "0:04:54", "remaining_time": "1:49:55"}
+ {"current_steps": 150, "total_steps": 3280, "loss": 0.3187, "learning_rate": 4.996829476657414e-06, "epoch": 0.18281535648994515, "percentage": 4.57, "elapsed_time": "0:05:10", "remaining_time": "1:47:55"}
+ {"current_steps": 160, "total_steps": 3280, "loss": 0.3343, "learning_rate": 4.995464650764122e-06, "epoch": 0.19500304692260817, "percentage": 4.88, "elapsed_time": "0:05:28", "remaining_time": "1:46:50"}
+ {"current_steps": 170, "total_steps": 3280, "loss": 0.3143, "learning_rate": 4.993856425024637e-06, "epoch": 0.2071907373552712, "percentage": 5.18, "elapsed_time": "0:05:44", "remaining_time": "1:44:55"}
+ {"current_steps": 180, "total_steps": 3280, "loss": 0.3301, "learning_rate": 4.992004956300287e-06, "epoch": 0.21937842778793418, "percentage": 5.49, "elapsed_time": "0:06:01", "remaining_time": "1:43:50"}
+ {"current_steps": 190, "total_steps": 3280, "loss": 0.3501, "learning_rate": 4.989910425177561e-06, "epoch": 0.2315661182205972, "percentage": 5.79, "elapsed_time": "0:06:19", "remaining_time": "1:42:55"}
+ {"current_steps": 200, "total_steps": 3280, "loss": 0.283, "learning_rate": 4.987573035950499e-06, "epoch": 0.2437538086532602, "percentage": 6.1, "elapsed_time": "0:06:35", "remaining_time": "1:41:29"}
+ {"current_steps": 210, "total_steps": 3280, "loss": 0.3144, "learning_rate": 4.984993016600763e-06, "epoch": 0.25594149908592323, "percentage": 6.4, "elapsed_time": "0:06:49", "remaining_time": "1:39:52"}
+ {"current_steps": 220, "total_steps": 3280, "loss": 0.3221, "learning_rate": 4.982170618775401e-06, "epoch": 0.2681291895185862, "percentage": 6.71, "elapsed_time": "0:07:05", "remaining_time": "1:38:34"}
+ {"current_steps": 230, "total_steps": 3280, "loss": 0.3244, "learning_rate": 4.979106117762303e-06, "epoch": 0.28031687995124926, "percentage": 7.01, "elapsed_time": "0:07:24", "remaining_time": "1:38:08"}
+ {"current_steps": 240, "total_steps": 3280, "loss": 0.3112, "learning_rate": 4.975799812463348e-06, "epoch": 0.29250457038391225, "percentage": 7.32, "elapsed_time": "0:07:40", "remaining_time": "1:37:14"}
+ {"current_steps": 250, "total_steps": 3280, "loss": 0.3321, "learning_rate": 4.972252025365251e-06, "epoch": 0.30469226081657524, "percentage": 7.62, "elapsed_time": "0:07:55", "remaining_time": "1:36:04"}
+ {"current_steps": 260, "total_steps": 3280, "loss": 0.3337, "learning_rate": 4.968463102508114e-06, "epoch": 0.3168799512492383, "percentage": 7.93, "elapsed_time": "0:08:14", "remaining_time": "1:35:39"}
+ {"current_steps": 270, "total_steps": 3280, "loss": 0.3029, "learning_rate": 4.9644334134516645e-06, "epoch": 0.3290676416819013, "percentage": 8.23, "elapsed_time": "0:08:28", "remaining_time": "1:34:32"}
+ {"current_steps": 280, "total_steps": 3280, "loss": 0.3411, "learning_rate": 4.960163351239216e-06, "epoch": 0.34125533211456427, "percentage": 8.54, "elapsed_time": "0:08:43", "remaining_time": "1:33:31"}
+ {"current_steps": 290, "total_steps": 3280, "loss": 0.3192, "learning_rate": 4.955653332359331e-06, "epoch": 0.3534430225472273, "percentage": 8.84, "elapsed_time": "0:08:58", "remaining_time": "1:32:36"}
+ {"current_steps": 300, "total_steps": 3280, "loss": 0.3113, "learning_rate": 4.950903796705201e-06, "epoch": 0.3656307129798903, "percentage": 9.15, "elapsed_time": "0:09:16", "remaining_time": "1:32:11"}
+ {"current_steps": 310, "total_steps": 3280, "loss": 0.3191, "learning_rate": 4.94591520753173e-06, "epoch": 0.3778184034125533, "percentage": 9.45, "elapsed_time": "0:09:33", "remaining_time": "1:31:37"}
+ {"current_steps": 320, "total_steps": 3280, "loss": 0.3092, "learning_rate": 4.940688051410364e-06, "epoch": 0.39000609384521634, "percentage": 9.76, "elapsed_time": "0:09:48", "remaining_time": "1:30:40"}
+ {"current_steps": 330, "total_steps": 3280, "loss": 0.3343, "learning_rate": 4.935222838181623e-06, "epoch": 0.40219378427787933, "percentage": 10.06, "elapsed_time": "0:10:00", "remaining_time": "1:29:28"}
+ {"current_steps": 340, "total_steps": 3280, "loss": 0.3115, "learning_rate": 4.929520100905375e-06, "epoch": 0.4143814747105424, "percentage": 10.37, "elapsed_time": "0:10:17", "remaining_time": "1:29:03"}
+ {"current_steps": 350, "total_steps": 3280, "loss": 0.2922, "learning_rate": 4.923580395808846e-06, "epoch": 0.42656916514320536, "percentage": 10.67, "elapsed_time": "0:10:33", "remaining_time": "1:28:21"}
+ {"current_steps": 360, "total_steps": 3280, "loss": 0.3211, "learning_rate": 4.917404302232362e-06, "epoch": 0.43875685557586835, "percentage": 10.98, "elapsed_time": "0:10:49", "remaining_time": "1:27:48"}
+ {"current_steps": 370, "total_steps": 3280, "loss": 0.3205, "learning_rate": 4.910992422572845e-06, "epoch": 0.4509445460085314, "percentage": 11.28, "elapsed_time": "0:11:04", "remaining_time": "1:27:02"}
+ {"current_steps": 380, "total_steps": 3280, "loss": 0.331, "learning_rate": 4.904345382225058e-06, "epoch": 0.4631322364411944, "percentage": 11.59, "elapsed_time": "0:11:19", "remaining_time": "1:26:25"}
+ {"current_steps": 390, "total_steps": 3280, "loss": 0.3224, "learning_rate": 4.897463829520604e-06, "epoch": 0.4753199268738574, "percentage": 11.89, "elapsed_time": "0:11:35", "remaining_time": "1:25:52"}
+ {"current_steps": 400, "total_steps": 3280, "loss": 0.3054, "learning_rate": 4.890348435664694e-06, "epoch": 0.4875076173065204, "percentage": 12.2, "elapsed_time": "0:11:51", "remaining_time": "1:25:19"}
+ {"current_steps": 410, "total_steps": 3280, "loss": 0.2973, "learning_rate": 4.882999894670669e-06, "epoch": 0.4996953077391834, "percentage": 12.5, "elapsed_time": "0:12:05", "remaining_time": "1:24:41"}
+ {"current_steps": 420, "total_steps": 3280, "loss": 0.295, "learning_rate": 4.875418923292322e-06, "epoch": 0.5118829981718465, "percentage": 12.8, "elapsed_time": "0:12:20", "remaining_time": "1:24:02"}
+ {"current_steps": 430, "total_steps": 3280, "loss": 0.3027, "learning_rate": 4.867606260953979e-06, "epoch": 0.5240706886045094, "percentage": 13.11, "elapsed_time": "0:12:35", "remaining_time": "1:23:30"}
+ {"current_steps": 440, "total_steps": 3280, "loss": 0.3007, "learning_rate": 4.85956266967838e-06, "epoch": 0.5362583790371724, "percentage": 13.41, "elapsed_time": "0:12:51", "remaining_time": "1:22:58"}
+ {"current_steps": 450, "total_steps": 3280, "loss": 0.3168, "learning_rate": 4.8512889340123535e-06, "epoch": 0.5484460694698354, "percentage": 13.72, "elapsed_time": "0:13:07", "remaining_time": "1:22:35"}
+ {"current_steps": 460, "total_steps": 3280, "loss": 0.3137, "learning_rate": 4.842785860950296e-06, "epoch": 0.5606337599024985, "percentage": 14.02, "elapsed_time": "0:13:23", "remaining_time": "1:22:08"}
+ {"current_steps": 470, "total_steps": 3280, "loss": 0.3118, "learning_rate": 4.834054279855459e-06, "epoch": 0.5728214503351615, "percentage": 14.33, "elapsed_time": "0:13:40", "remaining_time": "1:21:45"}
+ {"current_steps": 480, "total_steps": 3280, "loss": 0.289, "learning_rate": 4.825095042379054e-06, "epoch": 0.5850091407678245, "percentage": 14.63, "elapsed_time": "0:13:56", "remaining_time": "1:21:20"}
+ {"current_steps": 490, "total_steps": 3280, "loss": 0.2937, "learning_rate": 4.815909022377189e-06, "epoch": 0.5971968312004875, "percentage": 14.94, "elapsed_time": "0:14:12", "remaining_time": "1:20:52"}
+ {"current_steps": 500, "total_steps": 3280, "loss": 0.3063, "learning_rate": 4.806497115825629e-06, "epoch": 0.6093845216331505, "percentage": 15.24, "elapsed_time": "0:14:29", "remaining_time": "1:20:33"}
+ {"current_steps": 510, "total_steps": 3280, "loss": 0.316, "learning_rate": 4.796860240732414e-06, "epoch": 0.6215722120658135, "percentage": 15.55, "elapsed_time": "0:15:38", "remaining_time": "1:24:55"}
+ {"current_steps": 520, "total_steps": 3280, "loss": 0.3127, "learning_rate": 4.786999337048311e-06, "epoch": 0.6337599024984766, "percentage": 15.85, "elapsed_time": "0:15:54", "remaining_time": "1:24:27"}
+ {"current_steps": 530, "total_steps": 3280, "loss": 0.3057, "learning_rate": 4.77691536657514e-06, "epoch": 0.6459475929311396, "percentage": 16.16, "elapsed_time": "0:16:09", "remaining_time": "1:23:49"}
+ {"current_steps": 540, "total_steps": 3280, "loss": 0.3006, "learning_rate": 4.766609312871958e-06, "epoch": 0.6581352833638026, "percentage": 16.46, "elapsed_time": "0:16:27", "remaining_time": "1:23:29"}
+ {"current_steps": 550, "total_steps": 3280, "loss": 0.3131, "learning_rate": 4.756082181159131e-06, "epoch": 0.6703229737964655, "percentage": 16.77, "elapsed_time": "0:16:41", "remaining_time": "1:22:49"}
+ {"current_steps": 560, "total_steps": 3280, "loss": 0.3079, "learning_rate": 4.745334998220282e-06, "epoch": 0.6825106642291285, "percentage": 17.07, "elapsed_time": "0:16:54", "remaining_time": "1:22:09"}
+ {"current_steps": 570, "total_steps": 3280, "loss": 0.3257, "learning_rate": 4.734368812302148e-06, "epoch": 0.6946983546617916, "percentage": 17.38, "elapsed_time": "0:17:08", "remaining_time": "1:21:31"}
+ {"current_steps": 580, "total_steps": 3280, "loss": 0.2991, "learning_rate": 4.723184693012334e-06, "epoch": 0.7068860450944546, "percentage": 17.68, "elapsed_time": "0:17:22", "remaining_time": "1:20:52"}
+ {"current_steps": 590, "total_steps": 3280, "loss": 0.3093, "learning_rate": 4.711783731214984e-06, "epoch": 0.7190737355271176, "percentage": 17.99, "elapsed_time": "0:17:40", "remaining_time": "1:20:35"}
+ {"current_steps": 600, "total_steps": 3280, "loss": 0.2807, "learning_rate": 4.700167038924386e-06, "epoch": 0.7312614259597806, "percentage": 18.29, "elapsed_time": "0:17:55", "remaining_time": "1:20:02"}
+ {"current_steps": 610, "total_steps": 3280, "loss": 0.3026, "learning_rate": 4.688335749196511e-06, "epoch": 0.7434491163924436, "percentage": 18.6, "elapsed_time": "0:18:11", "remaining_time": "1:19:36"}
+ {"current_steps": 620, "total_steps": 3280, "loss": 0.2992, "learning_rate": 4.676291016018491e-06, "epoch": 0.7556368068251066, "percentage": 18.9, "elapsed_time": "0:18:26", "remaining_time": "1:19:05"}
+ {"current_steps": 630, "total_steps": 3280, "loss": 0.3223, "learning_rate": 4.664034014196069e-06, "epoch": 0.7678244972577697, "percentage": 19.21, "elapsed_time": "0:18:40", "remaining_time": "1:18:32"}
+ {"current_steps": 640, "total_steps": 3280, "loss": 0.3069, "learning_rate": 4.651565939239011e-06, "epoch": 0.7800121876904327, "percentage": 19.51, "elapsed_time": "0:18:54", "remaining_time": "1:17:59"}
+ {"current_steps": 650, "total_steps": 3280, "loss": 0.3112, "learning_rate": 4.638888007244498e-06, "epoch": 0.7921998781230957, "percentage": 19.82, "elapsed_time": "0:19:07", "remaining_time": "1:17:22"}
+ {"current_steps": 660, "total_steps": 3280, "loss": 0.2949, "learning_rate": 4.626001454778511e-06, "epoch": 0.8043875685557587, "percentage": 20.12, "elapsed_time": "0:19:25", "remaining_time": "1:17:05"}
+ {"current_steps": 670, "total_steps": 3280, "loss": 0.3128, "learning_rate": 4.612907538755224e-06, "epoch": 0.8165752589884216, "percentage": 20.43, "elapsed_time": "0:19:42", "remaining_time": "1:16:47"}
+ {"current_steps": 680, "total_steps": 3280, "loss": 0.3115, "learning_rate": 4.599607536314406e-06, "epoch": 0.8287629494210847, "percentage": 20.73, "elapsed_time": "0:19:58", "remaining_time": "1:16:21"}
+ {"current_steps": 690, "total_steps": 3280, "loss": 0.3189, "learning_rate": 4.586102744696851e-06, "epoch": 0.8409506398537477, "percentage": 21.04, "elapsed_time": "0:20:12", "remaining_time": "1:15:52"}
+ {"current_steps": 700, "total_steps": 3280, "loss": 0.2756, "learning_rate": 4.572394481117855e-06, "epoch": 0.8531383302864107, "percentage": 21.34, "elapsed_time": "0:20:27", "remaining_time": "1:15:24"}
+ {"current_steps": 710, "total_steps": 3280, "loss": 0.3078, "learning_rate": 4.558484082638729e-06, "epoch": 0.8653260207190737, "percentage": 21.65, "elapsed_time": "0:20:44", "remaining_time": "1:15:05"}
+ {"current_steps": 720, "total_steps": 3280, "loss": 0.3017, "learning_rate": 4.544372906036399e-06, "epoch": 0.8775137111517367, "percentage": 21.95, "elapsed_time": "0:21:00", "remaining_time": "1:14:42"}
+ {"current_steps": 730, "total_steps": 3280, "loss": 0.3277, "learning_rate": 4.5300623276710604e-06, "epoch": 0.8897014015843998, "percentage": 22.26, "elapsed_time": "0:21:17", "remaining_time": "1:14:21"}
+ {"current_steps": 740, "total_steps": 3280, "loss": 0.2817, "learning_rate": 4.515553743351934e-06, "epoch": 0.9018890920170628, "percentage": 22.56, "elapsed_time": "0:21:31", "remaining_time": "1:13:52"}
+ {"current_steps": 750, "total_steps": 3280, "loss": 0.2845, "learning_rate": 4.5008485682011265e-06, "epoch": 0.9140767824497258, "percentage": 22.87, "elapsed_time": "0:21:45", "remaining_time": "1:13:25"}
+ {"current_steps": 760, "total_steps": 3280, "loss": 0.3032, "learning_rate": 4.4859482365156e-06, "epoch": 0.9262644728823888, "percentage": 23.17, "elapsed_time": "0:21:58", "remaining_time": "1:12:52"}
+ {"current_steps": 770, "total_steps": 3280, "loss": 0.3154, "learning_rate": 4.470854201627278e-06, "epoch": 0.9384521633150518, "percentage": 23.48, "elapsed_time": "0:22:13", "remaining_time": "1:12:26"}
+ {"current_steps": 780, "total_steps": 3280, "loss": 0.3276, "learning_rate": 4.4555679357612925e-06, "epoch": 0.9506398537477148, "percentage": 23.78, "elapsed_time": "0:22:30", "remaining_time": "1:12:08"}
+ {"current_steps": 790, "total_steps": 3280, "loss": 0.2941, "learning_rate": 4.440090929892382e-06, "epoch": 0.9628275441803779, "percentage": 24.09, "elapsed_time": "0:22:45", "remaining_time": "1:11:42"}
+ {"current_steps": 800, "total_steps": 3280, "loss": 0.2795, "learning_rate": 4.424424693599474e-06, "epoch": 0.9750152346130408, "percentage": 24.39, "elapsed_time": "0:23:00", "remaining_time": "1:11:20"}
+ {"current_steps": 810, "total_steps": 3280, "loss": 0.2968, "learning_rate": 4.4085707549184395e-06, "epoch": 0.9872029250457038, "percentage": 24.7, "elapsed_time": "0:23:16", "remaining_time": "1:10:57"}
+ {"current_steps": 820, "total_steps": 3280, "loss": 0.289, "learning_rate": 4.392530660193058e-06, "epoch": 0.9993906154783668, "percentage": 25.0, "elapsed_time": "0:23:31", "remaining_time": "1:10:33"}
+ {"current_steps": 830, "total_steps": 3280, "loss": 0.2459, "learning_rate": 4.376305973924188e-06, "epoch": 1.01157830591103, "percentage": 25.3, "elapsed_time": "0:23:45", "remaining_time": "1:10:07"}
+ {"current_steps": 840, "total_steps": 3280, "loss": 0.2095, "learning_rate": 4.359898278617171e-06, "epoch": 1.023765996343693, "percentage": 25.61, "elapsed_time": "0:24:00", "remaining_time": "1:09:44"}
+ {"current_steps": 850, "total_steps": 3280, "loss": 0.2504, "learning_rate": 4.343309174627484e-06, "epoch": 1.035953686776356, "percentage": 25.91, "elapsed_time": "0:24:15", "remaining_time": "1:09:21"}
+ {"current_steps": 860, "total_steps": 3280, "loss": 0.2335, "learning_rate": 4.326540280004634e-06, "epoch": 1.048141377209019, "percentage": 26.22, "elapsed_time": "0:24:32", "remaining_time": "1:09:03"}
+ {"current_steps": 870, "total_steps": 3280, "loss": 0.2312, "learning_rate": 4.309593230334355e-06, "epoch": 1.0603290676416819, "percentage": 26.52, "elapsed_time": "0:24:46", "remaining_time": "1:08:38"}
+ {"current_steps": 880, "total_steps": 3280, "loss": 0.2367, "learning_rate": 4.292469678579063e-06, "epoch": 1.0725167580743449, "percentage": 26.83, "elapsed_time": "0:25:00", "remaining_time": "1:08:13"}
+ {"current_steps": 890, "total_steps": 3280, "loss": 0.2445, "learning_rate": 4.275171294916641e-06, "epoch": 1.0847044485070079, "percentage": 27.13, "elapsed_time": "0:25:16", "remaining_time": "1:07:52"}
+ {"current_steps": 900, "total_steps": 3280, "loss": 0.2329, "learning_rate": 4.2576997665775275e-06, "epoch": 1.0968921389396709, "percentage": 27.44, "elapsed_time": "0:25:31", "remaining_time": "1:07:30"}
+ {"current_steps": 910, "total_steps": 3280, "loss": 0.2174, "learning_rate": 4.2400567976801575e-06, "epoch": 1.1090798293723338, "percentage": 27.74, "elapsed_time": "0:25:45", "remaining_time": "1:07:04"}
+ {"current_steps": 920, "total_steps": 3280, "loss": 0.2304, "learning_rate": 4.22224410906474e-06, "epoch": 1.1212675198049968, "percentage": 28.05, "elapsed_time": "0:25:59", "remaining_time": "1:06:41"}
+ {"current_steps": 930, "total_steps": 3280, "loss": 0.2206, "learning_rate": 4.204263438125421e-06, "epoch": 1.13345521023766, "percentage": 28.35, "elapsed_time": "0:26:14", "remaining_time": "1:06:19"}
+ {"current_steps": 940, "total_steps": 3280, "loss": 0.2348, "learning_rate": 4.186116538640814e-06, "epoch": 1.145642900670323, "percentage": 28.66, "elapsed_time": "0:26:28", "remaining_time": "1:05:54"}
+ {"current_steps": 950, "total_steps": 3280, "loss": 0.2071, "learning_rate": 4.167805180602951e-06, "epoch": 1.157830591102986, "percentage": 28.96, "elapsed_time": "0:26:45", "remaining_time": "1:05:38"}
+ {"current_steps": 960, "total_steps": 3280, "loss": 0.2053, "learning_rate": 4.149331150044635e-06, "epoch": 1.170018281535649, "percentage": 29.27, "elapsed_time": "0:27:03", "remaining_time": "1:05:22"}
+ {"current_steps": 970, "total_steps": 3280, "loss": 0.2068, "learning_rate": 4.130696248865244e-06, "epoch": 1.182205971968312, "percentage": 29.57, "elapsed_time": "0:27:16", "remaining_time": "1:04:56"}
+ {"current_steps": 980, "total_steps": 3280, "loss": 0.2468, "learning_rate": 4.111902294654971e-06, "epoch": 1.194393662400975, "percentage": 29.88, "elapsed_time": "0:27:32", "remaining_time": "1:04:38"}
+ {"current_steps": 990, "total_steps": 3280, "loss": 0.2254, "learning_rate": 4.09295112051755e-06, "epoch": 1.206581352833638, "percentage": 30.18, "elapsed_time": "0:27:46", "remaining_time": "1:04:15"}
+ {"current_steps": 1000, "total_steps": 3280, "loss": 0.2306, "learning_rate": 4.073844574891452e-06, "epoch": 1.218769043266301, "percentage": 30.49, "elapsed_time": "0:27:59", "remaining_time": "1:03:49"}
+ {"current_steps": 1010, "total_steps": 3280, "loss": 0.2294, "learning_rate": 4.054584521369603e-06, "epoch": 1.230956733698964, "percentage": 30.79, "elapsed_time": "0:29:09", "remaining_time": "1:05:31"}
+ {"current_steps": 1020, "total_steps": 3280, "loss": 0.2222, "learning_rate": 4.03517283851761e-06, "epoch": 1.2431444241316272, "percentage": 31.1, "elapsed_time": "0:29:24", "remaining_time": "1:05:08"}
+ {"current_steps": 1030, "total_steps": 3280, "loss": 0.2636, "learning_rate": 4.01561141969053e-06, "epoch": 1.2553321145642902, "percentage": 31.4, "elapsed_time": "0:29:40", "remaining_time": "1:04:49"}
+ {"current_steps": 1040, "total_steps": 3280, "loss": 0.2509, "learning_rate": 3.995902172848205e-06, "epoch": 1.2675198049969532, "percentage": 31.71, "elapsed_time": "0:29:55", "remaining_time": "1:04:26"}
+ {"current_steps": 1050, "total_steps": 3280, "loss": 0.2165, "learning_rate": 3.976047020369155e-06, "epoch": 1.2797074954296161, "percentage": 32.01, "elapsed_time": "0:30:07", "remaining_time": "1:03:59"}
+ {"current_steps": 1060, "total_steps": 3280, "loss": 0.22, "learning_rate": 3.9560478988630866e-06, "epoch": 1.2918951858622791, "percentage": 32.32, "elapsed_time": "0:30:22", "remaining_time": "1:03:36"}
+ {"current_steps": 1070, "total_steps": 3280, "loss": 0.2258, "learning_rate": 3.935906758981993e-06, "epoch": 1.3040828762949421, "percentage": 32.62, "elapsed_time": "0:30:35", "remaining_time": "1:03:10"}
+ {"current_steps": 1080, "total_steps": 3280, "loss": 0.2378, "learning_rate": 3.9156255652299005e-06, "epoch": 1.3162705667276051, "percentage": 32.93, "elapsed_time": "0:30:49", "remaining_time": "1:02:46"}
+ {"current_steps": 1090, "total_steps": 3280, "loss": 0.2477, "learning_rate": 3.89520629577125e-06, "epoch": 1.328458257160268, "percentage": 33.23, "elapsed_time": "0:31:04", "remaining_time": "1:02:26"}
+ {"current_steps": 1100, "total_steps": 3280, "loss": 0.2073, "learning_rate": 3.8746509422379575e-06, "epoch": 1.340645947592931, "percentage": 33.54, "elapsed_time": "0:31:19", "remaining_time": "1:02:04"}
+ {"current_steps": 1110, "total_steps": 3280, "loss": 0.2199, "learning_rate": 3.853961509535159e-06, "epoch": 1.352833638025594, "percentage": 33.84, "elapsed_time": "0:31:34", "remaining_time": "1:01:43"}
+ {"current_steps": 1120, "total_steps": 3280, "loss": 0.2284, "learning_rate": 3.83314001564565e-06, "epoch": 1.365021328458257, "percentage": 34.15, "elapsed_time": "0:31:48", "remaining_time": "1:01:19"}
+ {"current_steps": 1130, "total_steps": 3280, "loss": 0.2372, "learning_rate": 3.8121884914330663e-06, "epoch": 1.37720901889092, "percentage": 34.45, "elapsed_time": "0:32:02", "remaining_time": "1:00:57"}
+ {"current_steps": 1140, "total_steps": 3280, "loss": 0.2565, "learning_rate": 3.791108980443794e-06, "epoch": 1.389396709323583, "percentage": 34.76, "elapsed_time": "0:32:16", "remaining_time": "1:00:34"}
+ {"current_steps": 1150, "total_steps": 3280, "loss": 0.2066, "learning_rate": 3.769903538707652e-06, "epoch": 1.4015843997562463, "percentage": 35.06, "elapsed_time": "0:32:30", "remaining_time": "1:00:12"}
+ {"current_steps": 1160, "total_steps": 3280, "loss": 0.2217, "learning_rate": 3.7485742345373517e-06, "epoch": 1.4137720901889093, "percentage": 35.37, "elapsed_time": "0:32:45", "remaining_time": "0:59:52"}
+ {"current_steps": 1170, "total_steps": 3280, "loss": 0.2218, "learning_rate": 3.727123148326758e-06, "epoch": 1.4259597806215722, "percentage": 35.67, "elapsed_time": "0:33:00", "remaining_time": "0:59:32"}
+ {"current_steps": 1180, "total_steps": 3280, "loss": 0.225, "learning_rate": 3.705552372347981e-06, "epoch": 1.4381474710542352, "percentage": 35.98, "elapsed_time": "0:33:13", "remaining_time": "0:59:08"}
+ {"current_steps": 1190, "total_steps": 3280, "loss": 0.2346, "learning_rate": 3.683864010547294e-06, "epoch": 1.4503351614868982, "percentage": 36.28, "elapsed_time": "0:33:29", "remaining_time": "0:58:49"}
+ {"current_steps": 1200, "total_steps": 3280, "loss": 0.2393, "learning_rate": 3.662060178339927e-06, "epoch": 1.4625228519195612, "percentage": 36.59, "elapsed_time": "0:33:44", "remaining_time": "0:58:29"}
+ {"current_steps": 1210, "total_steps": 3280, "loss": 0.2361, "learning_rate": 3.6401430024037315e-06, "epoch": 1.4747105423522242, "percentage": 36.89, "elapsed_time": "0:33:58", "remaining_time": "0:58:07"}
+ {"current_steps": 1220, "total_steps": 3280, "loss": 0.2198, "learning_rate": 3.618114620471756e-06, "epoch": 1.4868982327848872, "percentage": 37.2, "elapsed_time": "0:34:13", "remaining_time": "0:57:47"}
+ {"current_steps": 1230, "total_steps": 3280, "loss": 0.2396, "learning_rate": 3.5959771811237342e-06, "epoch": 1.4990859232175504, "percentage": 37.5, "elapsed_time": "0:34:30", "remaining_time": "0:57:30"}
+ {"current_steps": 1240, "total_steps": 3280, "loss": 0.2308, "learning_rate": 3.573732843576519e-06, "epoch": 1.5112736136502134, "percentage": 37.8, "elapsed_time": "0:34:45", "remaining_time": "0:57:10"}
+ {"current_steps": 1250, "total_steps": 3280, "loss": 0.2051, "learning_rate": 3.5513837774734816e-06, "epoch": 1.5234613040828764, "percentage": 38.11, "elapsed_time": "0:35:02", "remaining_time": "0:56:55"}
+ {"current_steps": 1260, "total_steps": 3280, "loss": 0.2526, "learning_rate": 3.5289321626728912e-06, "epoch": 1.5356489945155394, "percentage": 38.41, "elapsed_time": "0:35:18", "remaining_time": "0:56:36"}
+ {"current_steps": 1270, "total_steps": 3280, "loss": 0.2112, "learning_rate": 3.5063801890352955e-06, "epoch": 1.5478366849482024, "percentage": 38.72, "elapsed_time": "0:35:32", "remaining_time": "0:56:14"}
+ {"current_steps": 1280, "total_steps": 3280, "loss": 0.2199, "learning_rate": 3.4837300562099324e-06, "epoch": 1.5600243753808654, "percentage": 39.02, "elapsed_time": "0:35:48", "remaining_time": "0:55:57"}
+ {"current_steps": 1290, "total_steps": 3280, "loss": 0.249, "learning_rate": 3.4609839734201793e-06, "epoch": 1.5722120658135283, "percentage": 39.33, "elapsed_time": "0:36:02", "remaining_time": "0:55:35"}
+ {"current_steps": 1300, "total_steps": 3280, "loss": 0.2634, "learning_rate": 3.4381441592480756e-06, "epoch": 1.5843997562461913, "percentage": 39.63, "elapsed_time": "0:36:20", "remaining_time": "0:55:21"}
+ {"current_steps": 1310, "total_steps": 3280, "loss": 0.2243, "learning_rate": 3.4152128414179263e-06, "epoch": 1.5965874466788543, "percentage": 39.94, "elapsed_time": "0:36:33", "remaining_time": "0:54:59"}
+ {"current_steps": 1320, "total_steps": 3280, "loss": 0.2478, "learning_rate": 3.3921922565790188e-06, "epoch": 1.6087751371115173, "percentage": 40.24, "elapsed_time": "0:36:48", "remaining_time": "0:54:40"}
+ {"current_steps": 1330, "total_steps": 3280, "loss": 0.2005, "learning_rate": 3.3690846500874664e-06, "epoch": 1.6209628275441803, "percentage": 40.55, "elapsed_time": "0:37:04", "remaining_time": "0:54:21"}
+ {"current_steps": 1340, "total_steps": 3280, "loss": 0.2517, "learning_rate": 3.345892275787204e-06, "epoch": 1.6331505179768433, "percentage": 40.85, "elapsed_time": "0:37:19", "remaining_time": "0:54:02"}
+ {"current_steps": 1350, "total_steps": 3280, "loss": 0.2361, "learning_rate": 3.3226173957901533e-06, "epoch": 1.6453382084095063, "percentage": 41.16, "elapsed_time": "0:37:34", "remaining_time": "0:53:42"}
+ {"current_steps": 1360, "total_steps": 3280, "loss": 0.2228, "learning_rate": 3.2992622802555844e-06, "epoch": 1.6575258988421693, "percentage": 41.46, "elapsed_time": "0:37:48", "remaining_time": "0:53:22"}
+ {"current_steps": 1370, "total_steps": 3280, "loss": 0.2347, "learning_rate": 3.2758292071686928e-06, "epoch": 1.6697135892748323, "percentage": 41.77, "elapsed_time": "0:38:03", "remaining_time": "0:53:03"}
+ {"current_steps": 1380, "total_steps": 3280, "loss": 0.2326, "learning_rate": 3.2523204621184094e-06, "epoch": 1.6819012797074955, "percentage": 42.07, "elapsed_time": "0:38:19", "remaining_time": "0:52:46"}
+ {"current_steps": 1390, "total_steps": 3280, "loss": 0.2281, "learning_rate": 3.2287383380744746e-06, "epoch": 1.6940889701401585, "percentage": 42.38, "elapsed_time": "0:38:33", "remaining_time": "0:52:25"}
+ {"current_steps": 1400, "total_steps": 3280, "loss": 0.2109, "learning_rate": 3.2050851351637853e-06, "epoch": 1.7062766605728215, "percentage": 42.68, "elapsed_time": "0:38:46", "remaining_time": "0:52:03"}
+ {"current_steps": 1410, "total_steps": 3280, "loss": 0.2387, "learning_rate": 3.1813631604460504e-06, "epoch": 1.7184643510054844, "percentage": 42.99, "elapsed_time": "0:39:01", "remaining_time": "0:51:46"}
142
+ {"current_steps": 1420, "total_steps": 3280, "loss": 0.2348, "learning_rate": 3.1575747276887657e-06, "epoch": 1.7306520414381474, "percentage": 43.29, "elapsed_time": "0:39:16", "remaining_time": "0:51:26"}
143
+ {"current_steps": 1430, "total_steps": 3280, "loss": 0.2323, "learning_rate": 3.1337221571415388e-06, "epoch": 1.7428397318708104, "percentage": 43.6, "elapsed_time": "0:39:30", "remaining_time": "0:51:06"}
144
+ {"current_steps": 1440, "total_steps": 3280, "loss": 0.238, "learning_rate": 3.1098077753097763e-06, "epoch": 1.7550274223034736, "percentage": 43.9, "elapsed_time": "0:39:48", "remaining_time": "0:50:51"}
145
+ {"current_steps": 1450, "total_steps": 3280, "loss": 0.2223, "learning_rate": 3.085833914727765e-06, "epoch": 1.7672151127361366, "percentage": 44.21, "elapsed_time": "0:40:03", "remaining_time": "0:50:33"}
146
+ {"current_steps": 1460, "total_steps": 3280, "loss": 0.2271, "learning_rate": 3.0618029137311634e-06, "epoch": 1.7794028031687996, "percentage": 44.51, "elapsed_time": "0:40:18", "remaining_time": "0:50:15"}
147
+ {"current_steps": 1470, "total_steps": 3280, "loss": 0.2372, "learning_rate": 3.037717116228929e-06, "epoch": 1.7915904936014626, "percentage": 44.82, "elapsed_time": "0:40:35", "remaining_time": "0:49:58"}
148
+ {"current_steps": 1480, "total_steps": 3280, "loss": 0.2397, "learning_rate": 3.013578871474699e-06, "epoch": 1.8037781840341256, "percentage": 45.12, "elapsed_time": "0:40:49", "remaining_time": "0:49:38"}
149
+ {"current_steps": 1490, "total_steps": 3280, "loss": 0.2237, "learning_rate": 2.9893905338376503e-06, "epoch": 1.8159658744667886, "percentage": 45.43, "elapsed_time": "0:41:06", "remaining_time": "0:49:22"}
150
+ {"current_steps": 1500, "total_steps": 3280, "loss": 0.2099, "learning_rate": 2.965154462572869e-06, "epoch": 1.8281535648994516, "percentage": 45.73, "elapsed_time": "0:41:19", "remaining_time": "0:49:02"}
151
+ {"current_steps": 1510, "total_steps": 3280, "loss": 0.2205, "learning_rate": 2.9408730215912247e-06, "epoch": 1.8403412553321146, "percentage": 46.04, "elapsed_time": "0:42:27", "remaining_time": "0:49:45"}
152
+ {"current_steps": 1520, "total_steps": 3280, "loss": 0.2149, "learning_rate": 2.91654857922881e-06, "epoch": 1.8525289457647776, "percentage": 46.34, "elapsed_time": "0:42:43", "remaining_time": "0:49:28"}
153
+ {"current_steps": 1530, "total_steps": 3280, "loss": 0.2309, "learning_rate": 2.892183508015939e-06, "epoch": 1.8647166361974405, "percentage": 46.65, "elapsed_time": "0:42:58", "remaining_time": "0:49:09"}
154
+ {"current_steps": 1540, "total_steps": 3280, "loss": 0.2254, "learning_rate": 2.867780184445735e-06, "epoch": 1.8769043266301035, "percentage": 46.95, "elapsed_time": "0:43:14", "remaining_time": "0:48:51"}
155
+ {"current_steps": 1550, "total_steps": 3280, "loss": 0.2251, "learning_rate": 2.8433409887423397e-06, "epoch": 1.8890920170627665, "percentage": 47.26, "elapsed_time": "0:43:28", "remaining_time": "0:48:31"}
156
+ {"current_steps": 1560, "total_steps": 3280, "loss": 0.2375, "learning_rate": 2.8188683046287496e-06, "epoch": 1.9012797074954295, "percentage": 47.56, "elapsed_time": "0:43:44", "remaining_time": "0:48:13"}
157
+ {"current_steps": 1570, "total_steps": 3280, "loss": 0.2195, "learning_rate": 2.794364519094317e-06, "epoch": 1.9134673979280925, "percentage": 47.87, "elapsed_time": "0:44:00", "remaining_time": "0:47:56"}
158
+ {"current_steps": 1580, "total_steps": 3280, "loss": 0.1939, "learning_rate": 2.7698320221619278e-06, "epoch": 1.9256550883607555, "percentage": 48.17, "elapsed_time": "0:44:12", "remaining_time": "0:47:34"}
159
+ {"current_steps": 1590, "total_steps": 3280, "loss": 0.2419, "learning_rate": 2.7452732066548914e-06, "epoch": 1.9378427787934185, "percentage": 48.48, "elapsed_time": "0:44:27", "remaining_time": "0:47:15"}
160
+ {"current_steps": 1600, "total_steps": 3280, "loss": 0.1974, "learning_rate": 2.7206904679635465e-06, "epoch": 1.9500304692260817, "percentage": 48.78, "elapsed_time": "0:44:43", "remaining_time": "0:46:57"}
161
+ {"current_steps": 1610, "total_steps": 3280, "loss": 0.251, "learning_rate": 2.6960862038116265e-06, "epoch": 1.9622181596587447, "percentage": 49.09, "elapsed_time": "0:44:58", "remaining_time": "0:46:38"}
162
+ {"current_steps": 1620, "total_steps": 3280, "loss": 0.2148, "learning_rate": 2.6714628140223885e-06, "epoch": 1.9744058500914077, "percentage": 49.39, "elapsed_time": "0:45:12", "remaining_time": "0:46:19"}
163
+ {"current_steps": 1630, "total_steps": 3280, "loss": 0.2322, "learning_rate": 2.6468227002845476e-06, "epoch": 1.9865935405240707, "percentage": 49.7, "elapsed_time": "0:45:29", "remaining_time": "0:46:03"}
164
+ {"current_steps": 1640, "total_steps": 3280, "loss": 0.2059, "learning_rate": 2.6221682659180186e-06, "epoch": 1.9987812309567337, "percentage": 50.0, "elapsed_time": "0:45:44", "remaining_time": "0:45:44"}
165
+ {"current_steps": 1650, "total_steps": 3280, "loss": 0.1819, "learning_rate": 2.597501915639507e-06, "epoch": 2.010968921389397, "percentage": 50.3, "elapsed_time": "0:45:59", "remaining_time": "0:45:25"}
166
+ {"current_steps": 1660, "total_steps": 3280, "loss": 0.1463, "learning_rate": 2.5728260553279592e-06, "epoch": 2.02315661182206, "percentage": 50.61, "elapsed_time": "0:46:14", "remaining_time": "0:45:07"}
167
+ {"current_steps": 1670, "total_steps": 3280, "loss": 0.1563, "learning_rate": 2.5481430917899e-06, "epoch": 2.035344302254723, "percentage": 50.91, "elapsed_time": "0:46:29", "remaining_time": "0:44:49"}
168
+ {"current_steps": 1680, "total_steps": 3280, "loss": 0.1488, "learning_rate": 2.523455432524681e-06, "epoch": 2.047531992687386, "percentage": 51.22, "elapsed_time": "0:46:43", "remaining_time": "0:44:29"}
169
+ {"current_steps": 1690, "total_steps": 3280, "loss": 0.1599, "learning_rate": 2.4987654854896606e-06, "epoch": 2.059719683120049, "percentage": 51.52, "elapsed_time": "0:46:58", "remaining_time": "0:44:11"}
170
+ {"current_steps": 1700, "total_steps": 3280, "loss": 0.1493, "learning_rate": 2.4740756588653388e-06, "epoch": 2.071907373552712, "percentage": 51.83, "elapsed_time": "0:47:11", "remaining_time": "0:43:51"}
171
+ {"current_steps": 1710, "total_steps": 3280, "loss": 0.161, "learning_rate": 2.4493883608204703e-06, "epoch": 2.084095063985375, "percentage": 52.13, "elapsed_time": "0:47:27", "remaining_time": "0:43:34"}
172
+ {"current_steps": 1720, "total_steps": 3280, "loss": 0.1705, "learning_rate": 2.4247059992771836e-06, "epoch": 2.096282754418038, "percentage": 52.44, "elapsed_time": "0:47:42", "remaining_time": "0:43:16"}
173
+ {"current_steps": 1730, "total_steps": 3280, "loss": 0.15, "learning_rate": 2.4000309816761105e-06, "epoch": 2.108470444850701, "percentage": 52.74, "elapsed_time": "0:47:57", "remaining_time": "0:42:57"}
174
+ {"current_steps": 1740, "total_steps": 3280, "loss": 0.1556, "learning_rate": 2.375365714741584e-06, "epoch": 2.1206581352833638, "percentage": 53.05, "elapsed_time": "0:48:13", "remaining_time": "0:42:40"}
175
+ {"current_steps": 1750, "total_steps": 3280, "loss": 0.1846, "learning_rate": 2.3507126042468807e-06, "epoch": 2.1328458257160268, "percentage": 53.35, "elapsed_time": "0:48:27", "remaining_time": "0:42:22"}
176
+ {"current_steps": 1760, "total_steps": 3280, "loss": 0.1691, "learning_rate": 2.3260740547795818e-06, "epoch": 2.1450335161486898, "percentage": 53.66, "elapsed_time": "0:48:42", "remaining_time": "0:42:03"}
177
+ {"current_steps": 1770, "total_steps": 3280, "loss": 0.1617, "learning_rate": 2.3014524695070277e-06, "epoch": 2.1572212065813527, "percentage": 53.96, "elapsed_time": "0:48:56", "remaining_time": "0:41:45"}
178
+ {"current_steps": 1780, "total_steps": 3280, "loss": 0.1547, "learning_rate": 2.276850249941927e-06, "epoch": 2.1694088970140157, "percentage": 54.27, "elapsed_time": "0:49:11", "remaining_time": "0:41:26"}
179
+ {"current_steps": 1790, "total_steps": 3280, "loss": 0.149, "learning_rate": 2.2522697957081134e-06, "epoch": 2.1815965874466787, "percentage": 54.57, "elapsed_time": "0:49:25", "remaining_time": "0:41:08"}
180
+ {"current_steps": 1800, "total_steps": 3280, "loss": 0.1484, "learning_rate": 2.2277135043065024e-06, "epoch": 2.1937842778793417, "percentage": 54.88, "elapsed_time": "0:49:42", "remaining_time": "0:40:52"}
181
+ {"current_steps": 1810, "total_steps": 3280, "loss": 0.1681, "learning_rate": 2.203183770881239e-06, "epoch": 2.2059719683120047, "percentage": 55.18, "elapsed_time": "0:49:58", "remaining_time": "0:40:35"}
182
+ {"current_steps": 1820, "total_steps": 3280, "loss": 0.1605, "learning_rate": 2.178682987986088e-06, "epoch": 2.2181596587446677, "percentage": 55.49, "elapsed_time": "0:50:14", "remaining_time": "0:40:18"}
183
+ {"current_steps": 1830, "total_steps": 3280, "loss": 0.1645, "learning_rate": 2.154213545351067e-06, "epoch": 2.2303473491773307, "percentage": 55.79, "elapsed_time": "0:50:27", "remaining_time": "0:39:59"}
184
+ {"current_steps": 1840, "total_steps": 3280, "loss": 0.1544, "learning_rate": 2.129777829649367e-06, "epoch": 2.2425350396099937, "percentage": 56.1, "elapsed_time": "0:50:43", "remaining_time": "0:39:41"}
185
+ {"current_steps": 1850, "total_steps": 3280, "loss": 0.1518, "learning_rate": 2.1053782242645534e-06, "epoch": 2.254722730042657, "percentage": 56.4, "elapsed_time": "0:50:58", "remaining_time": "0:39:23"}
186
+ {"current_steps": 1860, "total_steps": 3280, "loss": 0.1896, "learning_rate": 2.081017109058108e-06, "epoch": 2.26691042047532, "percentage": 56.71, "elapsed_time": "0:51:16", "remaining_time": "0:39:08"}
187
+ {"current_steps": 1870, "total_steps": 3280, "loss": 0.1319, "learning_rate": 2.056696860137298e-06, "epoch": 2.279098110907983, "percentage": 57.01, "elapsed_time": "0:51:29", "remaining_time": "0:38:49"}
188
+ {"current_steps": 1880, "total_steps": 3280, "loss": 0.1425, "learning_rate": 2.0324198496234227e-06, "epoch": 2.291285801340646, "percentage": 57.32, "elapsed_time": "0:51:42", "remaining_time": "0:38:30"}
189
+ {"current_steps": 1890, "total_steps": 3280, "loss": 0.1517, "learning_rate": 2.0081884454204396e-06, "epoch": 2.303473491773309, "percentage": 57.62, "elapsed_time": "0:51:57", "remaining_time": "0:38:12"}
190
+ {"current_steps": 1900, "total_steps": 3280, "loss": 0.146, "learning_rate": 1.984005010984011e-06, "epoch": 2.315661182205972, "percentage": 57.93, "elapsed_time": "0:52:10", "remaining_time": "0:37:53"}
191
+ {"current_steps": 1910, "total_steps": 3280, "loss": 0.1529, "learning_rate": 1.9598719050909753e-06, "epoch": 2.327848872638635, "percentage": 58.23, "elapsed_time": "0:52:25", "remaining_time": "0:37:36"}
192
+ {"current_steps": 1920, "total_steps": 3280, "loss": 0.1791, "learning_rate": 1.935791481609283e-06, "epoch": 2.340036563071298, "percentage": 58.54, "elapsed_time": "0:52:42", "remaining_time": "0:37:19"}
193
+ {"current_steps": 1930, "total_steps": 3280, "loss": 0.1528, "learning_rate": 1.9117660892684067e-06, "epoch": 2.352224253503961, "percentage": 58.84, "elapsed_time": "0:52:59", "remaining_time": "0:37:04"}
194
+ {"current_steps": 1940, "total_steps": 3280, "loss": 0.139, "learning_rate": 1.8877980714302532e-06, "epoch": 2.364411943936624, "percentage": 59.15, "elapsed_time": "0:53:13", "remaining_time": "0:36:45"}
195
+ {"current_steps": 1950, "total_steps": 3280, "loss": 0.1495, "learning_rate": 1.8638897658605962e-06, "epoch": 2.376599634369287, "percentage": 59.45, "elapsed_time": "0:53:28", "remaining_time": "0:36:28"}
196
+ {"current_steps": 1960, "total_steps": 3280, "loss": 0.1412, "learning_rate": 1.840043504501065e-06, "epoch": 2.38878732480195, "percentage": 59.76, "elapsed_time": "0:53:42", "remaining_time": "0:36:10"}
197
+ {"current_steps": 1970, "total_steps": 3280, "loss": 0.1507, "learning_rate": 1.816261613241686e-06, "epoch": 2.400975015234613, "percentage": 60.06, "elapsed_time": "0:53:55", "remaining_time": "0:35:51"}
198
+ {"current_steps": 1980, "total_steps": 3280, "loss": 0.1725, "learning_rate": 1.7925464116940299e-06, "epoch": 2.413162705667276, "percentage": 60.37, "elapsed_time": "0:54:11", "remaining_time": "0:35:34"}
199
+ {"current_steps": 1990, "total_steps": 3280, "loss": 0.1605, "learning_rate": 1.7689002129649584e-06, "epoch": 2.425350396099939, "percentage": 60.67, "elapsed_time": "0:54:26", "remaining_time": "0:35:17"}
200
+ {"current_steps": 2000, "total_steps": 3280, "loss": 0.1599, "learning_rate": 1.7453253234310164e-06, "epoch": 2.437538086532602, "percentage": 60.98, "elapsed_time": "0:54:41", "remaining_time": "0:35:00"}
201
+ {"current_steps": 2010, "total_steps": 3280, "loss": 0.1312, "learning_rate": 1.7218240425134669e-06, "epoch": 2.449725776965265, "percentage": 61.28, "elapsed_time": "0:55:50", "remaining_time": "0:35:17"}
202
+ {"current_steps": 2020, "total_steps": 3280, "loss": 0.1627, "learning_rate": 1.6983986624540227e-06, "epoch": 2.461913467397928, "percentage": 61.59, "elapsed_time": "0:56:06", "remaining_time": "0:34:59"}
203
+ {"current_steps": 2030, "total_steps": 3280, "loss": 0.1685, "learning_rate": 1.6750514680912606e-06, "epoch": 2.474101157830591, "percentage": 61.89, "elapsed_time": "0:56:20", "remaining_time": "0:34:41"}
204
+ {"current_steps": 2040, "total_steps": 3280, "loss": 0.1704, "learning_rate": 1.6517847366377693e-06, "epoch": 2.4862888482632544, "percentage": 62.2, "elapsed_time": "0:56:35", "remaining_time": "0:34:23"}
205
+ {"current_steps": 2050, "total_steps": 3280, "loss": 0.1598, "learning_rate": 1.628600737458037e-06, "epoch": 2.4984765386959173, "percentage": 62.5, "elapsed_time": "0:56:50", "remaining_time": "0:34:06"}
206
+ {"current_steps": 2060, "total_steps": 3280, "loss": 0.169, "learning_rate": 1.605501731847101e-06, "epoch": 2.5106642291285803, "percentage": 62.8, "elapsed_time": "0:57:08", "remaining_time": "0:33:50"}
207
+ {"current_steps": 2070, "total_steps": 3280, "loss": 0.1509, "learning_rate": 1.5824899728099934e-06, "epoch": 2.5228519195612433, "percentage": 63.11, "elapsed_time": "0:57:22", "remaining_time": "0:33:32"}
208
+ {"current_steps": 2080, "total_steps": 3280, "loss": 0.153, "learning_rate": 1.5595677048419855e-06, "epoch": 2.5350396099939063, "percentage": 63.41, "elapsed_time": "0:57:34", "remaining_time": "0:33:12"}
209
+ {"current_steps": 2090, "total_steps": 3280, "loss": 0.165, "learning_rate": 1.5367371637096705e-06, "epoch": 2.5472273004265693, "percentage": 63.72, "elapsed_time": "0:57:50", "remaining_time": "0:32:55"}
210
+ {"current_steps": 2100, "total_steps": 3280, "loss": 0.1773, "learning_rate": 1.5140005762328892e-06, "epoch": 2.5594149908592323, "percentage": 64.02, "elapsed_time": "0:58:06", "remaining_time": "0:32:38"}
211
+ {"current_steps": 2110, "total_steps": 3280, "loss": 0.1439, "learning_rate": 1.4913601600675387e-06, "epoch": 2.5716026812918953, "percentage": 64.33, "elapsed_time": "0:58:20", "remaining_time": "0:32:21"}
212
+ {"current_steps": 2120, "total_steps": 3280, "loss": 0.1378, "learning_rate": 1.468818123489263e-06, "epoch": 2.5837903717245583, "percentage": 64.63, "elapsed_time": "0:58:35", "remaining_time": "0:32:03"}
213
+ {"current_steps": 2130, "total_steps": 3280, "loss": 0.1614, "learning_rate": 1.4463766651780698e-06, "epoch": 2.5959780621572213, "percentage": 64.94, "elapsed_time": "0:58:51", "remaining_time": "0:31:46"}
214
+ {"current_steps": 2140, "total_steps": 3280, "loss": 0.1639, "learning_rate": 1.4240379740038758e-06, "epoch": 2.6081657525898843, "percentage": 65.24, "elapsed_time": "0:59:05", "remaining_time": "0:31:28"}
215
+ {"current_steps": 2150, "total_steps": 3280, "loss": 0.154, "learning_rate": 1.4018042288130101e-06, "epoch": 2.6203534430225472, "percentage": 65.55, "elapsed_time": "0:59:19", "remaining_time": "0:31:10"}
216
+ {"current_steps": 2160, "total_steps": 3280, "loss": 0.1558, "learning_rate": 1.3796775982156984e-06, "epoch": 2.6325411334552102, "percentage": 65.85, "elapsed_time": "0:59:32", "remaining_time": "0:30:52"}
217
+ {"current_steps": 2170, "total_steps": 3280, "loss": 0.1618, "learning_rate": 1.3576602403745456e-06, "epoch": 2.6447288238878732, "percentage": 66.16, "elapsed_time": "0:59:48", "remaining_time": "0:30:35"}
218
+ {"current_steps": 2180, "total_steps": 3280, "loss": 0.1502, "learning_rate": 1.3357543027940254e-06, "epoch": 2.656916514320536, "percentage": 66.46, "elapsed_time": "1:00:04", "remaining_time": "0:30:18"}
219
+ {"current_steps": 2190, "total_steps": 3280, "loss": 0.161, "learning_rate": 1.3139619221110348e-06, "epoch": 2.669104204753199, "percentage": 66.77, "elapsed_time": "1:00:18", "remaining_time": "0:30:00"}
220
+ {"current_steps": 2200, "total_steps": 3280, "loss": 0.169, "learning_rate": 1.2922852238864767e-06, "epoch": 2.681291895185862, "percentage": 67.07, "elapsed_time": "1:00:33", "remaining_time": "0:29:43"}
221
+ {"current_steps": 2210, "total_steps": 3280, "loss": 0.1466, "learning_rate": 1.2707263223979544e-06, "epoch": 2.693479585618525, "percentage": 67.38, "elapsed_time": "1:00:48", "remaining_time": "0:29:26"}
222
+ {"current_steps": 2220, "total_steps": 3280, "loss": 0.1594, "learning_rate": 1.2492873204335415e-06, "epoch": 2.705667276051188, "percentage": 67.68, "elapsed_time": "1:01:03", "remaining_time": "0:29:09"}
223
+ {"current_steps": 2230, "total_steps": 3280, "loss": 0.1431, "learning_rate": 1.227970309086685e-06, "epoch": 2.717854966483851, "percentage": 67.99, "elapsed_time": "1:01:18", "remaining_time": "0:28:52"}
224
+ {"current_steps": 2240, "total_steps": 3280, "loss": 0.162, "learning_rate": 1.2067773675522487e-06, "epoch": 2.730042656916514, "percentage": 68.29, "elapsed_time": "1:01:34", "remaining_time": "0:28:35"}
225
+ {"current_steps": 2250, "total_steps": 3280, "loss": 0.1597, "learning_rate": 1.1857105629237126e-06, "epoch": 2.742230347349177, "percentage": 68.6, "elapsed_time": "1:01:48", "remaining_time": "0:28:17"}
226
+ {"current_steps": 2260, "total_steps": 3280, "loss": 0.14, "learning_rate": 1.164771949991556e-06, "epoch": 2.75441803778184, "percentage": 68.9, "elapsed_time": "1:02:01", "remaining_time": "0:27:59"}
227
+ {"current_steps": 2270, "total_steps": 3280, "loss": 0.1363, "learning_rate": 1.1439635710428405e-06, "epoch": 2.766605728214503, "percentage": 69.21, "elapsed_time": "1:02:18", "remaining_time": "0:27:43"}
228
+ {"current_steps": 2280, "total_steps": 3280, "loss": 0.1849, "learning_rate": 1.1232874556620086e-06, "epoch": 2.778793418647166, "percentage": 69.51, "elapsed_time": "1:02:36", "remaining_time": "0:27:27"}
229
+ {"current_steps": 2290, "total_steps": 3280, "loss": 0.1435, "learning_rate": 1.1027456205329306e-06, "epoch": 2.790981109079829, "percentage": 69.82, "elapsed_time": "1:02:49", "remaining_time": "0:27:09"}
230
+ {"current_steps": 2300, "total_steps": 3280, "loss": 0.172, "learning_rate": 1.0823400692421938e-06, "epoch": 2.8031687995124925, "percentage": 70.12, "elapsed_time": "1:03:03", "remaining_time": "0:26:52"}
231
+ {"current_steps": 2310, "total_steps": 3280, "loss": 0.1562, "learning_rate": 1.0620727920836906e-06, "epoch": 2.8153564899451555, "percentage": 70.43, "elapsed_time": "1:03:18", "remaining_time": "0:26:34"}
232
+ {"current_steps": 2320, "total_steps": 3280, "loss": 0.1685, "learning_rate": 1.04194576586448e-06, "epoch": 2.8275441803778185, "percentage": 70.73, "elapsed_time": "1:03:32", "remaining_time": "0:26:17"}
233
+ {"current_steps": 2330, "total_steps": 3280, "loss": 0.1762, "learning_rate": 1.0219609537119838e-06, "epoch": 2.8397318708104815, "percentage": 71.04, "elapsed_time": "1:03:49", "remaining_time": "0:26:01"}
234
+ {"current_steps": 2340, "total_steps": 3280, "loss": 0.1601, "learning_rate": 1.0021203048825095e-06, "epoch": 2.8519195612431445, "percentage": 71.34, "elapsed_time": "1:04:04", "remaining_time": "0:25:44"}
235
+ {"current_steps": 2350, "total_steps": 3280, "loss": 0.1451, "learning_rate": 9.824257545711172e-07, "epoch": 2.8641072516758075, "percentage": 71.65, "elapsed_time": "1:04:20", "remaining_time": "0:25:27"}
236
+ {"current_steps": 2360, "total_steps": 3280, "loss": 0.1703, "learning_rate": 9.628792237228787e-07, "epoch": 2.8762949421084705, "percentage": 71.95, "elapsed_time": "1:04:35", "remaining_time": "0:25:10"}
237
+ {"current_steps": 2370, "total_steps": 3280, "loss": 0.1607, "learning_rate": 9.434826188455056e-07, "epoch": 2.8884826325411335, "percentage": 72.26, "elapsed_time": "1:04:52", "remaining_time": "0:24:54"}
238
+ {"current_steps": 2380, "total_steps": 3280, "loss": 0.1771, "learning_rate": 9.242378318233978e-07, "epoch": 2.9006703229737965, "percentage": 72.56, "elapsed_time": "1:05:09", "remaining_time": "0:24:38"}
239
+ {"current_steps": 2390, "total_steps": 3280, "loss": 0.1738, "learning_rate": 9.051467397331148e-07, "epoch": 2.9128580134064594, "percentage": 72.87, "elapsed_time": "1:05:23", "remaining_time": "0:24:21"}
240
+ {"current_steps": 2400, "total_steps": 3280, "loss": 0.167, "learning_rate": 8.862112046602917e-07, "epoch": 2.9250457038391224, "percentage": 73.17, "elapsed_time": "1:05:38", "remaining_time": "0:24:03"}
241
+ {"current_steps": 2410, "total_steps": 3280, "loss": 0.1561, "learning_rate": 8.674330735180164e-07, "epoch": 2.9372333942717854, "percentage": 73.48, "elapsed_time": "1:05:53", "remaining_time": "0:23:47"}
242
+ {"current_steps": 2420, "total_steps": 3280, "loss": 0.1586, "learning_rate": 8.488141778666878e-07, "epoch": 2.9494210847044484, "percentage": 73.78, "elapsed_time": "1:06:07", "remaining_time": "0:23:29"}
243
+ {"current_steps": 2430, "total_steps": 3280, "loss": 0.1435, "learning_rate": 8.303563337353713e-07, "epoch": 2.9616087751371114, "percentage": 74.09, "elapsed_time": "1:06:23", "remaining_time": "0:23:13"}
244
+ {"current_steps": 2440, "total_steps": 3280, "loss": 0.1395, "learning_rate": 8.120613414446707e-07, "epoch": 2.9737964655697744, "percentage": 74.39, "elapsed_time": "1:06:37", "remaining_time": "0:22:56"}
245
+ {"current_steps": 2450, "total_steps": 3280, "loss": 0.1637, "learning_rate": 7.939309854311242e-07, "epoch": 2.9859841560024374, "percentage": 74.7, "elapsed_time": "1:06:51", "remaining_time": "0:22:38"}
246
+ {"current_steps": 2460, "total_steps": 3280, "loss": 0.1819, "learning_rate": 7.759670340731662e-07, "epoch": 2.998171846435101, "percentage": 75.0, "elapsed_time": "1:07:05", "remaining_time": "0:22:21"}
247
+ {"current_steps": 2470, "total_steps": 3280, "loss": 0.1143, "learning_rate": 7.581712395186341e-07, "epoch": 3.0103595368677634, "percentage": 75.3, "elapsed_time": "1:07:20", "remaining_time": "0:22:04"}
248
+ {"current_steps": 2480, "total_steps": 3280, "loss": 0.1116, "learning_rate": 7.405453375138794e-07, "epoch": 3.0225472273004264, "percentage": 75.61, "elapsed_time": "1:07:36", "remaining_time": "0:21:48"}
249
+ {"current_steps": 2490, "total_steps": 3280, "loss": 0.1226, "learning_rate": 7.230910472344601e-07, "epoch": 3.03473491773309, "percentage": 75.91, "elapsed_time": "1:07:52", "remaining_time": "0:21:31"}
250
+ {"current_steps": 2500, "total_steps": 3280, "loss": 0.1304, "learning_rate": 7.058100711174637e-07, "epoch": 3.0469226081657528, "percentage": 76.22, "elapsed_time": "1:08:06", "remaining_time": "0:21:15"}
251
+ {"current_steps": 2510, "total_steps": 3280, "loss": 0.1221, "learning_rate": 6.887040946954524e-07, "epoch": 3.0591102985984158, "percentage": 76.52, "elapsed_time": "1:09:13", "remaining_time": "0:21:14"}
252
+ {"current_steps": 2520, "total_steps": 3280, "loss": 0.1154, "learning_rate": 6.717747864320648e-07, "epoch": 3.0712979890310788, "percentage": 76.83, "elapsed_time": "1:09:28", "remaining_time": "0:20:57"}
253
+ {"current_steps": 2530, "total_steps": 3280, "loss": 0.1084, "learning_rate": 6.550237975592774e-07, "epoch": 3.0834856794637417, "percentage": 77.13, "elapsed_time": "1:09:41", "remaining_time": "0:20:39"}
254
+ {"current_steps": 2540, "total_steps": 3280, "loss": 0.1272, "learning_rate": 6.384527619163486e-07, "epoch": 3.0956733698964047, "percentage": 77.44, "elapsed_time": "1:09:57", "remaining_time": "0:20:22"}
255
+ {"current_steps": 2550, "total_steps": 3280, "loss": 0.1126, "learning_rate": 6.220632957904593e-07, "epoch": 3.1078610603290677, "percentage": 77.74, "elapsed_time": "1:10:13", "remaining_time": "0:20:06"}
256
+ {"current_steps": 2560, "total_steps": 3280, "loss": 0.1334, "learning_rate": 6.058569977590683e-07, "epoch": 3.1200487507617307, "percentage": 78.05, "elapsed_time": "1:10:31", "remaining_time": "0:19:50"}
257
+ {"current_steps": 2570, "total_steps": 3280, "loss": 0.098, "learning_rate": 5.898354485339839e-07, "epoch": 3.1322364411943937, "percentage": 78.35, "elapsed_time": "1:10:48", "remaining_time": "0:19:33"}
258
+ {"current_steps": 2580, "total_steps": 3280, "loss": 0.1242, "learning_rate": 5.740002108071974e-07, "epoch": 3.1444241316270567, "percentage": 78.66, "elapsed_time": "1:11:04", "remaining_time": "0:19:16"}
259
+ {"current_steps": 2590, "total_steps": 3280, "loss": 0.1071, "learning_rate": 5.583528290984516e-07, "epoch": 3.1566118220597197, "percentage": 78.96, "elapsed_time": "1:11:18", "remaining_time": "0:18:59"}
260
+ {"current_steps": 2600, "total_steps": 3280, "loss": 0.114, "learning_rate": 5.42894829604603e-07, "epoch": 3.1687995124923827, "percentage": 79.27, "elapsed_time": "1:11:34", "remaining_time": "0:18:43"}
261
+ {"current_steps": 2610, "total_steps": 3280, "loss": 0.1066, "learning_rate": 5.276277200507549e-07, "epoch": 3.1809872029250457, "percentage": 79.57, "elapsed_time": "1:11:50", "remaining_time": "0:18:26"}
262
+ {"current_steps": 2620, "total_steps": 3280, "loss": 0.1237, "learning_rate": 5.125529895432008e-07, "epoch": 3.1931748933577087, "percentage": 79.88, "elapsed_time": "1:12:07", "remaining_time": "0:18:10"}
263
+ {"current_steps": 2630, "total_steps": 3280, "loss": 0.1302, "learning_rate": 4.976721084241818e-07, "epoch": 3.2053625837903716, "percentage": 80.18, "elapsed_time": "1:12:21", "remaining_time": "0:17:53"}
264
+ {"current_steps": 2640, "total_steps": 3280, "loss": 0.1101, "learning_rate": 4.829865281284734e-07, "epoch": 3.2175502742230346, "percentage": 80.49, "elapsed_time": "1:12:36", "remaining_time": "0:17:36"}
265
+ {"current_steps": 2650, "total_steps": 3280, "loss": 0.1057, "learning_rate": 4.684976810418179e-07, "epoch": 3.2297379646556976, "percentage": 80.79, "elapsed_time": "1:12:49", "remaining_time": "0:17:18"}
266
+ {"current_steps": 2660, "total_steps": 3280, "loss": 0.113, "learning_rate": 4.5420698036121285e-07, "epoch": 3.2419256550883606, "percentage": 81.1, "elapsed_time": "1:13:04", "remaining_time": "0:17:01"}
267
+ {"current_steps": 2670, "total_steps": 3280, "loss": 0.1094, "learning_rate": 4.4011581995707267e-07, "epoch": 3.2541133455210236, "percentage": 81.4, "elapsed_time": "1:13:19", "remaining_time": "0:16:45"}
268
+ {"current_steps": 2680, "total_steps": 3280, "loss": 0.1224, "learning_rate": 4.262255742372759e-07, "epoch": 3.2663010359536866, "percentage": 81.71, "elapsed_time": "1:13:34", "remaining_time": "0:16:28"}
269
+ {"current_steps": 2690, "total_steps": 3280, "loss": 0.1038, "learning_rate": 4.1253759801310745e-07, "epoch": 3.2784887263863496, "percentage": 82.01, "elapsed_time": "1:13:50", "remaining_time": "0:16:11"}
270
+ {"current_steps": 2700, "total_steps": 3280, "loss": 0.115, "learning_rate": 3.9905322636711654e-07, "epoch": 3.2906764168190126, "percentage": 82.32, "elapsed_time": "1:14:04", "remaining_time": "0:15:54"}
271
+ {"current_steps": 2710, "total_steps": 3280, "loss": 0.1281, "learning_rate": 3.8577377452289787e-07, "epoch": 3.3028641072516756, "percentage": 82.62, "elapsed_time": "1:14:18", "remaining_time": "0:15:37"}
272
+ {"current_steps": 2720, "total_steps": 3280, "loss": 0.1246, "learning_rate": 3.727005377168036e-07, "epoch": 3.315051797684339, "percentage": 82.93, "elapsed_time": "1:14:35", "remaining_time": "0:15:21"}
273
+ {"current_steps": 2730, "total_steps": 3280, "loss": 0.1173, "learning_rate": 3.5983479107161793e-07, "epoch": 3.327239488117002, "percentage": 83.23, "elapsed_time": "1:14:49", "remaining_time": "0:15:04"}
274
+ {"current_steps": 2740, "total_steps": 3280, "loss": 0.1051, "learning_rate": 3.471777894721767e-07, "epoch": 3.339427178549665, "percentage": 83.54, "elapsed_time": "1:15:02", "remaining_time": "0:14:47"}
275
+ {"current_steps": 2750, "total_steps": 3280, "loss": 0.126, "learning_rate": 3.347307674429784e-07, "epoch": 3.351614868982328, "percentage": 83.84, "elapsed_time": "1:15:15", "remaining_time": "0:14:30"}
276
+ {"current_steps": 2760, "total_steps": 3280, "loss": 0.0962, "learning_rate": 3.224949390277668e-07, "epoch": 3.363802559414991, "percentage": 84.15, "elapsed_time": "1:15:30", "remaining_time": "0:14:13"}
277
+ {"current_steps": 2770, "total_steps": 3280, "loss": 0.096, "learning_rate": 3.1047149767111874e-07, "epoch": 3.375990249847654, "percentage": 84.45, "elapsed_time": "1:15:44", "remaining_time": "0:13:56"}
278
+ {"current_steps": 2780, "total_steps": 3280, "loss": 0.0954, "learning_rate": 2.9866161610203866e-07, "epoch": 3.388177940280317, "percentage": 84.76, "elapsed_time": "1:15:59", "remaining_time": "0:13:39"}
279
+ {"current_steps": 2790, "total_steps": 3280, "loss": 0.1221, "learning_rate": 2.8706644621957605e-07, "epoch": 3.40036563071298, "percentage": 85.06, "elapsed_time": "1:16:16", "remaining_time": "0:13:23"}
280
+ {"current_steps": 2800, "total_steps": 3280, "loss": 0.1054, "learning_rate": 2.756871189804705e-07, "epoch": 3.412553321145643, "percentage": 85.37, "elapsed_time": "1:16:30", "remaining_time": "0:13:06"}
281
+ {"current_steps": 2810, "total_steps": 3280, "loss": 0.1255, "learning_rate": 2.6452474428884294e-07, "epoch": 3.424741011578306, "percentage": 85.67, "elapsed_time": "1:16:45", "remaining_time": "0:12:50"}
282
+ {"current_steps": 2820, "total_steps": 3280, "loss": 0.1157, "learning_rate": 2.5358041088793863e-07, "epoch": 3.436928702010969, "percentage": 85.98, "elapsed_time": "1:17:00", "remaining_time": "0:12:33"}
283
+ {"current_steps": 2830, "total_steps": 3280, "loss": 0.1246, "learning_rate": 2.428551862539366e-07, "epoch": 3.449116392443632, "percentage": 86.28, "elapsed_time": "1:17:16", "remaining_time": "0:12:17"}
284
+ {"current_steps": 2840, "total_steps": 3280, "loss": 0.0985, "learning_rate": 2.323501164918257e-07, "epoch": 3.461304082876295, "percentage": 86.59, "elapsed_time": "1:17:29", "remaining_time": "0:12:00"}
285
+ {"current_steps": 2850, "total_steps": 3280, "loss": 0.1228, "learning_rate": 2.2206622623337864e-07, "epoch": 3.473491773308958, "percentage": 86.89, "elapsed_time": "1:17:44", "remaining_time": "0:11:43"}
286
+ {"current_steps": 2860, "total_steps": 3280, "loss": 0.1148, "learning_rate": 2.1200451853720605e-07, "epoch": 3.485679463741621, "percentage": 87.2, "elapsed_time": "1:18:00", "remaining_time": "0:11:27"}
287
+ {"current_steps": 2870, "total_steps": 3280, "loss": 0.1408, "learning_rate": 2.0216597479092437e-07, "epoch": 3.497867154174284, "percentage": 87.5, "elapsed_time": "1:18:15", "remaining_time": "0:11:10"}
288
+ {"current_steps": 2880, "total_steps": 3280, "loss": 0.1124, "learning_rate": 1.9255155461543385e-07, "epoch": 3.510054844606947, "percentage": 87.8, "elapsed_time": "1:18:30", "remaining_time": "0:10:54"}
289
+ {"current_steps": 2890, "total_steps": 3280, "loss": 0.1217, "learning_rate": 1.8316219577132033e-07, "epoch": 3.5222425350396103, "percentage": 88.11, "elapsed_time": "1:18:44", "remaining_time": "0:10:37"}
290
+ {"current_steps": 2900, "total_steps": 3280, "loss": 0.1087, "learning_rate": 1.7399881406738762e-07, "epoch": 3.5344302254722733, "percentage": 88.41, "elapsed_time": "1:19:00", "remaining_time": "0:10:21"}
291
+ {"current_steps": 2910, "total_steps": 3280, "loss": 0.1047, "learning_rate": 1.650623032713347e-07, "epoch": 3.5466179159049362, "percentage": 88.72, "elapsed_time": "1:19:14", "remaining_time": "0:10:04"}
292
+ {"current_steps": 2920, "total_steps": 3280, "loss": 0.1356, "learning_rate": 1.5635353502257812e-07, "epoch": 3.5588056063375992, "percentage": 89.02, "elapsed_time": "1:19:28", "remaining_time": "0:09:47"}
293
+ {"current_steps": 2930, "total_steps": 3280, "loss": 0.1148, "learning_rate": 1.4787335874723724e-07, "epoch": 3.5709932967702622, "percentage": 89.33, "elapsed_time": "1:19:41", "remaining_time": "0:09:31"}
294
+ {"current_steps": 2940, "total_steps": 3280, "loss": 0.1197, "learning_rate": 1.3962260157528052e-07, "epoch": 3.583180987202925, "percentage": 89.63, "elapsed_time": "1:19:56", "remaining_time": "0:09:14"}
295
+ {"current_steps": 2950, "total_steps": 3280, "loss": 0.1037, "learning_rate": 1.3160206825985457e-07, "epoch": 3.595368677635588, "percentage": 89.94, "elapsed_time": "1:20:07", "remaining_time": "0:08:57"}
296
+ {"current_steps": 2960, "total_steps": 3280, "loss": 0.1106, "learning_rate": 1.2381254109878644e-07, "epoch": 3.607556368068251, "percentage": 90.24, "elapsed_time": "1:20:21", "remaining_time": "0:08:41"}
297
+ {"current_steps": 2970, "total_steps": 3280, "loss": 0.1358, "learning_rate": 1.1625477985828276e-07, "epoch": 3.619744058500914, "percentage": 90.55, "elapsed_time": "1:20:37", "remaining_time": "0:08:24"}
298
+ {"current_steps": 2980, "total_steps": 3280, "loss": 0.1119, "learning_rate": 1.089295216988262e-07, "epoch": 3.631931748933577, "percentage": 90.85, "elapsed_time": "1:20:51", "remaining_time": "0:08:08"}
299
+ {"current_steps": 2990, "total_steps": 3280, "loss": 0.1245, "learning_rate": 1.0183748110327102e-07, "epoch": 3.64411943936624, "percentage": 91.16, "elapsed_time": "1:21:05", "remaining_time": "0:07:51"}
300
+ {"current_steps": 3000, "total_steps": 3280, "loss": 0.1097, "learning_rate": 9.497934980715939e-08, "epoch": 3.656307129798903, "percentage": 91.46, "elapsed_time": "1:21:22", "remaining_time": "0:07:35"}
301
+ {"current_steps": 3010, "total_steps": 3280, "loss": 0.1009, "learning_rate": 8.835579673124677e-08, "epoch": 3.668494820231566, "percentage": 91.77, "elapsed_time": "1:22:28", "remaining_time": "0:07:23"}
302
+ {"current_steps": 3020, "total_steps": 3280, "loss": 0.1167, "learning_rate": 8.196746791626243e-08, "epoch": 3.680682510664229, "percentage": 92.07, "elapsed_time": "1:22:44", "remaining_time": "0:07:07"}
303
+ {"current_steps": 3030, "total_steps": 3280, "loss": 0.1186, "learning_rate": 7.581498645989255e-08, "epoch": 3.692870201096892, "percentage": 92.38, "elapsed_time": "1:23:00", "remaining_time": "0:06:50"}
304
+ {"current_steps": 3040, "total_steps": 3280, "loss": 0.1264, "learning_rate": 6.989895245600702e-08, "epoch": 3.705057891529555, "percentage": 92.68, "elapsed_time": "1:23:16", "remaining_time": "0:06:34"}
305
+ {"current_steps": 3050, "total_steps": 3280, "loss": 0.1304, "learning_rate": 6.421994293612871e-08, "epoch": 3.717245581962218, "percentage": 92.99, "elapsed_time": "1:23:31", "remaining_time": "0:06:17"}
306
+ {"current_steps": 3060, "total_steps": 3280, "loss": 0.1158, "learning_rate": 5.8778511813150365e-08, "epoch": 3.729433272394881, "percentage": 93.29, "elapsed_time": "1:23:45", "remaining_time": "0:06:01"}
307
+ {"current_steps": 3070, "total_steps": 3280, "loss": 0.1045, "learning_rate": 5.357518982730792e-08, "epoch": 3.741620962827544, "percentage": 93.6, "elapsed_time": "1:23:59", "remaining_time": "0:05:44"}
308
+ {"current_steps": 3080, "total_steps": 3280, "loss": 0.1203, "learning_rate": 4.861048449441491e-08, "epoch": 3.753808653260207, "percentage": 93.9, "elapsed_time": "1:24:15", "remaining_time": "0:05:28"}
309
+ {"current_steps": 3090, "total_steps": 3280, "loss": 0.1174, "learning_rate": 4.3884880056359045e-08, "epoch": 3.76599634369287, "percentage": 94.21, "elapsed_time": "1:24:31", "remaining_time": "0:05:11"}
310
+ {"current_steps": 3100, "total_steps": 3280, "loss": 0.103, "learning_rate": 3.939883743387302e-08, "epoch": 3.778184034125533, "percentage": 94.51, "elapsed_time": "1:24:45", "remaining_time": "0:04:55"}
311
+ {"current_steps": 3110, "total_steps": 3280, "loss": 0.1146, "learning_rate": 3.515279418157463e-08, "epoch": 3.790371724558196, "percentage": 94.82, "elapsed_time": "1:25:00", "remaining_time": "0:04:38"}
312
+ {"current_steps": 3120, "total_steps": 3280, "loss": 0.1162, "learning_rate": 3.1147164445292923e-08, "epoch": 3.802559414990859, "percentage": 95.12, "elapsed_time": "1:25:16", "remaining_time": "0:04:22"}
313
+ {"current_steps": 3130, "total_steps": 3280, "loss": 0.1166, "learning_rate": 2.7382338921670693e-08, "epoch": 3.814747105423522, "percentage": 95.43, "elapsed_time": "1:25:29", "remaining_time": "0:04:05"}
314
+ {"current_steps": 3140, "total_steps": 3280, "loss": 0.1123, "learning_rate": 2.3858684820058376e-08, "epoch": 3.826934795856185, "percentage": 95.73, "elapsed_time": "1:25:44", "remaining_time": "0:03:49"}
315
+ {"current_steps": 3150, "total_steps": 3280, "loss": 0.1151, "learning_rate": 2.057654582669738e-08, "epoch": 3.839122486288848, "percentage": 96.04, "elapsed_time": "1:25:58", "remaining_time": "0:03:32"}
316
+ {"current_steps": 3160, "total_steps": 3280, "loss": 0.112, "learning_rate": 1.753624207119775e-08, "epoch": 3.8513101767215114, "percentage": 96.34, "elapsed_time": "1:26:14", "remaining_time": "0:03:16"}
317
+ {"current_steps": 3170, "total_steps": 3280, "loss": 0.1101, "learning_rate": 1.4738070095314527e-08, "epoch": 3.8634978671541744, "percentage": 96.65, "elapsed_time": "1:26:27", "remaining_time": "0:03:00"}
318
+ {"current_steps": 3180, "total_steps": 3280, "loss": 0.1212, "learning_rate": 1.2182302824023107e-08, "epoch": 3.8756855575868374, "percentage": 96.95, "elapsed_time": "1:26:42", "remaining_time": "0:02:43"}
319
+ {"current_steps": 3190, "total_steps": 3280, "loss": 0.1042, "learning_rate": 9.869189538899149e-09, "epoch": 3.8878732480195004, "percentage": 97.26, "elapsed_time": "1:26:56", "remaining_time": "0:02:27"}
320
+ {"current_steps": 3200, "total_steps": 3280, "loss": 0.1231, "learning_rate": 7.798955853805245e-09, "epoch": 3.9000609384521634, "percentage": 97.56, "elapsed_time": "1:27:11", "remaining_time": "0:02:10"}
321
+ {"current_steps": 3210, "total_steps": 3280, "loss": 0.1181, "learning_rate": 5.971803692883804e-09, "epoch": 3.9122486288848264, "percentage": 97.87, "elapsed_time": "1:27:26", "remaining_time": "0:01:54"}
322
+ {"current_steps": 3220, "total_steps": 3280, "loss": 0.1309, "learning_rate": 4.387911270863632e-09, "epoch": 3.9244363193174894, "percentage": 98.17, "elapsed_time": "1:27:42", "remaining_time": "0:01:38"}
323
+ {"current_steps": 3230, "total_steps": 3280, "loss": 0.1027, "learning_rate": 3.0474330756757874e-09, "epoch": 3.9366240097501524, "percentage": 98.48, "elapsed_time": "1:27:56", "remaining_time": "0:01:21"}
324
+ {"current_steps": 3240, "total_steps": 3280, "loss": 0.1265, "learning_rate": 1.9504998533870223e-09, "epoch": 3.9488117001828154, "percentage": 98.78, "elapsed_time": "1:28:12", "remaining_time": "0:01:05"}
325
+ {"current_steps": 3250, "total_steps": 3280, "loss": 0.1099, "learning_rate": 1.0972185954452596e-09, "epoch": 3.9609993906154783, "percentage": 99.09, "elapsed_time": "1:28:27", "remaining_time": "0:00:48"}
326
+ {"current_steps": 3260, "total_steps": 3280, "loss": 0.1164, "learning_rate": 4.876725282457195e-10, "epoch": 3.9731870810481413, "percentage": 99.39, "elapsed_time": "1:28:41", "remaining_time": "0:00:32"}
327
+ {"current_steps": 3270, "total_steps": 3280, "loss": 0.139, "learning_rate": 1.2192110501269005e-10, "epoch": 3.9853747714808043, "percentage": 99.7, "elapsed_time": "1:28:57", "remaining_time": "0:00:16"}
328
+ {"current_steps": 3280, "total_steps": 3280, "loss": 0.1073, "learning_rate": 0.0, "epoch": 3.9975624619134673, "percentage": 100.0, "elapsed_time": "1:29:12", "remaining_time": "0:00:00"}
329
+ {"current_steps": 3280, "total_steps": 3280, "epoch": 3.9975624619134673, "percentage": 100.0, "elapsed_time": "1:30:12", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2338 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 3.9975624619134673,
+ "eval_steps": 50000,
+ "global_step": 3280,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.01218769043266301,
+ "grad_norm": 3.578172206878662,
+ "learning_rate": 5.05050505050505e-07,
+ "loss": 0.7319,
+ "step": 10
+ },
+ {
+ "epoch": 0.02437538086532602,
+ "grad_norm": 4.391892433166504,
+ "learning_rate": 1.01010101010101e-06,
+ "loss": 0.6746,
+ "step": 20
+ },
+ {
+ "epoch": 0.03656307129798903,
+ "grad_norm": 4.259323596954346,
+ "learning_rate": 1.5151515151515152e-06,
+ "loss": 0.722,
+ "step": 30
+ },
+ {
+ "epoch": 0.04875076173065204,
+ "grad_norm": 2.5641062259674072,
+ "learning_rate": 2.02020202020202e-06,
+ "loss": 0.6053,
+ "step": 40
+ },
+ {
+ "epoch": 0.06093845216331505,
+ "grad_norm": 2.138474225997925,
+ "learning_rate": 2.5252525252525258e-06,
+ "loss": 0.467,
+ "step": 50
+ },
+ {
+ "epoch": 0.07312614259597806,
+ "grad_norm": 3.2234861850738525,
+ "learning_rate": 3.0303030303030305e-06,
+ "loss": 0.4346,
+ "step": 60
+ },
+ {
+ "epoch": 0.08531383302864107,
+ "grad_norm": 1.6952118873596191,
+ "learning_rate": 3.5353535353535356e-06,
+ "loss": 0.4145,
+ "step": 70
+ },
+ {
+ "epoch": 0.09750152346130408,
+ "grad_norm": 1.6948118209838867,
+ "learning_rate": 4.04040404040404e-06,
+ "loss": 0.4062,
+ "step": 80
+ },
+ {
+ "epoch": 0.10968921389396709,
+ "grad_norm": 1.8601840734481812,
+ "learning_rate": 4.5454545454545455e-06,
+ "loss": 0.3627,
+ "step": 90
+ },
+ {
+ "epoch": 0.1218769043266301,
+ "grad_norm": 1.8506708145141602,
+ "learning_rate": 4.99999878077914e-06,
+ "loss": 0.3336,
+ "step": 100
+ },
+ {
+ "epoch": 0.1340645947592931,
+ "grad_norm": 1.8687206506729126,
+ "learning_rate": 4.9998524757147466e-06,
+ "loss": 0.3489,
+ "step": 110
+ },
+ {
+ "epoch": 0.14625228519195613,
+ "grad_norm": 2.1408703327178955,
+ "learning_rate": 4.999462342829388e-06,
+ "loss": 0.3617,
+ "step": 120
+ },
+ {
+ "epoch": 0.15843997562461914,
+ "grad_norm": 1.6488077640533447,
+ "learning_rate": 4.9988284201754075e-06,
+ "loss": 0.3894,
+ "step": 130
+ },
+ {
+ "epoch": 0.17062766605728213,
+ "grad_norm": 1.5750221014022827,
+ "learning_rate": 4.99795076958365e-06,
+ "loss": 0.3448,
+ "step": 140
+ },
+ {
+ "epoch": 0.18281535648994515,
+ "grad_norm": 1.7642357349395752,
+ "learning_rate": 4.996829476657414e-06,
+ "loss": 0.3187,
+ "step": 150
+ },
+ {
+ "epoch": 0.19500304692260817,
+ "grad_norm": 1.404475212097168,
+ "learning_rate": 4.995464650764122e-06,
+ "loss": 0.3343,
+ "step": 160
+ },
+ {
+ "epoch": 0.2071907373552712,
+ "grad_norm": 1.5260435342788696,
+ "learning_rate": 4.993856425024637e-06,
+ "loss": 0.3143,
+ "step": 170
+ },
+ {
+ "epoch": 0.21937842778793418,
+ "grad_norm": 1.5985782146453857,
+ "learning_rate": 4.992004956300287e-06,
+ "loss": 0.3301,
+ "step": 180
+ },
+ {
+ "epoch": 0.2315661182205972,
+ "grad_norm": 1.6050117015838623,
+ "learning_rate": 4.989910425177561e-06,
+ "loss": 0.3501,
+ "step": 190
+ },
+ {
+ "epoch": 0.2437538086532602,
+ "grad_norm": 1.5097649097442627,
+ "learning_rate": 4.987573035950499e-06,
+ "loss": 0.283,
+ "step": 200
+ },
+ {
+ "epoch": 0.25594149908592323,
+ "grad_norm": 1.6257539987564087,
+ "learning_rate": 4.984993016600763e-06,
+ "loss": 0.3144,
+ "step": 210
+ },
+ {
+ "epoch": 0.2681291895185862,
+ "grad_norm": 1.656672477722168,
+ "learning_rate": 4.982170618775401e-06,
+ "loss": 0.3221,
+ "step": 220
+ },
+ {
+ "epoch": 0.28031687995124926,
+ "grad_norm": 1.3519649505615234,
+ "learning_rate": 4.979106117762303e-06,
+ "loss": 0.3244,
+ "step": 230
+ },
+ {
+ "epoch": 0.29250457038391225,
+ "grad_norm": 1.7282569408416748,
+ "learning_rate": 4.975799812463348e-06,
+ "loss": 0.3112,
+ "step": 240
+ },
+ {
+ "epoch": 0.30469226081657524,
+ "grad_norm": 1.7917057275772095,
+ "learning_rate": 4.972252025365251e-06,
+ "loss": 0.3321,
+ "step": 250
+ },
+ {
+ "epoch": 0.3168799512492383,
+ "grad_norm": 1.2842172384262085,
+ "learning_rate": 4.968463102508114e-06,
+ "loss": 0.3337,
+ "step": 260
+ },
+ {
+ "epoch": 0.3290676416819013,
+ "grad_norm": 1.4093726873397827,
+ "learning_rate": 4.9644334134516645e-06,
+ "loss": 0.3029,
+ "step": 270
+ },
+ {
+ "epoch": 0.34125533211456427,
+ "grad_norm": 1.5072354078292847,
+ "learning_rate": 4.960163351239216e-06,
+ "loss": 0.3411,
+ "step": 280
+ },
+ {
+ "epoch": 0.3534430225472273,
+ "grad_norm": 1.494828224182129,
+ "learning_rate": 4.955653332359331e-06,
+ "loss": 0.3192,
+ "step": 290
+ },
+ {
+ "epoch": 0.3656307129798903,
+ "grad_norm": 1.6439958810806274,
+ "learning_rate": 4.950903796705201e-06,
+ "loss": 0.3113,
+ "step": 300
+ },
+ {
+ "epoch": 0.3778184034125533,
+ "grad_norm": 1.5399495363235474,
+ "learning_rate": 4.94591520753173e-06,
+ "loss": 0.3191,
+ "step": 310
+ },
+ {
+ "epoch": 0.39000609384521634,
+ "grad_norm": 1.697625756263733,
+ "learning_rate": 4.940688051410364e-06,
+ "loss": 0.3092,
+ "step": 320
+ },
+ {
+ "epoch": 0.40219378427787933,
+ "grad_norm": 1.5218887329101562,
+ "learning_rate": 4.935222838181623e-06,
+ "loss": 0.3343,
+ "step": 330
+ },
+ {
+ "epoch": 0.4143814747105424,
+ "grad_norm": 1.6118853092193604,
+ "learning_rate": 4.929520100905375e-06,
+ "loss": 0.3115,
+ "step": 340
+ },
+ {
+ "epoch": 0.42656916514320536,
+ "grad_norm": 1.3823405504226685,
+ "learning_rate": 4.923580395808846e-06,
+ "loss": 0.2922,
+ "step": 350
+ },
+ {
+ "epoch": 0.43875685557586835,
+ "grad_norm": 1.289500117301941,
+ "learning_rate": 4.917404302232362e-06,
+ "loss": 0.3211,
+ "step": 360
+ },
+ {
+ "epoch": 0.4509445460085314,
+ "grad_norm": 1.5171111822128296,
+ "learning_rate": 4.910992422572845e-06,
+ "loss": 0.3205,
+ "step": 370
+ },
+ {
+ "epoch": 0.4631322364411944,
+ "grad_norm": 1.4650630950927734,
+ "learning_rate": 4.904345382225058e-06,
+ "loss": 0.331,
+ "step": 380
+ },
+ {
+ "epoch": 0.4753199268738574,
+ "grad_norm": 1.4301706552505493,
+ "learning_rate": 4.897463829520604e-06,
+ "loss": 0.3224,
+ "step": 390
+ },
+ {
+ "epoch": 0.4875076173065204,
+ "grad_norm": 1.3901742696762085,
+ "learning_rate": 4.890348435664694e-06,
+ "loss": 0.3054,
+ "step": 400
+ },
+ {
+ "epoch": 0.4996953077391834,
+ "grad_norm": 1.703951358795166,
+ "learning_rate": 4.882999894670669e-06,
+ "loss": 0.2973,
+ "step": 410
+ },
+ {
+ "epoch": 0.5118829981718465,
+ "grad_norm": 1.3318638801574707,
+ "learning_rate": 4.875418923292322e-06,
+ "loss": 0.295,
+ "step": 420
+ },
+ {
+ "epoch": 0.5240706886045094,
+ "grad_norm": 1.420301914215088,
+ "learning_rate": 4.867606260953979e-06,
+ "loss": 0.3027,
+ "step": 430
+ },
+ {
+ "epoch": 0.5362583790371724,
+ "grad_norm": 1.350051999092102,
+ "learning_rate": 4.85956266967838e-06,
+ "loss": 0.3007,
+ "step": 440
+ },
+ {
+ "epoch": 0.5484460694698354,
+ "grad_norm": 1.2033629417419434,
+ "learning_rate": 4.8512889340123535e-06,
+ "loss": 0.3168,
+ "step": 450
+ },
+ {
+ "epoch": 0.5606337599024985,
+ "grad_norm": 1.508357286453247,
+ "learning_rate": 4.842785860950296e-06,
+ "loss": 0.3137,
+ "step": 460
+ },
+ {
+ "epoch": 0.5728214503351615,
+ "grad_norm": 1.8802692890167236,
+ "learning_rate": 4.834054279855459e-06,
+ "loss": 0.3118,
+ "step": 470
+ },
+ {
+ "epoch": 0.5850091407678245,
+ "grad_norm": 1.3695592880249023,
+ "learning_rate": 4.825095042379054e-06,
+ "loss": 0.289,
+ "step": 480
+ },
+ {
+ "epoch": 0.5971968312004875,
+ "grad_norm": 1.6230717897415161,
+ "learning_rate": 4.815909022377189e-06,
+ "loss": 0.2937,
+ "step": 490
+ },
+ {
+ "epoch": 0.6093845216331505,
+ "grad_norm": 1.2749711275100708,
+ "learning_rate": 4.806497115825629e-06,
+ "loss": 0.3063,
+ "step": 500
+ },
+ {
+ "epoch": 0.6215722120658135,
+ "grad_norm": 1.3291198015213013,
+ "learning_rate": 4.796860240732414e-06,
+ "loss": 0.316,
+ "step": 510
+ },
+ {
+ "epoch": 0.6337599024984766,
+ "grad_norm": 1.5014513731002808,
+ "learning_rate": 4.786999337048311e-06,
+ "loss": 0.3127,
+ "step": 520
+ },
+ {
+ "epoch": 0.6459475929311396,
+ "grad_norm": 1.442225456237793,
+ "learning_rate": 4.77691536657514e-06,
+ "loss": 0.3057,
+ "step": 530
+ },
+ {
+ "epoch": 0.6581352833638026,
+ "grad_norm": 1.5001622438430786,
+ "learning_rate": 4.766609312871958e-06,
+ "loss": 0.3006,
+ "step": 540
+ },
+ {
+ "epoch": 0.6703229737964655,
+ "grad_norm": 1.5903184413909912,
+ "learning_rate": 4.756082181159131e-06,
+ "loss": 0.3131,
+ "step": 550
+ },
+ {
+ "epoch": 0.6825106642291285,
+ "grad_norm": 1.5622376203536987,
+ "learning_rate": 4.745334998220282e-06,
+ "loss": 0.3079,
+ "step": 560
+ },
+ {
+ "epoch": 0.6946983546617916,
+ "grad_norm": 1.5198042392730713,
+ "learning_rate": 4.734368812302148e-06,
+ "loss": 0.3257,
+ "step": 570
+ },
+ {
+ "epoch": 0.7068860450944546,
+ "grad_norm": 1.7607523202896118,
+ "learning_rate": 4.723184693012334e-06,
+ "loss": 0.2991,
+ "step": 580
+ },
+ {
+ "epoch": 0.7190737355271176,
+ "grad_norm": 1.416548252105713,
+ "learning_rate": 4.711783731214984e-06,
+ "loss": 0.3093,
+ "step": 590
+ },
+ {
+ "epoch": 0.7312614259597806,
+ "grad_norm": 1.419439435005188,
+ "learning_rate": 4.700167038924386e-06,
+ "loss": 0.2807,
+ "step": 600
+ },
+ {
+ "epoch": 0.7434491163924436,
+ "grad_norm": 1.4541571140289307,
+ "learning_rate": 4.688335749196511e-06,
+ "loss": 0.3026,
+ "step": 610
+ },
+ {
+ "epoch": 0.7556368068251066,
+ "grad_norm": 1.3236526250839233,
+ "learning_rate": 4.676291016018491e-06,
+ "loss": 0.2992,
+ "step": 620
+ },
+ {
+ "epoch": 0.7678244972577697,
+ "grad_norm": 1.555803656578064,
+ "learning_rate": 4.664034014196069e-06,
+ "loss": 0.3223,
+ "step": 630
+ },
+ {
+ "epoch": 0.7800121876904327,
+ "grad_norm": 1.5638824701309204,
+ "learning_rate": 4.651565939239011e-06,
+ "loss": 0.3069,
+ "step": 640
+ },
+ {
+ "epoch": 0.7921998781230957,
+ "grad_norm": 1.5447410345077515,
+ "learning_rate": 4.638888007244498e-06,
+ "loss": 0.3112,
+ "step": 650
+ },
+ {
+ "epoch": 0.8043875685557587,
+ "grad_norm": 1.562729835510254,
+ "learning_rate": 4.626001454778511e-06,
+ "loss": 0.2949,
+ "step": 660
+ },
+ {
+ "epoch": 0.8165752589884216,
+ "grad_norm": 1.2367265224456787,
+ "learning_rate": 4.612907538755224e-06,
+ "loss": 0.3128,
+ "step": 670
+ },
+ {
+ "epoch": 0.8287629494210847,
+ "grad_norm": 1.6397625207901,
+ "learning_rate": 4.599607536314406e-06,
+ "loss": 0.3115,
485
+ "step": 680
486
+ },
487
+ {
488
+ "epoch": 0.8409506398537477,
489
+ "grad_norm": 1.392140507698059,
490
+ "learning_rate": 4.586102744696851e-06,
491
+ "loss": 0.3189,
492
+ "step": 690
493
+ },
494
+ {
495
+ "epoch": 0.8531383302864107,
496
+ "grad_norm": 1.19057035446167,
497
+ "learning_rate": 4.572394481117855e-06,
498
+ "loss": 0.2756,
499
+ "step": 700
500
+ },
501
+ {
502
+ "epoch": 0.8653260207190737,
503
+ "grad_norm": 1.3636456727981567,
504
+ "learning_rate": 4.558484082638729e-06,
505
+ "loss": 0.3078,
506
+ "step": 710
507
+ },
508
+ {
509
+ "epoch": 0.8775137111517367,
510
+ "grad_norm": 1.2579822540283203,
511
+ "learning_rate": 4.544372906036399e-06,
512
+ "loss": 0.3017,
513
+ "step": 720
514
+ },
515
+ {
516
+ "epoch": 0.8897014015843998,
517
+ "grad_norm": 1.3275364637374878,
518
+ "learning_rate": 4.5300623276710604e-06,
519
+ "loss": 0.3277,
520
+ "step": 730
521
+ },
522
+ {
523
+ "epoch": 0.9018890920170628,
524
+ "grad_norm": 1.1951191425323486,
525
+ "learning_rate": 4.515553743351934e-06,
526
+ "loss": 0.2817,
527
+ "step": 740
528
+ },
529
+ {
530
+ "epoch": 0.9140767824497258,
531
+ "grad_norm": 1.5521240234375,
532
+ "learning_rate": 4.5008485682011265e-06,
533
+ "loss": 0.2845,
534
+ "step": 750
535
+ },
536
+ {
537
+ "epoch": 0.9262644728823888,
538
+ "grad_norm": 1.7351168394088745,
539
+ "learning_rate": 4.4859482365156e-06,
540
+ "loss": 0.3032,
541
+ "step": 760
542
+ },
543
+ {
544
+ "epoch": 0.9384521633150518,
545
+ "grad_norm": 1.2030630111694336,
546
+ "learning_rate": 4.470854201627278e-06,
547
+ "loss": 0.3154,
548
+ "step": 770
549
+ },
550
+ {
551
+ "epoch": 0.9506398537477148,
552
+ "grad_norm": 1.4315277338027954,
553
+ "learning_rate": 4.4555679357612925e-06,
554
+ "loss": 0.3276,
555
+ "step": 780
556
+ },
557
+ {
558
+ "epoch": 0.9628275441803779,
559
+ "grad_norm": 1.5054643154144287,
560
+ "learning_rate": 4.440090929892382e-06,
561
+ "loss": 0.2941,
562
+ "step": 790
563
+ },
564
+ {
565
+ "epoch": 0.9750152346130408,
566
+ "grad_norm": 1.1270413398742676,
567
+ "learning_rate": 4.424424693599474e-06,
568
+ "loss": 0.2795,
569
+ "step": 800
570
+ },
571
+ {
572
+ "epoch": 0.9872029250457038,
573
+ "grad_norm": 1.823027491569519,
574
+ "learning_rate": 4.4085707549184395e-06,
575
+ "loss": 0.2968,
576
+ "step": 810
577
+ },
578
+ {
579
+ "epoch": 0.9993906154783668,
580
+ "grad_norm": 1.2933018207550049,
581
+ "learning_rate": 4.392530660193058e-06,
582
+ "loss": 0.289,
583
+ "step": 820
584
+ },
585
+ {
586
+ "epoch": 1.01157830591103,
587
+ "grad_norm": 1.5592014789581299,
588
+ "learning_rate": 4.376305973924188e-06,
589
+ "loss": 0.2459,
590
+ "step": 830
591
+ },
592
+ {
593
+ "epoch": 1.023765996343693,
594
+ "grad_norm": 1.4350308179855347,
595
+ "learning_rate": 4.359898278617171e-06,
596
+ "loss": 0.2095,
597
+ "step": 840
598
+ },
599
+ {
600
+ "epoch": 1.035953686776356,
601
+ "grad_norm": 1.4992603063583374,
602
+ "learning_rate": 4.343309174627484e-06,
603
+ "loss": 0.2504,
604
+ "step": 850
605
+ },
606
+ {
607
+ "epoch": 1.048141377209019,
608
+ "grad_norm": 1.5574085712432861,
609
+ "learning_rate": 4.326540280004634e-06,
610
+ "loss": 0.2335,
611
+ "step": 860
612
+ },
613
+ {
614
+ "epoch": 1.0603290676416819,
615
+ "grad_norm": 1.4838849306106567,
616
+ "learning_rate": 4.309593230334355e-06,
617
+ "loss": 0.2312,
618
+ "step": 870
619
+ },
620
+ {
621
+ "epoch": 1.0725167580743449,
622
+ "grad_norm": 1.5157781839370728,
623
+ "learning_rate": 4.292469678579063e-06,
624
+ "loss": 0.2367,
625
+ "step": 880
626
+ },
627
+ {
628
+ "epoch": 1.0847044485070079,
629
+ "grad_norm": 1.3213136196136475,
630
+ "learning_rate": 4.275171294916641e-06,
631
+ "loss": 0.2445,
632
+ "step": 890
633
+ },
634
+ {
635
+ "epoch": 1.0968921389396709,
636
+ "grad_norm": 1.2431010007858276,
637
+ "learning_rate": 4.2576997665775275e-06,
638
+ "loss": 0.2329,
639
+ "step": 900
640
+ },
641
+ {
642
+ "epoch": 1.1090798293723338,
643
+ "grad_norm": 1.3120806217193604,
644
+ "learning_rate": 4.2400567976801575e-06,
645
+ "loss": 0.2174,
646
+ "step": 910
647
+ },
648
+ {
649
+ "epoch": 1.1212675198049968,
650
+ "grad_norm": 1.7084835767745972,
651
+ "learning_rate": 4.22224410906474e-06,
652
+ "loss": 0.2304,
653
+ "step": 920
654
+ },
655
+ {
656
+ "epoch": 1.13345521023766,
657
+ "grad_norm": 1.3486850261688232,
658
+ "learning_rate": 4.204263438125421e-06,
659
+ "loss": 0.2206,
660
+ "step": 930
661
+ },
662
+ {
663
+ "epoch": 1.145642900670323,
664
+ "grad_norm": 1.3106797933578491,
665
+ "learning_rate": 4.186116538640814e-06,
666
+ "loss": 0.2348,
667
+ "step": 940
668
+ },
669
+ {
670
+ "epoch": 1.157830591102986,
671
+ "grad_norm": 1.3655650615692139,
672
+ "learning_rate": 4.167805180602951e-06,
673
+ "loss": 0.2071,
674
+ "step": 950
675
+ },
676
+ {
677
+ "epoch": 1.170018281535649,
678
+ "grad_norm": 1.3001152276992798,
679
+ "learning_rate": 4.149331150044635e-06,
680
+ "loss": 0.2053,
681
+ "step": 960
682
+ },
683
+ {
684
+ "epoch": 1.182205971968312,
685
+ "grad_norm": 1.604116678237915,
686
+ "learning_rate": 4.130696248865244e-06,
687
+ "loss": 0.2068,
688
+ "step": 970
689
+ },
690
+ {
691
+ "epoch": 1.194393662400975,
692
+ "grad_norm": 1.3530467748641968,
693
+ "learning_rate": 4.111902294654971e-06,
694
+ "loss": 0.2468,
695
+ "step": 980
696
+ },
697
+ {
698
+ "epoch": 1.206581352833638,
699
+ "grad_norm": 1.5899338722229004,
700
+ "learning_rate": 4.09295112051755e-06,
701
+ "loss": 0.2254,
702
+ "step": 990
703
+ },
704
+ {
705
+ "epoch": 1.218769043266301,
706
+ "grad_norm": 1.774383306503296,
707
+ "learning_rate": 4.073844574891452e-06,
708
+ "loss": 0.2306,
709
+ "step": 1000
710
+ },
711
+ {
712
+ "epoch": 1.230956733698964,
713
+ "grad_norm": 1.1993497610092163,
714
+ "learning_rate": 4.054584521369603e-06,
715
+ "loss": 0.2294,
716
+ "step": 1010
717
+ },
718
+ {
719
+ "epoch": 1.2431444241316272,
720
+ "grad_norm": 1.2832852602005005,
721
+ "learning_rate": 4.03517283851761e-06,
722
+ "loss": 0.2222,
723
+ "step": 1020
724
+ },
725
+ {
726
+ "epoch": 1.2553321145642902,
727
+ "grad_norm": 1.370401382446289,
728
+ "learning_rate": 4.01561141969053e-06,
729
+ "loss": 0.2636,
730
+ "step": 1030
731
+ },
732
+ {
733
+ "epoch": 1.2675198049969532,
734
+ "grad_norm": 1.5473796129226685,
735
+ "learning_rate": 3.995902172848205e-06,
736
+ "loss": 0.2509,
737
+ "step": 1040
738
+ },
739
+ {
740
+ "epoch": 1.2797074954296161,
741
+ "grad_norm": 1.3431543111801147,
742
+ "learning_rate": 3.976047020369155e-06,
743
+ "loss": 0.2165,
744
+ "step": 1050
745
+ },
746
+ {
747
+ "epoch": 1.2918951858622791,
748
+ "grad_norm": 1.580149531364441,
749
+ "learning_rate": 3.9560478988630866e-06,
750
+ "loss": 0.22,
751
+ "step": 1060
752
+ },
753
+ {
754
+ "epoch": 1.3040828762949421,
755
+ "grad_norm": 1.5631303787231445,
756
+ "learning_rate": 3.935906758981993e-06,
757
+ "loss": 0.2258,
758
+ "step": 1070
759
+ },
760
+ {
761
+ "epoch": 1.3162705667276051,
762
+ "grad_norm": 1.7426769733428955,
763
+ "learning_rate": 3.9156255652299005e-06,
764
+ "loss": 0.2378,
765
+ "step": 1080
766
+ },
767
+ {
768
+ "epoch": 1.328458257160268,
769
+ "grad_norm": 1.5809776782989502,
770
+ "learning_rate": 3.89520629577125e-06,
771
+ "loss": 0.2477,
772
+ "step": 1090
773
+ },
774
+ {
775
+ "epoch": 1.340645947592931,
776
+ "grad_norm": 1.4274479150772095,
777
+ "learning_rate": 3.8746509422379575e-06,
778
+ "loss": 0.2073,
779
+ "step": 1100
780
+ },
781
+ {
782
+ "epoch": 1.352833638025594,
783
+ "grad_norm": 1.4454461336135864,
784
+ "learning_rate": 3.853961509535159e-06,
785
+ "loss": 0.2199,
786
+ "step": 1110
787
+ },
788
+ {
789
+ "epoch": 1.365021328458257,
790
+ "grad_norm": 1.6029589176177979,
791
+ "learning_rate": 3.83314001564565e-06,
792
+ "loss": 0.2284,
793
+ "step": 1120
794
+ },
795
+ {
796
+ "epoch": 1.37720901889092,
797
+ "grad_norm": 1.6308578252792358,
798
+ "learning_rate": 3.8121884914330663e-06,
799
+ "loss": 0.2372,
800
+ "step": 1130
801
+ },
802
+ {
803
+ "epoch": 1.389396709323583,
804
+ "grad_norm": 1.8481136560440063,
805
+ "learning_rate": 3.791108980443794e-06,
806
+ "loss": 0.2565,
807
+ "step": 1140
808
+ },
809
+ {
810
+ "epoch": 1.4015843997562463,
811
+ "grad_norm": 1.325615644454956,
812
+ "learning_rate": 3.769903538707652e-06,
813
+ "loss": 0.2066,
814
+ "step": 1150
815
+ },
816
+ {
817
+ "epoch": 1.4137720901889093,
818
+ "grad_norm": 2.084979295730591,
819
+ "learning_rate": 3.7485742345373517e-06,
820
+ "loss": 0.2217,
821
+ "step": 1160
822
+ },
823
+ {
824
+ "epoch": 1.4259597806215722,
825
+ "grad_norm": 1.0964964628219604,
826
+ "learning_rate": 3.727123148326758e-06,
827
+ "loss": 0.2218,
828
+ "step": 1170
829
+ },
830
+ {
831
+ "epoch": 1.4381474710542352,
832
+ "grad_norm": 1.4757567644119263,
833
+ "learning_rate": 3.705552372347981e-06,
834
+ "loss": 0.225,
835
+ "step": 1180
836
+ },
837
+ {
838
+ "epoch": 1.4503351614868982,
839
+ "grad_norm": 1.6368629932403564,
840
+ "learning_rate": 3.683864010547294e-06,
841
+ "loss": 0.2346,
842
+ "step": 1190
843
+ },
844
+ {
845
+ "epoch": 1.4625228519195612,
846
+ "grad_norm": 1.3107318878173828,
847
+ "learning_rate": 3.662060178339927e-06,
848
+ "loss": 0.2393,
849
+ "step": 1200
850
+ },
851
+ {
852
+ "epoch": 1.4747105423522242,
853
+ "grad_norm": 1.5922001600265503,
854
+ "learning_rate": 3.6401430024037315e-06,
855
+ "loss": 0.2361,
856
+ "step": 1210
857
+ },
858
+ {
859
+ "epoch": 1.4868982327848872,
860
+ "grad_norm": 1.651711344718933,
861
+ "learning_rate": 3.618114620471756e-06,
862
+ "loss": 0.2198,
863
+ "step": 1220
864
+ },
865
+ {
866
+ "epoch": 1.4990859232175504,
867
+ "grad_norm": 1.4375849962234497,
868
+ "learning_rate": 3.5959771811237342e-06,
869
+ "loss": 0.2396,
870
+ "step": 1230
871
+ },
872
+ {
873
+ "epoch": 1.5112736136502134,
874
+ "grad_norm": 1.561681866645813,
875
+ "learning_rate": 3.573732843576519e-06,
876
+ "loss": 0.2308,
877
+ "step": 1240
878
+ },
879
+ {
880
+ "epoch": 1.5234613040828764,
881
+ "grad_norm": 1.8882336616516113,
882
+ "learning_rate": 3.5513837774734816e-06,
883
+ "loss": 0.2051,
884
+ "step": 1250
885
+ },
886
+ {
887
+ "epoch": 1.5356489945155394,
888
+ "grad_norm": 1.4128990173339844,
889
+ "learning_rate": 3.5289321626728912e-06,
890
+ "loss": 0.2526,
891
+ "step": 1260
892
+ },
893
+ {
894
+ "epoch": 1.5478366849482024,
895
+ "grad_norm": 1.9354671239852905,
896
+ "learning_rate": 3.5063801890352955e-06,
897
+ "loss": 0.2112,
898
+ "step": 1270
899
+ },
900
+ {
901
+ "epoch": 1.5600243753808654,
902
+ "grad_norm": 1.6497639417648315,
903
+ "learning_rate": 3.4837300562099324e-06,
904
+ "loss": 0.2199,
905
+ "step": 1280
906
+ },
907
+ {
908
+ "epoch": 1.5722120658135283,
909
+ "grad_norm": 1.467066764831543,
910
+ "learning_rate": 3.4609839734201793e-06,
911
+ "loss": 0.249,
912
+ "step": 1290
913
+ },
914
+ {
915
+ "epoch": 1.5843997562461913,
916
+ "grad_norm": 1.3116928339004517,
917
+ "learning_rate": 3.4381441592480756e-06,
918
+ "loss": 0.2634,
919
+ "step": 1300
920
+ },
921
+ {
922
+ "epoch": 1.5965874466788543,
923
+ "grad_norm": 1.9211691617965698,
924
+ "learning_rate": 3.4152128414179263e-06,
925
+ "loss": 0.2243,
926
+ "step": 1310
927
+ },
928
+ {
929
+ "epoch": 1.6087751371115173,
930
+ "grad_norm": 1.396285057067871,
931
+ "learning_rate": 3.3921922565790188e-06,
932
+ "loss": 0.2478,
933
+ "step": 1320
934
+ },
935
+ {
936
+ "epoch": 1.6209628275441803,
937
+ "grad_norm": 1.3925182819366455,
938
+ "learning_rate": 3.3690846500874664e-06,
939
+ "loss": 0.2005,
940
+ "step": 1330
941
+ },
942
+ {
943
+ "epoch": 1.6331505179768433,
944
+ "grad_norm": 1.3843952417373657,
945
+ "learning_rate": 3.345892275787204e-06,
946
+ "loss": 0.2517,
947
+ "step": 1340
948
+ },
949
+ {
950
+ "epoch": 1.6453382084095063,
951
+ "grad_norm": 1.3334903717041016,
952
+ "learning_rate": 3.3226173957901533e-06,
953
+ "loss": 0.2361,
954
+ "step": 1350
955
+ },
956
+ {
957
+ "epoch": 1.6575258988421693,
958
+ "grad_norm": 1.5368155241012573,
959
+ "learning_rate": 3.2992622802555844e-06,
960
+ "loss": 0.2228,
961
+ "step": 1360
962
+ },
963
+ {
964
+ "epoch": 1.6697135892748323,
965
+ "grad_norm": 1.563447117805481,
966
+ "learning_rate": 3.2758292071686928e-06,
967
+ "loss": 0.2347,
968
+ "step": 1370
969
+ },
970
+ {
971
+ "epoch": 1.6819012797074955,
972
+ "grad_norm": 1.3434542417526245,
973
+ "learning_rate": 3.2523204621184094e-06,
974
+ "loss": 0.2326,
975
+ "step": 1380
976
+ },
977
+ {
978
+ "epoch": 1.6940889701401585,
979
+ "grad_norm": 1.6988780498504639,
980
+ "learning_rate": 3.2287383380744746e-06,
981
+ "loss": 0.2281,
982
+ "step": 1390
983
+ },
984
+ {
985
+ "epoch": 1.7062766605728215,
986
+ "grad_norm": 1.9776115417480469,
987
+ "learning_rate": 3.2050851351637853e-06,
988
+ "loss": 0.2109,
989
+ "step": 1400
990
+ },
991
+ {
992
+ "epoch": 1.7184643510054844,
993
+ "grad_norm": 1.5286093950271606,
994
+ "learning_rate": 3.1813631604460504e-06,
995
+ "loss": 0.2387,
996
+ "step": 1410
997
+ },
998
+ {
999
+ "epoch": 1.7306520414381474,
1000
+ "grad_norm": 1.8186579942703247,
1001
+ "learning_rate": 3.1575747276887657e-06,
1002
+ "loss": 0.2348,
1003
+ "step": 1420
1004
+ },
1005
+ {
1006
+ "epoch": 1.7428397318708104,
1007
+ "grad_norm": 1.8635733127593994,
1008
+ "learning_rate": 3.1337221571415388e-06,
1009
+ "loss": 0.2323,
1010
+ "step": 1430
1011
+ },
1012
+ {
1013
+ "epoch": 1.7550274223034736,
1014
+ "grad_norm": 1.9394007921218872,
1015
+ "learning_rate": 3.1098077753097763e-06,
1016
+ "loss": 0.238,
1017
+ "step": 1440
1018
+ },
1019
+ {
1020
+ "epoch": 1.7672151127361366,
1021
+ "grad_norm": 1.6605966091156006,
1022
+ "learning_rate": 3.085833914727765e-06,
1023
+ "loss": 0.2223,
1024
+ "step": 1450
1025
+ },
1026
+ {
1027
+ "epoch": 1.7794028031687996,
1028
+ "grad_norm": 1.7482880353927612,
1029
+ "learning_rate": 3.0618029137311634e-06,
1030
+ "loss": 0.2271,
1031
+ "step": 1460
1032
+ },
1033
+ {
1034
+ "epoch": 1.7915904936014626,
1035
+ "grad_norm": 1.3232003450393677,
1036
+ "learning_rate": 3.037717116228929e-06,
1037
+ "loss": 0.2372,
1038
+ "step": 1470
1039
+ },
1040
+ {
1041
+ "epoch": 1.8037781840341256,
1042
+ "grad_norm": 1.541633129119873,
1043
+ "learning_rate": 3.013578871474699e-06,
1044
+ "loss": 0.2397,
1045
+ "step": 1480
1046
+ },
1047
+ {
1048
+ "epoch": 1.8159658744667886,
1049
+ "grad_norm": 1.5761010646820068,
1050
+ "learning_rate": 2.9893905338376503e-06,
1051
+ "loss": 0.2237,
1052
+ "step": 1490
1053
+ },
1054
+ {
1055
+ "epoch": 1.8281535648994516,
1056
+ "grad_norm": 1.5969911813735962,
1057
+ "learning_rate": 2.965154462572869e-06,
1058
+ "loss": 0.2099,
1059
+ "step": 1500
1060
+ },
1061
+ {
1062
+ "epoch": 1.8403412553321146,
1063
+ "grad_norm": 1.5702909231185913,
1064
+ "learning_rate": 2.9408730215912247e-06,
1065
+ "loss": 0.2205,
1066
+ "step": 1510
1067
+ },
1068
+ {
1069
+ "epoch": 1.8525289457647776,
1070
+ "grad_norm": 1.5175156593322754,
1071
+ "learning_rate": 2.91654857922881e-06,
1072
+ "loss": 0.2149,
1073
+ "step": 1520
1074
+ },
1075
+ {
1076
+ "epoch": 1.8647166361974405,
1077
+ "grad_norm": 1.2331769466400146,
1078
+ "learning_rate": 2.892183508015939e-06,
1079
+ "loss": 0.2309,
1080
+ "step": 1530
1081
+ },
1082
+ {
1083
+ "epoch": 1.8769043266301035,
1084
+ "grad_norm": 1.3271231651306152,
1085
+ "learning_rate": 2.867780184445735e-06,
1086
+ "loss": 0.2254,
1087
+ "step": 1540
1088
+ },
1089
+ {
1090
+ "epoch": 1.8890920170627665,
1091
+ "grad_norm": 1.648229956626892,
1092
+ "learning_rate": 2.8433409887423397e-06,
1093
+ "loss": 0.2251,
1094
+ "step": 1550
1095
+ },
1096
+ {
1097
+ "epoch": 1.9012797074954295,
1098
+ "grad_norm": 1.3332548141479492,
1099
+ "learning_rate": 2.8188683046287496e-06,
1100
+ "loss": 0.2375,
1101
+ "step": 1560
1102
+ },
1103
+ {
1104
+ "epoch": 1.9134673979280925,
1105
+ "grad_norm": 1.6312288045883179,
1106
+ "learning_rate": 2.794364519094317e-06,
1107
+ "loss": 0.2195,
1108
+ "step": 1570
1109
+ },
1110
+ {
1111
+ "epoch": 1.9256550883607555,
1112
+ "grad_norm": 1.9400866031646729,
1113
+ "learning_rate": 2.7698320221619278e-06,
1114
+ "loss": 0.1939,
1115
+ "step": 1580
1116
+ },
1117
+ {
1118
+ "epoch": 1.9378427787934185,
1119
+ "grad_norm": 1.6435078382492065,
1120
+ "learning_rate": 2.7452732066548914e-06,
1121
+ "loss": 0.2419,
1122
+ "step": 1590
1123
+ },
1124
+ {
1125
+ "epoch": 1.9500304692260817,
1126
+ "grad_norm": 1.4376393556594849,
1127
+ "learning_rate": 2.7206904679635465e-06,
1128
+ "loss": 0.1974,
1129
+ "step": 1600
1130
+ },
1131
+ {
1132
+ "epoch": 1.9622181596587447,
1133
+ "grad_norm": 1.4124246835708618,
1134
+ "learning_rate": 2.6960862038116265e-06,
1135
+ "loss": 0.251,
1136
+ "step": 1610
1137
+ },
1138
+ {
1139
+ "epoch": 1.9744058500914077,
1140
+ "grad_norm": 1.6351146697998047,
1141
+ "learning_rate": 2.6714628140223885e-06,
1142
+ "loss": 0.2148,
1143
+ "step": 1620
1144
+ },
1145
+ {
1146
+ "epoch": 1.9865935405240707,
1147
+ "grad_norm": 1.588615894317627,
1148
+ "learning_rate": 2.6468227002845476e-06,
1149
+ "loss": 0.2322,
1150
+ "step": 1630
1151
+ },
1152
+ {
1153
+ "epoch": 1.9987812309567337,
1154
+ "grad_norm": 1.5556650161743164,
1155
+ "learning_rate": 2.6221682659180186e-06,
1156
+ "loss": 0.2059,
1157
+ "step": 1640
1158
+ },
1159
+ {
1160
+ "epoch": 2.010968921389397,
1161
+ "grad_norm": 1.7614095211029053,
1162
+ "learning_rate": 2.597501915639507e-06,
1163
+ "loss": 0.1819,
1164
+ "step": 1650
1165
+ },
1166
+ {
1167
+ "epoch": 2.02315661182206,
1168
+ "grad_norm": 1.7154064178466797,
1169
+ "learning_rate": 2.5728260553279592e-06,
1170
+ "loss": 0.1463,
1171
+ "step": 1660
1172
+ },
1173
+ {
1174
+ "epoch": 2.035344302254723,
1175
+ "grad_norm": 1.9462904930114746,
1176
+ "learning_rate": 2.5481430917899e-06,
1177
+ "loss": 0.1563,
1178
+ "step": 1670
1179
+ },
1180
+ {
1181
+ "epoch": 2.047531992687386,
1182
+ "grad_norm": 1.5392667055130005,
1183
+ "learning_rate": 2.523455432524681e-06,
1184
+ "loss": 0.1488,
1185
+ "step": 1680
1186
+ },
1187
+ {
1188
+ "epoch": 2.059719683120049,
1189
+ "grad_norm": 1.2421252727508545,
1190
+ "learning_rate": 2.4987654854896606e-06,
1191
+ "loss": 0.1599,
1192
+ "step": 1690
1193
+ },
1194
+ {
1195
+ "epoch": 2.071907373552712,
1196
+ "grad_norm": 1.9953041076660156,
1197
+ "learning_rate": 2.4740756588653388e-06,
1198
+ "loss": 0.1493,
1199
+ "step": 1700
1200
+ },
1201
+ {
1202
+ "epoch": 2.084095063985375,
1203
+ "grad_norm": 1.3034099340438843,
1204
+ "learning_rate": 2.4493883608204703e-06,
1205
+ "loss": 0.161,
1206
+ "step": 1710
1207
+ },
1208
+ {
1209
+ "epoch": 2.096282754418038,
1210
+ "grad_norm": 2.1974692344665527,
1211
+ "learning_rate": 2.4247059992771836e-06,
1212
+ "loss": 0.1705,
1213
+ "step": 1720
1214
+ },
1215
+ {
1216
+ "epoch": 2.108470444850701,
1217
+ "grad_norm": 1.4389041662216187,
1218
+ "learning_rate": 2.4000309816761105e-06,
1219
+ "loss": 0.15,
1220
+ "step": 1730
1221
+ },
1222
+ {
1223
+ "epoch": 2.1206581352833638,
1224
+ "grad_norm": 1.538169503211975,
1225
+ "learning_rate": 2.375365714741584e-06,
1226
+ "loss": 0.1556,
1227
+ "step": 1740
1228
+ },
1229
+ {
1230
+ "epoch": 2.1328458257160268,
1231
+ "grad_norm": 1.5628312826156616,
1232
+ "learning_rate": 2.3507126042468807e-06,
1233
+ "loss": 0.1846,
1234
+ "step": 1750
1235
+ },
1236
+ {
1237
+ "epoch": 2.1450335161486898,
1238
+ "grad_norm": 1.6945581436157227,
1239
+ "learning_rate": 2.3260740547795818e-06,
1240
+ "loss": 0.1691,
1241
+ "step": 1760
1242
+ },
1243
+ {
1244
+ "epoch": 2.1572212065813527,
1245
+ "grad_norm": 1.927711009979248,
1246
+ "learning_rate": 2.3014524695070277e-06,
1247
+ "loss": 0.1617,
1248
+ "step": 1770
1249
+ },
1250
+ {
1251
+ "epoch": 2.1694088970140157,
1252
+ "grad_norm": 2.3531131744384766,
1253
+ "learning_rate": 2.276850249941927e-06,
1254
+ "loss": 0.1547,
1255
+ "step": 1780
1256
+ },
1257
+ {
1258
+ "epoch": 2.1815965874466787,
1259
+ "grad_norm": 1.3538850545883179,
1260
+ "learning_rate": 2.2522697957081134e-06,
1261
+ "loss": 0.149,
1262
+ "step": 1790
1263
+ },
1264
+ {
1265
+ "epoch": 2.1937842778793417,
1266
+ "grad_norm": 1.6864837408065796,
1267
+ "learning_rate": 2.2277135043065024e-06,
1268
+ "loss": 0.1484,
1269
+ "step": 1800
1270
+ },
1271
+ {
1272
+ "epoch": 2.2059719683120047,
1273
+ "grad_norm": 1.511353611946106,
1274
+ "learning_rate": 2.203183770881239e-06,
1275
+ "loss": 0.1681,
1276
+ "step": 1810
1277
+ },
1278
+ {
1279
+ "epoch": 2.2181596587446677,
1280
+ "grad_norm": 1.9546992778778076,
1281
+ "learning_rate": 2.178682987986088e-06,
1282
+ "loss": 0.1605,
1283
+ "step": 1820
1284
+ },
1285
+ {
1286
+ "epoch": 2.2303473491773307,
1287
+ "grad_norm": 1.924833059310913,
1288
+ "learning_rate": 2.154213545351067e-06,
1289
+ "loss": 0.1645,
1290
+ "step": 1830
1291
+ },
1292
+ {
1293
+ "epoch": 2.2425350396099937,
1294
+ "grad_norm": 1.4835309982299805,
1295
+ "learning_rate": 2.129777829649367e-06,
1296
+ "loss": 0.1544,
1297
+ "step": 1840
1298
+ },
1299
+ {
1300
+ "epoch": 2.254722730042657,
1301
+ "grad_norm": 2.8786847591400146,
1302
+ "learning_rate": 2.1053782242645534e-06,
1303
+ "loss": 0.1518,
1304
+ "step": 1850
1305
+ },
1306
+ {
1307
+ "epoch": 2.26691042047532,
1308
+ "grad_norm": 1.7257006168365479,
1309
+ "learning_rate": 2.081017109058108e-06,
1310
+ "loss": 0.1896,
1311
+ "step": 1860
1312
+ },
1313
+ {
1314
+ "epoch": 2.279098110907983,
1315
+ "grad_norm": 1.3914209604263306,
1316
+ "learning_rate": 2.056696860137298e-06,
1317
+ "loss": 0.1319,
1318
+ "step": 1870
1319
+ },
1320
+ {
1321
+ "epoch": 2.291285801340646,
1322
+ "grad_norm": 1.4881747961044312,
1323
+ "learning_rate": 2.0324198496234227e-06,
1324
+ "loss": 0.1425,
1325
+ "step": 1880
1326
+ },
1327
+ {
1328
+ "epoch": 2.303473491773309,
1329
+ "grad_norm": 1.2742137908935547,
1330
+ "learning_rate": 2.0081884454204396e-06,
1331
+ "loss": 0.1517,
1332
+ "step": 1890
1333
+ },
1334
+ {
1335
+ "epoch": 2.315661182205972,
1336
+ "grad_norm": 2.283926486968994,
1337
+ "learning_rate": 1.984005010984011e-06,
1338
+ "loss": 0.146,
1339
+ "step": 1900
1340
+ },
1341
+ {
1342
+ "epoch": 2.327848872638635,
1343
+ "grad_norm": 1.780540943145752,
1344
+ "learning_rate": 1.9598719050909753e-06,
1345
+ "loss": 0.1529,
1346
+ "step": 1910
1347
+ },
1348
+ {
1349
+ "epoch": 2.340036563071298,
1350
+ "grad_norm": 1.6674730777740479,
1351
+ "learning_rate": 1.935791481609283e-06,
1352
+ "loss": 0.1791,
1353
+ "step": 1920
1354
+ },
1355
+ {
1356
+ "epoch": 2.352224253503961,
1357
+ "grad_norm": 1.363561987876892,
1358
+ "learning_rate": 1.9117660892684067e-06,
1359
+ "loss": 0.1528,
1360
+ "step": 1930
1361
+ },
1362
+ {
1363
+ "epoch": 2.364411943936624,
1364
+ "grad_norm": 1.843386173248291,
1365
+ "learning_rate": 1.8877980714302532e-06,
1366
+ "loss": 0.139,
1367
+ "step": 1940
1368
+ },
1369
+ {
1370
+ "epoch": 2.376599634369287,
1371
+ "grad_norm": 2.3764960765838623,
1372
+ "learning_rate": 1.8638897658605962e-06,
1373
+ "loss": 0.1495,
1374
+ "step": 1950
1375
+ },
1376
+ {
1377
+ "epoch": 2.38878732480195,
1378
+ "grad_norm": 1.8273096084594727,
1379
+ "learning_rate": 1.840043504501065e-06,
1380
+ "loss": 0.1412,
1381
+ "step": 1960
1382
+ },
1383
+ {
1384
+ "epoch": 2.400975015234613,
1385
+ "grad_norm": 1.5394623279571533,
1386
+ "learning_rate": 1.816261613241686e-06,
1387
+ "loss": 0.1507,
1388
+ "step": 1970
1389
+ },
1390
+ {
1391
+ "epoch": 2.413162705667276,
1392
+ "grad_norm": 1.75933837890625,
1393
+ "learning_rate": 1.7925464116940299e-06,
1394
+ "loss": 0.1725,
1395
+ "step": 1980
1396
+ },
1397
+ {
1398
+ "epoch": 2.425350396099939,
1399
+ "grad_norm": 2.1179471015930176,
1400
+ "learning_rate": 1.7689002129649584e-06,
1401
+ "loss": 0.1605,
1402
+ "step": 1990
1403
+ },
1404
+ {
1405
+ "epoch": 2.437538086532602,
1406
+ "grad_norm": 1.8330628871917725,
1407
+ "learning_rate": 1.7453253234310164e-06,
1408
+ "loss": 0.1599,
1409
+ "step": 2000
1410
+ },
1411
+ {
1412
+ "epoch": 2.449725776965265,
1413
+ "grad_norm": 1.8222659826278687,
1414
+ "learning_rate": 1.7218240425134669e-06,
1415
+ "loss": 0.1312,
1416
+ "step": 2010
1417
+ },
1418
+ {
1419
+ "epoch": 2.461913467397928,
1420
+ "grad_norm": 2.930623769760132,
1421
+ "learning_rate": 1.6983986624540227e-06,
1422
+ "loss": 0.1627,
1423
+ "step": 2020
1424
+ },
1425
+ {
1426
+ "epoch": 2.474101157830591,
1427
+ "grad_norm": 2.1240384578704834,
1428
+ "learning_rate": 1.6750514680912606e-06,
1429
+ "loss": 0.1685,
1430
+ "step": 2030
1431
+ },
1432
+ {
1433
+ "epoch": 2.4862888482632544,
1434
+ "grad_norm": 1.4878756999969482,
1435
+ "learning_rate": 1.6517847366377693e-06,
1436
+ "loss": 0.1704,
1437
+ "step": 2040
1438
+ },
1439
+ {
1440
+ "epoch": 2.4984765386959173,
1441
+ "grad_norm": 1.8569579124450684,
1442
+ "learning_rate": 1.628600737458037e-06,
1443
+ "loss": 0.1598,
1444
+ "step": 2050
1445
+ },
1446
+ {
1447
+ "epoch": 2.5106642291285803,
1448
+ "grad_norm": 1.5510886907577515,
1449
+ "learning_rate": 1.605501731847101e-06,
1450
+ "loss": 0.169,
1451
+ "step": 2060
1452
+ },
1453
+ {
1454
+ "epoch": 2.5228519195612433,
1455
+ "grad_norm": 2.6488049030303955,
1456
+ "learning_rate": 1.5824899728099934e-06,
1457
+ "loss": 0.1509,
1458
+ "step": 2070
1459
+ },
1460
+ {
1461
+ "epoch": 2.5350396099939063,
1462
+ "grad_norm": 2.071225166320801,
1463
+ "learning_rate": 1.5595677048419855e-06,
1464
+ "loss": 0.153,
1465
+ "step": 2080
1466
+ },
1467
+ {
1468
+ "epoch": 2.5472273004265693,
1469
+ "grad_norm": 2.2100391387939453,
1470
+ "learning_rate": 1.5367371637096705e-06,
1471
+ "loss": 0.165,
1472
+ "step": 2090
1473
+ },
1474
+ {
1475
+ "epoch": 2.5594149908592323,
1476
+ "grad_norm": 1.6370184421539307,
1477
+ "learning_rate": 1.5140005762328892e-06,
1478
+ "loss": 0.1773,
1479
+ "step": 2100
1480
+ },
1481
+ {
1482
+ "epoch": 2.5716026812918953,
1483
+ "grad_norm": 1.585856318473816,
1484
+ "learning_rate": 1.4913601600675387e-06,
1485
+ "loss": 0.1439,
1486
+ "step": 2110
1487
+ },
1488
+ {
1489
+ "epoch": 2.5837903717245583,
1490
+ "grad_norm": 1.8817006349563599,
1491
+ "learning_rate": 1.468818123489263e-06,
1492
+ "loss": 0.1378,
1493
+ "step": 2120
1494
+ },
1495
+ {
1496
+ "epoch": 2.5959780621572213,
1497
+ "grad_norm": 1.554551124572754,
1498
+ "learning_rate": 1.4463766651780698e-06,
1499
+ "loss": 0.1614,
1500
+ "step": 2130
1501
+ },
1502
+ {
1503
+ "epoch": 2.6081657525898843,
1504
+ "grad_norm": 2.056910753250122,
1505
+ "learning_rate": 1.4240379740038758e-06,
1506
+ "loss": 0.1639,
1507
+ "step": 2140
1508
+ },
1509
+ {
1510
+ "epoch": 2.6203534430225472,
1511
+ "grad_norm": 1.518649935722351,
1512
+ "learning_rate": 1.4018042288130101e-06,
1513
+ "loss": 0.154,
1514
+ "step": 2150
1515
+ },
1516
+ {
1517
+ "epoch": 2.6325411334552102,
1518
+ "grad_norm": 1.8066517114639282,
1519
+ "learning_rate": 1.3796775982156984e-06,
1520
+ "loss": 0.1558,
1521
+ "step": 2160
1522
+ },
1523
+ {
1524
+ "epoch": 2.6447288238878732,
1525
+ "grad_norm": 1.8393446207046509,
1526
+ "learning_rate": 1.3576602403745456e-06,
1527
+ "loss": 0.1618,
1528
+ "step": 2170
1529
+ },
1530
+ {
1531
+ "epoch": 2.656916514320536,
1532
+ "grad_norm": 1.4357062578201294,
1533
+ "learning_rate": 1.3357543027940254e-06,
1534
+ "loss": 0.1502,
1535
+ "step": 2180
1536
+ },
1537
+ {
1538
+ "epoch": 2.669104204753199,
1539
+ "grad_norm": 1.953147053718567,
1540
+ "learning_rate": 1.3139619221110348e-06,
1541
+ "loss": 0.161,
1542
+ "step": 2190
1543
+ },
1544
+ {
1545
+ "epoch": 2.681291895185862,
1546
+ "grad_norm": 1.988788366317749,
1547
+ "learning_rate": 1.2922852238864767e-06,
1548
+ "loss": 0.169,
1549
+ "step": 2200
1550
+ },
1551
+ {
1552
+ "epoch": 2.693479585618525,
1553
+ "grad_norm": 1.5082643032073975,
1554
+ "learning_rate": 1.2707263223979544e-06,
1555
+ "loss": 0.1466,
1556
+ "step": 2210
1557
+ },
1558
+ {
1559
+ "epoch": 2.705667276051188,
1560
+ "grad_norm": 1.587957501411438,
1561
+ "learning_rate": 1.2492873204335415e-06,
1562
+ "loss": 0.1594,
1563
+ "step": 2220
1564
+ },
1565
+ {
1566
+ "epoch": 2.717854966483851,
1567
+ "grad_norm": 4.279134750366211,
1568
+ "learning_rate": 1.227970309086685e-06,
1569
+ "loss": 0.1431,
1570
+ "step": 2230
1571
+ },
1572
+ {
1573
+ "epoch": 2.730042656916514,
1574
+ "grad_norm": 2.2551496028900146,
1575
+ "learning_rate": 1.2067773675522487e-06,
1576
+ "loss": 0.162,
1577
+ "step": 2240
1578
+ },
1579
+ {
1580
+ "epoch": 2.742230347349177,
1581
+ "grad_norm": 1.6612194776535034,
1582
+ "learning_rate": 1.1857105629237126e-06,
1583
+ "loss": 0.1597,
1584
+ "step": 2250
1585
+ },
1586
+ {
1587
+ "epoch": 2.75441803778184,
1588
+ "grad_norm": 1.6561633348464966,
1589
+ "learning_rate": 1.164771949991556e-06,
1590
+ "loss": 0.14,
1591
+ "step": 2260
1592
+ },
1593
+ {
1594
+ "epoch": 2.766605728214503,
1595
+ "grad_norm": 2.2157464027404785,
1596
+ "learning_rate": 1.1439635710428405e-06,
1597
+ "loss": 0.1363,
1598
+ "step": 2270
1599
+ },
1600
+ {
1601
+ "epoch": 2.778793418647166,
1602
+ "grad_norm": 1.709621548652649,
1603
+ "learning_rate": 1.1232874556620086e-06,
1604
+ "loss": 0.1849,
1605
+ "step": 2280
1606
+ },
1607
+ {
1608
+ "epoch": 2.790981109079829,
1609
+ "grad_norm": 1.89210045337677,
1610
+ "learning_rate": 1.1027456205329306e-06,
1611
+ "loss": 0.1435,
1612
+ "step": 2290
1613
+ },
1614
+ {
1615
+ "epoch": 2.8031687995124925,
1616
+ "grad_norm": 1.8157742023468018,
1617
+ "learning_rate": 1.0823400692421938e-06,
1618
+ "loss": 0.172,
1619
+ "step": 2300
1620
+ },
1621
+ {
1622
+ "epoch": 2.8153564899451555,
1623
+ "grad_norm": 1.8622268438339233,
1624
+ "learning_rate": 1.0620727920836906e-06,
1625
+ "loss": 0.1562,
1626
+ "step": 2310
1627
+ },
1628
+ {
1629
+ "epoch": 2.8275441803778185,
1630
+ "grad_norm": 1.6750710010528564,
1631
+ "learning_rate": 1.04194576586448e-06,
1632
+ "loss": 0.1685,
1633
+ "step": 2320
1634
+ },
1635
+ {
1636
+ "epoch": 2.8397318708104815,
1637
+ "grad_norm": 1.5609955787658691,
1638
+ "learning_rate": 1.0219609537119838e-06,
1639
+ "loss": 0.1762,
1640
+ "step": 2330
1641
+ },
1642
+ {
1643
+ "epoch": 2.8519195612431445,
1644
+ "grad_norm": 1.5106117725372314,
1645
+ "learning_rate": 1.0021203048825095e-06,
1646
+ "loss": 0.1601,
1647
+ "step": 2340
1648
+ },
1649
+ {
1650
+ "epoch": 2.8641072516758075,
1651
+ "grad_norm": 1.2436853647232056,
1652
+ "learning_rate": 9.824257545711172e-07,
1653
+ "loss": 0.1451,
1654
+ "step": 2350
1655
+ },
1656
+ {
1657
+ "epoch": 2.8762949421084705,
1658
+ "grad_norm": 2.1535840034484863,
1659
+ "learning_rate": 9.628792237228787e-07,
1660
+ "loss": 0.1703,
1661
+ "step": 2360
1662
+ },
1663
+ {
1664
+ "epoch": 2.8884826325411335,
1665
+ "grad_norm": 1.6186459064483643,
1666
+ "learning_rate": 9.434826188455056e-07,
1667
+ "loss": 0.1607,
1668
+ "step": 2370
1669
+ },
1670
+ {
1671
+ "epoch": 2.9006703229737965,
1672
+ "grad_norm": 2.03439998626709,
1673
+ "learning_rate": 9.242378318233978e-07,
1674
+ "loss": 0.1771,
1675
+ "step": 2380
1676
+ },
1677
+ {
1678
+ "epoch": 2.9128580134064594,
1679
+ "grad_norm": 1.5456159114837646,
1680
+ "learning_rate": 9.051467397331148e-07,
1681
+ "loss": 0.1738,
1682
+ "step": 2390
1683
+ },
1684
+ {
1685
+ "epoch": 2.9250457038391224,
1686
+ "grad_norm": 1.4887679815292358,
1687
+ "learning_rate": 8.862112046602917e-07,
1688
+ "loss": 0.167,
1689
+ "step": 2400
1690
+ },
1691
+ {
1692
+ "epoch": 2.9372333942717854,
1693
+ "grad_norm": 2.0322256088256836,
1694
+ "learning_rate": 8.674330735180164e-07,
1695
+ "loss": 0.1561,
1696
+ "step": 2410
1697
+ },
1698
+ {
1699
+ "epoch": 2.9494210847044484,
1700
+ "grad_norm": 1.4082711935043335,
1701
+ "learning_rate": 8.488141778666878e-07,
1702
+ "loss": 0.1586,
1703
+ "step": 2420
1704
+ },
1705
+ {
1706
+ "epoch": 2.9616087751371114,
1707
+ "grad_norm": 1.7697067260742188,
1708
+ "learning_rate": 8.303563337353713e-07,
1709
+ "loss": 0.1435,
1710
+ "step": 2430
1711
+ },
1712
+ {
1713
+ "epoch": 2.9737964655697744,
1714
+ "grad_norm": 1.3499540090560913,
1715
+ "learning_rate": 8.120613414446707e-07,
1716
+ "loss": 0.1395,
1717
+ "step": 2440
1718
+ },
1719
+ {
1720
+ "epoch": 2.9859841560024374,
1721
+ "grad_norm": 1.8283402919769287,
1722
+ "learning_rate": 7.939309854311242e-07,
1723
+ "loss": 0.1637,
1724
+ "step": 2450
1725
+ },
1726
+ {
1727
+ "epoch": 2.998171846435101,
1728
+ "grad_norm": 1.988641381263733,
1729
+ "learning_rate": 7.759670340731662e-07,
1730
+ "loss": 0.1819,
1731
+ "step": 2460
1732
+ },
1733
+ {
1734
+ "epoch": 3.0103595368677634,
1735
+ "grad_norm": 1.3205652236938477,
1736
+ "learning_rate": 7.581712395186341e-07,
1737
+ "loss": 0.1143,
1738
+ "step": 2470
1739
+ },
1740
+ {
1741
+ "epoch": 3.0225472273004264,
1742
+ "grad_norm": 1.1017987728118896,
1743
+ "learning_rate": 7.405453375138794e-07,
1744
+ "loss": 0.1116,
1745
+ "step": 2480
1746
+ },
1747
+ {
1748
+ "epoch": 3.03473491773309,
1749
+ "grad_norm": 1.5111011266708374,
1750
+ "learning_rate": 7.230910472344601e-07,
1751
+ "loss": 0.1226,
1752
+ "step": 2490
1753
+ },
1754
+ {
1755
+ "epoch": 3.0469226081657528,
1756
+ "grad_norm": 1.614180088043213,
1757
+ "learning_rate": 7.058100711174637e-07,
1758
+ "loss": 0.1304,
1759
+ "step": 2500
1760
+ },
1761
+ {
1762
+ "epoch": 3.0591102985984158,
1763
+ "grad_norm": 1.8207646608352661,
1764
+ "learning_rate": 6.887040946954524e-07,
1765
+ "loss": 0.1221,
1766
+ "step": 2510
1767
+ },
1768
+ {
1769
+ "epoch": 3.0712979890310788,
1770
+ "grad_norm": 1.3963004350662231,
1771
+ "learning_rate": 6.717747864320648e-07,
1772
+ "loss": 0.1154,
1773
+ "step": 2520
1774
+ },
1775
+ {
1776
+ "epoch": 3.0834856794637417,
1777
+ "grad_norm": 1.460038185119629,
1778
+ "learning_rate": 6.550237975592774e-07,
1779
+ "loss": 0.1084,
1780
+ "step": 2530
1781
+ },
1782
+ {
1783
+ "epoch": 3.0956733698964047,
1784
+ "grad_norm": 1.6919264793395996,
1785
+ "learning_rate": 6.384527619163486e-07,
1786
+ "loss": 0.1272,
1787
+ "step": 2540
1788
+ },
1789
+ {
1790
+ "epoch": 3.1078610603290677,
1791
+ "grad_norm": 1.711581826210022,
1792
+ "learning_rate": 6.220632957904593e-07,
1793
+ "loss": 0.1126,
1794
+ "step": 2550
1795
+ },
1796
+ {
1797
+ "epoch": 3.1200487507617307,
1798
+ "grad_norm": 1.6653200387954712,
1799
+ "learning_rate": 6.058569977590683e-07,
1800
+ "loss": 0.1334,
1801
+ "step": 2560
1802
+ },
1803
+ {
1804
+ "epoch": 3.1322364411943937,
1805
+ "grad_norm": 1.6422685384750366,
1806
+ "learning_rate": 5.898354485339839e-07,
1807
+ "loss": 0.098,
1808
+ "step": 2570
1809
+ },
1810
+ {
1811
+ "epoch": 3.1444241316270567,
1812
+ "grad_norm": 1.5357776880264282,
1813
+ "learning_rate": 5.740002108071974e-07,
1814
+ "loss": 0.1242,
1815
+ "step": 2580
1816
+ },
1817
+ {
1818
+ "epoch": 3.1566118220597197,
1819
+ "grad_norm": 1.4460564851760864,
1820
+ "learning_rate": 5.583528290984516e-07,
1821
+ "loss": 0.1071,
1822
+ "step": 2590
1823
+ },
1824
+ {
1825
+ "epoch": 3.1687995124923827,
1826
+ "grad_norm": 1.7668797969818115,
1827
+ "learning_rate": 5.42894829604603e-07,
1828
+ "loss": 0.114,
1829
+ "step": 2600
1830
+ },
1831
+ {
1832
+ "epoch": 3.1809872029250457,
1833
+ "grad_norm": 1.7189100980758667,
1834
+ "learning_rate": 5.276277200507549e-07,
1835
+ "loss": 0.1066,
1836
+ "step": 2610
1837
+ },
1838
+ {
1839
+ "epoch": 3.1931748933577087,
1840
+ "grad_norm": 1.6018712520599365,
1841
+ "learning_rate": 5.125529895432008e-07,
1842
+ "loss": 0.1237,
1843
+ "step": 2620
1844
+ },
1845
+ {
1846
+ "epoch": 3.2053625837903716,
1847
+ "grad_norm": 2.137363910675049,
1848
+ "learning_rate": 4.976721084241818e-07,
1849
+ "loss": 0.1302,
1850
+ "step": 2630
1851
+ },
1852
+ {
1853
+ "epoch": 3.2175502742230346,
1854
+ "grad_norm": 1.3726582527160645,
1855
+ "learning_rate": 4.829865281284734e-07,
1856
+ "loss": 0.1101,
1857
+ "step": 2640
1858
+ },
1859
+ {
1860
+ "epoch": 3.2297379646556976,
1861
+ "grad_norm": 2.303097724914551,
1862
+ "learning_rate": 4.684976810418179e-07,
1863
+ "loss": 0.1057,
1864
+ "step": 2650
1865
+ },
1866
+ {
1867
+ "epoch": 3.2419256550883606,
1868
+ "grad_norm": 2.5125813484191895,
1869
+ "learning_rate": 4.5420698036121285e-07,
1870
+ "loss": 0.113,
1871
+ "step": 2660
1872
+ },
1873
+ {
1874
+ "epoch": 3.2541133455210236,
1875
+ "grad_norm": 1.2105039358139038,
1876
+ "learning_rate": 4.4011581995707267e-07,
1877
+ "loss": 0.1094,
1878
+ "step": 2670
1879
+ },
1880
+ {
1881
+ "epoch": 3.2663010359536866,
1882
+ "grad_norm": 1.7184550762176514,
1883
+ "learning_rate": 4.262255742372759e-07,
1884
+ "loss": 0.1224,
1885
+ "step": 2680
1886
+ },
1887
+ {
1888
+ "epoch": 3.2784887263863496,
1889
+ "grad_norm": 1.8564852476119995,
1890
+ "learning_rate": 4.1253759801310745e-07,
1891
+ "loss": 0.1038,
1892
+ "step": 2690
1893
+ },
1894
+ {
1895
+ "epoch": 3.2906764168190126,
1896
+ "grad_norm": 1.9307692050933838,
1897
+ "learning_rate": 3.9905322636711654e-07,
1898
+ "loss": 0.115,
1899
+ "step": 2700
1900
+ },
1901
+ {
1902
+ "epoch": 3.3028641072516756,
1903
+ "grad_norm": 2.4194962978363037,
1904
+ "learning_rate": 3.8577377452289787e-07,
1905
+ "loss": 0.1281,
1906
+ "step": 2710
1907
+ },
1908
+ {
1909
+ "epoch": 3.315051797684339,
1910
+ "grad_norm": 1.7740304470062256,
1911
+ "learning_rate": 3.727005377168036e-07,
1912
+ "loss": 0.1246,
1913
+ "step": 2720
1914
+ },
1915
+ {
1916
+ "epoch": 3.327239488117002,
1917
+ "grad_norm": 1.6812636852264404,
1918
+ "learning_rate": 3.5983479107161793e-07,
1919
+ "loss": 0.1173,
1920
+ "step": 2730
1921
+ },
1922
+ {
1923
+ "epoch": 3.339427178549665,
1924
+ "grad_norm": 5.8957037925720215,
1925
+ "learning_rate": 3.471777894721767e-07,
1926
+ "loss": 0.1051,
1927
+ "step": 2740
1928
+ },
1929
+ {
1930
+ "epoch": 3.351614868982328,
1931
+ "grad_norm": 2.193671226501465,
1932
+ "learning_rate": 3.347307674429784e-07,
1933
+ "loss": 0.126,
1934
+ "step": 2750
1935
+ },
1936
+ {
1937
+ "epoch": 3.363802559414991,
1938
+ "grad_norm": 1.9564625024795532,
1939
+ "learning_rate": 3.224949390277668e-07,
1940
+ "loss": 0.0962,
1941
+ "step": 2760
1942
+ },
1943
+ {
1944
+ "epoch": 3.375990249847654,
1945
+ "grad_norm": 2.2077689170837402,
1946
+ "learning_rate": 3.1047149767111874e-07,
1947
+ "loss": 0.096,
1948
+ "step": 2770
1949
+ },
1950
+ {
1951
+ "epoch": 3.388177940280317,
1952
+ "grad_norm": 1.5315691232681274,
1953
+ "learning_rate": 2.9866161610203866e-07,
1954
+ "loss": 0.0954,
1955
+ "step": 2780
1956
+ },
1957
+ {
1958
+ "epoch": 3.40036563071298,
1959
+ "grad_norm": 1.900850534439087,
1960
+ "learning_rate": 2.8706644621957605e-07,
1961
+ "loss": 0.1221,
1962
+ "step": 2790
1963
+ },
1964
+ {
1965
+ "epoch": 3.412553321145643,
1966
+ "grad_norm": 1.5665889978408813,
1967
+ "learning_rate": 2.756871189804705e-07,
1968
+ "loss": 0.1054,
1969
+ "step": 2800
1970
+ },
1971
+ {
1972
+ "epoch": 3.424741011578306,
1973
+ "grad_norm": 1.9411475658416748,
1974
+ "learning_rate": 2.6452474428884294e-07,
1975
+ "loss": 0.1255,
1976
+ "step": 2810
1977
+ },
1978
+ {
1979
+ "epoch": 3.436928702010969,
1980
+ "grad_norm": 1.8464025259017944,
1981
+ "learning_rate": 2.5358041088793863e-07,
1982
+ "loss": 0.1157,
1983
+ "step": 2820
1984
+ },
1985
+ {
1986
+ "epoch": 3.449116392443632,
1987
+ "grad_norm": 2.363825559616089,
1988
+ "learning_rate": 2.428551862539366e-07,
1989
+ "loss": 0.1246,
1990
+ "step": 2830
1991
+ },
1992
+ {
1993
+ "epoch": 3.461304082876295,
1994
+ "grad_norm": 1.678460955619812,
1995
+ "learning_rate": 2.323501164918257e-07,
1996
+ "loss": 0.0985,
1997
+ "step": 2840
1998
+ },
1999
+ {
2000
+ "epoch": 3.473491773308958,
2001
+ "grad_norm": 1.818058967590332,
2002
+ "learning_rate": 2.2206622623337864e-07,
2003
+ "loss": 0.1228,
2004
+ "step": 2850
2005
+ },
2006
+ {
2007
+ "epoch": 3.485679463741621,
2008
+ "grad_norm": 1.9547301530838013,
2009
+ "learning_rate": 2.1200451853720605e-07,
2010
+ "loss": 0.1148,
2011
+ "step": 2860
2012
+ },
2013
+ {
2014
+ "epoch": 3.497867154174284,
2015
+ "grad_norm": 2.967578172683716,
2016
+ "learning_rate": 2.0216597479092437e-07,
2017
+ "loss": 0.1408,
2018
+ "step": 2870
2019
+ },
2020
+ {
2021
+ "epoch": 3.510054844606947,
2022
+ "grad_norm": 2.1174635887145996,
2023
+ "learning_rate": 1.9255155461543385e-07,
2024
+ "loss": 0.1124,
2025
+ "step": 2880
2026
+ },
2027
+ {
2028
+ "epoch": 3.5222425350396103,
2029
+ "grad_norm": 1.7792600393295288,
2030
+ "learning_rate": 1.8316219577132033e-07,
2031
+ "loss": 0.1217,
2032
+ "step": 2890
2033
+ },
2034
+ {
2035
+ "epoch": 3.5344302254722733,
2036
+ "grad_norm": 1.700875163078308,
2037
+ "learning_rate": 1.7399881406738762e-07,
2038
+ "loss": 0.1087,
2039
+ "step": 2900
2040
+ },
2041
+ {
2042
+ "epoch": 3.5466179159049362,
2043
+ "grad_norm": 1.9662463665008545,
2044
+ "learning_rate": 1.650623032713347e-07,
2045
+ "loss": 0.1047,
2046
+ "step": 2910
2047
+ },
2048
+ {
2049
+ "epoch": 3.5588056063375992,
2050
+ "grad_norm": 1.718908429145813,
2051
+ "learning_rate": 1.5635353502257812e-07,
2052
+ "loss": 0.1356,
2053
+ "step": 2920
2054
+ },
2055
+ {
2056
+ "epoch": 3.5709932967702622,
2057
+ "grad_norm": 1.5018715858459473,
2058
+ "learning_rate": 1.4787335874723724e-07,
2059
+ "loss": 0.1148,
2060
+ "step": 2930
2061
+ },
2062
+ {
2063
+ "epoch": 3.583180987202925,
2064
+ "grad_norm": 1.5846534967422485,
2065
+ "learning_rate": 1.3962260157528052e-07,
2066
+ "loss": 0.1197,
2067
+ "step": 2940
2068
+ },
2069
+ {
2070
+ "epoch": 3.595368677635588,
2071
+ "grad_norm": 1.947350025177002,
2072
+ "learning_rate": 1.3160206825985457e-07,
2073
+ "loss": 0.1037,
2074
+ "step": 2950
2075
+ },
2076
+ {
2077
+ "epoch": 3.607556368068251,
2078
+ "grad_norm": 1.5652320384979248,
2079
+ "learning_rate": 1.2381254109878644e-07,
2080
+ "loss": 0.1106,
2081
+ "step": 2960
2082
+ },
2083
+ {
2084
+ "epoch": 3.619744058500914,
2085
+ "grad_norm": 2.34379243850708,
2086
+ "learning_rate": 1.1625477985828276e-07,
2087
+ "loss": 0.1358,
2088
+ "step": 2970
2089
+ },
2090
+ {
2091
+ "epoch": 3.631931748933577,
2092
+ "grad_norm": 2.6311395168304443,
2093
+ "learning_rate": 1.089295216988262e-07,
2094
+ "loss": 0.1119,
2095
+ "step": 2980
2096
+ },
2097
+ {
2098
+ "epoch": 3.64411943936624,
2099
+ "grad_norm": 1.8833256959915161,
2100
+ "learning_rate": 1.0183748110327102e-07,
2101
+ "loss": 0.1245,
2102
+ "step": 2990
2103
+ },
2104
+ {
2105
+ "epoch": 3.656307129798903,
2106
+ "grad_norm": 1.1179873943328857,
2107
+ "learning_rate": 9.497934980715939e-08,
2108
+ "loss": 0.1097,
2109
+ "step": 3000
2110
+ },
2111
+ {
2112
+ "epoch": 3.668494820231566,
2113
+ "grad_norm": 2.062716484069824,
2114
+ "learning_rate": 8.835579673124677e-08,
2115
+ "loss": 0.1009,
2116
+ "step": 3010
2117
+ },
2118
+ {
2119
+ "epoch": 3.680682510664229,
2120
+ "grad_norm": 1.951429843902588,
2121
+ "learning_rate": 8.196746791626243e-08,
2122
+ "loss": 0.1167,
2123
+ "step": 3020
2124
+ },
2125
+ {
2126
+ "epoch": 3.692870201096892,
2127
+ "grad_norm": 1.7196236848831177,
2128
+ "learning_rate": 7.581498645989255e-08,
2129
+ "loss": 0.1186,
2130
+ "step": 3030
2131
+ },
2132
+ {
2133
+ "epoch": 3.705057891529555,
2134
+ "grad_norm": 2.037466049194336,
2135
+ "learning_rate": 6.989895245600702e-08,
2136
+ "loss": 0.1264,
2137
+ "step": 3040
2138
+ },
2139
+ {
2140
+ "epoch": 3.717245581962218,
2141
+ "grad_norm": 1.9185491800308228,
2142
+ "learning_rate": 6.421994293612871e-08,
2143
+ "loss": 0.1304,
2144
+ "step": 3050
2145
+ },
2146
+ {
2147
+ "epoch": 3.729433272394881,
2148
+ "grad_norm": 1.453826904296875,
2149
+ "learning_rate": 5.8778511813150365e-08,
2150
+ "loss": 0.1158,
2151
+ "step": 3060
2152
+ },
2153
+ {
2154
+ "epoch": 3.741620962827544,
2155
+ "grad_norm": 1.3658956289291382,
2156
+ "learning_rate": 5.357518982730792e-08,
2157
+ "loss": 0.1045,
2158
+ "step": 3070
2159
+ },
2160
+ {
2161
+ "epoch": 3.753808653260207,
2162
+ "grad_norm": 1.7934198379516602,
2163
+ "learning_rate": 4.861048449441491e-08,
2164
+ "loss": 0.1203,
2165
+ "step": 3080
2166
+ },
2167
+ {
2168
+ "epoch": 3.76599634369287,
2169
+ "grad_norm": 1.9196701049804688,
2170
+ "learning_rate": 4.3884880056359045e-08,
2171
+ "loss": 0.1174,
2172
+ "step": 3090
2173
+ },
2174
+ {
2175
+ "epoch": 3.778184034125533,
2176
+ "grad_norm": 1.391270399093628,
2177
+ "learning_rate": 3.939883743387302e-08,
2178
+ "loss": 0.103,
2179
+ "step": 3100
2180
+ },
2181
+ {
2182
+ "epoch": 3.790371724558196,
2183
+ "grad_norm": 1.528946876525879,
2184
+ "learning_rate": 3.515279418157463e-08,
2185
+ "loss": 0.1146,
2186
+ "step": 3110
2187
+ },
2188
+ {
2189
+ "epoch": 3.802559414990859,
2190
+ "grad_norm": 1.5788449048995972,
2191
+ "learning_rate": 3.1147164445292923e-08,
2192
+ "loss": 0.1162,
2193
+ "step": 3120
2194
+ },
2195
+ {
2196
+ "epoch": 3.814747105423522,
2197
+ "grad_norm": 2.370635747909546,
2198
+ "learning_rate": 2.7382338921670693e-08,
2199
+ "loss": 0.1166,
2200
+ "step": 3130
2201
+ },
2202
+ {
2203
+ "epoch": 3.826934795856185,
2204
+ "grad_norm": 1.730526328086853,
2205
+ "learning_rate": 2.3858684820058376e-08,
2206
+ "loss": 0.1123,
2207
+ "step": 3140
2208
+ },
2209
+ {
2210
+ "epoch": 3.839122486288848,
2211
+ "grad_norm": 1.4985235929489136,
2212
+ "learning_rate": 2.057654582669738e-08,
2213
+ "loss": 0.1151,
2214
+ "step": 3150
2215
+ },
2216
+ {
2217
+ "epoch": 3.8513101767215114,
2218
+ "grad_norm": 1.7269705533981323,
2219
+ "learning_rate": 1.753624207119775e-08,
2220
+ "loss": 0.112,
2221
+ "step": 3160
2222
+ },
2223
+ {
2224
+ "epoch": 3.8634978671541744,
2225
+ "grad_norm": 2.0060977935791016,
2226
+ "learning_rate": 1.4738070095314527e-08,
2227
+ "loss": 0.1101,
2228
+ "step": 3170
2229
+ },
2230
+ {
2231
+ "epoch": 3.8756855575868374,
2232
+ "grad_norm": 2.1134166717529297,
2233
+ "learning_rate": 1.2182302824023107e-08,
2234
+ "loss": 0.1212,
2235
+ "step": 3180
2236
+ },
2237
+ {
2238
+ "epoch": 3.8878732480195004,
2239
+ "grad_norm": 1.5718306303024292,
2240
+ "learning_rate": 9.869189538899149e-09,
2241
+ "loss": 0.1042,
2242
+ "step": 3190
2243
+ },
2244
+ {
2245
+ "epoch": 3.9000609384521634,
2246
+ "grad_norm": 1.7236390113830566,
2247
+ "learning_rate": 7.798955853805245e-09,
2248
+ "loss": 0.1231,
2249
+ "step": 3200
2250
+ },
2251
+ {
2252
+ "epoch": 3.9122486288848264,
2253
+ "grad_norm": 2.0345866680145264,
2254
+ "learning_rate": 5.971803692883804e-09,
2255
+ "loss": 0.1181,
2256
+ "step": 3210
2257
+ },
2258
+ {
2259
+ "epoch": 3.9244363193174894,
2260
+ "grad_norm": 1.8747888803482056,
2261
+ "learning_rate": 4.387911270863632e-09,
2262
+ "loss": 0.1309,
2263
+ "step": 3220
2264
+ },
2265
+ {
2266
+ "epoch": 3.9366240097501524,
2267
+ "grad_norm": 2.027182102203369,
2268
+ "learning_rate": 3.0474330756757874e-09,
2269
+ "loss": 0.1027,
2270
+ "step": 3230
2271
+ },
2272
+ {
2273
+ "epoch": 3.9488117001828154,
2274
+ "grad_norm": 1.62562096118927,
2275
+ "learning_rate": 1.9504998533870223e-09,
2276
+ "loss": 0.1265,
2277
+ "step": 3240
2278
+ },
2279
+ {
2280
+ "epoch": 3.9609993906154783,
2281
+ "grad_norm": 2.0606415271759033,
2282
+ "learning_rate": 1.0972185954452596e-09,
2283
+ "loss": 0.1099,
2284
+ "step": 3250
2285
+ },
2286
+ {
2287
+ "epoch": 3.9731870810481413,
2288
+ "grad_norm": 1.293489933013916,
2289
+ "learning_rate": 4.876725282457195e-10,
2290
+ "loss": 0.1164,
2291
+ "step": 3260
2292
+ },
2293
+ {
2294
+ "epoch": 3.9853747714808043,
2295
+ "grad_norm": 1.6779310703277588,
2296
+ "learning_rate": 1.2192110501269005e-10,
2297
+ "loss": 0.139,
2298
+ "step": 3270
2299
+ },
2300
+ {
2301
+ "epoch": 3.9975624619134673,
2302
+ "grad_norm": 2.0987327098846436,
2303
+ "learning_rate": 0.0,
2304
+ "loss": 0.1073,
2305
+ "step": 3280
2306
+ },
2307
+ {
2308
+ "epoch": 3.9975624619134673,
2309
+ "step": 3280,
2310
+ "total_flos": 3.38221904554623e+17,
2311
+ "train_loss": 0.21007875238613385,
2312
+ "train_runtime": 5418.5603,
2313
+ "train_samples_per_second": 9.688,
2314
+ "train_steps_per_second": 0.605
2315
+ }
2316
+ ],
+ "logging_steps": 10,
+ "max_steps": 3280,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 4,
+ "save_steps": 500,
+ "stateful_callbacks": {
+   "TrainerControl": {
+     "args": {
+       "should_epoch_stop": false,
+       "should_evaluate": false,
+       "should_log": false,
+       "should_save": true,
+       "should_training_stop": true
+     },
+     "attributes": {}
+   }
+ },
+ "total_flos": 3.38221904554623e+17,
+ "train_batch_size": 8,
+ "trial_name": null,
+ "trial_params": null
+ }
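
The learning-rate column in the log is consistent with a cosine schedule with linear warmup over `max_steps = 3280`. A minimal sketch that reconstructs it, assuming a peak learning rate of 5e-06 and a 3% warmup ratio (both consistent with the logged values, neither stored in this file) and the `math.ceil` warmup-step rounding used by `transformers`' `Trainer`:

```python
import math

BASE_LR = 5e-06                             # assumed peak learning rate
MAX_STEPS = 3280                            # "max_steps" above
WARMUP_STEPS = math.ceil(0.03 * MAX_STEPS)  # assumed 3% warmup ratio -> 99 steps

def lr_at(step: int) -> float:
    """Linear warmup, then half-cosine decay to zero (the shape of
    transformers' get_cosine_schedule_with_warmup with num_cycles=0.5)."""
    if step < WARMUP_STEPS:
        return BASE_LR * step / max(1, WARMUP_STEPS)
    progress = (step - WARMUP_STEPS) / max(1, MAX_STEPS - WARMUP_STEPS)
    return BASE_LR * 0.5 * (1.0 + math.cos(math.pi * progress))

print(lr_at(280))   # ~4.9602e-06, matching the value logged at step 280
print(lr_at(3280))  # 0.0, matching the final logged entry
```

Under these assumptions the reconstruction agrees with the logged values to the digits shown.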
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3dade4871e2101a640babfb7155b7f96d0f2eafc02855dfbbd9f2969d5c53445
+ size 5432
training_loss.png ADDED
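
training_loss.png is presumably the loss-over-steps curve for this run; a minimal sketch of how such a plot can be regenerated from the state file above, assuming trainer_state.json has been downloaded to the working directory:

```python
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # assumed local path
    state = json.load(f)

# Periodic training logs carry a "loss" key; the final summary record does not.
entries = [e for e in state["log_history"] if "loss" in e]
plt.plot([e["step"] for e in entries], [e["loss"] for e in entries])
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")
```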