Training in progress, epoch 1, checkpoint
checkpoint-1108/config.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "_name_or_path": "bert-base-uncased",
+  "architectures": [
+    "BertForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.47.0",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 30522
+}
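
Note: the config above is a stock bert-base-uncased encoder with a sequence-classification head, saved with transformers 4.47.0. A minimal sketch of loading this checkpoint directory with the standard transformers API follows; the local path and example sentence are illustrative only, and the tokenizer is taken from the base model because no tokenizer files are part of this commit.

```python
# Minimal sketch: restore the classifier from this checkpoint directory.
# Assumes checkpoint-1108/ has been downloaded locally (path is illustrative).
from transformers import AutoModelForSequenceClassification, AutoTokenizer

checkpoint_dir = "checkpoint-1108"  # contains config.json and model.safetensors

model = AutoModelForSequenceClassification.from_pretrained(checkpoint_dir)

# No tokenizer files in this commit, so fall back to the base model's tokenizer.
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

inputs = tokenizer("An example sentence to classify.", return_tensors="pt")
logits = model(**inputs).logits
print(logits.argmax(dim=-1))
```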
checkpoint-1108/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a770181b755e196d444318da29c49d85933c7a74e44c646a3f539580854ce767
+size 437958648
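
Note: what is committed here is only the Git LFS pointer; the actual weights blob is 437,958,648 bytes, consistent with roughly 110M float32 parameters for a BERT-base classifier. A small sketch for inspecting the file, assuming the blob has been pulled with git-lfs:

```python
# Minimal sketch: inspect the safetensors weights referenced by the LFS pointer.
from safetensors.torch import load_file

state_dict = load_file("checkpoint-1108/model.safetensors")
num_params = sum(t.numel() for t in state_dict.values())
num_bytes = sum(t.numel() * t.element_size() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {num_params:,} parameters, {num_bytes:,} bytes")
# For bert-base-uncased in float32 this lands near the 437,958,648-byte size
# recorded in the pointer file above.
```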
checkpoint-1108/trainer_state.json
ADDED
@@ -0,0 +1,818 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 1.996845425867508,
+  "eval_steps": 500,
+  "global_step": 1108,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.018026137899954935,
+      "grad_norm": 18.020910263061523,
+      "learning_rate": 4.9548736462093865e-05,
+      "loss": 0.6809,
+      "step": 10
+    },
+    {
+      "epoch": 0.03605227579990987,
+      "grad_norm": 30.794422149658203,
+      "learning_rate": 4.909747292418773e-05,
+      "loss": 0.572,
+      "step": 20
+    },
+    {
+      "epoch": 0.054078413699864804,
+      "grad_norm": 24.659021377563477,
+      "learning_rate": 4.864620938628159e-05,
+      "loss": 0.554,
+      "step": 30
+    },
+    {
+      "epoch": 0.07210455159981974,
+      "grad_norm": 17.13719367980957,
+      "learning_rate": 4.819494584837546e-05,
+      "loss": 0.5261,
+      "step": 40
+    },
+    {
+      "epoch": 0.09013068949977468,
+      "grad_norm": 50.614078521728516,
+      "learning_rate": 4.7743682310469314e-05,
+      "loss": 0.5696,
+      "step": 50
+    },
+    {
+      "epoch": 0.10815682739972961,
+      "grad_norm": 16.31667137145996,
+      "learning_rate": 4.7292418772563177e-05,
+      "loss": 0.4876,
+      "step": 60
+    },
+    {
+      "epoch": 0.12618296529968454,
+      "grad_norm": 41.160762786865234,
+      "learning_rate": 4.684115523465704e-05,
+      "loss": 0.4647,
+      "step": 70
+    },
+    {
+      "epoch": 0.14420910319963948,
+      "grad_norm": 20.278989791870117,
+      "learning_rate": 4.63898916967509e-05,
+      "loss": 0.4525,
+      "step": 80
+    },
+    {
+      "epoch": 0.16223524109959442,
+      "grad_norm": 21.479433059692383,
+      "learning_rate": 4.5938628158844764e-05,
+      "loss": 0.4511,
+      "step": 90
+    },
+    {
+      "epoch": 0.18026137899954936,
+      "grad_norm": 20.85803985595703,
+      "learning_rate": 4.548736462093863e-05,
+      "loss": 0.4828,
+      "step": 100
+    },
+    {
+      "epoch": 0.19828751689950427,
+      "grad_norm": 19.689502716064453,
+      "learning_rate": 4.5036101083032495e-05,
+      "loss": 0.4785,
+      "step": 110
+    },
+    {
+      "epoch": 0.21631365479945922,
+      "grad_norm": 23.28278350830078,
+      "learning_rate": 4.458483754512636e-05,
+      "loss": 0.4593,
+      "step": 120
+    },
+    {
+      "epoch": 0.23433979269941416,
+      "grad_norm": 55.461483001708984,
+      "learning_rate": 4.413357400722022e-05,
+      "loss": 0.4265,
+      "step": 130
+    },
+    {
+      "epoch": 0.25236593059936907,
+      "grad_norm": 21.591472625732422,
+      "learning_rate": 4.368231046931408e-05,
+      "loss": 0.4611,
+      "step": 140
+    },
+    {
+      "epoch": 0.270392068499324,
+      "grad_norm": 31.00871467590332,
+      "learning_rate": 4.3231046931407945e-05,
+      "loss": 0.4489,
+      "step": 150
+    },
+    {
+      "epoch": 0.28841820639927895,
+      "grad_norm": 21.315185546875,
+      "learning_rate": 4.277978339350181e-05,
+      "loss": 0.4755,
+      "step": 160
+    },
+    {
+      "epoch": 0.3064443442992339,
+      "grad_norm": 25.012041091918945,
+      "learning_rate": 4.232851985559567e-05,
+      "loss": 0.4703,
+      "step": 170
+    },
+    {
+      "epoch": 0.32447048219918884,
+      "grad_norm": 18.25890350341797,
+      "learning_rate": 4.187725631768953e-05,
+      "loss": 0.4456,
+      "step": 180
+    },
+    {
+      "epoch": 0.3424966200991438,
+      "grad_norm": 19.33783721923828,
+      "learning_rate": 4.1425992779783394e-05,
+      "loss": 0.4764,
+      "step": 190
+    },
+    {
+      "epoch": 0.3605227579990987,
+      "grad_norm": 21.992473602294922,
+      "learning_rate": 4.0974729241877256e-05,
+      "loss": 0.4204,
+      "step": 200
+    },
+    {
+      "epoch": 0.3785488958990536,
+      "grad_norm": 20.253664016723633,
+      "learning_rate": 4.052346570397112e-05,
+      "loss": 0.4471,
+      "step": 210
+    },
+    {
+      "epoch": 0.39657503379900855,
+      "grad_norm": 17.41029167175293,
+      "learning_rate": 4.007220216606498e-05,
+      "loss": 0.4387,
+      "step": 220
+    },
+    {
+      "epoch": 0.4146011716989635,
+      "grad_norm": 18.28990364074707,
+      "learning_rate": 3.962093862815885e-05,
+      "loss": 0.4283,
+      "step": 230
+    },
+    {
+      "epoch": 0.43262730959891843,
+      "grad_norm": 48.463008880615234,
+      "learning_rate": 3.916967509025271e-05,
+      "loss": 0.4447,
+      "step": 240
+    },
+    {
+      "epoch": 0.45065344749887337,
+      "grad_norm": 15.58568000793457,
+      "learning_rate": 3.8718411552346575e-05,
+      "loss": 0.4257,
+      "step": 250
+    },
+    {
+      "epoch": 0.4686795853988283,
+      "grad_norm": 31.015850067138672,
+      "learning_rate": 3.826714801444044e-05,
+      "loss": 0.4463,
+      "step": 260
+    },
+    {
+      "epoch": 0.48670572329878325,
+      "grad_norm": 18.325271606445312,
+      "learning_rate": 3.78158844765343e-05,
+      "loss": 0.4207,
+      "step": 270
+    },
+    {
+      "epoch": 0.5047318611987381,
+      "grad_norm": 20.241615295410156,
+      "learning_rate": 3.7364620938628155e-05,
+      "loss": 0.4084,
+      "step": 280
+    },
+    {
+      "epoch": 0.5227579990986931,
+      "grad_norm": 19.043285369873047,
+      "learning_rate": 3.6913357400722025e-05,
+      "loss": 0.4345,
+      "step": 290
+    },
+    {
+      "epoch": 0.540784136998648,
+      "grad_norm": 19.568265914916992,
+      "learning_rate": 3.646209386281589e-05,
+      "loss": 0.4393,
+      "step": 300
+    },
+    {
+      "epoch": 0.558810274898603,
+      "grad_norm": 26.981136322021484,
+      "learning_rate": 3.601083032490975e-05,
+      "loss": 0.4532,
+      "step": 310
+    },
+    {
+      "epoch": 0.5768364127985579,
+      "grad_norm": 17.03878402709961,
+      "learning_rate": 3.555956678700361e-05,
+      "loss": 0.4208,
+      "step": 320
+    },
+    {
+      "epoch": 0.5948625506985128,
+      "grad_norm": 16.008604049682617,
+      "learning_rate": 3.5108303249097474e-05,
+      "loss": 0.4396,
+      "step": 330
+    },
+    {
+      "epoch": 0.6128886885984678,
+      "grad_norm": 20.344444274902344,
+      "learning_rate": 3.4657039711191336e-05,
+      "loss": 0.4318,
+      "step": 340
+    },
+    {
+      "epoch": 0.6309148264984227,
+      "grad_norm": 18.641759872436523,
+      "learning_rate": 3.42057761732852e-05,
+      "loss": 0.4481,
+      "step": 350
+    },
+    {
+      "epoch": 0.6489409643983777,
+      "grad_norm": 25.64444351196289,
+      "learning_rate": 3.375451263537907e-05,
+      "loss": 0.4053,
+      "step": 360
+    },
+    {
+      "epoch": 0.6669671022983326,
+      "grad_norm": 23.601179122924805,
+      "learning_rate": 3.330324909747293e-05,
+      "loss": 0.4463,
+      "step": 370
+    },
+    {
+      "epoch": 0.6849932401982876,
+      "grad_norm": 29.48953628540039,
+      "learning_rate": 3.2851985559566786e-05,
+      "loss": 0.4086,
+      "step": 380
+    },
+    {
+      "epoch": 0.7030193780982424,
+      "grad_norm": 15.503927230834961,
+      "learning_rate": 3.240072202166065e-05,
+      "loss": 0.4402,
+      "step": 390
+    },
+    {
+      "epoch": 0.7210455159981974,
+      "grad_norm": 29.575830459594727,
+      "learning_rate": 3.194945848375451e-05,
+      "loss": 0.43,
+      "step": 400
+    },
+    {
+      "epoch": 0.7390716538981523,
+      "grad_norm": 26.154592514038086,
+      "learning_rate": 3.149819494584837e-05,
+      "loss": 0.432,
+      "step": 410
+    },
+    {
+      "epoch": 0.7570977917981072,
+      "grad_norm": 25.37771987915039,
+      "learning_rate": 3.104693140794224e-05,
+      "loss": 0.4277,
+      "step": 420
+    },
+    {
+      "epoch": 0.7751239296980622,
+      "grad_norm": 31.5076904296875,
+      "learning_rate": 3.0595667870036104e-05,
+      "loss": 0.4131,
+      "step": 430
+    },
+    {
+      "epoch": 0.7931500675980171,
+      "grad_norm": 16.534198760986328,
+      "learning_rate": 3.0144404332129967e-05,
+      "loss": 0.433,
+      "step": 440
+    },
+    {
+      "epoch": 0.8111762054979721,
+      "grad_norm": 19.064289093017578,
+      "learning_rate": 2.969314079422383e-05,
+      "loss": 0.3964,
+      "step": 450
+    },
+    {
+      "epoch": 0.829202343397927,
+      "grad_norm": 18.9260196685791,
+      "learning_rate": 2.924187725631769e-05,
+      "loss": 0.396,
+      "step": 460
+    },
+    {
+      "epoch": 0.847228481297882,
+      "grad_norm": 27.17948341369629,
+      "learning_rate": 2.879061371841155e-05,
+      "loss": 0.4079,
+      "step": 470
+    },
+    {
+      "epoch": 0.8652546191978369,
+      "grad_norm": 21.348743438720703,
+      "learning_rate": 2.8339350180505413e-05,
+      "loss": 0.4545,
+      "step": 480
+    },
+    {
+      "epoch": 0.8832807570977917,
+      "grad_norm": 15.41019344329834,
+      "learning_rate": 2.7888086642599282e-05,
+      "loss": 0.4024,
+      "step": 490
+    },
+    {
+      "epoch": 0.9013068949977467,
+      "grad_norm": 26.81648826599121,
+      "learning_rate": 2.7436823104693144e-05,
+      "loss": 0.3888,
+      "step": 500
+    },
+    {
+      "epoch": 0.9193330328977016,
+      "grad_norm": 27.93416976928711,
+      "learning_rate": 2.6985559566787007e-05,
+      "loss": 0.4086,
+      "step": 510
+    },
+    {
+      "epoch": 0.9373591707976566,
+      "grad_norm": 14.353543281555176,
+      "learning_rate": 2.6534296028880866e-05,
+      "loss": 0.416,
+      "step": 520
+    },
+    {
+      "epoch": 0.9553853086976115,
+      "grad_norm": 15.367998123168945,
+      "learning_rate": 2.6083032490974728e-05,
+      "loss": 0.4184,
+      "step": 530
+    },
+    {
+      "epoch": 0.9734114465975665,
+      "grad_norm": 22.00202178955078,
+      "learning_rate": 2.563176895306859e-05,
+      "loss": 0.4294,
+      "step": 540
+    },
+    {
+      "epoch": 0.9914375844975214,
+      "grad_norm": 15.376652717590332,
+      "learning_rate": 2.518050541516246e-05,
+      "loss": 0.4068,
+      "step": 550
+    },
+    {
+      "epoch": 1.0,
+      "eval_f1": 0.8437814956661963,
+      "eval_fn": 317,
+      "eval_fp": 458,
+      "eval_loss": 0.3667502701282501,
+      "eval_precision": 0.8204625637005096,
+      "eval_recall": 0.8684647302904565,
+      "eval_runtime": 14.8642,
+      "eval_samples_per_second": 314.379,
+      "eval_steps_per_second": 39.356,
+      "eval_tn": 1805,
+      "eval_tp": 2093,
+      "step": 555
+    },
+    {
+      "epoch": 1.0090130689499774,
+      "grad_norm": 33.98088455200195,
+      "learning_rate": 2.472924187725632e-05,
+      "loss": 0.3578,
+      "step": 560
+    },
+    {
+      "epoch": 1.0270392068499323,
+      "grad_norm": 24.8944091796875,
+      "learning_rate": 2.427797833935018e-05,
+      "loss": 0.3203,
+      "step": 570
+    },
+    {
+      "epoch": 1.0450653447498874,
+      "grad_norm": 17.574607849121094,
+      "learning_rate": 2.3826714801444043e-05,
+      "loss": 0.335,
+      "step": 580
+    },
+    {
+      "epoch": 1.0630914826498423,
+      "grad_norm": 26.903356552124023,
+      "learning_rate": 2.3375451263537906e-05,
+      "loss": 0.289,
+      "step": 590
+    },
+    {
+      "epoch": 1.0811176205497972,
+      "grad_norm": 31.987625122070312,
+      "learning_rate": 2.292418772563177e-05,
+      "loss": 0.3511,
+      "step": 600
+    },
+    {
+      "epoch": 1.099143758449752,
+      "grad_norm": 29.54005241394043,
+      "learning_rate": 2.2472924187725634e-05,
+      "loss": 0.3126,
+      "step": 610
+    },
+    {
+      "epoch": 1.117169896349707,
+      "grad_norm": 22.088634490966797,
+      "learning_rate": 2.2021660649819496e-05,
+      "loss": 0.3004,
+      "step": 620
+    },
+    {
+      "epoch": 1.135196034249662,
+      "grad_norm": 30.805749893188477,
+      "learning_rate": 2.157039711191336e-05,
+      "loss": 0.3323,
+      "step": 630
+    },
+    {
+      "epoch": 1.153222172149617,
+      "grad_norm": 16.607519149780273,
+      "learning_rate": 2.111913357400722e-05,
+      "loss": 0.3345,
+      "step": 640
+    },
+    {
+      "epoch": 1.1712483100495719,
+      "grad_norm": 25.248517990112305,
+      "learning_rate": 2.0667870036101083e-05,
+      "loss": 0.3001,
+      "step": 650
+    },
+    {
+      "epoch": 1.1892744479495267,
+      "grad_norm": 29.825464248657227,
+      "learning_rate": 2.0216606498194946e-05,
+      "loss": 0.3146,
+      "step": 660
+    },
+    {
+      "epoch": 1.2073005858494819,
+      "grad_norm": 30.085296630859375,
+      "learning_rate": 1.976534296028881e-05,
+      "loss": 0.3335,
+      "step": 670
+    },
+    {
+      "epoch": 1.2253267237494367,
+      "grad_norm": 23.349525451660156,
+      "learning_rate": 1.9314079422382674e-05,
+      "loss": 0.27,
+      "step": 680
+    },
+    {
+      "epoch": 1.2433528616493916,
+      "grad_norm": 34.48153305053711,
+      "learning_rate": 1.8862815884476533e-05,
+      "loss": 0.3333,
+      "step": 690
+    },
+    {
+      "epoch": 1.2613789995493465,
+      "grad_norm": 49.01852798461914,
+      "learning_rate": 1.84115523465704e-05,
+      "loss": 0.275,
+      "step": 700
+    },
+    {
+      "epoch": 1.2794051374493014,
+      "grad_norm": 24.860200881958008,
+      "learning_rate": 1.796028880866426e-05,
+      "loss": 0.311,
+      "step": 710
+    },
+    {
+      "epoch": 1.2974312753492563,
+      "grad_norm": 30.17133331298828,
+      "learning_rate": 1.7509025270758123e-05,
+      "loss": 0.2988,
+      "step": 720
+    },
+    {
+      "epoch": 1.3154574132492114,
+      "grad_norm": 47.71062469482422,
+      "learning_rate": 1.7057761732851986e-05,
+      "loss": 0.2696,
+      "step": 730
+    },
+    {
+      "epoch": 1.3334835511491663,
+      "grad_norm": 40.605262756347656,
+      "learning_rate": 1.6606498194945848e-05,
+      "loss": 0.2966,
+      "step": 740
+    },
+    {
+      "epoch": 1.3515096890491212,
+      "grad_norm": 23.67529296875,
+      "learning_rate": 1.615523465703971e-05,
+      "loss": 0.2967,
+      "step": 750
+    },
+    {
+      "epoch": 1.3695358269490763,
+      "grad_norm": 25.661808013916016,
+      "learning_rate": 1.5703971119133576e-05,
+      "loss": 0.2856,
+      "step": 760
+    },
+    {
+      "epoch": 1.3875619648490312,
+      "grad_norm": 21.13004493713379,
+      "learning_rate": 1.5252707581227438e-05,
+      "loss": 0.3415,
+      "step": 770
+    },
+    {
+      "epoch": 1.405588102748986,
+      "grad_norm": 24.510709762573242,
+      "learning_rate": 1.4801444043321299e-05,
+      "loss": 0.3044,
+      "step": 780
+    },
+    {
+      "epoch": 1.423614240648941,
+      "grad_norm": 22.614412307739258,
+      "learning_rate": 1.4350180505415165e-05,
+      "loss": 0.3042,
+      "step": 790
+    },
+    {
+      "epoch": 1.4416403785488958,
+      "grad_norm": 26.001209259033203,
+      "learning_rate": 1.3898916967509026e-05,
+      "loss": 0.2906,
+      "step": 800
+    },
+    {
+      "epoch": 1.4596665164488507,
+      "grad_norm": 26.30026626586914,
+      "learning_rate": 1.3447653429602888e-05,
+      "loss": 0.2603,
+      "step": 810
+    },
+    {
+      "epoch": 1.4776926543488058,
+      "grad_norm": 26.163490295410156,
+      "learning_rate": 1.299638989169675e-05,
+      "loss": 0.2768,
+      "step": 820
+    },
+    {
+      "epoch": 1.4957187922487607,
+      "grad_norm": 48.625335693359375,
+      "learning_rate": 1.2545126353790614e-05,
+      "loss": 0.3203,
+      "step": 830
+    },
+    {
+      "epoch": 1.5137449301487156,
+      "grad_norm": 19.079364776611328,
+      "learning_rate": 1.2093862815884477e-05,
+      "loss": 0.3069,
+      "step": 840
+    },
+    {
+      "epoch": 1.5317710680486707,
+      "grad_norm": 21.295671463012695,
+      "learning_rate": 1.164259927797834e-05,
+      "loss": 0.2658,
+      "step": 850
+    },
+    {
+      "epoch": 1.5497972059486256,
+      "grad_norm": 26.641357421875,
+      "learning_rate": 1.1191335740072201e-05,
+      "loss": 0.281,
+      "step": 860
+    },
+    {
+      "epoch": 1.5678233438485805,
+      "grad_norm": 33.70462417602539,
+      "learning_rate": 1.0740072202166066e-05,
+      "loss": 0.2754,
+      "step": 870
+    },
+    {
+      "epoch": 1.5858494817485354,
+      "grad_norm": 30.827627182006836,
+      "learning_rate": 1.028880866425993e-05,
+      "loss": 0.2913,
+      "step": 880
+    },
+    {
+      "epoch": 1.6038756196484902,
+      "grad_norm": 19.456621170043945,
+      "learning_rate": 9.83754512635379e-06,
+      "loss": 0.3002,
+      "step": 890
+    },
+    {
+      "epoch": 1.6219017575484451,
+      "grad_norm": 22.82554817199707,
+      "learning_rate": 9.386281588447654e-06,
+      "loss": 0.3197,
+      "step": 900
+    },
+    {
+      "epoch": 1.6399278954484002,
+      "grad_norm": 22.25665283203125,
+      "learning_rate": 8.935018050541517e-06,
+      "loss": 0.3069,
+      "step": 910
+    },
+    {
+      "epoch": 1.6579540333483551,
+      "grad_norm": 30.897809982299805,
+      "learning_rate": 8.483754512635379e-06,
+      "loss": 0.2965,
+      "step": 920
+    },
+    {
+      "epoch": 1.67598017124831,
+      "grad_norm": 21.626707077026367,
+      "learning_rate": 8.032490974729243e-06,
+      "loss": 0.287,
+      "step": 930
+    },
+    {
+      "epoch": 1.694006309148265,
+      "grad_norm": 32.338802337646484,
+      "learning_rate": 7.581227436823105e-06,
+      "loss": 0.2863,
+      "step": 940
+    },
+    {
+      "epoch": 1.71203244704822,
+      "grad_norm": 21.633394241333008,
+      "learning_rate": 7.129963898916968e-06,
+      "loss": 0.2822,
+      "step": 950
+    },
+    {
+      "epoch": 1.7300585849481749,
+      "grad_norm": 18.33571434020996,
+      "learning_rate": 6.678700361010831e-06,
+      "loss": 0.2676,
+      "step": 960
+    },
+    {
+      "epoch": 1.7480847228481298,
+      "grad_norm": 29.97617530822754,
+      "learning_rate": 6.227436823104693e-06,
+      "loss": 0.2652,
+      "step": 970
+    },
+    {
+      "epoch": 1.7661108607480847,
+      "grad_norm": 37.65591812133789,
+      "learning_rate": 5.776173285198557e-06,
+      "loss": 0.2956,
+      "step": 980
+    },
+    {
+      "epoch": 1.7841369986480395,
+      "grad_norm": 33.256507873535156,
+      "learning_rate": 5.324909747292419e-06,
+      "loss": 0.3358,
+      "step": 990
+    },
+    {
+      "epoch": 1.8021631365479944,
+      "grad_norm": 24.32988739013672,
+      "learning_rate": 4.873646209386281e-06,
+      "loss": 0.3075,
+      "step": 1000
+    },
+    {
+      "epoch": 1.8201892744479495,
+      "grad_norm": 40.130863189697266,
+      "learning_rate": 4.422382671480145e-06,
+      "loss": 0.2682,
+      "step": 1010
+    },
+    {
+      "epoch": 1.8382154123479044,
+      "grad_norm": 20.9071044921875,
+      "learning_rate": 3.971119133574008e-06,
+      "loss": 0.2849,
+      "step": 1020
+    },
+    {
+      "epoch": 1.8562415502478595,
+      "grad_norm": 39.87733840942383,
+      "learning_rate": 3.51985559566787e-06,
+      "loss": 0.2967,
+      "step": 1030
+    },
+    {
+      "epoch": 1.8742676881478144,
+      "grad_norm": 27.898658752441406,
+      "learning_rate": 3.068592057761733e-06,
+      "loss": 0.2846,
+      "step": 1040
+    },
+    {
+      "epoch": 1.8922938260477693,
+      "grad_norm": 34.07933807373047,
+      "learning_rate": 2.6173285198555957e-06,
+      "loss": 0.2228,
+      "step": 1050
+    },
+    {
+      "epoch": 1.9103199639477242,
+      "grad_norm": 20.768266677856445,
+      "learning_rate": 2.1660649819494585e-06,
+      "loss": 0.269,
+      "step": 1060
+    },
+    {
+      "epoch": 1.928346101847679,
+      "grad_norm": 29.845983505249023,
+      "learning_rate": 1.7148014440433215e-06,
+      "loss": 0.2628,
+      "step": 1070
+    },
+    {
+      "epoch": 1.946372239747634,
+      "grad_norm": 31.16872787475586,
+      "learning_rate": 1.263537906137184e-06,
+      "loss": 0.2821,
+      "step": 1080
+    },
+    {
+      "epoch": 1.9643983776475888,
+      "grad_norm": 24.65738868713379,
+      "learning_rate": 8.122743682310469e-07,
+      "loss": 0.2961,
+      "step": 1090
+    },
+    {
+      "epoch": 1.982424515547544,
+      "grad_norm": 33.531166076660156,
+      "learning_rate": 3.610108303249098e-07,
+      "loss": 0.2663,
+      "step": 1100
+    }
+  ],
+  "logging_steps": 10,
+  "max_steps": 1108,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 2,
+  "save_steps": 500,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 9326760690401280.0,
+  "train_batch_size": 8,
+  "trial_name": null,
+  "trial_params": null
+}
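
Note: the epoch-1 eval entry at step 555 logs the raw confusion counts (eval_tp, eval_fp, eval_fn, eval_tn) alongside the derived precision/recall/F1, and the learning_rate column decays linearly from about 5e-5 toward zero over max_steps = 1108. A small verification sketch follows; the 5e-5 peak learning rate is inferred from the logged values, not stated explicitly in these files.

```python
# Minimal sketch: re-derive the logged eval metrics from the confusion counts
# and check the linear learning-rate decay implied by log_history.
tp, fp, fn = 2093, 458, 317  # eval_tp, eval_fp, eval_fn at step 555

precision = tp / (tp + fp)                           # ~0.82046, matches eval_precision
recall = tp / (tp + fn)                              # ~0.86846, matches eval_recall
f1 = 2 * precision * recall / (precision + recall)   # ~0.84378, matches eval_f1
print(precision, recall, f1)

max_steps, peak_lr = 1108, 5e-5  # peak_lr is an assumption consistent with the log
lr_at = lambda step: peak_lr * (max_steps - step) / max_steps
print(lr_at(10))    # ~4.9549e-05, matches the first logged learning_rate
print(lr_at(1100))  # ~3.6101e-07, matches the last logged learning_rate
```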
checkpoint-1108/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b6e342c007f2cc362633cfcfb92e24456b08c362b2ac6fe5da2a0eb85b9d1d1
+size 5432