Andreas Köpf committed
Commit 4afa825
1 Parent(s): 6b4b5e8
added_tokens.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "<|assistant|>": 32004,
+   "<|prefix_begin|>": 32001,
+   "<|prefix_end|>": 32000,
+   "<|prompter|>": 32003,
+   "<|system|>": 32002
+ }
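
For reference, a minimal sketch of how these id assignments surface once the checkpoint is loaded with the transformers library; the local path below is a placeholder, not part of this commit:

```python
from transformers import AutoTokenizer

# Placeholder path; point this at a local clone of the repository.
tokenizer = AutoTokenizer.from_pretrained("./open_llama_13b_oasst_checkpoint")

# added_tokens.json maps the five chat markers onto ids 32000-32004,
# directly after the 32000-piece open_llama vocabulary.
for token in ["<|prefix_end|>", "<|prefix_begin|>", "<|system|>",
              "<|prompter|>", "<|assistant|>"]:
    print(token, tokenizer.convert_tokens_to_ids(token))
```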
config.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "_name_or_path": "openlm-research/open_llama_13b",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 13824,
+   "max_position_embeddings": 2048,
+   "model_type": "llama",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 40,
+   "pad_token_id": 0,
+   "rms_norm_eps": 1e-06,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.28.0.dev0",
+   "use_cache": true,
+   "vocab_size": 32016
+ }
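
config.json keeps the open_llama_13b architecture (40 layers, 40 heads, hidden size 5120) and only enlarges the vocabulary: 32016 slots cover the 32000 base pieces plus the five added chat tokens, presumably padded up to a multiple of 16. A hedged sketch of checking that the embedding matrix matches (path is a placeholder):

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder path; point this at a local clone of the repository.
config = AutoConfig.from_pretrained("./open_llama_13b_oasst_checkpoint")
model = AutoModelForCausalLM.from_pretrained(
    "./open_llama_13b_oasst_checkpoint", torch_dtype=torch.bfloat16
)

# 32016 = 32000 base SentencePiece tokens + 5 chat markers, padded up
# (an assumption: rounded to a multiple of 16).
print(config.vocab_size)                          # 32016
print(model.get_input_embeddings().weight.shape)  # torch.Size([32016, 5120])
```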
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "pad_token_id": 0,
+   "transformers_version": "4.28.0.dev0"
+ }
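
These generation defaults simply mirror the token ids in config.json; a short sketch of reading them back (path is a placeholder):

```python
from transformers import GenerationConfig

# Placeholder path; point this at a local clone of the repository.
gen_cfg = GenerationConfig.from_pretrained("./open_llama_13b_oasst_checkpoint")

# bos=1 (<s>), eos=2 (</s>), pad=0 (<unk> in the base Llama vocabulary);
# model.generate() picks these up automatically when this file is present.
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)
```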
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:288e017d3f0686e018f835b49b49d9abf12d632fa6031438140d5801c787dc28
+ size 26032195693
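
The blob above is only a Git LFS pointer; the actual weights are roughly 26 GB (26,032,195,693 bytes) and are fetched separately, for example via git lfs pull or, as a sketch, with huggingface_hub. The repo id below is a placeholder, since the commit itself does not state it:

```python
from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the actual Hub repository this commit belongs to.
weights_path = hf_hub_download(
    repo_id="<org>/<repo>",
    filename="pytorch_model.bin",
    revision="4afa825",  # the commit shown above
)
print(weights_path)
```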
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "additional_special_tokens": [
+     "<|prefix_end|>",
+     "<|prefix_begin|>",
+     "<|system|>",
+     "<|prompter|>",
+     "<|assistant|>"
+   ],
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": "</s>",
+   "pad_token": "</s>",
+   "sep_token": "<s>",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
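
Note that pad_token is set to "</s>" here while tokenizer_config.json below leaves it null; assuming the usual transformers loading precedence, the value from this file is what the loaded tokenizer reports. A quick sketch (path is a placeholder):

```python
from transformers import AutoTokenizer

# Placeholder path; point this at a local clone of the repository.
tok = AutoTokenizer.from_pretrained("./open_llama_13b_oasst_checkpoint")

# The five chat markers are registered as additional_special_tokens, so the
# SentencePiece model never splits them into sub-pieces.
print(tok.pad_token, tok.eos_token, tok.sep_token)
print(tok.additional_special_tokens)
```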
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab1b681ec7fc02fed5edd3026687d7a692a918c4dd8e150ca2e3994a6229843b
+ size 534194
tokenizer_config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "clean_up_tokenization_spaces": false,
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "model_max_length": 2048,
+   "pad_token": null,
+   "special_tokens_map_file": ".cache/models--openlm-research--open_llama_13b/snapshots/b6d7fde8392250730d24cc2fcfa3b7e5f9a03ce8/special_tokens_map.json",
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": {
+     "__type": "AddedToken",
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
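
Putting the tokenizer and weights together, a hedged end-to-end sketch; the <|prompter|>/<|assistant|> layout below is the prompt format these markers suggest (the usual OpenAssistant convention), not something this commit spells out, and the path is again a placeholder:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder path; point this at a local clone of the repository.
tok = AutoTokenizer.from_pretrained("./open_llama_13b_oasst_checkpoint")
model = AutoModelForCausalLM.from_pretrained(
    "./open_llama_13b_oasst_checkpoint", torch_dtype=torch.bfloat16
)

# Assumed prompt layout; verify against the training code before relying on it.
prompt = "<|prompter|>Explain what a Git LFS pointer file is.</s><|assistant|>"
inputs = tok(prompt, return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=128, eos_token_id=tok.eos_token_id)
print(tok.decode(output[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```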
trainer_state.json ADDED
@@ -0,0 +1,3826 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 3.017956843217142,
5
+ "global_step": 5000,
6
+ "is_hyper_param_search": false,
7
+ "is_local_process_zero": true,
8
+ "is_world_process_zero": true,
9
+ "log_history": [
10
+ {
11
+ "epoch": 0.01,
12
+ "learning_rate": 4.5e-08,
13
+ "loss": 1.8279,
14
+ "step": 10
15
+ },
16
+ {
17
+ "epoch": 0.01,
18
+ "learning_rate": 9.5e-08,
19
+ "loss": 1.7023,
20
+ "step": 20
21
+ },
22
+ {
23
+ "epoch": 0.02,
24
+ "learning_rate": 1.4500000000000001e-07,
25
+ "loss": 1.6779,
26
+ "step": 30
27
+ },
28
+ {
29
+ "epoch": 0.02,
30
+ "learning_rate": 1.95e-07,
31
+ "loss": 1.6776,
32
+ "step": 40
33
+ },
34
+ {
35
+ "epoch": 0.03,
36
+ "learning_rate": 2.4500000000000004e-07,
37
+ "loss": 1.6379,
38
+ "step": 50
39
+ },
40
+ {
41
+ "epoch": 0.04,
42
+ "learning_rate": 2.9500000000000003e-07,
43
+ "loss": 1.5523,
44
+ "step": 60
45
+ },
46
+ {
47
+ "epoch": 0.04,
48
+ "learning_rate": 3.4500000000000003e-07,
49
+ "loss": 1.3693,
50
+ "step": 70
51
+ },
52
+ {
53
+ "epoch": 0.05,
54
+ "learning_rate": 3.9500000000000003e-07,
55
+ "loss": 1.3299,
56
+ "step": 80
57
+ },
58
+ {
59
+ "epoch": 0.05,
60
+ "learning_rate": 4.4500000000000003e-07,
61
+ "loss": 1.2452,
62
+ "step": 90
63
+ },
64
+ {
65
+ "epoch": 0.06,
66
+ "learning_rate": 4.95e-07,
67
+ "loss": 1.25,
68
+ "step": 100
69
+ },
70
+ {
71
+ "epoch": 0.07,
72
+ "learning_rate": 5.450000000000001e-07,
73
+ "loss": 1.1376,
74
+ "step": 110
75
+ },
76
+ {
77
+ "epoch": 0.07,
78
+ "learning_rate": 5.95e-07,
79
+ "loss": 1.1166,
80
+ "step": 120
81
+ },
82
+ {
83
+ "epoch": 0.08,
84
+ "learning_rate": 6.450000000000001e-07,
85
+ "loss": 1.1069,
86
+ "step": 130
87
+ },
88
+ {
89
+ "epoch": 0.08,
90
+ "learning_rate": 6.950000000000001e-07,
91
+ "loss": 1.0921,
92
+ "step": 140
93
+ },
94
+ {
95
+ "epoch": 0.09,
96
+ "learning_rate": 7.450000000000001e-07,
97
+ "loss": 1.0674,
98
+ "step": 150
99
+ },
100
+ {
101
+ "epoch": 0.1,
102
+ "learning_rate": 7.950000000000001e-07,
103
+ "loss": 1.0283,
104
+ "step": 160
105
+ },
106
+ {
107
+ "epoch": 0.1,
108
+ "learning_rate": 8.450000000000002e-07,
109
+ "loss": 1.0252,
110
+ "step": 170
111
+ },
112
+ {
113
+ "epoch": 0.11,
114
+ "learning_rate": 8.95e-07,
115
+ "loss": 0.993,
116
+ "step": 180
117
+ },
118
+ {
119
+ "epoch": 0.11,
120
+ "learning_rate": 9.450000000000001e-07,
121
+ "loss": 0.9808,
122
+ "step": 190
123
+ },
124
+ {
125
+ "epoch": 0.12,
126
+ "learning_rate": 9.950000000000002e-07,
127
+ "loss": 0.9879,
128
+ "step": 200
129
+ },
130
+ {
131
+ "epoch": 0.13,
132
+ "learning_rate": 1.045e-06,
133
+ "loss": 0.9724,
134
+ "step": 210
135
+ },
136
+ {
137
+ "epoch": 0.13,
138
+ "learning_rate": 1.095e-06,
139
+ "loss": 0.9544,
140
+ "step": 220
141
+ },
142
+ {
143
+ "epoch": 0.14,
144
+ "learning_rate": 1.145e-06,
145
+ "loss": 0.9349,
146
+ "step": 230
147
+ },
148
+ {
149
+ "epoch": 0.14,
150
+ "learning_rate": 1.195e-06,
151
+ "loss": 0.9247,
152
+ "step": 240
153
+ },
154
+ {
155
+ "epoch": 0.15,
156
+ "learning_rate": 1.2450000000000002e-06,
157
+ "loss": 0.9251,
158
+ "step": 250
159
+ },
160
+ {
161
+ "epoch": 0.16,
162
+ "learning_rate": 1.295e-06,
163
+ "loss": 0.938,
164
+ "step": 260
165
+ },
166
+ {
167
+ "epoch": 0.16,
168
+ "learning_rate": 1.3450000000000003e-06,
169
+ "loss": 0.8967,
170
+ "step": 270
171
+ },
172
+ {
173
+ "epoch": 0.17,
174
+ "learning_rate": 1.3950000000000002e-06,
175
+ "loss": 0.9132,
176
+ "step": 280
177
+ },
178
+ {
179
+ "epoch": 0.18,
180
+ "learning_rate": 1.445e-06,
181
+ "loss": 0.9081,
182
+ "step": 290
183
+ },
184
+ {
185
+ "epoch": 0.18,
186
+ "learning_rate": 1.495e-06,
187
+ "loss": 0.8873,
188
+ "step": 300
189
+ },
190
+ {
191
+ "epoch": 0.19,
192
+ "learning_rate": 1.545e-06,
193
+ "loss": 0.8978,
194
+ "step": 310
195
+ },
196
+ {
197
+ "epoch": 0.19,
198
+ "learning_rate": 1.5950000000000002e-06,
199
+ "loss": 0.8628,
200
+ "step": 320
201
+ },
202
+ {
203
+ "epoch": 0.2,
204
+ "learning_rate": 1.6450000000000001e-06,
205
+ "loss": 0.8651,
206
+ "step": 330
207
+ },
208
+ {
209
+ "epoch": 0.21,
210
+ "learning_rate": 1.6950000000000003e-06,
211
+ "loss": 0.8499,
212
+ "step": 340
213
+ },
214
+ {
215
+ "epoch": 0.21,
216
+ "learning_rate": 1.745e-06,
217
+ "loss": 0.8514,
218
+ "step": 350
219
+ },
220
+ {
221
+ "epoch": 0.22,
222
+ "learning_rate": 1.7950000000000002e-06,
223
+ "loss": 0.8645,
224
+ "step": 360
225
+ },
226
+ {
227
+ "epoch": 0.22,
228
+ "learning_rate": 1.8450000000000001e-06,
229
+ "loss": 0.8632,
230
+ "step": 370
231
+ },
232
+ {
233
+ "epoch": 0.23,
234
+ "learning_rate": 1.895e-06,
235
+ "loss": 0.8382,
236
+ "step": 380
237
+ },
238
+ {
239
+ "epoch": 0.24,
240
+ "learning_rate": 1.945e-06,
241
+ "loss": 0.8531,
242
+ "step": 390
243
+ },
244
+ {
245
+ "epoch": 0.24,
246
+ "learning_rate": 1.9950000000000004e-06,
247
+ "loss": 0.8413,
248
+ "step": 400
249
+ },
250
+ {
251
+ "epoch": 0.25,
252
+ "learning_rate": 2.045e-06,
253
+ "loss": 0.8272,
254
+ "step": 410
255
+ },
256
+ {
257
+ "epoch": 0.25,
258
+ "learning_rate": 2.0950000000000003e-06,
259
+ "loss": 0.831,
260
+ "step": 420
261
+ },
262
+ {
263
+ "epoch": 0.26,
264
+ "learning_rate": 2.1450000000000002e-06,
265
+ "loss": 0.8077,
266
+ "step": 430
267
+ },
268
+ {
269
+ "epoch": 0.27,
270
+ "learning_rate": 2.195e-06,
271
+ "loss": 0.8209,
272
+ "step": 440
273
+ },
274
+ {
275
+ "epoch": 0.27,
276
+ "learning_rate": 2.245e-06,
277
+ "loss": 0.8249,
278
+ "step": 450
279
+ },
280
+ {
281
+ "epoch": 0.28,
282
+ "learning_rate": 2.2950000000000005e-06,
283
+ "loss": 0.8037,
284
+ "step": 460
285
+ },
286
+ {
287
+ "epoch": 0.28,
288
+ "learning_rate": 2.345e-06,
289
+ "loss": 0.7976,
290
+ "step": 470
291
+ },
292
+ {
293
+ "epoch": 0.29,
294
+ "learning_rate": 2.395e-06,
295
+ "loss": 0.8046,
296
+ "step": 480
297
+ },
298
+ {
299
+ "epoch": 0.3,
300
+ "learning_rate": 2.4450000000000003e-06,
301
+ "loss": 0.7959,
302
+ "step": 490
303
+ },
304
+ {
305
+ "epoch": 0.3,
306
+ "learning_rate": 2.4950000000000003e-06,
307
+ "loss": 0.8176,
308
+ "step": 500
309
+ },
310
+ {
311
+ "epoch": 0.3,
312
+ "eval_oasst_export_accuracy": 0.7042471551142071,
313
+ "eval_oasst_export_loss": 1.2229081392288208,
314
+ "eval_oasst_export_runtime": 68.987,
315
+ "eval_oasst_export_samples_per_second": 30.354,
316
+ "eval_oasst_export_steps_per_second": 1.276,
317
+ "step": 500
318
+ },
319
+ {
320
+ "epoch": 0.3,
321
+ "eval_code_alpaca_accuracy": 0.8437448762092147,
322
+ "eval_code_alpaca_loss": 0.5453789234161377,
323
+ "eval_code_alpaca_runtime": 4.9365,
324
+ "eval_code_alpaca_samples_per_second": 50.643,
325
+ "eval_code_alpaca_steps_per_second": 2.228,
326
+ "step": 500
327
+ },
328
+ {
329
+ "epoch": 0.3,
330
+ "eval_evol_v2_accuracy": 0.7902228855879827,
331
+ "eval_evol_v2_loss": 0.7061598300933838,
332
+ "eval_evol_v2_runtime": 275.1097,
333
+ "eval_evol_v2_samples_per_second": 25.99,
334
+ "eval_evol_v2_steps_per_second": 1.083,
335
+ "step": 500
336
+ },
337
+ {
338
+ "epoch": 0.3,
339
+ "eval_nlu_instruct_accuracy": 0.7843999034325105,
340
+ "eval_nlu_instruct_loss": 0.757986843585968,
341
+ "eval_nlu_instruct_runtime": 3477.7847,
342
+ "eval_nlu_instruct_samples_per_second": 22.435,
343
+ "eval_nlu_instruct_steps_per_second": 0.935,
344
+ "step": 500
345
+ },
346
+ {
347
+ "epoch": 0.3,
348
+ "eval_grade_school_math_instructions_accuracy": 0.7889496290511518,
349
+ "eval_grade_school_math_instructions_loss": 0.7259232997894287,
350
+ "eval_grade_school_math_instructions_runtime": 10.7804,
351
+ "eval_grade_school_math_instructions_samples_per_second": 40.815,
352
+ "eval_grade_school_math_instructions_steps_per_second": 1.762,
353
+ "step": 500
354
+ },
355
+ {
356
+ "epoch": 0.3,
357
+ "eval_poem_instructions_accuracy": 0.49874821391497626,
358
+ "eval_poem_instructions_loss": 2.590395450592041,
359
+ "eval_poem_instructions_runtime": 21.1843,
360
+ "eval_poem_instructions_samples_per_second": 16.38,
361
+ "eval_poem_instructions_steps_per_second": 0.708,
362
+ "step": 500
363
+ },
364
+ {
365
+ "epoch": 0.3,
366
+ "eval_gpt4all_accuracy": 0.7821919414875455,
367
+ "eval_gpt4all_loss": 0.7756699323654175,
368
+ "eval_gpt4all_runtime": 3565.0056,
369
+ "eval_gpt4all_samples_per_second": 21.813,
370
+ "eval_gpt4all_steps_per_second": 0.909,
371
+ "step": 500
372
+ },
373
+ {
374
+ "epoch": 0.3,
375
+ "eval_joke_accuracy": 0.5321712147485625,
376
+ "eval_joke_loss": 1.970703125,
377
+ "eval_joke_runtime": 5.8841,
378
+ "eval_joke_samples_per_second": 12.916,
379
+ "eval_joke_steps_per_second": 0.68,
380
+ "step": 500
381
+ },
382
+ {
383
+ "epoch": 0.3,
384
+ "eval_gsm8k_accuracy": 0.8398790847611236,
385
+ "eval_gsm8k_loss": 0.5526099801063538,
386
+ "eval_gsm8k_runtime": 22.7523,
387
+ "eval_gsm8k_samples_per_second": 57.972,
388
+ "eval_gsm8k_steps_per_second": 2.417,
389
+ "step": 500
390
+ },
391
+ {
392
+ "epoch": 0.31,
393
+ "learning_rate": 2.545e-06,
394
+ "loss": 0.8266,
395
+ "step": 510
396
+ },
397
+ {
398
+ "epoch": 0.31,
399
+ "learning_rate": 2.595e-06,
400
+ "loss": 0.8252,
401
+ "step": 520
402
+ },
403
+ {
404
+ "epoch": 0.32,
405
+ "learning_rate": 2.6450000000000005e-06,
406
+ "loss": 0.806,
407
+ "step": 530
408
+ },
409
+ {
410
+ "epoch": 0.33,
411
+ "learning_rate": 2.6950000000000005e-06,
412
+ "loss": 0.7994,
413
+ "step": 540
414
+ },
415
+ {
416
+ "epoch": 0.33,
417
+ "learning_rate": 2.7450000000000004e-06,
418
+ "loss": 0.8062,
419
+ "step": 550
420
+ },
421
+ {
422
+ "epoch": 0.34,
423
+ "learning_rate": 2.7950000000000003e-06,
424
+ "loss": 0.8086,
425
+ "step": 560
426
+ },
427
+ {
428
+ "epoch": 0.34,
429
+ "learning_rate": 2.845e-06,
430
+ "loss": 0.8055,
431
+ "step": 570
432
+ },
433
+ {
434
+ "epoch": 0.35,
435
+ "learning_rate": 2.8950000000000002e-06,
436
+ "loss": 0.7825,
437
+ "step": 580
438
+ },
439
+ {
440
+ "epoch": 0.36,
441
+ "learning_rate": 2.945e-06,
442
+ "loss": 0.796,
443
+ "step": 590
444
+ },
445
+ {
446
+ "epoch": 0.36,
447
+ "learning_rate": 2.995e-06,
448
+ "loss": 0.7974,
449
+ "step": 600
450
+ },
451
+ {
452
+ "epoch": 0.37,
453
+ "learning_rate": 3.045e-06,
454
+ "loss": 0.8071,
455
+ "step": 610
456
+ },
457
+ {
458
+ "epoch": 0.37,
459
+ "learning_rate": 3.0950000000000004e-06,
460
+ "loss": 0.7967,
461
+ "step": 620
462
+ },
463
+ {
464
+ "epoch": 0.38,
465
+ "learning_rate": 3.1450000000000004e-06,
466
+ "loss": 0.778,
467
+ "step": 630
468
+ },
469
+ {
470
+ "epoch": 0.39,
471
+ "learning_rate": 3.1950000000000003e-06,
472
+ "loss": 0.7869,
473
+ "step": 640
474
+ },
475
+ {
476
+ "epoch": 0.39,
477
+ "learning_rate": 3.2450000000000003e-06,
478
+ "loss": 0.7828,
479
+ "step": 650
480
+ },
481
+ {
482
+ "epoch": 0.4,
483
+ "learning_rate": 3.2950000000000002e-06,
484
+ "loss": 0.7927,
485
+ "step": 660
486
+ },
487
+ {
488
+ "epoch": 0.4,
489
+ "learning_rate": 3.3450000000000006e-06,
490
+ "loss": 0.7709,
491
+ "step": 670
492
+ },
493
+ {
494
+ "epoch": 0.41,
495
+ "learning_rate": 3.3950000000000005e-06,
496
+ "loss": 0.7624,
497
+ "step": 680
498
+ },
499
+ {
500
+ "epoch": 0.42,
501
+ "learning_rate": 3.445e-06,
502
+ "loss": 0.7739,
503
+ "step": 690
504
+ },
505
+ {
506
+ "epoch": 0.42,
507
+ "learning_rate": 3.495e-06,
508
+ "loss": 0.77,
509
+ "step": 700
510
+ },
511
+ {
512
+ "epoch": 0.43,
513
+ "learning_rate": 3.545e-06,
514
+ "loss": 0.7814,
515
+ "step": 710
516
+ },
517
+ {
518
+ "epoch": 0.43,
519
+ "learning_rate": 3.5950000000000003e-06,
520
+ "loss": 0.7822,
521
+ "step": 720
522
+ },
523
+ {
524
+ "epoch": 0.44,
525
+ "learning_rate": 3.6450000000000003e-06,
526
+ "loss": 0.7816,
527
+ "step": 730
528
+ },
529
+ {
530
+ "epoch": 0.45,
531
+ "learning_rate": 3.695e-06,
532
+ "loss": 0.7935,
533
+ "step": 740
534
+ },
535
+ {
536
+ "epoch": 0.45,
537
+ "learning_rate": 3.745e-06,
538
+ "loss": 0.7738,
539
+ "step": 750
540
+ },
541
+ {
542
+ "epoch": 0.46,
543
+ "learning_rate": 3.7950000000000005e-06,
544
+ "loss": 0.7585,
545
+ "step": 760
546
+ },
547
+ {
548
+ "epoch": 0.46,
549
+ "learning_rate": 3.8450000000000005e-06,
550
+ "loss": 0.7776,
551
+ "step": 770
552
+ },
553
+ {
554
+ "epoch": 0.47,
555
+ "learning_rate": 3.895000000000001e-06,
556
+ "loss": 0.7681,
557
+ "step": 780
558
+ },
559
+ {
560
+ "epoch": 0.48,
561
+ "learning_rate": 3.945e-06,
562
+ "loss": 0.7836,
563
+ "step": 790
564
+ },
565
+ {
566
+ "epoch": 0.48,
567
+ "learning_rate": 3.995000000000001e-06,
568
+ "loss": 0.7687,
569
+ "step": 800
570
+ },
571
+ {
572
+ "epoch": 0.49,
573
+ "learning_rate": 4.045e-06,
574
+ "loss": 0.7852,
575
+ "step": 810
576
+ },
577
+ {
578
+ "epoch": 0.49,
579
+ "learning_rate": 4.095e-06,
580
+ "loss": 0.753,
581
+ "step": 820
582
+ },
583
+ {
584
+ "epoch": 0.5,
585
+ "learning_rate": 4.145e-06,
586
+ "loss": 0.7676,
587
+ "step": 830
588
+ },
589
+ {
590
+ "epoch": 0.51,
591
+ "learning_rate": 4.1950000000000005e-06,
592
+ "loss": 0.7626,
593
+ "step": 840
594
+ },
595
+ {
596
+ "epoch": 0.51,
597
+ "learning_rate": 4.245e-06,
598
+ "loss": 0.7578,
599
+ "step": 850
600
+ },
601
+ {
602
+ "epoch": 0.52,
603
+ "learning_rate": 4.295e-06,
604
+ "loss": 0.7743,
605
+ "step": 860
606
+ },
607
+ {
608
+ "epoch": 0.53,
609
+ "learning_rate": 4.345000000000001e-06,
610
+ "loss": 0.7662,
611
+ "step": 870
612
+ },
613
+ {
614
+ "epoch": 0.53,
615
+ "learning_rate": 4.395e-06,
616
+ "loss": 0.7709,
617
+ "step": 880
618
+ },
619
+ {
620
+ "epoch": 0.54,
621
+ "learning_rate": 4.445000000000001e-06,
622
+ "loss": 0.7638,
623
+ "step": 890
624
+ },
625
+ {
626
+ "epoch": 0.54,
627
+ "learning_rate": 4.495e-06,
628
+ "loss": 0.752,
629
+ "step": 900
630
+ },
631
+ {
632
+ "epoch": 0.55,
633
+ "learning_rate": 4.5450000000000005e-06,
634
+ "loss": 0.7744,
635
+ "step": 910
636
+ },
637
+ {
638
+ "epoch": 0.56,
639
+ "learning_rate": 4.595000000000001e-06,
640
+ "loss": 0.7607,
641
+ "step": 920
642
+ },
643
+ {
644
+ "epoch": 0.56,
645
+ "learning_rate": 4.645e-06,
646
+ "loss": 0.765,
647
+ "step": 930
648
+ },
649
+ {
650
+ "epoch": 0.57,
651
+ "learning_rate": 4.695e-06,
652
+ "loss": 0.7534,
653
+ "step": 940
654
+ },
655
+ {
656
+ "epoch": 0.57,
657
+ "learning_rate": 4.745e-06,
658
+ "loss": 0.7569,
659
+ "step": 950
660
+ },
661
+ {
662
+ "epoch": 0.58,
663
+ "learning_rate": 4.795e-06,
664
+ "loss": 0.7522,
665
+ "step": 960
666
+ },
667
+ {
668
+ "epoch": 0.59,
669
+ "learning_rate": 4.845e-06,
670
+ "loss": 0.7876,
671
+ "step": 970
672
+ },
673
+ {
674
+ "epoch": 0.59,
675
+ "learning_rate": 4.8950000000000006e-06,
676
+ "loss": 0.7586,
677
+ "step": 980
678
+ },
679
+ {
680
+ "epoch": 0.6,
681
+ "learning_rate": 4.945e-06,
682
+ "loss": 0.7658,
683
+ "step": 990
684
+ },
685
+ {
686
+ "epoch": 0.6,
687
+ "learning_rate": 4.9950000000000005e-06,
688
+ "loss": 0.7562,
689
+ "step": 1000
690
+ },
691
+ {
692
+ "epoch": 0.6,
693
+ "eval_oasst_export_accuracy": 0.7149679693032294,
694
+ "eval_oasst_export_loss": 1.1956202983856201,
695
+ "eval_oasst_export_runtime": 67.0497,
696
+ "eval_oasst_export_samples_per_second": 31.231,
697
+ "eval_oasst_export_steps_per_second": 1.312,
698
+ "step": 1000
699
+ },
700
+ {
701
+ "epoch": 0.6,
702
+ "eval_code_alpaca_accuracy": 0.8463136033229491,
703
+ "eval_code_alpaca_loss": 0.5398945212364197,
704
+ "eval_code_alpaca_runtime": 6.22,
705
+ "eval_code_alpaca_samples_per_second": 40.193,
706
+ "eval_code_alpaca_steps_per_second": 1.769,
707
+ "step": 1000
708
+ },
709
+ {
710
+ "epoch": 0.6,
711
+ "eval_evol_v2_accuracy": 0.7982416515833893,
712
+ "eval_evol_v2_loss": 0.6806697845458984,
713
+ "eval_evol_v2_runtime": 274.9092,
714
+ "eval_evol_v2_samples_per_second": 26.009,
715
+ "eval_evol_v2_steps_per_second": 1.084,
716
+ "step": 1000
717
+ },
718
+ {
719
+ "epoch": 0.6,
720
+ "eval_nlu_instruct_accuracy": 0.8084633890177162,
721
+ "eval_nlu_instruct_loss": 0.6906304955482483,
722
+ "eval_nlu_instruct_runtime": 3476.832,
723
+ "eval_nlu_instruct_samples_per_second": 22.441,
724
+ "eval_nlu_instruct_steps_per_second": 0.935,
725
+ "step": 1000
726
+ },
727
+ {
728
+ "epoch": 0.6,
729
+ "eval_grade_school_math_instructions_accuracy": 0.8071065989847716,
730
+ "eval_grade_school_math_instructions_loss": 0.6586337089538574,
731
+ "eval_grade_school_math_instructions_runtime": 9.647,
732
+ "eval_grade_school_math_instructions_samples_per_second": 45.61,
733
+ "eval_grade_school_math_instructions_steps_per_second": 1.97,
734
+ "step": 1000
735
+ },
736
+ {
737
+ "epoch": 0.6,
738
+ "eval_poem_instructions_accuracy": 0.506542535644907,
739
+ "eval_poem_instructions_loss": 2.5567362308502197,
740
+ "eval_poem_instructions_runtime": 21.1754,
741
+ "eval_poem_instructions_samples_per_second": 16.387,
742
+ "eval_poem_instructions_steps_per_second": 0.708,
743
+ "step": 1000
744
+ },
745
+ {
746
+ "epoch": 0.6,
747
+ "eval_gpt4all_accuracy": 0.7917010462623065,
748
+ "eval_gpt4all_loss": 0.7404432892799377,
749
+ "eval_gpt4all_runtime": 3565.553,
750
+ "eval_gpt4all_samples_per_second": 21.809,
751
+ "eval_gpt4all_steps_per_second": 0.909,
752
+ "step": 1000
753
+ },
754
+ {
755
+ "epoch": 0.6,
756
+ "eval_joke_accuracy": 0.5601898329834809,
757
+ "eval_joke_loss": 1.8026316165924072,
758
+ "eval_joke_runtime": 4.4938,
759
+ "eval_joke_samples_per_second": 16.912,
760
+ "eval_joke_steps_per_second": 0.89,
761
+ "step": 1000
762
+ },
763
+ {
764
+ "epoch": 0.6,
765
+ "eval_gsm8k_accuracy": 0.8582343392395189,
766
+ "eval_gsm8k_loss": 0.4936060607433319,
767
+ "eval_gsm8k_runtime": 22.7991,
768
+ "eval_gsm8k_samples_per_second": 57.853,
769
+ "eval_gsm8k_steps_per_second": 2.412,
770
+ "step": 1000
771
+ },
772
+ {
773
+ "epoch": 0.61,
774
+ "learning_rate": 5.045e-06,
775
+ "loss": 0.7579,
776
+ "step": 1010
777
+ },
778
+ {
779
+ "epoch": 0.62,
780
+ "learning_rate": 5.095e-06,
781
+ "loss": 0.7684,
782
+ "step": 1020
783
+ },
784
+ {
785
+ "epoch": 0.62,
786
+ "learning_rate": 5.145e-06,
787
+ "loss": 0.7464,
788
+ "step": 1030
789
+ },
790
+ {
791
+ "epoch": 0.63,
792
+ "learning_rate": 5.195e-06,
793
+ "loss": 0.7691,
794
+ "step": 1040
795
+ },
796
+ {
797
+ "epoch": 0.63,
798
+ "learning_rate": 5.245e-06,
799
+ "loss": 0.7385,
800
+ "step": 1050
801
+ },
802
+ {
803
+ "epoch": 0.64,
804
+ "learning_rate": 5.295e-06,
805
+ "loss": 0.7704,
806
+ "step": 1060
807
+ },
808
+ {
809
+ "epoch": 0.65,
810
+ "learning_rate": 5.3450000000000005e-06,
811
+ "loss": 0.7735,
812
+ "step": 1070
813
+ },
814
+ {
815
+ "epoch": 0.65,
816
+ "learning_rate": 5.395e-06,
817
+ "loss": 0.7448,
818
+ "step": 1080
819
+ },
820
+ {
821
+ "epoch": 0.66,
822
+ "learning_rate": 5.445e-06,
823
+ "loss": 0.7358,
824
+ "step": 1090
825
+ },
826
+ {
827
+ "epoch": 0.66,
828
+ "learning_rate": 5.495000000000001e-06,
829
+ "loss": 0.762,
830
+ "step": 1100
831
+ },
832
+ {
833
+ "epoch": 0.67,
834
+ "learning_rate": 5.545e-06,
835
+ "loss": 0.7636,
836
+ "step": 1110
837
+ },
838
+ {
839
+ "epoch": 0.68,
840
+ "learning_rate": 5.595000000000001e-06,
841
+ "loss": 0.7662,
842
+ "step": 1120
843
+ },
844
+ {
845
+ "epoch": 0.68,
846
+ "learning_rate": 5.645e-06,
847
+ "loss": 0.7565,
848
+ "step": 1130
849
+ },
850
+ {
851
+ "epoch": 0.69,
852
+ "learning_rate": 5.6950000000000005e-06,
853
+ "loss": 0.7564,
854
+ "step": 1140
855
+ },
856
+ {
857
+ "epoch": 0.69,
858
+ "learning_rate": 5.745000000000001e-06,
859
+ "loss": 0.7576,
860
+ "step": 1150
861
+ },
862
+ {
863
+ "epoch": 0.7,
864
+ "learning_rate": 5.795e-06,
865
+ "loss": 0.7588,
866
+ "step": 1160
867
+ },
868
+ {
869
+ "epoch": 0.71,
870
+ "learning_rate": 5.845000000000001e-06,
871
+ "loss": 0.7503,
872
+ "step": 1170
873
+ },
874
+ {
875
+ "epoch": 0.71,
876
+ "learning_rate": 5.895e-06,
877
+ "loss": 0.7579,
878
+ "step": 1180
879
+ },
880
+ {
881
+ "epoch": 0.72,
882
+ "learning_rate": 5.945000000000001e-06,
883
+ "loss": 0.772,
884
+ "step": 1190
885
+ },
886
+ {
887
+ "epoch": 0.72,
888
+ "learning_rate": 5.995000000000001e-06,
889
+ "loss": 0.7507,
890
+ "step": 1200
891
+ },
892
+ {
893
+ "epoch": 0.73,
894
+ "learning_rate": 6.0450000000000006e-06,
895
+ "loss": 0.7448,
896
+ "step": 1210
897
+ },
898
+ {
899
+ "epoch": 0.74,
900
+ "learning_rate": 6.095000000000001e-06,
901
+ "loss": 0.7479,
902
+ "step": 1220
903
+ },
904
+ {
905
+ "epoch": 0.74,
906
+ "learning_rate": 6.145000000000001e-06,
907
+ "loss": 0.7502,
908
+ "step": 1230
909
+ },
910
+ {
911
+ "epoch": 0.75,
912
+ "learning_rate": 6.195000000000001e-06,
913
+ "loss": 0.7449,
914
+ "step": 1240
915
+ },
916
+ {
917
+ "epoch": 0.75,
918
+ "learning_rate": 6.245000000000001e-06,
919
+ "loss": 0.7491,
920
+ "step": 1250
921
+ },
922
+ {
923
+ "epoch": 0.76,
924
+ "learning_rate": 6.295e-06,
925
+ "loss": 0.7561,
926
+ "step": 1260
927
+ },
928
+ {
929
+ "epoch": 0.77,
930
+ "learning_rate": 6.345e-06,
931
+ "loss": 0.7581,
932
+ "step": 1270
933
+ },
934
+ {
935
+ "epoch": 0.77,
936
+ "learning_rate": 6.395e-06,
937
+ "loss": 0.7544,
938
+ "step": 1280
939
+ },
940
+ {
941
+ "epoch": 0.78,
942
+ "learning_rate": 6.445e-06,
943
+ "loss": 0.7429,
944
+ "step": 1290
945
+ },
946
+ {
947
+ "epoch": 0.78,
948
+ "learning_rate": 6.4950000000000005e-06,
949
+ "loss": 0.7678,
950
+ "step": 1300
951
+ },
952
+ {
953
+ "epoch": 0.79,
954
+ "learning_rate": 6.545e-06,
955
+ "loss": 0.7655,
956
+ "step": 1310
957
+ },
958
+ {
959
+ "epoch": 0.8,
960
+ "learning_rate": 6.595e-06,
961
+ "loss": 0.7695,
962
+ "step": 1320
963
+ },
964
+ {
965
+ "epoch": 0.8,
966
+ "learning_rate": 6.645000000000001e-06,
967
+ "loss": 0.7645,
968
+ "step": 1330
969
+ },
970
+ {
971
+ "epoch": 0.81,
972
+ "learning_rate": 6.695e-06,
973
+ "loss": 0.7628,
974
+ "step": 1340
975
+ },
976
+ {
977
+ "epoch": 0.81,
978
+ "learning_rate": 6.745000000000001e-06,
979
+ "loss": 0.7597,
980
+ "step": 1350
981
+ },
982
+ {
983
+ "epoch": 0.82,
984
+ "learning_rate": 6.795e-06,
985
+ "loss": 0.7687,
986
+ "step": 1360
987
+ },
988
+ {
989
+ "epoch": 0.83,
990
+ "learning_rate": 6.8450000000000005e-06,
991
+ "loss": 0.7488,
992
+ "step": 1370
993
+ },
994
+ {
995
+ "epoch": 0.83,
996
+ "learning_rate": 6.895000000000001e-06,
997
+ "loss": 0.7454,
998
+ "step": 1380
999
+ },
1000
+ {
1001
+ "epoch": 0.84,
1002
+ "learning_rate": 6.945e-06,
1003
+ "loss": 0.7459,
1004
+ "step": 1390
1005
+ },
1006
+ {
1007
+ "epoch": 0.85,
1008
+ "learning_rate": 6.995000000000001e-06,
1009
+ "loss": 0.7595,
1010
+ "step": 1400
1011
+ },
1012
+ {
1013
+ "epoch": 0.85,
1014
+ "learning_rate": 7.045e-06,
1015
+ "loss": 0.762,
1016
+ "step": 1410
1017
+ },
1018
+ {
1019
+ "epoch": 0.86,
1020
+ "learning_rate": 7.095000000000001e-06,
1021
+ "loss": 0.7618,
1022
+ "step": 1420
1023
+ },
1024
+ {
1025
+ "epoch": 0.86,
1026
+ "learning_rate": 7.145000000000001e-06,
1027
+ "loss": 0.737,
1028
+ "step": 1430
1029
+ },
1030
+ {
1031
+ "epoch": 0.87,
1032
+ "learning_rate": 7.1950000000000006e-06,
1033
+ "loss": 0.7492,
1034
+ "step": 1440
1035
+ },
1036
+ {
1037
+ "epoch": 0.88,
1038
+ "learning_rate": 7.245000000000001e-06,
1039
+ "loss": 0.7617,
1040
+ "step": 1450
1041
+ },
1042
+ {
1043
+ "epoch": 0.88,
1044
+ "learning_rate": 7.295000000000001e-06,
1045
+ "loss": 0.7564,
1046
+ "step": 1460
1047
+ },
1048
+ {
1049
+ "epoch": 0.89,
1050
+ "learning_rate": 7.345000000000001e-06,
1051
+ "loss": 0.7612,
1052
+ "step": 1470
1053
+ },
1054
+ {
1055
+ "epoch": 0.89,
1056
+ "learning_rate": 7.395000000000001e-06,
1057
+ "loss": 0.7506,
1058
+ "step": 1480
1059
+ },
1060
+ {
1061
+ "epoch": 0.9,
1062
+ "learning_rate": 7.445000000000001e-06,
1063
+ "loss": 0.7555,
1064
+ "step": 1490
1065
+ },
1066
+ {
1067
+ "epoch": 0.91,
1068
+ "learning_rate": 7.495000000000001e-06,
1069
+ "loss": 0.7528,
1070
+ "step": 1500
1071
+ },
1072
+ {
1073
+ "epoch": 0.91,
1074
+ "eval_oasst_export_accuracy": 0.7145207936670809,
1075
+ "eval_oasst_export_loss": 1.20199453830719,
1076
+ "eval_oasst_export_runtime": 68.1714,
1077
+ "eval_oasst_export_samples_per_second": 30.717,
1078
+ "eval_oasst_export_steps_per_second": 1.291,
1079
+ "step": 1500
1080
+ },
1081
+ {
1082
+ "epoch": 0.91,
1083
+ "eval_code_alpaca_accuracy": 0.8416680330108761,
1084
+ "eval_code_alpaca_loss": 0.5439140796661377,
1085
+ "eval_code_alpaca_runtime": 5.1754,
1086
+ "eval_code_alpaca_samples_per_second": 48.306,
1087
+ "eval_code_alpaca_steps_per_second": 2.125,
1088
+ "step": 1500
1089
+ },
1090
+ {
1091
+ "epoch": 0.91,
1092
+ "eval_evol_v2_accuracy": 0.7973383052991362,
1093
+ "eval_evol_v2_loss": 0.6886976957321167,
1094
+ "eval_evol_v2_runtime": 275.0257,
1095
+ "eval_evol_v2_samples_per_second": 25.998,
1096
+ "eval_evol_v2_steps_per_second": 1.084,
1097
+ "step": 1500
1098
+ },
1099
+ {
1100
+ "epoch": 0.91,
1101
+ "eval_nlu_instruct_accuracy": 0.8144291416562309,
1102
+ "eval_nlu_instruct_loss": 0.6783603429794312,
1103
+ "eval_nlu_instruct_runtime": 3480.2092,
1104
+ "eval_nlu_instruct_samples_per_second": 22.419,
1105
+ "eval_nlu_instruct_steps_per_second": 0.934,
1106
+ "step": 1500
1107
+ },
1108
+ {
1109
+ "epoch": 0.91,
1110
+ "eval_grade_school_math_instructions_accuracy": 0.8132565404139008,
1111
+ "eval_grade_school_math_instructions_loss": 0.6372869610786438,
1112
+ "eval_grade_school_math_instructions_runtime": 8.0235,
1113
+ "eval_grade_school_math_instructions_samples_per_second": 54.839,
1114
+ "eval_grade_school_math_instructions_steps_per_second": 2.368,
1115
+ "step": 1500
1116
+ },
1117
+ {
1118
+ "epoch": 0.91,
1119
+ "eval_poem_instructions_accuracy": 0.5055101467336193,
1120
+ "eval_poem_instructions_loss": 2.5727665424346924,
1121
+ "eval_poem_instructions_runtime": 21.4941,
1122
+ "eval_poem_instructions_samples_per_second": 16.144,
1123
+ "eval_poem_instructions_steps_per_second": 0.698,
1124
+ "step": 1500
1125
+ },
1126
+ {
1127
+ "epoch": 0.91,
1128
+ "eval_gpt4all_accuracy": 0.791444802483569,
1129
+ "eval_gpt4all_loss": 0.7454394698143005,
1130
+ "eval_gpt4all_runtime": 3568.858,
1131
+ "eval_gpt4all_samples_per_second": 21.789,
1132
+ "eval_gpt4all_steps_per_second": 0.908,
1133
+ "step": 1500
1134
+ },
1135
+ {
1136
+ "epoch": 0.91,
1137
+ "eval_joke_accuracy": 0.5673085698640139,
1138
+ "eval_joke_loss": 1.7545230388641357,
1139
+ "eval_joke_runtime": 2.3532,
1140
+ "eval_joke_samples_per_second": 32.296,
1141
+ "eval_joke_steps_per_second": 1.7,
1142
+ "step": 1500
1143
+ },
1144
+ {
1145
+ "epoch": 0.91,
1146
+ "eval_gsm8k_accuracy": 0.8628002087910197,
1147
+ "eval_gsm8k_loss": 0.47572216391563416,
1148
+ "eval_gsm8k_runtime": 22.6914,
1149
+ "eval_gsm8k_samples_per_second": 58.128,
1150
+ "eval_gsm8k_steps_per_second": 2.424,
1151
+ "step": 1500
1152
+ },
1153
+ {
1154
+ "epoch": 0.91,
1155
+ "learning_rate": 7.545e-06,
1156
+ "loss": 0.7567,
1157
+ "step": 1510
1158
+ },
1159
+ {
1160
+ "epoch": 0.92,
1161
+ "learning_rate": 7.595e-06,
1162
+ "loss": 0.7925,
1163
+ "step": 1520
1164
+ },
1165
+ {
1166
+ "epoch": 0.92,
1167
+ "learning_rate": 7.645e-06,
1168
+ "loss": 0.7563,
1169
+ "step": 1530
1170
+ },
1171
+ {
1172
+ "epoch": 0.93,
1173
+ "learning_rate": 7.695e-06,
1174
+ "loss": 0.7566,
1175
+ "step": 1540
1176
+ },
1177
+ {
1178
+ "epoch": 0.94,
1179
+ "learning_rate": 7.745e-06,
1180
+ "loss": 0.7523,
1181
+ "step": 1550
1182
+ },
1183
+ {
1184
+ "epoch": 0.94,
1185
+ "learning_rate": 7.795e-06,
1186
+ "loss": 0.749,
1187
+ "step": 1560
1188
+ },
1189
+ {
1190
+ "epoch": 0.95,
1191
+ "learning_rate": 7.845e-06,
1192
+ "loss": 0.7668,
1193
+ "step": 1570
1194
+ },
1195
+ {
1196
+ "epoch": 0.95,
1197
+ "learning_rate": 7.895e-06,
1198
+ "loss": 0.7706,
1199
+ "step": 1580
1200
+ },
1201
+ {
1202
+ "epoch": 0.96,
1203
+ "learning_rate": 7.945000000000001e-06,
1204
+ "loss": 0.7329,
1205
+ "step": 1590
1206
+ },
1207
+ {
1208
+ "epoch": 0.97,
1209
+ "learning_rate": 7.995e-06,
1210
+ "loss": 0.7679,
1211
+ "step": 1600
1212
+ },
1213
+ {
1214
+ "epoch": 0.97,
1215
+ "learning_rate": 8.045e-06,
1216
+ "loss": 0.752,
1217
+ "step": 1610
1218
+ },
1219
+ {
1220
+ "epoch": 0.98,
1221
+ "learning_rate": 8.095000000000001e-06,
1222
+ "loss": 0.7768,
1223
+ "step": 1620
1224
+ },
1225
+ {
1226
+ "epoch": 0.98,
1227
+ "learning_rate": 8.145e-06,
1228
+ "loss": 0.7673,
1229
+ "step": 1630
1230
+ },
1231
+ {
1232
+ "epoch": 0.99,
1233
+ "learning_rate": 8.195e-06,
1234
+ "loss": 0.7474,
1235
+ "step": 1640
1236
+ },
1237
+ {
1238
+ "epoch": 1.0,
1239
+ "learning_rate": 8.245000000000002e-06,
1240
+ "loss": 0.7743,
1241
+ "step": 1650
1242
+ },
1243
+ {
1244
+ "epoch": 1.0,
1245
+ "learning_rate": 8.295000000000001e-06,
1246
+ "loss": 0.7491,
1247
+ "step": 1660
1248
+ },
1249
+ {
1250
+ "epoch": 1.01,
1251
+ "learning_rate": 8.345e-06,
1252
+ "loss": 0.7182,
1253
+ "step": 1670
1254
+ },
1255
+ {
1256
+ "epoch": 1.01,
1257
+ "learning_rate": 8.395e-06,
1258
+ "loss": 0.7146,
1259
+ "step": 1680
1260
+ },
1261
+ {
1262
+ "epoch": 1.02,
1263
+ "learning_rate": 8.445000000000001e-06,
1264
+ "loss": 0.702,
1265
+ "step": 1690
1266
+ },
1267
+ {
1268
+ "epoch": 1.03,
1269
+ "learning_rate": 8.495e-06,
1270
+ "loss": 0.716,
1271
+ "step": 1700
1272
+ },
1273
+ {
1274
+ "epoch": 1.03,
1275
+ "learning_rate": 8.545e-06,
1276
+ "loss": 0.6966,
1277
+ "step": 1710
1278
+ },
1279
+ {
1280
+ "epoch": 1.04,
1281
+ "learning_rate": 8.595000000000002e-06,
1282
+ "loss": 0.7042,
1283
+ "step": 1720
1284
+ },
1285
+ {
1286
+ "epoch": 1.04,
1287
+ "learning_rate": 8.645000000000001e-06,
1288
+ "loss": 0.7243,
1289
+ "step": 1730
1290
+ },
1291
+ {
1292
+ "epoch": 1.05,
1293
+ "learning_rate": 8.695e-06,
1294
+ "loss": 0.7169,
1295
+ "step": 1740
1296
+ },
1297
+ {
1298
+ "epoch": 1.06,
1299
+ "learning_rate": 8.745000000000002e-06,
1300
+ "loss": 0.7089,
1301
+ "step": 1750
1302
+ },
1303
+ {
1304
+ "epoch": 1.06,
1305
+ "learning_rate": 8.795e-06,
1306
+ "loss": 0.706,
1307
+ "step": 1760
1308
+ },
1309
+ {
1310
+ "epoch": 1.07,
1311
+ "learning_rate": 8.845000000000001e-06,
1312
+ "loss": 0.7201,
1313
+ "step": 1770
1314
+ },
1315
+ {
1316
+ "epoch": 1.07,
1317
+ "learning_rate": 8.895e-06,
1318
+ "loss": 0.7038,
1319
+ "step": 1780
1320
+ },
1321
+ {
1322
+ "epoch": 1.08,
1323
+ "learning_rate": 8.945e-06,
1324
+ "loss": 0.7261,
1325
+ "step": 1790
1326
+ },
1327
+ {
1328
+ "epoch": 1.09,
1329
+ "learning_rate": 8.995000000000001e-06,
1330
+ "loss": 0.7032,
1331
+ "step": 1800
1332
+ },
1333
+ {
1334
+ "epoch": 1.09,
1335
+ "learning_rate": 9.045e-06,
1336
+ "loss": 0.725,
1337
+ "step": 1810
1338
+ },
1339
+ {
1340
+ "epoch": 1.1,
1341
+ "learning_rate": 9.095e-06,
1342
+ "loss": 0.7157,
1343
+ "step": 1820
1344
+ },
1345
+ {
1346
+ "epoch": 1.1,
1347
+ "learning_rate": 9.145000000000001e-06,
1348
+ "loss": 0.7259,
1349
+ "step": 1830
1350
+ },
1351
+ {
1352
+ "epoch": 1.11,
1353
+ "learning_rate": 9.195000000000001e-06,
1354
+ "loss": 0.7287,
1355
+ "step": 1840
1356
+ },
1357
+ {
1358
+ "epoch": 1.12,
1359
+ "learning_rate": 9.245e-06,
1360
+ "loss": 0.7039,
1361
+ "step": 1850
1362
+ },
1363
+ {
1364
+ "epoch": 1.12,
1365
+ "learning_rate": 9.295e-06,
1366
+ "loss": 0.7186,
1367
+ "step": 1860
1368
+ },
1369
+ {
1370
+ "epoch": 1.13,
1371
+ "learning_rate": 9.345000000000001e-06,
1372
+ "loss": 0.7132,
1373
+ "step": 1870
1374
+ },
1375
+ {
1376
+ "epoch": 1.13,
1377
+ "learning_rate": 9.395e-06,
1378
+ "loss": 0.7276,
1379
+ "step": 1880
1380
+ },
1381
+ {
1382
+ "epoch": 1.14,
1383
+ "learning_rate": 9.445e-06,
1384
+ "loss": 0.7048,
1385
+ "step": 1890
1386
+ },
1387
+ {
1388
+ "epoch": 1.15,
1389
+ "learning_rate": 9.495000000000001e-06,
1390
+ "loss": 0.7223,
1391
+ "step": 1900
1392
+ },
1393
+ {
1394
+ "epoch": 1.15,
1395
+ "learning_rate": 9.545000000000001e-06,
1396
+ "loss": 0.721,
1397
+ "step": 1910
1398
+ },
1399
+ {
1400
+ "epoch": 1.16,
1401
+ "learning_rate": 9.595e-06,
1402
+ "loss": 0.717,
1403
+ "step": 1920
1404
+ },
1405
+ {
1406
+ "epoch": 1.16,
1407
+ "learning_rate": 9.645000000000002e-06,
1408
+ "loss": 0.7276,
1409
+ "step": 1930
1410
+ },
1411
+ {
1412
+ "epoch": 1.17,
1413
+ "learning_rate": 9.695000000000001e-06,
1414
+ "loss": 0.7248,
1415
+ "step": 1940
1416
+ },
1417
+ {
1418
+ "epoch": 1.18,
1419
+ "learning_rate": 9.745e-06,
1420
+ "loss": 0.7404,
1421
+ "step": 1950
1422
+ },
1423
+ {
1424
+ "epoch": 1.18,
1425
+ "learning_rate": 9.795000000000002e-06,
1426
+ "loss": 0.7055,
1427
+ "step": 1960
1428
+ },
1429
+ {
1430
+ "epoch": 1.19,
1431
+ "learning_rate": 9.845000000000001e-06,
1432
+ "loss": 0.7278,
1433
+ "step": 1970
1434
+ },
1435
+ {
1436
+ "epoch": 1.2,
1437
+ "learning_rate": 9.895000000000001e-06,
1438
+ "loss": 0.7386,
1439
+ "step": 1980
1440
+ },
1441
+ {
1442
+ "epoch": 1.2,
1443
+ "learning_rate": 9.945e-06,
1444
+ "loss": 0.7316,
1445
+ "step": 1990
1446
+ },
1447
+ {
1448
+ "epoch": 1.21,
1449
+ "learning_rate": 9.995000000000002e-06,
1450
+ "loss": 0.7317,
1451
+ "step": 2000
1452
+ },
1453
+ {
1454
+ "epoch": 1.21,
1455
+ "eval_oasst_export_accuracy": 0.7127776954366549,
1456
+ "eval_oasst_export_loss": 1.2178658246994019,
1457
+ "eval_oasst_export_runtime": 68.1598,
1458
+ "eval_oasst_export_samples_per_second": 30.722,
1459
+ "eval_oasst_export_steps_per_second": 1.291,
1460
+ "step": 2000
1461
+ },
1462
+ {
1463
+ "epoch": 1.21,
1464
+ "eval_code_alpaca_accuracy": 0.8417226867792534,
1465
+ "eval_code_alpaca_loss": 0.5532773733139038,
1466
+ "eval_code_alpaca_runtime": 5.2964,
1467
+ "eval_code_alpaca_samples_per_second": 47.202,
1468
+ "eval_code_alpaca_steps_per_second": 2.077,
1469
+ "step": 2000
1470
+ },
1471
+ {
1472
+ "epoch": 1.21,
1473
+ "eval_evol_v2_accuracy": 0.7965186099357738,
1474
+ "eval_evol_v2_loss": 0.693313717842102,
1475
+ "eval_evol_v2_runtime": 274.7294,
1476
+ "eval_evol_v2_samples_per_second": 26.026,
1477
+ "eval_evol_v2_steps_per_second": 1.085,
1478
+ "step": 2000
1479
+ },
1480
+ {
1481
+ "epoch": 1.21,
1482
+ "eval_nlu_instruct_accuracy": 0.8156692420335936,
1483
+ "eval_nlu_instruct_loss": 0.6785455942153931,
1484
+ "eval_nlu_instruct_runtime": 3477.3559,
1485
+ "eval_nlu_instruct_samples_per_second": 22.437,
1486
+ "eval_nlu_instruct_steps_per_second": 0.935,
1487
+ "step": 2000
1488
+ },
1489
+ {
1490
+ "epoch": 1.21,
1491
+ "eval_grade_school_math_instructions_accuracy": 0.8186011323701678,
1492
+ "eval_grade_school_math_instructions_loss": 0.6188520789146423,
1493
+ "eval_grade_school_math_instructions_runtime": 10.016,
1494
+ "eval_grade_school_math_instructions_samples_per_second": 43.93,
1495
+ "eval_grade_school_math_instructions_steps_per_second": 1.897,
1496
+ "step": 2000
1497
+ },
1498
+ {
1499
+ "epoch": 1.21,
1500
+ "eval_poem_instructions_accuracy": 0.5050254631608515,
1501
+ "eval_poem_instructions_loss": 2.5899674892425537,
1502
+ "eval_poem_instructions_runtime": 21.1807,
1503
+ "eval_poem_instructions_samples_per_second": 16.383,
1504
+ "eval_poem_instructions_steps_per_second": 0.708,
1505
+ "step": 2000
1506
+ },
1507
+ {
1508
+ "epoch": 1.21,
1509
+ "eval_gpt4all_accuracy": 0.7903227041985094,
1510
+ "eval_gpt4all_loss": 0.7531591057777405,
1511
+ "eval_gpt4all_runtime": 3566.1745,
1512
+ "eval_gpt4all_samples_per_second": 21.805,
1513
+ "eval_gpt4all_steps_per_second": 0.909,
1514
+ "step": 2000
1515
+ },
1516
+ {
1517
+ "epoch": 1.21,
1518
+ "eval_joke_accuracy": 0.572693255453135,
1519
+ "eval_joke_loss": 1.7435238361358643,
1520
+ "eval_joke_runtime": 4.873,
1521
+ "eval_joke_samples_per_second": 15.596,
1522
+ "eval_joke_steps_per_second": 0.821,
1523
+ "step": 2000
1524
+ },
1525
+ {
1526
+ "epoch": 1.21,
1527
+ "eval_gsm8k_accuracy": 0.8679970402152154,
1528
+ "eval_gsm8k_loss": 0.4584772288799286,
1529
+ "eval_gsm8k_runtime": 22.6037,
1530
+ "eval_gsm8k_samples_per_second": 58.353,
1531
+ "eval_gsm8k_steps_per_second": 2.433,
1532
+ "step": 2000
1533
+ },
1534
+ {
1535
+ "epoch": 1.21,
1536
+ "learning_rate": 9.985668789808918e-06,
1537
+ "loss": 0.7147,
1538
+ "step": 2010
1539
+ },
1540
+ {
1541
+ "epoch": 1.22,
1542
+ "learning_rate": 9.969745222929938e-06,
1543
+ "loss": 0.7078,
1544
+ "step": 2020
1545
+ },
1546
+ {
1547
+ "epoch": 1.23,
1548
+ "learning_rate": 9.953821656050957e-06,
1549
+ "loss": 0.7232,
1550
+ "step": 2030
1551
+ },
1552
+ {
1553
+ "epoch": 1.23,
1554
+ "learning_rate": 9.937898089171976e-06,
1555
+ "loss": 0.7238,
1556
+ "step": 2040
1557
+ },
1558
+ {
1559
+ "epoch": 1.24,
1560
+ "learning_rate": 9.921974522292994e-06,
1561
+ "loss": 0.7266,
1562
+ "step": 2050
1563
+ },
1564
+ {
1565
+ "epoch": 1.24,
1566
+ "learning_rate": 9.906050955414014e-06,
1567
+ "loss": 0.7208,
1568
+ "step": 2060
1569
+ },
1570
+ {
1571
+ "epoch": 1.25,
1572
+ "learning_rate": 9.890127388535032e-06,
1573
+ "loss": 0.7311,
1574
+ "step": 2070
1575
+ },
1576
+ {
1577
+ "epoch": 1.26,
1578
+ "learning_rate": 9.874203821656053e-06,
1579
+ "loss": 0.7219,
1580
+ "step": 2080
1581
+ },
1582
+ {
1583
+ "epoch": 1.26,
1584
+ "learning_rate": 9.85828025477707e-06,
1585
+ "loss": 0.7223,
1586
+ "step": 2090
1587
+ },
1588
+ {
1589
+ "epoch": 1.27,
1590
+ "learning_rate": 9.84235668789809e-06,
1591
+ "loss": 0.7251,
1592
+ "step": 2100
1593
+ },
1594
+ {
1595
+ "epoch": 1.27,
1596
+ "learning_rate": 9.826433121019109e-06,
1597
+ "loss": 0.7338,
1598
+ "step": 2110
1599
+ },
1600
+ {
1601
+ "epoch": 1.28,
1602
+ "learning_rate": 9.810509554140128e-06,
1603
+ "loss": 0.7347,
1604
+ "step": 2120
1605
+ },
1606
+ {
1607
+ "epoch": 1.29,
1608
+ "learning_rate": 9.794585987261147e-06,
1609
+ "loss": 0.7079,
1610
+ "step": 2130
1611
+ },
1612
+ {
1613
+ "epoch": 1.29,
1614
+ "learning_rate": 9.778662420382167e-06,
1615
+ "loss": 0.7394,
1616
+ "step": 2140
1617
+ },
1618
+ {
1619
+ "epoch": 1.3,
1620
+ "learning_rate": 9.762738853503186e-06,
1621
+ "loss": 0.7344,
1622
+ "step": 2150
1623
+ },
1624
+ {
1625
+ "epoch": 1.3,
1626
+ "learning_rate": 9.746815286624205e-06,
1627
+ "loss": 0.7289,
1628
+ "step": 2160
1629
+ },
1630
+ {
1631
+ "epoch": 1.31,
1632
+ "learning_rate": 9.730891719745224e-06,
1633
+ "loss": 0.7137,
1634
+ "step": 2170
1635
+ },
1636
+ {
1637
+ "epoch": 1.32,
1638
+ "learning_rate": 9.714968152866243e-06,
1639
+ "loss": 0.7208,
1640
+ "step": 2180
1641
+ },
1642
+ {
1643
+ "epoch": 1.32,
1644
+ "learning_rate": 9.699044585987261e-06,
1645
+ "loss": 0.7449,
1646
+ "step": 2190
1647
+ },
1648
+ {
1649
+ "epoch": 1.33,
1650
+ "learning_rate": 9.683121019108282e-06,
1651
+ "loss": 0.7258,
1652
+ "step": 2200
1653
+ },
1654
+ {
1655
+ "epoch": 1.33,
1656
+ "learning_rate": 9.6671974522293e-06,
1657
+ "loss": 0.7285,
1658
+ "step": 2210
1659
+ },
1660
+ {
1661
+ "epoch": 1.34,
1662
+ "learning_rate": 9.65127388535032e-06,
1663
+ "loss": 0.7364,
1664
+ "step": 2220
1665
+ },
1666
+ {
1667
+ "epoch": 1.35,
1668
+ "learning_rate": 9.635350318471338e-06,
1669
+ "loss": 0.726,
1670
+ "step": 2230
1671
+ },
1672
+ {
1673
+ "epoch": 1.35,
1674
+ "learning_rate": 9.619426751592357e-06,
1675
+ "loss": 0.7426,
1676
+ "step": 2240
1677
+ },
1678
+ {
1679
+ "epoch": 1.36,
1680
+ "learning_rate": 9.603503184713376e-06,
1681
+ "loss": 0.7385,
1682
+ "step": 2250
1683
+ },
1684
+ {
1685
+ "epoch": 1.36,
1686
+ "learning_rate": 9.587579617834396e-06,
1687
+ "loss": 0.7227,
1688
+ "step": 2260
1689
+ },
1690
+ {
1691
+ "epoch": 1.37,
1692
+ "learning_rate": 9.571656050955415e-06,
1693
+ "loss": 0.7336,
1694
+ "step": 2270
1695
+ },
1696
+ {
1697
+ "epoch": 1.38,
1698
+ "learning_rate": 9.555732484076434e-06,
1699
+ "loss": 0.7056,
1700
+ "step": 2280
1701
+ },
1702
+ {
1703
+ "epoch": 1.38,
1704
+ "learning_rate": 9.539808917197453e-06,
1705
+ "loss": 0.7305,
1706
+ "step": 2290
1707
+ },
1708
+ {
1709
+ "epoch": 1.39,
1710
+ "learning_rate": 9.523885350318473e-06,
1711
+ "loss": 0.7506,
1712
+ "step": 2300
1713
+ },
1714
+ {
1715
+ "epoch": 1.39,
1716
+ "learning_rate": 9.507961783439492e-06,
1717
+ "loss": 0.7413,
1718
+ "step": 2310
1719
+ },
1720
+ {
1721
+ "epoch": 1.4,
1722
+ "learning_rate": 9.492038216560511e-06,
1723
+ "loss": 0.7101,
1724
+ "step": 2320
1725
+ },
1726
+ {
1727
+ "epoch": 1.41,
1728
+ "learning_rate": 9.476114649681529e-06,
1729
+ "loss": 0.7361,
1730
+ "step": 2330
1731
+ },
1732
+ {
1733
+ "epoch": 1.41,
1734
+ "learning_rate": 9.46019108280255e-06,
1735
+ "loss": 0.7393,
1736
+ "step": 2340
1737
+ },
1738
+ {
1739
+ "epoch": 1.42,
1740
+ "learning_rate": 9.444267515923567e-06,
1741
+ "loss": 0.7135,
1742
+ "step": 2350
1743
+ },
1744
+ {
1745
+ "epoch": 1.42,
1746
+ "learning_rate": 9.428343949044588e-06,
1747
+ "loss": 0.7115,
1748
+ "step": 2360
1749
+ },
1750
+ {
1751
+ "epoch": 1.43,
1752
+ "learning_rate": 9.412420382165605e-06,
1753
+ "loss": 0.7262,
1754
+ "step": 2370
1755
+ },
1756
+ {
1757
+ "epoch": 1.44,
1758
+ "learning_rate": 9.396496815286625e-06,
1759
+ "loss": 0.7393,
1760
+ "step": 2380
1761
+ },
1762
+ {
1763
+ "epoch": 1.44,
1764
+ "learning_rate": 9.380573248407644e-06,
1765
+ "loss": 0.7193,
1766
+ "step": 2390
1767
+ },
1768
+ {
1769
+ "epoch": 1.45,
1770
+ "learning_rate": 9.364649681528663e-06,
1771
+ "loss": 0.7258,
1772
+ "step": 2400
1773
+ },
1774
+ {
1775
+ "epoch": 1.45,
1776
+ "learning_rate": 9.348726114649682e-06,
1777
+ "loss": 0.7296,
1778
+ "step": 2410
1779
+ },
1780
+ {
1781
+ "epoch": 1.46,
1782
+ "learning_rate": 9.332802547770702e-06,
1783
+ "loss": 0.7259,
1784
+ "step": 2420
1785
+ },
1786
+ {
1787
+ "epoch": 1.47,
1788
+ "learning_rate": 9.31687898089172e-06,
1789
+ "loss": 0.7297,
1790
+ "step": 2430
1791
+ },
1792
+ {
1793
+ "epoch": 1.47,
1794
+ "learning_rate": 9.30095541401274e-06,
1795
+ "loss": 0.7239,
1796
+ "step": 2440
1797
+ },
1798
+ {
1799
+ "epoch": 1.48,
1800
+ "learning_rate": 9.28503184713376e-06,
1801
+ "loss": 0.718,
1802
+ "step": 2450
1803
+ },
1804
+ {
1805
+ "epoch": 1.48,
1806
+ "learning_rate": 9.269108280254778e-06,
1807
+ "loss": 0.7239,
1808
+ "step": 2460
1809
+ },
1810
+ {
1811
+ "epoch": 1.49,
1812
+ "learning_rate": 9.253184713375796e-06,
1813
+ "loss": 0.7307,
1814
+ "step": 2470
1815
+ },
1816
+ {
1817
+ "epoch": 1.5,
1818
+ "learning_rate": 9.237261146496817e-06,
1819
+ "loss": 0.7294,
1820
+ "step": 2480
1821
+ },
1822
+ {
1823
+ "epoch": 1.5,
1824
+ "learning_rate": 9.221337579617834e-06,
1825
+ "loss": 0.7326,
1826
+ "step": 2490
1827
+ },
1828
+ {
1829
+ "epoch": 1.51,
1830
+ "learning_rate": 9.205414012738855e-06,
1831
+ "loss": 0.7295,
1832
+ "step": 2500
1833
+ },
1834
+ {
1835
+ "epoch": 1.51,
1836
+ "eval_oasst_export_accuracy": 0.7117135947727322,
1837
+ "eval_oasst_export_loss": 1.234184741973877,
1838
+ "eval_oasst_export_runtime": 68.1505,
1839
+ "eval_oasst_export_samples_per_second": 30.726,
1840
+ "eval_oasst_export_steps_per_second": 1.291,
1841
+ "step": 2500
1842
+ },
1843
+ {
1844
+ "epoch": 1.51,
1845
+ "eval_code_alpaca_accuracy": 0.8403563425698202,
1846
+ "eval_code_alpaca_loss": 0.557296872138977,
1847
+ "eval_code_alpaca_runtime": 4.9636,
1848
+ "eval_code_alpaca_samples_per_second": 50.367,
1849
+ "eval_code_alpaca_steps_per_second": 2.216,
1850
+ "step": 2500
1851
+ },
1852
+ {
1853
+ "epoch": 1.51,
1854
+ "eval_evol_v2_accuracy": 0.7950730371609088,
1855
+ "eval_evol_v2_loss": 0.70171058177948,
1856
+ "eval_evol_v2_runtime": 275.0985,
1857
+ "eval_evol_v2_samples_per_second": 25.991,
1858
+ "eval_evol_v2_steps_per_second": 1.083,
1859
+ "step": 2500
1860
+ },
1861
+ {
1862
+ "epoch": 1.51,
1863
+ "eval_nlu_instruct_accuracy": 0.8178751700867107,
1864
+ "eval_nlu_instruct_loss": 0.677690327167511,
1865
+ "eval_nlu_instruct_runtime": 3484.6437,
1866
+ "eval_nlu_instruct_samples_per_second": 22.391,
1867
+ "eval_nlu_instruct_steps_per_second": 0.933,
1868
+ "step": 2500
1869
+ },
1870
+ {
1871
+ "epoch": 1.51,
1872
+ "eval_grade_school_math_instructions_accuracy": 0.8254099960952753,
1873
+ "eval_grade_school_math_instructions_loss": 0.5937632918357849,
1874
+ "eval_grade_school_math_instructions_runtime": 8.3622,
1875
+ "eval_grade_school_math_instructions_samples_per_second": 52.618,
1876
+ "eval_grade_school_math_instructions_steps_per_second": 2.272,
1877
+ "step": 2500
1878
+ },
1879
+ {
1880
+ "epoch": 1.51,
1881
+ "eval_poem_instructions_accuracy": 0.5039080150947083,
1882
+ "eval_poem_instructions_loss": 2.603971481323242,
1883
+ "eval_poem_instructions_runtime": 21.2045,
1884
+ "eval_poem_instructions_samples_per_second": 16.364,
1885
+ "eval_poem_instructions_steps_per_second": 0.707,
1886
+ "step": 2500
1887
+ },
1888
+ {
1889
+ "epoch": 1.51,
1890
+ "eval_gpt4all_accuracy": 0.7901557992616548,
1891
+ "eval_gpt4all_loss": 0.7601524591445923,
1892
+ "eval_gpt4all_runtime": 3565.0345,
1893
+ "eval_gpt4all_samples_per_second": 21.812,
1894
+ "eval_gpt4all_steps_per_second": 0.909,
1895
+ "step": 2500
1896
+ },
1897
+ {
1898
+ "epoch": 1.51,
1899
+ "eval_joke_accuracy": 0.5776216117550425,
1900
+ "eval_joke_loss": 1.7364308834075928,
1901
+ "eval_joke_runtime": 9.9885,
1902
+ "eval_joke_samples_per_second": 7.609,
1903
+ "eval_joke_steps_per_second": 0.4,
1904
+ "step": 2500
1905
+ },
1906
+ {
1907
+ "epoch": 1.51,
1908
+ "eval_gsm8k_accuracy": 0.8719721000131928,
1909
+ "eval_gsm8k_loss": 0.4444270133972168,
1910
+ "eval_gsm8k_runtime": 22.8112,
1911
+ "eval_gsm8k_samples_per_second": 57.822,
1912
+ "eval_gsm8k_steps_per_second": 2.411,
1913
+ "step": 2500
1914
+ },
1915
+ {
1916
+ "epoch": 1.52,
1917
+ "learning_rate": 9.189490445859873e-06,
1918
+ "loss": 0.7294,
1919
+ "step": 2510
1920
+ },
1921
+ {
1922
+ "epoch": 1.52,
1923
+ "learning_rate": 9.173566878980892e-06,
1924
+ "loss": 0.7326,
1925
+ "step": 2520
1926
+ },
1927
+ {
1928
+ "epoch": 1.53,
1929
+ "learning_rate": 9.157643312101911e-06,
1930
+ "loss": 0.7375,
1931
+ "step": 2530
1932
+ },
1933
+ {
1934
+ "epoch": 1.53,
1935
+ "learning_rate": 9.14171974522293e-06,
1936
+ "loss": 0.7289,
1937
+ "step": 2540
1938
+ },
1939
+ {
1940
+ "epoch": 1.54,
1941
+ "learning_rate": 9.12579617834395e-06,
1942
+ "loss": 0.7277,
1943
+ "step": 2550
1944
+ },
1945
+ {
1946
+ "epoch": 1.55,
1947
+ "learning_rate": 9.109872611464969e-06,
1948
+ "loss": 0.7215,
1949
+ "step": 2560
1950
+ },
1951
+ {
1952
+ "epoch": 1.55,
1953
+ "learning_rate": 9.093949044585988e-06,
1954
+ "loss": 0.7397,
1955
+ "step": 2570
1956
+ },
1957
+ {
1958
+ "epoch": 1.56,
1959
+ "learning_rate": 9.078025477707008e-06,
1960
+ "loss": 0.7378,
1961
+ "step": 2580
1962
+ },
1963
+ {
1964
+ "epoch": 1.56,
1965
+ "learning_rate": 9.062101910828027e-06,
1966
+ "loss": 0.7287,
1967
+ "step": 2590
1968
+ },
1969
+ {
1970
+ "epoch": 1.57,
1971
+ "learning_rate": 9.046178343949046e-06,
1972
+ "loss": 0.7254,
1973
+ "step": 2600
1974
+ },
1975
+ {
1976
+ "epoch": 1.58,
1977
+ "learning_rate": 9.030254777070064e-06,
1978
+ "loss": 0.7291,
1979
+ "step": 2610
1980
+ },
1981
+ {
1982
+ "epoch": 1.58,
1983
+ "learning_rate": 9.014331210191084e-06,
1984
+ "loss": 0.7239,
1985
+ "step": 2620
1986
+ },
1987
+ {
1988
+ "epoch": 1.59,
1989
+ "learning_rate": 8.998407643312102e-06,
1990
+ "loss": 0.7285,
1991
+ "step": 2630
1992
+ },
1993
+ {
1994
+ "epoch": 1.59,
1995
+ "learning_rate": 8.982484076433123e-06,
1996
+ "loss": 0.7217,
1997
+ "step": 2640
1998
+ },
1999
+ {
2000
+ "epoch": 1.6,
2001
+ "learning_rate": 8.96656050955414e-06,
2002
+ "loss": 0.7123,
2003
+ "step": 2650
2004
+ },
2005
+ {
2006
+ "epoch": 1.61,
2007
+ "learning_rate": 8.95063694267516e-06,
2008
+ "loss": 0.7262,
2009
+ "step": 2660
2010
+ },
2011
+ {
2012
+ "epoch": 1.61,
2013
+ "learning_rate": 8.934713375796179e-06,
2014
+ "loss": 0.7311,
2015
+ "step": 2670
2016
+ },
2017
+ {
2018
+ "epoch": 1.62,
2019
+ "learning_rate": 8.918789808917198e-06,
2020
+ "loss": 0.7249,
2021
+ "step": 2680
2022
+ },
2023
+ {
2024
+ "epoch": 1.62,
2025
+ "learning_rate": 8.902866242038217e-06,
2026
+ "loss": 0.7244,
2027
+ "step": 2690
2028
+ },
2029
+ {
2030
+ "epoch": 1.63,
2031
+ "learning_rate": 8.886942675159237e-06,
2032
+ "loss": 0.7417,
2033
+ "step": 2700
2034
+ },
2035
+ {
2036
+ "epoch": 1.64,
2037
+ "learning_rate": 8.871019108280256e-06,
2038
+ "loss": 0.7314,
2039
+ "step": 2710
2040
+ },
2041
+ {
2042
+ "epoch": 1.64,
2043
+ "learning_rate": 8.855095541401275e-06,
2044
+ "loss": 0.7499,
2045
+ "step": 2720
2046
+ },
2047
+ {
2048
+ "epoch": 1.65,
2049
+ "learning_rate": 8.839171974522294e-06,
2050
+ "loss": 0.7364,
2051
+ "step": 2730
2052
+ },
2053
+ {
2054
+ "epoch": 1.65,
2055
+ "learning_rate": 8.823248407643313e-06,
2056
+ "loss": 0.7433,
2057
+ "step": 2740
2058
+ },
2059
+ {
2060
+ "epoch": 1.66,
2061
+ "learning_rate": 8.807324840764333e-06,
2062
+ "loss": 0.7288,
2063
+ "step": 2750
2064
+ },
2065
+ {
2066
+ "epoch": 1.67,
2067
+ "learning_rate": 8.791401273885352e-06,
2068
+ "loss": 0.7286,
2069
+ "step": 2760
2070
+ },
2071
+ {
2072
+ "epoch": 1.67,
2073
+ "learning_rate": 8.77547770700637e-06,
2074
+ "loss": 0.7229,
2075
+ "step": 2770
2076
+ },
2077
+ {
2078
+ "epoch": 1.68,
2079
+ "learning_rate": 8.75955414012739e-06,
2080
+ "loss": 0.7434,
2081
+ "step": 2780
2082
+ },
2083
+ {
2084
+ "epoch": 1.68,
2085
+ "learning_rate": 8.743630573248408e-06,
2086
+ "loss": 0.7283,
2087
+ "step": 2790
2088
+ },
2089
+ {
2090
+ "epoch": 1.69,
2091
+ "learning_rate": 8.727707006369427e-06,
2092
+ "loss": 0.7304,
2093
+ "step": 2800
2094
+ },
2095
+ {
2096
+ "epoch": 1.7,
2097
+ "learning_rate": 8.711783439490446e-06,
2098
+ "loss": 0.7212,
2099
+ "step": 2810
2100
+ },
2101
+ {
2102
+ "epoch": 1.7,
2103
+ "learning_rate": 8.695859872611466e-06,
2104
+ "loss": 0.749,
2105
+ "step": 2820
2106
+ },
2107
+ {
2108
+ "epoch": 1.71,
2109
+ "learning_rate": 8.679936305732485e-06,
2110
+ "loss": 0.7436,
2111
+ "step": 2830
2112
+ },
2113
+ {
2114
+ "epoch": 1.71,
2115
+ "learning_rate": 8.664012738853504e-06,
2116
+ "loss": 0.7186,
2117
+ "step": 2840
2118
+ },
2119
+ {
2120
+ "epoch": 1.72,
2121
+ "learning_rate": 8.648089171974523e-06,
2122
+ "loss": 0.7285,
2123
+ "step": 2850
2124
+ },
2125
+ {
2126
+ "epoch": 1.73,
2127
+ "learning_rate": 8.63216560509554e-06,
2128
+ "loss": 0.7354,
2129
+ "step": 2860
2130
+ },
2131
+ {
2132
+ "epoch": 1.73,
2133
+ "learning_rate": 8.616242038216562e-06,
2134
+ "loss": 0.7232,
2135
+ "step": 2870
2136
+ },
2137
+ {
2138
+ "epoch": 1.74,
2139
+ "learning_rate": 8.60031847133758e-06,
2140
+ "loss": 0.7225,
2141
+ "step": 2880
2142
+ },
2143
+ {
2144
+ "epoch": 1.74,
2145
+ "learning_rate": 8.5843949044586e-06,
2146
+ "loss": 0.7235,
2147
+ "step": 2890
2148
+ },
2149
+ {
2150
+ "epoch": 1.75,
2151
+ "learning_rate": 8.568471337579618e-06,
2152
+ "loss": 0.746,
2153
+ "step": 2900
2154
+ },
2155
+ {
2156
+ "epoch": 1.76,
2157
+ "learning_rate": 8.552547770700637e-06,
2158
+ "loss": 0.74,
2159
+ "step": 2910
2160
+ },
2161
+ {
2162
+ "epoch": 1.76,
2163
+ "learning_rate": 8.536624203821656e-06,
2164
+ "loss": 0.7431,
2165
+ "step": 2920
2166
+ },
2167
+ {
2168
+ "epoch": 1.77,
2169
+ "learning_rate": 8.520700636942675e-06,
2170
+ "loss": 0.7353,
2171
+ "step": 2930
2172
+ },
2173
+ {
2174
+ "epoch": 1.77,
2175
+ "learning_rate": 8.504777070063695e-06,
2176
+ "loss": 0.7127,
2177
+ "step": 2940
2178
+ },
2179
+ {
2180
+ "epoch": 1.78,
2181
+ "learning_rate": 8.488853503184714e-06,
2182
+ "loss": 0.7359,
2183
+ "step": 2950
2184
+ },
2185
+ {
2186
+ "epoch": 1.79,
2187
+ "learning_rate": 8.472929936305733e-06,
2188
+ "loss": 0.7182,
2189
+ "step": 2960
2190
+ },
2191
+ {
2192
+ "epoch": 1.79,
2193
+ "learning_rate": 8.457006369426752e-06,
2194
+ "loss": 0.7232,
2195
+ "step": 2970
2196
+ },
2197
+ {
2198
+ "epoch": 1.8,
2199
+ "learning_rate": 8.441082802547772e-06,
2200
+ "loss": 0.7422,
2201
+ "step": 2980
2202
+ },
2203
+ {
2204
+ "epoch": 1.8,
2205
+ "learning_rate": 8.42515923566879e-06,
2206
+ "loss": 0.7237,
2207
+ "step": 2990
2208
+ },
2209
+ {
2210
+ "epoch": 1.81,
2211
+ "learning_rate": 8.409235668789808e-06,
2212
+ "loss": 0.7376,
2213
+ "step": 3000
2214
+ },
2215
+ {
2216
+ "epoch": 1.81,
2217
+ "eval_oasst_export_accuracy": 0.7091762214038781,
2218
+ "eval_oasst_export_loss": 1.2443393468856812,
2219
+ "eval_oasst_export_runtime": 67.1263,
2220
+ "eval_oasst_export_samples_per_second": 31.195,
2221
+ "eval_oasst_export_steps_per_second": 1.311,
2222
+ "step": 3000
2223
+ },
2224
+ {
2225
+ "epoch": 1.81,
2226
+ "eval_code_alpaca_accuracy": 0.8380608842979723,
2227
+ "eval_code_alpaca_loss": 0.5666581988334656,
2228
+ "eval_code_alpaca_runtime": 6.0038,
2229
+ "eval_code_alpaca_samples_per_second": 41.641,
2230
+ "eval_code_alpaca_steps_per_second": 1.832,
2231
+ "step": 3000
2232
+ },
2233
+ {
2234
+ "epoch": 1.81,
2235
+ "eval_evol_v2_accuracy": 0.7950646930701502,
2236
+ "eval_evol_v2_loss": 0.7063774466514587,
2237
+ "eval_evol_v2_runtime": 274.2256,
2238
+ "eval_evol_v2_samples_per_second": 26.073,
2239
+ "eval_evol_v2_steps_per_second": 1.087,
2240
+ "step": 3000
2241
+ },
2242
+ {
2243
+ "epoch": 1.81,
2244
+ "eval_nlu_instruct_accuracy": 0.8185144938579312,
2245
+ "eval_nlu_instruct_loss": 0.6780356764793396,
2246
+ "eval_nlu_instruct_runtime": 3479.7549,
2247
+ "eval_nlu_instruct_samples_per_second": 22.422,
2248
+ "eval_nlu_instruct_steps_per_second": 0.934,
2249
+ "step": 3000
2250
+ },
2251
+ {
2252
+ "epoch": 1.81,
2253
+ "eval_grade_school_math_instructions_accuracy": 0.8272403358063256,
2254
+ "eval_grade_school_math_instructions_loss": 0.5888405442237854,
2255
+ "eval_grade_school_math_instructions_runtime": 12.6644,
2256
+ "eval_grade_school_math_instructions_samples_per_second": 34.743,
2257
+ "eval_grade_school_math_instructions_steps_per_second": 1.5,
2258
+ "step": 3000
2259
+ },
2260
+ {
2261
+ "epoch": 1.81,
2262
+ "eval_poem_instructions_accuracy": 0.5025932057398731,
2263
+ "eval_poem_instructions_loss": 2.6240994930267334,
2264
+ "eval_poem_instructions_runtime": 21.1971,
2265
+ "eval_poem_instructions_samples_per_second": 16.37,
2266
+ "eval_poem_instructions_steps_per_second": 0.708,
2267
+ "step": 3000
2268
+ },
2269
+ {
2270
+ "epoch": 1.81,
2271
+ "eval_gpt4all_accuracy": 0.7898066834593447,
2272
+ "eval_gpt4all_loss": 0.7626320719718933,
2273
+ "eval_gpt4all_runtime": 3567.7466,
2274
+ "eval_gpt4all_samples_per_second": 21.796,
2275
+ "eval_gpt4all_steps_per_second": 0.908,
2276
+ "step": 3000
2277
+ },
2278
+ {
2279
+ "epoch": 1.81,
2280
+ "eval_joke_accuracy": 0.5721456603084786,
2281
+ "eval_joke_loss": 1.7529810667037964,
2282
+ "eval_joke_runtime": 5.6117,
2283
+ "eval_joke_samples_per_second": 13.543,
2284
+ "eval_joke_steps_per_second": 0.713,
2285
+ "step": 3000
2286
+ },
2287
+ {
2288
+ "epoch": 1.81,
2289
+ "eval_gsm8k_accuracy": 0.8744156432656293,
2290
+ "eval_gsm8k_loss": 0.43758735060691833,
2291
+ "eval_gsm8k_runtime": 22.8872,
2292
+ "eval_gsm8k_samples_per_second": 57.63,
2293
+ "eval_gsm8k_steps_per_second": 2.403,
2294
+ "step": 3000
2295
+ },
2296
+ {
2297
+ "epoch": 1.82,
2298
+ "learning_rate": 8.39331210191083e-06,
2299
+ "loss": 0.7289,
2300
+ "step": 3010
2301
+ },
2302
+ {
2303
+ "epoch": 1.82,
2304
+ "learning_rate": 8.377388535031847e-06,
2305
+ "loss": 0.7262,
2306
+ "step": 3020
2307
+ },
2308
+ {
2309
+ "epoch": 1.83,
2310
+ "learning_rate": 8.361464968152868e-06,
2311
+ "loss": 0.759,
2312
+ "step": 3030
2313
+ },
2314
+ {
2315
+ "epoch": 1.83,
2316
+ "learning_rate": 8.345541401273885e-06,
2317
+ "loss": 0.7493,
2318
+ "step": 3040
2319
+ },
2320
+ {
2321
+ "epoch": 1.84,
2322
+ "learning_rate": 8.329617834394904e-06,
2323
+ "loss": 0.7508,
2324
+ "step": 3050
2325
+ },
2326
+ {
2327
+ "epoch": 1.85,
2328
+ "learning_rate": 8.313694267515924e-06,
2329
+ "loss": 0.7269,
2330
+ "step": 3060
2331
+ },
2332
+ {
2333
+ "epoch": 1.85,
2334
+ "learning_rate": 8.297770700636943e-06,
2335
+ "loss": 0.7263,
2336
+ "step": 3070
2337
+ },
2338
+ {
2339
+ "epoch": 1.86,
2340
+ "learning_rate": 8.281847133757962e-06,
2341
+ "loss": 0.7285,
2342
+ "step": 3080
2343
+ },
2344
+ {
2345
+ "epoch": 1.87,
2346
+ "learning_rate": 8.265923566878981e-06,
2347
+ "loss": 0.7372,
2348
+ "step": 3090
2349
+ },
2350
+ {
2351
+ "epoch": 1.87,
2352
+ "learning_rate": 8.25e-06,
2353
+ "loss": 0.7289,
2354
+ "step": 3100
2355
+ },
2356
+ {
2357
+ "epoch": 1.88,
2358
+ "learning_rate": 8.23407643312102e-06,
2359
+ "loss": 0.7393,
2360
+ "step": 3110
2361
+ },
2362
+ {
2363
+ "epoch": 1.88,
2364
+ "learning_rate": 8.218152866242039e-06,
2365
+ "loss": 0.744,
2366
+ "step": 3120
2367
+ },
2368
+ {
2369
+ "epoch": 1.89,
2370
+ "learning_rate": 8.202229299363058e-06,
2371
+ "loss": 0.7276,
2372
+ "step": 3130
2373
+ },
2374
+ {
2375
+ "epoch": 1.9,
2376
+ "learning_rate": 8.186305732484076e-06,
2377
+ "loss": 0.7264,
2378
+ "step": 3140
2379
+ },
2380
+ {
2381
+ "epoch": 1.9,
2382
+ "learning_rate": 8.170382165605097e-06,
2383
+ "loss": 0.7404,
2384
+ "step": 3150
2385
+ },
2386
+ {
2387
+ "epoch": 1.91,
2388
+ "learning_rate": 8.154458598726114e-06,
2389
+ "loss": 0.7281,
2390
+ "step": 3160
2391
+ },
2392
+ {
2393
+ "epoch": 1.91,
2394
+ "learning_rate": 8.138535031847135e-06,
2395
+ "loss": 0.7194,
2396
+ "step": 3170
2397
+ },
2398
+ {
2399
+ "epoch": 1.92,
2400
+ "learning_rate": 8.122611464968153e-06,
2401
+ "loss": 0.7375,
2402
+ "step": 3180
2403
+ },
2404
+ {
2405
+ "epoch": 1.93,
2406
+ "learning_rate": 8.106687898089172e-06,
2407
+ "loss": 0.7382,
2408
+ "step": 3190
2409
+ },
2410
+ {
2411
+ "epoch": 1.93,
2412
+ "learning_rate": 8.090764331210191e-06,
2413
+ "loss": 0.743,
2414
+ "step": 3200
2415
+ },
2416
+ {
2417
+ "epoch": 1.94,
2418
+ "learning_rate": 8.07484076433121e-06,
2419
+ "loss": 0.735,
2420
+ "step": 3210
2421
+ },
2422
+ {
2423
+ "epoch": 1.94,
2424
+ "learning_rate": 8.05891719745223e-06,
2425
+ "loss": 0.7384,
2426
+ "step": 3220
2427
+ },
2428
+ {
2429
+ "epoch": 1.95,
2430
+ "learning_rate": 8.042993630573249e-06,
2431
+ "loss": 0.7263,
2432
+ "step": 3230
2433
+ },
2434
+ {
2435
+ "epoch": 1.96,
2436
+ "learning_rate": 8.027070063694268e-06,
2437
+ "loss": 0.7501,
2438
+ "step": 3240
2439
+ },
2440
+ {
2441
+ "epoch": 1.96,
2442
+ "learning_rate": 8.011146496815287e-06,
2443
+ "loss": 0.7335,
2444
+ "step": 3250
2445
+ },
2446
+ {
2447
+ "epoch": 1.97,
2448
+ "learning_rate": 7.995222929936307e-06,
2449
+ "loss": 0.7409,
2450
+ "step": 3260
2451
+ },
2452
+ {
2453
+ "epoch": 1.97,
2454
+ "learning_rate": 7.979299363057326e-06,
2455
+ "loss": 0.7332,
2456
+ "step": 3270
2457
+ },
2458
+ {
2459
+ "epoch": 1.98,
2460
+ "learning_rate": 7.963375796178345e-06,
2461
+ "loss": 0.7342,
2462
+ "step": 3280
2463
+ },
2464
+ {
2465
+ "epoch": 1.99,
2466
+ "learning_rate": 7.947452229299364e-06,
2467
+ "loss": 0.7325,
2468
+ "step": 3290
2469
+ },
2470
+ {
2471
+ "epoch": 1.99,
2472
+ "learning_rate": 7.931528662420382e-06,
2473
+ "loss": 0.7248,
2474
+ "step": 3300
2475
+ },
2476
+ {
2477
+ "epoch": 2.0,
2478
+ "learning_rate": 7.915605095541403e-06,
2479
+ "loss": 0.7299,
2480
+ "step": 3310
2481
+ },
2482
+ {
2483
+ "epoch": 2.0,
2484
+ "learning_rate": 7.89968152866242e-06,
2485
+ "loss": 0.6739,
2486
+ "step": 3320
2487
+ },
2488
+ {
2489
+ "epoch": 2.01,
2490
+ "learning_rate": 7.88375796178344e-06,
2491
+ "loss": 0.6283,
2492
+ "step": 3330
2493
+ },
2494
+ {
2495
+ "epoch": 2.02,
2496
+ "learning_rate": 7.867834394904459e-06,
2497
+ "loss": 0.6366,
2498
+ "step": 3340
2499
+ },
2500
+ {
2501
+ "epoch": 2.02,
2502
+ "learning_rate": 7.851910828025478e-06,
2503
+ "loss": 0.6461,
2504
+ "step": 3350
2505
+ },
2506
+ {
2507
+ "epoch": 2.03,
2508
+ "learning_rate": 7.835987261146497e-06,
2509
+ "loss": 0.6455,
2510
+ "step": 3360
2511
+ },
2512
+ {
2513
+ "epoch": 2.03,
2514
+ "learning_rate": 7.820063694267516e-06,
2515
+ "loss": 0.6126,
2516
+ "step": 3370
2517
+ },
2518
+ {
2519
+ "epoch": 2.04,
2520
+ "learning_rate": 7.804140127388536e-06,
2521
+ "loss": 0.6255,
2522
+ "step": 3380
2523
+ },
2524
+ {
2525
+ "epoch": 2.05,
2526
+ "learning_rate": 7.788216560509555e-06,
2527
+ "loss": 0.6493,
2528
+ "step": 3390
2529
+ },
2530
+ {
2531
+ "epoch": 2.05,
2532
+ "learning_rate": 7.772292993630574e-06,
2533
+ "loss": 0.6175,
2534
+ "step": 3400
2535
+ },
2536
+ {
2537
+ "epoch": 2.06,
2538
+ "learning_rate": 7.756369426751593e-06,
2539
+ "loss": 0.6309,
2540
+ "step": 3410
2541
+ },
2542
+ {
2543
+ "epoch": 2.06,
2544
+ "learning_rate": 7.740445859872613e-06,
2545
+ "loss": 0.6424,
2546
+ "step": 3420
2547
+ },
2548
+ {
2549
+ "epoch": 2.07,
2550
+ "learning_rate": 7.724522292993632e-06,
2551
+ "loss": 0.6225,
2552
+ "step": 3430
2553
+ },
2554
+ {
2555
+ "epoch": 2.08,
2556
+ "learning_rate": 7.70859872611465e-06,
2557
+ "loss": 0.63,
2558
+ "step": 3440
2559
+ },
2560
+ {
2561
+ "epoch": 2.08,
2562
+ "learning_rate": 7.69267515923567e-06,
2563
+ "loss": 0.6237,
2564
+ "step": 3450
2565
+ },
2566
+ {
2567
+ "epoch": 2.09,
2568
+ "learning_rate": 7.676751592356688e-06,
2569
+ "loss": 0.6341,
2570
+ "step": 3460
2571
+ },
2572
+ {
2573
+ "epoch": 2.09,
2574
+ "learning_rate": 7.660828025477709e-06,
2575
+ "loss": 0.6243,
2576
+ "step": 3470
2577
+ },
2578
+ {
2579
+ "epoch": 2.1,
2580
+ "learning_rate": 7.644904458598726e-06,
2581
+ "loss": 0.635,
2582
+ "step": 3480
2583
+ },
2584
+ {
2585
+ "epoch": 2.11,
2586
+ "learning_rate": 7.628980891719746e-06,
2587
+ "loss": 0.6387,
2588
+ "step": 3490
2589
+ },
2590
+ {
2591
+ "epoch": 2.11,
2592
+ "learning_rate": 7.613057324840765e-06,
2593
+ "loss": 0.635,
2594
+ "step": 3500
2595
+ },
2596
+ {
2597
+ "epoch": 2.11,
2598
+ "eval_oasst_export_accuracy": 0.7073963863648168,
2599
+ "eval_oasst_export_loss": 1.2838494777679443,
2600
+ "eval_oasst_export_runtime": 68.1304,
2601
+ "eval_oasst_export_samples_per_second": 30.735,
2602
+ "eval_oasst_export_steps_per_second": 1.292,
2603
+ "step": 3500
2604
+ },
2605
+ {
2606
+ "epoch": 2.11,
2607
+ "eval_code_alpaca_accuracy": 0.8387713832868776,
2608
+ "eval_code_alpaca_loss": 0.5747187733650208,
2609
+ "eval_code_alpaca_runtime": 4.9938,
2610
+ "eval_code_alpaca_samples_per_second": 50.062,
2611
+ "eval_code_alpaca_steps_per_second": 2.203,
2612
+ "step": 3500
2613
+ },
2614
+ {
2615
+ "epoch": 2.11,
2616
+ "eval_evol_v2_accuracy": 0.7953503870352282,
2617
+ "eval_evol_v2_loss": 0.7144909501075745,
2618
+ "eval_evol_v2_runtime": 274.878,
2619
+ "eval_evol_v2_samples_per_second": 26.012,
2620
+ "eval_evol_v2_steps_per_second": 1.084,
2621
+ "step": 3500
2622
+ },
2623
+ {
2624
+ "epoch": 2.11,
2625
+ "eval_nlu_instruct_accuracy": 0.82024077537588,
2626
+ "eval_nlu_instruct_loss": 0.6822787523269653,
2627
+ "eval_nlu_instruct_runtime": 3475.2318,
2628
+ "eval_nlu_instruct_samples_per_second": 22.451,
2629
+ "eval_nlu_instruct_steps_per_second": 0.935,
2630
+ "step": 3500
2631
+ },
2632
+ {
2633
+ "epoch": 2.11,
2634
+ "eval_grade_school_math_instructions_accuracy": 0.8345128855915658,
2635
+ "eval_grade_school_math_instructions_loss": 0.5768687725067139,
2636
+ "eval_grade_school_math_instructions_runtime": 11.7518,
2637
+ "eval_grade_school_math_instructions_samples_per_second": 37.441,
2638
+ "eval_grade_school_math_instructions_steps_per_second": 1.617,
2639
+ "step": 3500
2640
+ },
2641
+ {
2642
+ "epoch": 2.11,
2643
+ "eval_poem_instructions_accuracy": 0.5003298676864057,
2644
+ "eval_poem_instructions_loss": 2.657353162765503,
2645
+ "eval_poem_instructions_runtime": 21.1728,
2646
+ "eval_poem_instructions_samples_per_second": 16.389,
2647
+ "eval_poem_instructions_steps_per_second": 0.708,
2648
+ "step": 3500
2649
+ },
2650
+ {
2651
+ "epoch": 2.11,
2652
+ "eval_gpt4all_accuracy": 0.7895927579325352,
2653
+ "eval_gpt4all_loss": 0.7741940021514893,
2654
+ "eval_gpt4all_runtime": 3567.353,
2655
+ "eval_gpt4all_samples_per_second": 21.798,
2656
+ "eval_gpt4all_steps_per_second": 0.909,
2657
+ "step": 3500
2658
+ },
2659
+ {
2660
+ "epoch": 2.11,
2661
+ "eval_joke_accuracy": 0.569955279729853,
2662
+ "eval_joke_loss": 1.8059210777282715,
2663
+ "eval_joke_runtime": 3.9667,
2664
+ "eval_joke_samples_per_second": 19.159,
2665
+ "eval_joke_steps_per_second": 1.008,
2666
+ "step": 3500
2667
+ },
2668
+ {
2669
+ "epoch": 2.11,
2670
+ "eval_gsm8k_accuracy": 0.8798132352856823,
2671
+ "eval_gsm8k_loss": 0.4235023558139801,
2672
+ "eval_gsm8k_runtime": 22.6764,
2673
+ "eval_gsm8k_samples_per_second": 58.166,
2674
+ "eval_gsm8k_steps_per_second": 2.425,
2675
+ "step": 3500
2676
+ },
2677
+ {
2678
+ "epoch": 2.12,
2679
+ "learning_rate": 7.597133757961784e-06,
2680
+ "loss": 0.6345,
2681
+ "step": 3510
2682
+ },
2683
+ {
2684
+ "epoch": 2.12,
2685
+ "learning_rate": 7.581210191082803e-06,
2686
+ "loss": 0.6488,
2687
+ "step": 3520
2688
+ },
2689
+ {
2690
+ "epoch": 2.13,
2691
+ "learning_rate": 7.565286624203822e-06,
2692
+ "loss": 0.6461,
2693
+ "step": 3530
2694
+ },
2695
+ {
2696
+ "epoch": 2.14,
2697
+ "learning_rate": 7.549363057324841e-06,
2698
+ "loss": 0.6517,
2699
+ "step": 3540
2700
+ },
2701
+ {
2702
+ "epoch": 2.14,
2703
+ "learning_rate": 7.533439490445861e-06,
2704
+ "loss": 0.63,
2705
+ "step": 3550
2706
+ },
2707
+ {
2708
+ "epoch": 2.15,
2709
+ "learning_rate": 7.517515923566879e-06,
2710
+ "loss": 0.6214,
2711
+ "step": 3560
2712
+ },
2713
+ {
2714
+ "epoch": 2.15,
2715
+ "learning_rate": 7.501592356687899e-06,
2716
+ "loss": 0.6433,
2717
+ "step": 3570
2718
+ },
2719
+ {
2720
+ "epoch": 2.16,
2721
+ "learning_rate": 7.485668789808918e-06,
2722
+ "loss": 0.6228,
2723
+ "step": 3580
2724
+ },
2725
+ {
2726
+ "epoch": 2.17,
2727
+ "learning_rate": 7.469745222929937e-06,
2728
+ "loss": 0.631,
2729
+ "step": 3590
2730
+ },
2731
+ {
2732
+ "epoch": 2.17,
2733
+ "learning_rate": 7.453821656050956e-06,
2734
+ "loss": 0.6441,
2735
+ "step": 3600
2736
+ },
2737
+ {
2738
+ "epoch": 2.18,
2739
+ "learning_rate": 7.437898089171975e-06,
2740
+ "loss": 0.6388,
2741
+ "step": 3610
2742
+ },
2743
+ {
2744
+ "epoch": 2.19,
2745
+ "learning_rate": 7.421974522292994e-06,
2746
+ "loss": 0.6343,
2747
+ "step": 3620
2748
+ },
2749
+ {
2750
+ "epoch": 2.19,
2751
+ "learning_rate": 7.406050955414014e-06,
2752
+ "loss": 0.615,
2753
+ "step": 3630
2754
+ },
2755
+ {
2756
+ "epoch": 2.2,
2757
+ "learning_rate": 7.390127388535032e-06,
2758
+ "loss": 0.6496,
2759
+ "step": 3640
2760
+ },
2761
+ {
2762
+ "epoch": 2.2,
2763
+ "learning_rate": 7.374203821656052e-06,
2764
+ "loss": 0.6316,
2765
+ "step": 3650
2766
+ },
2767
+ {
2768
+ "epoch": 2.21,
2769
+ "learning_rate": 7.358280254777071e-06,
2770
+ "loss": 0.635,
2771
+ "step": 3660
2772
+ },
2773
+ {
2774
+ "epoch": 2.22,
2775
+ "learning_rate": 7.34235668789809e-06,
2776
+ "loss": 0.6481,
2777
+ "step": 3670
2778
+ },
2779
+ {
2780
+ "epoch": 2.22,
2781
+ "learning_rate": 7.326433121019108e-06,
2782
+ "loss": 0.6395,
2783
+ "step": 3680
2784
+ },
2785
+ {
2786
+ "epoch": 2.23,
2787
+ "learning_rate": 7.310509554140128e-06,
2788
+ "loss": 0.6349,
2789
+ "step": 3690
2790
+ },
2791
+ {
2792
+ "epoch": 2.23,
2793
+ "learning_rate": 7.294585987261147e-06,
2794
+ "loss": 0.6231,
2795
+ "step": 3700
2796
+ },
2797
+ {
2798
+ "epoch": 2.24,
2799
+ "learning_rate": 7.278662420382167e-06,
2800
+ "loss": 0.6319,
2801
+ "step": 3710
2802
+ },
2803
+ {
2804
+ "epoch": 2.25,
2805
+ "learning_rate": 7.262738853503185e-06,
2806
+ "loss": 0.6468,
2807
+ "step": 3720
2808
+ },
2809
+ {
2810
+ "epoch": 2.25,
2811
+ "learning_rate": 7.246815286624204e-06,
2812
+ "loss": 0.6395,
2813
+ "step": 3730
2814
+ },
2815
+ {
2816
+ "epoch": 2.26,
2817
+ "learning_rate": 7.230891719745224e-06,
2818
+ "loss": 0.6552,
2819
+ "step": 3740
2820
+ },
2821
+ {
2822
+ "epoch": 2.26,
2823
+ "learning_rate": 7.214968152866243e-06,
2824
+ "loss": 0.6353,
2825
+ "step": 3750
2826
+ },
2827
+ {
2828
+ "epoch": 2.27,
2829
+ "learning_rate": 7.199044585987261e-06,
2830
+ "loss": 0.6392,
2831
+ "step": 3760
2832
+ },
2833
+ {
2834
+ "epoch": 2.28,
2835
+ "learning_rate": 7.183121019108281e-06,
2836
+ "loss": 0.6576,
2837
+ "step": 3770
2838
+ },
2839
+ {
2840
+ "epoch": 2.28,
2841
+ "learning_rate": 7.1671974522293e-06,
2842
+ "loss": 0.6411,
2843
+ "step": 3780
2844
+ },
2845
+ {
2846
+ "epoch": 2.29,
2847
+ "learning_rate": 7.15127388535032e-06,
2848
+ "loss": 0.6297,
2849
+ "step": 3790
2850
+ },
2851
+ {
2852
+ "epoch": 2.29,
2853
+ "learning_rate": 7.135350318471338e-06,
2854
+ "loss": 0.6314,
2855
+ "step": 3800
2856
+ },
2857
+ {
2858
+ "epoch": 2.3,
2859
+ "learning_rate": 7.119426751592357e-06,
2860
+ "loss": 0.6342,
2861
+ "step": 3810
2862
+ },
2863
+ {
2864
+ "epoch": 2.31,
2865
+ "learning_rate": 7.103503184713377e-06,
2866
+ "loss": 0.6364,
2867
+ "step": 3820
2868
+ },
2869
+ {
2870
+ "epoch": 2.31,
2871
+ "learning_rate": 7.087579617834396e-06,
2872
+ "loss": 0.646,
2873
+ "step": 3830
2874
+ },
2875
+ {
2876
+ "epoch": 2.32,
2877
+ "learning_rate": 7.071656050955414e-06,
2878
+ "loss": 0.6409,
2879
+ "step": 3840
2880
+ },
2881
+ {
2882
+ "epoch": 2.32,
2883
+ "learning_rate": 7.055732484076434e-06,
2884
+ "loss": 0.6314,
2885
+ "step": 3850
2886
+ },
2887
+ {
2888
+ "epoch": 2.33,
2889
+ "learning_rate": 7.039808917197453e-06,
2890
+ "loss": 0.6308,
2891
+ "step": 3860
2892
+ },
2893
+ {
2894
+ "epoch": 2.34,
2895
+ "learning_rate": 7.023885350318472e-06,
2896
+ "loss": 0.6364,
2897
+ "step": 3870
2898
+ },
2899
+ {
2900
+ "epoch": 2.34,
2901
+ "learning_rate": 7.007961783439491e-06,
2902
+ "loss": 0.6325,
2903
+ "step": 3880
2904
+ },
2905
+ {
2906
+ "epoch": 2.35,
2907
+ "learning_rate": 6.99203821656051e-06,
2908
+ "loss": 0.6358,
2909
+ "step": 3890
2910
+ },
2911
+ {
2912
+ "epoch": 2.35,
2913
+ "learning_rate": 6.976114649681529e-06,
2914
+ "loss": 0.6369,
2915
+ "step": 3900
2916
+ },
2917
+ {
2918
+ "epoch": 2.36,
2919
+ "learning_rate": 6.960191082802549e-06,
2920
+ "loss": 0.6475,
2921
+ "step": 3910
2922
+ },
2923
+ {
2924
+ "epoch": 2.37,
2925
+ "learning_rate": 6.944267515923567e-06,
2926
+ "loss": 0.6191,
2927
+ "step": 3920
2928
+ },
2929
+ {
2930
+ "epoch": 2.37,
2931
+ "learning_rate": 6.928343949044587e-06,
2932
+ "loss": 0.6504,
2933
+ "step": 3930
2934
+ },
2935
+ {
2936
+ "epoch": 2.38,
2937
+ "learning_rate": 6.912420382165606e-06,
2938
+ "loss": 0.6312,
2939
+ "step": 3940
2940
+ },
2941
+ {
2942
+ "epoch": 2.38,
2943
+ "learning_rate": 6.896496815286625e-06,
2944
+ "loss": 0.6413,
2945
+ "step": 3950
2946
+ },
2947
+ {
2948
+ "epoch": 2.39,
2949
+ "learning_rate": 6.880573248407644e-06,
2950
+ "loss": 0.6402,
2951
+ "step": 3960
2952
+ },
2953
+ {
2954
+ "epoch": 2.4,
2955
+ "learning_rate": 6.864649681528663e-06,
2956
+ "loss": 0.6471,
2957
+ "step": 3970
2958
+ },
2959
+ {
2960
+ "epoch": 2.4,
2961
+ "learning_rate": 6.848726114649682e-06,
2962
+ "loss": 0.6465,
2963
+ "step": 3980
2964
+ },
2965
+ {
2966
+ "epoch": 2.41,
2967
+ "learning_rate": 6.832802547770702e-06,
2968
+ "loss": 0.639,
2969
+ "step": 3990
2970
+ },
2971
+ {
2972
+ "epoch": 2.41,
2973
+ "learning_rate": 6.81687898089172e-06,
2974
+ "loss": 0.6439,
2975
+ "step": 4000
2976
+ },
2977
+ {
2978
+ "epoch": 2.41,
2979
+ "eval_oasst_export_accuracy": 0.7082109300873196,
2980
+ "eval_oasst_export_loss": 1.277816653251648,
2981
+ "eval_oasst_export_runtime": 68.1401,
2982
+ "eval_oasst_export_samples_per_second": 30.731,
2983
+ "eval_oasst_export_steps_per_second": 1.291,
2984
+ "step": 4000
2985
+ },
2986
+ {
2987
+ "epoch": 2.41,
2988
+ "eval_code_alpaca_accuracy": 0.8390993058971417,
2989
+ "eval_code_alpaca_loss": 0.5614765882492065,
2990
+ "eval_code_alpaca_runtime": 4.9704,
2991
+ "eval_code_alpaca_samples_per_second": 50.298,
2992
+ "eval_code_alpaca_steps_per_second": 2.213,
2993
+ "step": 4000
2994
+ },
2995
+ {
2996
+ "epoch": 2.41,
2997
+ "eval_evol_v2_accuracy": 0.7963871231321064,
2998
+ "eval_evol_v2_loss": 0.7076693177223206,
2999
+ "eval_evol_v2_runtime": 274.9975,
3000
+ "eval_evol_v2_samples_per_second": 26.0,
3001
+ "eval_evol_v2_steps_per_second": 1.084,
3002
+ "step": 4000
3003
+ },
3004
+ {
3005
+ "epoch": 2.41,
3006
+ "eval_nlu_instruct_accuracy": 0.8217512104859825,
3007
+ "eval_nlu_instruct_loss": 0.6734899282455444,
3008
+ "eval_nlu_instruct_runtime": 3476.0607,
3009
+ "eval_nlu_instruct_samples_per_second": 22.446,
3010
+ "eval_nlu_instruct_steps_per_second": 0.935,
3011
+ "step": 4000
3012
+ },
3013
+ {
3014
+ "epoch": 2.41,
3015
+ "eval_grade_school_math_instructions_accuracy": 0.8384420148379539,
3016
+ "eval_grade_school_math_instructions_loss": 0.5439364314079285,
3017
+ "eval_grade_school_math_instructions_runtime": 10.6494,
3018
+ "eval_grade_school_math_instructions_samples_per_second": 41.317,
3019
+ "eval_grade_school_math_instructions_steps_per_second": 1.784,
3020
+ "step": 4000
3021
+ },
3022
+ {
3023
+ "epoch": 2.41,
3024
+ "eval_poem_instructions_accuracy": 0.5008827284556223,
3025
+ "eval_poem_instructions_loss": 2.6464786529541016,
3026
+ "eval_poem_instructions_runtime": 21.1785,
3027
+ "eval_poem_instructions_samples_per_second": 16.385,
3028
+ "eval_poem_instructions_steps_per_second": 0.708,
3029
+ "step": 4000
3030
+ },
3031
+ {
3032
+ "epoch": 2.41,
3033
+ "eval_gpt4all_accuracy": 0.7913096259297718,
3034
+ "eval_gpt4all_loss": 0.7641447186470032,
3035
+ "eval_gpt4all_runtime": 3565.6669,
3036
+ "eval_gpt4all_samples_per_second": 21.809,
3037
+ "eval_gpt4all_steps_per_second": 0.909,
3038
+ "step": 4000
3039
+ },
3040
+ {
3041
+ "epoch": 2.41,
3042
+ "eval_joke_accuracy": 0.5768002190380579,
3043
+ "eval_joke_loss": 1.7684005498886108,
3044
+ "eval_joke_runtime": 3.0277,
3045
+ "eval_joke_samples_per_second": 25.102,
3046
+ "eval_joke_steps_per_second": 1.321,
3047
+ "step": 4000
3048
+ },
3049
+ {
3050
+ "epoch": 2.41,
3051
+ "eval_gsm8k_accuracy": 0.8816889128526934,
3052
+ "eval_gsm8k_loss": 0.4015596807003021,
3053
+ "eval_gsm8k_runtime": 23.0636,
3054
+ "eval_gsm8k_samples_per_second": 57.19,
3055
+ "eval_gsm8k_steps_per_second": 2.385,
3056
+ "step": 4000
3057
+ },
3058
+ {
3059
+ "epoch": 2.42,
3060
+ "learning_rate": 6.80095541401274e-06,
3061
+ "loss": 0.6573,
3062
+ "step": 4010
3063
+ },
3064
+ {
3065
+ "epoch": 2.43,
3066
+ "learning_rate": 6.785031847133759e-06,
3067
+ "loss": 0.6252,
3068
+ "step": 4020
3069
+ },
3070
+ {
3071
+ "epoch": 2.43,
3072
+ "learning_rate": 6.769108280254778e-06,
3073
+ "loss": 0.6471,
3074
+ "step": 4030
3075
+ },
3076
+ {
3077
+ "epoch": 2.44,
3078
+ "learning_rate": 6.753184713375796e-06,
3079
+ "loss": 0.6518,
3080
+ "step": 4040
3081
+ },
3082
+ {
3083
+ "epoch": 2.44,
3084
+ "learning_rate": 6.737261146496816e-06,
3085
+ "loss": 0.6403,
3086
+ "step": 4050
3087
+ },
3088
+ {
3089
+ "epoch": 2.45,
3090
+ "learning_rate": 6.721337579617835e-06,
3091
+ "loss": 0.638,
3092
+ "step": 4060
3093
+ },
3094
+ {
3095
+ "epoch": 2.46,
3096
+ "learning_rate": 6.705414012738855e-06,
3097
+ "loss": 0.6546,
3098
+ "step": 4070
3099
+ },
3100
+ {
3101
+ "epoch": 2.46,
3102
+ "learning_rate": 6.689490445859873e-06,
3103
+ "loss": 0.652,
3104
+ "step": 4080
3105
+ },
3106
+ {
3107
+ "epoch": 2.47,
3108
+ "learning_rate": 6.673566878980892e-06,
3109
+ "loss": 0.642,
3110
+ "step": 4090
3111
+ },
3112
+ {
3113
+ "epoch": 2.47,
3114
+ "learning_rate": 6.657643312101912e-06,
3115
+ "loss": 0.6387,
3116
+ "step": 4100
3117
+ },
3118
+ {
3119
+ "epoch": 2.48,
3120
+ "learning_rate": 6.641719745222931e-06,
3121
+ "loss": 0.6469,
3122
+ "step": 4110
3123
+ },
3124
+ {
3125
+ "epoch": 2.49,
3126
+ "learning_rate": 6.625796178343949e-06,
3127
+ "loss": 0.651,
3128
+ "step": 4120
3129
+ },
3130
+ {
3131
+ "epoch": 2.49,
3132
+ "learning_rate": 6.609872611464969e-06,
3133
+ "loss": 0.6435,
3134
+ "step": 4130
3135
+ },
3136
+ {
3137
+ "epoch": 2.5,
3138
+ "learning_rate": 6.593949044585988e-06,
3139
+ "loss": 0.6494,
3140
+ "step": 4140
3141
+ },
3142
+ {
3143
+ "epoch": 2.5,
3144
+ "learning_rate": 6.578025477707008e-06,
3145
+ "loss": 0.6331,
3146
+ "step": 4150
3147
+ },
3148
+ {
3149
+ "epoch": 2.51,
3150
+ "learning_rate": 6.562101910828026e-06,
3151
+ "loss": 0.6252,
3152
+ "step": 4160
3153
+ },
3154
+ {
3155
+ "epoch": 2.52,
3156
+ "learning_rate": 6.546178343949045e-06,
3157
+ "loss": 0.6469,
3158
+ "step": 4170
3159
+ },
3160
+ {
3161
+ "epoch": 2.52,
3162
+ "learning_rate": 6.530254777070064e-06,
3163
+ "loss": 0.6598,
3164
+ "step": 4180
3165
+ },
3166
+ {
3167
+ "epoch": 2.53,
3168
+ "learning_rate": 6.514331210191084e-06,
3169
+ "loss": 0.6513,
3170
+ "step": 4190
3171
+ },
3172
+ {
3173
+ "epoch": 2.54,
3174
+ "learning_rate": 6.498407643312102e-06,
3175
+ "loss": 0.6505,
3176
+ "step": 4200
3177
+ },
3178
+ {
3179
+ "epoch": 2.54,
3180
+ "learning_rate": 6.482484076433122e-06,
3181
+ "loss": 0.6572,
3182
+ "step": 4210
3183
+ },
3184
+ {
3185
+ "epoch": 2.55,
3186
+ "learning_rate": 6.466560509554141e-06,
3187
+ "loss": 0.6346,
3188
+ "step": 4220
3189
+ },
3190
+ {
3191
+ "epoch": 2.55,
3192
+ "learning_rate": 6.45063694267516e-06,
3193
+ "loss": 0.6518,
3194
+ "step": 4230
3195
+ },
3196
+ {
3197
+ "epoch": 2.56,
3198
+ "learning_rate": 6.434713375796179e-06,
3199
+ "loss": 0.651,
3200
+ "step": 4240
3201
+ },
3202
+ {
3203
+ "epoch": 2.57,
3204
+ "learning_rate": 6.418789808917198e-06,
3205
+ "loss": 0.6546,
3206
+ "step": 4250
3207
+ },
3208
+ {
3209
+ "epoch": 2.57,
3210
+ "learning_rate": 6.402866242038217e-06,
3211
+ "loss": 0.6551,
3212
+ "step": 4260
3213
+ },
3214
+ {
3215
+ "epoch": 2.58,
3216
+ "learning_rate": 6.386942675159237e-06,
3217
+ "loss": 0.6494,
3218
+ "step": 4270
3219
+ },
3220
+ {
3221
+ "epoch": 2.58,
3222
+ "learning_rate": 6.371019108280255e-06,
3223
+ "loss": 0.6589,
3224
+ "step": 4280
3225
+ },
3226
+ {
3227
+ "epoch": 2.59,
3228
+ "learning_rate": 6.355095541401275e-06,
3229
+ "loss": 0.6399,
3230
+ "step": 4290
3231
+ },
3232
+ {
3233
+ "epoch": 2.6,
3234
+ "learning_rate": 6.339171974522294e-06,
3235
+ "loss": 0.6354,
3236
+ "step": 4300
3237
+ },
3238
+ {
3239
+ "epoch": 2.6,
3240
+ "learning_rate": 6.323248407643313e-06,
3241
+ "loss": 0.6432,
3242
+ "step": 4310
3243
+ },
3244
+ {
3245
+ "epoch": 2.61,
3246
+ "learning_rate": 6.307324840764332e-06,
3247
+ "loss": 0.6461,
3248
+ "step": 4320
3249
+ },
3250
+ {
3251
+ "epoch": 2.61,
3252
+ "learning_rate": 6.291401273885351e-06,
3253
+ "loss": 0.644,
3254
+ "step": 4330
3255
+ },
3256
+ {
3257
+ "epoch": 2.62,
3258
+ "learning_rate": 6.27547770700637e-06,
3259
+ "loss": 0.6675,
3260
+ "step": 4340
3261
+ },
3262
+ {
3263
+ "epoch": 2.63,
3264
+ "learning_rate": 6.25955414012739e-06,
3265
+ "loss": 0.6447,
3266
+ "step": 4350
3267
+ },
3268
+ {
3269
+ "epoch": 2.63,
3270
+ "learning_rate": 6.243630573248408e-06,
3271
+ "loss": 0.6524,
3272
+ "step": 4360
3273
+ },
3274
+ {
3275
+ "epoch": 2.64,
3276
+ "learning_rate": 6.2277070063694265e-06,
3277
+ "loss": 0.6414,
3278
+ "step": 4370
3279
+ },
3280
+ {
3281
+ "epoch": 2.64,
3282
+ "learning_rate": 6.211783439490447e-06,
3283
+ "loss": 0.641,
3284
+ "step": 4380
3285
+ },
3286
+ {
3287
+ "epoch": 2.65,
3288
+ "learning_rate": 6.195859872611465e-06,
3289
+ "loss": 0.6451,
3290
+ "step": 4390
3291
+ },
3292
+ {
3293
+ "epoch": 2.66,
3294
+ "learning_rate": 6.179936305732484e-06,
3295
+ "loss": 0.6351,
3296
+ "step": 4400
3297
+ },
3298
+ {
3299
+ "epoch": 2.66,
3300
+ "learning_rate": 6.1640127388535035e-06,
3301
+ "loss": 0.6409,
3302
+ "step": 4410
3303
+ },
3304
+ {
3305
+ "epoch": 2.67,
3306
+ "learning_rate": 6.148089171974523e-06,
3307
+ "loss": 0.6294,
3308
+ "step": 4420
3309
+ },
3310
+ {
3311
+ "epoch": 2.67,
3312
+ "learning_rate": 6.132165605095541e-06,
3313
+ "loss": 0.6466,
3314
+ "step": 4430
3315
+ },
3316
+ {
3317
+ "epoch": 2.68,
3318
+ "learning_rate": 6.116242038216561e-06,
3319
+ "loss": 0.6653,
3320
+ "step": 4440
3321
+ },
3322
+ {
3323
+ "epoch": 2.69,
3324
+ "learning_rate": 6.1003184713375795e-06,
3325
+ "loss": 0.6371,
3326
+ "step": 4450
3327
+ },
3328
+ {
3329
+ "epoch": 2.69,
3330
+ "learning_rate": 6.0843949044586e-06,
3331
+ "loss": 0.6416,
3332
+ "step": 4460
3333
+ },
3334
+ {
3335
+ "epoch": 2.7,
3336
+ "learning_rate": 6.068471337579618e-06,
3337
+ "loss": 0.6463,
3338
+ "step": 4470
3339
+ },
3340
+ {
3341
+ "epoch": 2.7,
3342
+ "learning_rate": 6.052547770700637e-06,
3343
+ "loss": 0.6352,
3344
+ "step": 4480
3345
+ },
3346
+ {
3347
+ "epoch": 2.71,
3348
+ "learning_rate": 6.0366242038216564e-06,
3349
+ "loss": 0.6582,
3350
+ "step": 4490
3351
+ },
3352
+ {
3353
+ "epoch": 2.72,
3354
+ "learning_rate": 6.020700636942676e-06,
3355
+ "loss": 0.6398,
3356
+ "step": 4500
3357
+ },
3358
+ {
3359
+ "epoch": 2.72,
3360
+ "eval_oasst_export_accuracy": 0.7068643360328554,
3361
+ "eval_oasst_export_loss": 1.2810877561569214,
3362
+ "eval_oasst_export_runtime": 68.1205,
3363
+ "eval_oasst_export_samples_per_second": 30.74,
3364
+ "eval_oasst_export_steps_per_second": 1.292,
3365
+ "step": 4500
3366
+ },
3367
+ {
3368
+ "epoch": 2.72,
3369
+ "eval_code_alpaca_accuracy": 0.8397004973492922,
3370
+ "eval_code_alpaca_loss": 0.5658242106437683,
3371
+ "eval_code_alpaca_runtime": 4.4867,
3372
+ "eval_code_alpaca_samples_per_second": 55.72,
3373
+ "eval_code_alpaca_steps_per_second": 2.452,
3374
+ "step": 4500
3375
+ },
3376
+ {
3377
+ "epoch": 2.72,
3378
+ "eval_evol_v2_accuracy": 0.7957420569509923,
3379
+ "eval_evol_v2_loss": 0.7099133133888245,
3380
+ "eval_evol_v2_runtime": 275.359,
3381
+ "eval_evol_v2_samples_per_second": 25.966,
3382
+ "eval_evol_v2_steps_per_second": 1.082,
3383
+ "step": 4500
3384
+ },
3385
+ {
3386
+ "epoch": 2.72,
3387
+ "eval_nlu_instruct_accuracy": 0.8218655440157056,
3388
+ "eval_nlu_instruct_loss": 0.6718029975891113,
3389
+ "eval_nlu_instruct_runtime": 3476.8418,
3390
+ "eval_nlu_instruct_samples_per_second": 22.441,
3391
+ "eval_nlu_instruct_steps_per_second": 0.935,
3392
+ "step": 4500
3393
+ },
3394
+ {
3395
+ "epoch": 2.72,
3396
+ "eval_grade_school_math_instructions_accuracy": 0.8435181569699336,
3397
+ "eval_grade_school_math_instructions_loss": 0.5345037579536438,
3398
+ "eval_grade_school_math_instructions_runtime": 9.633,
3399
+ "eval_grade_school_math_instructions_samples_per_second": 45.676,
3400
+ "eval_grade_school_math_instructions_steps_per_second": 1.972,
3401
+ "step": 4500
3402
+ },
3403
+ {
3404
+ "epoch": 2.72,
3405
+ "eval_poem_instructions_accuracy": 0.5007816030189417,
3406
+ "eval_poem_instructions_loss": 2.6497209072113037,
3407
+ "eval_poem_instructions_runtime": 21.1714,
3408
+ "eval_poem_instructions_samples_per_second": 16.39,
3409
+ "eval_poem_instructions_steps_per_second": 0.709,
3410
+ "step": 4500
3411
+ },
3412
+ {
3413
+ "epoch": 2.72,
3414
+ "eval_gpt4all_accuracy": 0.790508486230526,
3415
+ "eval_gpt4all_loss": 0.7647537589073181,
3416
+ "eval_gpt4all_runtime": 3565.0547,
3417
+ "eval_gpt4all_samples_per_second": 21.812,
3418
+ "eval_gpt4all_steps_per_second": 0.909,
3419
+ "step": 4500
3420
+ },
3421
+ {
3422
+ "epoch": 2.72,
3423
+ "eval_joke_accuracy": 0.5753399653189741,
3424
+ "eval_joke_loss": 1.7703536748886108,
3425
+ "eval_joke_runtime": 3.6512,
3426
+ "eval_joke_samples_per_second": 20.815,
3427
+ "eval_joke_steps_per_second": 1.096,
3428
+ "step": 4500
3429
+ },
3430
+ {
3431
+ "epoch": 2.72,
3432
+ "eval_gsm8k_accuracy": 0.8853599637483724,
3433
+ "eval_gsm8k_loss": 0.391888827085495,
3434
+ "eval_gsm8k_runtime": 22.7306,
3435
+ "eval_gsm8k_samples_per_second": 58.027,
3436
+ "eval_gsm8k_steps_per_second": 2.42,
3437
+ "step": 4500
3438
+ },
3439
+ {
3440
+ "epoch": 2.72,
3441
+ "learning_rate": 6.004777070063694e-06,
3442
+ "loss": 0.6392,
3443
+ "step": 4510
3444
+ },
3445
+ {
3446
+ "epoch": 2.73,
3447
+ "learning_rate": 5.988853503184714e-06,
3448
+ "loss": 0.6533,
3449
+ "step": 4520
3450
+ },
3451
+ {
3452
+ "epoch": 2.73,
3453
+ "learning_rate": 5.9729299363057325e-06,
3454
+ "loss": 0.6432,
3455
+ "step": 4530
3456
+ },
3457
+ {
3458
+ "epoch": 2.74,
3459
+ "learning_rate": 5.957006369426752e-06,
3460
+ "loss": 0.6447,
3461
+ "step": 4540
3462
+ },
3463
+ {
3464
+ "epoch": 2.75,
3465
+ "learning_rate": 5.941082802547771e-06,
3466
+ "loss": 0.6438,
3467
+ "step": 4550
3468
+ },
3469
+ {
3470
+ "epoch": 2.75,
3471
+ "learning_rate": 5.92515923566879e-06,
3472
+ "loss": 0.6402,
3473
+ "step": 4560
3474
+ },
3475
+ {
3476
+ "epoch": 2.76,
3477
+ "learning_rate": 5.9092356687898086e-06,
3478
+ "loss": 0.6436,
3479
+ "step": 4570
3480
+ },
3481
+ {
3482
+ "epoch": 2.76,
3483
+ "learning_rate": 5.893312101910829e-06,
3484
+ "loss": 0.6565,
3485
+ "step": 4580
3486
+ },
3487
+ {
3488
+ "epoch": 2.77,
3489
+ "learning_rate": 5.877388535031847e-06,
3490
+ "loss": 0.6562,
3491
+ "step": 4590
3492
+ },
3493
+ {
3494
+ "epoch": 2.78,
3495
+ "learning_rate": 5.861464968152867e-06,
3496
+ "loss": 0.6341,
3497
+ "step": 4600
3498
+ },
3499
+ {
3500
+ "epoch": 2.78,
3501
+ "learning_rate": 5.8455414012738855e-06,
3502
+ "loss": 0.6506,
3503
+ "step": 4610
3504
+ },
3505
+ {
3506
+ "epoch": 2.79,
3507
+ "learning_rate": 5.829617834394905e-06,
3508
+ "loss": 0.6633,
3509
+ "step": 4620
3510
+ },
3511
+ {
3512
+ "epoch": 2.79,
3513
+ "learning_rate": 5.813694267515924e-06,
3514
+ "loss": 0.6586,
3515
+ "step": 4630
3516
+ },
3517
+ {
3518
+ "epoch": 2.8,
3519
+ "learning_rate": 5.797770700636943e-06,
3520
+ "loss": 0.6479,
3521
+ "step": 4640
3522
+ },
3523
+ {
3524
+ "epoch": 2.81,
3525
+ "learning_rate": 5.7818471337579615e-06,
3526
+ "loss": 0.6546,
3527
+ "step": 4650
3528
+ },
3529
+ {
3530
+ "epoch": 2.81,
3531
+ "learning_rate": 5.765923566878982e-06,
3532
+ "loss": 0.6422,
3533
+ "step": 4660
3534
+ },
3535
+ {
3536
+ "epoch": 2.82,
3537
+ "learning_rate": 5.75e-06,
3538
+ "loss": 0.6523,
3539
+ "step": 4670
3540
+ },
3541
+ {
3542
+ "epoch": 2.82,
3543
+ "learning_rate": 5.73407643312102e-06,
3544
+ "loss": 0.657,
3545
+ "step": 4680
3546
+ },
3547
+ {
3548
+ "epoch": 2.83,
3549
+ "learning_rate": 5.7181528662420385e-06,
3550
+ "loss": 0.6716,
3551
+ "step": 4690
3552
+ },
3553
+ {
3554
+ "epoch": 2.84,
3555
+ "learning_rate": 5.702229299363058e-06,
3556
+ "loss": 0.6673,
3557
+ "step": 4700
3558
+ },
3559
+ {
3560
+ "epoch": 2.84,
3561
+ "learning_rate": 5.686305732484076e-06,
3562
+ "loss": 0.6443,
3563
+ "step": 4710
3564
+ },
3565
+ {
3566
+ "epoch": 2.85,
3567
+ "learning_rate": 5.670382165605096e-06,
3568
+ "loss": 0.6539,
3569
+ "step": 4720
3570
+ },
3571
+ {
3572
+ "epoch": 2.85,
3573
+ "learning_rate": 5.6544585987261145e-06,
3574
+ "loss": 0.6551,
3575
+ "step": 4730
3576
+ },
3577
+ {
3578
+ "epoch": 2.86,
3579
+ "learning_rate": 5.638535031847135e-06,
3580
+ "loss": 0.6531,
3581
+ "step": 4740
3582
+ },
3583
+ {
3584
+ "epoch": 2.87,
3585
+ "learning_rate": 5.622611464968153e-06,
3586
+ "loss": 0.6332,
3587
+ "step": 4750
3588
+ },
3589
+ {
3590
+ "epoch": 2.87,
3591
+ "learning_rate": 5.606687898089172e-06,
3592
+ "loss": 0.6554,
3593
+ "step": 4760
3594
+ },
3595
+ {
3596
+ "epoch": 2.88,
3597
+ "learning_rate": 5.5907643312101914e-06,
3598
+ "loss": 0.6503,
3599
+ "step": 4770
3600
+ },
3601
+ {
3602
+ "epoch": 2.89,
3603
+ "learning_rate": 5.574840764331211e-06,
3604
+ "loss": 0.6384,
3605
+ "step": 4780
3606
+ },
3607
+ {
3608
+ "epoch": 2.89,
3609
+ "learning_rate": 5.558917197452229e-06,
3610
+ "loss": 0.6433,
3611
+ "step": 4790
3612
+ },
3613
+ {
3614
+ "epoch": 2.9,
3615
+ "learning_rate": 5.542993630573249e-06,
3616
+ "loss": 0.6516,
3617
+ "step": 4800
3618
+ },
3619
+ {
3620
+ "epoch": 2.9,
3621
+ "learning_rate": 5.5270700636942675e-06,
3622
+ "loss": 0.6567,
3623
+ "step": 4810
3624
+ },
3625
+ {
3626
+ "epoch": 2.91,
3627
+ "learning_rate": 5.511146496815288e-06,
3628
+ "loss": 0.6488,
3629
+ "step": 4820
3630
+ },
3631
+ {
3632
+ "epoch": 2.92,
3633
+ "learning_rate": 5.495222929936306e-06,
3634
+ "loss": 0.6434,
3635
+ "step": 4830
3636
+ },
3637
+ {
3638
+ "epoch": 2.92,
3639
+ "learning_rate": 5.479299363057325e-06,
3640
+ "loss": 0.6404,
3641
+ "step": 4840
3642
+ },
3643
+ {
3644
+ "epoch": 2.93,
3645
+ "learning_rate": 5.463375796178344e-06,
3646
+ "loss": 0.6467,
3647
+ "step": 4850
3648
+ },
3649
+ {
3650
+ "epoch": 2.93,
3651
+ "learning_rate": 5.447452229299364e-06,
3652
+ "loss": 0.6449,
3653
+ "step": 4860
3654
+ },
3655
+ {
3656
+ "epoch": 2.94,
3657
+ "learning_rate": 5.431528662420382e-06,
3658
+ "loss": 0.6329,
3659
+ "step": 4870
3660
+ },
3661
+ {
3662
+ "epoch": 2.95,
3663
+ "learning_rate": 5.415605095541402e-06,
3664
+ "loss": 0.6637,
3665
+ "step": 4880
3666
+ },
3667
+ {
3668
+ "epoch": 2.95,
3669
+ "learning_rate": 5.3996815286624205e-06,
3670
+ "loss": 0.6431,
3671
+ "step": 4890
3672
+ },
3673
+ {
3674
+ "epoch": 2.96,
3675
+ "learning_rate": 5.38375796178344e-06,
3676
+ "loss": 0.6545,
3677
+ "step": 4900
3678
+ },
3679
+ {
3680
+ "epoch": 2.96,
3681
+ "learning_rate": 5.367834394904459e-06,
3682
+ "loss": 0.6679,
3683
+ "step": 4910
3684
+ },
3685
+ {
3686
+ "epoch": 2.97,
3687
+ "learning_rate": 5.351910828025478e-06,
3688
+ "loss": 0.6596,
3689
+ "step": 4920
3690
+ },
3691
+ {
3692
+ "epoch": 2.98,
3693
+ "learning_rate": 5.3359872611464966e-06,
3694
+ "loss": 0.6546,
3695
+ "step": 4930
3696
+ },
3697
+ {
3698
+ "epoch": 2.98,
3699
+ "learning_rate": 5.320063694267517e-06,
3700
+ "loss": 0.6412,
3701
+ "step": 4940
3702
+ },
3703
+ {
3704
+ "epoch": 2.99,
3705
+ "learning_rate": 5.304140127388535e-06,
3706
+ "loss": 0.6624,
3707
+ "step": 4950
3708
+ },
3709
+ {
3710
+ "epoch": 2.99,
3711
+ "learning_rate": 5.288216560509555e-06,
3712
+ "loss": 0.6609,
3713
+ "step": 4960
3714
+ },
3715
+ {
3716
+ "epoch": 3.0,
3717
+ "learning_rate": 5.2722929936305735e-06,
3718
+ "loss": 0.6637,
3719
+ "step": 4970
3720
+ },
3721
+ {
3722
+ "epoch": 3.01,
3723
+ "learning_rate": 5.256369426751593e-06,
3724
+ "loss": 0.5777,
3725
+ "step": 4980
3726
+ },
3727
+ {
3728
+ "epoch": 3.01,
3729
+ "learning_rate": 5.240445859872612e-06,
3730
+ "loss": 0.5636,
3731
+ "step": 4990
3732
+ },
3733
+ {
3734
+ "epoch": 3.02,
3735
+ "learning_rate": 5.224522292993631e-06,
3736
+ "loss": 0.5546,
3737
+ "step": 5000
3738
+ },
3739
+ {
3740
+ "epoch": 3.02,
3741
+ "eval_oasst_export_accuracy": 0.7039114566904696,
3742
+ "eval_oasst_export_loss": 1.3318158388137817,
3743
+ "eval_oasst_export_runtime": 68.1379,
3744
+ "eval_oasst_export_samples_per_second": 30.732,
3745
+ "eval_oasst_export_steps_per_second": 1.291,
3746
+ "step": 5000
3747
+ },
3748
+ {
3749
+ "epoch": 3.02,
3750
+ "eval_code_alpaca_accuracy": 0.8384434606766137,
3751
+ "eval_code_alpaca_loss": 0.5847578048706055,
3752
+ "eval_code_alpaca_runtime": 5.0277,
3753
+ "eval_code_alpaca_samples_per_second": 49.724,
3754
+ "eval_code_alpaca_steps_per_second": 2.188,
3755
+ "step": 5000
3756
+ },
3757
+ {
3758
+ "epoch": 3.02,
3759
+ "eval_evol_v2_accuracy": 0.7943008349293793,
3760
+ "eval_evol_v2_loss": 0.7375423312187195,
3761
+ "eval_evol_v2_runtime": 274.7434,
3762
+ "eval_evol_v2_samples_per_second": 26.024,
3763
+ "eval_evol_v2_steps_per_second": 1.085,
3764
+ "step": 5000
3765
+ },
3766
+ {
3767
+ "epoch": 3.02,
3768
+ "eval_nlu_instruct_accuracy": 0.8220131298210854,
3769
+ "eval_nlu_instruct_loss": 0.6885544657707214,
3770
+ "eval_nlu_instruct_runtime": 3476.7259,
3771
+ "eval_nlu_instruct_samples_per_second": 22.442,
3772
+ "eval_nlu_instruct_steps_per_second": 0.935,
3773
+ "step": 5000
3774
+ },
3775
+ {
3776
+ "epoch": 3.02,
3777
+ "eval_grade_school_math_instructions_accuracy": 0.8461050370948848,
3778
+ "eval_grade_school_math_instructions_loss": 0.5257013440132141,
3779
+ "eval_grade_school_math_instructions_runtime": 12.7566,
3780
+ "eval_grade_school_math_instructions_samples_per_second": 34.492,
3781
+ "eval_grade_school_math_instructions_steps_per_second": 1.489,
3782
+ "step": 5000
3783
+ },
3784
+ {
3785
+ "epoch": 3.02,
3786
+ "eval_poem_instructions_accuracy": 0.49797271716962005,
3787
+ "eval_poem_instructions_loss": 2.6931512355804443,
3788
+ "eval_poem_instructions_runtime": 21.1948,
3789
+ "eval_poem_instructions_samples_per_second": 16.372,
3790
+ "eval_poem_instructions_steps_per_second": 0.708,
3791
+ "step": 5000
3792
+ },
3793
+ {
3794
+ "epoch": 3.02,
3795
+ "eval_gpt4all_accuracy": 0.7893674285927822,
3796
+ "eval_gpt4all_loss": 0.7917724251747131,
3797
+ "eval_gpt4all_runtime": 3566.3071,
3798
+ "eval_gpt4all_samples_per_second": 21.805,
3799
+ "eval_gpt4all_steps_per_second": 0.909,
3800
+ "step": 5000
3801
+ },
3802
+ {
3803
+ "epoch": 3.02,
3804
+ "eval_joke_accuracy": 0.5706854065893949,
3805
+ "eval_joke_loss": 1.8482730388641357,
3806
+ "eval_joke_runtime": 2.7911,
3807
+ "eval_joke_samples_per_second": 27.229,
3808
+ "eval_joke_steps_per_second": 1.433,
3809
+ "step": 5000
3810
+ },
3811
+ {
3812
+ "epoch": 3.02,
3813
+ "eval_gsm8k_accuracy": 0.8884000527713566,
3814
+ "eval_gsm8k_loss": 0.38383719325065613,
3815
+ "eval_gsm8k_runtime": 22.6991,
3816
+ "eval_gsm8k_samples_per_second": 58.108,
3817
+ "eval_gsm8k_steps_per_second": 2.423,
3818
+ "step": 5000
3819
+ }
3820
+ ],
3821
+ "max_steps": 8280,
3822
+ "num_train_epochs": 5,
3823
+ "total_flos": 8911630701166592.0,
3824
+ "trial_name": null,
3825
+ "trial_params": null
3826
+ }
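
Note (not part of the commit): the records above log per-step training loss and learning rate plus per-dataset eval metrics (loss, accuracy, runtime, throughput) at steps 2500, 3000, 3500, 4000, 4500 and 5000. A minimal sketch for summarising them locally is shown below; it assumes a downloaded copy of trainer_state.json and the standard Hugging Face Trainer layout in which these records sit under the "log_history" key (the key itself is outside the excerpt shown here).

# Minimal sketch, assuming trainer_state.json is available locally and uses
# the conventional "log_history" key for the list of records shown above.
import json
from collections import defaultdict

with open("trainer_state.json") as f:
    state = json.load(f)

# dataset name -> list of (step, eval loss) pairs, in logging order
eval_loss_by_dataset = defaultdict(list)
for record in state.get("log_history", []):
    step = record.get("step")
    for key, value in record.items():
        # eval metrics are logged as eval_<dataset>_<metric>
        if key.startswith("eval_") and key.endswith("_loss"):
            dataset = key[len("eval_"):-len("_loss")]
            eval_loss_by_dataset[dataset].append((step, value))

# print the most recent eval loss recorded for each dataset
for dataset, series in sorted(eval_loss_by_dataset.items()):
    last_step, last_loss = series[-1]
    print(f"{dataset}: eval loss {last_loss:.4f} at step {last_step}")

For the excerpt above this would report, for example, gsm8k at roughly 0.38 and poem_instructions at roughly 2.69 for step 5000, matching the values logged in the final evaluation block.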