nathan0 committed
Commit 3b60497 · 1 Parent(s): fc81711

first commit

mpt-7b-delta-tune-model-div-sal

README.md CHANGED
@@ -1,3 +1,9 @@
 ---
-license: apache-2.0
+library_name: peft
 ---
+## Training procedure
+
+### Framework versions
+
+
+- PEFT 0.4.0
adapter_config.json ADDED
@@ -0,0 +1,20 @@
+{
+  "auto_mapping": null,
+  "base_model_name_or_path": "/home/vmagent/app/dataset/mpt-7b",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 55,
+  "lora_dropout": 0.05,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 11,
+  "revision": null,
+  "target_modules": [
+    "Wqkv"
+  ],
+  "task_type": "CAUSAL_LM"
+}
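
For reference, here is a minimal sketch of applying this adapter to the base model with PEFT 0.4.0 (the version listed in the README). The base-model id and adapter path below are illustrative stand-ins; the adapter was trained against a local copy of MPT-7B at /home/vmagent/app/dataset/mpt-7b.

```python
# Minimal sketch: load MPT-7B and attach this LoRA adapter with peft==0.4.0.
# The model id and adapter path are assumptions, not recorded in this repo.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "mosaicml/mpt-7b",       # stand-in for /home/vmagent/app/dataset/mpt-7b
    trust_remote_code=True,  # MPT ships custom modeling code
)
# adapter_config.json above (r=11, lora_alpha=55, target_modules=["Wqkv"])
# is picked up automatically from the adapter directory.
model = PeftModel.from_pretrained(base, "./mpt-7b-delta-tune-model-div-sal")
```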
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d538ec07bd538200952bf730d5fd0f674aa684c39af25776553151f551fa35ca
+size 12467253
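
adapter_model.bin is a Git LFS pointer; the ~12 MB of adapter weights live in LFS storage. A hedged sketch of fetching the real file (the repo_id is an assumption based on the commit description):

```python
# Sketch: resolve the LFS pointer to the actual adapter weights.
# hf_hub_download follows LFS transparently; the repo_id is hypothetical.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="nathan0/mpt-7b-delta-tune-model-div-sal",  # assumed repo id
    filename="adapter_model.bin",
)
print(path)  # local cache path of the downloaded weights
```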
all_results.json ADDED
@@ -0,0 +1,9 @@
+{
+  "epoch": 1.0,
+  "eval_loss": 0.9799609184265137,
+  "eval_runtime": 5242.7479,
+  "eval_samples": 15601,
+  "eval_samples_per_second": 2.976,
+  "eval_steps_per_second": 0.372,
+  "eval_tokens": 1722455
+}
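
The derived throughput fields are internally consistent: eval_samples_per_second is eval_samples divided by eval_runtime, and the samples/steps ratio implies an effective evaluation batch size of 8. A quick check:

```python
# Sanity-check the derived fields in all_results.json.
eval_samples, eval_runtime = 15601, 5242.7479  # runtime in seconds

print(eval_samples / eval_runtime)  # ~2.976 -> eval_samples_per_second
print(2.976 / 0.372)                # = 8.0 -> effective eval batch size
```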
best_model_structure.txt ADDED
@@ -0,0 +1 @@
+{"num_hidden_layers": [0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0], "r": [8, 10, 3, 6, 8, 9, 10, 8, 12, 4, 9, 12, 5, 7, 7, 10, 2, 11, 10, 8, 11, 9, 6, 10, 12, 6, 12, 7, 5, 11, 11, 8], "alpha": [2, 6, 3, 3, 5, 3, 1, 2, 6, 2, 6, 5, 1, 6, 3, 6, 5, 2, 4, 3, 4, 4, 5, 5, 5, 4, 5, 4, 6, 6, 5, 5]}
checkpoint-985/README.md ADDED
@@ -0,0 +1,9 @@
+---
+library_name: peft
+---
+## Training procedure
+
+### Framework versions
+
+
+- PEFT 0.4.0
checkpoint-985/adapter_config.json ADDED
@@ -0,0 +1,20 @@
+{
+  "auto_mapping": null,
+  "base_model_name_or_path": "/home/vmagent/app/dataset/mpt-7b",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 55,
+  "lora_dropout": 0.05,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 11,
+  "revision": null,
+  "target_modules": [
+    "Wqkv"
+  ],
+  "task_type": "CAUSAL_LM"
+}
checkpoint-985/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d538ec07bd538200952bf730d5fd0f674aa684c39af25776553151f551fa35ca
+size 12467253
checkpoint-985/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e8dd10e1b991d52db97f9d7e24580bde2f62ecf59226c779994cd26c628c27a
+size 24939333
checkpoint-985/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0d37ec6dfe70dfec2d14a6da6156550726b7ca980de43d4a88d7ba6c925404c
+size 13553
checkpoint-985/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:35b9333bfcd7d3fa9837943e8744d19f7572eb49c45fded40340ce3c47b46224
+size 627
checkpoint-985/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+{
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "pad_token": "<|endoftext|>",
+  "unk_token": "<|endoftext|>"
+}
checkpoint-985/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-985/tokenizer_config.json ADDED
@@ -0,0 +1,9 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<|endoftext|>",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "model_max_length": 2048,
+  "tokenizer_class": "GPTNeoXTokenizer",
+  "unk_token": "<|endoftext|>"
+}
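
The tokenizer saved with the checkpoint is the GPT-NeoX-style tokenizer MPT-7B uses, with a single <|endoftext|> token serving as bos/eos/pad/unk and a 2048-token model_max_length. A minimal loading sketch, assuming the checkpoint directory is available locally:

```python
# Sketch: load the tokenizer saved alongside checkpoint-985 (local path assumed).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-985")
print(tok.model_max_length)            # 2048
print(tok.eos_token == tok.pad_token)  # True: every special token is <|endoftext|>
```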
checkpoint-985/trainer_state.json ADDED
@@ -0,0 +1,70 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 1.0,
+  "global_step": 985,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.1,
+      "learning_rate": 8.984771573604062e-05,
+      "loss": 1.1743,
+      "step": 100
+    },
+    {
+      "epoch": 0.2,
+      "learning_rate": 7.969543147208121e-05,
+      "loss": 1.0788,
+      "step": 200
+    },
+    {
+      "epoch": 0.3,
+      "learning_rate": 6.954314720812183e-05,
+      "loss": 1.0732,
+      "step": 300
+    },
+    {
+      "epoch": 0.41,
+      "learning_rate": 5.939086294416244e-05,
+      "loss": 1.0638,
+      "step": 400
+    },
+    {
+      "epoch": 0.51,
+      "learning_rate": 4.9238578680203045e-05,
+      "loss": 1.0558,
+      "step": 500
+    },
+    {
+      "epoch": 0.61,
+      "learning_rate": 3.9086294416243655e-05,
+      "loss": 1.0475,
+      "step": 600
+    },
+    {
+      "epoch": 0.71,
+      "learning_rate": 2.8934010152284264e-05,
+      "loss": 1.0485,
+      "step": 700
+    },
+    {
+      "epoch": 0.81,
+      "learning_rate": 1.8781725888324874e-05,
+      "loss": 1.039,
+      "step": 800
+    },
+    {
+      "epoch": 0.91,
+      "learning_rate": 8.629441624365483e-06,
+      "loss": 1.0402,
+      "step": 900
+    }
+  ],
+  "max_steps": 985,
+  "num_train_epochs": 1,
+  "total_flos": 1.5601672920445747e+17,
+  "trial_name": null,
+  "trial_params": null
+}
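
The logged learning rates match a linear decay from an (assumed) peak of 1e-4 down to 0 at max_steps=985 with no warmup: at step 100 the schedule gives 1e-4 × (985 − 100)/985 ≈ 8.9848e-05, exactly as logged. A sketch reproducing the column under that assumption:

```python
# Sketch: the log_history learning rates fit linear decay from an assumed
# peak LR of 1e-4 (no warmup) to 0 at max_steps=985.
PEAK_LR, MAX_STEPS = 1e-4, 985

def linear_lr(step: int) -> float:
    return PEAK_LR * (MAX_STEPS - step) / MAX_STEPS

for step in range(100, 1000, 100):
    print(step, linear_lr(step))
# 100 -> 8.9848e-05 ... 900 -> 8.6294e-06, matching the values logged above
```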
checkpoint-985/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f08761150839d8b4837eeaeeb20dfb3e2028628b007b9758c4101c7b1ab0d120
+size 4091
eval_results.json ADDED
@@ -0,0 +1,9 @@
+{
+  "epoch": 1.0,
+  "eval_loss": 0.9799609184265137,
+  "eval_runtime": 5242.7479,
+  "eval_samples": 15601,
+  "eval_samples_per_second": 2.976,
+  "eval_steps_per_second": 0.372,
+  "eval_tokens": 1722455
+}
mpt-7b-delta-tune-model-div-sal-arc_challenge ADDED
@@ -0,0 +1,27 @@
+{
+  "results": {
+    "arc_challenge": {
+      "acc": 0.454778156996587,
+      "acc_stderr": 0.014551507060836353,
+      "acc_norm": 0.5017064846416383,
+      "acc_norm_stderr": 0.014611305705056995
+    }
+  },
+  "versions": {
+    "arc_challenge": 0
+  },
+  "config": {
+    "model": "hf-causal-experimental",
+    "model_args": "pretrained=/home/vmagent/app/data/mpt-7b-delta-tune-model-div-sal/merged_model,use_accelerate=True,trust_remote_code=True,dtype=float16",
+    "num_fewshot": 25,
+    "batch_size": "auto",
+    "batch_sizes": [
+      32
+    ],
+    "device": null,
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}
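
The config block records the arguments of an EleutherAI lm-evaluation-harness run; its keys mirror the keyword arguments of the harness's simple_evaluate entry point. A hedged reconstruction of the call behind this file, assuming a v0.3-era harness that supports batch_size="auto":

```python
# Sketch: reproduce this 25-shot ARC-Challenge run with the v0.3-era
# lm-evaluation-harness; kwargs mirror the "config" keys recorded above.
from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args="pretrained=/home/vmagent/app/data/mpt-7b-delta-tune-model-div-sal/merged_model,"
               "use_accelerate=True,trust_remote_code=True,dtype=float16",
    tasks=["arc_challenge"],
    num_fewshot=25,
    batch_size="auto",
)
print(results["results"]["arc_challenge"]["acc_norm"])  # ~0.502 in this run
```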
mpt-7b-delta-tune-model-div-sal-hellaswag ADDED
@@ -0,0 +1,27 @@
+{
+  "results": {
+    "hellaswag": {
+      "acc": 0.5733917546305517,
+      "acc_stderr": 0.004935735300348862,
+      "acc_norm": 0.7630950009958176,
+      "acc_norm_stderr": 0.004243145587737571
+    }
+  },
+  "versions": {
+    "hellaswag": 0
+  },
+  "config": {
+    "model": "hf-causal-experimental",
+    "model_args": "pretrained=/home/vmagent/app/data/mpt-7b-delta-tune-model-div-sal/merged_model,use_accelerate=True,trust_remote_code=True,dtype=float16",
+    "num_fewshot": 10,
+    "batch_size": "auto",
+    "batch_sizes": [
+      32
+    ],
+    "device": null,
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}
mpt-7b-delta-tune-model-div-sal-mmlu ADDED
@@ -0,0 +1,419 @@
+{
+  "results": {
+    "hendrycksTest-abstract_algebra": {
+      "acc": 0.27,
+      "acc_stderr": 0.0446196043338474,
+      "acc_norm": 0.27,
+      "acc_norm_stderr": 0.0446196043338474
+    },
+    "hendrycksTest-anatomy": {
+      "acc": 0.3111111111111111,
+      "acc_stderr": 0.039992628766177214,
+      "acc_norm": 0.3111111111111111,
+      "acc_norm_stderr": 0.039992628766177214
+    },
+    "hendrycksTest-astronomy": {
+      "acc": 0.24342105263157895,
+      "acc_stderr": 0.034923496688842384,
+      "acc_norm": 0.24342105263157895,
+      "acc_norm_stderr": 0.034923496688842384
+    },
+    "hendrycksTest-business_ethics": {
+      "acc": 0.28,
+      "acc_stderr": 0.04512608598542127,
+      "acc_norm": 0.28,
+      "acc_norm_stderr": 0.04512608598542127
+    },
+    "hendrycksTest-clinical_knowledge": {
+      "acc": 0.3018867924528302,
+      "acc_stderr": 0.028254200344438655,
+      "acc_norm": 0.3018867924528302,
+      "acc_norm_stderr": 0.028254200344438655
+    },
+    "hendrycksTest-college_biology": {
+      "acc": 0.2916666666666667,
+      "acc_stderr": 0.03800968060554857,
+      "acc_norm": 0.2916666666666667,
+      "acc_norm_stderr": 0.03800968060554857
+    },
+    "hendrycksTest-college_chemistry": {
+      "acc": 0.26,
+      "acc_stderr": 0.0440844002276808,
+      "acc_norm": 0.26,
+      "acc_norm_stderr": 0.0440844002276808
+    },
+    "hendrycksTest-college_computer_science": {
+      "acc": 0.37,
+      "acc_stderr": 0.048523658709391,
+      "acc_norm": 0.37,
+      "acc_norm_stderr": 0.048523658709391
+    },
+    "hendrycksTest-college_mathematics": {
+      "acc": 0.27,
+      "acc_stderr": 0.044619604333847394,
+      "acc_norm": 0.27,
+      "acc_norm_stderr": 0.044619604333847394
+    },
+    "hendrycksTest-college_medicine": {
+      "acc": 0.2543352601156069,
+      "acc_stderr": 0.0332055644308557,
+      "acc_norm": 0.2543352601156069,
+      "acc_norm_stderr": 0.0332055644308557
+    },
+    "hendrycksTest-college_physics": {
+      "acc": 0.23529411764705882,
+      "acc_stderr": 0.04220773659171452,
+      "acc_norm": 0.23529411764705882,
+      "acc_norm_stderr": 0.04220773659171452
+    },
+    "hendrycksTest-computer_security": {
+      "acc": 0.32,
+      "acc_stderr": 0.046882617226215034,
+      "acc_norm": 0.32,
+      "acc_norm_stderr": 0.046882617226215034
+    },
+    "hendrycksTest-conceptual_physics": {
+      "acc": 0.31063829787234043,
+      "acc_stderr": 0.03025123757921317,
+      "acc_norm": 0.31063829787234043,
+      "acc_norm_stderr": 0.03025123757921317
+    },
+    "hendrycksTest-econometrics": {
+      "acc": 0.2894736842105263,
+      "acc_stderr": 0.04266339443159394,
+      "acc_norm": 0.2894736842105263,
+      "acc_norm_stderr": 0.04266339443159394
+    },
+    "hendrycksTest-electrical_engineering": {
+      "acc": 0.3103448275862069,
+      "acc_stderr": 0.038552896163789485,
+      "acc_norm": 0.3103448275862069,
+      "acc_norm_stderr": 0.038552896163789485
+    },
+    "hendrycksTest-elementary_mathematics": {
+      "acc": 0.2804232804232804,
+      "acc_stderr": 0.023135287974325628,
+      "acc_norm": 0.2804232804232804,
+      "acc_norm_stderr": 0.023135287974325628
+    },
+    "hendrycksTest-formal_logic": {
+      "acc": 0.18253968253968253,
+      "acc_stderr": 0.03455071019102146,
+      "acc_norm": 0.18253968253968253,
+      "acc_norm_stderr": 0.03455071019102146
+    },
+    "hendrycksTest-global_facts": {
+      "acc": 0.33,
+      "acc_stderr": 0.047258156262526045,
+      "acc_norm": 0.33,
+      "acc_norm_stderr": 0.047258156262526045
+    },
+    "hendrycksTest-high_school_biology": {
+      "acc": 0.31290322580645163,
+      "acc_stderr": 0.02637756702864586,
+      "acc_norm": 0.31290322580645163,
+      "acc_norm_stderr": 0.02637756702864586
+    },
+    "hendrycksTest-high_school_chemistry": {
+      "acc": 0.2512315270935961,
+      "acc_stderr": 0.030516530732694433,
+      "acc_norm": 0.2512315270935961,
+      "acc_norm_stderr": 0.030516530732694433
+    },
+    "hendrycksTest-high_school_computer_science": {
+      "acc": 0.3,
+      "acc_stderr": 0.046056618647183814,
+      "acc_norm": 0.3,
+      "acc_norm_stderr": 0.046056618647183814
+    },
+    "hendrycksTest-high_school_european_history": {
+      "acc": 0.3090909090909091,
+      "acc_stderr": 0.036085410115739666,
+      "acc_norm": 0.3090909090909091,
+      "acc_norm_stderr": 0.036085410115739666
+    },
+    "hendrycksTest-high_school_geography": {
+      "acc": 0.32323232323232326,
+      "acc_stderr": 0.033322999210706444,
+      "acc_norm": 0.32323232323232326,
+      "acc_norm_stderr": 0.033322999210706444
+    },
+    "hendrycksTest-high_school_government_and_politics": {
+      "acc": 0.32124352331606215,
+      "acc_stderr": 0.033699508685490674,
+      "acc_norm": 0.32124352331606215,
+      "acc_norm_stderr": 0.033699508685490674
+    },
+    "hendrycksTest-high_school_macroeconomics": {
+      "acc": 0.3076923076923077,
+      "acc_stderr": 0.023400928918310502,
+      "acc_norm": 0.3076923076923077,
+      "acc_norm_stderr": 0.023400928918310502
+    },
+    "hendrycksTest-high_school_mathematics": {
+      "acc": 0.25925925925925924,
+      "acc_stderr": 0.026719240783712177,
+      "acc_norm": 0.25925925925925924,
+      "acc_norm_stderr": 0.026719240783712177
+    },
+    "hendrycksTest-high_school_microeconomics": {
+      "acc": 0.25630252100840334,
+      "acc_stderr": 0.02835962087053395,
+      "acc_norm": 0.25630252100840334,
+      "acc_norm_stderr": 0.02835962087053395
+    },
+    "hendrycksTest-high_school_physics": {
+      "acc": 0.271523178807947,
+      "acc_stderr": 0.036313298039696545,
+      "acc_norm": 0.271523178807947,
+      "acc_norm_stderr": 0.036313298039696545
+    },
+    "hendrycksTest-high_school_psychology": {
+      "acc": 0.27339449541284405,
+      "acc_stderr": 0.01910929984609828,
+      "acc_norm": 0.27339449541284405,
+      "acc_norm_stderr": 0.01910929984609828
+    },
+    "hendrycksTest-high_school_statistics": {
+      "acc": 0.21296296296296297,
+      "acc_stderr": 0.027920963147993666,
+      "acc_norm": 0.21296296296296297,
+      "acc_norm_stderr": 0.027920963147993666
+    },
+    "hendrycksTest-high_school_us_history": {
+      "acc": 0.25,
+      "acc_stderr": 0.03039153369274154,
+      "acc_norm": 0.25,
+      "acc_norm_stderr": 0.03039153369274154
+    },
+    "hendrycksTest-high_school_world_history": {
+      "acc": 0.2742616033755274,
+      "acc_stderr": 0.029041333510598025,
+      "acc_norm": 0.2742616033755274,
+      "acc_norm_stderr": 0.029041333510598025
+    },
+    "hendrycksTest-human_aging": {
+      "acc": 0.36771300448430494,
+      "acc_stderr": 0.03236198350928276,
+      "acc_norm": 0.36771300448430494,
+      "acc_norm_stderr": 0.03236198350928276
+    },
+    "hendrycksTest-human_sexuality": {
+      "acc": 0.33587786259541985,
+      "acc_stderr": 0.04142313771996665,
+      "acc_norm": 0.33587786259541985,
+      "acc_norm_stderr": 0.04142313771996665
+    },
+    "hendrycksTest-international_law": {
+      "acc": 0.4214876033057851,
+      "acc_stderr": 0.045077322787750944,
+      "acc_norm": 0.4214876033057851,
+      "acc_norm_stderr": 0.045077322787750944
+    },
+    "hendrycksTest-jurisprudence": {
+      "acc": 0.3333333333333333,
+      "acc_stderr": 0.04557239513497752,
+      "acc_norm": 0.3333333333333333,
+      "acc_norm_stderr": 0.04557239513497752
+    },
+    "hendrycksTest-logical_fallacies": {
+      "acc": 0.27607361963190186,
+      "acc_stderr": 0.0351238528370505,
+      "acc_norm": 0.27607361963190186,
+      "acc_norm_stderr": 0.0351238528370505
+    },
+    "hendrycksTest-machine_learning": {
+      "acc": 0.35714285714285715,
+      "acc_stderr": 0.04547960999764376,
+      "acc_norm": 0.35714285714285715,
+      "acc_norm_stderr": 0.04547960999764376
+    },
+    "hendrycksTest-management": {
+      "acc": 0.30097087378640774,
+      "acc_stderr": 0.045416094465039476,
+      "acc_norm": 0.30097087378640774,
+      "acc_norm_stderr": 0.045416094465039476
+    },
+    "hendrycksTest-marketing": {
+      "acc": 0.2863247863247863,
+      "acc_stderr": 0.029614323690456648,
+      "acc_norm": 0.2863247863247863,
+      "acc_norm_stderr": 0.029614323690456648
+    },
+    "hendrycksTest-medical_genetics": {
+      "acc": 0.28,
+      "acc_stderr": 0.045126085985421255,
+      "acc_norm": 0.28,
+      "acc_norm_stderr": 0.045126085985421255
+    },
+    "hendrycksTest-miscellaneous": {
+      "acc": 0.30395913154533843,
+      "acc_stderr": 0.016448321686769043,
+      "acc_norm": 0.30395913154533843,
+      "acc_norm_stderr": 0.016448321686769043
+    },
+    "hendrycksTest-moral_disputes": {
+      "acc": 0.2832369942196532,
+      "acc_stderr": 0.02425790170532337,
+      "acc_norm": 0.2832369942196532,
+      "acc_norm_stderr": 0.02425790170532337
+    },
+    "hendrycksTest-moral_scenarios": {
+      "acc": 0.2424581005586592,
+      "acc_stderr": 0.014333522059217889,
+      "acc_norm": 0.2424581005586592,
+      "acc_norm_stderr": 0.014333522059217889
+    },
+    "hendrycksTest-nutrition": {
+      "acc": 0.2908496732026144,
+      "acc_stderr": 0.026004800363952113,
+      "acc_norm": 0.2908496732026144,
+      "acc_norm_stderr": 0.026004800363952113
+    },
+    "hendrycksTest-philosophy": {
+      "acc": 0.28938906752411575,
+      "acc_stderr": 0.02575586592263294,
+      "acc_norm": 0.28938906752411575,
+      "acc_norm_stderr": 0.02575586592263294
+    },
+    "hendrycksTest-prehistory": {
+      "acc": 0.2808641975308642,
+      "acc_stderr": 0.025006469755799204,
+      "acc_norm": 0.2808641975308642,
+      "acc_norm_stderr": 0.025006469755799204
+    },
+    "hendrycksTest-professional_accounting": {
+      "acc": 0.2872340425531915,
+      "acc_stderr": 0.026992199173064356,
+      "acc_norm": 0.2872340425531915,
+      "acc_norm_stderr": 0.026992199173064356
+    },
+    "hendrycksTest-professional_law": {
+      "acc": 0.26401564537157757,
+      "acc_stderr": 0.011258435537723821,
+      "acc_norm": 0.26401564537157757,
+      "acc_norm_stderr": 0.011258435537723821
+    },
+    "hendrycksTest-professional_medicine": {
+      "acc": 0.20588235294117646,
+      "acc_stderr": 0.024562204314142317,
+      "acc_norm": 0.20588235294117646,
+      "acc_norm_stderr": 0.024562204314142317
+    },
+    "hendrycksTest-professional_psychology": {
+      "acc": 0.2826797385620915,
+      "acc_stderr": 0.018217269552053432,
+      "acc_norm": 0.2826797385620915,
+      "acc_norm_stderr": 0.018217269552053432
+    },
+    "hendrycksTest-public_relations": {
+      "acc": 0.32727272727272727,
+      "acc_stderr": 0.04494290866252089,
+      "acc_norm": 0.32727272727272727,
+      "acc_norm_stderr": 0.04494290866252089
+    },
+    "hendrycksTest-security_studies": {
+      "acc": 0.3020408163265306,
+      "acc_stderr": 0.02939360931987981,
+      "acc_norm": 0.3020408163265306,
+      "acc_norm_stderr": 0.02939360931987981
+    },
+    "hendrycksTest-sociology": {
+      "acc": 0.27860696517412936,
+      "acc_stderr": 0.03170056183497308,
+      "acc_norm": 0.27860696517412936,
+      "acc_norm_stderr": 0.03170056183497308
+    },
+    "hendrycksTest-us_foreign_policy": {
+      "acc": 0.31,
+      "acc_stderr": 0.04648231987117316,
+      "acc_norm": 0.31,
+      "acc_norm_stderr": 0.04648231987117316
+    },
+    "hendrycksTest-virology": {
+      "acc": 0.3313253012048193,
+      "acc_stderr": 0.036643147772880864,
+      "acc_norm": 0.3313253012048193,
+      "acc_norm_stderr": 0.036643147772880864
+    },
+    "hendrycksTest-world_religions": {
+      "acc": 0.2573099415204678,
+      "acc_stderr": 0.03352799844161865,
+      "acc_norm": 0.2573099415204678,
+      "acc_norm_stderr": 0.03352799844161865
+    }
+  },
+  "versions": {
+    "hendrycksTest-abstract_algebra": 1,
+    "hendrycksTest-anatomy": 1,
+    "hendrycksTest-astronomy": 1,
+    "hendrycksTest-business_ethics": 1,
+    "hendrycksTest-clinical_knowledge": 1,
+    "hendrycksTest-college_biology": 1,
+    "hendrycksTest-college_chemistry": 1,
+    "hendrycksTest-college_computer_science": 1,
+    "hendrycksTest-college_mathematics": 1,
+    "hendrycksTest-college_medicine": 1,
+    "hendrycksTest-college_physics": 1,
+    "hendrycksTest-computer_security": 1,
+    "hendrycksTest-conceptual_physics": 1,
+    "hendrycksTest-econometrics": 1,
+    "hendrycksTest-electrical_engineering": 1,
+    "hendrycksTest-elementary_mathematics": 1,
+    "hendrycksTest-formal_logic": 1,
+    "hendrycksTest-global_facts": 1,
+    "hendrycksTest-high_school_biology": 1,
+    "hendrycksTest-high_school_chemistry": 1,
+    "hendrycksTest-high_school_computer_science": 1,
+    "hendrycksTest-high_school_european_history": 1,
+    "hendrycksTest-high_school_geography": 1,
+    "hendrycksTest-high_school_government_and_politics": 1,
+    "hendrycksTest-high_school_macroeconomics": 1,
+    "hendrycksTest-high_school_mathematics": 1,
+    "hendrycksTest-high_school_microeconomics": 1,
+    "hendrycksTest-high_school_physics": 1,
+    "hendrycksTest-high_school_psychology": 1,
+    "hendrycksTest-high_school_statistics": 1,
+    "hendrycksTest-high_school_us_history": 1,
+    "hendrycksTest-high_school_world_history": 1,
+    "hendrycksTest-human_aging": 1,
+    "hendrycksTest-human_sexuality": 1,
+    "hendrycksTest-international_law": 1,
+    "hendrycksTest-jurisprudence": 1,
+    "hendrycksTest-logical_fallacies": 1,
+    "hendrycksTest-machine_learning": 1,
+    "hendrycksTest-management": 1,
+    "hendrycksTest-marketing": 1,
+    "hendrycksTest-medical_genetics": 1,
+    "hendrycksTest-miscellaneous": 1,
+    "hendrycksTest-moral_disputes": 1,
+    "hendrycksTest-moral_scenarios": 1,
+    "hendrycksTest-nutrition": 1,
+    "hendrycksTest-philosophy": 1,
+    "hendrycksTest-prehistory": 1,
+    "hendrycksTest-professional_accounting": 1,
+    "hendrycksTest-professional_law": 1,
+    "hendrycksTest-professional_medicine": 1,
+    "hendrycksTest-professional_psychology": 1,
+    "hendrycksTest-public_relations": 1,
+    "hendrycksTest-security_studies": 1,
+    "hendrycksTest-sociology": 1,
+    "hendrycksTest-us_foreign_policy": 1,
+    "hendrycksTest-virology": 1,
+    "hendrycksTest-world_religions": 1
+  },
+  "config": {
+    "model": "hf-causal-experimental",
+    "model_args": "pretrained=/home/vmagent/app/data/mpt-7b-delta-tune-model-div-sal/merged_model,use_accelerate=True,trust_remote_code=True,dtype=float16",
+    "num_fewshot": 5,
+    "batch_size": "auto",
+    "batch_sizes": [
+      16
+    ],
+    "device": null,
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}
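
MMLU is recorded per subject only; the usual headline number is the mean over the 57 hendrycksTest tasks, roughly 0.29 here (unweighted). A small sketch computing it from this file:

```python
# Sketch: unweighted mean MMLU accuracy over the 57 subjects in this file.
import json

with open("mpt-7b-delta-tune-model-div-sal-mmlu") as f:
    data = json.load(f)

accs = [task["acc"] for task in data["results"].values()]
print(len(accs), sum(accs) / len(accs))  # 57 subjects, ~0.29
```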
mpt-7b-delta-tune-model-div-sal-truthqa ADDED
@@ -0,0 +1,27 @@
+{
+  "results": {
+    "truthfulqa_mc": {
+      "mc1": 0.22643818849449204,
+      "mc1_stderr": 0.01465133732460258,
+      "mc2": 0.3448308279815954,
+      "mc2_stderr": 0.0135712184614352
+    }
+  },
+  "versions": {
+    "truthfulqa_mc": 1
+  },
+  "config": {
+    "model": "hf-causal-experimental",
+    "model_args": "pretrained=/home/vmagent/app/data/mpt-7b-delta-tune-model-div-sal/merged_model,use_accelerate=True,trust_remote_code=True,dtype=float16",
+    "num_fewshot": 0,
+    "batch_size": "auto",
+    "batch_sizes": [
+      32
+    ],
+    "device": null,
+    "no_cache": false,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "description_dict": {}
+  }
+}