- transformers
---

![image/png](https://cdn-uploads.huggingface.co/production/uploads/64e6d37e02dee9bcb9d9fa18/X4WG8AnMFqJuWkRvA0CrW.png)

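### usage

Since the card is tagged `transformers`, here is a minimal loading sketch. It assumes a standard causal-LM checkpoint; `csabakecskemeti/<model-name>` is a placeholder, as this fragment does not spell out the repo id.

```python
# Minimal usage sketch, assuming a standard causal LM loadable via transformers.
# The repo id below is a placeholder -- replace it with this model's actual id.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "csabakecskemeti/<model-name>"  # placeholder, not stated in this card fragment

tokenizer = AutoTokenizer.from_pretrained(model_id)
# device_map="auto" requires the accelerate package
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

# Run a short generation to confirm the checkpoint loads and decodes sensibly.
inputs = tokenizer("The three primary colors are", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```
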
### eval

| Tasks |Version|Filter|n-shot| Metric | |Value | |Stderr|
|----------------------------------------------------------|-------|------|-----:|-----------------------|---|-----:|---|------|
|hellaswag | 1|none | 0|acc |↑ |0.5141|± |0.0050|
| | |none | 0|acc_norm |↑ |0.6793|± |0.0047|
|leaderboard_bbh | N/A| | | | | | | |
| - leaderboard_bbh_boolean_expressions | 1|none | 3|acc_norm |↑ |0.6040|± |0.0310|
| - leaderboard_bbh_causal_judgement | 1|none | 3|acc_norm |↑ |0.5668|± |0.0363|
| - leaderboard_bbh_date_understanding | 1|none | 3|acc_norm |↑ |0.4880|± |0.0317|
| - leaderboard_bbh_disambiguation_qa | 1|none | 3|acc_norm |↑ |0.3760|± |0.0307|
| - leaderboard_bbh_formal_fallacies | 1|none | 3|acc_norm |↑ |0.5400|± |0.0316|
| - leaderboard_bbh_geometric_shapes | 1|none | 3|acc_norm |↑ |0.2200|± |0.0263|
| - leaderboard_bbh_hyperbaton | 1|none | 3|acc_norm |↑ |0.5640|± |0.0314|
| - leaderboard_bbh_logical_deduction_five_objects | 1|none | 3|acc_norm |↑ |0.4560|± |0.0316|
| - leaderboard_bbh_logical_deduction_seven_objects | 1|none | 3|acc_norm |↑ |0.4360|± |0.0314|
| - leaderboard_bbh_logical_deduction_three_objects | 1|none | 3|acc_norm |↑ |0.4880|± |0.0317|
| - leaderboard_bbh_movie_recommendation | 1|none | 3|acc_norm |↑ |0.6360|± |0.0305|
| - leaderboard_bbh_navigate | 1|none | 3|acc_norm |↑ |0.6200|± |0.0308|
| - leaderboard_bbh_object_counting | 1|none | 3|acc_norm |↑ |0.4120|± |0.0312|
| - leaderboard_bbh_penguins_in_a_table | 1|none | 3|acc_norm |↑ |0.3219|± |0.0388|
| - leaderboard_bbh_reasoning_about_colored_objects | 1|none | 3|acc_norm |↑ |0.3440|± |0.0301|
| - leaderboard_bbh_ruin_names | 1|none | 3|acc_norm |↑ |0.3240|± |0.0297|
| - leaderboard_bbh_salient_translation_error_detection | 1|none | 3|acc_norm |↑ |0.3120|± |0.0294|
| - leaderboard_bbh_snarks | 1|none | 3|acc_norm |↑ |0.4494|± |0.0374|
| - leaderboard_bbh_sports_understanding | 1|none | 3|acc_norm |↑ |0.6040|± |0.0310|
| - leaderboard_bbh_temporal_sequences | 1|none | 3|acc_norm |↑ |0.1000|± |0.0190|
| - leaderboard_bbh_tracking_shuffled_objects_five_objects | 1|none | 3|acc_norm |↑ |0.1600|± |0.0232|
| - leaderboard_bbh_tracking_shuffled_objects_seven_objects| 1|none | 3|acc_norm |↑ |0.1200|± |0.0206|
| - leaderboard_bbh_tracking_shuffled_objects_three_objects| 1|none | 3|acc_norm |↑ |0.3440|± |0.0301|
| - leaderboard_bbh_web_of_lies | 1|none | 3|acc_norm |↑ |0.5160|± |0.0317|
|leaderboard_gpqa | N/A| | | | | | | |
| - leaderboard_gpqa_diamond | 1|none | 0|acc_norm |↑ |0.2727|± |0.0317|
| - leaderboard_gpqa_extended | 1|none | 0|acc_norm |↑ |0.2802|± |0.0192|
| - leaderboard_gpqa_main | 1|none | 0|acc_norm |↑ |0.2545|± |0.0206|
|leaderboard_ifeval | 3|none | 0|inst_level_loose_acc |↑ |0.5252|± | N/A|
| | |none | 0|inst_level_strict_acc |↑ |0.4748|± | N/A|
| | |none | 0|prompt_level_loose_acc |↑ |0.3919|± |0.0210|
| | |none | 0|prompt_level_strict_acc|↑ |0.3420|± |0.0204|
|leaderboard_math_hard | N/A| | | | | | | |
| - leaderboard_math_algebra_hard | 2|none | 4|exact_match |↑ |0.2150|± |0.0235|
| - leaderboard_math_counting_and_prob_hard | 2|none | 4|exact_match |↑ |0.0244|± |0.0140|
| - leaderboard_math_geometry_hard | 2|none | 4|exact_match |↑ |0.0606|± |0.0208|
| - leaderboard_math_intermediate_algebra_hard | 2|none | 4|exact_match |↑ |0.0143|± |0.0071|
| - leaderboard_math_num_theory_hard | 2|none | 4|exact_match |↑ |0.0649|± |0.0199|
| - leaderboard_math_prealgebra_hard | 2|none | 4|exact_match |↑ |0.1762|± |0.0275|
| - leaderboard_math_precalculus_hard | 2|none | 4|exact_match |↑ |0.0519|± |0.0192|
|leaderboard_mmlu_pro | 0.1|none | 5|acc |↑ |0.2822|± |0.0041|
|leaderboard_musr | N/A| | | | | | | |
| - leaderboard_musr_murder_mysteries | 1|none | 0|acc_norm |↑ |0.5400|± |0.0316|
| - leaderboard_musr_object_placements | 1|none | 0|acc_norm |↑ |0.2344|± |0.0265|
| - leaderboard_musr_team_allocation | 1|none | 0|acc_norm |↑ |0.3200|± |0.0296|
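
The table above matches the markdown report format of EleutherAI's lm-evaluation-harness, and the `leaderboard_*` task names correspond to its Open LLM Leaderboard v2 task group. Below is a reproduction sketch under that assumption; the repo id is again a placeholder, and the dtype/batch-size settings are guesses rather than the values actually used for this card. The few-shot counts shown in the table (3 for BBH, 4 for MATH-hard, 5 for MMLU-Pro) are baked into the task configs, so they need not be passed explicitly.

```python
# Reproduction sketch, assuming these numbers came from lm-evaluation-harness
# (pip install lm-eval). Repo id is a placeholder; dtype and batch size are
# assumptions, not confirmed settings.
import lm_eval
from lm_eval.utils import make_table

results = lm_eval.simple_evaluate(
    model="hf",  # HF transformers backend
    model_args="pretrained=csabakecskemeti/<model-name>,dtype=bfloat16",
    tasks=["hellaswag", "leaderboard"],  # "leaderboard" bundles the leaderboard_* groups above
    batch_size="auto",
)
# Renders results in the same markdown table format as shown above.
print(make_table(results))
```
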
### Framework versions