eval
README.md CHANGED
@@ -105,6 +105,55 @@ CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable
```

```
+| Tasks |Version|Filter|n-shot| Metric | |Value | |Stderr|
+|-----------------------------------------------------------|-------|------|-----:|-----------------------|---|-----:|---|------|
+|leaderboard | N/A| | | | | | | |
+| - leaderboard_bbh | N/A| | | | | | | |
+| - leaderboard_bbh_boolean_expressions | 1|none | 3|acc_norm |↑ |0.4560|± |0.0316|
+| - leaderboard_bbh_causal_judgement | 1|none | 3|acc_norm |↑ |0.5187|± |0.0366|
+| - leaderboard_bbh_date_understanding | 1|none | 3|acc_norm |↑ |0.2000|± |0.0253|
+| - leaderboard_bbh_disambiguation_qa | 1|none | 3|acc_norm |↑ |0.3400|± |0.0300|
+| - leaderboard_bbh_formal_fallacies | 1|none | 3|acc_norm |↑ |0.4680|± |0.0316|
+| - leaderboard_bbh_geometric_shapes | 1|none | 3|acc_norm |↑ |0.0880|± |0.0180|
+| - leaderboard_bbh_hyperbaton | 1|none | 3|acc_norm |↑ |0.5160|± |0.0317|
+| - leaderboard_bbh_logical_deduction_five_objects | 1|none | 3|acc_norm |↑ |0.1880|± |0.0248|
+| - leaderboard_bbh_logical_deduction_seven_objects | 1|none | 3|acc_norm |↑ |0.1440|± |0.0222|
+| - leaderboard_bbh_logical_deduction_three_objects | 1|none | 3|acc_norm |↑ |0.3360|± |0.0299|
+| - leaderboard_bbh_movie_recommendation | 1|none | 3|acc_norm |↑ |0.2680|± |0.0281|
+| - leaderboard_bbh_navigate | 1|none | 3|acc_norm |↑ |0.5800|± |0.0313|
+| - leaderboard_bbh_object_counting | 1|none | 3|acc_norm |↑ |0.0560|± |0.0146|
+| - leaderboard_bbh_penguins_in_a_table | 1|none | 3|acc_norm |↑ |0.2055|± |0.0336|
+| - leaderboard_bbh_reasoning_about_colored_objects | 1|none | 3|acc_norm |↑ |0.1400|± |0.0220|
+| - leaderboard_bbh_ruin_names | 1|none | 3|acc_norm |↑ |0.2160|± |0.0261|
+| - leaderboard_bbh_salient_translation_error_detection | 1|none | 3|acc_norm |↑ |0.1120|± |0.0200|
+| - leaderboard_bbh_snarks | 1|none | 3|acc_norm |↑ |0.5056|± |0.0376|
+| - leaderboard_bbh_sports_understanding | 1|none | 3|acc_norm |↑ |0.4800|± |0.0317|
+| - leaderboard_bbh_temporal_sequences | 1|none | 3|acc_norm |↑ |0.2840|± |0.0286|
+| - leaderboard_bbh_tracking_shuffled_objects_five_objects | 1|none | 3|acc_norm |↑ |0.2400|± |0.0271|
+| - leaderboard_bbh_tracking_shuffled_objects_seven_objects| 1|none | 3|acc_norm |↑ |0.1520|± |0.0228|
+| - leaderboard_bbh_tracking_shuffled_objects_three_objects| 1|none | 3|acc_norm |↑ |0.3320|± |0.0298|
+| - leaderboard_bbh_web_of_lies | 1|none | 3|acc_norm |↑ |0.4880|± |0.0317|
+| - leaderboard_gpqa | N/A| | | | | | | |
+| - leaderboard_gpqa_diamond | 1|none | 0|acc_norm |↑ |0.2071|± |0.0289|
+| - leaderboard_gpqa_extended | 1|none | 0|acc_norm |↑ |0.2637|± |0.0189|
+| - leaderboard_gpqa_main | 1|none | 0|acc_norm |↑ |0.2612|± |0.0208|
+| - leaderboard_ifeval | 3|none | 0|inst_level_loose_acc |↑ |0.2590|± | N/A|
+| | |none | 0|inst_level_strict_acc |↑ |0.2494|± | N/A|
+| | |none | 0|prompt_level_loose_acc |↑ |0.1497|± |0.0154|
+| | |none | 0|prompt_level_strict_acc|↑ |0.1405|± |0.0150|
+| - leaderboard_math_hard | N/A| | | | | | | |
+| - leaderboard_math_algebra_hard | 2|none | 4|exact_match |↑ |0.0008|± |0.0008|
+| - leaderboard_math_counting_and_prob_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_geometry_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_intermediate_algebra_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_num_theory_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_prealgebra_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_precalculus_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_mmlu_pro | 0.1|none | 5|acc |↑ |0.1112|± |0.0029|
+| - leaderboard_musr | N/A| | | | | | | |
+| - leaderboard_musr_murder_mysteries | 1|none | 0|acc_norm |↑ |0.5240|± |0.0316|
+| - leaderboard_musr_object_placements | 1|none | 0|acc_norm |↑ |0.2578|± |0.0274|
+| - leaderboard_musr_team_allocation | 1|none | 0|acc_norm |↑ |0.3960|± |0.0310|
```

```bash
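
The metrics above match the output of EleutherAI's lm-evaluation-harness `leaderboard` task group (BBH, GPQA, IFEval, MATH-hard, MMLU-Pro, MuSR). A minimal sketch of the kind of command that produces such a table, assuming the harness's standard CLI: the model path is a placeholder, the env vars mirror the truncated hunk header above, and the exact flags used in this commit are not shown.

```bash
# Sketch only: <model-path> is a placeholder, and PYTORCH_CUDA_ALLOC_CONF is
# assumed to be expandable_segments:True (the hunk header truncates it).
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 \
PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True \
lm_eval --model hf \
  --model_args pretrained=<model-path> \
  --tasks leaderboard \
  --batch_size auto \
  --output_path results
```

`--tasks leaderboard` runs all of the subtasks listed in the table in one pass; the harness prints the per-task table at the end of the run.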