readme: eval
README.md
CHANGED
@@ -105,6 +105,55 @@ CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable
 ```
 
 ```
+|                           Tasks                           |Version|Filter|n-shot|         Metric        |   |Value |   |Stderr|
+|-----------------------------------------------------------|-------|------|-----:|-----------------------|---|-----:|---|------|
+|leaderboard                                                |    N/A|      |      |                       |   |      |   |      |
+| - leaderboard_bbh                                         |    N/A|      |      |                       |   |      |   |      |
+|  - leaderboard_bbh_boolean_expressions                    |      1|none  |     3|acc_norm               |↑  |0.4600|±  |0.0316|
+|  - leaderboard_bbh_causal_judgement                       |      1|none  |     3|acc_norm               |↑  |0.5187|±  |0.0366|
+|  - leaderboard_bbh_date_understanding                     |      1|none  |     3|acc_norm               |↑  |0.2280|±  |0.0266|
+|  - leaderboard_bbh_disambiguation_qa                      |      1|none  |     3|acc_norm               |↑  |0.2760|±  |0.0283|
+|  - leaderboard_bbh_formal_fallacies                       |      1|none  |     3|acc_norm               |↑  |0.4680|±  |0.0316|
+|  - leaderboard_bbh_geometric_shapes                       |      1|none  |     3|acc_norm               |↑  |0.1040|±  |0.0193|
+|  - leaderboard_bbh_hyperbaton                             |      1|none  |     3|acc_norm               |↑  |0.5160|±  |0.0317|
+|  - leaderboard_bbh_logical_deduction_five_objects         |      1|none  |     3|acc_norm               |↑  |0.1840|±  |0.0246|
+|  - leaderboard_bbh_logical_deduction_seven_objects        |      1|none  |     3|acc_norm               |↑  |0.1520|±  |0.0228|
+|  - leaderboard_bbh_logical_deduction_three_objects        |      1|none  |     3|acc_norm               |↑  |0.3360|±  |0.0299|
+|  - leaderboard_bbh_movie_recommendation                   |      1|none  |     3|acc_norm               |↑  |0.2760|±  |0.0283|
+|  - leaderboard_bbh_navigate                               |      1|none  |     3|acc_norm               |↑  |0.5080|±  |0.0317|
+|  - leaderboard_bbh_object_counting                        |      1|none  |     3|acc_norm               |↑  |0.0560|±  |0.0146|
+|  - leaderboard_bbh_penguins_in_a_table                    |      1|none  |     3|acc_norm               |↑  |0.2397|±  |0.0355|
+|  - leaderboard_bbh_reasoning_about_colored_objects        |      1|none  |     3|acc_norm               |↑  |0.1440|±  |0.0222|
+|  - leaderboard_bbh_ruin_names                             |      1|none  |     3|acc_norm               |↑  |0.2080|±  |0.0257|
+|  - leaderboard_bbh_salient_translation_error_detection    |      1|none  |     3|acc_norm               |↑  |0.1200|±  |0.0206|
+|  - leaderboard_bbh_snarks                                 |      1|none  |     3|acc_norm               |↑  |0.5393|±  |0.0375|
+|  - leaderboard_bbh_sports_understanding                   |      1|none  |     3|acc_norm               |↑  |0.5400|±  |0.0316|
+|  - leaderboard_bbh_temporal_sequences                     |      1|none  |     3|acc_norm               |↑  |0.2720|±  |0.0282|
+|  - leaderboard_bbh_tracking_shuffled_objects_five_objects |      1|none  |     3|acc_norm               |↑  |0.2080|±  |0.0257|
+|  - leaderboard_bbh_tracking_shuffled_objects_seven_objects|      1|none  |     3|acc_norm               |↑  |0.1520|±  |0.0228|
+|  - leaderboard_bbh_tracking_shuffled_objects_three_objects|      1|none  |     3|acc_norm               |↑  |0.3320|±  |0.0298|
+|  - leaderboard_bbh_web_of_lies                            |      1|none  |     3|acc_norm               |↑  |0.4880|±  |0.0317|
+| - leaderboard_gpqa                                        |    N/A|      |      |                       |   |      |   |      |
+|  - leaderboard_gpqa_diamond                               |      1|none  |     0|acc_norm               |↑  |0.1970|±  |0.0283|
+|  - leaderboard_gpqa_extended                              |      1|none  |     0|acc_norm               |↑  |0.2766|±  |0.0192|
+|  - leaderboard_gpqa_main                                  |      1|none  |     0|acc_norm               |↑  |0.2746|±  |0.0211|
+| - leaderboard_ifeval                                      |      3|none  |     0|inst_level_loose_acc   |↑  |0.2338|±  |   N/A|
+|                                                           |       |none  |     0|inst_level_strict_acc  |↑  |0.2278|±  |   N/A|
+|                                                           |       |none  |     0|prompt_level_loose_acc |↑  |0.1128|±  |0.0136|
+|                                                           |       |none  |     0|prompt_level_strict_acc|↑  |0.1091|±  |0.0134|
+| - leaderboard_math_hard                                   |    N/A|      |      |                       |   |      |   |      |
+|  - leaderboard_math_algebra_hard                          |      2|none  |     4|exact_match            |↑  |0.0000|±  |     0|
+|  - leaderboard_math_counting_and_prob_hard                |      2|none  |     4|exact_match            |↑  |0.0000|±  |     0|
+|  - leaderboard_math_geometry_hard                         |      2|none  |     4|exact_match            |↑  |0.0000|±  |     0|
+|  - leaderboard_math_intermediate_algebra_hard             |      2|none  |     4|exact_match            |↑  |0.0000|±  |     0|
+|  - leaderboard_math_num_theory_hard                       |      2|none  |     4|exact_match            |↑  |0.0000|±  |     0|
+|  - leaderboard_math_prealgebra_hard                       |      2|none  |     4|exact_match            |↑  |0.0000|±  |     0|
+|  - leaderboard_math_precalculus_hard                      |      2|none  |     4|exact_match            |↑  |0.0000|±  |     0|
+| - leaderboard_mmlu_pro                                    |    0.1|none  |     5|acc                    |↑  |0.1155|±  |0.0029|
+| - leaderboard_musr                                        |    N/A|      |      |                       |   |      |   |      |
+|  - leaderboard_musr_murder_mysteries                      |      1|none  |     0|acc_norm               |↑  |0.5040|±  |0.0317|
+|  - leaderboard_musr_object_placements                     |      1|none  |     0|acc_norm               |↑  |0.2422|±  |0.0268|
+|  - leaderboard_musr_team_allocation                       |      1|none  |     0|acc_norm               |↑  |0.2880|±  |0.0287|
 ```
 
 ```bash
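
The table added above matches the output format of EleutherAI's lm-evaluation-harness. As a minimal sketch of how a table like this is typically produced (the commit itself does not show the exact command): the `leaderboard` task group covers BBH, GPQA, IFEval, MATH-hard, MMLU-Pro, and MuSR; the model path and batch size below are placeholders, not values from this repo.

```bash
# Sketch only: reproduce a leaderboard-style table with EleutherAI's
# lm-evaluation-harness. <model-path> and --batch_size are placeholders;
# this is not necessarily the command used for the numbers above.
pip install "lm_eval[math,ifeval,sentencepiece]"

CUDA_VISIBLE_DEVICES=0 lm_eval \
  --model hf \
  --model_args pretrained=<model-path> \
  --tasks leaderboard \
  --batch_size 4 \
  --output_path results
```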