Update README.md
Browse files
README.md
CHANGED

@@ -269,5 +269,49 @@ The model was evaluated on the OpenLLM leaderboard task, using [lm-evaluation-harness]
  269        <td><b>29.12</b></td>
  270        <td><b>100.59</b></td>
  271      </tr>
  272    </tbody>
  273  </table>
  269        <td><b>29.12</b></td>
  270        <td><b>100.59</b></td>
  271      </tr>
  272 +    <tr>
  273 +      <td rowspan="4"><strong>Coding</strong>
  274 +      </td>
  275 +      <td>HumanEval pass@1
  276 +      </td>
  277 +      <td>84.80
  278 +      </td>
  279 +      <td>85.40
  280 +      </td>
  281 +      <td>100.71
  282 +      </td>
  283 +    </tr>
  284 +    <tr>
  285 +      <td>HumanEval+ pass@1
  286 +      </td>
  287 +      <td>78.70
  288 +      </td>
  289 +      <td>79.90
  290 +      </td>
  291 +      <td>101.52
  292 +      </td>
  293 +    </tr>
  294 +    <tr>
  295 +      <td>MBPP pass@1
  296 +      </td>
  297 +      <td>72.80
  298 +      </td>
  299 +      <td>73.50
  300 +      </td>
  301 +      <td>100.96
  302 +      </td>
  303 +    </tr>
  304 +    <tr>
  305 +      <td>MBPP+ pass@1
  306 +      </td>
  307 +      <td>62.70
  308 +      </td>
  309 +      <td>64.80
  310 +      </td>
  311 +      <td>103.35
  312 +      </td>
  313 +    </tr>
  314 +
  315 +
  316    </tbody>
  317  </table>