Update README.md
README.md CHANGED
@@ -107,8 +107,14 @@ auto-round --model "OPEA/Qwen2.5-72B-Instruct-int2-sym-inc" --eval --eval_bs 16
 
 | Metric                                     |  BF16  |  INT4  |
 | :----------------------------------------- | :----: | :----: |
-| Avg                                        | 0.
+| Avg                                        | 0.7413 | 0.6835 |
+| leaderboard_mmlu_pro 5 shots               | 0.5919 | 0.4602 |
+| leaderboard_ifeval inst_level_strict_acc   | 0.7770 | 0.6715 |
+| leaderboard_ifeval prompt_level_strict_acc | 0.6858 | 0.5379 |
+| gsm8k(5shot) strict match                  | 0.9037 | 0.8666 |
 | mmlu                                       | 0.8334 | 0.7634 |
+| cmmlu                                      | 0.8727 | 0.7772 |
+| ceval-valid                                | 0.8975 | 0.7786 |
 | lambada_openai                             | 0.7518 | 0.7215 |
 | hellaswag                                  | 0.7031 | 0.6464 |
 | winogrande                                 | 0.7601 | 0.7553 |
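
For reference, the command visible in the hunk context above is the evaluation entry point associated with these scores. The exact task list and few-shot settings behind each row are not visible in this diff, so the following is a minimal sketch of the invocation as shown, not a complete reproduction recipe:

```bash
# Minimal sketch: evaluation command taken verbatim from the hunk context above.
# Per-task and few-shot configuration is not shown in this diff and is omitted here.
auto-round --model "OPEA/Qwen2.5-72B-Instruct-int2-sym-inc" --eval --eval_bs 16
```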