Update README.md
Browse files
README.md
CHANGED
|
@@ -36,12 +36,6 @@ The [GPTQ](https://arxiv.org/abs/2210.17323) algorithm is applied for quantizati
|
|
| 36 |
This model was created using the [llm-compressor](https://github.com/vllm-project/llm-compressor) library, as presented in the code snippet below.
|
| 37 |
|
| 38 |
```python
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
from transformers import AutoTokenizer
|
| 46 |
from llmcompressor.transformers import SparseAutoModelForCausalLM, oneshot
|
| 47 |
from llmcompressor.modifiers.quantization import GPTQModifier
|
|
@@ -132,9 +126,9 @@ lm_eval \
|
|
| 132 |
<tr>
|
| 133 |
<td>MMLU (5-shot)
|
| 134 |
</td>
|
| 135 |
-
<td>26.
|
| 136 |
</td>
|
| 137 |
-
<td>25.
|
| 138 |
</td>
|
| 139 |
<td>96.12%
|
| 140 |
</td>
|
|
@@ -162,41 +156,41 @@ lm_eval \
|
|
| 162 |
<tr>
|
| 163 |
<td>Hellaswag (10-shot)
|
| 164 |
</td>
|
| 165 |
-
<td>
|
| 166 |
</td>
|
| 167 |
-
<td>
|
| 168 |
</td>
|
| 169 |
-
<td>
|
| 170 |
</td>
|
| 171 |
</tr>
|
| 172 |
<tr>
|
| 173 |
<td>Winogrande (5-shot)
|
| 174 |
</td>
|
| 175 |
-
<td>
|
| 176 |
</td>
|
| 177 |
-
<td>
|
| 178 |
</td>
|
| 179 |
-
<td>
|
| 180 |
</td>
|
| 181 |
</tr>
|
| 182 |
<tr>
|
| 183 |
<td>TruthfulQA (0-shot)
|
| 184 |
</td>
|
| 185 |
-
<td>
|
| 186 |
</td>
|
| 187 |
-
<td>
|
| 188 |
</td>
|
| 189 |
-
<td>98.
|
| 190 |
</td>
|
| 191 |
</tr>
|
| 192 |
<tr>
|
| 193 |
<td><strong>Average</strong>
|
| 194 |
</td>
|
| 195 |
-
<td><strong>
|
| 196 |
</td>
|
| 197 |
-
<td><strong>
|
| 198 |
</td>
|
| 199 |
-
<td><strong>
|
| 200 |
</td>
|
| 201 |
</tr>
|
| 202 |
</table>
|
|
|
|
| 36 |
This model was created using the [llm-compressor](https://github.com/vllm-project/llm-compressor) library, as presented in the code snippet below.
|
| 37 |
|
| 38 |
```python
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 39 |
from transformers import AutoTokenizer
|
| 40 |
from llmcompressor.transformers import SparseAutoModelForCausalLM, oneshot
|
| 41 |
from llmcompressor.modifiers.quantization import GPTQModifier
|
|
|
|
| 126 |
<tr>
|
| 127 |
<td>MMLU (5-shot)
|
| 128 |
</td>
|
| 129 |
+
<td>26.220
|
| 130 |
</td>
|
| 131 |
+
<td>25.202
|
| 132 |
</td>
|
| 133 |
<td>96.12%
|
| 134 |
</td>
|
|
|
|
| 156 |
<tr>
|
| 157 |
<td>Hellaswag (10-shot)
|
| 158 |
</td>
|
| 159 |
+
<td>41.41
|
| 160 |
</td>
|
| 161 |
+
<td>40.81
|
| 162 |
</td>
|
| 163 |
+
<td>98.55%
|
| 164 |
</td>
|
| 165 |
</tr>
|
| 166 |
<tr>
|
| 167 |
<td>Winogrande (5-shot)
|
| 168 |
</td>
|
| 169 |
+
<td>50.039
|
| 170 |
</td>
|
| 171 |
+
<td>53.591
|
| 172 |
</td>
|
| 173 |
+
<td>107.10%
|
| 174 |
</td>
|
| 175 |
</tr>
|
| 176 |
<tr>
|
| 177 |
<td>TruthfulQA (0-shot)
|
| 178 |
</td>
|
| 179 |
+
<td>40.38
|
| 180 |
</td>
|
| 181 |
+
<td>39.87
|
| 182 |
</td>
|
| 183 |
+
<td>98.74%
|
| 184 |
</td>
|
| 185 |
</tr>
|
| 186 |
<tr>
|
| 187 |
<td><strong>Average</strong>
|
| 188 |
</td>
|
| 189 |
+
<td><strong>31.55</strong>
|
| 190 |
</td>
|
| 191 |
+
<td><strong>31.91</strong>
|
| 192 |
</td>
|
| 193 |
+
<td><strong>101.16%</strong>
|
| 194 |
</td>
|
| 195 |
</tr>
|
| 196 |
</table>
|