Buckets:
| <meta charset="utf-8" /><meta name="hf:doc:metadata" content="{"title":"AWQ","local":"awq","sections":[{"title":"Fused modules","local":"fused-modules","sections":[],"depth":2},{"title":"ExLlamaV2","local":"exllamav2","sections":[],"depth":2},{"title":"CPU","local":"cpu","sections":[],"depth":2},{"title":"Resources","local":"resources","sections":[],"depth":2}],"depth":1}"> | |
| <link href="/docs/transformers/pr_33892/en/_app/immutable/assets/0.e3b0c442.css" rel="modulepreload"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/entry/start.b2c4257a.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/scheduler.31fdf58d.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/singletons.9860629f.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/index.252883d5.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/paths.e85c0ec8.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/entry/app.05ef1f97.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/preload-helper.40847a0e.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/index.2f76fdf0.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/nodes/0.ca4aafa4.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/each.e59479a4.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/nodes/523.2e5c41db.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/CopyLLMTxtMenu.ff482081.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/MermaidChart.svelte_svelte_type_style_lang.71f274cc.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/IconCopy.ac192424.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/CodeBlock.ab12f8e1.js"> | |
| <link rel="modulepreload" href="/docs/transformers/pr_33892/en/_app/immutable/chunks/HfOption.fb051768.js"><!-- HEAD_svelte-u9bgzb_START --><meta name="hf:doc:metadata" content="{"title":"AWQ","local":"awq","sections":[{"title":"Fused modules","local":"fused-modules","sections":[],"depth":2},{"title":"ExLlamaV2","local":"exllamav2","sections":[],"depth":2},{"title":"CPU","local":"cpu","sections":[],"depth":2},{"title":"Resources","local":"resources","sections":[],"depth":2}],"depth":1}"><!-- HEAD_svelte-u9bgzb_END --> <p></p> <div class="items-center shrink-0 min-w-[100px] max-sm:min-w-[50px] justify-end ml-auto flex" style="float: right; margin-left: 10px; display: inline-flex; position: relative; z-index: 10;"><div class="inline-flex rounded-md max-sm:rounded-sm"><button class="inline-flex items-center gap-1 max-sm:gap-0.5 h-6 max-sm:h-5 px-2 max-sm:px-1.5 text-[11px] max-sm:text-[9px] font-medium text-gray-800 border border-r-0 rounded-l-md max-sm:rounded-l-sm border-gray-200 bg-white hover:shadow-inner dark:border-gray-850 dark:bg-gray-950 dark:text-gray-200 dark:hover:bg-gray-800" aria-live="polite"><span class="inline-flex items-center justify-center rounded-md p-0.5 max-sm:p-0"><svg class="w-3 h-3 max-sm:w-2.5 max-sm:h-2.5" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg></span> <span>Copy page</span></button> <button class="inline-flex items-center justify-center w-6 max-sm:w-5 h-6 max-sm:h-5 disabled:pointer-events-none text-sm text-gray-500 hover:text-gray-700 dark:hover:text-white rounded-r-md max-sm:rounded-r-sm border border-l transition border-gray-200 bg-white hover:shadow-inner dark:border-gray-850 dark:bg-gray-950 dark:text-gray-200 dark:hover:bg-gray-800" aria-haspopup="menu" aria-expanded="false" aria-label="Open copy menu"><svg class="transition-transform text-gray-400 overflow-visible w-3 h-3 max-sm:w-2.5 max-sm:h-2.5 rotate-0" width="1em" height="1em" viewBox="0 0 12 7" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M1 1L6 6L11 1" stroke="currentColor"></path></svg></button></div> </div> <h1 class="relative group"><a id="awq" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#awq"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>AWQ</span></h1> <p data-svelte-h="svelte-1b3motd"><a href="https://hf.co/papers/2306.00978" rel="nofollow">Activation-aware Weight Quantization (AWQ)</a> preserves a small fraction of the weights that are important for LLM performance to compress a model to 4-bits with minimal performance degradation.</p> <p data-svelte-h="svelte-vomabp">There are several libraries for quantizing models with the AWQ algorithm, such as <a href="https://github.com/mit-han-lab/llm-awq" rel="nofollow">llm-awq</a>, <a href="https://github.com/casper-hansen/AutoAWQ" rel="nofollow">autoawq</a> or <a href="https://huggingface.co/docs/optimum/main/en/intel/optimization_inc" rel="nofollow">optimum-intel</a>. Transformers supports loading models quantized with the llm-awq and autoawq libraries. This guide will show you how to load models quantized with autoawq, but the process is similar for llm-awq quantized models.</p> <p data-svelte-h="svelte-14s6he6">Run the command below to install autoawq</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->pip install autoawq<!-- HTML_TAG_END --></pre></div> <blockquote class="warning" data-svelte-h="svelte-16pppmu"><p>AutoAWQ downgrades Transformers to version 4.47.1. If you want to do inference with AutoAWQ, you may need to reinstall your Transformers’ version after installing AutoAWQ.</p></blockquote> <p data-svelte-h="svelte-og22vq">Identify an AWQ-quantized model by checking the <code>quant_method</code> key in the models <a href="https://huggingface.co/TheBloke/zephyr-7B-alpha-AWQ/blob/main/config.json" rel="nofollow">config.json</a> file.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-punctuation">{</span> | |
| <span class="hljs-attr">"_name_or_path"</span><span class="hljs-punctuation">:</span> <span class="hljs-string">"/workspace/process/huggingfaceh4_zephyr-7b-alpha/source"</span><span class="hljs-punctuation">,</span> | |
| <span class="hljs-attr">"architectures"</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">[</span> | |
| <span class="hljs-string">"MistralForCausalLM"</span> | |
| <span class="hljs-punctuation">]</span><span class="hljs-punctuation">,</span> | |
| ... | |
| ... | |
| ... | |
| <span class="hljs-attr">"quantization_config"</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">{</span> | |
| <span class="hljs-attr">"quant_method"</span><span class="hljs-punctuation">:</span> <span class="hljs-string">"awq"</span><span class="hljs-punctuation">,</span> | |
| <span class="hljs-attr">"zero_point"</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">true</span></span><span class="hljs-punctuation">,</span> | |
| <span class="hljs-attr">"group_size"</span><span class="hljs-punctuation">:</span> <span class="hljs-number">128</span><span class="hljs-punctuation">,</span> | |
| <span class="hljs-attr">"bits"</span><span class="hljs-punctuation">:</span> <span class="hljs-number">4</span><span class="hljs-punctuation">,</span> | |
| <span class="hljs-attr">"version"</span><span class="hljs-punctuation">:</span> <span class="hljs-string">"gemm"</span> | |
| <span class="hljs-punctuation">}</span> | |
| <span class="hljs-punctuation">}</span><!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-p8bve5">Load the AWQ-quantized model with <a href="/docs/transformers/pr_33892/en/main_classes/model#transformers.PreTrainedModel.from_pretrained">from_pretrained()</a>. This automatically sets the other weights to fp16 by default for performance reasons. Use the <code>dtype</code> parameter to load these other weights in a different format.</p> <p data-svelte-h="svelte-1pq80c7">If the model is loaded on the CPU, use the <code>device_map</code> parameter to move it to an accelerator.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer | |
| <span class="hljs-keyword">from</span> accelerate <span class="hljs-keyword">import</span> Accelerator | |
| <span class="hljs-keyword">import</span> torch | |
| device = Accelerator().device | |
| model = AutoModelForCausalLM.from_pretrained( | |
| <span class="hljs-string">"TheBloke/zephyr-7B-alpha-AWQ"</span>, | |
| dtype=torch.float32, | |
| device_map=device | |
| )<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-42py21">Use <code>attn_implementation</code> to enable <a href="../perf_infer_gpu_one#flashattention-2">FlashAttention2</a> to further accelerate inference.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer | |
| model = AutoModelForCausalLM.from_pretrained( | |
| <span class="hljs-string">"TheBloke/zephyr-7B-alpha-AWQ"</span>, | |
| attn_implementation=<span class="hljs-string">"flash_attention_2"</span>, | |
| device_map=<span class="hljs-string">"cuda:0"</span> | |
| )<!-- HTML_TAG_END --></pre></div> <h2 class="relative group"><a id="fused-modules" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#fused-modules"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Fused modules</span></h2> <p data-svelte-h="svelte-32kkq6">Fused modules offer improved accuracy and performance. They are supported out-of-the-box for AWQ modules for <a href="https://huggingface.co/meta-llama" rel="nofollow">Llama</a> and <a href="https://huggingface.co/mistralai/Mistral-7B-v0.1" rel="nofollow">Mistral</a> architectures, but you can also fuse AWQ modules for unsupported architectures.</p> <blockquote class="warning" data-svelte-h="svelte-1d1t0bc"><p>Fused modules cannot be combined with other optimization techniques such as FlashAttention2.</p></blockquote> <div class="flex space-x-2 items-center my-1.5 mr-8 h-7 !pl-0 -mx-3 md:mx-0"><div class="flex items-center border rounded-lg px-1.5 py-1 leading-none select-none text-smd border-gray-800 bg-black dark:bg-gray-700 text-white">supported architectures </div><div class="flex items-center border rounded-lg px-1.5 py-1 leading-none select-none text-smd text-gray-500 cursor-pointer opacity-90 hover:text-gray-700 dark:hover:text-gray-200 hover:shadow-sm">unsupported architectures </div></div> <div class="language-select"><p data-svelte-h="svelte-1mb6dzt">Create an <a href="/docs/transformers/pr_33892/en/main_classes/quantization#transformers.AwqConfig">AwqConfig</a> and set the parameters <code>fuse_max_seq_len</code> and <code>do_fuse=True</code> to enable fused modules. The <code>fuse_max_seq_len</code> parameter is the total sequence length and it should include the context length and the expected generation length. Set it to a larger value to be safe.</p> <p data-svelte-h="svelte-1qsxkvh">The example below fuses the AWQ modules of the <a href="https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-AWQ" rel="nofollow">TheBloke/Mistral-7B-OpenOrca-AWQ</a> model.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">import</span> torch | |
| <span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AwqConfig, AutoModelForCausalLM | |
| quantization_config = AwqConfig( | |
| bits=<span class="hljs-number">4</span>, | |
| fuse_max_seq_len=<span class="hljs-number">512</span>, | |
| do_fuse=<span class="hljs-literal">True</span>, | |
| ) | |
| model = AutoModelForCausalLM.from_pretrained( | |
| <span class="hljs-string">"TheBloke/Mistral-7B-OpenOrca-AWQ"</span>, | |
| quantization_config=quantization_config | |
| ).to(<span class="hljs-number">0</span>)<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-81zmsw">The <a href="https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-AWQ" rel="nofollow">TheBloke/Mistral-7B-OpenOrca-AWQ</a> model was benchmarked with <code>batch_size=1</code> with and without fused modules.</p> <figcaption class="text-center text-gray-500 text-lg" data-svelte-h="svelte-grefv0">Unfused module</figcaption> <table data-svelte-h="svelte-19aactm"><thead><tr><th align="right">Batch Size</th> <th align="right">Prefill Length</th> <th align="right">Decode Length</th> <th align="right">Prefill tokens/s</th> <th align="right">Decode tokens/s</th> <th align="left">Memory (VRAM)</th></tr></thead> <tbody><tr><td align="right">1</td> <td align="right">32</td> <td align="right">32</td> <td align="right">60.0984</td> <td align="right">38.4537</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">64</td> <td align="right">64</td> <td align="right">1333.67</td> <td align="right">31.6604</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">128</td> <td align="right">128</td> <td align="right">2434.06</td> <td align="right">31.6272</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">256</td> <td align="right">256</td> <td align="right">3072.26</td> <td align="right">38.1731</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">512</td> <td align="right">512</td> <td align="right">3184.74</td> <td align="right">31.6819</td> <td align="left">4.59 GB (5.80%)</td></tr> <tr><td align="right">1</td> <td align="right">1024</td> <td align="right">1024</td> <td align="right">3148.18</td> <td align="right">36.8031</td> <td align="left">4.81 GB (6.07%)</td></tr> <tr><td align="right">1</td> <td align="right">2048</td> <td align="right">2048</td> <td align="right">2927.33</td> <td align="right">35.2676</td> <td align="left">5.73 GB (7.23%)</td></tr></tbody></table> <figcaption class="text-center text-gray-500 text-lg" data-svelte-h="svelte-1r15bg7">Fused module</figcaption> <table data-svelte-h="svelte-19fczbk"><thead><tr><th align="right">Batch Size</th> <th align="right">Prefill Length</th> <th align="right">Decode Length</th> <th align="right">Prefill tokens/s</th> <th align="right">Decode tokens/s</th> <th align="left">Memory (VRAM)</th></tr></thead> <tbody><tr><td align="right">1</td> <td align="right">32</td> <td align="right">32</td> <td align="right">81.4899</td> <td align="right">80.2569</td> <td align="left">4.00 GB (5.05%)</td></tr> <tr><td align="right">1</td> <td align="right">64</td> <td align="right">64</td> <td align="right">1756.1</td> <td align="right">106.26</td> <td align="left">4.00 GB (5.05%)</td></tr> <tr><td align="right">1</td> <td align="right">128</td> <td align="right">128</td> <td align="right">2479.32</td> <td align="right">105.631</td> <td align="left">4.00 GB (5.06%)</td></tr> <tr><td align="right">1</td> <td align="right">256</td> <td align="right">256</td> <td align="right">1813.6</td> <td align="right">85.7485</td> <td align="left">4.01 GB (5.06%)</td></tr> <tr><td align="right">1</td> <td align="right">512</td> <td align="right">512</td> <td align="right">2848.9</td> <td align="right">97.701</td> <td align="left">4.11 GB (5.19%)</td></tr> <tr><td align="right">1</td> <td align="right">1024</td> <td align="right">1024</td> <td align="right">3044.35</td> <td align="right">87.7323</td> <td align="left">4.41 GB (5.57%)</td></tr> <tr><td align="right">1</td> <td align="right">2048</td> <td align="right">2048</td> <td align="right">2715.11</td> <td align="right">89.4709</td> <td align="left">5.57 GB (7.04%)</td></tr></tbody></table> <p data-svelte-h="svelte-gu8e8k">The speed and throughput of fused and unfused modules were also tested with the <a href="https://github.com/huggingface/optimum-benchmark" rel="nofollow">optimum-benchmark</a> library.</p> <div class="flex gap-4" data-svelte-h="svelte-1ke50ja"><div><img class="rounded-xl" src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/quantization/fused_forward_memory_plot.png" alt="generate throughput per batch size"> <figcaption class="mt-2 text-center text-sm text-gray-500">forward peak memory/batch size</figcaption></div> <div><img class="rounded-xl" src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/quantization/fused_generate_throughput_plot.png" alt="forward latency per batch size"> <figcaption class="mt-2 text-center text-sm text-gray-500">generate throughput/batch size</figcaption></div></div> </div> <h2 class="relative group"><a id="exllamav2" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#exllamav2"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>ExLlamaV2</span></h2> <p data-svelte-h="svelte-1frcaex"><a href="https://github.com/turboderp/exllamav2" rel="nofollow">ExLlamaV2</a> kernels support faster prefill and decoding. Run the command below to install the latest version of autoawq with ExLlamaV2 support.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->pip install git+https://github.com/casper-hansen/AutoAWQ.git<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-1t6xcwc">Set <code>version="exllama"</code> in <a href="/docs/transformers/pr_33892/en/main_classes/quantization#transformers.AwqConfig">AwqConfig</a> to enable ExLlamaV2 kernels.</p> <blockquote class="tip" data-svelte-h="svelte-37zgf9"><p>ExLlamaV2 is supported on AMD GPUs.</p></blockquote> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">import</span> torch | |
| <span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer, AwqConfig | |
| quantization_config = AwqConfig(version=<span class="hljs-string">"exllama"</span>) | |
| model = AutoModelForCausalLM.from_pretrained( | |
| <span class="hljs-string">"TheBloke/Mistral-7B-Instruct-v0.1-AWQ"</span>, | |
| quantization_config=quantization_config, | |
| device_map=<span class="hljs-string">"auto"</span>, | |
| )<!-- HTML_TAG_END --></pre></div> <h2 class="relative group"><a id="cpu" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#cpu"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>CPU</span></h2> <p data-svelte-h="svelte-30l4yi"><a href="https://intel.github.io/intel-extension-for-pytorch/cpu/latest/" rel="nofollow">Intel Extension for PyTorch (IPEX)</a> is designed to enable performance optimizations on Intel hardware. Run the command below to install the latest version of autoawq with IPEX support.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->pip install intel-extension-for-pytorch <span class="hljs-comment"># for IPEX-GPU refer to https://intel.github.io/intel-extension-for-pytorch/xpu/2.5.10+xpu/ </span> | |
| pip install git+https://github.com/casper-hansen/AutoAWQ.git<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-1rv1nfu">Set <code>version="ipex"</code> in <a href="/docs/transformers/pr_33892/en/main_classes/quantization#transformers.AwqConfig">AwqConfig</a> to enable ExLlamaV2 kernels.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">import</span> torch | |
| <span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer, AwqConfig | |
| device = <span class="hljs-string">"cpu"</span> <span class="hljs-comment"># set to "xpu" for Intel GPU</span> | |
| quantization_config = AwqConfig(version=<span class="hljs-string">"ipex"</span>) | |
| model = AutoModelForCausalLM.from_pretrained( | |
| <span class="hljs-string">"TheBloke/TinyLlama-1.1B-Chat-v0.3-AWQ"</span>, | |
| quantization_config=quantization_config, | |
| device_map=device, | |
| )<!-- HTML_TAG_END --></pre></div> <h2 class="relative group"><a id="resources" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#resources"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Resources</span></h2> <p data-svelte-h="svelte-o4txta">Run the AWQ demo <a href="https://colab.research.google.com/drive/1HzZH89yAXJaZgwJDhQj9LqSBux932BvY#scrollTo=Wwsg6nCwoThm" rel="nofollow">notebook</a> for more examples of how to quantize a model, push a quantized model to the Hub, and more.</p> <a class="!text-gray-400 !no-underline text-sm flex items-center not-prose mt-4" href="https://github.com/huggingface/transformers/blob/main/docs/source/en/quantization/awq.md" target="_blank"><svg class="mr-1" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M31,16l-7,7l-1.41-1.41L28.17,16l-5.58-5.59L24,9l7,7z"></path><path d="M1,16l7-7l1.41,1.41L3.83,16l5.58,5.59L8,23l-7-7z"></path><path d="M12.419,25.484L17.639,6.552l1.932,0.518L14.351,26.002z"></path></svg> <span data-svelte-h="svelte-zjs2n5"><span class="underline">Update</span> on GitHub</span></a> <p></p> | |
| <script> | |
| { | |
| __sveltekit_16tnnm8 = { | |
| assets: "/docs/transformers/pr_33892/en", | |
| base: "/docs/transformers/pr_33892/en", | |
| env: {} | |
| }; | |
| const element = document.currentScript.parentElement; | |
| const data = [null,null]; | |
| Promise.all([ | |
| import("/docs/transformers/pr_33892/en/_app/immutable/entry/start.b2c4257a.js"), | |
| import("/docs/transformers/pr_33892/en/_app/immutable/entry/app.05ef1f97.js") | |
| ]).then(([kit, app]) => { | |
| kit.start(app, element, { | |
| node_ids: [0, 523], | |
| data, | |
| form: null, | |
| error: null | |
| }); | |
| }); | |
| } | |
| </script> | |
Xet Storage Details
- Size:
- 37.4 kB
- Xet hash:
- 71c316a9e70ebb1a9a4379121711c6e3e442e9540d8faaaf58fae13ec87899bc
·
Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.