Buckets:

rtrm's picture
download
raw
37.6 kB
<meta charset="utf-8" /><meta name="hf:doc:metadata" content="{&quot;title&quot;:&quot;AWQ&quot;,&quot;local&quot;:&quot;awq&quot;,&quot;sections&quot;:[{&quot;title&quot;:&quot;Fused modules&quot;,&quot;local&quot;:&quot;fused-modules&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;ExLlama-v2 support&quot;,&quot;local&quot;:&quot;exllama-v2-support&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;CPU support&quot;,&quot;local&quot;:&quot;cpu-support&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2}],&quot;depth&quot;:1}">
<link href="/docs/transformers/pr_33913/en/_app/immutable/assets/0.e3b0c442.css" rel="modulepreload">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/entry/start.b67f883f.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/scheduler.25b97de1.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/singletons.62a184e0.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/index.e188933d.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/paths.51881b9e.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/entry/app.e436b1f2.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/index.d9030fc9.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/nodes/0.05e395f5.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/each.e59479a4.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/nodes/397.e8a8c8f8.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/Tip.baa67368.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/CodeBlock.e6cd0d95.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/EditOnGithub.91d95064.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/HfOption.1e589c90.js">
<link rel="modulepreload" href="/docs/transformers/pr_33913/en/_app/immutable/chunks/stores.c3f24f16.js"><!-- HEAD_svelte-u9bgzb_START --><meta name="hf:doc:metadata" content="{&quot;title&quot;:&quot;AWQ&quot;,&quot;local&quot;:&quot;awq&quot;,&quot;sections&quot;:[{&quot;title&quot;:&quot;Fused modules&quot;,&quot;local&quot;:&quot;fused-modules&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;ExLlama-v2 support&quot;,&quot;local&quot;:&quot;exllama-v2-support&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;CPU support&quot;,&quot;local&quot;:&quot;cpu-support&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2}],&quot;depth&quot;:1}"><!-- HEAD_svelte-u9bgzb_END --> <p></p> <h1 class="relative group"><a id="awq" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#awq"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>AWQ</span></h1> <div class="course-tip bg-gradient-to-br dark:bg-gradient-to-r before:border-green-500 dark:before:border-green-800 from-green-50 dark:from-gray-900 to-white dark:to-gray-950 border border-green-50 text-green-700 dark:text-gray-400"><p 
data-svelte-h="svelte-1yzcfmc">Try AWQ quantization with this <a href="https://colab.research.google.com/drive/1HzZH89yAXJaZgwJDhQj9LqSBux932BvY" rel="nofollow">notebook</a>!</p></div> <p data-svelte-h="svelte-1c5vyni"><a href="https://hf.co/papers/2306.00978" rel="nofollow">Activation-aware Weight Quantization (AWQ)</a> doesn’t quantize all the weights in a model, and instead, it preserves a small percentage of weights that are important for LLM performance. This significantly reduces quantization loss such that you can run models in 4-bit precision without experiencing any performance degradation.</p> <p data-svelte-h="svelte-vomabp">There are several libraries for quantizing models with the AWQ algorithm, such as <a href="https://github.com/mit-han-lab/llm-awq" rel="nofollow">llm-awq</a>, <a href="https://github.com/casper-hansen/AutoAWQ" rel="nofollow">autoawq</a> or <a href="https://huggingface.co/docs/optimum/main/en/intel/optimization_inc" rel="nofollow">optimum-intel</a>. Transformers supports loading models quantized with the llm-awq and autoawq libraries. 
This guide will show you how to load models quantized with autoawq, but the process is similar for llm-awq quantized models.</p> <p data-svelte-h="svelte-1ozln1k">Make sure you have autoawq installed:</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->pip install autoawq<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-1lxz9t">AWQ-quantized models can be identified by checking the <code>quantization_config</code> attribute in the model’s <a href="https://huggingface.co/TheBloke/zephyr-7B-alpha-AWQ/blob/main/config.json" rel="nofollow">config.json</a> file:</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code 
excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-punctuation">{</span>
<span class="hljs-attr">&quot;_name_or_path&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;/workspace/process/huggingfaceh4_zephyr-7b-alpha/source&quot;</span><span class="hljs-punctuation">,</span>
<span class="hljs-attr">&quot;architectures&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">[</span>
<span class="hljs-string">&quot;MistralForCausalLM&quot;</span>
<span class="hljs-punctuation">]</span><span class="hljs-punctuation">,</span>
...
...
...
<span class="hljs-attr">&quot;quantization_config&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">{</span>
<span class="hljs-attr">&quot;quant_method&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;awq&quot;</span><span class="hljs-punctuation">,</span>
<span class="hljs-attr">&quot;zero_point&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">true</span></span><span class="hljs-punctuation">,</span>
<span class="hljs-attr">&quot;group_size&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">128</span><span class="hljs-punctuation">,</span>
<span class="hljs-attr">&quot;bits&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">4</span><span class="hljs-punctuation">,</span>
<span class="hljs-attr">&quot;version&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;gemm&quot;</span>
<span class="hljs-punctuation">}</span>
<span class="hljs-punctuation">}</span><!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-uf9igq">A quantized model is loaded with the <a href="/docs/transformers/pr_33913/en/main_classes/model#transformers.PreTrainedModel.from_pretrained">from_pretrained()</a> method. If you loaded your model on the CPU, make sure to move it to a GPU device first. Use the <code>device_map</code> parameter to specify where to place the model:</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer
model_id = <span class="hljs-string">&quot;TheBloke/zephyr-7B-alpha-AWQ&quot;</span>
model = AutoModelForCausalLM.from_pretrained(model_id, device_map=<span class="hljs-string">&quot;cuda:0&quot;</span>)<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-19un5li">Loading an AWQ-quantized model automatically sets other weights to fp16 by default for performance reasons. If you want to load these other weights in a different format, use the <code>torch_dtype</code> parameter:</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer
<span class="hljs-keyword">import</span> torch
model_id = <span class="hljs-string">&quot;TheBloke/zephyr-7B-alpha-AWQ&quot;</span>
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float32)<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-1m8a19h">AWQ quantization can also be combined with <a href="../perf_infer_gpu_one#flashattention-2">FlashAttention-2</a> to further accelerate inference:</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer
model = AutoModelForCausalLM.from_pretrained(<span class="hljs-string">&quot;TheBloke/zephyr-7B-alpha-AWQ&quot;</span>, attn_implementation=<span class="hljs-string">&quot;flash_attention_2&quot;</span>, device_map=<span class="hljs-string">&quot;cuda:0&quot;</span>)<!-- HTML_TAG_END --></pre></div> <h2 class="relative group"><a id="fused-modules" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#fused-modules"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Fused modules</span></h2> <p data-svelte-h="svelte-jywid1">Fused modules offers improved accuracy and performance and it is supported out-of-the-box for AWQ modules for <a href="https://huggingface.co/meta-llama" rel="nofollow">Llama</a> and <a href="https://huggingface.co/mistralai/Mistral-7B-v0.1" rel="nofollow">Mistral</a> architectures, but you can also fuse AWQ modules for unsupported architectures.</p> <div class="course-tip course-tip-orange bg-gradient-to-br dark:bg-gradient-to-r before:border-orange-500 dark:before:border-orange-800 from-orange-50 dark:from-gray-900 to-white dark:to-gray-950 border border-orange-50 text-orange-700 dark:text-gray-400"><p 
data-svelte-h="svelte-hwgpn3">Fused modules cannot be combined with other optimization techniques such as FlashAttention-2.</p></div> <div class="flex space-x-2 items-center my-1.5 mr-8 h-7 !pl-0 -mx-3 md:mx-0"><div class="flex items-center border rounded-lg px-1.5 py-1 leading-none select-none text-smd border-gray-800 bg-black dark:bg-gray-700 text-white">supported architectures </div><div class="flex items-center border rounded-lg px-1.5 py-1 leading-none select-none text-smd text-gray-500 cursor-pointer opacity-90 hover:text-gray-700 dark:hover:text-gray-200 hover:shadow-sm">unsupported architectures </div></div> <div class="language-select"><p data-svelte-h="svelte-blasgb">To enable fused modules for supported architectures, create an <a href="/docs/transformers/pr_33913/en/main_classes/quantization#transformers.AwqConfig">AwqConfig</a> and set the parameters <code>fuse_max_seq_len</code> and <code>do_fuse=True</code>. The <code>fuse_max_seq_len</code> parameter is the total sequence length and it should include the context length and the expected generation length. 
You can set it to a larger value to be safe.</p> <p data-svelte-h="svelte-1zxz2i">For example, to fuse the AWQ modules of the <a href="https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-AWQ" rel="nofollow">TheBloke/Mistral-7B-OpenOrca-AWQ</a> model.</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AwqConfig, AutoModelForCausalLM
model_id = <span class="hljs-string">&quot;TheBloke/Mistral-7B-OpenOrca-AWQ&quot;</span>
quantization_config = AwqConfig(
bits=<span class="hljs-number">4</span>,
fuse_max_seq_len=<span class="hljs-number">512</span>,
do_fuse=<span class="hljs-literal">True</span>,
)
model = AutoModelForCausalLM.from_pretrained(model_id, quantization_config=quantization_config).to(<span class="hljs-number">0</span>)<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-81zmsw">The <a href="https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-AWQ" rel="nofollow">TheBloke/Mistral-7B-OpenOrca-AWQ</a> model was benchmarked with <code>batch_size=1</code> with and without fused modules.</p> <figcaption class="text-center text-gray-500 text-lg" data-svelte-h="svelte-grefv0">Unfused module</figcaption> <table data-svelte-h="svelte-19aactm"><thead><tr><th align="right">Batch Size</th> <th align="right">Prefill Length</th> <th align="right">Decode Length</th> <th align="right">Prefill tokens/s</th> <th align="right">Decode tokens/s</th> <th align="left">Memory (VRAM)</th></tr></thead> <tbody><tr><td align="right">1</td> <td align="right">32</td> <td align="right">32</td> <td align="right">60.0984</td> <td align="right">38.4537</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">64</td> <td align="right">64</td> <td align="right">1333.67</td> <td align="right">31.6604</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">128</td> <td align="right">128</td> <td align="right">2434.06</td> <td align="right">31.6272</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">256</td> <td align="right">256</td> <td align="right">3072.26</td> <td align="right">38.1731</td> <td align="left">4.50 GB (5.68%)</td></tr> <tr><td align="right">1</td> <td align="right">512</td> <td align="right">512</td> <td align="right">3184.74</td> <td align="right">31.6819</td> <td align="left">4.59 GB (5.80%)</td></tr> <tr><td align="right">1</td> <td align="right">1024</td> <td align="right">1024</td> <td align="right">3148.18</td> <td align="right">36.8031</td> <td align="left">4.81 GB (6.07%)</td></tr> <tr><td align="right">1</td> <td align="right">2048</td> 
<td align="right">2048</td> <td align="right">2927.33</td> <td align="right">35.2676</td> <td align="left">5.73 GB (7.23%)</td></tr></tbody></table> <figcaption class="text-center text-gray-500 text-lg" data-svelte-h="svelte-1r15bg7">Fused module</figcaption> <table data-svelte-h="svelte-19fczbk"><thead><tr><th align="right">Batch Size</th> <th align="right">Prefill Length</th> <th align="right">Decode Length</th> <th align="right">Prefill tokens/s</th> <th align="right">Decode tokens/s</th> <th align="left">Memory (VRAM)</th></tr></thead> <tbody><tr><td align="right">1</td> <td align="right">32</td> <td align="right">32</td> <td align="right">81.4899</td> <td align="right">80.2569</td> <td align="left">4.00 GB (5.05%)</td></tr> <tr><td align="right">1</td> <td align="right">64</td> <td align="right">64</td> <td align="right">1756.1</td> <td align="right">106.26</td> <td align="left">4.00 GB (5.05%)</td></tr> <tr><td align="right">1</td> <td align="right">128</td> <td align="right">128</td> <td align="right">2479.32</td> <td align="right">105.631</td> <td align="left">4.00 GB (5.06%)</td></tr> <tr><td align="right">1</td> <td align="right">256</td> <td align="right">256</td> <td align="right">1813.6</td> <td align="right">85.7485</td> <td align="left">4.01 GB (5.06%)</td></tr> <tr><td align="right">1</td> <td align="right">512</td> <td align="right">512</td> <td align="right">2848.9</td> <td align="right">97.701</td> <td align="left">4.11 GB (5.19%)</td></tr> <tr><td align="right">1</td> <td align="right">1024</td> <td align="right">1024</td> <td align="right">3044.35</td> <td align="right">87.7323</td> <td align="left">4.41 GB (5.57%)</td></tr> <tr><td align="right">1</td> <td align="right">2048</td> <td align="right">2048</td> <td align="right">2715.11</td> <td align="right">89.4709</td> <td align="left">5.57 GB (7.04%)</td></tr></tbody></table> <p data-svelte-h="svelte-gu8e8k">The speed and throughput of fused and unfused modules were also tested with the <a 
href="https://github.com/huggingface/optimum-benchmark" rel="nofollow">optimum-benchmark</a> library.</p> <div class="flex gap-4" data-svelte-h="svelte-1ke50ja"><div><img class="rounded-xl" src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/quantization/fused_forward_memory_plot.png" alt="forward peak memory per batch size"> <figcaption class="mt-2 text-center text-sm text-gray-500">forward peak memory/batch size</figcaption></div> <div><img class="rounded-xl" src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/quantization/fused_generate_throughput_plot.png" alt="generate throughput per batch size"> <figcaption class="mt-2 text-center text-sm text-gray-500">generate throughput/batch size</figcaption></div></div> </div> <h2 class="relative group"><a id="exllama-v2-support" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#exllama-v2-support"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>ExLlama-v2 support</span></h2> <p data-svelte-h="svelte-1wvygzu">Recent versions of <code>autoawq</code> support ExLlama-v2 kernels for faster prefill and decoding. 
To get started, first install the latest version of <code>autoawq</code> by running:</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->pip install git+https://github.com/casper-hansen/AutoAWQ.git<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-in32kr">Get started by passing an <code>AwqConfig()</code> with <code>version=&quot;exllama&quot;</code>.</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 
0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer, AwqConfig
quantization_config = AwqConfig(version=<span class="hljs-string">&quot;exllama&quot;</span>)
model = AutoModelForCausalLM.from_pretrained(
<span class="hljs-string">&quot;TheBloke/Mistral-7B-Instruct-v0.1-AWQ&quot;</span>,
quantization_config=quantization_config,
device_map=<span class="hljs-string">&quot;auto&quot;</span>,
)
input_ids = torch.randint(<span class="hljs-number">0</span>, <span class="hljs-number">100</span>, (<span class="hljs-number">1</span>, <span class="hljs-number">128</span>), dtype=torch.long, device=<span class="hljs-string">&quot;cuda&quot;</span>)
output = model(input_ids)
<span class="hljs-built_in">print</span>(output.logits)
tokenizer = AutoTokenizer.from_pretrained(<span class="hljs-string">&quot;TheBloke/Mistral-7B-Instruct-v0.1-AWQ&quot;</span>)
input_ids = tokenizer.encode(<span class="hljs-string">&quot;How to make a cake&quot;</span>, return_tensors=<span class="hljs-string">&quot;pt&quot;</span>).to(model.device)
output = model.generate(input_ids, do_sample=<span class="hljs-literal">True</span>, max_length=<span class="hljs-number">50</span>, pad_token_id=<span class="hljs-number">50256</span>)
<span class="hljs-built_in">print</span>(tokenizer.decode(output[<span class="hljs-number">0</span>], skip_special_tokens=<span class="hljs-literal">True</span>))<!-- HTML_TAG_END --></pre></div> <div class="course-tip course-tip-orange bg-gradient-to-br dark:bg-gradient-to-r before:border-orange-500 dark:before:border-orange-800 from-orange-50 dark:from-gray-900 to-white dark:to-gray-950 border border-orange-50 text-orange-700 dark:text-gray-400"><p data-svelte-h="svelte-147si7w">Note this feature is supported on AMD GPUs.</p></div> <h2 class="relative group"><a id="cpu-support" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#cpu-support"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>CPU support</span></h2> <p data-svelte-h="svelte-co0lac">Recent versions of <code>autoawq</code> supports CPU with ipex op optimizations. 
To get started, first install the latest version of <code>autoawq</code> by running:</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->pip install intel-extension-for-pytorch
pip install git+https://github.com/casper-hansen/AutoAWQ.git<!-- HTML_TAG_END --></pre></div> <p data-svelte-h="svelte-i49rd">Get started by passing an <code>AwqConfig()</code> with <code>version=&quot;ipex&quot;</code>.</p> <div class="code-block relative"><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM, AutoTokenizer, AwqConfig
quantization_config = AwqConfig(version=<span class="hljs-string">&quot;ipex&quot;</span>)
model = AutoModelForCausalLM.from_pretrained(
<span class="hljs-string">&quot;TheBloke/TinyLlama-1.1B-Chat-v0.3-AWQ&quot;</span>,
quantization_config=quantization_config,
device_map=<span class="hljs-string">&quot;cpu&quot;</span>,
)
input_ids = torch.randint(<span class="hljs-number">0</span>, <span class="hljs-number">100</span>, (<span class="hljs-number">1</span>, <span class="hljs-number">128</span>), dtype=torch.long, device=<span class="hljs-string">&quot;cpu&quot;</span>)
output = model(input_ids)
<span class="hljs-built_in">print</span>(output.logits)
tokenizer = AutoTokenizer.from_pretrained(<span class="hljs-string">&quot;TheBloke/TinyLlama-1.1B-Chat-v0.3-AWQ&quot;</span>)
input_ids = tokenizer.encode(<span class="hljs-string">&quot;How to make a cake&quot;</span>, return_tensors=<span class="hljs-string">&quot;pt&quot;</span>)
pad_token_id = tokenizer.eos_token_id
output = model.generate(input_ids, do_sample=<span class="hljs-literal">True</span>, max_length=<span class="hljs-number">50</span>, pad_token_id=pad_token_id)
<span class="hljs-built_in">print</span>(tokenizer.decode(output[<span class="hljs-number">0</span>], skip_special_tokens=<span class="hljs-literal">True</span>))<!-- HTML_TAG_END --></pre></div> <div class="course-tip course-tip-orange bg-gradient-to-br dark:bg-gradient-to-r before:border-orange-500 dark:before:border-orange-800 from-orange-50 dark:from-gray-900 to-white dark:to-gray-950 border border-orange-50 text-orange-700 dark:text-gray-400"><p data-svelte-h="svelte-1jtmxte">Note this feature is supported on Intel CPUs.</p></div> <a class="!text-gray-400 !no-underline text-sm flex items-center not-prose mt-4" href="https://github.com/huggingface/transformers/blob/main/docs/source/en/quantization/awq.md" target="_blank"><span data-svelte-h="svelte-1kd6by1">&lt;</span> <span data-svelte-h="svelte-x0xyl0">&gt;</span> <span data-svelte-h="svelte-1dajgef"><span class="underline ml-1.5">Update</span> on GitHub</span></a> <p></p>
<script>
// NOTE(review): appears to be SvelteKit's auto-generated client bootstrap for
// this pre-rendered docs page. The suffixed global name, asset paths, and
// node_ids are coupled to the built entry modules — do not edit by hand.
{
// Deployment-scoped runtime config read by the kit entry module.
__sveltekit_z647wz = {
assets: "/docs/transformers/pr_33913/en",
base: "/docs/transformers/pr_33913/en",
env: {}
};
// Hydration mount point: the parent element of this inline script.
const element = document.currentScript.parentElement;
// Serialized per-route data for the two route nodes below (none embedded here).
const data = [null,null];
// Load the kit runtime and the app manifest in parallel, then hydrate.
Promise.all([
import("/docs/transformers/pr_33913/en/_app/immutable/entry/start.b67f883f.js"),
import("/docs/transformers/pr_33913/en/_app/immutable/entry/app.e436b1f2.js")
]).then(([kit, app]) => {
kit.start(app, element, {
node_ids: [0, 397],
data,
form: null,
error: null
});
});
}
</script>

Xet Storage Details

Size:
37.6 kB
·
Xet hash:
7e0c7e6e57938ac8a0d499063a973913ff108a83c419f94d491bebc2d58579d3

Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.