Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- bin/accelerate +10 -0
- bin/accelerate-estimate-memory +10 -0
- bin/activate.nu +102 -0
- bin/cpuinfo +10 -0
- bin/distro +10 -0
- bin/dotenv +10 -0
- bin/f2py +10 -0
- bin/fastapi +10 -0
- bin/flashinfer +10 -0
- bin/flask +10 -0
- bin/get_gprof +75 -0
- bin/gguf-convert-endian +10 -0
- bin/gguf-editor-gui +10 -0
- bin/httpx +10 -0
- bin/jsonschema +10 -0
- bin/proton-viewer +10 -0
- bin/pydoc.bat +22 -0
- bin/ray +10 -0
- bin/torchfrtrace +10 -0
- bin/transformers +10 -0
- bin/tune +10 -0
- bin/vllm +10 -0
- bin/websockets +10 -0
- lib/python3.13/site-packages/__editable___easysteer_0_1_0_finder.py +85 -0
- lib/python3.13/site-packages/__editable___vllm_0_1_dev10891_ge8dee828a_precompiled_finder.py +85 -0
- lib/python3.13/site-packages/_soundfile.py +11 -0
- lib/python3.13/site-packages/_virtualenv.py +101 -0
- lib/python3.13/site-packages/build_backend.py +164 -0
- lib/python3.13/site-packages/build_utils.py +46 -0
- lib/python3.13/site-packages/email_validator-2.3.0.dist-info/INSTALLER +1 -0
- lib/python3.13/site-packages/email_validator-2.3.0.dist-info/METADATA +466 -0
- lib/python3.13/site-packages/email_validator-2.3.0.dist-info/RECORD +19 -0
- lib/python3.13/site-packages/email_validator-2.3.0.dist-info/REQUESTED +0 -0
- lib/python3.13/site-packages/email_validator-2.3.0.dist-info/entry_points.txt +2 -0
- lib/python3.13/site-packages/email_validator-2.3.0.dist-info/top_level.txt +1 -0
- lib/python3.13/site-packages/example.py +169 -0
- lib/python3.13/site-packages/gguf/__init__.py +9 -0
- lib/python3.13/site-packages/gguf/constants.py +2438 -0
- lib/python3.13/site-packages/gguf/lazy.py +223 -0
- lib/python3.13/site-packages/gguf/py.typed +0 -0
- lib/python3.13/site-packages/gguf/quants.py +1269 -0
- lib/python3.13/site-packages/gguf/tensor_mapping.py +1280 -0
- lib/python3.13/site-packages/isympy.py +342 -0
- lib/python3.13/site-packages/lark-1.2.2.dist-info/INSTALLER +1 -0
- lib/python3.13/site-packages/lark-1.2.2.dist-info/LICENSE +18 -0
- lib/python3.13/site-packages/lark-1.2.2.dist-info/METADATA +47 -0
- lib/python3.13/site-packages/lark-1.2.2.dist-info/RECORD +48 -0
- lib/python3.13/site-packages/lark-1.2.2.dist-info/REQUESTED +0 -0
- lib/python3.13/site-packages/lark-1.2.2.dist-info/WHEEL +5 -0
- lib/python3.13/site-packages/lark-1.2.2.dist-info/top_level.txt +1 -0
bin/accelerate
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from accelerate.commands.accelerate_cli import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/accelerate-estimate-memory
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from accelerate.commands.estimate import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/activate.nu
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2020-202x The virtualenv developers
|
| 2 |
+
#
|
| 3 |
+
# Permission is hereby granted, free of charge, to any person obtaining
|
| 4 |
+
# a copy of this software and associated documentation files (the
|
| 5 |
+
# "Software"), to deal in the Software without restriction, including
|
| 6 |
+
# without limitation the rights to use, copy, modify, merge, publish,
|
| 7 |
+
# distribute, sublicense, and/or sell copies of the Software, and to
|
| 8 |
+
# permit persons to whom the Software is furnished to do so, subject to
|
| 9 |
+
# the following conditions:
|
| 10 |
+
#
|
| 11 |
+
# The above copyright notice and this permission notice shall be
|
| 12 |
+
# included in all copies or substantial portions of the Software.
|
| 13 |
+
#
|
| 14 |
+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 15 |
+
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 16 |
+
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 17 |
+
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
| 18 |
+
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
| 19 |
+
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
| 20 |
+
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 21 |
+
|
| 22 |
+
# virtualenv activation module:
|
| 23 |
+
# - Activate with `overlay use activate.nu`
|
| 24 |
+
# - Deactivate with `deactivate`, as usual
|
| 25 |
+
#
|
| 26 |
+
# To customize the overlay name, you can call `overlay use activate.nu as foo`, but then simply `deactivate` won't work
|
| 27 |
+
# because it is just an alias to hide the "activate" overlay. You'd need to call `overlay hide foo` manually.
|
| 28 |
+
|
| 29 |
+
module warning {
|
| 30 |
+
export-env {
|
| 31 |
+
const file = path self
|
| 32 |
+
error make -u {
|
| 33 |
+
msg: $"`($file | path basename)` is meant to be used with `overlay use`, not `source`"
|
| 34 |
+
}
|
| 35 |
+
}
|
| 36 |
+
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
use warning
|
| 40 |
+
|
| 41 |
+
export-env {
|
| 42 |
+
|
| 43 |
+
let nu_ver = (version | get version | split row '.' | take 2 | each { into int })
|
| 44 |
+
if $nu_ver.0 == 0 and $nu_ver.1 < 106 {
|
| 45 |
+
error make {
|
| 46 |
+
msg: 'virtualenv Nushell activation requires Nushell 0.106 or greater.'
|
| 47 |
+
}
|
| 48 |
+
}
|
| 49 |
+
|
| 50 |
+
def is-string [x] {
|
| 51 |
+
($x | describe) == 'string'
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
def has-env [...names] {
|
| 55 |
+
$names | each {|n| $n in $env } | all {|i| $i }
|
| 56 |
+
}
|
| 57 |
+
|
| 58 |
+
def is-env-true [name: string] {
|
| 59 |
+
if (has-env $name) {
|
| 60 |
+
let val = ($env | get --optional $name)
|
| 61 |
+
if ($val | describe) == 'bool' {
|
| 62 |
+
$val
|
| 63 |
+
} else {
|
| 64 |
+
not ($val | is-empty)
|
| 65 |
+
}
|
| 66 |
+
} else {
|
| 67 |
+
false
|
| 68 |
+
}
|
| 69 |
+
}
|
| 70 |
+
|
| 71 |
+
let virtual_env = '/mnt/nw/home/m.yu/repos/EasySteer/.venv'
|
| 72 |
+
let bin = 'bin'
|
| 73 |
+
let path_name = if (has-env 'Path') { 'Path' } else { 'PATH' }
|
| 74 |
+
let venv_path = ([$virtual_env $bin] | path join)
|
| 75 |
+
let new_path = ($env | get $path_name | prepend $venv_path)
|
| 76 |
+
let virtual_env_prompt = if ('EasySteer' | is-empty) {
|
| 77 |
+
($virtual_env | path basename)
|
| 78 |
+
} else {
|
| 79 |
+
'EasySteer'
|
| 80 |
+
}
|
| 81 |
+
let new_env = { $path_name: $new_path VIRTUAL_ENV: $virtual_env VIRTUAL_ENV_PROMPT: $virtual_env_prompt }
|
| 82 |
+
let old_prompt_command = if (has-env 'PROMPT_COMMAND') { $env.PROMPT_COMMAND } else { '' }
|
| 83 |
+
let new_env = if (is-env-true 'VIRTUAL_ENV_DISABLE_PROMPT') {
|
| 84 |
+
$new_env
|
| 85 |
+
} else {
|
| 86 |
+
let virtual_prefix = $'(char lparen)($virtual_env_prompt)(char rparen) '
|
| 87 |
+
let new_prompt = if (has-env 'PROMPT_COMMAND') {
|
| 88 |
+
if ('closure' in ($old_prompt_command | describe)) {
|
| 89 |
+
{|| $'($virtual_prefix)(do $old_prompt_command)' }
|
| 90 |
+
} else {
|
| 91 |
+
{|| $'($virtual_prefix)($old_prompt_command)' }
|
| 92 |
+
}
|
| 93 |
+
} else {
|
| 94 |
+
{|| $'($virtual_prefix)' }
|
| 95 |
+
}
|
| 96 |
+
$new_env | merge { PROMPT_COMMAND: $new_prompt VIRTUAL_PREFIX: $virtual_prefix }
|
| 97 |
+
}
|
| 98 |
+
load-env $new_env
|
| 99 |
+
}
|
| 100 |
+
|
| 101 |
+
export alias pydoc = python -m pydoc
|
| 102 |
+
export alias deactivate = overlay hide activate
|
bin/cpuinfo
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from cpuinfo import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/distro
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from distro.distro import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/dotenv
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from dotenv.__main__ import cli
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(cli())
|
bin/f2py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from numpy.f2py.f2py2e import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/fastapi
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from fastapi.cli import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/flashinfer
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from flashinfer.__main__ import cli
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(cli())
|
bin/flask
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from flask.cli import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/get_gprof
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
#
|
| 3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
| 4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
| 5 |
+
# Copyright (c) 2016-2025 The Uncertainty Quantification Foundation.
|
| 6 |
+
# License: 3-clause BSD. The full license text is available at:
|
| 7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
| 8 |
+
'''
|
| 9 |
+
build profile graph for the given instance
|
| 10 |
+
|
| 11 |
+
running:
|
| 12 |
+
$ get_gprof <args> <instance>
|
| 13 |
+
|
| 14 |
+
executes:
|
| 15 |
+
gprof2dot -f pstats <args> <type>.prof | dot -Tpng -o <type>.call.png
|
| 16 |
+
|
| 17 |
+
where:
|
| 18 |
+
<args> are arguments for gprof2dot, such as "-n 5 -e 5"
|
| 19 |
+
<instance> is code to create the instance to profile
|
| 20 |
+
<type> is the class of the instance (i.e. type(instance))
|
| 21 |
+
|
| 22 |
+
For example:
|
| 23 |
+
$ get_gprof -n 5 -e 1 "import numpy; numpy.array([1,2])"
|
| 24 |
+
|
| 25 |
+
will create 'ndarray.call.png' with the profile graph for numpy.array([1,2]),
|
| 26 |
+
where '-n 5' eliminates nodes below 5% threshold, similarly '-e 1' eliminates
|
| 27 |
+
edges below 1% threshold
|
| 28 |
+
'''
|
| 29 |
+
|
| 30 |
+
if __name__ == "__main__":
|
| 31 |
+
import sys
|
| 32 |
+
if len(sys.argv) < 2:
|
| 33 |
+
print ("Please provide an object instance (e.g. 'import math; math.pi')")
|
| 34 |
+
sys.exit()
|
| 35 |
+
# grab args for gprof2dot
|
| 36 |
+
args = sys.argv[1:-1]
|
| 37 |
+
args = ' '.join(args)
|
| 38 |
+
# last arg builds the object
|
| 39 |
+
obj = sys.argv[-1]
|
| 40 |
+
obj = obj.split(';')
|
| 41 |
+
# multi-line prep for generating an instance
|
| 42 |
+
for line in obj[:-1]:
|
| 43 |
+
exec(line)
|
| 44 |
+
# one-line generation of an instance
|
| 45 |
+
try:
|
| 46 |
+
obj = eval(obj[-1])
|
| 47 |
+
except Exception:
|
| 48 |
+
print ("Error processing object instance")
|
| 49 |
+
sys.exit()
|
| 50 |
+
|
| 51 |
+
# get object 'name'
|
| 52 |
+
objtype = type(obj)
|
| 53 |
+
name = getattr(objtype, '__name__', getattr(objtype, '__class__', objtype))
|
| 54 |
+
|
| 55 |
+
# profile dumping an object
|
| 56 |
+
import dill
|
| 57 |
+
import os
|
| 58 |
+
import cProfile
|
| 59 |
+
#name = os.path.splitext(os.path.basename(__file__))[0]
|
| 60 |
+
cProfile.run("dill.dumps(obj)", filename="%s.prof" % name)
|
| 61 |
+
msg = "gprof2dot -f pstats %s %s.prof | dot -Tpng -o %s.call.png" % (args, name, name)
|
| 62 |
+
try:
|
| 63 |
+
res = os.system(msg)
|
| 64 |
+
except Exception:
|
| 65 |
+
print ("Please verify install of 'gprof2dot' to view profile graphs")
|
| 66 |
+
if res:
|
| 67 |
+
print ("Please verify install of 'gprof2dot' to view profile graphs")
|
| 68 |
+
|
| 69 |
+
# get stats
|
| 70 |
+
f_prof = "%s.prof" % name
|
| 71 |
+
import pstats
|
| 72 |
+
stats = pstats.Stats(f_prof, stream=sys.stdout)
|
| 73 |
+
stats.strip_dirs().sort_stats('cumtime')
|
| 74 |
+
stats.print_stats(20) #XXX: save to file instead of print top 20?
|
| 75 |
+
os.remove(f_prof)
|
bin/gguf-convert-endian
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from gguf.scripts.gguf_convert_endian import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/gguf-editor-gui
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from gguf.scripts.gguf_editor_gui import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/httpx
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from httpx import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/jsonschema
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from jsonschema.cli import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/proton-viewer
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from triton.profiler.viewer import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/pydoc.bat
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
@REM Copyright (c) 2020-202x The virtualenv developers
|
| 2 |
+
@REM
|
| 3 |
+
@REM Permission is hereby granted, free of charge, to any person obtaining
|
| 4 |
+
@REM a copy of this software and associated documentation files (the
|
| 5 |
+
@REM "Software"), to deal in the Software without restriction, including
|
| 6 |
+
@REM without limitation the rights to use, copy, modify, merge, publish,
|
| 7 |
+
@REM distribute, sublicense, and/or sell copies of the Software, and to
|
| 8 |
+
@REM permit persons to whom the Software is furnished to do so, subject to
|
| 9 |
+
@REM the following conditions:
|
| 10 |
+
@REM
|
| 11 |
+
@REM The above copyright notice and this permission notice shall be
|
| 12 |
+
@REM included in all copies or substantial portions of the Software.
|
| 13 |
+
@REM
|
| 14 |
+
@REM THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 15 |
+
@REM EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 16 |
+
@REM MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 17 |
+
@REM NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
| 18 |
+
@REM LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
| 19 |
+
@REM OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
| 20 |
+
@REM WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 21 |
+
|
| 22 |
+
python.exe -m pydoc %*
|
bin/ray
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from ray.scripts.scripts import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/torchfrtrace
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from tools.flight_recorder.fr_trace import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/transformers
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from transformers.commands.transformers_cli import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/tune
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from ray.tune.cli.scripts import cli
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(cli())
|
bin/vllm
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from vllm.entrypoints.cli.main import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
bin/websockets
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/mnt/nw/home/m.yu/repos/EasySteer/.venv/bin/python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
import sys
|
| 4 |
+
from websockets.cli import main
|
| 5 |
+
if __name__ == "__main__":
|
| 6 |
+
if sys.argv[0].endswith("-script.pyw"):
|
| 7 |
+
sys.argv[0] = sys.argv[0][:-11]
|
| 8 |
+
elif sys.argv[0].endswith(".exe"):
|
| 9 |
+
sys.argv[0] = sys.argv[0][:-4]
|
| 10 |
+
sys.exit(main())
|
lib/python3.13/site-packages/__editable___easysteer_0_1_0_finder.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
import sys
|
| 3 |
+
from importlib.machinery import ModuleSpec, PathFinder
|
| 4 |
+
from importlib.machinery import all_suffixes as module_suffixes
|
| 5 |
+
from importlib.util import spec_from_file_location
|
| 6 |
+
from itertools import chain
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
|
| 9 |
+
MAPPING: dict[str, str] = {'easysteer': '/mnt/nw/home/m.yu/repos/EasySteer/easysteer'}
|
| 10 |
+
NAMESPACES: dict[str, list[str]] = {'easysteer.reft': ['/mnt/nw/home/m.yu/repos/EasySteer/easysteer/reft'], 'easysteer.reft.results': ['/mnt/nw/home/m.yu/repos/EasySteer/easysteer/reft/results'], 'easysteer.reft.results.loreft': ['/mnt/nw/home/m.yu/repos/EasySteer/easysteer/reft/results/loreft'], 'easysteer.reft.results.ssv': ['/mnt/nw/home/m.yu/repos/EasySteer/easysteer/reft/results/ssv'], 'easysteer.reft.pyreft.examples.notebooks': ['/mnt/nw/home/m.yu/repos/EasySteer/easysteer/reft/pyreft/examples/notebooks']}
|
| 11 |
+
PATH_PLACEHOLDER = '__editable__.easysteer-0.1.0.finder' + ".__path_hook__"
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class _EditableFinder: # MetaPathFinder
|
| 15 |
+
@classmethod
|
| 16 |
+
def find_spec(cls, fullname: str, path=None, target=None) -> ModuleSpec | None: # type: ignore
|
| 17 |
+
# Top-level packages and modules (we know these exist in the FS)
|
| 18 |
+
if fullname in MAPPING:
|
| 19 |
+
pkg_path = MAPPING[fullname]
|
| 20 |
+
return cls._find_spec(fullname, Path(pkg_path))
|
| 21 |
+
|
| 22 |
+
# Handle immediate children modules (required for namespaces to work)
|
| 23 |
+
# To avoid problems with case sensitivity in the file system we delegate
|
| 24 |
+
# to the importlib.machinery implementation.
|
| 25 |
+
parent, _, child = fullname.rpartition(".")
|
| 26 |
+
if parent and parent in MAPPING:
|
| 27 |
+
return PathFinder.find_spec(fullname, path=[MAPPING[parent]])
|
| 28 |
+
|
| 29 |
+
# Other levels of nesting should be handled automatically by importlib
|
| 30 |
+
# using the parent path.
|
| 31 |
+
return None
|
| 32 |
+
|
| 33 |
+
@classmethod
|
| 34 |
+
def _find_spec(cls, fullname: str, candidate_path: Path) -> ModuleSpec | None:
|
| 35 |
+
init = candidate_path / "__init__.py"
|
| 36 |
+
candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
|
| 37 |
+
for candidate in chain([init], candidates):
|
| 38 |
+
if candidate.exists():
|
| 39 |
+
return spec_from_file_location(fullname, candidate)
|
| 40 |
+
return None
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class _EditableNamespaceFinder: # PathEntryFinder
|
| 44 |
+
@classmethod
|
| 45 |
+
def _path_hook(cls, path) -> type[_EditableNamespaceFinder]:
|
| 46 |
+
if path == PATH_PLACEHOLDER:
|
| 47 |
+
return cls
|
| 48 |
+
raise ImportError
|
| 49 |
+
|
| 50 |
+
@classmethod
|
| 51 |
+
def _paths(cls, fullname: str) -> list[str]:
|
| 52 |
+
paths = NAMESPACES[fullname]
|
| 53 |
+
if not paths and fullname in MAPPING:
|
| 54 |
+
paths = [MAPPING[fullname]]
|
| 55 |
+
# Always add placeholder, for 2 reasons:
|
| 56 |
+
# 1. __path__ cannot be empty for the spec to be considered namespace.
|
| 57 |
+
# 2. In the case of nested namespaces, we need to force
|
| 58 |
+
# import machinery to query _EditableNamespaceFinder again.
|
| 59 |
+
return [*paths, PATH_PLACEHOLDER]
|
| 60 |
+
|
| 61 |
+
@classmethod
|
| 62 |
+
def find_spec(cls, fullname: str, target=None) -> ModuleSpec | None: # type: ignore
|
| 63 |
+
if fullname in NAMESPACES:
|
| 64 |
+
spec = ModuleSpec(fullname, None, is_package=True)
|
| 65 |
+
spec.submodule_search_locations = cls._paths(fullname)
|
| 66 |
+
return spec
|
| 67 |
+
return None
|
| 68 |
+
|
| 69 |
+
@classmethod
|
| 70 |
+
def find_module(cls, _fullname) -> None:
|
| 71 |
+
return None
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def install():
|
| 75 |
+
if not any(finder == _EditableFinder for finder in sys.meta_path):
|
| 76 |
+
sys.meta_path.append(_EditableFinder)
|
| 77 |
+
|
| 78 |
+
if not NAMESPACES:
|
| 79 |
+
return
|
| 80 |
+
|
| 81 |
+
if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks):
|
| 82 |
+
# PathEntryFinder is needed to create NamespaceSpec without private APIS
|
| 83 |
+
sys.path_hooks.append(_EditableNamespaceFinder._path_hook)
|
| 84 |
+
if PATH_PLACEHOLDER not in sys.path:
|
| 85 |
+
sys.path.append(PATH_PLACEHOLDER) # Used just to trigger the path hook
|
lib/python3.13/site-packages/__editable___vllm_0_1_dev10891_ge8dee828a_precompiled_finder.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
import sys
|
| 3 |
+
from importlib.machinery import ModuleSpec, PathFinder
|
| 4 |
+
from importlib.machinery import all_suffixes as module_suffixes
|
| 5 |
+
from importlib.util import spec_from_file_location
|
| 6 |
+
from itertools import chain
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
|
| 9 |
+
MAPPING: dict[str, str] = {'vllm': '/mnt/nw/home/m.yu/repos/EasySteer/vllm-steer/vllm'}
|
| 10 |
+
NAMESPACES: dict[str, list[str]] = {'vllm.model_executor.layers.quantization.utils.configs': ['/mnt/nw/home/m.yu/repos/EasySteer/vllm-steer/vllm/model_executor/layers/quantization/utils/configs'], 'vllm.model_executor.layers.fused_moe.configs': ['/mnt/nw/home/m.yu/repos/EasySteer/vllm-steer/vllm/model_executor/layers/fused_moe/configs']}
|
| 11 |
+
PATH_PLACEHOLDER = '__editable__.vllm-0.1.dev10891+ge8dee828a.precompiled.finder' + ".__path_hook__"
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class _EditableFinder: # MetaPathFinder
|
| 15 |
+
@classmethod
|
| 16 |
+
def find_spec(cls, fullname: str, path=None, target=None) -> ModuleSpec | None: # type: ignore
|
| 17 |
+
# Top-level packages and modules (we know these exist in the FS)
|
| 18 |
+
if fullname in MAPPING:
|
| 19 |
+
pkg_path = MAPPING[fullname]
|
| 20 |
+
return cls._find_spec(fullname, Path(pkg_path))
|
| 21 |
+
|
| 22 |
+
# Handle immediate children modules (required for namespaces to work)
|
| 23 |
+
# To avoid problems with case sensitivity in the file system we delegate
|
| 24 |
+
# to the importlib.machinery implementation.
|
| 25 |
+
parent, _, child = fullname.rpartition(".")
|
| 26 |
+
if parent and parent in MAPPING:
|
| 27 |
+
return PathFinder.find_spec(fullname, path=[MAPPING[parent]])
|
| 28 |
+
|
| 29 |
+
# Other levels of nesting should be handled automatically by importlib
|
| 30 |
+
# using the parent path.
|
| 31 |
+
return None
|
| 32 |
+
|
| 33 |
+
@classmethod
|
| 34 |
+
def _find_spec(cls, fullname: str, candidate_path: Path) -> ModuleSpec | None:
|
| 35 |
+
init = candidate_path / "__init__.py"
|
| 36 |
+
candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
|
| 37 |
+
for candidate in chain([init], candidates):
|
| 38 |
+
if candidate.exists():
|
| 39 |
+
return spec_from_file_location(fullname, candidate)
|
| 40 |
+
return None
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class _EditableNamespaceFinder: # PathEntryFinder
|
| 44 |
+
@classmethod
|
| 45 |
+
def _path_hook(cls, path) -> type[_EditableNamespaceFinder]:
|
| 46 |
+
if path == PATH_PLACEHOLDER:
|
| 47 |
+
return cls
|
| 48 |
+
raise ImportError
|
| 49 |
+
|
| 50 |
+
@classmethod
|
| 51 |
+
def _paths(cls, fullname: str) -> list[str]:
|
| 52 |
+
paths = NAMESPACES[fullname]
|
| 53 |
+
if not paths and fullname in MAPPING:
|
| 54 |
+
paths = [MAPPING[fullname]]
|
| 55 |
+
# Always add placeholder, for 2 reasons:
|
| 56 |
+
# 1. __path__ cannot be empty for the spec to be considered namespace.
|
| 57 |
+
# 2. In the case of nested namespaces, we need to force
|
| 58 |
+
# import machinery to query _EditableNamespaceFinder again.
|
| 59 |
+
return [*paths, PATH_PLACEHOLDER]
|
| 60 |
+
|
| 61 |
+
@classmethod
|
| 62 |
+
def find_spec(cls, fullname: str, target=None) -> ModuleSpec | None: # type: ignore
|
| 63 |
+
if fullname in NAMESPACES:
|
| 64 |
+
spec = ModuleSpec(fullname, None, is_package=True)
|
| 65 |
+
spec.submodule_search_locations = cls._paths(fullname)
|
| 66 |
+
return spec
|
| 67 |
+
return None
|
| 68 |
+
|
| 69 |
+
@classmethod
|
| 70 |
+
def find_module(cls, _fullname) -> None:
|
| 71 |
+
return None
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def install():
    """Register the editable-install import machinery with the interpreter."""
    # Meta-path finder for the mapped top-level packages/modules.
    already_registered = any(finder == _EditableFinder for finder in sys.meta_path)
    if not already_registered:
        sys.meta_path.append(_EditableFinder)

    # Everything below is only needed when namespace packages are mapped.
    if not NAMESPACES:
        return

    hook = _EditableNamespaceFinder._path_hook
    if all(existing != hook for existing in sys.path_hooks):
        # PathEntryFinder is needed to create NamespaceSpec without private APIS
        sys.path_hooks.append(hook)
    if PATH_PLACEHOLDER not in sys.path:
        sys.path.append(PATH_PLACEHOLDER)  # Used just to trigger the path hook
|
lib/python3.13/site-packages/_soundfile.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# auto-generated file
|
| 2 |
+
import _cffi_backend
|
| 3 |
+
|
| 4 |
+
ffi = _cffi_backend.FFI('_soundfile',
|
| 5 |
+
_version = 0x2601,
|
| 6 |
+
_types = b'\x00\x00\x12\x0D\x00\x00\x68\x03\x00\x00\x07\x01\x00\x00\x67\x03\x00\x00\x75\x03\x00\x00\x00\x0F\x00\x00\x12\x0D\x00\x00\x6A\x03\x00\x00\x07\x01\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x12\x0D\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x03\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x69\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x12\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x67\x03\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x12\x11\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x12\x11\x00\x00\x6A\x03\x00\x00\x1C\x01\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x12\x11\x00\x00\x07\x01\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x02\x0D\x00\x00\x12\x11\x00\x00\x07\x01\x00\x00\x04\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x12\x11\x00\x00\x6B\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x12\x11\x00\x00\x6F\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x12\x11\x00\x00\x02\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x12\x11\x00\x00\x17\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x12\x11\x00\x00\x74\x03\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x12\x11\x00\x00\x04\x11\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x17\x01\x00\x00\x07\x01\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x04\x11\x00\x00\x17\x01\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x36\x0D\x00\x00\x75\x03\x00\x00\x17\x01\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x75\x0D\x00\x00\x12\x11\x00\x00\x00\x0F\x00\x00\x00\x09\x00\x00\x01\x09\x00\x00\x02\x09\x00\x00\x03\x09\x00\x00\x02\x01\x00\x00\x0E\x01\x00\x00\x00\x0B\x00\x00\x01\x0B\x00\x00\x02\x0B\x00\x00\x0D\x01\x00\x00\x51\x03\x00\x00\x56\x03\x00\x00\x59\x03\x00\x00\x5E\x03\x00\x00\x05\x01\x00\x00\x00\x01',
|
| 7 |
+
_globals = (b'\xFF\xFF\xFF\x0BSFC_FILE_TRUNCATE',4224,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_INFO',4136,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_MAJOR',4145,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_MAJOR_COUNT',4144,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_SUBTYPE',4147,b'\xFF\xFF\xFF\x0BSFC_GET_FORMAT_SUBTYPE_COUNT',4146,b'\xFF\xFF\xFF\x0BSFC_GET_LIB_VERSION',4096,b'\xFF\xFF\xFF\x0BSFC_GET_LOG_INFO',4097,b'\xFF\xFF\xFF\x0BSFC_SET_BITRATE_MODE',4869,b'\xFF\xFF\xFF\x0BSFC_SET_CLIPPING',4288,b'\xFF\xFF\xFF\x0BSFC_SET_COMPRESSION_LEVEL',4865,b'\xFF\xFF\xFF\x0BSFC_SET_SCALE_FLOAT_INT_READ',4116,b'\xFF\xFF\xFF\x0BSFC_SET_SCALE_INT_FLOAT_WRITE',4117,b'\xFF\xFF\xFF\x0BSFM_RDWR',48,b'\xFF\xFF\xFF\x0BSFM_READ',16,b'\xFF\xFF\xFF\x0BSFM_WRITE',32,b'\xFF\xFF\xFF\x0BSF_BITRATE_MODE_AVERAGE',1,b'\xFF\xFF\xFF\x0BSF_BITRATE_MODE_CONSTANT',0,b'\xFF\xFF\xFF\x0BSF_BITRATE_MODE_VARIABLE',2,b'\xFF\xFF\xFF\x0BSF_FALSE',0,b'\xFF\xFF\xFF\x0BSF_FORMAT_ENDMASK',805306368,b'\xFF\xFF\xFF\x0BSF_FORMAT_SUBMASK',65535,b'\xFF\xFF\xFF\x0BSF_FORMAT_TYPEMASK',268369920,b'\xFF\xFF\xFF\x0BSF_TRUE',1,b'\x00\x00\x20\x23sf_close',0,b'\x00\x00\x2D\x23sf_command',0,b'\x00\x00\x20\x23sf_error',0,b'\x00\x00\x18\x23sf_error_number',0,b'\x00\x00\x23\x23sf_error_str',0,b'\x00\x00\x1D\x23sf_format_check',0,b'\x00\x00\x14\x23sf_get_string',0,b'\x00\x00\x06\x23sf_open',0,b'\x00\x00\x0B\x23sf_open_fd',0,b'\x00\x00\x00\x23sf_open_virtual',0,b'\x00\x00\x20\x23sf_perror',0,b'\x00\x00\x33\x23sf_read_double',0,b'\x00\x00\x38\x23sf_read_float',0,b'\x00\x00\x3D\x23sf_read_int',0,b'\x00\x00\x4C\x23sf_read_raw',0,b'\x00\x00\x47\x23sf_read_short',0,b'\x00\x00\x4C\x23sf_readf_double',0,b'\x00\x00\x4C\x23sf_readf_float',0,b'\x00\x00\x4C\x23sf_readf_int',0,b'\x00\x00\x4C\x23sf_readf_short',0,b'\x00\x00\x42\x23sf_seek',0,b'\x00\x00\x28\x23sf_set_string',0,b'\x00\x00\x11\x23sf_strerror',0,b'\x00\x00\x1B\x23sf_version_string',0,b'\x00\x00\x33\x23sf_write_double',0,b'\x00\x00\x38\x23sf_write_float',0,b'\x00\x00\x3D\x23sf_write_int',0,b'\x00\x00\x4C\x23sf_write_r
aw',0,b'\x00\x00\x47\x23sf_write_short',0,b'\x00\x00\x63\x23sf_write_sync',0,b'\x00\x00\x4C\x23sf_writef_double',0,b'\x00\x00\x4C\x23sf_writef_float',0,b'\x00\x00\x4C\x23sf_writef_int',0,b'\x00\x00\x4C\x23sf_writef_short',0),
|
| 8 |
+
_struct_unions = ((b'\x00\x00\x00\x66\x00\x00\x00\x02SF_FORMAT_INFO',b'\x00\x00\x02\x11format',b'\x00\x00\x07\x11name',b'\x00\x00\x07\x11extension'),(b'\x00\x00\x00\x67\x00\x00\x00\x02SF_INFO',b'\x00\x00\x36\x11frames',b'\x00\x00\x02\x11samplerate',b'\x00\x00\x02\x11channels',b'\x00\x00\x02\x11format',b'\x00\x00\x02\x11sections',b'\x00\x00\x02\x11seekable'),(b'\x00\x00\x00\x68\x00\x00\x00\x02SF_VIRTUAL_IO',b'\x00\x00\x71\x11get_filelen',b'\x00\x00\x70\x11seek',b'\x00\x00\x72\x11read',b'\x00\x00\x73\x11write',b'\x00\x00\x71\x11tell'),(b'\x00\x00\x00\x69\x00\x00\x00\x10SNDFILE_tag',)),
|
| 9 |
+
_enums = (b'\x00\x00\x00\x6C\x00\x00\x00\x16$1\x00SF_FORMAT_SUBMASK,SF_FORMAT_TYPEMASK,SF_FORMAT_ENDMASK',b'\x00\x00\x00\x6D\x00\x00\x00\x16$2\x00SFC_GET_LIB_VERSION,SFC_GET_LOG_INFO,SFC_GET_FORMAT_INFO,SFC_GET_FORMAT_MAJOR_COUNT,SFC_GET_FORMAT_MAJOR,SFC_GET_FORMAT_SUBTYPE_COUNT,SFC_GET_FORMAT_SUBTYPE,SFC_FILE_TRUNCATE,SFC_SET_CLIPPING,SFC_SET_SCALE_FLOAT_INT_READ,SFC_SET_SCALE_INT_FLOAT_WRITE,SFC_SET_COMPRESSION_LEVEL,SFC_SET_BITRATE_MODE',b'\x00\x00\x00\x6E\x00\x00\x00\x16$3\x00SF_FALSE,SF_TRUE,SFM_READ,SFM_WRITE,SFM_RDWR,SF_BITRATE_MODE_CONSTANT,SF_BITRATE_MODE_AVERAGE,SF_BITRATE_MODE_VARIABLE'),
|
| 10 |
+
_typenames = (b'\x00\x00\x00\x66SF_FORMAT_INFO',b'\x00\x00\x00\x67SF_INFO',b'\x00\x00\x00\x68SF_VIRTUAL_IO',b'\x00\x00\x00\x69SNDFILE',b'\x00\x00\x00\x36sf_count_t',b'\x00\x00\x00\x71sf_vio_get_filelen',b'\x00\x00\x00\x72sf_vio_read',b'\x00\x00\x00\x70sf_vio_seek',b'\x00\x00\x00\x71sf_vio_tell',b'\x00\x00\x00\x73sf_vio_write'),
|
| 11 |
+
)
|
lib/python3.13/site-packages/_virtualenv.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Patches that are applied at runtime to the virtual environment."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
# Path of this patch file; the original single-argument os.path.join() call
# was a no-op, so reference the module path directly.
VIRTUALENV_PATCH_FILE = __file__
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def patch_dist(dist):
    """
    Distutils allows user to configure some arguments via a configuration file:
    https://docs.python.org/3.11/install/index.html#distutils-configuration-files.

    Some of these arguments make no sense inside a virtual environment, so fix them up here.
    """  # noqa: D205
    # we cannot allow some install config as that would get packages installed outside of the virtual environment
    original_parse = dist.Distribution.parse_config_files

    def parse_config_files(self, *args, **kwargs):
        outcome = original_parse(self, *args, **kwargs)
        install_options = self.get_option_dict("install")

        # the prefix governs where to install the libraries: pin it to the venv
        if "prefix" in install_options:
            install_options["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
        # do not allow global configs to hijack venv paths
        for base in ("purelib", "platlib", "headers", "scripts", "data"):
            install_options.pop(f"install_{base}", None)
        return outcome

    dist.Distribution.parse_config_files = parse_config_files
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# Import hook that patches some modules to ignore configuration values that break package installation in case
|
| 35 |
+
# of virtual environments.
|
| 36 |
+
_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
|
| 37 |
+
# https://docs.python.org/3/library/importlib.html#setting-up-an-importer
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class _Finder:
    """A meta path finder that allows patching the imported distutils modules."""

    # Module name currently being resolved; doubles as a re-entrancy guard so
    # the recursive find_spec() call below is not intercepted again.
    fullname = None

    # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup,
    # because there are gevent-based applications that need to be first to import threading by themselves.
    # See https://github.com/pypa/virtualenv/issues/1895 for details.
    lock = []  # noqa: RUF012

    def find_spec(self, fullname, path, target=None):  # noqa: ARG002
        # Only intercept the distutils/setuptools dist modules, and only when
        # we are not already inside a recursive lookup for them.
        if fullname in _DISTUTILS_PATCH and self.fullname is None:
            # initialize lock[0] lazily
            if len(self.lock) == 0:
                import threading

                lock = threading.Lock()
                # there is possibility that two threads T1 and T2 are simultaneously running into find_spec,
                # observing .lock as empty, and further going into hereby initialization. However due to the GIL,
                # list.append() operation is atomic and this way only one of the threads will "win" to put the lock
                # - that every thread will use - into .lock[0].
                # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
                self.lock.append(lock)

            from functools import partial
            from importlib.util import find_spec

            with self.lock[0]:
                # Mark this name in-flight so the find_spec() call below does
                # not re-enter this finder, then delegate to the real finders.
                self.fullname = fullname
                try:
                    spec = find_spec(fullname, path)
                    if spec is not None:
                        # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work
                        is_new_api = hasattr(spec.loader, "exec_module")
                        func_name = "exec_module" if is_new_api else "load_module"
                        old = getattr(spec.loader, func_name)
                        func = self.exec_module if is_new_api else self.load_module
                        if old is not func:
                            try:  # noqa: SIM105
                                # Wrap the loader so our patch runs after the
                                # real module executes.
                                setattr(spec.loader, func_name, partial(func, old))
                            except AttributeError:
                                pass  # C-Extension loaders are r/o such as zipimporter with <3.7
                        return spec
                finally:
                    self.fullname = None
        return None

    @staticmethod
    def exec_module(old, module):
        # Run the real loader first, then patch the freshly executed module.
        old(module)
        if module.__name__ in _DISTUTILS_PATCH:
            patch_dist(module)

    @staticmethod
    def load_module(old, name):
        # Legacy loader API: load, patch, then hand the module back.
        module = old(name)
        if module.__name__ in _DISTUTILS_PATCH:
            patch_dist(module)
        return module
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
sys.meta_path.insert(0, _Finder())
|
lib/python3.13/site-packages/build_backend.py
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Copyright (c) 2023 by FlashInfer team.
|
| 3 |
+
|
| 4 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
you may not use this file except in compliance with the License.
|
| 6 |
+
You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
See the License for the specific language governing permissions and
|
| 14 |
+
limitations under the License.
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
import shutil
|
| 19 |
+
from pathlib import Path
|
| 20 |
+
|
| 21 |
+
from setuptools import build_meta as orig
|
| 22 |
+
from build_utils import get_git_version
|
| 23 |
+
|
| 24 |
+
# Directory containing this build backend (the project root of the checkout/sdist).
_root = Path(__file__).parent.resolve()
# Destination under the package where vendored sources and headers are staged.
_data_dir = _root / "flashinfer" / "data"
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _create_build_metadata():
    """Create build metadata file with version information."""
    # Base version comes from version.txt when present.
    version_file = _root / "version.txt"
    if version_file.exists():
        version = version_file.read_text().strip()
    else:
        version = "0.0.0+unknown"

    # Add dev suffix if specified
    dev_suffix = os.environ.get("FLASHINFER_DEV_RELEASE_SUFFIX", "")
    if dev_suffix:
        version = f"{version}.dev{dev_suffix}"

    # Get git version
    git_version = get_git_version(cwd=_root)

    # Create build metadata in the source tree
    source_dir = Path(__file__).parent
    build_meta_file = source_dir / "flashinfer" / "_build_meta.py"

    # When installing from an sdist there is no .git directory; an existing
    # metadata file is then authoritative and must not be overwritten.
    in_git_repo = (source_dir / ".git").exists()
    if build_meta_file.exists() and not in_git_repo:
        print("Build metadata file already exists (not in git repo), keeping it")
        return version

    # In git repo (editable) or file doesn't exist, create/update it
    contents = (
        '"""Build metadata for flashinfer package."""\n'
        f'__version__ = "{version}"\n'
        f'__git_version__ = "{git_version}"\n'
    )
    with open(build_meta_file, "w") as f:
        f.write(contents)

    print(f"Created build metadata file with version {version}")
    return version
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
# Create build metadata as soon as this module is imported
|
| 69 |
+
_create_build_metadata()
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def write_if_different(path: Path, content: str) -> None:
    """Write *content* to *path*, unless the file already holds exactly that text."""
    up_to_date = path.exists() and path.read_text() == content
    if up_to_date:
        # Skip the write to avoid touching the file's mtime needlessly.
        return
    # Ensure intermediate directories exist before the (re)write.
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def _create_data_dir(use_symlinks=True):
    # Stage vendored sources under flashinfer/data, either as symlinks
    # (editable installs: edits show through immediately) or as real copies
    # (wheel/sdist builds: the files must physically exist to be packaged).
    _data_dir.mkdir(parents=True, exist_ok=True)

    def ln(source: str, target: str) -> None:
        # Place _root/source at _data_dir/target, replacing whatever is there.
        src = _root / source
        dst = _data_dir / target
        if dst.exists():
            # Order matters: a symlink to a directory also satisfies is_dir(),
            # so the symlink check must come first to avoid rmtree-ing through
            # the link into the source tree.
            if dst.is_symlink():
                dst.unlink()
            elif dst.is_dir():
                shutil.rmtree(dst)
            else:
                dst.unlink()

        if use_symlinks:
            dst.symlink_to(src, target_is_directory=True)
        else:
            # For wheel/sdist, copy actual files instead of symlinks
            if src.exists():
                shutil.copytree(src, dst, symlinks=False, dirs_exist_ok=True)

    ln("3rdparty/cutlass", "cutlass")
    ln("3rdparty/spdlog", "spdlog")
    ln("csrc", "csrc")
    ln("include", "include")
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def _prepare_for_wheel():
    """Stage data files for a wheel build (real copies, not symlinks)."""
    # Drop any previously staged tree, then repopulate with actual files so
    # they are physically included in the wheel.
    if _data_dir.exists():
        shutil.rmtree(_data_dir)
    _create_data_dir(use_symlinks=False)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _prepare_for_editable():
    """Stage data files for an editable install (symlinks)."""
    # Rebuild from scratch with symlinks so source edits are reflected
    # immediately without reinstalling.
    if _data_dir.exists():
        shutil.rmtree(_data_dir)
    _create_data_dir(use_symlinks=True)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def _prepare_for_sdist():
    """Stage data files for an sdist build (real copies, not symlinks)."""
    # Drop any previously staged tree, then repopulate with actual files so
    # they end up inside the source tarball.
    if _data_dir.exists():
        shutil.rmtree(_data_dir)
    _create_data_dir(use_symlinks=False)
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def get_requires_for_build_wheel(config_settings=None):
    # PEP 517 hook: stage copied data files; no extra build requirements.
    _prepare_for_wheel()
    return []


def get_requires_for_build_sdist(config_settings=None):
    # PEP 517 hook: stage copied data files; no extra build requirements.
    _prepare_for_sdist()
    return []


def get_requires_for_build_editable(config_settings=None):
    # PEP 660 hook: stage symlinked data files; no extra build requirements.
    _prepare_for_editable()
    return []


def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):
    # PEP 517 hook: ensure data is staged before delegating to setuptools.
    _prepare_for_wheel()
    return orig.prepare_metadata_for_build_wheel(metadata_directory, config_settings)


def prepare_metadata_for_build_editable(metadata_directory, config_settings=None):
    # PEP 660 hook: ensure data is staged before delegating to setuptools.
    _prepare_for_editable()
    return orig.prepare_metadata_for_build_editable(metadata_directory, config_settings)


def build_editable(wheel_directory, config_settings=None, metadata_directory=None):
    # PEP 660 hook: stage symlinked data, then delegate to setuptools.
    _prepare_for_editable()
    return orig.build_editable(wheel_directory, config_settings, metadata_directory)


def build_sdist(sdist_directory, config_settings=None):
    # PEP 517 hook: stage copied data, then delegate to setuptools.
    _prepare_for_sdist()
    return orig.build_sdist(sdist_directory, config_settings)


def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
    # PEP 517 hook: stage copied data, then delegate to setuptools.
    _prepare_for_wheel()
    return orig.build_wheel(wheel_directory, config_settings, metadata_directory)
|
lib/python3.13/site-packages/build_utils.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Copyright (c) 2025 by FlashInfer team.
|
| 3 |
+
|
| 4 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
you may not use this file except in compliance with the License.
|
| 6 |
+
You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
See the License for the specific language governing permissions and
|
| 14 |
+
limitations under the License.
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
"""Shared build utilities for flashinfer packages."""
|
| 18 |
+
|
| 19 |
+
import subprocess
|
| 20 |
+
from pathlib import Path
|
| 21 |
+
from typing import Optional
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def get_git_version(cwd: Optional[Path] = None) -> str:
    """
    Get git commit hash.

    Args:
        cwd: Working directory for git command. If None, uses current directory.

    Returns:
        Git commit hash or "unknown" if git is not available.
    """
    try:
        output = subprocess.check_output(
            ["git", "rev-parse", "HEAD"],
            cwd=cwd,
            stderr=subprocess.DEVNULL,
        )
        return output.decode("ascii").strip()
    # Narrowed from a bare ``except Exception``: only failures this call can
    # actually produce (git missing / not a repo / non-ASCII output) map to
    # the "unknown" fallback; anything else is a real bug and should surface.
    except (OSError, subprocess.SubprocessError, UnicodeDecodeError):
        return "unknown"
|
lib/python3.13/site-packages/email_validator-2.3.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
uv
|
lib/python3.13/site-packages/email_validator-2.3.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,466 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: email-validator
|
| 3 |
+
Version: 2.3.0
|
| 4 |
+
Summary: A robust email address syntax and deliverability validation library.
|
| 5 |
+
Home-page: https://github.com/JoshData/python-email-validator
|
| 6 |
+
Author: Joshua Tauberer
|
| 7 |
+
Author-email: jt@occams.info
|
| 8 |
+
License: Unlicense
|
| 9 |
+
Keywords: email address validator
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: License :: OSI Approved :: The Unlicense (Unlicense)
|
| 13 |
+
Classifier: Programming Language :: Python :: 3
|
| 14 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 15 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 19 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 20 |
+
Requires-Python: >=3.8
|
| 21 |
+
Description-Content-Type: text/markdown
|
| 22 |
+
License-File: LICENSE
|
| 23 |
+
Requires-Dist: dnspython>=2.0.0
|
| 24 |
+
Requires-Dist: idna>=2.0.0
|
| 25 |
+
Dynamic: license-file
|
| 26 |
+
|
| 27 |
+
email-validator: Validate Email Addresses
|
| 28 |
+
=========================================
|
| 29 |
+
|
| 30 |
+
A robust email address syntax and deliverability validation library for
|
| 31 |
+
Python 3.8+ by [Joshua Tauberer](https://joshdata.me).
|
| 32 |
+
|
| 33 |
+
This library validates that a string is of the form `name@example.com`
|
| 34 |
+
and optionally checks that the domain name is set up to receive email.
|
| 35 |
+
This is the sort of validation you would want when you are identifying
|
| 36 |
+
users by their email address like on a registration form.
|
| 37 |
+
|
| 38 |
+
Key features:
|
| 39 |
+
|
| 40 |
+
* Checks that an email address has the correct syntax --- great for
|
| 41 |
+
email-based registration/login forms or validating data.
|
| 42 |
+
* Gives friendly English error messages when validation fails that you
|
| 43 |
+
can display to end-users.
|
| 44 |
+
* Checks deliverability (optional): Does the domain name resolve?
|
| 45 |
+
(You can override the default DNS resolver to add query caching.)
|
| 46 |
+
* Supports internationalized domain names (like `@ツ.life`),
|
| 47 |
+
internationalized local parts (like `ツ@example.com`),
|
| 48 |
+
and optionally parses display names (e.g. `"My Name" <me@example.com>`).
|
| 49 |
+
* Rejects addresses with invalid or unsafe Unicode characters,
|
| 50 |
+
obsolete email address syntax that you'd find unexpected,
|
| 51 |
+
special use domain names like `@localhost`,
|
| 52 |
+
and domains without a dot by default.
|
| 53 |
+
This is an opinionated library!
|
| 54 |
+
* Normalizes email addresses (important for internationalized
|
| 55 |
+
and quoted-string addresses! see below).
|
| 56 |
+
* Python type annotations are used.
|
| 57 |
+
|
| 58 |
+
This is an opinionated library. You should definitely also consider using
|
| 59 |
+
the less-opinionated [pyIsEmail](https://github.com/michaelherold/pyIsEmail)
|
| 60 |
+
if it works better for you.
|
| 61 |
+
|
| 62 |
+
[](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml)
|
| 63 |
+
|
| 64 |
+
View the [CHANGELOG / Release Notes](CHANGELOG.md) for the version history of changes in the library. Occasionally this README is ahead of the latest published package --- see the CHANGELOG for details.
|
| 65 |
+
|
| 66 |
+
---
|
| 67 |
+
|
| 68 |
+
Installation
|
| 69 |
+
------------
|
| 70 |
+
|
| 71 |
+
This package [is on PyPI](https://pypi.org/project/email-validator/), so:
|
| 72 |
+
|
| 73 |
+
```sh
|
| 74 |
+
pip install email-validator
|
| 75 |
+
```
|
| 76 |
+
|
| 77 |
+
(You might need to use `pip3` depending on your local environment.)
|
| 78 |
+
|
| 79 |
+
Quick Start
|
| 80 |
+
-----------
|
| 81 |
+
|
| 82 |
+
If you're validating a user's email address before creating a user
|
| 83 |
+
account in your application, you might do this:
|
| 84 |
+
|
| 85 |
+
```python
|
| 86 |
+
from email_validator import validate_email, EmailNotValidError
|
| 87 |
+
|
| 88 |
+
email = "my+address@example.org"
|
| 89 |
+
|
| 90 |
+
try:
|
| 91 |
+
|
| 92 |
+
# Check that the email address is valid. Turn on check_deliverability
|
| 93 |
+
# for first-time validations like on account creation pages (but not
|
| 94 |
+
# login pages).
|
| 95 |
+
emailinfo = validate_email(email, check_deliverability=False)
|
| 96 |
+
|
| 97 |
+
# After this point, use only the normalized form of the email address,
|
| 98 |
+
# especially before going to a database query.
|
| 99 |
+
email = emailinfo.normalized
|
| 100 |
+
|
| 101 |
+
except EmailNotValidError as e:
|
| 102 |
+
|
| 103 |
+
# The exception message is a human-readable explanation of why it's
|
| 104 |
+
# not a valid (or deliverable) email address.
|
| 105 |
+
print(str(e))
|
| 106 |
+
```
|
| 107 |
+
|
| 108 |
+
This validates the address and gives you its normalized form. You should
|
| 109 |
+
**put the normalized form in your database** and always normalize before
|
| 110 |
+
checking if an address is in your database. When using this in a login form,
|
| 111 |
+
set `check_deliverability` to `False` to avoid unnecessary DNS queries.
|
| 112 |
+
|
| 113 |
+
Usage
|
| 114 |
+
-----
|
| 115 |
+
|
| 116 |
+
### Overview
|
| 117 |
+
|
| 118 |
+
The module provides a function `validate_email(email_address)` which
|
| 119 |
+
takes an email address and:
|
| 120 |
+
|
| 121 |
+
- Raises a `EmailNotValidError` with a helpful, human-readable error
|
| 122 |
+
message explaining why the email address is not valid, or
|
| 123 |
+
- Returns an object with a normalized form of the email address (which
|
| 124 |
+
you should use!) and other information about it.
|
| 125 |
+
|
| 126 |
+
When an email address is not valid, `validate_email` raises either an
|
| 127 |
+
`EmailSyntaxError` if the form of the address is invalid or an
|
| 128 |
+
`EmailUndeliverableError` if the domain name fails DNS checks. Both
|
| 129 |
+
exception classes are subclasses of `EmailNotValidError`, which in turn
|
| 130 |
+
is a subclass of `ValueError`.
|
| 131 |
+
|
| 132 |
+
But when an email address is valid, an object is returned containing
|
| 133 |
+
a normalized form of the email address (which you should use!) and
|
| 134 |
+
other information.
|
| 135 |
+
|
| 136 |
+
The validator doesn't, by default, permit obsoleted forms of email addresses
|
| 137 |
+
that no one uses anymore even though they are still valid and deliverable, since
|
| 138 |
+
they will probably give you grief if you're using email for login. (See
|
| 139 |
+
later in the document about how to allow some obsolete forms.)
|
| 140 |
+
|
| 141 |
+
The validator optionally checks that the domain name in the email address has
|
| 142 |
+
a DNS MX record indicating that it can receive email. (Except a Null MX record.
|
| 143 |
+
If there is no MX record, a fallback A/AAAA-record is permitted, unless
|
| 144 |
+
a reject-all SPF record is present.) DNS is slow and sometimes unavailable or
|
| 145 |
+
unreliable, so consider whether these checks are useful for your use case and
|
| 146 |
+
turn them off if they aren't.
|
| 147 |
+
There is nothing to be gained by trying to actually contact an SMTP server, so
|
| 148 |
+
that's not done here. For privacy, security, and practicality reasons, servers
|
| 149 |
+
are good at not giving away whether an address is
|
| 150 |
+
deliverable or not: email addresses that appear to accept mail at first
|
| 151 |
+
can bounce mail after a delay, and bounced mail may indicate a temporary
|
| 152 |
+
failure of a good email address (sometimes an intentional failure, like
|
| 153 |
+
greylisting).
|
| 154 |
+
|
| 155 |
+
### Options
|
| 156 |
+
|
| 157 |
+
The `validate_email` function also accepts the following keyword arguments
|
| 158 |
+
(defaults are as shown below):
|
| 159 |
+
|
| 160 |
+
`check_deliverability=True`: If true, DNS queries are made to check that the domain name in the email address (the part after the @-sign) can receive mail, as described above. Set to `False` to skip this DNS-based check. It is recommended to pass `False` when performing validation for login pages (but not account creation pages) since re-validation of a previously validated domain in your database by querying DNS at every login is probably undesirable. You can also set `email_validator.CHECK_DELIVERABILITY` to `False` to turn this off for all calls by default.
|
| 161 |
+
|
| 162 |
+
`dns_resolver=None`: Pass an instance of [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to control the DNS resolver including setting a timeout and [a cache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html). The `caching_resolver` function shown below is a helper function to construct a dns.resolver.Resolver with a [LRUCache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html#dns.resolver.LRUCache). Reuse the same resolver instance across calls to `validate_email` to make use of the cache.
|
| 163 |
+
|
| 164 |
+
`test_environment=False`: If `True`, DNS-based deliverability checks are disabled and `test` and `**.test` domain names are permitted (see below). You can also set `email_validator.TEST_ENVIRONMENT` to `True` to turn it on for all calls by default.
|
| 165 |
+
|
| 166 |
+
`allow_smtputf8=True`: Set to `False` to prohibit internationalized addresses that would
|
| 167 |
+
require the
|
| 168 |
+
[SMTPUTF8](https://tools.ietf.org/html/rfc6531) extension. You can also set `email_validator.ALLOW_SMTPUTF8` to `False` to turn it off for all calls by default.
|
| 169 |
+
|
| 170 |
+
`allow_quoted_local=False`: Set to `True` to allow obscure and potentially problematic email addresses in which the part of the address before the @-sign contains spaces, @-signs, or other surprising characters when the local part is surrounded in quotes (so-called quoted-string local parts). In the object returned by `validate_email`, the normalized local part removes any unnecessary backslash-escaping and even removes the surrounding quotes if the address would be valid without them. You can also set `email_validator.ALLOW_QUOTED_LOCAL` to `True` to turn this on for all calls by default.
|
| 171 |
+
|
| 172 |
+
`allow_domain_literal=False`: Set to `True` to allow bracketed IPv4 and "IPv6:"-prefixed IPv6 addresses in the domain part of the email address. No deliverability checks are performed for these addresses. In the object returned by `validate_email`, the normalized domain will use the condensed IPv6 format, if applicable. The object's `domain_address` attribute will hold the parsed `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object if applicable. You can also set `email_validator.ALLOW_DOMAIN_LITERAL` to `True` to turn this on for all calls by default.
|
| 173 |
+
|
| 174 |
+
`allow_display_name=False`: Set to `True` to allow a display name and bracketed address in the input string, like `My Name <me@example.org>`. It's implemented in the spirit but not the letter of RFC 5322 3.4, so it may be stricter or more relaxed than what you want. The display name, if present, is provided in the returned object's `display_name` field after being unquoted and unescaped. You can also set `email_validator.ALLOW_DISPLAY_NAME` to `True` to turn this on for all calls by default.
|
| 175 |
+
|
| 176 |
+
`allow_empty_local=False`: Set to `True` to allow an empty local part (i.e.
|
| 177 |
+
`@example.com`), e.g. for validating Postfix aliases.
|
| 178 |
+
|
| 179 |
+
`strict=False`: Set to `True` to perform additional syntax checks (currently only a local part length check). This should be used by mail service providers at address creation to ensure email addresses meet broad compatibility requirements.
|
| 180 |
+
|
| 181 |
+
### DNS timeout and cache
|
| 182 |
+
|
| 183 |
+
When validating many email addresses or to control the timeout (the default is 15 seconds), create a caching [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to reuse in each call. The `caching_resolver` function returns one easily for you:
|
| 184 |
+
|
| 185 |
+
```python
|
| 186 |
+
from email_validator import validate_email, caching_resolver
|
| 187 |
+
|
| 188 |
+
resolver = caching_resolver(timeout=10)
|
| 189 |
+
|
| 190 |
+
while True:
|
| 191 |
+
validate_email(email, dns_resolver=resolver)
|
| 192 |
+
```
|
| 193 |
+
|
| 194 |
+
### Test addresses
|
| 195 |
+
|
| 196 |
+
This library rejects email addresses that use the [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) `invalid`, `localhost`, `test`, and some others by raising `EmailSyntaxError`. This is to protect your system from abuse: You probably don't want a user to be able to cause an email to be sent to `localhost` (although they might be able to still do so via a malicious MX record). However, in your non-production test environments you may want to use `@test` or `@myname.test` email addresses. There are three ways you can allow this:
|
| 197 |
+
|
| 198 |
+
1. Add `test_environment=True` to the call to `validate_email` (see above).
|
| 199 |
+
2. Set `email_validator.TEST_ENVIRONMENT` to `True` globally.
|
| 200 |
+
3. Remove the special-use domain name that you want to use from `email_validator.SPECIAL_USE_DOMAIN_NAMES`, e.g.:
|
| 201 |
+
|
| 202 |
+
```python
|
| 203 |
+
import email_validator
|
| 204 |
+
email_validator.SPECIAL_USE_DOMAIN_NAMES.remove("test")
|
| 205 |
+
```
|
| 206 |
+
|
| 207 |
+
It is tempting to use `@example.com/net/org` in tests. They are *not* in this library's `SPECIAL_USE_DOMAIN_NAMES` list so you can, but shouldn't, use them. These domains are reserved to IANA for use in documentation so there is no risk of accidentally emailing someone at those domains. But beware that this library will nevertheless reject these domain names if DNS-based deliverability checks are not disabled because these domains do not resolve to domains that accept email. In tests, consider using your own domain name or `@test` or `@myname.test` instead.
|
| 208 |
+
|
| 209 |
+
Internationalized email addresses
|
| 210 |
+
---------------------------------
|
| 211 |
+
|
| 212 |
+
The email protocol SMTP and the domain name system DNS have historically
|
| 213 |
+
only allowed English (ASCII) characters in email addresses and domain names,
|
| 214 |
+
respectively. Each has adapted to internationalization in a separate
|
| 215 |
+
way, creating two separate aspects to email address internationalization.
|
| 216 |
+
|
| 217 |
+
(If your mail submission library doesn't support Unicode at all, then
|
| 218 |
+
immediately prior to mail submission you must replace the email address with
|
| 219 |
+
its ASCII-ized form. This library gives you back the ASCII-ized form in the
|
| 220 |
+
`ascii_email` field in the returned object.)
|
| 221 |
+
|
| 222 |
+
### Internationalized domain names (IDN)
|
| 223 |
+
|
| 224 |
+
The first is [internationalized domain names (RFC
|
| 225 |
+
5891)](https://tools.ietf.org/html/rfc5891), a.k.a. IDNA 2008. The DNS
|
| 226 |
+
system has not been updated with Unicode support. Instead, internationalized
|
| 227 |
+
domain names are converted into a special IDNA ASCII "[Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)"
|
| 228 |
+
form starting with `xn--`. When an email address has non-ASCII
|
| 229 |
+
characters in its domain part, the domain part is replaced with its IDNA
|
| 230 |
+
ASCII equivalent form in the process of mail transmission. Your mail
|
| 231 |
+
submission library probably does this for you transparently. ([Compliance
|
| 232 |
+
around the web is not very good though](http://archives.miloush.net/michkap/archive/2012/02/27/10273315.html).) This library conforms to IDNA 2008
|
| 233 |
+
using the [idna](https://github.com/kjd/idna) module by Kim Davies.
|
| 234 |
+
|
| 235 |
+
### Internationalized local parts
|
| 236 |
+
|
| 237 |
+
The second sort of internationalization is internationalization in the
|
| 238 |
+
*local* part of the address (before the @-sign). In non-internationalized
|
| 239 |
+
email addresses, only English letters, numbers, and some punctuation
|
| 240 |
+
(`._!#$%&'^``*+-=~/?{|}`) are allowed. In internationalized email address
|
| 241 |
+
local parts, a wider range of Unicode characters are allowed.
|
| 242 |
+
|
| 243 |
+
Email addresses with these non-ASCII characters require that your mail
|
| 244 |
+
submission library and all the mail servers along the route to the destination,
|
| 245 |
+
including your own outbound mail server, all support the
|
| 246 |
+
[SMTPUTF8 (RFC 6531)](https://tools.ietf.org/html/rfc6531) extension.
|
| 247 |
+
Support for SMTPUTF8 varies. If you know ahead of time that SMTPUTF8 is not
|
| 248 |
+
supported by your mail submission stack, then you must filter out addresses that
|
| 249 |
+
require SMTPUTF8 using the `allow_smtputf8=False` keyword argument (see above).
|
| 250 |
+
This will cause the validation function to raise an `EmailSyntaxError` if
|
| 251 |
+
delivery would require SMTPUTF8. If you do not set `allow_smtputf8=False`,
|
| 252 |
+
you can also check the value of the `smtputf8` field in the returned object.
|
| 253 |
+
|
| 254 |
+
### Unsafe Unicode characters are rejected
|
| 255 |
+
|
| 256 |
+
A surprisingly large number of Unicode characters are not safe to display,
|
| 257 |
+
especially when the email address is concatenated with other text, so this
|
| 258 |
+
library tries to protect you by not permitting reserved, non-, private use,
|
| 259 |
+
formatting (which can be used to alter the display order of characters),
|
| 260 |
+
whitespace, and control characters, and combining characters
|
| 261 |
+
as the first character of the local part and the domain name (so that they
|
| 262 |
+
cannot combine with something outside of the email address string or with
|
| 263 |
+
the @-sign). See https://qntm.org/safe and https://trojansource.codes/
|
| 264 |
+
for relevant prior work. (Other than whitespace, these are checks that
|
| 265 |
+
you should be applying to nearly all user inputs in a security-sensitive
|
| 266 |
+
context.) This does not guard against the well known problem that many
|
| 267 |
+
Unicode characters look alike, which can be used to fool humans reading
|
| 268 |
+
displayed text.
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
Normalization
|
| 272 |
+
-------------
|
| 273 |
+
|
| 274 |
+
### Unicode Normalization
|
| 275 |
+
|
| 276 |
+
The use of Unicode in email addresses introduced a normalization
|
| 277 |
+
problem. Different Unicode strings can look identical and have the same
|
| 278 |
+
semantic meaning to the user. The `normalized` field returned on successful
|
| 279 |
+
validation provides the correctly normalized form of the given email
|
| 280 |
+
address.
|
| 281 |
+
|
| 282 |
+
For example, the CJK fullwidth Latin letters are considered semantically
|
| 283 |
+
equivalent in domain names to their ASCII counterparts. This library
|
| 284 |
+
normalizes them to their ASCII counterparts (as required by IDNA):
|
| 285 |
+
|
| 286 |
+
```python
|
| 287 |
+
emailinfo = validate_email("me@Domain.com")
|
| 288 |
+
print(emailinfo.normalized)
|
| 289 |
+
print(emailinfo.ascii_email)
|
| 290 |
+
# prints "me@domain.com" twice
|
| 291 |
+
```
|
| 292 |
+
|
| 293 |
+
Because an end-user might type their email address in different (but
|
| 294 |
+
equivalent) un-normalized forms at different times, you ought to
|
| 295 |
+
replace what they enter with the normalized form immediately prior to
|
| 296 |
+
going into your database (during account creation), querying your database
|
| 297 |
+
(during login), or sending outbound mail.
|
| 298 |
+
|
| 299 |
+
The normalizations include lowercasing the domain part of the email
|
| 300 |
+
address (domain names are case-insensitive), [Unicode "NFC"
|
| 301 |
+
normalization](https://en.wikipedia.org/wiki/Unicode_equivalence) of the
|
| 302 |
+
whole address (which turns characters plus [combining
|
| 303 |
+
characters](https://en.wikipedia.org/wiki/Combining_character) into
|
| 304 |
+
precomposed characters where possible, replacement of [fullwidth and
|
| 305 |
+
halfwidth
|
| 306 |
+
characters](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms)
|
| 307 |
+
in the domain part, possibly other
|
| 308 |
+
[UTS46](http://unicode.org/reports/tr46) mappings on the domain part,
|
| 309 |
+
and conversion from Punycode to Unicode characters.
|
| 310 |
+
|
| 311 |
+
Normalization may change the characters in the email address and the
|
| 312 |
+
length of the email address, such that a string might be a valid address
|
| 313 |
+
before normalization but invalid after, or vice versa. This library only
|
| 314 |
+
permits addresses that are valid both before and after normalization.
|
| 315 |
+
|
| 316 |
+
(See [RFC 6532 (internationalized email) section
|
| 317 |
+
3.1](https://tools.ietf.org/html/rfc6532#section-3.1) and [RFC 5895
|
| 318 |
+
(IDNA 2008) section 2](http://www.ietf.org/rfc/rfc5895.txt).)
|
| 319 |
+
|
| 320 |
+
### Other Normalization
|
| 321 |
+
|
| 322 |
+
Normalization is also applied to quoted-string local parts and domain
|
| 323 |
+
literal IPv6 addresses if you have allowed them by the `allow_quoted_local`
|
| 324 |
+
and `allow_domain_literal` options. In quoted-string local parts, unnecessary
|
| 325 |
+
backslash escaping is removed and even the surrounding quotes are removed if
|
| 326 |
+
they are unnecessary. For IPv6 domain literals, the IPv6 address is
|
| 327 |
+
normalized to condensed form. [RFC 2142](https://datatracker.ietf.org/doc/html/rfc2142)
|
| 328 |
+
also requires lowercase normalization for some specific mailbox names like `postmaster@`.
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
Examples
|
| 332 |
+
--------
|
| 333 |
+
|
| 334 |
+
For the email address `test@joshdata.me`, the returned object is:
|
| 335 |
+
|
| 336 |
+
```python
|
| 337 |
+
ValidatedEmail(
|
| 338 |
+
normalized='test@joshdata.me',
|
| 339 |
+
local_part='test',
|
| 340 |
+
domain='joshdata.me',
|
| 341 |
+
ascii_email='test@joshdata.me',
|
| 342 |
+
ascii_local_part='test',
|
| 343 |
+
ascii_domain='joshdata.me',
|
| 344 |
+
smtputf8=False)
|
| 345 |
+
```
|
| 346 |
+
|
| 347 |
+
For the fictitious but valid address `example@ツ.ⓁⒾⒻⒺ`, which has an
|
| 348 |
+
internationalized domain but ASCII local part, the returned object is:
|
| 349 |
+
|
| 350 |
+
```python
|
| 351 |
+
ValidatedEmail(
|
| 352 |
+
normalized='example@ツ.life',
|
| 353 |
+
local_part='example',
|
| 354 |
+
domain='ツ.life',
|
| 355 |
+
ascii_email='example@xn--bdk.life',
|
| 356 |
+
ascii_local_part='example',
|
| 357 |
+
ascii_domain='xn--bdk.life',
|
| 358 |
+
smtputf8=False)
|
| 359 |
+
|
| 360 |
+
```
|
| 361 |
+
|
| 362 |
+
Note that `normalized` and other fields provide a normalized form of the
|
| 363 |
+
email address, domain name, and (in other cases) local part (see earlier
|
| 364 |
+
discussion of normalization), which you should use in your database.
|
| 365 |
+
|
| 366 |
+
Calling `validate_email` with the ASCII form of the above email address,
|
| 367 |
+
`example@xn--bdk.life`, returns the exact same information (i.e., the
|
| 368 |
+
`normalized` field always will contain Unicode characters, not Punycode).
|
| 369 |
+
|
| 370 |
+
For the fictitious address `ツ-test@joshdata.me`, which has an
|
| 371 |
+
internationalized local part, the returned object is:
|
| 372 |
+
|
| 373 |
+
```python
|
| 374 |
+
ValidatedEmail(
|
| 375 |
+
normalized='ツ-test@joshdata.me',
|
| 376 |
+
local_part='ツ-test',
|
| 377 |
+
domain='joshdata.me',
|
| 378 |
+
ascii_email=None,
|
| 379 |
+
ascii_local_part=None,
|
| 380 |
+
ascii_domain='joshdata.me',
|
| 381 |
+
smtputf8=True)
|
| 382 |
+
```
|
| 383 |
+
|
| 384 |
+
Now `smtputf8` is `True` and `ascii_email` is `None` because the local
|
| 385 |
+
part of the address is internationalized. The `local_part` and `normalized` fields
|
| 386 |
+
return the normalized form of the address.
|
| 387 |
+
|
| 388 |
+
Return value
|
| 389 |
+
------------
|
| 390 |
+
|
| 391 |
+
When an email address passes validation, the fields in the returned object
|
| 392 |
+
are:
|
| 393 |
+
|
| 394 |
+
| Field | Value |
|
| 395 |
+
| -----:|-------|
|
| 396 |
+
| `normalized` | The normalized form of the email address that you should put in your database. This combines the `local_part` and `domain` fields (see below). |
|
| 397 |
+
| `ascii_email` | If set, an ASCII-only form of the normalized email address by replacing the domain part with [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt). This field will be present when an ASCII-only form of the email address exists (including if the email address is already ASCII). If the local part of the email address contains internationalized characters, `ascii_email` will be `None`. If set, it merely combines `ascii_local_part` and `ascii_domain`. |
|
| 398 |
+
| `local_part` | The normalized local part of the given email address (before the @-sign). Normalization includes Unicode NFC normalization and removing unnecessary quoted-string quotes and backslashes. If `allow_quoted_local` is True and the surrounding quotes are necessary, the quotes _will_ be present in this field. |
|
| 399 |
+
| `ascii_local_part` | If set, the local part, which is composed of ASCII characters only. |
|
| 400 |
+
| `domain` | The canonical internationalized Unicode form of the domain part of the email address. If the returned string contains non-ASCII characters, either the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit the message or else the email address's domain part must be converted to IDNA ASCII first: Use `ascii_domain` field instead. |
|
| 401 |
+
| `ascii_domain` | The [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)-encoded form of the domain part of the given email address, as it would be transmitted on the wire. |
|
| 402 |
+
| `domain_address` | If domain literals are allowed and if the email address contains one, an `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object. |
|
| 403 |
+
| `display_name` | If no display name was present and angle brackets do not surround the address, this will be `None`; otherwise, it will be set to the display name, or the empty string if there were angle brackets but no display name. If the display name was quoted, it will be unquoted and unescaped. |
|
| 404 |
+
| `smtputf8` | A boolean indicating that the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit messages to this address because the local part of the address has non-ASCII characters (the local part cannot be IDNA-encoded). If `allow_smtputf8=False` is passed as an argument, this flag will always be false because an exception is raised if it would have been true. |
|
| 405 |
+
| `mx` | A list of (priority, domain) tuples of MX records specified in the DNS for the domain (see [RFC 5321 section 5](https://tools.ietf.org/html/rfc5321#section-5)). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. |
|
| 406 |
+
| `mx_fallback_type` | `None` if an `MX` record is found. If no MX records are actually specified in DNS and instead are inferred, through an obsolete mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. |
|
| 407 |
+
| `spf` | Any SPF record found while checking deliverability. Only set if the SPF record is queried. |
|
| 408 |
+
|
| 409 |
+
Assumptions
|
| 410 |
+
-----------
|
| 411 |
+
|
| 412 |
+
By design, this validator does not pass all email addresses that
|
| 413 |
+
strictly conform to the standards. Many email address forms are obsolete
|
| 414 |
+
or likely to cause trouble:
|
| 415 |
+
|
| 416 |
+
* The validator assumes the email address is intended to be
|
| 417 |
+
usable on the public Internet. The domain part
|
| 418 |
+
of the email address must be a resolvable domain name
|
| 419 |
+
(see the deliverability checks described above).
|
| 420 |
+
Most [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml)
|
| 421 |
+
and their subdomains, as well as
|
| 422 |
+
domain names without a `.`, are rejected as a syntax error
|
| 423 |
+
(except see the `test_environment` parameter above).
|
| 424 |
+
* Obsolete email syntaxes are rejected:
|
| 425 |
+
The unusual ["(comment)" syntax](https://github.com/JoshData/python-email-validator/issues/77)
|
| 426 |
+
is rejected. Extremely old obsolete syntaxes are
|
| 427 |
+
rejected. Quoted-string local parts and domain-literal addresses
|
| 428 |
+
are rejected by default, but there are options to allow them (see above).
|
| 429 |
+
No one uses these forms anymore, and I can't think of any reason why anyone
|
| 430 |
+
using this library would need to accept them.
|
| 431 |
+
|
| 432 |
+
Testing
|
| 433 |
+
-------
|
| 434 |
+
|
| 435 |
+
Tests can be run using
|
| 436 |
+
|
| 437 |
+
```sh
|
| 438 |
+
pip install -r test_requirements.txt
|
| 439 |
+
make test
|
| 440 |
+
```
|
| 441 |
+
|
| 442 |
+
Tests run with mocked DNS responses. When adding or changing tests, temporarily turn on the `BUILD_MOCKED_DNS_RESPONSE_DATA` flag in `tests/mocked_dns_responses.py` to re-build the database of mocked responses from live queries.
|
| 443 |
+
|
| 444 |
+
For Project Maintainers
|
| 445 |
+
-----------------------
|
| 446 |
+
|
| 447 |
+
The package is distributed as a universal wheel and as a source package.
|
| 448 |
+
|
| 449 |
+
To release:
|
| 450 |
+
|
| 451 |
+
* Update CHANGELOG.md.
|
| 452 |
+
* Update the version number in `email_validator/version.py`.
|
| 453 |
+
* Make & push a commit with the new version number and make sure tests pass.
|
| 454 |
+
* Make a release at https://github.com/JoshData/python-email-validator/releases/new creating a new tag (or use command below).
|
| 455 |
+
* Publish a source and wheel distribution to pypi (see command below).
|
| 456 |
+
|
| 457 |
+
```sh
|
| 458 |
+
git tag v$(cat email_validator/version.py | sed "s/.* = //" | sed 's/"//g')
|
| 459 |
+
git push --tags
|
| 460 |
+
./release_to_pypi.sh
|
| 461 |
+
```
|
| 462 |
+
|
| 463 |
+
License
|
| 464 |
+
-------
|
| 465 |
+
|
| 466 |
+
This project is free of any copyright restrictions per the [Unlicense](https://unlicense.org/). (Prior to Feb. 4, 2024, the project was made available under the terms of the [CC0 1.0 Universal public domain dedication](http://creativecommons.org/publicdomain/zero/1.0/).) See [LICENSE](LICENSE) and [CONTRIBUTING.md](CONTRIBUTING.md).
|
lib/python3.13/site-packages/email_validator-2.3.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/email_validator,sha256=MqSUVoFzpngmj7FBn_rk8-FtH_HuBUhvK32l6w_9pTc,341
|
| 2 |
+
email_validator-2.3.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
|
| 3 |
+
email_validator-2.3.0.dist-info/METADATA,sha256=Kpe4Hu_NhWvICwNG9H-i2AC5pDi_j5IxrgD-kx1cn7w,26006
|
| 4 |
+
email_validator-2.3.0.dist-info/RECORD,,
|
| 5 |
+
email_validator-2.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
email_validator-2.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
| 7 |
+
email_validator-2.3.0.dist-info/entry_points.txt,sha256=zRM_6bNIUSHTbNx5u6M3nK1MAguvryrc9hICC6HyrBg,66
|
| 8 |
+
email_validator-2.3.0.dist-info/licenses/LICENSE,sha256=ZyF5dS4QkTSj-yvdB4Cyn9t6A5dPD1hqE66tUSlWLUw,1212
|
| 9 |
+
email_validator-2.3.0.dist-info/top_level.txt,sha256=fYDOSWFZke46ut7WqdOAJjjhlpPYAaOwOwIsh3s8oWI,16
|
| 10 |
+
email_validator/__init__.py,sha256=g3oVBGdXGJATgBnVqt5Q7pUhXM9QrmOl5qWSu_RtWmQ,4381
|
| 11 |
+
email_validator/__main__.py,sha256=uc6i2EMCK67cCgcHr5ZFG5LqB3khljmR7lNAYZGSUKY,2302
|
| 12 |
+
email_validator/deliverability.py,sha256=ZIjFkgWMzxYexanwKhrRHLTnjWMqlR5b0ltOnlA0u-E,7216
|
| 13 |
+
email_validator/exceptions.py,sha256=Ry2j5FMpEe9JthmTF3zF5pGgWer-QmWc1m0szXAZ7fo,434
|
| 14 |
+
email_validator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 15 |
+
email_validator/rfc_constants.py,sha256=LhUiBZLBw_Nn-KHkH--nVwOWFlgz2aCuauj98ZSl-gk,3443
|
| 16 |
+
email_validator/syntax.py,sha256=puufskeIG6_ORWb7fvRdV_Yczmk4bibNZPs9TjWE1K0,38971
|
| 17 |
+
email_validator/types.py,sha256=mvmwN9R3lFx9Tv9wtWvDzxfit6mr_5wQmY2I0HjuqRk,5588
|
| 18 |
+
email_validator/validate_email.py,sha256=bmrdQ9dGt1-Mk0rwDRrX-l6xbYhQ0US20Dz46Aatnkk,9928
|
| 19 |
+
email_validator/version.py,sha256=CpK8IH_dCUAwg9tqv7zm9FxbBFkxCnED1JUiRe7cftU,22
|
lib/python3.13/site-packages/email_validator-2.3.0.dist-info/REQUESTED
ADDED
|
File without changes
|
lib/python3.13/site-packages/email_validator-2.3.0.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
email_validator = email_validator.__main__:main
|
lib/python3.13/site-packages/email_validator-2.3.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
email_validator
|
lib/python3.13/site-packages/example.py
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#################################################################################
|
| 2 |
+
# Copyright (c) 2020, NVIDIA Corporation. All rights reserved. #
|
| 3 |
+
# #
|
| 4 |
+
# Redistribution and use in source and binary forms, with or without #
|
| 5 |
+
# modification, are permitted provided that the following conditions are met: #
|
| 6 |
+
# #
|
| 7 |
+
# * Redistributions of source code must retain the above copyright notice, #
|
| 8 |
+
# this list of conditions and the following disclaimer. #
|
| 9 |
+
# * Redistributions in binary form must reproduce the above copyright #
|
| 10 |
+
# notice, this list of conditions and the following disclaimer in the #
|
| 11 |
+
# documentation and/or other materials provided with the distribution. #
|
| 12 |
+
# * Neither the name of the NVIDIA Corporation nor the names of its #
|
| 13 |
+
# contributors may be used to endorse or promote products derived from #
|
| 14 |
+
# this software without specific prior written permission. #
|
| 15 |
+
# #
|
| 16 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" #
|
| 17 |
+
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
|
| 18 |
+
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
|
| 19 |
+
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
|
| 20 |
+
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
|
| 21 |
+
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
|
| 22 |
+
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
|
| 23 |
+
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
|
| 24 |
+
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
|
| 25 |
+
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF #
|
| 26 |
+
# THE POSSIBILITY OF SUCH DAMAGE. #
|
| 27 |
+
#################################################################################
|
| 28 |
+
|
| 29 |
+
#
|
| 30 |
+
# Sample script to demonstrate the usage of NVML API python bindings
|
| 31 |
+
#
|
| 32 |
+
|
| 33 |
+
# To Run:
|
| 34 |
+
# $ python ./example.py
|
| 35 |
+
|
| 36 |
+
from pynvml import *
|
| 37 |
+
|
| 38 |
+
#
|
| 39 |
+
# Helper function
|
| 40 |
+
#
|
| 41 |
+
def StrVirt(mode):
    """Return a human-readable name for an NVML GPU virtualization mode.

    Parameters:
        mode: one of the NVML_GPU_VIRTUALIZATION_MODE_* constants
            (as returned by nvmlDeviceGetVirtualizationMode).

    Returns:
        A descriptive string, or "Unknown" for unrecognized values.
    """
    # Dict lookup replaces the original if/elif chain (and drops the
    # un-Pythonic trailing semicolons); the mapping and the "Unknown"
    # fallback are behaviorally identical to the original.
    names = {
        NVML_GPU_VIRTUALIZATION_MODE_NONE: "None",
        NVML_GPU_VIRTUALIZATION_MODE_PASSTHROUGH: "Pass-Through",
        NVML_GPU_VIRTUALIZATION_MODE_VGPU: "VGPU",
        NVML_GPU_VIRTUALIZATION_MODE_HOST_VGPU: "Host VGPU",
        NVML_GPU_VIRTUALIZATION_MODE_HOST_VSGA: "Host VSGA",
    }
    return names.get(mode, "Unknown")
|
| 54 |
+
|
| 55 |
+
#
|
| 56 |
+
# Converts errors into string messages
|
| 57 |
+
#
|
| 58 |
+
def handleError(err):
    """Convert an NVMLError into a string suitable for display.

    Parameters:
        err: an NVMLError instance (its ``value`` holds the NVML error code).

    Returns:
        "N/A" when the feature is simply unsupported on this GPU (an
        expected, non-fatal condition), otherwise the error's own message.
    """
    if err.value == NVML_ERROR_NOT_SUPPORTED:
        return "N/A"
    # str(err) is the idiomatic spelling of err.__str__().
    return str(err)
|
| 63 |
+
|
| 64 |
+
#######
|
| 65 |
+
def deviceQuery():
    """Build an XML-like report string describing every NVML-visible GPU.

    Initializes NVML, then for each attached device collects the PCI bus id,
    product name, brand, serial, UUID, virtualization mode, and vGPU
    licensing details, and finally shuts NVML down.

    Returns:
        str: the concatenated report. Per-field NVML failures are rendered
        as "N/A" (via handleError); a top-level NVML failure is appended as
        an 'example.py: <error>' line instead of being raised.
    """

    strResult = ''
    try:
        #
        # Initialize NVML
        #
        nvmlInit()

        strResult += ' <driver_version>' + str(nvmlSystemGetDriverVersion()) + '</driver_version>\n'

        deviceCount = nvmlDeviceGetCount()
        strResult += ' <attached_gpus>' + str(deviceCount) + '</attached_gpus>\n'

        for i in range(0, deviceCount):
            handle = nvmlDeviceGetHandleByIndex(i)

            pciInfo = nvmlDeviceGetPciInfo(handle)

            strResult += ' <gpu id="%s">\n' % pciInfo.busId

            strResult += ' <product_name>' + nvmlDeviceGetName(handle) + '</product_name>\n'

            # Display names for the NVML brand enum values.
            brandNames = {NVML_BRAND_UNKNOWN : "Unknown",
                          NVML_BRAND_QUADRO : "Quadro",
                          NVML_BRAND_TESLA : "Tesla",
                          NVML_BRAND_NVS : "NVS",
                          NVML_BRAND_GRID : "Grid",
                          NVML_BRAND_TITAN : "Titan",
                          NVML_BRAND_GEFORCE : "GeForce",
                          NVML_BRAND_NVIDIA_VAPPS : "NVIDIA Virtual Applications",
                          NVML_BRAND_NVIDIA_VPC : "NVIDIA Virtual PC",
                          NVML_BRAND_NVIDIA_VCS : "NVIDIA Virtual Compute Server",
                          NVML_BRAND_NVIDIA_VWS : "NVIDIA RTX Virtual Workstation",
                          NVML_BRAND_NVIDIA_CLOUD_GAMING : "NVIDIA Cloud Gaming",
                          NVML_BRAND_QUADRO_RTX : "Quadro RTX",
                          NVML_BRAND_NVIDIA_RTX : "NVIDIA RTX",
                          NVML_BRAND_NVIDIA : "NVIDIA",
                          NVML_BRAND_GEFORCE_RTX : "GeForce RTX",
                          NVML_BRAND_TITAN_RTX : "TITAN RTX",
                          }

            try:
                # If nvmlDeviceGetBrand() succeeds it is guaranteed to be in the dictionary
                brandName = brandNames[nvmlDeviceGetBrand(handle)]
            except NVMLError as err:
                brandName = handleError(err)

            strResult += ' <product_brand>' + brandName + '</product_brand>\n'

            try:
                serial = nvmlDeviceGetSerial(handle)
            except NVMLError as err:
                serial = handleError(err)

            strResult += ' <serial>' + serial + '</serial>\n'

            try:
                uuid = nvmlDeviceGetUUID(handle)
            except NVMLError as err:
                uuid = handleError(err)

            strResult += ' <uuid>' + uuid + '</uuid>\n'

            strResult += ' <gpu_virtualization_mode>\n'
            try:
                mode = StrVirt(nvmlDeviceGetVirtualizationMode(handle))
            except NVMLError as err:
                mode = handleError(err)
            strResult += ' <virtualization_mode>' + mode + '</virtualization_mode>\n'
            strResult += ' </gpu_virtualization_mode>\n'

            try:
                gridLicensableFeatures = nvmlDeviceGetGridLicensableFeatures(handle)
                if gridLicensableFeatures.isGridLicenseSupported == 1:
                    strResult += ' <vgpu_software_licensed_product>\n'
                    # BUGFIX: the original reused loop variable `i` here,
                    # shadowing the device index of the enclosing loop;
                    # use a distinct name for the feature index.
                    for fidx in range(gridLicensableFeatures.licensableFeaturesCount):
                        if gridLicensableFeatures.gridLicensableFeatures[fidx].featureState == 0:
                            if nvmlDeviceGetVirtualizationMode(handle) == NVML_GPU_VIRTUALIZATION_MODE_PASSTHROUGH:
                                strResult += ' <licensed_product_name>' + 'NVIDIA Virtual Applications' + '</licensed_product_name>\n'
                                strResult += ' <license_status>' + 'Licensed' + '</license_status>\n'
                            else:
                                strResult += ' <licensed_product_name>' + gridLicensableFeatures.gridLicensableFeatures[fidx].productName + '</licensed_product_name>\n'
                                strResult += ' <license_status>' + 'Unlicensed' + '</license_status>\n'
                        else:
                            strResult += ' <licensed_product_name>' + gridLicensableFeatures.gridLicensableFeatures[fidx].productName + '</licensed_product_name>\n'
                            strResult += ' <license_status>' + 'Licensed' + '</license_status>\n'
                    strResult += ' </vgpu_software_licensed_product>\n'
            except NVMLError as err:
                gridLicensableFeatures = handleError(err)

            strResult += ' </gpu>\n'

    except NVMLError as err:
        strResult += 'example.py: ' + err.__str__() + '\n'

    # BUGFIX: shutdown itself can raise an NVMLError (e.g. Uninitialized when
    # nvmlInit() failed above); swallow it so the partial report is still
    # returned instead of the function raising after already catching the
    # original failure.
    try:
        nvmlShutdown()
    except NVMLError:
        pass

    return strResult
|
| 165 |
+
|
| 166 |
+
# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    print(deviceQuery())
|
| 169 |
+
|
lib/python3.13/site-packages/gguf/__init__.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .constants import *
|
| 2 |
+
from .lazy import *
|
| 3 |
+
from .gguf_reader import *
|
| 4 |
+
from .gguf_writer import *
|
| 5 |
+
from .quants import *
|
| 6 |
+
from .tensor_mapping import *
|
| 7 |
+
from .vocab import *
|
| 8 |
+
from .utility import *
|
| 9 |
+
from .metadata import *
|
lib/python3.13/site-packages/gguf/constants.py
ADDED
|
@@ -0,0 +1,2438 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from enum import Enum, IntEnum, auto
|
| 4 |
+
from typing import Any
|
| 5 |
+
|
| 6 |
+
#
|
| 7 |
+
# constants
|
| 8 |
+
#
|
| 9 |
+
|
| 10 |
+
# Little-endian uint32 spelling of the ASCII bytes "GGUF" at the file start.
GGUF_MAGIC = 0x46554747 # "GGUF"
# Current GGUF container format version written by this library.
GGUF_VERSION = 3
# Default byte alignment of the tensor-data section.
GGUF_DEFAULT_ALIGNMENT = 32
GGML_QUANT_VERSION = 2 # GGML_QNT_VERSION from ggml.h
|
| 14 |
+
|
| 15 |
+
#
|
| 16 |
+
# metadata keys
|
| 17 |
+
#
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Keys:
    """Namespace of GGUF metadata key strings.

    Keys containing ``{arch}``, ``{id}`` or ``{name}`` placeholders are
    templates: callers substitute the architecture name / index / template
    name via ``str.format`` before writing or reading the key.
    """

    class General:
        """Architecture-independent ``general.*`` metadata keys."""
        TYPE = "general.type"
        ARCHITECTURE = "general.architecture"
        QUANTIZATION_VERSION = "general.quantization_version"
        ALIGNMENT = "general.alignment"
        FILE_TYPE = "general.file_type"

        # Authorship Metadata
        NAME = "general.name"
        AUTHOR = "general.author"
        VERSION = "general.version"
        ORGANIZATION = "general.organization"

        FINETUNE = "general.finetune"
        BASENAME = "general.basename"

        DESCRIPTION = "general.description"
        QUANTIZED_BY = "general.quantized_by"

        SIZE_LABEL = "general.size_label"

        # Licensing details
        LICENSE = "general.license"
        LICENSE_NAME = "general.license.name"
        LICENSE_LINK = "general.license.link"

        # Typically represents the converted GGUF repo (Unless native)
        URL = "general.url" # Model Website/Paper
        DOI = "general.doi"
        UUID = "general.uuid"
        REPO_URL = "general.repo_url" # Model Source Repository (git/svn/etc...)

        # Model Source during conversion
        SOURCE_URL = "general.source.url" # Model Website/Paper
        SOURCE_DOI = "general.source.doi"
        SOURCE_UUID = "general.source.uuid"
        SOURCE_REPO_URL = "general.source.repo_url" # Model Source Repository (git/svn/etc...)

        # Base Model Source. There can be more than one source if it's a merged
        # model like with 'Mistral-7B-Merge-14-v0.1'. This will assist in
        # tracing linage of models as it is finetuned or merged over time.
        BASE_MODEL_COUNT = "general.base_model.count"
        BASE_MODEL_NAME = "general.base_model.{id}.name"
        BASE_MODEL_AUTHOR = "general.base_model.{id}.author"
        BASE_MODEL_VERSION = "general.base_model.{id}.version"
        BASE_MODEL_ORGANIZATION = "general.base_model.{id}.organization"
        BASE_MODEL_DESCRIPTION = "general.base_model.{id}.description"
        BASE_MODEL_URL = "general.base_model.{id}.url" # Model Website/Paper
        BASE_MODEL_DOI = "general.base_model.{id}.doi"
        BASE_MODEL_UUID = "general.base_model.{id}.uuid"
        BASE_MODEL_REPO_URL = "general.base_model.{id}.repo_url" # Model Source Repository (git/svn/etc...)

        # Dataset Source
        DATASET_COUNT = "general.dataset.count"
        DATASET_NAME = "general.dataset.{id}.name"
        DATASET_AUTHOR = "general.dataset.{id}.author"
        DATASET_VERSION = "general.dataset.{id}.version"
        DATASET_ORGANIZATION = "general.dataset.{id}.organization"
        DATASET_DESCRIPTION = "general.dataset.{id}.description"
        DATASET_URL = "general.dataset.{id}.url" # Model Website/Paper
        DATASET_DOI = "general.dataset.{id}.doi"
        DATASET_UUID = "general.dataset.{id}.uuid"
        DATASET_REPO_URL = "general.dataset.{id}.repo_url" # Model Source Repository (git/svn/etc...)

        # Array based KV stores
        TAGS = "general.tags"
        LANGUAGES = "general.languages"

    class LLM:
        """Per-architecture model hyperparameter keys (``{arch}.*``)."""
        VOCAB_SIZE = "{arch}.vocab_size"
        CONTEXT_LENGTH = "{arch}.context_length"
        EMBEDDING_LENGTH = "{arch}.embedding_length"
        FEATURES_LENGTH = "{arch}.features_length"
        BLOCK_COUNT = "{arch}.block_count"
        LEADING_DENSE_BLOCK_COUNT = "{arch}.leading_dense_block_count"
        FEED_FORWARD_LENGTH = "{arch}.feed_forward_length"
        EXPERT_FEED_FORWARD_LENGTH = "{arch}.expert_feed_forward_length"
        EXPERT_SHARED_FEED_FORWARD_LENGTH = "{arch}.expert_shared_feed_forward_length"
        USE_PARALLEL_RESIDUAL = "{arch}.use_parallel_residual"
        TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout"
        EXPERT_COUNT = "{arch}.expert_count"
        EXPERT_USED_COUNT = "{arch}.expert_used_count"
        EXPERT_SHARED_COUNT = "{arch}.expert_shared_count"
        EXPERT_WEIGHTS_SCALE = "{arch}.expert_weights_scale"
        EXPERT_WEIGHTS_NORM = "{arch}.expert_weights_norm"
        EXPERT_GATING_FUNC = "{arch}.expert_gating_func"
        MOE_EVERY_N_LAYERS = "{arch}.moe_every_n_layers"
        POOLING_TYPE = "{arch}.pooling_type"
        LOGIT_SCALE = "{arch}.logit_scale"
        DECODER_START_TOKEN_ID = "{arch}.decoder_start_token_id"
        ATTN_LOGIT_SOFTCAPPING = "{arch}.attn_logit_softcapping"
        FINAL_LOGIT_SOFTCAPPING = "{arch}.final_logit_softcapping"
        SWIN_NORM = "{arch}.swin_norm"
        RESCALE_EVERY_N_LAYERS = "{arch}.rescale_every_n_layers"
        TIME_MIX_EXTRA_DIM = "{arch}.time_mix_extra_dim"
        TIME_DECAY_EXTRA_DIM = "{arch}.time_decay_extra_dim"
        RESIDUAL_SCALE = "{arch}.residual_scale"
        EMBEDDING_SCALE = "{arch}.embedding_scale"
        TOKEN_SHIFT_COUNT = "{arch}.token_shift_count"
        INTERLEAVE_MOE_LAYER_STEP = "{arch}.interleave_moe_layer_step"

    class Attention:
        """Attention-layer hyperparameter keys (``{arch}.attention.*``)."""
        HEAD_COUNT = "{arch}.attention.head_count"
        HEAD_COUNT_KV = "{arch}.attention.head_count_kv"
        MAX_ALIBI_BIAS = "{arch}.attention.max_alibi_bias"
        CLAMP_KQV = "{arch}.attention.clamp_kqv"
        KEY_LENGTH = "{arch}.attention.key_length"
        VALUE_LENGTH = "{arch}.attention.value_length"
        LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon"
        LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon"
        GROUPNORM_EPS = "{arch}.attention.group_norm_epsilon"
        GROUPNORM_GROUPS = "{arch}.attention.group_norm_groups"
        CAUSAL = "{arch}.attention.causal"
        Q_LORA_RANK = "{arch}.attention.q_lora_rank"
        KV_LORA_RANK = "{arch}.attention.kv_lora_rank"
        DECAY_LORA_RANK = "{arch}.attention.decay_lora_rank"
        ICLR_LORA_RANK = "{arch}.attention.iclr_lora_rank"
        VALUE_RESIDUAL_MIX_LORA_RANK = "{arch}.attention.value_residual_mix_lora_rank"
        GATE_LORA_RANK = "{arch}.attention.gate_lora_rank"
        REL_BUCKETS_COUNT = "{arch}.attention.relative_buckets_count"
        SLIDING_WINDOW = "{arch}.attention.sliding_window"
        SCALE = "{arch}.attention.scale"
        KEY_LENGTH_MLA = "{arch}.attention.key_length_mla"
        VALUE_LENGTH_MLA = "{arch}.attention.value_length_mla"

    class Rope:
        """Rotary position embedding keys (``{arch}.rope.*``)."""
        DIMENSION_COUNT = "{arch}.rope.dimension_count"
        DIMENSION_SECTIONS = "{arch}.rope.dimension_sections"
        FREQ_BASE = "{arch}.rope.freq_base"
        SCALING_TYPE = "{arch}.rope.scaling.type"
        SCALING_FACTOR = "{arch}.rope.scaling.factor"
        SCALING_ATTN_FACTOR = "{arch}.rope.scaling.attn_factor"
        SCALING_ORIG_CTX_LEN = "{arch}.rope.scaling.original_context_length"
        SCALING_FINETUNED = "{arch}.rope.scaling.finetuned"
        SCALING_YARN_LOG_MUL = "{arch}.rope.scaling.yarn_log_multiplier"

    class Split:
        """Keys for multi-file (sharded) GGUF models (``split.*``)."""
        LLM_KV_SPLIT_NO = "split.no"
        LLM_KV_SPLIT_COUNT = "split.count"
        LLM_KV_SPLIT_TENSORS_COUNT = "split.tensors.count"

    class SSM:
        """State-space-model (e.g. Mamba) keys (``{arch}.ssm.*``)."""
        CONV_KERNEL = "{arch}.ssm.conv_kernel"
        INNER_SIZE = "{arch}.ssm.inner_size"
        STATE_SIZE = "{arch}.ssm.state_size"
        TIME_STEP_RANK = "{arch}.ssm.time_step_rank"
        DT_B_C_RMS = "{arch}.ssm.dt_b_c_rms"

    class WKV:
        """RWKV-style WKV attention keys (``{arch}.wkv.*``)."""
        HEAD_SIZE = "{arch}.wkv.head_size"

    class PosNet:
        """PosNet sub-network keys (``{arch}.posnet.*``)."""
        EMBEDDING_LENGTH = "{arch}.posnet.embedding_length"
        BLOCK_COUNT = "{arch}.posnet.block_count"

    class ConvNext:
        """ConvNext sub-network keys (``{arch}.convnext.*``)."""
        EMBEDDING_LENGTH = "{arch}.convnext.embedding_length"
        BLOCK_COUNT = "{arch}.convnext.block_count"

    class Classifier:
        """Classification-head keys (``{arch}.classifier.*``)."""
        OUTPUT_LABELS = "{arch}.classifier.output_labels"

    class Tokenizer:
        """Tokenizer metadata keys (``tokenizer.*``)."""
        MODEL = "tokenizer.ggml.model"
        PRE = "tokenizer.ggml.pre"
        LIST = "tokenizer.ggml.tokens"
        TOKEN_TYPE = "tokenizer.ggml.token_type"
        TOKEN_TYPE_COUNT = "tokenizer.ggml.token_type_count" # for BERT-style token types
        SCORES = "tokenizer.ggml.scores"
        MERGES = "tokenizer.ggml.merges"
        BOS_ID = "tokenizer.ggml.bos_token_id"
        EOS_ID = "tokenizer.ggml.eos_token_id"
        EOT_ID = "tokenizer.ggml.eot_token_id"
        EOM_ID = "tokenizer.ggml.eom_token_id"
        UNK_ID = "tokenizer.ggml.unknown_token_id"
        # NOTE: "seperator" misspelling below is part of the on-disk GGUF key
        # and must be preserved for format compatibility.
        SEP_ID = "tokenizer.ggml.seperator_token_id"
        PAD_ID = "tokenizer.ggml.padding_token_id"
        MASK_ID = "tokenizer.ggml.mask_token_id"
        ADD_BOS = "tokenizer.ggml.add_bos_token"
        ADD_EOS = "tokenizer.ggml.add_eos_token"
        ADD_PREFIX = "tokenizer.ggml.add_space_prefix"
        REMOVE_EXTRA_WS = "tokenizer.ggml.remove_extra_whitespaces"
        PRECOMPILED_CHARSMAP = "tokenizer.ggml.precompiled_charsmap"
        HF_JSON = "tokenizer.huggingface.json"
        RWKV = "tokenizer.rwkv.world"
        CHAT_TEMPLATE = "tokenizer.chat_template"
        CHAT_TEMPLATE_N = "tokenizer.chat_template.{name}"
        CHAT_TEMPLATES = "tokenizer.chat_templates"
        # FIM/Infill special tokens constants
        FIM_PRE_ID = "tokenizer.ggml.fim_pre_token_id"
        FIM_SUF_ID = "tokenizer.ggml.fim_suf_token_id"
        FIM_MID_ID = "tokenizer.ggml.fim_mid_token_id"
        FIM_PAD_ID = "tokenizer.ggml.fim_pad_token_id"
        FIM_REP_ID = "tokenizer.ggml.fim_rep_token_id"
        FIM_SEP_ID = "tokenizer.ggml.fim_sep_token_id"
        # deprecated:
        PREFIX_ID = "tokenizer.ggml.prefix_token_id"
        SUFFIX_ID = "tokenizer.ggml.suffix_token_id"
        MIDDLE_ID = "tokenizer.ggml.middle_token_id"

    class Adapter:
        """LoRA/adapter file keys (``adapter.*``)."""
        TYPE = "adapter.type"
        LORA_ALPHA = "adapter.lora.alpha"

    class Clip:
        """Top-level CLIP/multimodal projector keys (``clip.*``)."""
        PROJECTOR_TYPE = "clip.projector_type"
        HAS_VISION_ENCODER = "clip.has_vision_encoder"
        HAS_AUDIO_ENCODER = "clip.has_audio_encoder"
        HAS_LLAVA_PROJECTOR = "clip.has_llava_projector"

    class ClipVision:
        """CLIP vision-tower keys (``clip.vision.*``)."""
        IMAGE_SIZE = "clip.vision.image_size"
        PATCH_SIZE = "clip.vision.patch_size"
        EMBEDDING_LENGTH = "clip.vision.embedding_length"
        FEED_FORWARD_LENGTH = "clip.vision.feed_forward_length"
        PROJECTION_DIM = "clip.vision.projection_dim"
        BLOCK_COUNT = "clip.vision.block_count"
        IMAGE_MEAN = "clip.vision.image_mean"
        IMAGE_STD = "clip.vision.image_std"
        SPATIAL_MERGE_SIZE = "clip.vision.spatial_merge_size"
        USE_GELU = "clip.use_gelu"
        USE_SILU = "clip.use_silu"
        N_WA_PATTERN = "clip.vision.n_wa_pattern" # used by qwen2.5vl

        class Attention:
            HEAD_COUNT = "clip.vision.attention.head_count"
            LAYERNORM_EPS = "clip.vision.attention.layer_norm_epsilon"

        class Projector:
            SCALE_FACTOR = "clip.vision.projector.scale_factor"

    class ClipAudio:
        """CLIP audio-tower keys (``clip.audio.*``)."""
        NUM_MEL_BINS = "clip.audio.num_mel_bins"
        EMBEDDING_LENGTH = "clip.audio.embedding_length"
        FEED_FORWARD_LENGTH = "clip.audio.feed_forward_length"
        PROJECTION_DIM = "clip.audio.projection_dim"
        BLOCK_COUNT = "clip.audio.block_count"

        class Attention:
            HEAD_COUNT = "clip.audio.attention.head_count"
            LAYERNORM_EPS = "clip.audio.attention.layer_norm_epsilon"

        class Projector:
            STACK_FACTOR = "clip.audio.projector.stack_factor"
|
| 265 |
+
|
| 266 |
+
#
# recommended mapping of model tensor names for storage in gguf
#
class GGUFType:
    """Values for the file-type metadata key: what kind of GGUF file this is."""
    MODEL   = "model"
    ADAPTER = "adapter"
    MMPROJ  = "mmproj"  # dummy, unused for now
class MODEL_ARCH(IntEnum):
    """Model architectures known to the GGUF tooling.

    Member ORDER is significant: values come from ``auto()`` and are used
    as dict keys elsewhere, so new architectures must be appended, never
    inserted.
    """
    MMPROJ           = auto()  # dummy arch for clip.cpp
    LLAMA            = auto()
    LLAMA4           = auto()
    DECI             = auto()
    FALCON           = auto()
    BAICHUAN         = auto()
    GROK             = auto()
    GPT2             = auto()
    GPTJ             = auto()
    GPTNEOX          = auto()
    MPT              = auto()
    STARCODER        = auto()
    REFACT           = auto()
    BERT             = auto()
    NOMIC_BERT       = auto()
    NOMIC_BERT_MOE   = auto()
    NEO_BERT         = auto()
    JINA_BERT_V2     = auto()
    BLOOM            = auto()
    STABLELM         = auto()
    QWEN             = auto()
    QWEN2            = auto()
    QWEN2MOE         = auto()
    QWEN2VL          = auto()
    QWEN3            = auto()
    QWEN3MOE         = auto()
    PHI2             = auto()
    PHI3             = auto()
    PHIMOE           = auto()
    PLAMO            = auto()
    CODESHELL        = auto()
    ORION            = auto()
    INTERNLM2        = auto()
    MINICPM          = auto()
    MINICPM3         = auto()
    GEMMA            = auto()
    GEMMA2           = auto()
    GEMMA3           = auto()
    STARCODER2       = auto()
    RWKV6            = auto()
    RWKV6QWEN2       = auto()
    RWKV7            = auto()
    ARWKV7           = auto()
    MAMBA            = auto()
    XVERSE           = auto()
    COMMAND_R        = auto()
    COHERE2          = auto()
    DBRX             = auto()
    OLMO             = auto()
    OLMO2            = auto()
    OLMOE            = auto()
    OPENELM          = auto()
    ARCTIC           = auto()
    DEEPSEEK         = auto()
    DEEPSEEK2        = auto()
    CHATGLM          = auto()
    GLM4             = auto()
    BITNET           = auto()
    T5               = auto()
    T5ENCODER        = auto()
    JAIS             = auto()
    NEMOTRON         = auto()
    EXAONE           = auto()
    GRANITE          = auto()
    GRANITE_MOE      = auto()
    CHAMELEON        = auto()
    WAVTOKENIZER_DEC = auto()
    PLM              = auto()
    BAILINGMOE       = auto()
    DOTS1            = auto()
    ARCEE            = auto()
class VISION_PROJECTOR_TYPE(IntEnum):
    """Known vision projector flavours. Member order is significant (auto())."""
    MLP       = auto()
    LDP       = auto()
    LDPV2     = auto()
    RESAMPLER = auto()
    GLM_EDGE  = auto()
    MERGER    = auto()
    GEMMA3    = auto()
class MODEL_TENSOR(IntEnum):
    """Every tensor role the GGUF tooling can name.

    Member ORDER is significant: values come from ``auto()`` and are used
    as dict keys elsewhere, so new tensors must be appended, never inserted.
    """
    TOKEN_EMBD             = auto()
    TOKEN_EMBD_NORM        = auto()
    TOKEN_TYPES            = auto()
    POS_EMBD               = auto()
    OUTPUT                 = auto()
    OUTPUT_NORM            = auto()
    ROPE_FREQS             = auto()
    ROPE_FACTORS_LONG      = auto()
    ROPE_FACTORS_SHORT     = auto()
    ATTN_Q                 = auto()
    ATTN_K                 = auto()
    ATTN_V                 = auto()
    ATTN_QKV               = auto()
    ATTN_OUT               = auto()
    ATTN_NORM              = auto()
    ATTN_NORM_2            = auto()
    ATTN_OUT_NORM          = auto()
    ATTN_POST_NORM         = auto()
    ATTN_ROT_EMBD          = auto()
    FFN_GATE_INP           = auto()
    FFN_GATE_INP_SHEXP     = auto()
    FFN_NORM               = auto()
    FFN_PRE_NORM           = auto()
    FFN_POST_NORM          = auto()
    FFN_GATE               = auto()
    FFN_DOWN               = auto()
    FFN_UP                 = auto()
    FFN_ACT                = auto()
    FFN_NORM_EXP           = auto()
    FFN_GATE_EXP           = auto()
    FFN_DOWN_EXP           = auto()
    FFN_UP_EXP             = auto()
    FFN_GATE_SHEXP         = auto()
    FFN_DOWN_SHEXP         = auto()
    FFN_UP_SHEXP           = auto()
    FFN_EXP_PROBS_B        = auto()
    ATTN_Q_NORM            = auto()
    ATTN_K_NORM            = auto()
    LAYER_OUT_NORM         = auto()
    SSM_IN                 = auto()
    SSM_CONV1D             = auto()
    SSM_X                  = auto()
    SSM_DT                 = auto()
    SSM_A                  = auto()
    SSM_D                  = auto()
    SSM_OUT                = auto()
    TIME_MIX_W0            = auto()
    TIME_MIX_W1            = auto()
    TIME_MIX_W2            = auto()
    TIME_MIX_A0            = auto()
    TIME_MIX_A1            = auto()
    TIME_MIX_A2            = auto()
    TIME_MIX_V0            = auto()
    TIME_MIX_V1            = auto()
    TIME_MIX_V2            = auto()
    TIME_MIX_G1            = auto()
    TIME_MIX_G2            = auto()
    TIME_MIX_K_K           = auto()
    TIME_MIX_K_A           = auto()
    TIME_MIX_R_K           = auto()
    TIME_MIX_LERP_X        = auto()
    TIME_MIX_LERP_K        = auto()
    TIME_MIX_LERP_V        = auto()
    TIME_MIX_LERP_R        = auto()
    TIME_MIX_LERP_G        = auto()
    TIME_MIX_LERP_FUSED    = auto()
    TIME_MIX_LERP_W        = auto()
    TIME_MIX_FIRST         = auto()
    TIME_MIX_DECAY         = auto()
    TIME_MIX_DECAY_W1      = auto()
    TIME_MIX_DECAY_W2      = auto()
    TIME_MIX_KEY           = auto()
    TIME_MIX_VALUE         = auto()
    TIME_MIX_RECEPTANCE    = auto()
    TIME_MIX_GATE          = auto()
    TIME_MIX_LN            = auto()
    TIME_MIX_OUTPUT        = auto()
    CHANNEL_MIX_LERP_K     = auto()
    CHANNEL_MIX_LERP_R     = auto()
    CHANNEL_MIX_KEY        = auto()
    CHANNEL_MIX_RECEPTANCE = auto()
    CHANNEL_MIX_VALUE      = auto()
    ATTN_Q_A               = auto()
    ATTN_Q_B               = auto()
    ATTN_KV_A_MQA          = auto()
    ATTN_KV_B              = auto()
    ATTN_K_B               = auto()
    ATTN_V_B               = auto()
    ATTN_Q_A_NORM          = auto()
    ATTN_KV_A_NORM         = auto()
    FFN_SUB_NORM           = auto()
    ATTN_SUB_NORM          = auto()
    DEC_ATTN_NORM          = auto()
    DEC_ATTN_Q             = auto()
    DEC_ATTN_K             = auto()
    DEC_ATTN_V             = auto()
    DEC_ATTN_OUT           = auto()
    DEC_ATTN_REL_B         = auto()
    DEC_CROSS_ATTN_NORM    = auto()
    DEC_CROSS_ATTN_Q       = auto()
    DEC_CROSS_ATTN_K       = auto()
    DEC_CROSS_ATTN_V       = auto()
    DEC_CROSS_ATTN_OUT     = auto()
    DEC_CROSS_ATTN_REL_B   = auto()
    DEC_FFN_NORM           = auto()
    DEC_FFN_GATE           = auto()
    DEC_FFN_DOWN           = auto()
    DEC_FFN_UP             = auto()
    DEC_OUTPUT_NORM        = auto()
    ENC_ATTN_NORM          = auto()
    ENC_ATTN_Q             = auto()
    ENC_ATTN_K             = auto()
    ENC_ATTN_V             = auto()
    ENC_ATTN_OUT           = auto()
    ENC_ATTN_REL_B         = auto()
    ENC_FFN_NORM           = auto()
    ENC_FFN_GATE           = auto()
    ENC_FFN_DOWN           = auto()
    ENC_FFN_UP             = auto()
    ENC_OUTPUT_NORM        = auto()
    CLS                    = auto()  # classifier
    CLS_OUT                = auto()  # classifier output projection
    CONV1D                 = auto()
    CONVNEXT_DW            = auto()
    CONVNEXT_NORM          = auto()
    CONVNEXT_PW1           = auto()
    CONVNEXT_PW2           = auto()
    CONVNEXT_GAMMA         = auto()
    POSNET_CONV1           = auto()
    POSNET_CONV2           = auto()
    POSNET_NORM            = auto()
    POSNET_NORM1           = auto()
    POSNET_NORM2           = auto()
    POSNET_ATTN_NORM       = auto()
    POSNET_ATTN_Q          = auto()
    POSNET_ATTN_K          = auto()
    POSNET_ATTN_V          = auto()
    POSNET_ATTN_OUT        = auto()
    # vision
    V_MMPROJ               = auto()
    V_MMPROJ_FC            = auto()
    V_MMPROJ_MLP           = auto()
    V_MMPROJ_PEG           = auto()
    V_ENC_EMBD_CLS         = auto()
    V_ENC_EMBD_PATCH       = auto()
    V_ENC_EMBD_POS         = auto()
    V_ENC_INPUT_NORM       = auto()
    V_ENC_ATTN_Q           = auto()
    V_ENC_ATTN_Q_NORM      = auto()
    V_ENC_ATTN_K           = auto()
    V_ENC_ATTN_K_NORM      = auto()
    V_ENC_ATTN_V           = auto()
    V_ENC_ATTN_O           = auto()
    V_ENC_ATTN_O_NORM      = auto()
    V_ENC_POST_ATTN_NORM   = auto()
    V_ENC_FFN_UP           = auto()
    V_ENC_FFN_GATE         = auto()
    V_ENC_FFN_DOWN         = auto()
    V_LAYER_SCALE_1        = auto()
    V_LAYER_SCALE_2        = auto()
    V_PRE_NORM             = auto()
    V_POST_NORM            = auto()
    V_MM_INP_NORM          = auto()
    V_MM_INP_PROJ          = auto()  # gemma3
    V_MM_SOFT_EMB_NORM     = auto()  # gemma3
    V_RESMPL_POS_EMBD_K    = auto()  # minicpmv
    V_RESMPL_ATTN_Q        = auto()  # minicpmv
    V_RESMPL_ATTN_K        = auto()  # minicpmv
    V_RESMPL_ATTN_V        = auto()  # minicpmv
    V_RESMPL_ATTN_OUT      = auto()  # minicpmv
    V_RESMPL_KV            = auto()  # minicpmv
    V_RESMPL_KV_NORM       = auto()  # minicpmv
    V_RESMPL_POST_NORM     = auto()  # minicpmv
    V_RESMPL_Q_NORM        = auto()  # minicpmv
    V_RESMPL_PROJ          = auto()  # minicpmv
    V_RESMPL_QUERY         = auto()  # minicpmv
    V_TOK_EMBD_IMG_BREAK   = auto()  # pixtral
    V_MM_PATCH_MERGER      = auto()  # mistral small 3.1
    # audio (mtmd)
    A_ENC_EMBD_POS         = auto()
    A_ENC_CONV1D           = auto()
    A_PRE_NORM             = auto()
    A_POST_NORM            = auto()
    A_ENC_ATTN_Q           = auto()
    A_ENC_ATTN_K           = auto()
    A_ENC_ATTN_V           = auto()
    A_ENC_INPUT_NORM       = auto()
    A_ENC_OUTPUT           = auto()
    A_ENC_OUTPUT_NORM      = auto()
    A_ENC_FFN_UP           = auto()
    A_ENC_FFN_GATE         = auto()
    A_ENC_FFN_DOWN         = auto()
    A_MMPROJ               = auto()
    A_MMPROJ_FC            = auto()
    A_MM_NORM_PRE          = auto()
    A_MM_NORM_MID          = auto()
# Canonical on-disk architecture name for each MODEL_ARCH member; these
# strings are part of the GGUF file format and must never be renamed.
MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = {
    MODEL_ARCH.MMPROJ: "clip",  # dummy arch for clip.cpp
    MODEL_ARCH.LLAMA: "llama",
    MODEL_ARCH.LLAMA4: "llama4",
    MODEL_ARCH.DECI: "deci",
    MODEL_ARCH.FALCON: "falcon",
    MODEL_ARCH.BAICHUAN: "baichuan",
    MODEL_ARCH.GROK: "grok",
    MODEL_ARCH.GPT2: "gpt2",
    MODEL_ARCH.GPTJ: "gptj",
    MODEL_ARCH.GPTNEOX: "gptneox",
    MODEL_ARCH.MPT: "mpt",
    MODEL_ARCH.STARCODER: "starcoder",
    MODEL_ARCH.REFACT: "refact",
    MODEL_ARCH.BERT: "bert",
    MODEL_ARCH.NOMIC_BERT: "nomic-bert",
    MODEL_ARCH.NOMIC_BERT_MOE: "nomic-bert-moe",
    MODEL_ARCH.NEO_BERT: "neo-bert",
    MODEL_ARCH.JINA_BERT_V2: "jina-bert-v2",
    MODEL_ARCH.BLOOM: "bloom",
    MODEL_ARCH.STABLELM: "stablelm",
    MODEL_ARCH.QWEN: "qwen",
    MODEL_ARCH.QWEN2: "qwen2",
    MODEL_ARCH.QWEN2MOE: "qwen2moe",
    MODEL_ARCH.QWEN2VL: "qwen2vl",
    MODEL_ARCH.QWEN3: "qwen3",
    MODEL_ARCH.QWEN3MOE: "qwen3moe",
    MODEL_ARCH.PHI2: "phi2",
    MODEL_ARCH.PHI3: "phi3",
    MODEL_ARCH.PHIMOE: "phimoe",
    MODEL_ARCH.PLAMO: "plamo",
    MODEL_ARCH.CODESHELL: "codeshell",
    MODEL_ARCH.ORION: "orion",
    MODEL_ARCH.INTERNLM2: "internlm2",
    MODEL_ARCH.MINICPM: "minicpm",
    MODEL_ARCH.MINICPM3: "minicpm3",
    MODEL_ARCH.GEMMA: "gemma",
    MODEL_ARCH.GEMMA2: "gemma2",
    MODEL_ARCH.GEMMA3: "gemma3",
    MODEL_ARCH.STARCODER2: "starcoder2",
    MODEL_ARCH.RWKV6: "rwkv6",
    MODEL_ARCH.RWKV6QWEN2: "rwkv6qwen2",
    MODEL_ARCH.RWKV7: "rwkv7",
    MODEL_ARCH.ARWKV7: "arwkv7",
    MODEL_ARCH.MAMBA: "mamba",
    MODEL_ARCH.XVERSE: "xverse",
    MODEL_ARCH.COMMAND_R: "command-r",
    MODEL_ARCH.COHERE2: "cohere2",
    MODEL_ARCH.DBRX: "dbrx",
    MODEL_ARCH.OLMO: "olmo",
    MODEL_ARCH.OLMO2: "olmo2",
    MODEL_ARCH.OLMOE: "olmoe",
    MODEL_ARCH.OPENELM: "openelm",
    MODEL_ARCH.ARCTIC: "arctic",
    MODEL_ARCH.DEEPSEEK: "deepseek",
    MODEL_ARCH.DEEPSEEK2: "deepseek2",
    MODEL_ARCH.CHATGLM: "chatglm",
    MODEL_ARCH.GLM4: "glm4",
    MODEL_ARCH.BITNET: "bitnet",
    MODEL_ARCH.T5: "t5",
    MODEL_ARCH.T5ENCODER: "t5encoder",
    MODEL_ARCH.JAIS: "jais",
    MODEL_ARCH.NEMOTRON: "nemotron",
    MODEL_ARCH.EXAONE: "exaone",
    MODEL_ARCH.GRANITE: "granite",
    MODEL_ARCH.GRANITE_MOE: "granitemoe",
    MODEL_ARCH.CHAMELEON: "chameleon",
    MODEL_ARCH.WAVTOKENIZER_DEC: "wavtokenizer-dec",
    MODEL_ARCH.PLM: "plm",
    MODEL_ARCH.BAILINGMOE: "bailingmoe",
    MODEL_ARCH.DOTS1: "dots1",
    MODEL_ARCH.ARCEE: "arcee",
}
# On-disk string for each vision projector type; part of the file format.
VISION_PROJECTOR_TYPE_NAMES: dict[VISION_PROJECTOR_TYPE, str] = {
    VISION_PROJECTOR_TYPE.MLP: "mlp",
    VISION_PROJECTOR_TYPE.LDP: "ldp",
    VISION_PROJECTOR_TYPE.LDPV2: "ldpv2",
    VISION_PROJECTOR_TYPE.RESAMPLER: "resampler",
    VISION_PROJECTOR_TYPE.GLM_EDGE: "adapter",
    VISION_PROJECTOR_TYPE.MERGER: "qwen2vl_merger",
    VISION_PROJECTOR_TYPE.GEMMA3: "gemma3",
}
# Canonical on-disk name template for each tensor role. "{bid}" is the
# block (layer) index placeholder filled in by format_tensor_name(); these
# strings are part of the GGUF file format and must never be renamed.
TENSOR_NAMES: dict[MODEL_TENSOR, str] = {
    MODEL_TENSOR.TOKEN_EMBD: "token_embd",
    MODEL_TENSOR.TOKEN_EMBD_NORM: "token_embd_norm",
    MODEL_TENSOR.TOKEN_TYPES: "token_types",
    MODEL_TENSOR.POS_EMBD: "position_embd",
    MODEL_TENSOR.OUTPUT_NORM: "output_norm",
    MODEL_TENSOR.OUTPUT: "output",
    MODEL_TENSOR.ROPE_FREQS: "rope_freqs",
    MODEL_TENSOR.ROPE_FACTORS_LONG: "rope_factors_long",
    MODEL_TENSOR.ROPE_FACTORS_SHORT: "rope_factors_short",
    MODEL_TENSOR.ATTN_NORM: "blk.{bid}.attn_norm",
    MODEL_TENSOR.ATTN_NORM_2: "blk.{bid}.attn_norm_2",
    MODEL_TENSOR.ATTN_QKV: "blk.{bid}.attn_qkv",
    MODEL_TENSOR.ATTN_Q: "blk.{bid}.attn_q",
    MODEL_TENSOR.ATTN_K: "blk.{bid}.attn_k",
    MODEL_TENSOR.ATTN_V: "blk.{bid}.attn_v",
    MODEL_TENSOR.ATTN_OUT: "blk.{bid}.attn_output",
    MODEL_TENSOR.ATTN_ROT_EMBD: "blk.{bid}.attn_rot_embd",
    MODEL_TENSOR.ATTN_Q_NORM: "blk.{bid}.attn_q_norm",
    MODEL_TENSOR.ATTN_K_NORM: "blk.{bid}.attn_k_norm",
    MODEL_TENSOR.ATTN_OUT_NORM: "blk.{bid}.attn_output_norm",
    MODEL_TENSOR.ATTN_POST_NORM: "blk.{bid}.post_attention_norm",
    MODEL_TENSOR.FFN_GATE_INP: "blk.{bid}.ffn_gate_inp",
    MODEL_TENSOR.FFN_GATE_INP_SHEXP: "blk.{bid}.ffn_gate_inp_shexp",
    MODEL_TENSOR.FFN_NORM: "blk.{bid}.ffn_norm",
    # NOTE: FFN_PRE_NORM deliberately shares the on-disk name of FFN_NORM.
    MODEL_TENSOR.FFN_PRE_NORM: "blk.{bid}.ffn_norm",
    MODEL_TENSOR.FFN_POST_NORM: "blk.{bid}.post_ffw_norm",
    MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate",
    MODEL_TENSOR.FFN_DOWN: "blk.{bid}.ffn_down",
    MODEL_TENSOR.FFN_UP: "blk.{bid}.ffn_up",
    MODEL_TENSOR.FFN_GATE_SHEXP: "blk.{bid}.ffn_gate_shexp",
    MODEL_TENSOR.FFN_DOWN_SHEXP: "blk.{bid}.ffn_down_shexp",
    MODEL_TENSOR.FFN_UP_SHEXP: "blk.{bid}.ffn_up_shexp",
    MODEL_TENSOR.FFN_ACT: "blk.{bid}.ffn",
    MODEL_TENSOR.FFN_NORM_EXP: "blk.{bid}.ffn_norm_exps",
    MODEL_TENSOR.FFN_GATE_EXP: "blk.{bid}.ffn_gate_exps",
    MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down_exps",
    MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up_exps",
    MODEL_TENSOR.FFN_EXP_PROBS_B: "blk.{bid}.exp_probs_b",
    MODEL_TENSOR.LAYER_OUT_NORM: "blk.{bid}.layer_output_norm",
    MODEL_TENSOR.SSM_IN: "blk.{bid}.ssm_in",
    MODEL_TENSOR.SSM_CONV1D: "blk.{bid}.ssm_conv1d",
    MODEL_TENSOR.SSM_X: "blk.{bid}.ssm_x",
    MODEL_TENSOR.SSM_DT: "blk.{bid}.ssm_dt",
    MODEL_TENSOR.SSM_A: "blk.{bid}.ssm_a",
    MODEL_TENSOR.SSM_D: "blk.{bid}.ssm_d",
    MODEL_TENSOR.SSM_OUT: "blk.{bid}.ssm_out",
    MODEL_TENSOR.TIME_MIX_W0: "blk.{bid}.time_mix_w0",
    MODEL_TENSOR.TIME_MIX_W1: "blk.{bid}.time_mix_w1",
    MODEL_TENSOR.TIME_MIX_W2: "blk.{bid}.time_mix_w2",
    MODEL_TENSOR.TIME_MIX_A0: "blk.{bid}.time_mix_a0",
    MODEL_TENSOR.TIME_MIX_A1: "blk.{bid}.time_mix_a1",
    MODEL_TENSOR.TIME_MIX_A2: "blk.{bid}.time_mix_a2",
    MODEL_TENSOR.TIME_MIX_V0: "blk.{bid}.time_mix_v0",
    MODEL_TENSOR.TIME_MIX_V1: "blk.{bid}.time_mix_v1",
    MODEL_TENSOR.TIME_MIX_V2: "blk.{bid}.time_mix_v2",
    MODEL_TENSOR.TIME_MIX_G1: "blk.{bid}.time_mix_g1",
    MODEL_TENSOR.TIME_MIX_G2: "blk.{bid}.time_mix_g2",
    MODEL_TENSOR.TIME_MIX_K_K: "blk.{bid}.time_mix_k_k",
    MODEL_TENSOR.TIME_MIX_K_A: "blk.{bid}.time_mix_k_a",
    MODEL_TENSOR.TIME_MIX_R_K: "blk.{bid}.time_mix_r_k",
    MODEL_TENSOR.TIME_MIX_LERP_X: "blk.{bid}.time_mix_lerp_x",
    MODEL_TENSOR.TIME_MIX_LERP_K: "blk.{bid}.time_mix_lerp_k",
    MODEL_TENSOR.TIME_MIX_LERP_V: "blk.{bid}.time_mix_lerp_v",
    MODEL_TENSOR.TIME_MIX_LERP_R: "blk.{bid}.time_mix_lerp_r",
    MODEL_TENSOR.TIME_MIX_LERP_G: "blk.{bid}.time_mix_lerp_g",
    MODEL_TENSOR.TIME_MIX_LERP_FUSED: "blk.{bid}.time_mix_lerp_fused",
    MODEL_TENSOR.TIME_MIX_LERP_W: "blk.{bid}.time_mix_lerp_w",
    MODEL_TENSOR.TIME_MIX_FIRST: "blk.{bid}.time_mix_first",
    MODEL_TENSOR.TIME_MIX_DECAY: "blk.{bid}.time_mix_decay",
    MODEL_TENSOR.TIME_MIX_DECAY_W1: "blk.{bid}.time_mix_decay_w1",
    MODEL_TENSOR.TIME_MIX_DECAY_W2: "blk.{bid}.time_mix_decay_w2",
    MODEL_TENSOR.TIME_MIX_KEY: "blk.{bid}.time_mix_key",
    MODEL_TENSOR.TIME_MIX_VALUE: "blk.{bid}.time_mix_value",
    MODEL_TENSOR.TIME_MIX_RECEPTANCE: "blk.{bid}.time_mix_receptance",
    MODEL_TENSOR.TIME_MIX_GATE: "blk.{bid}.time_mix_gate",
    MODEL_TENSOR.TIME_MIX_LN: "blk.{bid}.time_mix_ln",
    MODEL_TENSOR.TIME_MIX_OUTPUT: "blk.{bid}.time_mix_output",
    MODEL_TENSOR.CHANNEL_MIX_LERP_K: "blk.{bid}.channel_mix_lerp_k",
    MODEL_TENSOR.CHANNEL_MIX_LERP_R: "blk.{bid}.channel_mix_lerp_r",
    MODEL_TENSOR.CHANNEL_MIX_KEY: "blk.{bid}.channel_mix_key",
    MODEL_TENSOR.CHANNEL_MIX_RECEPTANCE: "blk.{bid}.channel_mix_receptance",
    MODEL_TENSOR.CHANNEL_MIX_VALUE: "blk.{bid}.channel_mix_value",
    MODEL_TENSOR.ATTN_Q_A: "blk.{bid}.attn_q_a",
    MODEL_TENSOR.ATTN_Q_B: "blk.{bid}.attn_q_b",
    MODEL_TENSOR.ATTN_KV_A_MQA: "blk.{bid}.attn_kv_a_mqa",
    MODEL_TENSOR.ATTN_KV_B: "blk.{bid}.attn_kv_b",
    MODEL_TENSOR.ATTN_K_B: "blk.{bid}.attn_k_b",
    MODEL_TENSOR.ATTN_V_B: "blk.{bid}.attn_v_b",
    MODEL_TENSOR.ATTN_Q_A_NORM: "blk.{bid}.attn_q_a_norm",
    MODEL_TENSOR.ATTN_KV_A_NORM: "blk.{bid}.attn_kv_a_norm",
    MODEL_TENSOR.ATTN_SUB_NORM: "blk.{bid}.attn_sub_norm",
    MODEL_TENSOR.FFN_SUB_NORM: "blk.{bid}.ffn_sub_norm",
    MODEL_TENSOR.DEC_ATTN_NORM: "dec.blk.{bid}.attn_norm",
    MODEL_TENSOR.DEC_ATTN_Q: "dec.blk.{bid}.attn_q",
    MODEL_TENSOR.DEC_ATTN_K: "dec.blk.{bid}.attn_k",
    MODEL_TENSOR.DEC_ATTN_V: "dec.blk.{bid}.attn_v",
    MODEL_TENSOR.DEC_ATTN_OUT: "dec.blk.{bid}.attn_o",
    MODEL_TENSOR.DEC_ATTN_REL_B: "dec.blk.{bid}.attn_rel_b",
    MODEL_TENSOR.DEC_CROSS_ATTN_NORM: "dec.blk.{bid}.cross_attn_norm",
    MODEL_TENSOR.DEC_CROSS_ATTN_Q: "dec.blk.{bid}.cross_attn_q",
    MODEL_TENSOR.DEC_CROSS_ATTN_K: "dec.blk.{bid}.cross_attn_k",
    MODEL_TENSOR.DEC_CROSS_ATTN_V: "dec.blk.{bid}.cross_attn_v",
    MODEL_TENSOR.DEC_CROSS_ATTN_OUT: "dec.blk.{bid}.cross_attn_o",
    MODEL_TENSOR.DEC_CROSS_ATTN_REL_B: "dec.blk.{bid}.cross_attn_rel_b",
    MODEL_TENSOR.DEC_FFN_NORM: "dec.blk.{bid}.ffn_norm",
    MODEL_TENSOR.DEC_FFN_GATE: "dec.blk.{bid}.ffn_gate",
    MODEL_TENSOR.DEC_FFN_DOWN: "dec.blk.{bid}.ffn_down",
    MODEL_TENSOR.DEC_FFN_UP: "dec.blk.{bid}.ffn_up",
    MODEL_TENSOR.DEC_OUTPUT_NORM: "dec.output_norm",
    MODEL_TENSOR.ENC_ATTN_NORM: "enc.blk.{bid}.attn_norm",
    MODEL_TENSOR.ENC_ATTN_Q: "enc.blk.{bid}.attn_q",
    MODEL_TENSOR.ENC_ATTN_K: "enc.blk.{bid}.attn_k",
    MODEL_TENSOR.ENC_ATTN_V: "enc.blk.{bid}.attn_v",
    MODEL_TENSOR.ENC_ATTN_OUT: "enc.blk.{bid}.attn_o",
    MODEL_TENSOR.ENC_ATTN_REL_B: "enc.blk.{bid}.attn_rel_b",
    MODEL_TENSOR.ENC_FFN_NORM: "enc.blk.{bid}.ffn_norm",
    MODEL_TENSOR.ENC_FFN_GATE: "enc.blk.{bid}.ffn_gate",
    MODEL_TENSOR.ENC_FFN_DOWN: "enc.blk.{bid}.ffn_down",
    MODEL_TENSOR.ENC_FFN_UP: "enc.blk.{bid}.ffn_up",
    MODEL_TENSOR.ENC_OUTPUT_NORM: "enc.output_norm",
    MODEL_TENSOR.CLS: "cls",
    MODEL_TENSOR.CLS_OUT: "cls.output",
    MODEL_TENSOR.CONV1D: "conv1d",
    MODEL_TENSOR.CONVNEXT_DW: "convnext.{bid}.dw",
    MODEL_TENSOR.CONVNEXT_NORM: "convnext.{bid}.norm",
    MODEL_TENSOR.CONVNEXT_PW1: "convnext.{bid}.pw1",
    MODEL_TENSOR.CONVNEXT_PW2: "convnext.{bid}.pw2",
    MODEL_TENSOR.CONVNEXT_GAMMA: "convnext.{bid}.gamma",
    MODEL_TENSOR.POSNET_CONV1: "posnet.{bid}.conv1",
    MODEL_TENSOR.POSNET_CONV2: "posnet.{bid}.conv2",
    MODEL_TENSOR.POSNET_NORM: "posnet.{bid}.norm",
    MODEL_TENSOR.POSNET_NORM1: "posnet.{bid}.norm1",
    MODEL_TENSOR.POSNET_NORM2: "posnet.{bid}.norm2",
    MODEL_TENSOR.POSNET_ATTN_NORM: "posnet.{bid}.attn_norm",
    MODEL_TENSOR.POSNET_ATTN_Q: "posnet.{bid}.attn_q",
    MODEL_TENSOR.POSNET_ATTN_K: "posnet.{bid}.attn_k",
    MODEL_TENSOR.POSNET_ATTN_V: "posnet.{bid}.attn_v",
    MODEL_TENSOR.POSNET_ATTN_OUT: "posnet.{bid}.attn_output",
    # vision
    MODEL_TENSOR.V_MMPROJ: "mm.{bid}",
    MODEL_TENSOR.V_MMPROJ_FC: "mm.model.fc",
    MODEL_TENSOR.V_MMPROJ_MLP: "mm.model.mlp.{bid}",
    MODEL_TENSOR.V_MMPROJ_PEG: "mm.model.peg.{bid}",
    MODEL_TENSOR.V_ENC_EMBD_CLS: "v.class_embd",
    MODEL_TENSOR.V_ENC_EMBD_PATCH: "v.patch_embd",
    MODEL_TENSOR.V_ENC_EMBD_POS: "v.position_embd",
    MODEL_TENSOR.V_ENC_ATTN_Q: "v.blk.{bid}.attn_q",
    MODEL_TENSOR.V_ENC_ATTN_Q_NORM: "v.blk.{bid}.attn_q_norm",
    MODEL_TENSOR.V_ENC_ATTN_K: "v.blk.{bid}.attn_k",
    MODEL_TENSOR.V_ENC_ATTN_K_NORM: "v.blk.{bid}.attn_k_norm",
    MODEL_TENSOR.V_ENC_ATTN_V: "v.blk.{bid}.attn_v",
    MODEL_TENSOR.V_ENC_INPUT_NORM: "v.blk.{bid}.ln1",
    MODEL_TENSOR.V_ENC_ATTN_O: "v.blk.{bid}.attn_out",
    MODEL_TENSOR.V_ENC_ATTN_O_NORM: "v.blk.{bid}.attn_out_norm",
    MODEL_TENSOR.V_ENC_POST_ATTN_NORM: "v.blk.{bid}.ln2",
    MODEL_TENSOR.V_ENC_FFN_UP: "v.blk.{bid}.ffn_up",
    MODEL_TENSOR.V_ENC_FFN_GATE: "v.blk.{bid}.ffn_gate",
    MODEL_TENSOR.V_ENC_FFN_DOWN: "v.blk.{bid}.ffn_down",
    MODEL_TENSOR.V_LAYER_SCALE_1: "v.blk.{bid}.ls1",
    MODEL_TENSOR.V_LAYER_SCALE_2: "v.blk.{bid}.ls2",
    MODEL_TENSOR.V_PRE_NORM: "v.pre_ln",
    MODEL_TENSOR.V_POST_NORM: "v.post_ln",
    MODEL_TENSOR.V_MM_INP_PROJ: "mm.input_projection",
    MODEL_TENSOR.V_MM_INP_NORM: "mm.input_norm",
    MODEL_TENSOR.V_MM_SOFT_EMB_NORM: "mm.soft_emb_norm",
    MODEL_TENSOR.V_RESMPL_POS_EMBD_K: "resampler.pos_embd_k",
    MODEL_TENSOR.V_RESMPL_ATTN_Q: "resampler.attn.q",
    MODEL_TENSOR.V_RESMPL_ATTN_K: "resampler.attn.k",
    MODEL_TENSOR.V_RESMPL_ATTN_V: "resampler.attn.v",
    MODEL_TENSOR.V_RESMPL_ATTN_OUT: "resampler.attn.out",
    MODEL_TENSOR.V_RESMPL_KV: "resampler.kv",
    MODEL_TENSOR.V_RESMPL_KV_NORM: "resampler.ln_kv",
    MODEL_TENSOR.V_RESMPL_POST_NORM: "resampler.ln_post",
    MODEL_TENSOR.V_RESMPL_Q_NORM: "resampler.ln_q",
    MODEL_TENSOR.V_RESMPL_PROJ: "resampler.proj",
    MODEL_TENSOR.V_RESMPL_QUERY: "resampler.query",
    MODEL_TENSOR.V_TOK_EMBD_IMG_BREAK: "v.token_embd.img_break",  # pixtral
    MODEL_TENSOR.V_MM_PATCH_MERGER: "mm.patch_merger",  # mistral small 3.1
    # audio (mtmd)
    MODEL_TENSOR.A_ENC_EMBD_POS: "a.position_embd",
    MODEL_TENSOR.A_ENC_CONV1D: "a.conv1d.{bid}",
    MODEL_TENSOR.A_PRE_NORM: "a.pre_ln",
    MODEL_TENSOR.A_POST_NORM: "a.post_ln",
    MODEL_TENSOR.A_ENC_ATTN_Q: "a.blk.{bid}.attn_q",
    MODEL_TENSOR.A_ENC_ATTN_K: "a.blk.{bid}.attn_k",
    MODEL_TENSOR.A_ENC_ATTN_V: "a.blk.{bid}.attn_v",
    MODEL_TENSOR.A_ENC_INPUT_NORM: "a.blk.{bid}.ln1",
    MODEL_TENSOR.A_ENC_OUTPUT: "a.blk.{bid}.attn_out",
    MODEL_TENSOR.A_ENC_OUTPUT_NORM: "a.blk.{bid}.ln2",
    MODEL_TENSOR.A_ENC_FFN_UP: "a.blk.{bid}.ffn_up",
    MODEL_TENSOR.A_ENC_FFN_GATE: "a.blk.{bid}.ffn_gate",
    MODEL_TENSOR.A_ENC_FFN_DOWN: "a.blk.{bid}.ffn_down",
    MODEL_TENSOR.A_MMPROJ: "mm.a.mlp.{bid}",
    MODEL_TENSOR.A_MMPROJ_FC: "mm.a.fc",
    MODEL_TENSOR.A_MM_NORM_PRE: "mm.a.norm_pre",
    MODEL_TENSOR.A_MM_NORM_MID: "mm.a.norm_mid",
}
MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = {
|
| 844 |
+
MODEL_ARCH.MMPROJ: [
|
| 845 |
+
MODEL_TENSOR.V_MMPROJ,
|
| 846 |
+
MODEL_TENSOR.V_MMPROJ_FC,
|
| 847 |
+
MODEL_TENSOR.V_MMPROJ_MLP,
|
| 848 |
+
MODEL_TENSOR.V_MMPROJ_PEG,
|
| 849 |
+
MODEL_TENSOR.V_ENC_EMBD_CLS,
|
| 850 |
+
MODEL_TENSOR.V_ENC_EMBD_PATCH,
|
| 851 |
+
MODEL_TENSOR.V_ENC_EMBD_POS,
|
| 852 |
+
MODEL_TENSOR.V_ENC_INPUT_NORM,
|
| 853 |
+
MODEL_TENSOR.V_ENC_ATTN_Q,
|
| 854 |
+
MODEL_TENSOR.V_ENC_ATTN_Q_NORM,
|
| 855 |
+
MODEL_TENSOR.V_ENC_ATTN_K,
|
| 856 |
+
MODEL_TENSOR.V_ENC_ATTN_K_NORM,
|
| 857 |
+
MODEL_TENSOR.V_ENC_ATTN_V,
|
| 858 |
+
MODEL_TENSOR.V_ENC_ATTN_O,
|
| 859 |
+
MODEL_TENSOR.V_ENC_ATTN_O_NORM,
|
| 860 |
+
MODEL_TENSOR.V_ENC_POST_ATTN_NORM,
|
| 861 |
+
MODEL_TENSOR.V_ENC_FFN_UP,
|
| 862 |
+
MODEL_TENSOR.V_ENC_FFN_GATE,
|
| 863 |
+
MODEL_TENSOR.V_ENC_FFN_DOWN,
|
| 864 |
+
MODEL_TENSOR.V_LAYER_SCALE_1,
|
| 865 |
+
MODEL_TENSOR.V_LAYER_SCALE_2,
|
| 866 |
+
MODEL_TENSOR.V_PRE_NORM,
|
| 867 |
+
MODEL_TENSOR.V_POST_NORM,
|
| 868 |
+
MODEL_TENSOR.V_MM_INP_PROJ,
|
| 869 |
+
MODEL_TENSOR.V_MM_INP_NORM,
|
| 870 |
+
MODEL_TENSOR.V_MM_SOFT_EMB_NORM,
|
| 871 |
+
MODEL_TENSOR.V_RESMPL_POS_EMBD_K,
|
| 872 |
+
MODEL_TENSOR.V_RESMPL_ATTN_Q,
|
| 873 |
+
MODEL_TENSOR.V_RESMPL_ATTN_K,
|
| 874 |
+
MODEL_TENSOR.V_RESMPL_ATTN_V,
|
| 875 |
+
MODEL_TENSOR.V_RESMPL_ATTN_OUT,
|
| 876 |
+
MODEL_TENSOR.V_RESMPL_KV,
|
| 877 |
+
MODEL_TENSOR.V_RESMPL_KV_NORM,
|
| 878 |
+
MODEL_TENSOR.V_RESMPL_POST_NORM,
|
| 879 |
+
MODEL_TENSOR.V_RESMPL_Q_NORM,
|
| 880 |
+
MODEL_TENSOR.V_RESMPL_PROJ,
|
| 881 |
+
MODEL_TENSOR.V_RESMPL_QUERY,
|
| 882 |
+
MODEL_TENSOR.V_TOK_EMBD_IMG_BREAK,
|
| 883 |
+
MODEL_TENSOR.V_MM_PATCH_MERGER,
|
| 884 |
+
# audio
|
| 885 |
+
MODEL_TENSOR.A_ENC_EMBD_POS,
|
| 886 |
+
MODEL_TENSOR.A_ENC_CONV1D,
|
| 887 |
+
MODEL_TENSOR.A_PRE_NORM,
|
| 888 |
+
MODEL_TENSOR.A_POST_NORM,
|
| 889 |
+
MODEL_TENSOR.A_ENC_ATTN_Q,
|
| 890 |
+
MODEL_TENSOR.A_ENC_ATTN_K,
|
| 891 |
+
MODEL_TENSOR.A_ENC_ATTN_V,
|
| 892 |
+
MODEL_TENSOR.A_ENC_INPUT_NORM,
|
| 893 |
+
MODEL_TENSOR.A_ENC_OUTPUT,
|
| 894 |
+
MODEL_TENSOR.A_ENC_OUTPUT_NORM,
|
| 895 |
+
MODEL_TENSOR.A_ENC_FFN_UP,
|
| 896 |
+
MODEL_TENSOR.A_ENC_FFN_GATE,
|
| 897 |
+
MODEL_TENSOR.A_ENC_FFN_DOWN,
|
| 898 |
+
MODEL_TENSOR.A_MMPROJ,
|
| 899 |
+
MODEL_TENSOR.A_MMPROJ_FC,
|
| 900 |
+
MODEL_TENSOR.A_MM_NORM_PRE,
|
| 901 |
+
MODEL_TENSOR.A_MM_NORM_MID,
|
| 902 |
+
],
|
| 903 |
+
MODEL_ARCH.LLAMA: [
|
| 904 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 905 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 906 |
+
MODEL_TENSOR.OUTPUT,
|
| 907 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 908 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 909 |
+
MODEL_TENSOR.ATTN_Q,
|
| 910 |
+
MODEL_TENSOR.ATTN_K,
|
| 911 |
+
MODEL_TENSOR.ATTN_V,
|
| 912 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 913 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 914 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 915 |
+
MODEL_TENSOR.FFN_NORM,
|
| 916 |
+
MODEL_TENSOR.FFN_GATE,
|
| 917 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 918 |
+
MODEL_TENSOR.FFN_UP,
|
| 919 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 920 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 921 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 922 |
+
],
|
| 923 |
+
MODEL_ARCH.LLAMA4: [
|
| 924 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 925 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 926 |
+
MODEL_TENSOR.OUTPUT,
|
| 927 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 928 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 929 |
+
MODEL_TENSOR.ATTN_Q,
|
| 930 |
+
MODEL_TENSOR.ATTN_K,
|
| 931 |
+
MODEL_TENSOR.ATTN_V,
|
| 932 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 933 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 934 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 935 |
+
MODEL_TENSOR.FFN_NORM,
|
| 936 |
+
MODEL_TENSOR.FFN_GATE,
|
| 937 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 938 |
+
MODEL_TENSOR.FFN_UP,
|
| 939 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 940 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 941 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 942 |
+
MODEL_TENSOR.FFN_GATE_SHEXP,
|
| 943 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP,
|
| 944 |
+
MODEL_TENSOR.FFN_UP_SHEXP,
|
| 945 |
+
],
|
| 946 |
+
MODEL_ARCH.DECI: [
|
| 947 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 948 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 949 |
+
MODEL_TENSOR.OUTPUT,
|
| 950 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 951 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 952 |
+
MODEL_TENSOR.ATTN_Q,
|
| 953 |
+
MODEL_TENSOR.ATTN_K,
|
| 954 |
+
MODEL_TENSOR.ATTN_V,
|
| 955 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 956 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 957 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 958 |
+
MODEL_TENSOR.FFN_NORM,
|
| 959 |
+
MODEL_TENSOR.FFN_GATE,
|
| 960 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 961 |
+
MODEL_TENSOR.FFN_UP,
|
| 962 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 963 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 964 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 965 |
+
],
|
| 966 |
+
MODEL_ARCH.GROK: [
|
| 967 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 968 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 969 |
+
MODEL_TENSOR.OUTPUT,
|
| 970 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 971 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 972 |
+
MODEL_TENSOR.ATTN_Q,
|
| 973 |
+
MODEL_TENSOR.ATTN_K,
|
| 974 |
+
MODEL_TENSOR.ATTN_V,
|
| 975 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 976 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 977 |
+
MODEL_TENSOR.ATTN_OUT_NORM,
|
| 978 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 979 |
+
MODEL_TENSOR.FFN_NORM,
|
| 980 |
+
MODEL_TENSOR.FFN_GATE,
|
| 981 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 982 |
+
MODEL_TENSOR.FFN_UP,
|
| 983 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 984 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 985 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 986 |
+
MODEL_TENSOR.LAYER_OUT_NORM,
|
| 987 |
+
],
|
| 988 |
+
MODEL_ARCH.GPTNEOX: [
|
| 989 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 990 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 991 |
+
MODEL_TENSOR.OUTPUT,
|
| 992 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 993 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 994 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 995 |
+
MODEL_TENSOR.FFN_NORM,
|
| 996 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 997 |
+
MODEL_TENSOR.FFN_UP,
|
| 998 |
+
],
|
| 999 |
+
MODEL_ARCH.FALCON: [
|
| 1000 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1001 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1002 |
+
MODEL_TENSOR.OUTPUT,
|
| 1003 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1004 |
+
MODEL_TENSOR.ATTN_NORM_2,
|
| 1005 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1006 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1007 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1008 |
+
MODEL_TENSOR.FFN_UP,
|
| 1009 |
+
],
|
| 1010 |
+
MODEL_ARCH.BAICHUAN: [
|
| 1011 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1012 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1013 |
+
MODEL_TENSOR.OUTPUT,
|
| 1014 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1015 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1016 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1017 |
+
MODEL_TENSOR.ATTN_K,
|
| 1018 |
+
MODEL_TENSOR.ATTN_V,
|
| 1019 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1020 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1021 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1022 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1023 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1024 |
+
MODEL_TENSOR.FFN_UP,
|
| 1025 |
+
],
|
| 1026 |
+
MODEL_ARCH.STARCODER: [
|
| 1027 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1028 |
+
MODEL_TENSOR.POS_EMBD,
|
| 1029 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1030 |
+
MODEL_TENSOR.OUTPUT,
|
| 1031 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1032 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1033 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1034 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1035 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1036 |
+
MODEL_TENSOR.FFN_UP,
|
| 1037 |
+
],
|
| 1038 |
+
MODEL_ARCH.BERT: [
|
| 1039 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1040 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1041 |
+
MODEL_TENSOR.TOKEN_TYPES,
|
| 1042 |
+
MODEL_TENSOR.POS_EMBD,
|
| 1043 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1044 |
+
MODEL_TENSOR.ATTN_OUT_NORM,
|
| 1045 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1046 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1047 |
+
MODEL_TENSOR.ATTN_K,
|
| 1048 |
+
MODEL_TENSOR.ATTN_V,
|
| 1049 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1050 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1051 |
+
MODEL_TENSOR.FFN_UP,
|
| 1052 |
+
MODEL_TENSOR.LAYER_OUT_NORM,
|
| 1053 |
+
MODEL_TENSOR.CLS,
|
| 1054 |
+
MODEL_TENSOR.CLS_OUT,
|
| 1055 |
+
],
|
| 1056 |
+
MODEL_ARCH.NOMIC_BERT: [
|
| 1057 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1058 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1059 |
+
MODEL_TENSOR.TOKEN_TYPES,
|
| 1060 |
+
MODEL_TENSOR.POS_EMBD,
|
| 1061 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1062 |
+
MODEL_TENSOR.ATTN_OUT_NORM,
|
| 1063 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1064 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1065 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1066 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1067 |
+
MODEL_TENSOR.FFN_UP,
|
| 1068 |
+
MODEL_TENSOR.LAYER_OUT_NORM,
|
| 1069 |
+
],
|
| 1070 |
+
MODEL_ARCH.NOMIC_BERT_MOE: [
|
| 1071 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1072 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1073 |
+
MODEL_TENSOR.TOKEN_TYPES,
|
| 1074 |
+
MODEL_TENSOR.POS_EMBD,
|
| 1075 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1076 |
+
MODEL_TENSOR.ATTN_OUT_NORM,
|
| 1077 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1078 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1079 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1080 |
+
MODEL_TENSOR.FFN_UP,
|
| 1081 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1082 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1083 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1084 |
+
MODEL_TENSOR.LAYER_OUT_NORM,
|
| 1085 |
+
],
|
| 1086 |
+
MODEL_ARCH.NEO_BERT: [
|
| 1087 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1088 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1089 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1090 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1091 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1092 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1093 |
+
MODEL_TENSOR.FFN_UP,
|
| 1094 |
+
MODEL_TENSOR.ENC_OUTPUT_NORM,
|
| 1095 |
+
MODEL_TENSOR.CLS,
|
| 1096 |
+
MODEL_TENSOR.CLS_OUT,
|
| 1097 |
+
],
|
| 1098 |
+
MODEL_ARCH.JINA_BERT_V2: [
|
| 1099 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1100 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1101 |
+
MODEL_TENSOR.TOKEN_TYPES,
|
| 1102 |
+
MODEL_TENSOR.ATTN_NORM_2,
|
| 1103 |
+
MODEL_TENSOR.ATTN_OUT_NORM,
|
| 1104 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1105 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1106 |
+
MODEL_TENSOR.ATTN_K,
|
| 1107 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1108 |
+
MODEL_TENSOR.ATTN_V,
|
| 1109 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1110 |
+
MODEL_TENSOR.FFN_UP,
|
| 1111 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1112 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1113 |
+
MODEL_TENSOR.LAYER_OUT_NORM,
|
| 1114 |
+
MODEL_TENSOR.CLS,
|
| 1115 |
+
],
|
| 1116 |
+
MODEL_ARCH.MPT: [
|
| 1117 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1118 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1119 |
+
MODEL_TENSOR.OUTPUT,
|
| 1120 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1121 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1122 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1123 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1124 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1125 |
+
MODEL_TENSOR.FFN_UP,
|
| 1126 |
+
MODEL_TENSOR.FFN_ACT,
|
| 1127 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1128 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1129 |
+
MODEL_TENSOR.POS_EMBD,
|
| 1130 |
+
],
|
| 1131 |
+
MODEL_ARCH.GPTJ: [
|
| 1132 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1133 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1134 |
+
MODEL_TENSOR.OUTPUT,
|
| 1135 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1136 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1137 |
+
MODEL_TENSOR.ATTN_K,
|
| 1138 |
+
MODEL_TENSOR.ATTN_V,
|
| 1139 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1140 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1141 |
+
MODEL_TENSOR.FFN_UP,
|
| 1142 |
+
],
|
| 1143 |
+
MODEL_ARCH.REFACT: [
|
| 1144 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1145 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1146 |
+
MODEL_TENSOR.OUTPUT,
|
| 1147 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1148 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1149 |
+
MODEL_TENSOR.ATTN_K,
|
| 1150 |
+
MODEL_TENSOR.ATTN_V,
|
| 1151 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1152 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1153 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1154 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1155 |
+
MODEL_TENSOR.FFN_UP,
|
| 1156 |
+
],
|
| 1157 |
+
MODEL_ARCH.BLOOM: [
|
| 1158 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1159 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1160 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1161 |
+
MODEL_TENSOR.OUTPUT,
|
| 1162 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1163 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1164 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1165 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1166 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1167 |
+
MODEL_TENSOR.FFN_UP,
|
| 1168 |
+
],
|
| 1169 |
+
MODEL_ARCH.STABLELM: [
|
| 1170 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1171 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1172 |
+
MODEL_TENSOR.OUTPUT,
|
| 1173 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1174 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1175 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1176 |
+
MODEL_TENSOR.ATTN_K,
|
| 1177 |
+
MODEL_TENSOR.ATTN_V,
|
| 1178 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1179 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1180 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1181 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1182 |
+
MODEL_TENSOR.FFN_UP,
|
| 1183 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1184 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1185 |
+
],
|
| 1186 |
+
MODEL_ARCH.QWEN: [
|
| 1187 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1188 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1189 |
+
MODEL_TENSOR.OUTPUT,
|
| 1190 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1191 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1192 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1193 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1194 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1195 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1196 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1197 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1198 |
+
MODEL_TENSOR.FFN_UP,
|
| 1199 |
+
],
|
| 1200 |
+
MODEL_ARCH.QWEN2: [
|
| 1201 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1202 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1203 |
+
MODEL_TENSOR.OUTPUT,
|
| 1204 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1205 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1206 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1207 |
+
MODEL_TENSOR.ATTN_K,
|
| 1208 |
+
MODEL_TENSOR.ATTN_V,
|
| 1209 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1210 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1211 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1212 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1213 |
+
MODEL_TENSOR.FFN_UP,
|
| 1214 |
+
],
|
| 1215 |
+
MODEL_ARCH.QWEN2VL: [
|
| 1216 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1217 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1218 |
+
MODEL_TENSOR.OUTPUT,
|
| 1219 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1220 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1221 |
+
MODEL_TENSOR.ATTN_K,
|
| 1222 |
+
MODEL_TENSOR.ATTN_V,
|
| 1223 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1224 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1225 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1226 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1227 |
+
MODEL_TENSOR.FFN_UP,
|
| 1228 |
+
],
|
| 1229 |
+
MODEL_ARCH.QWEN2MOE: [
|
| 1230 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1231 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1232 |
+
MODEL_TENSOR.OUTPUT,
|
| 1233 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1234 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1235 |
+
MODEL_TENSOR.ATTN_K,
|
| 1236 |
+
MODEL_TENSOR.ATTN_V,
|
| 1237 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1238 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1239 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1240 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1241 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1242 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1243 |
+
MODEL_TENSOR.FFN_GATE_INP_SHEXP,
|
| 1244 |
+
MODEL_TENSOR.FFN_GATE_SHEXP,
|
| 1245 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP,
|
| 1246 |
+
MODEL_TENSOR.FFN_UP_SHEXP,
|
| 1247 |
+
],
|
| 1248 |
+
MODEL_ARCH.QWEN3: [
|
| 1249 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1250 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1251 |
+
MODEL_TENSOR.OUTPUT,
|
| 1252 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1253 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1254 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1255 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1256 |
+
MODEL_TENSOR.ATTN_K,
|
| 1257 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1258 |
+
MODEL_TENSOR.ATTN_V,
|
| 1259 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1260 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1261 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1262 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1263 |
+
MODEL_TENSOR.FFN_UP,
|
| 1264 |
+
],
|
| 1265 |
+
MODEL_ARCH.QWEN3MOE: [
|
| 1266 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1267 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1268 |
+
MODEL_TENSOR.OUTPUT,
|
| 1269 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1270 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1271 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1272 |
+
MODEL_TENSOR.ATTN_K,
|
| 1273 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1274 |
+
MODEL_TENSOR.ATTN_V,
|
| 1275 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1276 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1277 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1278 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1279 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1280 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1281 |
+
],
|
| 1282 |
+
MODEL_ARCH.PLAMO: [
|
| 1283 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1284 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1285 |
+
MODEL_TENSOR.OUTPUT,
|
| 1286 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1287 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1288 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1289 |
+
MODEL_TENSOR.ATTN_K,
|
| 1290 |
+
MODEL_TENSOR.ATTN_V,
|
| 1291 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1292 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1293 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1294 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1295 |
+
MODEL_TENSOR.FFN_UP,
|
| 1296 |
+
],
|
| 1297 |
+
MODEL_ARCH.GPT2: [
|
| 1298 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1299 |
+
MODEL_TENSOR.POS_EMBD,
|
| 1300 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1301 |
+
MODEL_TENSOR.OUTPUT,
|
| 1302 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1303 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1304 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1305 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1306 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1307 |
+
MODEL_TENSOR.FFN_UP,
|
| 1308 |
+
],
|
| 1309 |
+
MODEL_ARCH.PHI2: [
|
| 1310 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1311 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1312 |
+
MODEL_TENSOR.OUTPUT,
|
| 1313 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1314 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1315 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1316 |
+
MODEL_TENSOR.ATTN_K,
|
| 1317 |
+
MODEL_TENSOR.ATTN_V,
|
| 1318 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1319 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1320 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1321 |
+
MODEL_TENSOR.FFN_UP,
|
| 1322 |
+
],
|
| 1323 |
+
MODEL_ARCH.PHI3: [
|
| 1324 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1325 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1326 |
+
MODEL_TENSOR.OUTPUT,
|
| 1327 |
+
MODEL_TENSOR.ROPE_FACTORS_LONG,
|
| 1328 |
+
MODEL_TENSOR.ROPE_FACTORS_SHORT,
|
| 1329 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1330 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1331 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1332 |
+
MODEL_TENSOR.ATTN_K,
|
| 1333 |
+
MODEL_TENSOR.ATTN_V,
|
| 1334 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1335 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1336 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1337 |
+
MODEL_TENSOR.FFN_UP,
|
| 1338 |
+
],
|
| 1339 |
+
MODEL_ARCH.PHIMOE: [
|
| 1340 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1341 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1342 |
+
MODEL_TENSOR.OUTPUT,
|
| 1343 |
+
MODEL_TENSOR.ROPE_FACTORS_LONG,
|
| 1344 |
+
MODEL_TENSOR.ROPE_FACTORS_SHORT,
|
| 1345 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1346 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1347 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1348 |
+
MODEL_TENSOR.ATTN_K,
|
| 1349 |
+
MODEL_TENSOR.ATTN_V,
|
| 1350 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1351 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1352 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1353 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1354 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1355 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1356 |
+
],
|
| 1357 |
+
MODEL_ARCH.CODESHELL: [
|
| 1358 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1359 |
+
MODEL_TENSOR.POS_EMBD,
|
| 1360 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1361 |
+
MODEL_TENSOR.OUTPUT,
|
| 1362 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1363 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1364 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1365 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1366 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1367 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1368 |
+
MODEL_TENSOR.FFN_UP,
|
| 1369 |
+
],
|
| 1370 |
+
MODEL_ARCH.ORION: [
|
| 1371 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1372 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1373 |
+
MODEL_TENSOR.OUTPUT,
|
| 1374 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1375 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1376 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1377 |
+
MODEL_TENSOR.ATTN_K,
|
| 1378 |
+
MODEL_TENSOR.ATTN_V,
|
| 1379 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1380 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1381 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1382 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1383 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1384 |
+
MODEL_TENSOR.FFN_UP,
|
| 1385 |
+
],
|
| 1386 |
+
MODEL_ARCH.INTERNLM2: [
|
| 1387 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1388 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1389 |
+
MODEL_TENSOR.OUTPUT,
|
| 1390 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1391 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1392 |
+
MODEL_TENSOR.ATTN_K,
|
| 1393 |
+
MODEL_TENSOR.ATTN_V,
|
| 1394 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1395 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1396 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1397 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1398 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1399 |
+
MODEL_TENSOR.FFN_UP,
|
| 1400 |
+
],
|
| 1401 |
+
MODEL_ARCH.MINICPM: [
|
| 1402 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1403 |
+
MODEL_TENSOR.OUTPUT,
|
| 1404 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1405 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1406 |
+
MODEL_TENSOR.ROPE_FACTORS_LONG,
|
| 1407 |
+
MODEL_TENSOR.ROPE_FACTORS_SHORT,
|
| 1408 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1409 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1410 |
+
MODEL_TENSOR.ATTN_K,
|
| 1411 |
+
MODEL_TENSOR.ATTN_V,
|
| 1412 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1413 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1414 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1415 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1416 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1417 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1418 |
+
MODEL_TENSOR.FFN_UP,
|
| 1419 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1420 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1421 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1422 |
+
],
|
| 1423 |
+
MODEL_ARCH.MINICPM3: [
|
| 1424 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1425 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1426 |
+
MODEL_TENSOR.OUTPUT,
|
| 1427 |
+
MODEL_TENSOR.ROPE_FACTORS_LONG,
|
| 1428 |
+
MODEL_TENSOR.ROPE_FACTORS_SHORT,
|
| 1429 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1430 |
+
MODEL_TENSOR.ATTN_Q_A,
|
| 1431 |
+
MODEL_TENSOR.ATTN_Q_B,
|
| 1432 |
+
MODEL_TENSOR.ATTN_KV_A_MQA,
|
| 1433 |
+
MODEL_TENSOR.ATTN_KV_B,
|
| 1434 |
+
MODEL_TENSOR.ATTN_Q_A_NORM,
|
| 1435 |
+
MODEL_TENSOR.ATTN_KV_A_NORM,
|
| 1436 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1437 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1438 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1439 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1440 |
+
MODEL_TENSOR.FFN_UP,
|
| 1441 |
+
],
|
| 1442 |
+
MODEL_ARCH.GEMMA: [
|
| 1443 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1444 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1445 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1446 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1447 |
+
MODEL_TENSOR.ATTN_K,
|
| 1448 |
+
MODEL_TENSOR.ATTN_V,
|
| 1449 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1450 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1451 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1452 |
+
MODEL_TENSOR.FFN_UP,
|
| 1453 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1454 |
+
],
|
| 1455 |
+
MODEL_ARCH.GEMMA2: [
|
| 1456 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1457 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1458 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1459 |
+
MODEL_TENSOR.ATTN_K,
|
| 1460 |
+
MODEL_TENSOR.ATTN_V,
|
| 1461 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1462 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1463 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1464 |
+
MODEL_TENSOR.FFN_UP,
|
| 1465 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1466 |
+
MODEL_TENSOR.ATTN_POST_NORM,
|
| 1467 |
+
MODEL_TENSOR.FFN_PRE_NORM,
|
| 1468 |
+
MODEL_TENSOR.FFN_POST_NORM,
|
| 1469 |
+
],
|
| 1470 |
+
MODEL_ARCH.GEMMA3: [
|
| 1471 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1472 |
+
MODEL_TENSOR.OUTPUT,
|
| 1473 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1474 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1475 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1476 |
+
MODEL_TENSOR.ATTN_K,
|
| 1477 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1478 |
+
MODEL_TENSOR.ATTN_V,
|
| 1479 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1480 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1481 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1482 |
+
MODEL_TENSOR.FFN_UP,
|
| 1483 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1484 |
+
MODEL_TENSOR.ATTN_POST_NORM,
|
| 1485 |
+
MODEL_TENSOR.FFN_PRE_NORM,
|
| 1486 |
+
MODEL_TENSOR.FFN_POST_NORM,
|
| 1487 |
+
],
|
| 1488 |
+
MODEL_ARCH.STARCODER2: [
|
| 1489 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1490 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1491 |
+
MODEL_TENSOR.OUTPUT,
|
| 1492 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1493 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1494 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1495 |
+
MODEL_TENSOR.ATTN_K,
|
| 1496 |
+
MODEL_TENSOR.ATTN_V,
|
| 1497 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1498 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1499 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1500 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1501 |
+
MODEL_TENSOR.FFN_UP,
|
| 1502 |
+
],
|
| 1503 |
+
MODEL_ARCH.RWKV6: [
|
| 1504 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1505 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1506 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1507 |
+
MODEL_TENSOR.OUTPUT,
|
| 1508 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1509 |
+
MODEL_TENSOR.ATTN_NORM_2,
|
| 1510 |
+
MODEL_TENSOR.TIME_MIX_W1,
|
| 1511 |
+
MODEL_TENSOR.TIME_MIX_W2,
|
| 1512 |
+
MODEL_TENSOR.TIME_MIX_LERP_X,
|
| 1513 |
+
MODEL_TENSOR.TIME_MIX_LERP_K,
|
| 1514 |
+
MODEL_TENSOR.TIME_MIX_LERP_V,
|
| 1515 |
+
MODEL_TENSOR.TIME_MIX_LERP_R,
|
| 1516 |
+
MODEL_TENSOR.TIME_MIX_LERP_G,
|
| 1517 |
+
MODEL_TENSOR.TIME_MIX_LERP_W,
|
| 1518 |
+
MODEL_TENSOR.TIME_MIX_LERP_FUSED,
|
| 1519 |
+
MODEL_TENSOR.TIME_MIX_FIRST,
|
| 1520 |
+
MODEL_TENSOR.TIME_MIX_DECAY,
|
| 1521 |
+
MODEL_TENSOR.TIME_MIX_DECAY_W1,
|
| 1522 |
+
MODEL_TENSOR.TIME_MIX_DECAY_W2,
|
| 1523 |
+
MODEL_TENSOR.TIME_MIX_KEY,
|
| 1524 |
+
MODEL_TENSOR.TIME_MIX_VALUE,
|
| 1525 |
+
MODEL_TENSOR.TIME_MIX_RECEPTANCE,
|
| 1526 |
+
MODEL_TENSOR.TIME_MIX_GATE,
|
| 1527 |
+
MODEL_TENSOR.TIME_MIX_LN,
|
| 1528 |
+
MODEL_TENSOR.TIME_MIX_OUTPUT,
|
| 1529 |
+
MODEL_TENSOR.CHANNEL_MIX_LERP_K,
|
| 1530 |
+
MODEL_TENSOR.CHANNEL_MIX_LERP_R,
|
| 1531 |
+
MODEL_TENSOR.CHANNEL_MIX_KEY,
|
| 1532 |
+
MODEL_TENSOR.CHANNEL_MIX_RECEPTANCE,
|
| 1533 |
+
MODEL_TENSOR.CHANNEL_MIX_VALUE,
|
| 1534 |
+
],
|
| 1535 |
+
MODEL_ARCH.RWKV6QWEN2: [
|
| 1536 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1537 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1538 |
+
MODEL_TENSOR.OUTPUT,
|
| 1539 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1540 |
+
MODEL_TENSOR.TIME_MIX_W1,
|
| 1541 |
+
MODEL_TENSOR.TIME_MIX_W2,
|
| 1542 |
+
MODEL_TENSOR.TIME_MIX_LERP_X,
|
| 1543 |
+
MODEL_TENSOR.TIME_MIX_LERP_K,
|
| 1544 |
+
MODEL_TENSOR.TIME_MIX_LERP_V,
|
| 1545 |
+
MODEL_TENSOR.TIME_MIX_LERP_R,
|
| 1546 |
+
MODEL_TENSOR.TIME_MIX_LERP_G,
|
| 1547 |
+
MODEL_TENSOR.TIME_MIX_LERP_W,
|
| 1548 |
+
MODEL_TENSOR.TIME_MIX_LERP_FUSED,
|
| 1549 |
+
MODEL_TENSOR.TIME_MIX_FIRST,
|
| 1550 |
+
MODEL_TENSOR.TIME_MIX_DECAY,
|
| 1551 |
+
MODEL_TENSOR.TIME_MIX_DECAY_W1,
|
| 1552 |
+
MODEL_TENSOR.TIME_MIX_DECAY_W2,
|
| 1553 |
+
MODEL_TENSOR.TIME_MIX_KEY,
|
| 1554 |
+
MODEL_TENSOR.TIME_MIX_VALUE,
|
| 1555 |
+
MODEL_TENSOR.TIME_MIX_RECEPTANCE,
|
| 1556 |
+
MODEL_TENSOR.TIME_MIX_GATE,
|
| 1557 |
+
MODEL_TENSOR.TIME_MIX_LN,
|
| 1558 |
+
MODEL_TENSOR.TIME_MIX_OUTPUT,
|
| 1559 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1560 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1561 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1562 |
+
MODEL_TENSOR.FFN_UP,
|
| 1563 |
+
],
|
| 1564 |
+
MODEL_ARCH.RWKV7: [
|
| 1565 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1566 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1567 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1568 |
+
MODEL_TENSOR.OUTPUT,
|
| 1569 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1570 |
+
MODEL_TENSOR.ATTN_NORM_2,
|
| 1571 |
+
MODEL_TENSOR.TIME_MIX_LERP_FUSED,
|
| 1572 |
+
MODEL_TENSOR.TIME_MIX_W0,
|
| 1573 |
+
MODEL_TENSOR.TIME_MIX_W1,
|
| 1574 |
+
MODEL_TENSOR.TIME_MIX_W2,
|
| 1575 |
+
MODEL_TENSOR.TIME_MIX_A0,
|
| 1576 |
+
MODEL_TENSOR.TIME_MIX_A1,
|
| 1577 |
+
MODEL_TENSOR.TIME_MIX_A2,
|
| 1578 |
+
MODEL_TENSOR.TIME_MIX_V0,
|
| 1579 |
+
MODEL_TENSOR.TIME_MIX_V1,
|
| 1580 |
+
MODEL_TENSOR.TIME_MIX_V2,
|
| 1581 |
+
MODEL_TENSOR.TIME_MIX_G1,
|
| 1582 |
+
MODEL_TENSOR.TIME_MIX_G2,
|
| 1583 |
+
MODEL_TENSOR.TIME_MIX_K_K,
|
| 1584 |
+
MODEL_TENSOR.TIME_MIX_K_A,
|
| 1585 |
+
MODEL_TENSOR.TIME_MIX_R_K,
|
| 1586 |
+
MODEL_TENSOR.TIME_MIX_KEY,
|
| 1587 |
+
MODEL_TENSOR.TIME_MIX_VALUE,
|
| 1588 |
+
MODEL_TENSOR.TIME_MIX_RECEPTANCE,
|
| 1589 |
+
MODEL_TENSOR.TIME_MIX_LN,
|
| 1590 |
+
MODEL_TENSOR.TIME_MIX_OUTPUT,
|
| 1591 |
+
MODEL_TENSOR.CHANNEL_MIX_LERP_K,
|
| 1592 |
+
MODEL_TENSOR.CHANNEL_MIX_KEY,
|
| 1593 |
+
MODEL_TENSOR.CHANNEL_MIX_VALUE,
|
| 1594 |
+
],
|
| 1595 |
+
MODEL_ARCH.ARWKV7: [
|
| 1596 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1597 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 1598 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1599 |
+
MODEL_TENSOR.OUTPUT,
|
| 1600 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1601 |
+
MODEL_TENSOR.TIME_MIX_LERP_FUSED,
|
| 1602 |
+
MODEL_TENSOR.TIME_MIX_W0,
|
| 1603 |
+
MODEL_TENSOR.TIME_MIX_W1,
|
| 1604 |
+
MODEL_TENSOR.TIME_MIX_W2,
|
| 1605 |
+
MODEL_TENSOR.TIME_MIX_A0,
|
| 1606 |
+
MODEL_TENSOR.TIME_MIX_A1,
|
| 1607 |
+
MODEL_TENSOR.TIME_MIX_A2,
|
| 1608 |
+
MODEL_TENSOR.TIME_MIX_V0,
|
| 1609 |
+
MODEL_TENSOR.TIME_MIX_V1,
|
| 1610 |
+
MODEL_TENSOR.TIME_MIX_V2,
|
| 1611 |
+
MODEL_TENSOR.TIME_MIX_G1,
|
| 1612 |
+
MODEL_TENSOR.TIME_MIX_G2,
|
| 1613 |
+
MODEL_TENSOR.TIME_MIX_K_K,
|
| 1614 |
+
MODEL_TENSOR.TIME_MIX_K_A,
|
| 1615 |
+
MODEL_TENSOR.TIME_MIX_R_K,
|
| 1616 |
+
MODEL_TENSOR.TIME_MIX_KEY,
|
| 1617 |
+
MODEL_TENSOR.TIME_MIX_VALUE,
|
| 1618 |
+
MODEL_TENSOR.TIME_MIX_RECEPTANCE,
|
| 1619 |
+
MODEL_TENSOR.TIME_MIX_LN,
|
| 1620 |
+
MODEL_TENSOR.TIME_MIX_OUTPUT,
|
| 1621 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1622 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1623 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1624 |
+
MODEL_TENSOR.FFN_UP,
|
| 1625 |
+
],
|
| 1626 |
+
MODEL_ARCH.MAMBA: [
|
| 1627 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1628 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1629 |
+
MODEL_TENSOR.OUTPUT,
|
| 1630 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1631 |
+
MODEL_TENSOR.SSM_IN,
|
| 1632 |
+
MODEL_TENSOR.SSM_CONV1D,
|
| 1633 |
+
MODEL_TENSOR.SSM_X,
|
| 1634 |
+
MODEL_TENSOR.SSM_DT,
|
| 1635 |
+
MODEL_TENSOR.SSM_A,
|
| 1636 |
+
MODEL_TENSOR.SSM_D,
|
| 1637 |
+
MODEL_TENSOR.SSM_OUT,
|
| 1638 |
+
],
|
| 1639 |
+
MODEL_ARCH.XVERSE: [
|
| 1640 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1641 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1642 |
+
MODEL_TENSOR.OUTPUT,
|
| 1643 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1644 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1645 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1646 |
+
MODEL_TENSOR.ATTN_K,
|
| 1647 |
+
MODEL_TENSOR.ATTN_V,
|
| 1648 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1649 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1650 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1651 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1652 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1653 |
+
MODEL_TENSOR.FFN_UP,
|
| 1654 |
+
],
|
| 1655 |
+
MODEL_ARCH.COMMAND_R: [
|
| 1656 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1657 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1658 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1659 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1660 |
+
MODEL_TENSOR.ATTN_K,
|
| 1661 |
+
MODEL_TENSOR.ATTN_V,
|
| 1662 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1663 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1664 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1665 |
+
MODEL_TENSOR.FFN_UP,
|
| 1666 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1667 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1668 |
+
],
|
| 1669 |
+
MODEL_ARCH.COHERE2: [
|
| 1670 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1671 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1672 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1673 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1674 |
+
MODEL_TENSOR.ATTN_K,
|
| 1675 |
+
MODEL_TENSOR.ATTN_V,
|
| 1676 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1677 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1678 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1679 |
+
MODEL_TENSOR.FFN_UP,
|
| 1680 |
+
],
|
| 1681 |
+
MODEL_ARCH.DBRX: [
|
| 1682 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1683 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1684 |
+
MODEL_TENSOR.OUTPUT,
|
| 1685 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1686 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1687 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1688 |
+
MODEL_TENSOR.ATTN_OUT_NORM,
|
| 1689 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1690 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1691 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1692 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1693 |
+
],
|
| 1694 |
+
MODEL_ARCH.OLMO: [
|
| 1695 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1696 |
+
MODEL_TENSOR.OUTPUT,
|
| 1697 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1698 |
+
MODEL_TENSOR.ATTN_K,
|
| 1699 |
+
MODEL_TENSOR.ATTN_V,
|
| 1700 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1701 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1702 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1703 |
+
MODEL_TENSOR.FFN_UP,
|
| 1704 |
+
],
|
| 1705 |
+
MODEL_ARCH.OLMO2: [
|
| 1706 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1707 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1708 |
+
MODEL_TENSOR.OUTPUT,
|
| 1709 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1710 |
+
MODEL_TENSOR.ATTN_K,
|
| 1711 |
+
MODEL_TENSOR.ATTN_V,
|
| 1712 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1713 |
+
MODEL_TENSOR.ATTN_POST_NORM,
|
| 1714 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1715 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1716 |
+
MODEL_TENSOR.FFN_POST_NORM,
|
| 1717 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1718 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1719 |
+
MODEL_TENSOR.FFN_UP,
|
| 1720 |
+
],
|
| 1721 |
+
MODEL_ARCH.OLMOE: [
|
| 1722 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1723 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1724 |
+
MODEL_TENSOR.OUTPUT,
|
| 1725 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1726 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1727 |
+
MODEL_TENSOR.ATTN_K,
|
| 1728 |
+
MODEL_TENSOR.ATTN_V,
|
| 1729 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1730 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1731 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1732 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1733 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1734 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1735 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1736 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1737 |
+
],
|
| 1738 |
+
MODEL_ARCH.OPENELM: [
|
| 1739 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1740 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1741 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1742 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1743 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 1744 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 1745 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1746 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1747 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1748 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1749 |
+
MODEL_TENSOR.FFN_UP,
|
| 1750 |
+
],
|
| 1751 |
+
MODEL_ARCH.ARCTIC: [
|
| 1752 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1753 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1754 |
+
MODEL_TENSOR.OUTPUT,
|
| 1755 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1756 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1757 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1758 |
+
MODEL_TENSOR.ATTN_K,
|
| 1759 |
+
MODEL_TENSOR.ATTN_V,
|
| 1760 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1761 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1762 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1763 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1764 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1765 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1766 |
+
MODEL_TENSOR.FFN_UP,
|
| 1767 |
+
MODEL_TENSOR.FFN_NORM_EXP,
|
| 1768 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1769 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1770 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1771 |
+
],
|
| 1772 |
+
MODEL_ARCH.DEEPSEEK: [
|
| 1773 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1774 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1775 |
+
MODEL_TENSOR.OUTPUT,
|
| 1776 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1777 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1778 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1779 |
+
MODEL_TENSOR.ATTN_K,
|
| 1780 |
+
MODEL_TENSOR.ATTN_V,
|
| 1781 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1782 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1783 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1784 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1785 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1786 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1787 |
+
MODEL_TENSOR.FFN_UP,
|
| 1788 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1789 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1790 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1791 |
+
MODEL_TENSOR.FFN_GATE_SHEXP,
|
| 1792 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP,
|
| 1793 |
+
MODEL_TENSOR.FFN_UP_SHEXP,
|
| 1794 |
+
],
|
| 1795 |
+
MODEL_ARCH.DEEPSEEK2: [
|
| 1796 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1797 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1798 |
+
MODEL_TENSOR.OUTPUT,
|
| 1799 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1800 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1801 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1802 |
+
MODEL_TENSOR.ATTN_Q_A,
|
| 1803 |
+
MODEL_TENSOR.ATTN_Q_B,
|
| 1804 |
+
MODEL_TENSOR.ATTN_KV_A_MQA,
|
| 1805 |
+
MODEL_TENSOR.ATTN_KV_B,
|
| 1806 |
+
MODEL_TENSOR.ATTN_K_B,
|
| 1807 |
+
MODEL_TENSOR.ATTN_V_B,
|
| 1808 |
+
MODEL_TENSOR.ATTN_Q_A_NORM,
|
| 1809 |
+
MODEL_TENSOR.ATTN_KV_A_NORM,
|
| 1810 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1811 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1812 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 1813 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1814 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1815 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1816 |
+
MODEL_TENSOR.FFN_UP,
|
| 1817 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 1818 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 1819 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 1820 |
+
MODEL_TENSOR.FFN_GATE_SHEXP,
|
| 1821 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP,
|
| 1822 |
+
MODEL_TENSOR.FFN_UP_SHEXP,
|
| 1823 |
+
MODEL_TENSOR.FFN_EXP_PROBS_B,
|
| 1824 |
+
],
|
| 1825 |
+
MODEL_ARCH.PLM: [
|
| 1826 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1827 |
+
MODEL_TENSOR.OUTPUT,
|
| 1828 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1829 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1830 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1831 |
+
MODEL_TENSOR.ATTN_KV_A_MQA,
|
| 1832 |
+
MODEL_TENSOR.ATTN_KV_A_NORM,
|
| 1833 |
+
MODEL_TENSOR.ATTN_KV_B,
|
| 1834 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1835 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1836 |
+
MODEL_TENSOR.FFN_UP,
|
| 1837 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1838 |
+
],
|
| 1839 |
+
MODEL_ARCH.CHATGLM : [
|
| 1840 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1841 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1842 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1843 |
+
MODEL_TENSOR.OUTPUT,
|
| 1844 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1845 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1846 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1847 |
+
MODEL_TENSOR.ATTN_K,
|
| 1848 |
+
MODEL_TENSOR.ATTN_V,
|
| 1849 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1850 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1851 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1852 |
+
MODEL_TENSOR.FFN_UP,
|
| 1853 |
+
],
|
| 1854 |
+
MODEL_ARCH.GLM4 : [
|
| 1855 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1856 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1857 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1858 |
+
MODEL_TENSOR.OUTPUT,
|
| 1859 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1860 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1861 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1862 |
+
MODEL_TENSOR.ATTN_K,
|
| 1863 |
+
MODEL_TENSOR.ATTN_V,
|
| 1864 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1865 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1866 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1867 |
+
MODEL_TENSOR.FFN_UP,
|
| 1868 |
+
MODEL_TENSOR.ATTN_POST_NORM,
|
| 1869 |
+
MODEL_TENSOR.FFN_POST_NORM,
|
| 1870 |
+
],
|
| 1871 |
+
MODEL_ARCH.BITNET: [
|
| 1872 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1873 |
+
MODEL_TENSOR.ATTN_K,
|
| 1874 |
+
MODEL_TENSOR.ATTN_V,
|
| 1875 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1876 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1877 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1878 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1879 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1880 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1881 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1882 |
+
MODEL_TENSOR.FFN_UP,
|
| 1883 |
+
MODEL_TENSOR.ATTN_SUB_NORM,
|
| 1884 |
+
MODEL_TENSOR.FFN_SUB_NORM,
|
| 1885 |
+
],
|
| 1886 |
+
MODEL_ARCH.T5: [
|
| 1887 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1888 |
+
MODEL_TENSOR.OUTPUT,
|
| 1889 |
+
MODEL_TENSOR.DEC_ATTN_NORM,
|
| 1890 |
+
MODEL_TENSOR.DEC_ATTN_Q,
|
| 1891 |
+
MODEL_TENSOR.DEC_ATTN_K,
|
| 1892 |
+
MODEL_TENSOR.DEC_ATTN_V,
|
| 1893 |
+
MODEL_TENSOR.DEC_ATTN_OUT,
|
| 1894 |
+
MODEL_TENSOR.DEC_ATTN_REL_B,
|
| 1895 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_NORM,
|
| 1896 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_Q,
|
| 1897 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_K,
|
| 1898 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_V,
|
| 1899 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_OUT,
|
| 1900 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_REL_B,
|
| 1901 |
+
MODEL_TENSOR.DEC_FFN_NORM,
|
| 1902 |
+
MODEL_TENSOR.DEC_FFN_GATE,
|
| 1903 |
+
MODEL_TENSOR.DEC_FFN_DOWN,
|
| 1904 |
+
MODEL_TENSOR.DEC_FFN_UP,
|
| 1905 |
+
MODEL_TENSOR.DEC_OUTPUT_NORM,
|
| 1906 |
+
MODEL_TENSOR.ENC_ATTN_NORM,
|
| 1907 |
+
MODEL_TENSOR.ENC_ATTN_Q,
|
| 1908 |
+
MODEL_TENSOR.ENC_ATTN_K,
|
| 1909 |
+
MODEL_TENSOR.ENC_ATTN_V,
|
| 1910 |
+
MODEL_TENSOR.ENC_ATTN_OUT,
|
| 1911 |
+
MODEL_TENSOR.ENC_ATTN_REL_B,
|
| 1912 |
+
MODEL_TENSOR.ENC_FFN_NORM,
|
| 1913 |
+
MODEL_TENSOR.ENC_FFN_GATE,
|
| 1914 |
+
MODEL_TENSOR.ENC_FFN_DOWN,
|
| 1915 |
+
MODEL_TENSOR.ENC_FFN_UP,
|
| 1916 |
+
MODEL_TENSOR.ENC_OUTPUT_NORM,
|
| 1917 |
+
],
|
| 1918 |
+
MODEL_ARCH.T5ENCODER: [
|
| 1919 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1920 |
+
MODEL_TENSOR.OUTPUT,
|
| 1921 |
+
MODEL_TENSOR.ENC_ATTN_NORM,
|
| 1922 |
+
MODEL_TENSOR.ENC_ATTN_Q,
|
| 1923 |
+
MODEL_TENSOR.ENC_ATTN_K,
|
| 1924 |
+
MODEL_TENSOR.ENC_ATTN_V,
|
| 1925 |
+
MODEL_TENSOR.ENC_ATTN_OUT,
|
| 1926 |
+
MODEL_TENSOR.ENC_ATTN_REL_B,
|
| 1927 |
+
MODEL_TENSOR.ENC_FFN_NORM,
|
| 1928 |
+
MODEL_TENSOR.ENC_FFN_GATE,
|
| 1929 |
+
MODEL_TENSOR.ENC_FFN_DOWN,
|
| 1930 |
+
MODEL_TENSOR.ENC_FFN_UP,
|
| 1931 |
+
MODEL_TENSOR.ENC_OUTPUT_NORM,
|
| 1932 |
+
],
|
| 1933 |
+
MODEL_ARCH.JAIS: [
|
| 1934 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1935 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1936 |
+
MODEL_TENSOR.OUTPUT,
|
| 1937 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1938 |
+
MODEL_TENSOR.ATTN_QKV,
|
| 1939 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1940 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1941 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1942 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1943 |
+
MODEL_TENSOR.FFN_UP,
|
| 1944 |
+
],
|
| 1945 |
+
MODEL_ARCH.NEMOTRON: [
|
| 1946 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1947 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1948 |
+
MODEL_TENSOR.OUTPUT,
|
| 1949 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1950 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1951 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1952 |
+
MODEL_TENSOR.ATTN_K,
|
| 1953 |
+
MODEL_TENSOR.ATTN_V,
|
| 1954 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1955 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1956 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1957 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1958 |
+
MODEL_TENSOR.FFN_UP,
|
| 1959 |
+
],
|
| 1960 |
+
MODEL_ARCH.EXAONE: [
|
| 1961 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1962 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1963 |
+
MODEL_TENSOR.OUTPUT,
|
| 1964 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 1965 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1966 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1967 |
+
MODEL_TENSOR.ATTN_K,
|
| 1968 |
+
MODEL_TENSOR.ATTN_V,
|
| 1969 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1970 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 1971 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1972 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1973 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1974 |
+
MODEL_TENSOR.FFN_UP,
|
| 1975 |
+
],
|
| 1976 |
+
MODEL_ARCH.GRANITE: [
|
| 1977 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1978 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1979 |
+
MODEL_TENSOR.OUTPUT,
|
| 1980 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1981 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1982 |
+
MODEL_TENSOR.ATTN_K,
|
| 1983 |
+
MODEL_TENSOR.ATTN_V,
|
| 1984 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1985 |
+
MODEL_TENSOR.FFN_NORM,
|
| 1986 |
+
MODEL_TENSOR.FFN_GATE,
|
| 1987 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 1988 |
+
MODEL_TENSOR.FFN_UP,
|
| 1989 |
+
],
|
| 1990 |
+
MODEL_ARCH.GRANITE_MOE: [
|
| 1991 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 1992 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 1993 |
+
MODEL_TENSOR.OUTPUT,
|
| 1994 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 1995 |
+
MODEL_TENSOR.ATTN_Q,
|
| 1996 |
+
MODEL_TENSOR.ATTN_K,
|
| 1997 |
+
MODEL_TENSOR.ATTN_V,
|
| 1998 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 1999 |
+
MODEL_TENSOR.FFN_NORM,
|
| 2000 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 2001 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 2002 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 2003 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 2004 |
+
MODEL_TENSOR.FFN_GATE_SHEXP,
|
| 2005 |
+
MODEL_TENSOR.FFN_UP_SHEXP,
|
| 2006 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP,
|
| 2007 |
+
],
|
| 2008 |
+
MODEL_ARCH.CHAMELEON: [
|
| 2009 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 2010 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 2011 |
+
MODEL_TENSOR.OUTPUT,
|
| 2012 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 2013 |
+
MODEL_TENSOR.ATTN_Q,
|
| 2014 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 2015 |
+
MODEL_TENSOR.ATTN_K,
|
| 2016 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 2017 |
+
MODEL_TENSOR.ATTN_V,
|
| 2018 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 2019 |
+
MODEL_TENSOR.FFN_NORM,
|
| 2020 |
+
MODEL_TENSOR.FFN_GATE,
|
| 2021 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 2022 |
+
MODEL_TENSOR.FFN_UP,
|
| 2023 |
+
],
|
| 2024 |
+
MODEL_ARCH.WAVTOKENIZER_DEC: [
|
| 2025 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 2026 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM,
|
| 2027 |
+
MODEL_TENSOR.CONV1D,
|
| 2028 |
+
MODEL_TENSOR.CONVNEXT_DW,
|
| 2029 |
+
MODEL_TENSOR.CONVNEXT_NORM,
|
| 2030 |
+
MODEL_TENSOR.CONVNEXT_PW1,
|
| 2031 |
+
MODEL_TENSOR.CONVNEXT_PW2,
|
| 2032 |
+
MODEL_TENSOR.CONVNEXT_GAMMA,
|
| 2033 |
+
MODEL_TENSOR.OUTPUT,
|
| 2034 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 2035 |
+
MODEL_TENSOR.POSNET_CONV1,
|
| 2036 |
+
MODEL_TENSOR.POSNET_CONV2,
|
| 2037 |
+
MODEL_TENSOR.POSNET_NORM,
|
| 2038 |
+
MODEL_TENSOR.POSNET_NORM1,
|
| 2039 |
+
MODEL_TENSOR.POSNET_NORM2,
|
| 2040 |
+
MODEL_TENSOR.POSNET_ATTN_NORM,
|
| 2041 |
+
MODEL_TENSOR.POSNET_ATTN_Q,
|
| 2042 |
+
MODEL_TENSOR.POSNET_ATTN_K,
|
| 2043 |
+
MODEL_TENSOR.POSNET_ATTN_V,
|
| 2044 |
+
MODEL_TENSOR.POSNET_ATTN_OUT,
|
| 2045 |
+
],
|
| 2046 |
+
MODEL_ARCH.BAILINGMOE: [
|
| 2047 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 2048 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 2049 |
+
MODEL_TENSOR.OUTPUT,
|
| 2050 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 2051 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 2052 |
+
MODEL_TENSOR.ATTN_Q,
|
| 2053 |
+
MODEL_TENSOR.ATTN_K,
|
| 2054 |
+
MODEL_TENSOR.ATTN_V,
|
| 2055 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 2056 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 2057 |
+
MODEL_TENSOR.FFN_NORM,
|
| 2058 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 2059 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 2060 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 2061 |
+
MODEL_TENSOR.FFN_GATE_SHEXP,
|
| 2062 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP,
|
| 2063 |
+
MODEL_TENSOR.FFN_UP_SHEXP,
|
| 2064 |
+
],
|
| 2065 |
+
MODEL_ARCH.DOTS1: [
|
| 2066 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 2067 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 2068 |
+
MODEL_TENSOR.OUTPUT,
|
| 2069 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 2070 |
+
MODEL_TENSOR.ATTN_Q,
|
| 2071 |
+
MODEL_TENSOR.ATTN_Q_NORM,
|
| 2072 |
+
MODEL_TENSOR.ATTN_K,
|
| 2073 |
+
MODEL_TENSOR.ATTN_K_NORM,
|
| 2074 |
+
MODEL_TENSOR.ATTN_V,
|
| 2075 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 2076 |
+
MODEL_TENSOR.FFN_EXP_PROBS_B,
|
| 2077 |
+
MODEL_TENSOR.FFN_NORM,
|
| 2078 |
+
MODEL_TENSOR.FFN_GATE,
|
| 2079 |
+
MODEL_TENSOR.FFN_GATE_EXP,
|
| 2080 |
+
MODEL_TENSOR.FFN_GATE_INP,
|
| 2081 |
+
MODEL_TENSOR.FFN_GATE_SHEXP,
|
| 2082 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 2083 |
+
MODEL_TENSOR.FFN_DOWN_EXP,
|
| 2084 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP,
|
| 2085 |
+
MODEL_TENSOR.FFN_UP,
|
| 2086 |
+
MODEL_TENSOR.FFN_UP_EXP,
|
| 2087 |
+
MODEL_TENSOR.FFN_UP_SHEXP,
|
| 2088 |
+
],
|
| 2089 |
+
MODEL_ARCH.ARCEE: [
|
| 2090 |
+
MODEL_TENSOR.TOKEN_EMBD,
|
| 2091 |
+
MODEL_TENSOR.OUTPUT_NORM,
|
| 2092 |
+
MODEL_TENSOR.OUTPUT,
|
| 2093 |
+
MODEL_TENSOR.ROPE_FREQS,
|
| 2094 |
+
MODEL_TENSOR.ATTN_NORM,
|
| 2095 |
+
MODEL_TENSOR.ATTN_Q,
|
| 2096 |
+
MODEL_TENSOR.ATTN_K,
|
| 2097 |
+
MODEL_TENSOR.ATTN_V,
|
| 2098 |
+
MODEL_TENSOR.ATTN_OUT,
|
| 2099 |
+
MODEL_TENSOR.ATTN_ROT_EMBD,
|
| 2100 |
+
MODEL_TENSOR.FFN_NORM,
|
| 2101 |
+
MODEL_TENSOR.FFN_DOWN,
|
| 2102 |
+
MODEL_TENSOR.FFN_UP,
|
| 2103 |
+
],
|
| 2104 |
+
# TODO
|
| 2105 |
+
}
|
| 2106 |
+
|
| 2107 |
+
# tensors that will not be serialized
# NOTE(review): every entry is a RoPE helper tensor (ROPE_FREQS / ATTN_ROT_EMBD);
# presumably these are recomputed at load time and therefore dropped by the
# converters — confirm against the consumers before extending this table.
MODEL_TENSOR_SKIP: dict[MODEL_ARCH, list[MODEL_TENSOR]] = {
    MODEL_ARCH.LLAMA: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.DECI: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.BAICHUAN: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.QWEN: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.CODESHELL: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.ORION: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.STARCODER2: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.XVERSE: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.DEEPSEEK: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.DEEPSEEK2: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    # CHATGLM and BAILINGMOE skip only ROPE_FREQS (no ATTN_ROT_EMBD entry)
    MODEL_ARCH.CHATGLM: [
        MODEL_TENSOR.ROPE_FREQS,
    ],
    MODEL_ARCH.NEMOTRON: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.BAILINGMOE: [
        MODEL_TENSOR.ROPE_FREQS,
    ],
}
|
| 2160 |
+
|
| 2161 |
+
#
|
| 2162 |
+
# types
|
| 2163 |
+
#
|
| 2164 |
+
|
| 2165 |
+
|
| 2166 |
+
class TokenType(IntEnum):
    """Category of a tokenizer vocabulary entry.

    NOTE(review): numbering starts at 1, not 0 — presumably mirroring an
    external definition; confirm before renumbering.
    """
    NORMAL = 1
    UNKNOWN = 2
    CONTROL = 3
    USER_DEFINED = 4
    UNUSED = 5
    BYTE = 6
|
| 2173 |
+
|
| 2174 |
+
|
| 2175 |
+
class RopeScalingType(Enum):
    """RoPE context-length scaling strategies (serialized as their string values)."""
    NONE = 'none'
    LINEAR = 'linear'
    YARN = 'yarn'
    LONGROPE = 'longrope'
|
| 2180 |
+
|
| 2181 |
+
|
| 2182 |
+
class PoolingType(IntEnum):
    """How per-token embeddings are pooled into a single vector."""
    NONE = 0
    MEAN = 1
    CLS = 2
    LAST = 3
    RANK = 4
|
| 2188 |
+
|
| 2189 |
+
|
| 2190 |
+
class GGMLQuantizationType(IntEnum):
    """Tensor data types understood by GGML/GGUF.

    Values are a wire format — never renumber.  Gaps (4-5, 31-33) are
    intentionally unused; per the comments on LlamaFileType below, some of
    them belonged to since-removed quantization formats.
    """
    F32 = 0
    F16 = 1
    Q4_0 = 2
    Q4_1 = 3
    Q5_0 = 6
    Q5_1 = 7
    Q8_0 = 8
    Q8_1 = 9
    Q2_K = 10
    Q3_K = 11
    Q4_K = 12
    Q5_K = 13
    Q6_K = 14
    Q8_K = 15
    IQ2_XXS = 16
    IQ2_XS = 17
    IQ3_XXS = 18
    IQ1_S = 19
    IQ4_NL = 20
    IQ3_S = 21
    IQ2_S = 22
    IQ4_XS = 23
    I8 = 24
    I16 = 25
    I32 = 26
    I64 = 27
    F64 = 28
    IQ1_M = 29
    BF16 = 30
    TQ1_0 = 34
    TQ2_0 = 35
|
| 2222 |
+
|
| 2223 |
+
|
| 2224 |
+
class ExpertGatingFuncType(IntEnum):
    """Gating (router) activation used to select MoE experts."""
    SOFTMAX = 1
    SIGMOID = 2
|
| 2227 |
+
|
| 2228 |
+
|
| 2229 |
+
# TODO: add GGMLFileType from ggml_ftype in ggml.h
|
| 2230 |
+
|
| 2231 |
+
|
| 2232 |
+
# from llama_ftype in llama.h
# ALL VALUES SHOULD BE THE SAME HERE AS THEY ARE OVER THERE.
class LlamaFileType(IntEnum):
    """Overall file quantization type, mirroring llama_ftype in llama.h.

    "MOSTLY_X" means most tensors use type X; 1d tensors are exempt.
    Commented-out members document values that are retired but must stay
    reserved so the numbering continues to match llama.h.
    """
    ALL_F32 = 0
    MOSTLY_F16 = 1  # except 1d tensors
    MOSTLY_Q4_0 = 2  # except 1d tensors
    MOSTLY_Q4_1 = 3  # except 1d tensors
    # MOSTLY_Q4_1_SOME_F16 = 4  # tok_embeddings.weight and output.weight are F16
    # MOSTLY_Q4_2 = 5  # support has been removed
    # MOSTLY_Q4_3 = 6  # support has been removed
    MOSTLY_Q8_0 = 7  # except 1d tensors
    MOSTLY_Q5_0 = 8  # except 1d tensors
    MOSTLY_Q5_1 = 9  # except 1d tensors
    MOSTLY_Q2_K = 10  # except 1d tensors
    MOSTLY_Q3_K_S = 11  # except 1d tensors
    MOSTLY_Q3_K_M = 12  # except 1d tensors
    MOSTLY_Q3_K_L = 13  # except 1d tensors
    MOSTLY_Q4_K_S = 14  # except 1d tensors
    MOSTLY_Q4_K_M = 15  # except 1d tensors
    MOSTLY_Q5_K_S = 16  # except 1d tensors
    MOSTLY_Q5_K_M = 17  # except 1d tensors
    MOSTLY_Q6_K = 18  # except 1d tensors
    MOSTLY_IQ2_XXS = 19  # except 1d tensors
    MOSTLY_IQ2_XS = 20  # except 1d tensors
    MOSTLY_Q2_K_S = 21  # except 1d tensors
    MOSTLY_IQ3_XS = 22  # except 1d tensors
    MOSTLY_IQ3_XXS = 23  # except 1d tensors
    MOSTLY_IQ1_S = 24  # except 1d tensors
    MOSTLY_IQ4_NL = 25  # except 1d tensors
    MOSTLY_IQ3_S = 26  # except 1d tensors
    MOSTLY_IQ3_M = 27  # except 1d tensors
    MOSTLY_IQ2_S = 28  # except 1d tensors
    MOSTLY_IQ2_M = 29  # except 1d tensors
    MOSTLY_IQ4_XS = 30  # except 1d tensors
    MOSTLY_IQ1_M = 31  # except 1d tensors
    MOSTLY_BF16 = 32  # except 1d tensors
    # MOSTLY_Q4_0_4_4 = 33  # removed from gguf files, use Q4_0 and runtime repack
    # MOSTLY_Q4_0_4_8 = 34  # removed from gguf files, use Q4_0 and runtime repack
    # MOSTLY_Q4_0_8_8 = 35  # removed from gguf files, use Q4_0 and runtime repack
    MOSTLY_TQ1_0 = 36  # except 1d tensors
    MOSTLY_TQ2_0 = 37  # except 1d tensors

    GUESSED = 1024  # not specified in the model file
|
| 2275 |
+
|
| 2276 |
+
|
| 2277 |
+
class GGUFEndian(IntEnum):
    """Byte order of a GGUF file."""
    LITTLE = 0
    BIG = 1
|
| 2280 |
+
|
| 2281 |
+
|
| 2282 |
+
class GGUFValueType(IntEnum):
    """Type tags for GGUF metadata (key/value) entries.

    Values are part of the on-disk format — never renumber.
    """
    UINT8 = 0
    INT8 = 1
    UINT16 = 2
    INT16 = 3
    UINT32 = 4
    INT32 = 5
    FLOAT32 = 6
    BOOL = 7
    STRING = 8
    ARRAY = 9
    UINT64 = 10
    INT64 = 11
    FLOAT64 = 12

    @staticmethod
    def get_type(val: Any) -> GGUFValueType:
        """Infer the GGUF value type used to serialize *val*.

        Plain ints map to INT32 and floats to FLOAT32; the 64-bit
        variants are never inferred automatically.

        Raises:
            ValueError: if *val* is of an unsupported type.
        """
        if isinstance(val, (str, bytes, bytearray)):
            return GGUFValueType.STRING
        if isinstance(val, list):
            return GGUFValueType.ARRAY
        if isinstance(val, float):
            return GGUFValueType.FLOAT32
        # bool is a subclass of int, so it must be tested before int
        if isinstance(val, bool):
            return GGUFValueType.BOOL
        if isinstance(val, int):
            return GGUFValueType.INT32
        # TODO: need help with 64-bit types in Python
        raise ValueError(f"Unknown type: {type(val)}")
|
| 2312 |
+
|
| 2313 |
+
|
| 2314 |
+
class VisionProjectorType:
    """String identifiers for multimodal (vision/audio) projector types.

    These are plain string constants, not an Enum, and are stored verbatim
    in model metadata — do not change existing values.
    """
    GEMMA3 = "gemma3"
    IDEFICS3 = "idefics3"
    PIXTRAL = "pixtral"
    LLAMA4 = "llama4"
    QWEN2VL = "qwen2vl_merger"
    QWEN25VL = "qwen2.5vl_merger"
    ULTRAVOX = "ultravox"
    INTERNVL = "internvl"
    QWEN2A = "qwen2a"  # audio
    QWEN25O = "qwen2.5o"  # omni
|
| 2325 |
+
|
| 2326 |
+
|
| 2327 |
+
# Items here are (block size, type size)
# block size: number of elements encoded per quantization block
# type size:  bytes occupied by one such block
QK_K = 256
GGML_QUANT_SIZES: dict[GGMLQuantizationType, tuple[int, int]] = {
    GGMLQuantizationType.F32: (1, 4),
    GGMLQuantizationType.F16: (1, 2),
    GGMLQuantizationType.Q4_0: (32, 2 + 16),
    GGMLQuantizationType.Q4_1: (32, 2 + 2 + 16),
    GGMLQuantizationType.Q5_0: (32, 2 + 4 + 16),
    GGMLQuantizationType.Q5_1: (32, 2 + 2 + 4 + 16),
    GGMLQuantizationType.Q8_0: (32, 2 + 32),
    GGMLQuantizationType.Q8_1: (32, 4 + 4 + 32),
    GGMLQuantizationType.Q2_K: (256, 2 + 2 + QK_K // 16 + QK_K // 4),
    GGMLQuantizationType.Q3_K: (256, 2 + QK_K // 4 + QK_K // 8 + 12),
    GGMLQuantizationType.Q4_K: (256, 2 + 2 + QK_K // 2 + 12),
    GGMLQuantizationType.Q5_K: (256, 2 + 2 + QK_K // 2 + QK_K // 8 + 12),
    GGMLQuantizationType.Q6_K: (256, 2 + QK_K // 2 + QK_K // 4 + QK_K // 16),
    GGMLQuantizationType.Q8_K: (256, 4 + QK_K + QK_K // 8),
    GGMLQuantizationType.IQ2_XXS: (256, 2 + QK_K // 4),
    GGMLQuantizationType.IQ2_XS: (256, 2 + QK_K // 4 + QK_K // 32),
    GGMLQuantizationType.IQ3_XXS: (256, 2 + QK_K // 4 + QK_K // 8),
    GGMLQuantizationType.IQ1_S: (256, 2 + QK_K // 8 + QK_K // 16),
    GGMLQuantizationType.IQ4_NL: (32, 2 + 16),
    GGMLQuantizationType.IQ3_S: (256, 2 + QK_K // 4 + QK_K // 8 + QK_K // 32 + 4),
    GGMLQuantizationType.IQ2_S: (256, 2 + QK_K // 4 + QK_K // 16),
    GGMLQuantizationType.IQ4_XS: (256, 2 + 2 + QK_K // 2 + QK_K // 64),
    GGMLQuantizationType.I8: (1, 1),
    GGMLQuantizationType.I16: (1, 2),
    GGMLQuantizationType.I32: (1, 4),
    GGMLQuantizationType.I64: (1, 8),
    GGMLQuantizationType.F64: (1, 8),
    GGMLQuantizationType.IQ1_M: (256, QK_K // 8 + QK_K // 16 + QK_K // 32),
    GGMLQuantizationType.BF16: (1, 2),
    GGMLQuantizationType.TQ1_0: (256, 2 + 4 * 13),
    GGMLQuantizationType.TQ2_0: (256, 2 + 64),
}
|
| 2362 |
+
|
| 2363 |
+
|
| 2364 |
+
# Aliases for backward compatibility.
# These flat module-level names predate the nested `Keys` namespace; keep
# them so older callers importing e.g. `gguf.KEY_GENERAL_NAME` still work.

# general
KEY_GENERAL_ARCHITECTURE = Keys.General.ARCHITECTURE
KEY_GENERAL_QUANTIZATION_VERSION = Keys.General.QUANTIZATION_VERSION
KEY_GENERAL_ALIGNMENT = Keys.General.ALIGNMENT
KEY_GENERAL_NAME = Keys.General.NAME
KEY_GENERAL_AUTHOR = Keys.General.AUTHOR
KEY_GENERAL_URL = Keys.General.URL
KEY_GENERAL_DESCRIPTION = Keys.General.DESCRIPTION
KEY_GENERAL_LICENSE = Keys.General.LICENSE
KEY_GENERAL_SOURCE_URL = Keys.General.SOURCE_URL
KEY_GENERAL_FILE_TYPE = Keys.General.FILE_TYPE

# LLM
KEY_VOCAB_SIZE = Keys.LLM.VOCAB_SIZE
KEY_CONTEXT_LENGTH = Keys.LLM.CONTEXT_LENGTH
KEY_EMBEDDING_LENGTH = Keys.LLM.EMBEDDING_LENGTH
KEY_BLOCK_COUNT = Keys.LLM.BLOCK_COUNT
KEY_FEED_FORWARD_LENGTH = Keys.LLM.FEED_FORWARD_LENGTH
KEY_USE_PARALLEL_RESIDUAL = Keys.LLM.USE_PARALLEL_RESIDUAL
KEY_TENSOR_DATA_LAYOUT = Keys.LLM.TENSOR_DATA_LAYOUT

# attention
KEY_ATTENTION_HEAD_COUNT = Keys.Attention.HEAD_COUNT
KEY_ATTENTION_HEAD_COUNT_KV = Keys.Attention.HEAD_COUNT_KV
KEY_ATTENTION_MAX_ALIBI_BIAS = Keys.Attention.MAX_ALIBI_BIAS
KEY_ATTENTION_CLAMP_KQV = Keys.Attention.CLAMP_KQV
KEY_ATTENTION_LAYERNORM_EPS = Keys.Attention.LAYERNORM_EPS
KEY_ATTENTION_LAYERNORM_RMS_EPS = Keys.Attention.LAYERNORM_RMS_EPS

# RoPE
KEY_ROPE_DIMENSION_COUNT = Keys.Rope.DIMENSION_COUNT
KEY_ROPE_FREQ_BASE = Keys.Rope.FREQ_BASE
KEY_ROPE_SCALING_TYPE = Keys.Rope.SCALING_TYPE
KEY_ROPE_SCALING_FACTOR = Keys.Rope.SCALING_FACTOR
KEY_ROPE_SCALING_ORIG_CTX_LEN = Keys.Rope.SCALING_ORIG_CTX_LEN
KEY_ROPE_SCALING_FINETUNED = Keys.Rope.SCALING_FINETUNED

# SSM
KEY_SSM_CONV_KERNEL = Keys.SSM.CONV_KERNEL
KEY_SSM_INNER_SIZE = Keys.SSM.INNER_SIZE
KEY_SSM_STATE_SIZE = Keys.SSM.STATE_SIZE
KEY_SSM_TIME_STEP_RANK = Keys.SSM.TIME_STEP_RANK
KEY_SSM_DT_B_C_RMS = Keys.SSM.DT_B_C_RMS

# tokenization
KEY_TOKENIZER_MODEL = Keys.Tokenizer.MODEL
KEY_TOKENIZER_PRE = Keys.Tokenizer.PRE
KEY_TOKENIZER_LIST = Keys.Tokenizer.LIST
KEY_TOKENIZER_TOKEN_TYPE = Keys.Tokenizer.TOKEN_TYPE
KEY_TOKENIZER_SCORES = Keys.Tokenizer.SCORES
KEY_TOKENIZER_MERGES = Keys.Tokenizer.MERGES
KEY_TOKENIZER_BOS_ID = Keys.Tokenizer.BOS_ID
KEY_TOKENIZER_EOS_ID = Keys.Tokenizer.EOS_ID
KEY_TOKENIZER_EOT_ID = Keys.Tokenizer.EOT_ID
KEY_TOKENIZER_EOM_ID = Keys.Tokenizer.EOM_ID
KEY_TOKENIZER_UNK_ID = Keys.Tokenizer.UNK_ID
KEY_TOKENIZER_SEP_ID = Keys.Tokenizer.SEP_ID
KEY_TOKENIZER_PAD_ID = Keys.Tokenizer.PAD_ID
KEY_TOKENIZER_MASK_ID = Keys.Tokenizer.MASK_ID
KEY_TOKENIZER_HF_JSON = Keys.Tokenizer.HF_JSON
KEY_TOKENIZER_RWKV = Keys.Tokenizer.RWKV

# fill-in-the-middle (FIM) special tokens
KEY_TOKENIZER_FIM_PRE_ID = Keys.Tokenizer.FIM_PRE_ID
KEY_TOKENIZER_FIM_SUF_ID = Keys.Tokenizer.FIM_SUF_ID
KEY_TOKENIZER_FIM_MID_ID = Keys.Tokenizer.FIM_MID_ID
KEY_TOKENIZER_FIM_PAD_ID = Keys.Tokenizer.FIM_PAD_ID
KEY_TOKENIZER_FIM_REP_ID = Keys.Tokenizer.FIM_REP_ID
KEY_TOKENIZER_FIM_SEP_ID = Keys.Tokenizer.FIM_SEP_ID

# deprecated
KEY_TOKENIZER_PREFIX_ID = Keys.Tokenizer.PREFIX_ID
KEY_TOKENIZER_SUFFIX_ID = Keys.Tokenizer.SUFFIX_ID
KEY_TOKENIZER_MIDDLE_ID = Keys.Tokenizer.MIDDLE_ID
|
lib/python3.13/site-packages/gguf/lazy.py
ADDED
|
@@ -0,0 +1,223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
from abc import ABC, ABCMeta, abstractmethod
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
from typing import Any, Callable
|
| 6 |
+
|
| 7 |
+
import numpy as np
|
| 8 |
+
from numpy.typing import DTypeLike
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class LazyMeta(ABCMeta):
    """Metaclass that makes LazyBase subclasses transparently wrap tensor ops.

    Attribute access falls through to the meta tensor via ``__getattr__``,
    and the arithmetic/comparison dunders are installed explicitly because
    special-method lookup bypasses ``__getattr__``.
    """

    def __new__(cls, name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs):
        def __getattr__(self, name: str) -> Any:
            meta_attr = getattr(self._meta, name)
            if callable(meta_attr):
                return type(self)._wrap_fn(
                    (lambda s, *args, **kwargs: getattr(s, name)(*args, **kwargs)),
                    use_self=self,
                )
            elif isinstance(meta_attr, self._tensor_type):
                # e.g. self.T with torch.Tensor should still be wrapped
                return type(self)._wrap_fn(lambda s: getattr(s, name))(self)
            else:
                # no need to wrap non-tensor properties,
                # and they likely don't depend on the actual contents of the tensor
                return meta_attr

        namespace["__getattr__"] = __getattr__

        # need to make a builder for the wrapped wrapper to copy the name,
        # or else it fails with very cryptic error messages,
        # because somehow the same string would end up in every closures
        def mk_wrap(op_name: str, *, meta_noop: bool = False):
            # need to wrap the wrapper to get self
            def wrapped_special_op(self, *args, **kwargs):
                return type(self)._wrap_fn(
                    getattr(type(self)._tensor_type, op_name),
                    meta_noop=meta_noop,
                )(self, *args, **kwargs)
            return wrapped_special_op

        # special methods bypass __getattr__, so they need to be added manually
        # ref: https://docs.python.org/3/reference/datamodel.html#special-lookup
        # NOTE: doing this from a metaclass is very convenient
        # TODO: make this even more comprehensive
        for binary_op in (
            # FIX: a missing comma after "not" used to implicitly concatenate
            # it with "abs" into the bogus name "notabs", so __abs__ was never
            # wrapped.  ("__not__" is not a real special method, but keeping it
            # is harmless and preserves the original author's list.)
            "lt", "le", "eq", "ne", "ge", "gt", "not",
            "abs", "add", "and", "floordiv", "invert", "lshift", "mod", "mul", "matmul",
            "neg", "or", "pos", "pow", "rshift", "sub", "truediv", "xor",
            "iadd", "iand", "ifloordiv", "ilshift", "imod", "imul", "ior", "irshift", "isub", "ixor",
            "radd", "rand", "rfloordiv", "rmul", "ror", "rpow", "rsub", "rtruediv", "rxor",
        ):
            attr_name = f"__{binary_op}__"
            # the result of these operators usually has the same shape and dtype as the input,
            # so evaluation on the meta tensor can be skipped.
            namespace[attr_name] = mk_wrap(attr_name, meta_noop=True)

        for special_op in (
            "getitem", "setitem", "len",
        ):
            attr_name = f"__{special_op}__"
            # these may change shape/dtype (or return non-tensors), so the
            # operation is actually evaluated on the meta tensor
            namespace[attr_name] = mk_wrap(attr_name, meta_noop=False)

        return super().__new__(cls, name, bases, namespace, **kwargs)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# Tree of lazy tensors
class LazyBase(ABC, metaclass=LazyMeta):
    """Node in a tree of lazily-evaluated tensors.

    A node holds either concrete data (``_data``) or a deferred computation
    (``_func`` applied to ``_args``/``_kwargs``), plus a cheap "meta" tensor
    (``_meta``) carrying dtype/shape so shapes can be propagated without
    evaluating anything.  Subclasses must set ``_tensor_type`` and implement
    ``meta_with_dtype_and_shape``.
    """
    _tensor_type: type            # the concrete tensor class being wrapped (e.g. np.ndarray)
    _meta: Any                    # dtype/shape stand-in, never holds real data
    _data: Any | None             # concrete result, once evaluated
    _args: tuple                  # positional args for the deferred call
    _kwargs: dict[str, Any]       # keyword args for the deferred call
    _func: Callable[[Any], Any] | None  # deferred computation, None once data exists

    def __init__(self, *, meta: Any, data: Any | None = None, args: tuple = (), kwargs: dict[str, Any] | None = None, func: Callable[[Any], Any] | None = None):
        super().__init__()
        self._meta = meta
        self._data = data
        self._args = args
        self._kwargs = kwargs if kwargs is not None else {}
        self._func = func
        # a node must be able to produce data, either directly or via func
        assert self._func is not None or self._data is not None

    def __init_subclass__(cls) -> None:
        if "_tensor_type" not in cls.__dict__:
            raise TypeError(f"property '_tensor_type' must be defined for {cls!r}")
        return super().__init_subclass__()

    @staticmethod
    def _recurse_apply(o: Any, fn: Callable[[Any], Any]) -> Any:
        """Apply *fn* to every LazyBase found in nested lists/tuples, keeping structure."""
        # TODO: dict and set
        if isinstance(o, (list, tuple)):
            L = []
            for item in o:
                L.append(LazyBase._recurse_apply(item, fn))
            if isinstance(o, tuple):
                L = tuple(L)
            return L
        elif isinstance(o, LazyBase):
            return fn(o)
        else:
            return o

    @classmethod
    def _wrap_fn(cls, fn: Callable, *, use_self: LazyBase | None = None, meta_noop: bool | DTypeLike | tuple[DTypeLike, Callable[[tuple[int, ...]], tuple[int, ...]]] = False) -> Callable[[Any], Any]:
        """Wrap *fn* so that calling it on lazy tensors returns a lazy result.

        meta_noop=False evaluates *fn* on the meta tensors to infer the result
        meta; meta_noop=True reuses the first argument's meta; a dtype (or a
        (dtype, shape_fn) tuple) overrides the inferred dtype/shape.
        """
        def wrapped_fn(*args, **kwargs):
            # NOTE: a dead `if kwargs is None` guard was removed here;
            # **kwargs always binds a dict.
            args = ((use_self,) if use_self is not None else ()) + args

            meta_args = LazyBase._recurse_apply(args, lambda t: t._meta)
            # TODO: maybe handle tensors in kwargs too

            if isinstance(meta_noop, bool) and not meta_noop:
                try:
                    res = fn(*meta_args, **kwargs)
                except NotImplementedError:
                    # running some operations on PyTorch's Meta tensors can cause this exception
                    res = None
            else:
                # some operators don't need to actually run on the meta tensors
                assert len(args) > 0
                res = args[0]
                assert isinstance(res, cls)
                res = res._meta
                # allow operations to override the dtype and shape
                if meta_noop is not True:
                    if isinstance(meta_noop, tuple):
                        dtype, shape = meta_noop
                        assert callable(shape)
                        res = cls.meta_with_dtype_and_shape(dtype, shape(res.shape))
                    else:
                        res = cls.meta_with_dtype_and_shape(meta_noop, res.shape)

            if isinstance(res, cls._tensor_type):
                return cls(meta=cls.eager_to_meta(res), args=args, kwargs=kwargs, func=fn)
            elif isinstance(res, tuple) and all(isinstance(t, cls._tensor_type) for t in res):
                # share the evaluation between lazy tuple elements
                shared_args: list = [args, None]

                def eager_tuple_element(a: list[Any], i: int = 0, /, **kw) -> LazyBase:
                    assert len(a) == 2
                    if a[1] is None:
                        a[1] = fn(*a[0], **kw)
                    return a[1][i]
                return tuple(cls(meta=cls.eager_to_meta(res[i]), args=(shared_args, i), kwargs=kwargs, func=eager_tuple_element) for i in range(len(res)))
            else:
                del res  # not needed
                # non-tensor return likely relies on the contents of the args
                # (e.g. the result of torch.equal)
                eager_args = cls.to_eager(args)
                return fn(*eager_args, **kwargs)
        return wrapped_fn

    @classmethod
    def to_eager(cls, t: Any) -> Any:
        """Force evaluation of *t* (or every lazy tensor nested inside it)."""
        def simple_to_eager(_t: LazyBase) -> Any:
            if _t._data is not None:
                return _t._data

            # NOTE: there's a recursion limit in Python (usually 1000)

            assert _t._func is not None
            _t._args = cls._recurse_apply(_t._args, simple_to_eager)
            _t._data = _t._func(*_t._args, **_t._kwargs)
            # sanity check
            assert _t._data is not None
            assert _t._data.dtype == _t._meta.dtype
            assert _t._data.shape == _t._meta.shape

            return _t._data

        # recurse into lists and/or tuples, keeping their structure
        return cls._recurse_apply(t, simple_to_eager)

    @classmethod
    def eager_to_meta(cls, t: Any) -> Any:
        """Build a meta stand-in matching the dtype and shape of eager tensor *t*."""
        return cls.meta_with_dtype_and_shape(t.dtype, t.shape)

    # must be overridden, meta tensor init is backend-specific
    @classmethod
    @abstractmethod
    def meta_with_dtype_and_shape(cls, dtype: Any, shape: Any) -> Any: pass

    @classmethod
    def from_eager(cls, t: Any) -> Any:
        """Wrap an eager tensor *t* as a lazy tensor (no-op if already lazy)."""
        if type(t) is cls:
            # already lazy
            return t
        elif isinstance(t, cls._tensor_type):
            return cls(meta=cls.eager_to_meta(t), data=t)
        else:
            # FIX: this used to *return* the TypeError instance instead of
            # raising it, silently handing callers an exception object.
            raise TypeError(f"{type(t)!r} is not compatible with {cls._tensor_type!r}")
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class LazyNumpyTensor(LazyBase):
    """Lazy tensor backed by numpy arrays."""
    _tensor_type = np.ndarray

    shape: tuple[int, ...]  # Makes the type checker happy in quants.py

    @classmethod
    def meta_with_dtype_and_shape(cls, dtype: DTypeLike, shape: tuple[int, ...]) -> np.ndarray[Any, Any]:
        """Return a zero-cost stand-in array of the given dtype and shape."""
        # A single zero element is viewed with all-zero strides, so the full
        # shape never allocates a real buffer.  Zero is used as the fill value
        # because integer dtypes such as np.int16 cannot represent np.nan.
        backing = np.zeros(1, dtype)
        zero_strides = tuple(0 for _ in shape)
        return np.lib.stride_tricks.as_strided(backing, shape, zero_strides)

    def astype(self, dtype, *args, **kwargs):
        """Return a lazy tensor whose evaluation casts this one to *dtype*."""
        new_meta = type(self).meta_with_dtype_and_shape(dtype, self._meta.shape)
        call_args = (self, dtype) + args
        cast = lambda a, *args, **kwargs: a.astype(*args, **kwargs)
        return type(self)(meta=new_meta, args=call_args, kwargs=kwargs, func=cast)

    def tofile(self, *args, **kwargs):
        """Force evaluation, then forward to ndarray.tofile."""
        return LazyNumpyTensor.to_eager(self).tofile(*args, **kwargs)

    # TODO: __array_function__
|
lib/python3.13/site-packages/gguf/py.typed
ADDED
|
File without changes
|
lib/python3.13/site-packages/gguf/quants.py
ADDED
|
@@ -0,0 +1,1269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
from abc import ABC, abstractmethod
|
| 3 |
+
from typing import Any, Callable, Sequence
|
| 4 |
+
from math import log2, ceil
|
| 5 |
+
|
| 6 |
+
from numpy.typing import DTypeLike
|
| 7 |
+
|
| 8 |
+
from .constants import GGML_QUANT_SIZES, GGMLQuantizationType, QK_K
|
| 9 |
+
from .lazy import LazyNumpyTensor
|
| 10 |
+
|
| 11 |
+
import numpy as np
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def quant_shape_to_byte_shape(shape: Sequence[int], quant_type: GGMLQuantizationType) -> tuple[int, ...]:
|
| 15 |
+
block_size, type_size = GGML_QUANT_SIZES[quant_type]
|
| 16 |
+
if shape[-1] % block_size != 0:
|
| 17 |
+
raise ValueError(f"Quantized tensor row size ({shape[-1]}) is not a multiple of {quant_type.name} block size ({block_size})")
|
| 18 |
+
return (*shape[:-1], shape[-1] // block_size * type_size)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def quant_shape_from_byte_shape(shape: Sequence[int], quant_type: GGMLQuantizationType) -> tuple[int, ...]:
|
| 22 |
+
block_size, type_size = GGML_QUANT_SIZES[quant_type]
|
| 23 |
+
if shape[-1] % type_size != 0:
|
| 24 |
+
raise ValueError(f"Quantized tensor bytes per row ({shape[-1]}) is not a multiple of {quant_type.name} type size ({type_size})")
|
| 25 |
+
return (*shape[:-1], shape[-1] // type_size * block_size)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# This is faster than np.vectorize and np.apply_along_axis because it works on more than one row at a time
|
| 29 |
+
def _apply_over_grouped_rows(func: Callable[[np.ndarray], np.ndarray], arr: np.ndarray, otype: DTypeLike, oshape: tuple[int, ...]) -> np.ndarray:
|
| 30 |
+
rows = arr.reshape((-1, arr.shape[-1]))
|
| 31 |
+
osize = 1
|
| 32 |
+
for dim in oshape:
|
| 33 |
+
osize *= dim
|
| 34 |
+
out = np.empty(shape=osize, dtype=otype)
|
| 35 |
+
# compute over groups of 16 rows (arbitrary, but seems good for performance)
|
| 36 |
+
n_groups = (rows.shape[0] // 16) or 1
|
| 37 |
+
np.concatenate([func(group).ravel() for group in np.array_split(rows, n_groups)], axis=0, out=out)
|
| 38 |
+
return out.reshape(oshape)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
# round away from zero
|
| 42 |
+
# ref: https://stackoverflow.com/a/59143326/22827863
|
| 43 |
+
def np_roundf(n: np.ndarray) -> np.ndarray:
|
| 44 |
+
a = abs(n)
|
| 45 |
+
floored = np.floor(a)
|
| 46 |
+
b = floored + np.floor(2 * (a - floored))
|
| 47 |
+
return np.sign(n) * b
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class QuantError(Exception): ...
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
_type_traits: dict[GGMLQuantizationType, type[__Quant]] = {}
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def quantize(data: np.ndarray, qtype: GGMLQuantizationType) -> np.ndarray:
|
| 57 |
+
if qtype == GGMLQuantizationType.F32:
|
| 58 |
+
return data.astype(np.float32, copy=False)
|
| 59 |
+
elif qtype == GGMLQuantizationType.F16:
|
| 60 |
+
return data.astype(np.float16, copy=False)
|
| 61 |
+
elif (q := _type_traits.get(qtype)) is not None:
|
| 62 |
+
return q.quantize(data)
|
| 63 |
+
else:
|
| 64 |
+
raise NotImplementedError(f"Quantization for {qtype.name} is not yet implemented")
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def dequantize(data: np.ndarray, qtype: GGMLQuantizationType) -> np.ndarray:
|
| 68 |
+
if qtype == GGMLQuantizationType.F32:
|
| 69 |
+
return data.view(np.float32)
|
| 70 |
+
elif qtype == GGMLQuantizationType.F16:
|
| 71 |
+
return data.view(np.float16).astype(np.float32)
|
| 72 |
+
elif (q := _type_traits.get(qtype)) is not None:
|
| 73 |
+
return q.dequantize(data)
|
| 74 |
+
else:
|
| 75 |
+
raise NotImplementedError(f"Dequantization for {qtype.name} is not yet implemented")
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class __Quant(ABC):
|
| 79 |
+
qtype: GGMLQuantizationType
|
| 80 |
+
block_size: int
|
| 81 |
+
type_size: int
|
| 82 |
+
|
| 83 |
+
grid: np.ndarray[Any, np.dtype[np.float32]] | None = None
|
| 84 |
+
grid_shape: tuple[int, int] = (0, 0)
|
| 85 |
+
grid_map: tuple[int | float, ...] = ()
|
| 86 |
+
grid_hex: bytes | None = None
|
| 87 |
+
|
| 88 |
+
def __init__(self):
|
| 89 |
+
return TypeError("Quant conversion classes can't have instances")
|
| 90 |
+
|
| 91 |
+
def __init_subclass__(cls, qtype: GGMLQuantizationType) -> None:
|
| 92 |
+
cls.qtype = qtype
|
| 93 |
+
cls.block_size, cls.type_size = GGML_QUANT_SIZES[qtype]
|
| 94 |
+
cls.__quantize_lazy = LazyNumpyTensor._wrap_fn(
|
| 95 |
+
cls.__quantize_array,
|
| 96 |
+
meta_noop=(np.uint8, cls.__shape_to_bytes)
|
| 97 |
+
)
|
| 98 |
+
cls.__dequantize_lazy = LazyNumpyTensor._wrap_fn(
|
| 99 |
+
cls.__dequantize_array,
|
| 100 |
+
meta_noop=(np.float32, cls.__shape_from_bytes)
|
| 101 |
+
)
|
| 102 |
+
assert qtype not in _type_traits
|
| 103 |
+
_type_traits[qtype] = cls
|
| 104 |
+
|
| 105 |
+
@classmethod
|
| 106 |
+
def init_grid(cls):
|
| 107 |
+
if cls.grid is not None or cls.grid_hex is None:
|
| 108 |
+
return
|
| 109 |
+
|
| 110 |
+
bits_per_elem = ceil(log2(len(cls.grid_map)))
|
| 111 |
+
assert bits_per_elem != 0, cls.qtype.name
|
| 112 |
+
elems_per_byte = 8 // bits_per_elem
|
| 113 |
+
|
| 114 |
+
grid = np.frombuffer(cls.grid_hex, dtype=np.uint8)
|
| 115 |
+
# decode hexadecimal chars from grid
|
| 116 |
+
grid = grid.reshape((-1, 2))
|
| 117 |
+
grid = (np.where(grid > 0x40, grid + 9, grid) & 0x0F) << np.array([4, 0], dtype=np.uint8).reshape((1, 2))
|
| 118 |
+
grid = grid[..., 0] | grid[..., 1]
|
| 119 |
+
# unpack the grid values
|
| 120 |
+
grid = grid.reshape((-1, 1)) >> np.array([i for i in range(0, 8, 8 // elems_per_byte)], dtype=np.uint8).reshape((1, elems_per_byte))
|
| 121 |
+
grid = (grid & ((1 << bits_per_elem) - 1)).reshape((-1, 1))
|
| 122 |
+
grid_map = np.array(cls.grid_map, dtype=np.float32).reshape((1, -1))
|
| 123 |
+
grid = np.take_along_axis(grid_map, grid, axis=-1)
|
| 124 |
+
cls.grid = grid.reshape((1, 1, *cls.grid_shape))
|
| 125 |
+
|
| 126 |
+
@classmethod
|
| 127 |
+
@abstractmethod
|
| 128 |
+
def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 129 |
+
raise NotImplementedError
|
| 130 |
+
|
| 131 |
+
@classmethod
|
| 132 |
+
@abstractmethod
|
| 133 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 134 |
+
raise NotImplementedError
|
| 135 |
+
|
| 136 |
+
@classmethod
|
| 137 |
+
def quantize_rows(cls, rows: np.ndarray) -> np.ndarray:
|
| 138 |
+
rows = rows.astype(np.float32, copy=False)
|
| 139 |
+
shape = rows.shape
|
| 140 |
+
n_blocks = rows.size // cls.block_size
|
| 141 |
+
blocks = rows.reshape((n_blocks, cls.block_size))
|
| 142 |
+
blocks = cls.quantize_blocks(blocks)
|
| 143 |
+
assert blocks.dtype == np.uint8
|
| 144 |
+
assert blocks.shape[-1] == cls.type_size
|
| 145 |
+
return blocks.reshape(cls.__shape_to_bytes(shape))
|
| 146 |
+
|
| 147 |
+
@classmethod
|
| 148 |
+
def dequantize_rows(cls, rows: np.ndarray) -> np.ndarray:
|
| 149 |
+
rows = rows.view(np.uint8)
|
| 150 |
+
shape = rows.shape
|
| 151 |
+
n_blocks = rows.size // cls.type_size
|
| 152 |
+
blocks = rows.reshape((n_blocks, cls.type_size))
|
| 153 |
+
blocks = cls.dequantize_blocks(blocks)
|
| 154 |
+
assert blocks.dtype == np.float32
|
| 155 |
+
assert blocks.shape[-1] == cls.block_size
|
| 156 |
+
return blocks.reshape(cls.__shape_from_bytes(shape))
|
| 157 |
+
|
| 158 |
+
@classmethod
|
| 159 |
+
def __shape_to_bytes(cls, shape: Sequence[int]):
|
| 160 |
+
return quant_shape_to_byte_shape(shape, cls.qtype)
|
| 161 |
+
|
| 162 |
+
@classmethod
|
| 163 |
+
def __shape_from_bytes(cls, shape: Sequence[int]):
|
| 164 |
+
return quant_shape_from_byte_shape(shape, cls.qtype)
|
| 165 |
+
|
| 166 |
+
@classmethod
|
| 167 |
+
def __quantize_array(cls, array: np.ndarray) -> np.ndarray:
|
| 168 |
+
return _apply_over_grouped_rows(cls.quantize_rows, arr=array, otype=np.uint8, oshape=cls.__shape_to_bytes(array.shape))
|
| 169 |
+
|
| 170 |
+
@classmethod
|
| 171 |
+
def __dequantize_array(cls, array: np.ndarray) -> np.ndarray:
|
| 172 |
+
cls.init_grid()
|
| 173 |
+
return _apply_over_grouped_rows(cls.dequantize_rows, arr=array, otype=np.float32, oshape=cls.__shape_from_bytes(array.shape))
|
| 174 |
+
|
| 175 |
+
@classmethod
|
| 176 |
+
def __quantize_lazy(cls, lazy_tensor: LazyNumpyTensor, /) -> Any:
|
| 177 |
+
pass
|
| 178 |
+
|
| 179 |
+
@classmethod
|
| 180 |
+
def __dequantize_lazy(cls, lazy_tensor: LazyNumpyTensor, /) -> Any:
|
| 181 |
+
pass
|
| 182 |
+
|
| 183 |
+
@classmethod
|
| 184 |
+
def can_quantize(cls, tensor: np.ndarray | LazyNumpyTensor) -> bool:
|
| 185 |
+
return tensor.shape[-1] % cls.block_size == 0
|
| 186 |
+
|
| 187 |
+
@classmethod
|
| 188 |
+
def quantize(cls, tensor: np.ndarray | LazyNumpyTensor) -> np.ndarray:
|
| 189 |
+
if not cls.can_quantize(tensor):
|
| 190 |
+
raise QuantError(f"Can't quantize tensor with shape {tensor.shape} to {cls.qtype.name}")
|
| 191 |
+
if isinstance(tensor, LazyNumpyTensor):
|
| 192 |
+
return cls.__quantize_lazy(tensor)
|
| 193 |
+
else:
|
| 194 |
+
return cls.__quantize_array(tensor)
|
| 195 |
+
|
| 196 |
+
@classmethod
|
| 197 |
+
def dequantize(cls, tensor: np.ndarray | LazyNumpyTensor) -> np.ndarray:
|
| 198 |
+
if isinstance(tensor, LazyNumpyTensor):
|
| 199 |
+
return cls.__dequantize_lazy(tensor)
|
| 200 |
+
else:
|
| 201 |
+
return cls.__dequantize_array(tensor)
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
class BF16(__Quant, qtype=GGMLQuantizationType.BF16):
|
| 205 |
+
@classmethod
|
| 206 |
+
# same as ggml_compute_fp32_to_bf16 in ggml-impl.h
|
| 207 |
+
def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 208 |
+
n = blocks.view(np.uint32)
|
| 209 |
+
# force nan to quiet
|
| 210 |
+
n = np.where((n & 0x7fffffff) > 0x7f800000, (n & np.uint32(0xffff0000)) | np.uint32(64 << 16), n)
|
| 211 |
+
# round to nearest even
|
| 212 |
+
n = (np.uint64(n) + (0x7fff + ((n >> 16) & 1))) >> 16
|
| 213 |
+
return n.astype(np.uint16).view(np.uint8)
|
| 214 |
+
|
| 215 |
+
@classmethod
|
| 216 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 217 |
+
return (blocks.view(np.int16).astype(np.int32) << 16).view(np.float32)
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class Q4_0(__Quant, qtype=GGMLQuantizationType.Q4_0):
|
| 221 |
+
@classmethod
|
| 222 |
+
def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 223 |
+
n_blocks = blocks.shape[0]
|
| 224 |
+
|
| 225 |
+
imax = abs(blocks).argmax(axis=-1, keepdims=True)
|
| 226 |
+
max = np.take_along_axis(blocks, imax, axis=-1)
|
| 227 |
+
|
| 228 |
+
d = max / -8
|
| 229 |
+
with np.errstate(divide="ignore"):
|
| 230 |
+
id = np.where(d == 0, 0, 1 / d)
|
| 231 |
+
# FIXME: Q4_0's reference rounding is cursed and depends on FMA
|
| 232 |
+
qs = np.trunc((np.float64(blocks) * np.float64(id)) + np.float64(8.5), dtype=np.float32).astype(np.uint8).clip(0, 15)
|
| 233 |
+
|
| 234 |
+
qs = qs.reshape((n_blocks, 2, cls.block_size // 2))
|
| 235 |
+
qs = qs[..., 0, :] | (qs[..., 1, :] << np.uint8(4))
|
| 236 |
+
|
| 237 |
+
d = d.astype(np.float16).view(np.uint8)
|
| 238 |
+
|
| 239 |
+
return np.concatenate([d, qs], axis=-1)
|
| 240 |
+
|
| 241 |
+
@classmethod
|
| 242 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 243 |
+
n_blocks = blocks.shape[0]
|
| 244 |
+
|
| 245 |
+
d, qs = np.hsplit(blocks, [2])
|
| 246 |
+
|
| 247 |
+
d = d.view(np.float16).astype(np.float32)
|
| 248 |
+
|
| 249 |
+
qs = qs.reshape((n_blocks, -1, 1, cls.block_size // 2)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 250 |
+
qs = (qs & np.uint8(0x0F)).reshape((n_blocks, -1)).astype(np.int8) - np.int8(8)
|
| 251 |
+
|
| 252 |
+
return (d * qs.astype(np.float32))
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
class Q4_1(__Quant, qtype=GGMLQuantizationType.Q4_1):
|
| 256 |
+
@classmethod
|
| 257 |
+
def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 258 |
+
n_blocks = blocks.shape[0]
|
| 259 |
+
|
| 260 |
+
max = blocks.max(axis=-1, keepdims=True)
|
| 261 |
+
min = blocks.min(axis=-1, keepdims=True)
|
| 262 |
+
|
| 263 |
+
d = (max - min) / 15
|
| 264 |
+
with np.errstate(divide="ignore"):
|
| 265 |
+
id = np.where(d == 0, 0, 1 / d)
|
| 266 |
+
qs = np.trunc((blocks - min) * id + np.float32(0.5), dtype=np.float32).astype(np.uint8).clip(0, 15)
|
| 267 |
+
|
| 268 |
+
qs = qs.reshape((n_blocks, 2, cls.block_size // 2))
|
| 269 |
+
qs = qs[..., 0, :] | (qs[..., 1, :] << np.uint8(4))
|
| 270 |
+
|
| 271 |
+
d = d.astype(np.float16).view(np.uint8)
|
| 272 |
+
m = min.astype(np.float16).view(np.uint8)
|
| 273 |
+
|
| 274 |
+
return np.concatenate([d, m, qs], axis=-1)
|
| 275 |
+
|
| 276 |
+
@classmethod
|
| 277 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 278 |
+
n_blocks = blocks.shape[0]
|
| 279 |
+
|
| 280 |
+
d, rest = np.hsplit(blocks, [2])
|
| 281 |
+
m, qs = np.hsplit(rest, [2])
|
| 282 |
+
|
| 283 |
+
d = d.view(np.float16).astype(np.float32)
|
| 284 |
+
m = m.view(np.float16).astype(np.float32)
|
| 285 |
+
|
| 286 |
+
qs = qs.reshape((n_blocks, -1, 1, cls.block_size // 2)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 287 |
+
qs = (qs & np.uint8(0x0F)).reshape((n_blocks, -1)).astype(np.float32)
|
| 288 |
+
|
| 289 |
+
return (d * qs) + m
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
class Q5_0(__Quant, qtype=GGMLQuantizationType.Q5_0):
|
| 293 |
+
@classmethod
|
| 294 |
+
def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 295 |
+
n_blocks = blocks.shape[0]
|
| 296 |
+
|
| 297 |
+
imax = abs(blocks).argmax(axis=-1, keepdims=True)
|
| 298 |
+
max = np.take_along_axis(blocks, imax, axis=-1)
|
| 299 |
+
|
| 300 |
+
d = max / -16
|
| 301 |
+
with np.errstate(divide="ignore"):
|
| 302 |
+
id = np.where(d == 0, 0, 1 / d)
|
| 303 |
+
# FIXME: Q5_0's reference rounding is cursed and depends on FMA
|
| 304 |
+
q = np.trunc((np.float64(blocks) * np.float64(id)) + np.float64(16.5), dtype=np.float32).astype(np.uint8).clip(0, 31)
|
| 305 |
+
|
| 306 |
+
qs = q.reshape((n_blocks, 2, cls.block_size // 2))
|
| 307 |
+
qs = (qs[..., 0, :] & np.uint8(0x0F)) | (qs[..., 1, :] << np.uint8(4))
|
| 308 |
+
|
| 309 |
+
qh = np.packbits(q.reshape((n_blocks, 1, 32)) >> np.uint8(4), axis=-1, bitorder="little").reshape(n_blocks, 4)
|
| 310 |
+
|
| 311 |
+
d = d.astype(np.float16).view(np.uint8)
|
| 312 |
+
|
| 313 |
+
return np.concatenate([d, qh, qs], axis=-1)
|
| 314 |
+
|
| 315 |
+
@classmethod
|
| 316 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 317 |
+
n_blocks = blocks.shape[0]
|
| 318 |
+
|
| 319 |
+
d, rest = np.hsplit(blocks, [2])
|
| 320 |
+
qh, qs = np.hsplit(rest, [4])
|
| 321 |
+
|
| 322 |
+
d = d.view(np.float16).astype(np.float32)
|
| 323 |
+
qh = qh.view(np.uint32)
|
| 324 |
+
|
| 325 |
+
qh = qh.reshape((n_blocks, 1)) >> np.array([i for i in range(32)], dtype=np.uint32).reshape((1, 32))
|
| 326 |
+
ql = qs.reshape((n_blocks, -1, 1, cls.block_size // 2)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 327 |
+
qh = (qh & np.uint32(0x01)).astype(np.uint8)
|
| 328 |
+
ql = (ql & np.uint8(0x0F)).reshape((n_blocks, -1))
|
| 329 |
+
|
| 330 |
+
qs = (ql | (qh << np.uint8(4))).astype(np.int8) - np.int8(16)
|
| 331 |
+
|
| 332 |
+
return (d * qs.astype(np.float32))
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
class Q5_1(__Quant, qtype=GGMLQuantizationType.Q5_1):
|
| 336 |
+
@classmethod
|
| 337 |
+
def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 338 |
+
n_blocks = blocks.shape[0]
|
| 339 |
+
|
| 340 |
+
max = blocks.max(axis=-1, keepdims=True)
|
| 341 |
+
min = blocks.min(axis=-1, keepdims=True)
|
| 342 |
+
|
| 343 |
+
d = (max - min) / 31
|
| 344 |
+
with np.errstate(divide="ignore"):
|
| 345 |
+
id = np.where(d == 0, 0, 1 / d)
|
| 346 |
+
q = np.trunc((blocks - min) * id + np.float32(0.5), dtype=np.float32).astype(np.uint8).clip(0, 31)
|
| 347 |
+
|
| 348 |
+
qs = q.reshape((n_blocks, 2, cls.block_size // 2))
|
| 349 |
+
qs = (qs[..., 0, :] & np.uint8(0x0F)) | (qs[..., 1, :] << np.uint8(4))
|
| 350 |
+
|
| 351 |
+
qh = np.packbits(q.reshape((n_blocks, 1, 32)) >> np.uint8(4), axis=-1, bitorder="little").reshape(n_blocks, 4)
|
| 352 |
+
|
| 353 |
+
d = d.astype(np.float16).view(np.uint8)
|
| 354 |
+
m = min.astype(np.float16).view(np.uint8)
|
| 355 |
+
|
| 356 |
+
return np.concatenate([d, m, qh, qs], axis=-1)
|
| 357 |
+
|
| 358 |
+
@classmethod
|
| 359 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 360 |
+
n_blocks = blocks.shape[0]
|
| 361 |
+
|
| 362 |
+
d, rest = np.hsplit(blocks, [2])
|
| 363 |
+
m, rest = np.hsplit(rest, [2])
|
| 364 |
+
qh, qs = np.hsplit(rest, [4])
|
| 365 |
+
|
| 366 |
+
d = d.view(np.float16).astype(np.float32)
|
| 367 |
+
m = m.view(np.float16).astype(np.float32)
|
| 368 |
+
qh = qh.view(np.uint32)
|
| 369 |
+
|
| 370 |
+
qh = qh.reshape((n_blocks, 1)) >> np.array([i for i in range(32)], dtype=np.uint32).reshape((1, 32))
|
| 371 |
+
ql = qs.reshape((n_blocks, -1, 1, cls.block_size // 2)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 372 |
+
qh = (qh & np.uint32(0x01)).astype(np.uint8)
|
| 373 |
+
ql = (ql & np.uint8(0x0F)).reshape((n_blocks, -1))
|
| 374 |
+
|
| 375 |
+
qs = (ql | (qh << np.uint8(4))).astype(np.float32)
|
| 376 |
+
|
| 377 |
+
return (d * qs) + m
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
class Q8_0(__Quant, qtype=GGMLQuantizationType.Q8_0):
|
| 381 |
+
@classmethod
|
| 382 |
+
# Implementation of Q8_0 with bit-exact same results as reference implementation in ggml-quants.c
|
| 383 |
+
def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 384 |
+
|
| 385 |
+
d = abs(blocks).max(axis=1, keepdims=True) / 127
|
| 386 |
+
with np.errstate(divide="ignore"):
|
| 387 |
+
id = np.where(d == 0, 0, 1 / d)
|
| 388 |
+
qs = np_roundf(blocks * id)
|
| 389 |
+
|
| 390 |
+
# (n_blocks, 2)
|
| 391 |
+
d = d.astype(np.float16).view(np.uint8)
|
| 392 |
+
# (n_blocks, block_size)
|
| 393 |
+
qs = qs.astype(np.int8).view(np.uint8)
|
| 394 |
+
|
| 395 |
+
return np.concatenate([d, qs], axis=1)
|
| 396 |
+
|
| 397 |
+
@classmethod
|
| 398 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 399 |
+
d, x = np.split(blocks, [2], axis=1)
|
| 400 |
+
d = d.view(np.float16).astype(np.float32)
|
| 401 |
+
x = x.view(np.int8).astype(np.float32)
|
| 402 |
+
|
| 403 |
+
return (x * d)
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
class Q2_K(__Quant, qtype=GGMLQuantizationType.Q2_K):
|
| 407 |
+
@classmethod
|
| 408 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 409 |
+
n_blocks = blocks.shape[0]
|
| 410 |
+
|
| 411 |
+
scales, rest = np.hsplit(blocks, [QK_K // 16])
|
| 412 |
+
qs, rest = np.hsplit(rest, [QK_K // 4])
|
| 413 |
+
d, dmin = np.hsplit(rest, [2])
|
| 414 |
+
|
| 415 |
+
d = d.view(np.float16).astype(np.float32)
|
| 416 |
+
dmin = dmin.view(np.float16).astype(np.float32)
|
| 417 |
+
|
| 418 |
+
# (n_blocks, 16, 1)
|
| 419 |
+
dl = (d * (scales & 0xF).astype(np.float32)).reshape((n_blocks, QK_K // 16, 1))
|
| 420 |
+
ml = (dmin * (scales >> 4).astype(np.float32)).reshape((n_blocks, QK_K // 16, 1))
|
| 421 |
+
|
| 422 |
+
shift = np.array([0, 2, 4, 6], dtype=np.uint8).reshape((1, 1, 4, 1))
|
| 423 |
+
|
| 424 |
+
qs = (qs.reshape((n_blocks, -1, 1, 32)) >> shift) & np.uint8(3)
|
| 425 |
+
|
| 426 |
+
qs = qs.reshape((n_blocks, QK_K // 16, 16)).astype(np.float32)
|
| 427 |
+
|
| 428 |
+
qs = dl * qs - ml
|
| 429 |
+
|
| 430 |
+
return qs.reshape((n_blocks, -1))
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
class Q3_K(__Quant, qtype=GGMLQuantizationType.Q3_K):
|
| 434 |
+
@classmethod
|
| 435 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 436 |
+
n_blocks = blocks.shape[0]
|
| 437 |
+
|
| 438 |
+
hmask, rest = np.hsplit(blocks, [QK_K // 8])
|
| 439 |
+
qs, rest = np.hsplit(rest, [QK_K // 4])
|
| 440 |
+
scales, d = np.hsplit(rest, [12])
|
| 441 |
+
|
| 442 |
+
d = d.view(np.float16).astype(np.float32)
|
| 443 |
+
|
| 444 |
+
# The scales are packed at 6-bit each in this pattern:
|
| 445 |
+
# 0: IIIIAAAA
|
| 446 |
+
# 1: JJJJBBBB
|
| 447 |
+
# 2: KKKKCCCC
|
| 448 |
+
# 3: LLLLDDDD
|
| 449 |
+
# 4: MMMMEEEE
|
| 450 |
+
# 5: NNNNFFFF
|
| 451 |
+
# 6: OOOOGGGG
|
| 452 |
+
# 7: PPPPHHHH
|
| 453 |
+
# 8: MMIIEEAA
|
| 454 |
+
# 9: NNJJFFBB
|
| 455 |
+
# 10: OOKKGGCC
|
| 456 |
+
# 11: PPLLHHDD
|
| 457 |
+
lscales, hscales = np.hsplit(scales, [8])
|
| 458 |
+
lscales = lscales.reshape((n_blocks, 1, 8)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 2, 1))
|
| 459 |
+
lscales = lscales.reshape((n_blocks, 16))
|
| 460 |
+
hscales = hscales.reshape((n_blocks, 1, 4)) >> np.array([0, 2, 4, 6], dtype=np.uint8).reshape((1, 4, 1))
|
| 461 |
+
hscales = hscales.reshape((n_blocks, 16))
|
| 462 |
+
scales = (lscales & np.uint8(0x0F)) | ((hscales & np.uint8(0x03)) << np.uint8(4))
|
| 463 |
+
scales = (scales.astype(np.int8) - np.int8(32)).astype(np.float32)
|
| 464 |
+
|
| 465 |
+
dl = (d * scales).reshape((n_blocks, 16, 1))
|
| 466 |
+
|
| 467 |
+
ql = qs.reshape((n_blocks, -1, 1, 32)) >> np.array([0, 2, 4, 6], dtype=np.uint8).reshape((1, 1, 4, 1))
|
| 468 |
+
qh = hmask.reshape(n_blocks, -1, 1, 32) >> np.array([i for i in range(8)], dtype=np.uint8).reshape((1, 1, 8, 1))
|
| 469 |
+
ql = ql.reshape((n_blocks, 16, QK_K // 16)) & np.uint8(3)
|
| 470 |
+
qh = (qh.reshape((n_blocks, 16, QK_K // 16)) & np.uint8(1))
|
| 471 |
+
qh = qh ^ np.uint8(1) # strangely, the offset is zero when the bitmask is 1
|
| 472 |
+
q = (ql.astype(np.int8) - (qh << np.uint8(2)).astype(np.int8)).astype(np.float32)
|
| 473 |
+
|
| 474 |
+
return (dl * q).reshape((n_blocks, QK_K))
|
| 475 |
+
|
| 476 |
+
|
| 477 |
+
class Q4_K(__Quant, qtype=GGMLQuantizationType.Q4_K):
|
| 478 |
+
K_SCALE_SIZE = 12
|
| 479 |
+
|
| 480 |
+
@staticmethod
|
| 481 |
+
def get_scale_min(scales: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
|
| 482 |
+
n_blocks = scales.shape[0]
|
| 483 |
+
scales = scales.view(np.uint8)
|
| 484 |
+
### Unpacking the following: ###
|
| 485 |
+
# 0 EEAAAAAA
|
| 486 |
+
# 1 FFBBBBBB
|
| 487 |
+
# 2 GGCCCCCC
|
| 488 |
+
# 3 HHDDDDDD
|
| 489 |
+
# 4 eeaaaaaa
|
| 490 |
+
# 5 ffbbbbbb
|
| 491 |
+
# 6 ggcccccc
|
| 492 |
+
# 7 hhdddddd
|
| 493 |
+
# 8 eeeeEEEE
|
| 494 |
+
# 9 ffffFFFF
|
| 495 |
+
# 10 ggggGGGG
|
| 496 |
+
# 11 hhhhHHHH
|
| 497 |
+
scales = scales.reshape((n_blocks, 3, 4))
|
| 498 |
+
d, m, m_d = np.split(scales, 3, axis=-2)
|
| 499 |
+
|
| 500 |
+
sc = np.concatenate([d & 0x3F, (m_d & 0x0F) | ((d >> 2) & 0x30)], axis=-1)
|
| 501 |
+
min = np.concatenate([m & 0x3F, (m_d >> 4) | ((m >> 2) & 0x30)], axis=-1)
|
| 502 |
+
|
| 503 |
+
return (sc.reshape((n_blocks, 8)), min.reshape((n_blocks, 8)))
|
| 504 |
+
|
| 505 |
+
@classmethod
|
| 506 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 507 |
+
n_blocks = blocks.shape[0]
|
| 508 |
+
|
| 509 |
+
d, rest = np.hsplit(blocks, [2])
|
| 510 |
+
dmin, rest = np.hsplit(rest, [2])
|
| 511 |
+
scales, qs = np.hsplit(rest, [cls.K_SCALE_SIZE])
|
| 512 |
+
|
| 513 |
+
d = d.view(np.float16).astype(np.float32)
|
| 514 |
+
dmin = dmin.view(np.float16).astype(np.float32)
|
| 515 |
+
|
| 516 |
+
sc, m = Q4_K.get_scale_min(scales)
|
| 517 |
+
|
| 518 |
+
d = (d * sc.astype(np.float32)).reshape((n_blocks, -1, 1))
|
| 519 |
+
dm = (dmin * m.astype(np.float32)).reshape((n_blocks, -1, 1))
|
| 520 |
+
|
| 521 |
+
qs = qs.reshape((n_blocks, -1, 1, 32)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 522 |
+
qs = (qs & np.uint8(0x0F)).reshape((n_blocks, -1, 32)).astype(np.float32)
|
| 523 |
+
|
| 524 |
+
return (d * qs - dm).reshape((n_blocks, QK_K))
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
class Q5_K(__Quant, qtype=GGMLQuantizationType.Q5_K):
|
| 528 |
+
@classmethod
|
| 529 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 530 |
+
n_blocks = blocks.shape[0]
|
| 531 |
+
|
| 532 |
+
d, rest = np.hsplit(blocks, [2])
|
| 533 |
+
dmin, rest = np.hsplit(rest, [2])
|
| 534 |
+
scales, rest = np.hsplit(rest, [Q4_K.K_SCALE_SIZE])
|
| 535 |
+
qh, qs = np.hsplit(rest, [QK_K // 8])
|
| 536 |
+
|
| 537 |
+
d = d.view(np.float16).astype(np.float32)
|
| 538 |
+
dmin = dmin.view(np.float16).astype(np.float32)
|
| 539 |
+
|
| 540 |
+
sc, m = Q4_K.get_scale_min(scales)
|
| 541 |
+
|
| 542 |
+
d = (d * sc.astype(np.float32)).reshape((n_blocks, -1, 1))
|
| 543 |
+
dm = (dmin * m.astype(np.float32)).reshape((n_blocks, -1, 1))
|
| 544 |
+
|
| 545 |
+
ql = qs.reshape((n_blocks, -1, 1, 32)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 546 |
+
qh = qh.reshape((n_blocks, -1, 1, 32)) >> np.array([i for i in range(8)], dtype=np.uint8).reshape((1, 1, 8, 1))
|
| 547 |
+
ql = (ql & np.uint8(0x0F)).reshape((n_blocks, -1, 32))
|
| 548 |
+
qh = (qh & np.uint8(0x01)).reshape((n_blocks, -1, 32))
|
| 549 |
+
q = (ql | (qh << np.uint8(4))).astype(np.float32)
|
| 550 |
+
|
| 551 |
+
return (d * q - dm).reshape((n_blocks, QK_K))
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
class Q6_K(__Quant, qtype=GGMLQuantizationType.Q6_K):
    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize Q6_K blocks (6-bit quants with int8 sub-block scales) to float32."""
        nb = blocks.shape[0]

        # Field layout: low nibbles, high 2-bit fields, per-sub-block scales, fp16 d.
        low, rest = np.hsplit(blocks, [QK_K // 2])
        high, rest = np.hsplit(rest, [QK_K // 4])
        sub_scales, d = np.hsplit(rest, [QK_K // 16])

        sub_scales = sub_scales.view(np.int8).astype(np.float32)
        d = d.view(np.float16).astype(np.float32)
        # One effective scale per 16-weight sub-block.
        d = (d * sub_scales).reshape((nb, QK_K // 16, 1))

        # Two nibbles per byte of `low`, four 2-bit fields per byte of `high`.
        low = low.reshape((nb, -1, 1, 64)) >> np.arange(0, 8, 4, dtype=np.uint8).reshape((1, 1, 2, 1))
        low = (low & np.uint8(0x0F)).reshape((nb, -1, 32))
        high = high.reshape((nb, -1, 1, 32)) >> np.arange(0, 8, 2, dtype=np.uint8).reshape((1, 1, 4, 1))
        high = (high & np.uint8(0x03)).reshape((nb, -1, 32))

        # Recombine into signed 6-bit values centered on zero.
        q = (low | (high << np.uint8(4))).astype(np.int8) - np.int8(32)
        q = q.reshape((nb, QK_K // 16, -1)).astype(np.float32)

        return (d * q).reshape((nb, QK_K))


class TQ1_0(__Quant, qtype=GGMLQuantizationType.TQ1_0):
    @classmethod
    def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Quantize float32 blocks to TQ1_0: ternary weights packed base-3 plus an fp16 scale."""
        n_blocks = blocks.shape[0]

        # Per-block scale is the largest magnitude; guard the reciprocal for all-zero blocks.
        d = abs(blocks).max(axis=-1, keepdims=True)
        with np.errstate(divide="ignore"):
            id = np.where(d == 0, 0, 1 / d)
        qs = np_roundf(blocks * id)
        # Bias ternary {-1, 0, 1} to {0, 1, 2} so base-3 packing is unsigned.
        qs = (qs.astype(np.int8) + np.int8(1)).astype(np.uint8)

        # Three packing regions: two packing 5 ternary digits per byte with
        # powers of 3, and the remainder (qh) packing 4 digits per byte.
        qs0, qs1, qh = qs[..., :(32 * 5)], qs[..., (32 * 5):(48 * 5)], qs[..., (48 * 5):]
        qs0 = qs0.reshape((n_blocks, -1, 5, 32)) * np.array([81, 27, 9, 3, 1], dtype=np.uint8).reshape((1, 1, 5, 1))
        qs0 = np.sum(qs0, axis=-2).reshape((n_blocks, -1))
        qs1 = qs1.reshape((n_blocks, -1, 5, 16)) * np.array([81, 27, 9, 3, 1], dtype=np.uint8).reshape((1, 1, 5, 1))
        qs1 = np.sum(qs1, axis=-2).reshape((n_blocks, -1))
        qh = qh.reshape((n_blocks, -1, 4, 4)) * np.array([81, 27, 9, 3], dtype=np.uint8).reshape((1, 1, 4, 1))
        qh = np.sum(qh, axis=-2).reshape((n_blocks, -1))
        qs = np.concatenate([qs0, qs1, qh], axis=-1)
        # Ceiling-scale each base-3 value from [0, 243) to [0, 256) so that
        # dequantization can recover digits with (q * 3) >> 8 (see below).
        qs = (qs.astype(np.uint16) * 256 + (243 - 1)) // 243

        qs = qs.astype(np.uint8)
        d = d.astype(np.float16).view(np.uint8)

        return np.concatenate([qs, d], axis=-1)

    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize TQ1_0 blocks back to float32."""
        n_blocks = blocks.shape[0]

        # Layout: base-3 packed qs, then qh, then the fp16 scale.
        qs, rest = np.hsplit(blocks, [(QK_K - 4 * QK_K // 64) // 5])
        qh, d = np.hsplit(rest, [QK_K // 64])

        d = d.view(np.float16).astype(np.float32)

        # Multiply by the digit's power of 3; the uint8 multiply wraps mod 256
        # on purpose, discarding higher digits, so the top byte of (q * 3)
        # taken below isolates a single ternary digit.
        qs0, qs1 = qs[..., :32], qs[..., 32:]
        qs0 = qs0.reshape((n_blocks, -1, 1, 32)) * np.array([1, 3, 9, 27, 81], dtype=np.uint8).reshape((1, 1, 5, 1))
        qs0 = qs0.reshape((n_blocks, -1))
        qs1 = qs1.reshape((n_blocks, -1, 1, 16)) * np.array([1, 3, 9, 27, 81], dtype=np.uint8).reshape((1, 1, 5, 1))
        qs1 = qs1.reshape((n_blocks, -1))
        qh = qh.reshape((n_blocks, -1, 1, 4)) * np.array([1, 3, 9, 27], dtype=np.uint8).reshape((1, 1, 4, 1))
        qh = qh.reshape((n_blocks, -1))
        qs = np.concatenate([qs0, qs1, qh], axis=-1)
        # Top byte of q*3 yields a digit in {0, 1, 2}; shift back to {-1, 0, 1}.
        qs = ((qs.astype(np.uint16) * 3) >> 8).astype(np.int8) - np.int8(1)

        return (d * qs.astype(np.float32))


class TQ2_0(__Quant, qtype=GGMLQuantizationType.TQ2_0):
    @classmethod
    def quantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Quantize float32 blocks to TQ2_0: ternary weights, four per byte, plus an fp16 scale."""
        nb = blocks.shape[0]

        # Per-block scale is the largest magnitude; guard against all-zero blocks.
        d = abs(blocks).max(axis=-1, keepdims=True)
        with np.errstate(divide="ignore"):
            inv = np.where(d == 0, 0, 1 / d)

        # Round to {-1, 0, 1}, then bias to {0, 1, 2} so values fit in 2 bits.
        tern = (np_roundf(blocks * inv).astype(np.int8) + np.int8(1)).astype(np.uint8)

        # Pack four 2-bit values into each output byte.
        tern = tern.reshape((nb, -1, 4, 32)) << np.arange(0, 8, 2, dtype=np.uint8).reshape((1, 1, 4, 1))
        packed = np.bitwise_or.reduce(tern, axis=-2).reshape((nb, -1))

        scale_bytes = d.astype(np.float16).view(np.uint8)

        return np.concatenate([packed, scale_bytes], axis=-1)

    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize TQ2_0 blocks back to float32."""
        nb = blocks.shape[0]

        # Layout: packed 2-bit quants followed by the fp16 scale.
        packed, d = np.hsplit(blocks, [QK_K // 4])
        d = d.view(np.float16).astype(np.float32)

        # Unpack four 2-bit fields per byte and shift back to {-1, 0, 1}.
        q = packed.reshape((nb, -1, 1, 32)) >> np.arange(0, 8, 2, dtype=np.uint8).reshape((1, 1, 4, 1))
        q = (q & 0x03).reshape((nb, -1)).astype(np.int8) - np.int8(1)

        return (d * q.astype(np.float32))


class IQ2_XXS(__Quant, qtype=GGMLQuantizationType.IQ2_XXS):
    # 128-entry lookup table mapping a 7-bit sign index to a full 8-lane sign
    # mask (bit i set => negate lane i; bit 7 supplies the 8th lane's sign).
    ksigns: bytes = (
        b"\x00\x81\x82\x03\x84\x05\x06\x87\x88\x09\x0a\x8b\x0c\x8d\x8e\x0f"
        b"\x90\x11\x12\x93\x14\x95\x96\x17\x18\x99\x9a\x1b\x9c\x1d\x1e\x9f"
        b"\xa0\x21\x22\xa3\x24\xa5\xa6\x27\x28\xa9\xaa\x2b\xac\x2d\x2e\xaf"
        b"\x30\xb1\xb2\x33\xb4\x35\x36\xb7\xb8\x39\x3a\xbb\x3c\xbd\xbe\x3f"
        b"\xc0\x41\x42\xc3\x44\xc5\xc6\x47\x48\xc9\xca\x4b\xcc\x4d\x4e\xcf"
        b"\x50\xd1\xd2\x53\xd4\x55\x56\xd7\xd8\x59\x5a\xdb\x5c\xdd\xde\x5f"
        b"\x60\xe1\xe2\x63\xe4\x65\x66\xe7\xe8\x69\x6a\xeb\x6c\xed\xee\x6f"
        b"\xf0\x71\x72\xf3\x74\xf5\xf6\x77\x78\xf9\xfa\x7b\xfc\x7d\x7e\xff"
    )

    # iq2xxs_grid, but with each byte of the original packed in 2 bits,
    # by mapping 0x08 to 0, 0x19 to 1, and 0x2b to 2.
    grid_shape = (256, 8)
    grid_map = (0x08, 0x19, 0x2b)
    grid_hex = (
        b"00000200050008000a00110014002000220028002a0041004400500058006100"
        b"6400800082008a00a20001010401100115014001840198010002020222028202"
        b"010404041004210424044004420448046004810484049004a404000502050805"
        b"200546056905800591050906100640068406a406000805080808140828084108"
        b"440850085208880804094009020a140a01100410101021104010601084109010"
        b"951000110811201150115a118011241245120014081420142514491480141815"
        b"6215001616160118041810184018811800190519a019511a002002200a204420"
        b"6120802082202921482100220222012404241024402456240025412564259026"
        b"082820289428442a014004401040184021402440404048405640604081408440"
        b"9040004120416141804185410142104248425642684200440844204480449944"
        b"124524450046014804481048404845480049584961498249454a904a00500850"
        b"1150195020508050885004514251a4519152905492540a550156545600581158"
        b"195864584059085a046010604060686000615561186260620064056410651265"
        b"84654268008002800a8041808280048118814081118201840484108415844084"
        b"608400854685948509864086608602880489118a0490109024904090a1901691"
        b"8091459200942294449451958198209902a050a085a009a100a218a450a804a9"
    )

    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize IQ2_XXS blocks to float32.

        Block layout: a 2-byte fp16 scale d, then pairs of uint32 words where
        the first word holds four 8-bit codebook indices and the second packs
        four 7-bit sign indices plus a 4-bit sub-scale in its top bits.
        """
        n_blocks = blocks.shape[0]

        d, qs = np.hsplit(blocks, [2])

        d = d.view(np.float16).astype(np.float32)

        qs = qs.view(np.uint32).reshape(n_blocks, -1, 2)

        # Sub-block scale: top 4 bits of the second word -> d * (0.5 + s) / 4.
        db = d * (np.float32(0.5) + (qs[..., 1] >> 28).astype(np.float32)) * np.float32(0.25)
        db = db.reshape((n_blocks, -1, 1, 1))

        # get the sign indices and unpack the bits
        signs = qs[..., 1].reshape((n_blocks, -1, 1)) >> np.array([0, 7, 14, 21], dtype=np.uint32).reshape((1, 1, 4))
        ksigns = np.frombuffer(cls.ksigns, dtype=np.uint8).reshape((1, 1, 1, 128))
        signs = (signs & np.uint32(0x7F)).reshape((n_blocks, -1, 4, 1))
        signs = np.take_along_axis(ksigns, signs, axis=-1)
        signs = signs.reshape((n_blocks, -1, 4, 1)) >> np.array([i for i in range(8)], dtype=np.uint8).reshape((1, 1, 1, 8))
        signs = signs & np.uint8(0x01)
        signs = np.where(signs == 0, np.float32(1), np.float32(-1))
        signs = signs.reshape((n_blocks, -1, 4, 8))

        # Codebook lookup: each byte of the first word selects an 8-value grid row.
        assert cls.grid is not None
        grid = np.take_along_axis(cls.grid, qs[..., 0].copy().view(np.uint8).reshape((n_blocks, -1, 1, 1)), axis=-2)
        grid = grid.reshape((n_blocks, -1, 4, 8))

        return (db * grid * signs).reshape((n_blocks, -1))


class IQ2_XS(__Quant, qtype=GGMLQuantizationType.IQ2_XS):
    # iq2xs_grid, but with each byte of the original packed in 2 bits,
    # by mapping 0x08 to 0, 0x19 to 1, and 0x2b to 2.
    grid_shape = (512, 8)
    grid_map = (0x08, 0x19, 0x2b)
    grid_hex = (
        b"00000200050008000a0011001400160019002000220025002800410044004600"
        b"49005000520055005800610064008000820085008800910094009900a0000101"
        b"04010601090110011201150118011a0121012401400142014501480151015401"
        b"6001680181018401900100020202050208021102140220024102440250025502"
        b"80028a0201040404060409041004120415041804210424044004420445044804"
        b"5104540456046004810484049004000502050505080511051405200541054405"
        b"500561058005010604061006260640064206840600080208050808080a081108"
        b"14082008250841084408500858088008a008aa08010904091009400981098909"
        b"000a200a280a960aa00a01100410061009101010121015101810211024104010"
        b"4210451048105110541060106a10811084109010001102110511081111111411"
        b"2011411144115011801194119611011204120612101240126012001402140514"
        b"0814111414142014411444144914501464148014011504151015401500161416"
        b"49160118041810181218401854188618001905196619511aa91a002002200520"
        b"08200a201120142020204120442050208020a020012104211021402148216521"
        b"002222228022a82201240424102429244024002541255225992501261a26a626"
        b"002808280a28202855288828a22868299029082a202a822a882a8a2a01400440"
        b"0640094010401240154018402140244040404240454048404a40514054406040"
        b"6540814084409040004102410541084111411441204141414441504180418541"
        b"a241014204421042124229424042004402440544084411441444194420444144"
        b"4444504480449444014504451045244540459a4500460a464446504601480448"
        b"1048404845485448624800491149444950496949044a00500250055008501150"
        b"145020502850415044505050805001510451105115514051425100524452aa52"
        b"0154045410542154405460548154a154005508558055885521566856a1560058"
        b"14584158505899581a5940594259855a0160046010604060546062608660a960"
        b"006124624a62926200641664106540654565a46501686a682569066a546a626a"
        b"00800280058008801180148020802a8041804480508080808280a880aa800181"
        b"0481068110814081518159810082208280828282a082a8820184048410841284"
        b"158440846084898400854485a58518866a860088088825885a8880888288a888"
        b"0689228a808a888a968aa88a0190049010904090569084900091229164915692"
        b"89920094059444945094589429959095929541965198a6984999159a609a00a0"
        b"02a008a00aa020a02aa0a0a051a159a1a6a100a202a208a22aa280a2a0a240a4"
        b"95a465a698a60aa820a822a828a8a0a8a8a804a984a986a928aa2aaa91aaaaaa"
    )

    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize IQ2_XS blocks to float32.

        Block layout: a 2-byte fp16 scale d, then uint16 words (a 9-bit
        codebook index in the low bits and a 7-bit sign index in the high
        bits), then 4-bit sub-block scales packed two per byte.
        """
        n_blocks = blocks.shape[0]

        d, rest = np.hsplit(blocks, [2])
        qs, scales = np.hsplit(rest, [2 * QK_K // 8])

        d = d.view(np.float16).astype(np.float32)
        qs = qs.view(np.uint16)

        # Two 4-bit scales per byte; effective scale is d * (0.5 + s) / 4.
        scales = scales.reshape((n_blocks, -1, 1)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2))
        scales = (scales & 0x0F).reshape((n_blocks, -1))
        db = d * (np.float32(0.5) + scales) * np.float32(0.25)
        db = db.reshape((n_blocks, -1, 1, 1))

        # get the sign indices (top 7 bits of each word) and unpack the bits
        signs = np.frombuffer(IQ2_XXS.ksigns, dtype=np.uint8).reshape(1, 1, 128)
        signs = np.take_along_axis(signs, (qs >> 9).reshape((n_blocks, -1, 1)), axis=-1)
        signs = signs.reshape((n_blocks, -1, 1)) >> np.array([i for i in range(8)], dtype=np.uint8).reshape((1, 1, 8))
        signs = signs & np.uint8(0x01)
        signs = np.where(signs == 0, np.float32(1), np.float32(-1))
        signs = signs.reshape((n_blocks, -1, 2, 8))

        # Codebook lookup by the low 9 bits of each word.
        assert cls.grid is not None
        grid = np.take_along_axis(cls.grid, (qs & np.uint16(511)).reshape((n_blocks, -1, 1, 1)), axis=-2)
        grid = grid.reshape((n_blocks, -1, 2, 8))

        return (db * grid * signs).reshape((n_blocks, -1))


class IQ2_S(__Quant, qtype=GGMLQuantizationType.IQ2_S):
    # iq2s_grid, but with each byte of the original packed in 2 bits,
    # by mapping 0x08 to 0, 0x19 to 1, and 0x2b to 2.
    grid_shape = (1024, 8)
    grid_map = (0x08, 0x19, 0x2b)
    grid_hex = (
        b"00000200050008000a0011001400160019002000220025002800410044004600"
        b"490050005200550058006100640066006900800082008500880091009400a000"
        b"a500aa0001010401060109011001120115011801210124014001420145014801"
        b"510154015601590160016501680181018401900192019501a101a40100020202"
        b"050208021102140220022a02410244024602490250025502800285028a029402"
        b"a202010404040604090410041204150418042104240426042904400442044504"
        b"48044a0451045404560459046004620465048104840486048904900495049804"
        b"a104a40400050205050508050a05110514051605190520052505280541054405"
        b"46054905500552055505580561056405800582058505880591059405a0050106"
        b"0406060609061006150640064506480651065406600681068406900600080208"
        b"050808081108140816081908200825082a084108440846084908500852085508"
        b"580861086408800885089408aa08010904091009120915091809210940094509"
        b"480951095409600981099009000a110a140a220a280a2a0a500a990a01100410"
        b"0610091010101210151018102110241026104010421045104810511054105610"
        b"59106010621065106810811084108610901095109810a110a410001102110511"
        b"08110a1111111411161119112011221125112811411144114611491150115211"
        b"5511581161116411801182118511881191119411011204120912101215122112"
        b"2412401245125112541281128412901200140214051408141114141416141914"
        b"2014251428144114441446144914501452145514581461146414801482148514"
        b"881491149414a014011504150615091510151215151518152115241540154215"
        b"4515481551155415601581158415901500160516081611161416201641164416"
        b"50168016aa160118041806180918101815181818211840184218451848185118"
        b"541860188118841800190219051908191119141920194119441950196919a219"
        b"041a101a401a561a00200220052008201120142016201920202025202a204120"
        b"4420502052205520642080208a209420aa200121042110211221152121214021"
        b"4221452151215421602181218421902100220a22222228222a22442250228822"
        b"8a22a82201240424062409241024152418242124242440244224452448245124"
        b"5424602481248424902400250525082511251425202541254425502566258025"
        b"0126042610264026592600280528112814284128442850288a28aa2801290429"
        b"102995290a2a222a642a882a8a2a014004400640094010401240154018401a40"
        b"21402440264040404240454048404a4051405440564059406040624065408140"
        b"8440904095409840a140a4400041024105410841114114411641194120412241"
        b"2541414144414641494150415241554158416141644180418241854188419141"
        b"9441a04101420442104212421542184224424042454248425142544260428142"
        b"844200440244054408440a441144144416441944204422442544284441444444"
        b"46444944504452445544584461446444804482448544884491449444a0440145"
        b"0445064509451045124515451845214524454045424545454845514554456045"
        b"6a4581458445904500460246054608461146144620464146444650468046a546"
        b"0148044809481048124815481848214824484048424845484848514854486048"
        b"84489048004902490549084911491449204941494449504980499649014a044a"
        b"104a404a00500250055008501150145016501950205022502550285041504450"
        b"4650495050505250555058506150645080508250855088509150945001510451"
        b"0651095110511251155118512151245140514251455148515151545160518151"
        b"8451905100520552085211521452205241524452505269528052015404540654"
        b"0954105412541554185421542454405442544554485451545454605481548454"
        b"9054005502550555085511551455205541554455505580550156045610562656"
        b"405600580258055808581158145820584158445850585a588058015904591059"
        b"4059005a195a855aa85a01600460066010601260156018602160246040604560"
        b"4860516054606060846090600061026105610861116114612061416144615061"
        b"806199610462106240625662a162006405640864116414642064416444645064"
        b"806401650465106540654a656865926500669466016804681068656898680069"
        b"2a69426aa16a0080028005800880118014801980208025804180448050805280"
        b"5580588061808080858091809480018104810981108112811581188121812481"
        b"408142814581488151815481818184819081a981008205820a82118214824182"
        b"4482508201840484068409841084128415841884218440844284458448845184"
        b"5484608481848484908400850285058508851185148520854185448550858085"
        b"8a85018604861086298640860088058811881488418844885088a28801890489"
        b"40896589228a588a5a8a828aa28a019004900990109012901590189024904090"
        b"4290459048905190549060908190849090900091059111911491419144915091"
        b"5a910192049210924092a6920094029405940894119414942094419444945094"
        b"8094969401950495109540959895a19500964696649601980498109826984098"
        b"a998009949995299909a00a005a00aa014a022a02aa041a044a050a0a2a0aaa0"
        b"40a165a102a20aa222a228a22aa282a288a28aa2a8a201a404a410a440a489a4"
        b"a4a400a519a551a60aa828a8a2a854a986a908aa0aaa20aa22aa28aa88aaaaaa"
    )

    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize IQ2_S blocks to float32.

        Block layout: a 2-byte fp16 scale d, low 8 bits of the codebook
        indices (qs), explicit per-group sign bytes, high index bits (qh),
        then 4-bit sub-block scales packed two per byte.
        """
        n_blocks = blocks.shape[0]

        d, rest = np.hsplit(blocks, [2])
        qs, rest = np.hsplit(rest, [QK_K // 8])
        signs, rest = np.hsplit(rest, [QK_K // 8])
        qh, scales = np.hsplit(rest, [QK_K // 32])

        d = d.view(np.float16).astype(np.float32)

        # Two 4-bit scales per byte; effective scale is d * (0.5 + s) / 4.
        scales = scales.reshape((n_blocks, -1, 1)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2))
        scales = (scales & 0x0F).reshape((n_blocks, -1))
        db = d * (np.float32(0.5) + scales) * np.float32(0.25)
        db = db.reshape((n_blocks, -1, 1, 1))

        # unpack the sign bits (one byte per group of 8 values)
        signs = signs.reshape((n_blocks, -1, 1)) >> np.array([i for i in range(8)], dtype=np.uint8).reshape((1, 1, 8))
        signs = signs & np.uint8(0x01)
        signs = np.where(signs == 0, np.float32(1), np.float32(-1))
        signs = signs.reshape((n_blocks, -1, 2, 8))

        # Build 10-bit codebook indices: low byte from qs, two high bits from qh.
        qh = qh.reshape((n_blocks, -1, 1)) >> np.array([0, 2, 4, 6], dtype=np.uint8).reshape((1, 1, 4))
        qs = qs.astype(np.uint16) | ((qh & 0x03).astype(np.uint16) << 8).reshape((n_blocks, -1))

        assert cls.grid is not None
        grid = np.take_along_axis(cls.grid, qs.reshape((n_blocks, -1, 1, 1)), axis=-2)
        grid = grid.reshape((n_blocks, -1, 2, 8))

        return (db * grid * signs).reshape((n_blocks, -1))


class IQ3_XXS(__Quant, qtype=GGMLQuantizationType.IQ3_XXS):
    # Packed codebook; expanded by __Quant using grid_shape and grid_map.
    grid_shape = (256, 4)
    grid_map = (0x04, 0x0c, 0x14, 0x1c, 0x24, 0x2c, 0x34, 0x3e)
    grid_hex = (
        b"0000020004001100130017002000220031004200730075000101030110011201"
        b"2101250130013201410154017001000202020402110220022202310233023702"
        b"5102570275020103070310031203250370031304370444045704730475040105"
        b"0705320552053506640610071407160743076107011003101010121021102310"
        b"3010321034104710501000110211111120112211011203121012121221123012"
        b"7212001302132013311346136613011405145014201524154615711505162217"
        b"4017002002201120132020202220262031204220012103210521102112212121"
        b"3021632167217021002202221122172220222222372240225522012310231423"
        b"7023742335245324032527254125742501270327162745270130103012302130"
        b"2330503065307230003102312031313144314631013203321032253252327232"
        b"1133333330344734723400350635223555351436363663363337603704401740"
        b"3540374053405740744120423742404260426642074345430444514464442545"
        b"4345704505471047124730471250415070500051065126515551145232527252"
        b"0253535310542354275472540255315550562457425724604460466064602161"
        b"6161176264623063366344640565526533660367216703700570077010703270"
        b"5270267140711272457252720073157333736073217441740075027524753076"
    )

    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize IQ3_XXS blocks to float32.

        Block layout: a 2-byte fp16 scale d, QK_K // 4 codebook-index bytes
        (qs), then uint32 words packing four 7-bit sign indices plus a 4-bit
        sub-scale in the top bits.
        """
        n_blocks = blocks.shape[0]

        d, rest = np.hsplit(blocks, [2])
        qs, scales = np.hsplit(rest, [QK_K // 4])

        d = d.view(np.float16).astype(np.float32)
        scales = scales.view(np.uint32)

        # Sub-block scale: top 4 bits of each word -> d * (0.5 + s) / 2.
        db = d * (np.float32(0.5) + (scales >> 28).astype(np.float32)) * np.float32(0.5)
        db = db.reshape((n_blocks, -1, 1, 1))

        # get the sign indices and unpack the bits
        signs = scales.reshape((n_blocks, -1, 1)) >> np.array([0, 7, 14, 21], dtype=np.uint32).reshape((1, 1, 4))
        ksigns = np.frombuffer(IQ2_XXS.ksigns, dtype=np.uint8).reshape((1, 1, 1, 128))
        signs = (signs & np.uint32(0x7F)).reshape((n_blocks, -1, 4, 1))
        signs = np.take_along_axis(ksigns, signs, axis=-1)
        signs = signs.reshape((n_blocks, -1, 4, 1)) >> np.array([i for i in range(8)], dtype=np.uint8).reshape((1, 1, 1, 8))
        signs = signs & np.uint8(0x01)
        signs = np.where(signs == 0, np.float32(1), np.float32(-1))
        signs = signs.reshape((n_blocks, -1, 4, 8))

        # Codebook lookup: each qs byte selects a 4-value grid row.
        assert cls.grid is not None
        grid = np.take_along_axis(cls.grid, qs.reshape((n_blocks, -1, 1, 1)), axis=-2)
        grid = grid.reshape((n_blocks, -1, 4, 8))

        return (db * grid * signs).reshape((n_blocks, -1))


class IQ3_S(__Quant, qtype=GGMLQuantizationType.IQ3_S):
    # Packed codebook; expanded by __Quant using grid_shape and grid_map.
    grid_shape = (512, 4)
    grid_map = (0x01, 0x03, 0x05, 0x07, 0x09, 0x0b, 0x0d, 0x0f)
    grid_hex = (
        b"0000010002000500070010001100120014001600200021002500330040004200"
        b"4500470051005300600062007100740077000001010102010401100111011501"
        b"2001230127013101350144016101650172010002010205020702100213021602"
        b"2102250230023402420245024702510253027002730203031103150320032203"
        b"3103330336034403500352036703710375030004130417042104240432044004"
        b"4304510470040205040520052205260533054105450547056605730506061106"
        b"1306310652067106000702070407200722072607330750075407001001100210"
        b"0410101011101310151017102010221031103410361054105610611072100011"
        b"0111031106111011141121113011331141115011521170117611001212121512"
        b"1712201224123212401243125512601272120113041307131013131321132713"
        b"3013341341136213701303140514121414143114331442144614501454140115"
        b"1015131521153015321551152016241627164416461601170317101712172117"
        b"3517411762177017002001200320052007201020122014201620212023202720"
        b"3020322041204320452050205220672070207320752000210221102113211721"
        b"2221252131213421422151210122042207222122232230223722412253225722"
        b"7122742200230223052311232223242331233323422350236623012407242024"
        b"2324322435244124722475240425112522253725402553257025002602260726"
        b"2126552661260527112726273027432750270230113013301530173022303130"
        b"3330353042304430473051306330713001310331053114312131233140316031"
        b"7231763100321232203232323432503201331033143321332333273330334133"
        b"4333473355337333033411341634223431345234603464340135103512352535"
        b"3235443556357335163641360137033720372237353700400440124020402440"
        b"2740324041405040704002410741114113412241304135414341514155410142"
        b"0342104215422142334240425742624270420443114313432043224331433543"
        b"0044024424443744404471440545074521456245134634466046104715473047"
        b"4347514702501050145022504050445047505250665074500151035105511251"
        b"2151325172510052115223523052365253520253075310532753445351536553"
        b"7353015404542054325446541255265551555355425602570457225711601360"
        b"1560316033606060006120612761646112623462426255626262706200631463"
        b"2163406325644364626400650365346560650566406611671367007004700770"
        b"2070227036704070547062700271117124714371457101720472107216722172"
        b"3072517202733273357353730174057413742074507422754275027631760077"
    )

    @classmethod
    def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
        """Dequantize IQ3_S blocks to float32.

        Block layout: a 2-byte fp16 scale d, low codebook-index bytes (qs),
        high index bits (qh), explicit sign bytes, then 4-bit sub-block
        scales packed two per byte.
        """
        n_blocks = blocks.shape[0]

        d, rest = np.hsplit(blocks, [2])
        qs, rest = np.hsplit(rest, [QK_K // 4])
        qh, rest = np.hsplit(rest, [QK_K // 32])
        signs, scales = np.hsplit(rest, [QK_K // 8])

        d = d.view(np.float16).astype(np.float32)

        # Two 4-bit scales per byte; effective scale is d * (1 + 2 * s).
        scales = scales.reshape((n_blocks, -1, 1)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2))
        scales = (scales & 0x0F).reshape((n_blocks, -1))
        db = d * (1 + 2 * scales)
        db = db.reshape((n_blocks, -1, 1, 1))

        # unpack the sign bits
        signs = signs.reshape((n_blocks, -1, 1)) >> np.array([i for i in range(8)], dtype=np.uint8).reshape((1, 1, 8))
        signs = signs & np.uint8(0x01)
        signs = np.where(signs == 0, np.float32(1), np.float32(-1))
        signs = signs.reshape((n_blocks, -1, 4, 8))

        # Build 9-bit codebook indices: low byte from qs, one high bit from qh.
        qh = qh.reshape((n_blocks, -1, 1)) >> np.array([i for i in range(8)], dtype=np.uint8)
        qh = (qh & 0x01).astype(np.uint16).reshape((n_blocks, -1))
        qs = qs.astype(np.uint16) | (qh << 8)

        assert cls.grid is not None
        grid = np.take_along_axis(cls.grid, qs.reshape((n_blocks, -1, 1, 1)), axis=-2)
        grid = grid.reshape((n_blocks, -1, 4, 8))

        return (db * grid * signs).reshape((n_blocks, -1))


class IQ1_S(__Quant, qtype=GGMLQuantizationType.IQ1_S):
|
| 1021 |
+
# iq1s_grid, with each byte packed into 2 bits
|
| 1022 |
+
# -1, 0, 1 <=> 0, 1, 2
|
| 1023 |
+
grid_shape = (2048, 8)
|
| 1024 |
+
grid_map = (-1, 0, 1)
|
| 1025 |
+
grid_hex = (
|
| 1026 |
+
b"00000200050008000a00110015002000220028002a0045005100540056006500"
|
| 1027 |
+
b"8000820088008a009500a000a200a800aa000401050111011401160119011a01"
|
| 1028 |
+
b"2501410146014901520155015a0161016401660168018501910194019601a501"
|
| 1029 |
+
b"0002020208020a0215022002220228022a024502510259026402690280028202"
|
| 1030 |
+
b"88028a02910295029902a002a202a802aa021104140416042504410449045504"
|
| 1031 |
+
b"5a046404650491049904a5040105040505050605150518051a05290540054505"
|
| 1032 |
+
b"4a0550055105540555055605590560056205650568056a058105910595059805"
|
| 1033 |
+
b"9a05a105a405a505a605a9051406190641064406500652065506580660066106"
|
| 1034 |
+
b"6606690685069106940699060008020808080a0815082008220828082a084508"
|
| 1035 |
+
b"5108560865088008820888088a089508a008a208a808aa080509110914091909"
|
| 1036 |
+
b"2409250941095009510955096109640969099109940996099909a509000a020a"
|
| 1037 |
+
b"080a0a0a150a200a220a280a2a0a450a510a590a610a650a800a820a850a880a"
|
| 1038 |
+
b"8a0a950aa00aa20aa80aaa0a1010111014101910241025104110441050105510"
|
| 1039 |
+
b"58106110641065106910911094109610a110a510011104110611091110111211"
|
| 1040 |
+
b"1511181121112411291145114a11501151115211541155115611591160116511"
|
| 1041 |
+
b"841192119511a111a41111121412161225124012461249125212551258125a12"
|
| 1042 |
+
b"641266128512911294129612a512011406140914141415141814191421142614"
|
| 1043 |
+
b"41144514461448144a1451145414551456145914621465146814841489149014"
|
| 1044 |
+
b"94149514981499149a14a114a414a514a914021505150a151115141515151615"
|
| 1045 |
+
b"191520152215251528152a154115441545154615511552155415551556155915"
|
| 1046 |
+
b"5a1561156415651566156915801582158415851588158a159015911594159515"
|
| 1047 |
+
b"961599159a15a015a215a51501160416051606161516161618161a1621162616"
|
| 1048 |
+
b"401642164416451648164a165116551656165816591661166416651668166916"
|
| 1049 |
+
b"6a1686168a1692169516a416a916111816182518411844184618491850185518"
|
| 1050 |
+
b"58185a1860186118641866186918851891189418a5181019121915191a192119"
|
| 1051 |
+
b"25194219441945194819511954195519561959195a19601965196a1989199119"
|
| 1052 |
+
b"921995199819a119a619a919091a161a241a261a441a461a491a501a521a551a"
|
| 1053 |
+
b"581a611a661a691a851a911a961a9a1a0020022008200a201520202022202520"
|
| 1054 |
+
b"28202a20452051205920612065208020822088208a209520a020a220a520a820"
|
| 1055 |
+
b"aa2005211121142119212521422144214921552158215a216121642165216621"
|
| 1056 |
+
b"8521902196219921a521012208220a22112215222022222228222a2245225122"
|
| 1057 |
+
b"562259226522812288228a2291229522a022a222a822aa220524142416241924"
|
| 1058 |
+
b"252444244524462449245224552458245a2466248524912494249924a124a524"
|
| 1059 |
+
b"0925152521252925402545254825512554255525592562256525682589259025"
|
| 1060 |
+
b"9425952598259a25a125a425a625a92505261026122619262526412649265526"
|
| 1061 |
+
b"6026612669268426862690269a260028022808280a2815282028222828282a28"
|
| 1062 |
+
b"45285128542865288028822888288a28a028a228a828aa280929112914291929"
|
| 1063 |
+
b"2529462949295229552961296429662969298529902996299929a429a529002a"
|
| 1064 |
+
b"022a082a0a2a202a222a282a2a2a452a512a562a592a652a802a822a882a8a2a"
|
| 1065 |
+
b"952aa02aa22aa82aaa2a054011401640254049405240554058405a4061406440"
|
| 1066 |
+
b"664094409940a140a6400041014104410641094112411541164118411a412141"
|
| 1067 |
+
b"26412941454148414a41514154415541564159415a41654168416a4181418441"
|
| 1068 |
+
b"8641904192419541a041a141a241054211421442164225424142524255425a42"
|
| 1069 |
+
b"6442694289429442a5420144154419442944454448444a445144544455445644"
|
| 1070 |
+
b"61446244654468446a44814486448944904492449544a044a144a94401450245"
|
| 1071 |
+
b"05450a4511451445154516451945204525452a45414544454545464549455045"
|
| 1072 |
+
b"5145544555455645584559456145644565456645694582458445854588459145"
|
| 1073 |
+
b"94459545964599459a45a545a845aa450146054609461446154618461a462146"
|
| 1074 |
+
b"2446294640464246454648465046514652465546564659466246654668468146"
|
| 1075 |
+
b"85468a4694469546a146a446a6460548114815481a4825484248494850485548"
|
| 1076 |
+
b"5848614864486648694885489148944896489948a5480149054906490a491049"
|
| 1077 |
+
b"144915491849214924492649404945494a495149524954495549564959496049"
|
| 1078 |
+
b"6249654966496a49864989499249954996499849a149a449a649a949164a444a"
|
| 1079 |
+
b"464a494a554a584a5a4a644a694a944aa54a0150045005500650095012501550"
|
| 1080 |
+
b"1a50215024502950405045504850515054505550565059506550685086508950"
|
| 1081 |
+
b"95509850a050a150a650a9500551085109510a51115114511551165118511951"
|
| 1082 |
+
b"20512551265128512a5141514451455146514951505151515251545155515651"
|
| 1083 |
+
b"585159515a51615164516551665169518251855191519451955196519951a051"
|
| 1084 |
+
b"a551aa5101520652125215521a5221522452425245524a525152545255525652"
|
| 1085 |
+
b"595262526552855290529252955299529a52a452045405541154145415541654"
|
| 1086 |
+
b"185419542154255428542a54415444544554465449544a545054515454545554"
|
| 1087 |
+
b"5654585459545a54615462546454655466546954805488548a54915494549554"
|
| 1088 |
+
b"96549954a154a454a554aa540155025504550555065509551055115512551455"
|
| 1089 |
+
b"1555165519551a55215524552555265529554055415542554455455546554855"
|
| 1090 |
+
b"4955505551555255545555555655585559555a55605561556455655566556855"
|
| 1091 |
+
b"69556a5581558455855589558a559055915594559555965598559955a155a455"
|
| 1092 |
+
b"a555a655a9550056015602560456065608560956115614561556185619562056"
|
| 1093 |
+
b"2156225624562556265628562956415645564656485649564a56505651565256"
|
| 1094 |
+
b"545655565656585659565a566156645665566956825685568656885689568a56"
|
| 1095 |
+
b"915695569a56a256a556a656a856a95604580558065809581058155818582158"
|
| 1096 |
+
b"2a58455848584a58515854585558565858585958605862586458655882588958"
|
| 1097 |
+
b"9058925895589858a158a9580159025905590a59115914591559165919592559"
|
| 1098 |
+
b"41594459455946594959505951595259545955595659585959595a5961596459"
|
| 1099 |
+
b"655966596959815985598959915994599559965998599959a559045a085a155a"
|
| 1100 |
+
b"1a5a205a255a265a295a455a485a495a515a555a565a585a595a625a655a685a"
|
| 1101 |
+
b"6a5a815a8a5a925a955a965a985a9a5aa15a0560146016601960256044605060"
|
| 1102 |
+
b"5560566058605a60616064606660696081609660a56001610461066109611261"
|
| 1103 |
+
b"15612161226126612961456149615161556156615961656166616a6184618a61"
|
| 1104 |
+
b"92619561a161a661a96111621662196240624162466255625662586260628562"
|
| 1105 |
+
b"91629662a56211641264156416641a6421642664296440644264456448644a64"
|
| 1106 |
+
b"516454645564566459645a646064626465648464856489649064926494649564"
|
| 1107 |
+
b"966498649a64a164a464a964056508650a651165156516651965446545654665"
|
| 1108 |
+
b"496550655165546555655665596561656465656566656965866589658a659165"
|
| 1109 |
+
b"9565966599659a65a265a565a665a86502660966156620662666286629664066"
|
| 1110 |
+
b"456648664a66516654665566566658665a666066656668668066826685668a66"
|
| 1111 |
+
b"9466966698669966a066a466a666aa661668196825684168526855685a686168"
|
| 1112 |
+
b"6968856891689868a66801690469106915692169246926692969406941694569"
|
| 1113 |
+
b"4669486951695469556956695969606965696a69826984698a699569a169a469"
|
| 1114 |
+
b"a569a969116a166a186a416a446a496a506a556a586a5a6a646a656a696a866a"
|
| 1115 |
+
b"946a986a9a6aa66a0080028008800a802080228028802a804580508051805480"
|
| 1116 |
+
b"5680598065808080828088808a809580a080a280a880aa800581118114811681"
|
| 1117 |
+
b"1981258141814481498150815281558156815881598164816681698185818981"
|
| 1118 |
+
b"948196819981a5810082028208820a8215822082228228822a82518254825982"
|
| 1119 |
+
b"65828082828288828a829582a082a282a882aa82148419844184448451845584"
|
| 1120 |
+
b"5a846184648469849484998401850985128515851a8526852985408541854585"
|
| 1121 |
+
b"4885518554855585568559855a856585668568856a8581858485868589859085"
|
| 1122 |
+
b"928595859885a68511861686198625864186448649864a865086558659865a86"
|
| 1123 |
+
b"618666866a86858691869a86a4860088028808880a8815882088228828882a88"
|
| 1124 |
+
b"41884588518854885988658869888088828888888a889588a088a288a888aa88"
|
| 1125 |
+
b"05890689118914891689258941894489468949895089528955895a8961896489"
|
| 1126 |
+
b"858996899989a589008a028a088a0a8a158a208a228a288a2a8a458a518a548a"
|
| 1127 |
+
b"568a808a828a888a8a8a958aa08aa28aa88aaa8a059011901690189019902590"
|
| 1128 |
+
b"419046904990559058905a9069906a9085909190949096909990a59001910491"
|
| 1129 |
+
b"069109911091159118911a912191249126912991409145915091519154915591"
|
| 1130 |
+
b"569159916291659184918691929195919891a191a491a691a991059211921492"
|
| 1131 |
+
b"19922592449246924992509252925592589266926992859294929692a9920194"
|
| 1132 |
+
b"04940694109415941894269440944a9451945494559456945894599460946194"
|
| 1133 |
+
b"62946594849486949294949495949894a194a9940095059508950a9510951195"
|
| 1134 |
+
b"14951595169519952195259529952a9541954495459546954995509551955295"
|
| 1135 |
+
b"549555955695589559955a956195649565956695699581958595889591959295"
|
| 1136 |
+
b"94959595969599959a95a095a295a595a895aa95019604961096159619962096"
|
| 1137 |
+
b"2696299645964896499651965296559656965996659668968296849689968a96"
|
| 1138 |
+
b"929694969596a496a696a9960598169819982598419846985098529855985698"
|
| 1139 |
+
b"5a98649865988598919896989998a59804990699099910991299159918991a99"
|
| 1140 |
+
b"209921992499269940994299459948994a995199549955995699599962996599"
|
| 1141 |
+
b"66996a99819984999099929995999a99a199a699059a159a259a449a469a499a"
|
| 1142 |
+
b"509a559a589a619a859a919a949a959a969a00a002a008a00aa015a020a022a0"
|
| 1143 |
+
b"28a02aa045a051a054a056a059a080a082a088a08aa095a0a0a0a2a0a8a0aaa0"
|
| 1144 |
+
b"05a109a111a114a116a119a11aa146a149a151a155a158a15aa161a164a185a1"
|
| 1145 |
+
b"90a192a196a199a102a208a20aa210a219a222a228a22aa245a251a256a259a2"
|
| 1146 |
+
b"65a280a282a288a28aa295a2a0a2a2a2a8a2aaa219a425a441a444a450a454a4"
|
| 1147 |
+
b"55a458a45aa461a465a466a468a469a485a406a509a510a512a515a518a526a5"
|
| 1148 |
+
b"29a542a545a551a554a555a556a559a565a56aa581a584a585a586a589a592a5"
|
| 1149 |
+
b"95a598a505a611a616a61aa621a625a644a646a64aa652a655a656a658a660a6"
|
| 1150 |
+
b"62a686a690a695a696a699a6a1a6a4a6a6a600a802a808a80aa820a822a828a8"
|
| 1151 |
+
b"2aa851a854a856a859a880a882a888a88aa895a8a0a8a2a8a8a8aaa805a914a9"
|
| 1152 |
+
b"19a921a925a941a950a955a95aa961a966a969a990a996a900aa02aa08aa0aaa"
|
| 1153 |
+
b"20aa22aa28aa2aaa51aa54aa56aa80aa82aa88aa8aaa95aaa0aaa2aaa8aaaaaa"
|
| 1154 |
+
)
|
| 1155 |
+
|
| 1156 |
+
delta = np.float32(0.125)
|
| 1157 |
+
|
| 1158 |
+
@classmethod
|
| 1159 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 1160 |
+
n_blocks = blocks.shape[0]
|
| 1161 |
+
|
| 1162 |
+
d, rest = np.hsplit(blocks, [2])
|
| 1163 |
+
qs, qh = np.hsplit(rest, [QK_K // 8])
|
| 1164 |
+
|
| 1165 |
+
d = d.view(np.float16).astype(np.float32)
|
| 1166 |
+
qh = qh.view(np.uint16)
|
| 1167 |
+
|
| 1168 |
+
dl = d * (2 * ((qh >> 12) & 7) + 1)
|
| 1169 |
+
dl = dl.reshape((n_blocks, -1, 1, 1))
|
| 1170 |
+
delta = np.where((qh & np.uint16(0x8000)) == 0, cls.delta, -cls.delta)
|
| 1171 |
+
delta = delta.reshape((n_blocks, -1, 1, 1))
|
| 1172 |
+
|
| 1173 |
+
qh = qh.reshape((n_blocks, -1, 1)) >> np.array([0, 3, 6, 9], dtype=np.uint16).reshape((1, 1, 4))
|
| 1174 |
+
qs = qs.astype(np.uint16) | ((qh & 7) << 8).reshape((n_blocks, -1))
|
| 1175 |
+
|
| 1176 |
+
assert cls.grid is not None
|
| 1177 |
+
grid = np.take_along_axis(cls.grid, qs.reshape((n_blocks, -1, 1, 1)), axis=-2)
|
| 1178 |
+
grid = grid.reshape((n_blocks, -1, 4, 8))
|
| 1179 |
+
|
| 1180 |
+
return (dl * (grid + delta)).reshape((n_blocks, -1))
|
| 1181 |
+
|
| 1182 |
+
|
| 1183 |
+
class IQ1_M(__Quant, qtype=GGMLQuantizationType.IQ1_M):
|
| 1184 |
+
grid_shape = IQ1_S.grid_shape
|
| 1185 |
+
grid_map = IQ1_S.grid_map
|
| 1186 |
+
grid_hex = IQ1_S.grid_hex
|
| 1187 |
+
|
| 1188 |
+
delta = IQ1_S.delta
|
| 1189 |
+
|
| 1190 |
+
# Okay *this* type is weird. It's the only one which stores the f16 scales in multiple parts.
|
| 1191 |
+
@classmethod
|
| 1192 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 1193 |
+
n_blocks = blocks.shape[0]
|
| 1194 |
+
|
| 1195 |
+
qs, rest = np.hsplit(blocks, [QK_K // 8])
|
| 1196 |
+
qh, scales = np.hsplit(rest, [QK_K // 16])
|
| 1197 |
+
|
| 1198 |
+
# The f16 scale is packed across multiple bytes
|
| 1199 |
+
scales = scales.view(np.uint16)
|
| 1200 |
+
d = (scales.reshape((n_blocks, 4)) & np.uint16(0xF000)) >> np.array([12, 8, 4, 0], dtype=np.uint16).reshape((1, 4))
|
| 1201 |
+
d = d[..., 0] | d[..., 1] | d[..., 2] | d[..., 3]
|
| 1202 |
+
d = d.view(np.float16).astype(np.float32).reshape((n_blocks, 1))
|
| 1203 |
+
|
| 1204 |
+
scales = scales.reshape(n_blocks, -1, 1) >> np.array([0, 3, 6, 9], dtype=np.uint16).reshape((1, 1, 4))
|
| 1205 |
+
scales = (scales & 0x07).reshape((n_blocks, -1))
|
| 1206 |
+
dl = d * (2 * scales + 1)
|
| 1207 |
+
dl = dl.reshape((n_blocks, -1, 2, 1, 1))
|
| 1208 |
+
|
| 1209 |
+
qh = qh.reshape((n_blocks, -1, 1)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2))
|
| 1210 |
+
qs = qs.astype(np.uint16) | ((qh & 0x07).astype(np.uint16) << 8).reshape((n_blocks, -1))
|
| 1211 |
+
|
| 1212 |
+
delta = np.where(qh & 0x08 == 0, cls.delta, -cls.delta)
|
| 1213 |
+
delta = delta.reshape((n_blocks, -1, 2, 2, 1))
|
| 1214 |
+
|
| 1215 |
+
assert cls.grid is not None
|
| 1216 |
+
grid = np.take_along_axis(cls.grid, qs.reshape((n_blocks, -1, 1, 1)), axis=-2)
|
| 1217 |
+
grid = grid.reshape((n_blocks, -1, 2, 2, 8))
|
| 1218 |
+
|
| 1219 |
+
return (dl * (grid + delta)).reshape((n_blocks, -1))
|
| 1220 |
+
|
| 1221 |
+
|
| 1222 |
+
class IQ4_NL(__Quant, qtype=GGMLQuantizationType.IQ4_NL):
|
| 1223 |
+
kvalues = (-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113)
|
| 1224 |
+
|
| 1225 |
+
@classmethod
|
| 1226 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 1227 |
+
n_blocks = blocks.shape[0]
|
| 1228 |
+
|
| 1229 |
+
d, qs = np.hsplit(blocks, [2])
|
| 1230 |
+
|
| 1231 |
+
d = d.view(np.float16).astype(np.float32)
|
| 1232 |
+
|
| 1233 |
+
qs = qs.reshape((n_blocks, -1, 1, cls.block_size // 2)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 1234 |
+
|
| 1235 |
+
qs = (qs & np.uint8(0x0F)).reshape((n_blocks, -1, 1))
|
| 1236 |
+
|
| 1237 |
+
kvalues = np.array(cls.kvalues, dtype=np.int8).reshape(1, 1, 16)
|
| 1238 |
+
qs = np.take_along_axis(kvalues, qs, axis=-1).astype(np.float32).reshape((n_blocks, -1))
|
| 1239 |
+
|
| 1240 |
+
return (d * qs)
|
| 1241 |
+
|
| 1242 |
+
|
| 1243 |
+
class IQ4_XS(__Quant, qtype=GGMLQuantizationType.IQ4_XS):
|
| 1244 |
+
@classmethod
|
| 1245 |
+
def dequantize_blocks(cls, blocks: np.ndarray) -> np.ndarray:
|
| 1246 |
+
n_blocks = blocks.shape[0]
|
| 1247 |
+
|
| 1248 |
+
d, rest = np.hsplit(blocks, [2])
|
| 1249 |
+
scales_h, rest = np.hsplit(rest, [2])
|
| 1250 |
+
scales_l, qs = np.hsplit(rest, [QK_K // 64])
|
| 1251 |
+
|
| 1252 |
+
d = d.view(np.float16).astype(np.float32)
|
| 1253 |
+
scales_h = scales_h.view(np.uint16)
|
| 1254 |
+
|
| 1255 |
+
scales_l = scales_l.reshape((n_blocks, -1, 1)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2))
|
| 1256 |
+
scales_h = scales_h.reshape((n_blocks, 1, -1)) >> np.array([2 * i for i in range(QK_K // 32)], dtype=np.uint16).reshape((1, -1, 1))
|
| 1257 |
+
scales_l = scales_l.reshape((n_blocks, -1)) & np.uint8(0x0F)
|
| 1258 |
+
scales_h = scales_h.reshape((n_blocks, -1)).astype(np.uint8) & np.uint8(0x03)
|
| 1259 |
+
|
| 1260 |
+
scales = (scales_l | (scales_h << np.uint8(4))).astype(np.int8) - np.int8(32)
|
| 1261 |
+
dl = (d * scales.astype(np.float32)).reshape((n_blocks, -1, 1))
|
| 1262 |
+
|
| 1263 |
+
qs = qs.reshape((n_blocks, -1, 1, 16)) >> np.array([0, 4], dtype=np.uint8).reshape((1, 1, 2, 1))
|
| 1264 |
+
qs = qs.reshape((n_blocks, -1, 32, 1)) & np.uint8(0x0F)
|
| 1265 |
+
|
| 1266 |
+
kvalues = np.array(IQ4_NL.kvalues, dtype=np.int8).reshape((1, 1, 1, -1))
|
| 1267 |
+
qs = np.take_along_axis(kvalues, qs, axis=-1).astype(np.float32).reshape((n_blocks, -1, 32))
|
| 1268 |
+
|
| 1269 |
+
return (dl * qs).reshape((n_blocks, -1))
|
lib/python3.13/site-packages/gguf/tensor_mapping.py
ADDED
|
@@ -0,0 +1,1280 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Sequence
|
| 4 |
+
|
| 5 |
+
from .constants import MODEL_ARCH, MODEL_TENSOR, MODEL_TENSORS, TENSOR_NAMES
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TensorNameMap:
|
| 9 |
+
mappings_cfg: dict[MODEL_TENSOR, tuple[str, ...]] = {
|
| 10 |
+
# Token embeddings
|
| 11 |
+
MODEL_TENSOR.TOKEN_EMBD: (
|
| 12 |
+
"gpt_neox.embed_in", # gptneox
|
| 13 |
+
"transformer.wte", # gpt2 gpt-j mpt refact qwen dbrx jais exaone
|
| 14 |
+
"transformer.word_embeddings", # falcon
|
| 15 |
+
"word_embeddings", # bloom
|
| 16 |
+
"model.embed_tokens", # llama-hf nemotron olmoe olmo2 rwkv6qwen2 glm4-0414
|
| 17 |
+
"tok_embeddings", # llama-pth
|
| 18 |
+
"embeddings.word_embeddings", # bert nomic-bert
|
| 19 |
+
"language_model.embedding.word_embeddings", # persimmon
|
| 20 |
+
"wte", # gpt2
|
| 21 |
+
"transformer.embd.wte", # phi2
|
| 22 |
+
"model.tok_embeddings", # internlm2
|
| 23 |
+
"model.embedding", # mamba-qbert
|
| 24 |
+
"backbone.embedding", # mamba
|
| 25 |
+
"backbone.embeddings", # mamba-hf
|
| 26 |
+
"transformer.in_out_embed", # Grok
|
| 27 |
+
"embedding.word_embeddings", # chatglm
|
| 28 |
+
"transformer.token_embeddings", # openelm
|
| 29 |
+
"shared", # t5
|
| 30 |
+
"rwkv.embeddings", # rwkv6
|
| 31 |
+
"model.embeddings", # rwkv7
|
| 32 |
+
"model.word_embeddings", # bailingmoe
|
| 33 |
+
"language_model.model.embed_tokens", # llama4
|
| 34 |
+
"encoder", # neobert
|
| 35 |
+
),
|
| 36 |
+
|
| 37 |
+
# Token type embeddings
|
| 38 |
+
MODEL_TENSOR.TOKEN_TYPES: (
|
| 39 |
+
"embeddings.token_type_embeddings", # bert nomic-bert
|
| 40 |
+
),
|
| 41 |
+
|
| 42 |
+
# Normalization of token embeddings
|
| 43 |
+
MODEL_TENSOR.TOKEN_EMBD_NORM: (
|
| 44 |
+
"word_embeddings_layernorm", # bloom
|
| 45 |
+
"embeddings.LayerNorm", # bert
|
| 46 |
+
"emb_ln", # nomic-bert
|
| 47 |
+
"transformer.norm", # openelm
|
| 48 |
+
"rwkv.blocks.0.pre_ln", # rwkv
|
| 49 |
+
"rwkv.blocks.0.pre_ln", # rwkv6
|
| 50 |
+
"model.pre_ln", # rwkv7
|
| 51 |
+
"model.layers.0.pre_norm", # rwkv7
|
| 52 |
+
"backbone.norm", # wavtokenizer
|
| 53 |
+
),
|
| 54 |
+
|
| 55 |
+
# Position embeddings
|
| 56 |
+
MODEL_TENSOR.POS_EMBD: (
|
| 57 |
+
"transformer.wpe", # gpt2
|
| 58 |
+
"embeddings.position_embeddings", # bert
|
| 59 |
+
"wpe", # gpt2
|
| 60 |
+
),
|
| 61 |
+
|
| 62 |
+
# Output
|
| 63 |
+
MODEL_TENSOR.OUTPUT: (
|
| 64 |
+
"embed_out", # gptneox
|
| 65 |
+
"lm_head", # gpt2 mpt falcon llama-hf baichuan qwen mamba dbrx jais nemotron exaone olmoe olmo2 phimoe
|
| 66 |
+
"output", # llama-pth bloom internlm2
|
| 67 |
+
"word_embeddings_for_head", # persimmon
|
| 68 |
+
"lm_head.linear", # phi2
|
| 69 |
+
"output_layer", # chatglm
|
| 70 |
+
"head", # rwkv
|
| 71 |
+
"head.out", # wavtokenizer
|
| 72 |
+
"lm_head", # llama4
|
| 73 |
+
),
|
| 74 |
+
|
| 75 |
+
# Output norm
|
| 76 |
+
MODEL_TENSOR.OUTPUT_NORM: (
|
| 77 |
+
"gpt_neox.final_layer_norm", # gptneox
|
| 78 |
+
"transformer.ln_f", # gpt2 gpt-j falcon jais exaone
|
| 79 |
+
"model.norm", # llama-hf baichuan internlm2 olmoe olmo2 phimoe
|
| 80 |
+
"norm", # llama-pth
|
| 81 |
+
"transformer.norm_f", # mpt dbrx
|
| 82 |
+
"ln_f", # refact bloom qwen gpt2
|
| 83 |
+
"language_model.encoder.final_layernorm", # persimmon
|
| 84 |
+
"model.final_layernorm", # persimmon
|
| 85 |
+
"lm_head.ln", # phi2
|
| 86 |
+
"model.norm_f", # mamba-qbert
|
| 87 |
+
"backbone.norm_f", # mamba
|
| 88 |
+
"transformer.rms_norm", # Grok
|
| 89 |
+
"encoder.final_layernorm", # chatglm
|
| 90 |
+
"transformer.norm", # openelm
|
| 91 |
+
"model.norm", # nemotron
|
| 92 |
+
"rwkv.ln_out", # rwkv6
|
| 93 |
+
"model.ln_out", # rwkv7
|
| 94 |
+
"backbone.final_layer_norm", # wavtokenizer
|
| 95 |
+
"model.norm", # llama4
|
| 96 |
+
),
|
| 97 |
+
|
| 98 |
+
# Rope frequencies
|
| 99 |
+
MODEL_TENSOR.ROPE_FREQS: (
|
| 100 |
+
"rope.freqs", # llama-pth
|
| 101 |
+
"rotary_pos_emb.inv_freq", # chatglm
|
| 102 |
+
),
|
| 103 |
+
|
| 104 |
+
MODEL_TENSOR.ROPE_FACTORS_LONG: (),
|
| 105 |
+
MODEL_TENSOR.ROPE_FACTORS_SHORT: (),
|
| 106 |
+
|
| 107 |
+
MODEL_TENSOR.CONV1D: (
|
| 108 |
+
"backbone.embed", # roberta
|
| 109 |
+
),
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
block_mappings_cfg: dict[MODEL_TENSOR, tuple[str, ...]] = {
|
| 113 |
+
# Attention norm
|
| 114 |
+
MODEL_TENSOR.ATTN_NORM: (
|
| 115 |
+
"gpt_neox.layers.{bid}.input_layernorm", # gptneox
|
| 116 |
+
"transformer.h.{bid}.ln_1", # gpt2 gpt-j refact qwen jais exaone
|
| 117 |
+
"transformer.blocks.{bid}.norm_1", # mpt
|
| 118 |
+
"transformer.h.{bid}.input_layernorm", # falcon7b
|
| 119 |
+
"h.{bid}.input_layernorm", # bloom
|
| 120 |
+
"transformer.h.{bid}.ln_mlp", # falcon40b
|
| 121 |
+
"model.layers.{bid}.input_layernorm", # llama-hf nemotron olmoe phimoe
|
| 122 |
+
"layers.{bid}.attention_norm", # llama-pth
|
| 123 |
+
"language_model.encoder.layers.{bid}.input_layernorm", # persimmon
|
| 124 |
+
"model.layers.{bid}.ln1", # yi
|
| 125 |
+
"h.{bid}.ln_1", # gpt2
|
| 126 |
+
"transformer.h.{bid}.ln", # phi2
|
| 127 |
+
"model.layers.layers.{bid}.norm", # plamo
|
| 128 |
+
"model.layers.{bid}.attention_norm", # internlm2
|
| 129 |
+
"model.layers.{bid}.norm", # mamba-qbert
|
| 130 |
+
"backbone.layers.{bid}.norm", # mamba
|
| 131 |
+
"transformer.decoder_layer.{bid}.rms_norm", # Grok
|
| 132 |
+
"transformer.blocks.{bid}.norm_attn_norm.norm_1", # dbrx
|
| 133 |
+
"encoder.layers.{bid}.input_layernorm", # chatglm
|
| 134 |
+
"transformer.layers.{bid}.attn_norm", # openelm
|
| 135 |
+
"rwkv.blocks.{bid}.ln1", # rwkv6
|
| 136 |
+
"model.layers.{bid}.ln1", # rwkv7
|
| 137 |
+
"model.layers.{bid}.input_layernorm", # llama4
|
| 138 |
+
"transformer_encoder.{bid}.attention_norm", # neobert
|
| 139 |
+
),
|
| 140 |
+
|
| 141 |
+
# Attention norm 2
|
| 142 |
+
MODEL_TENSOR.ATTN_NORM_2: (
|
| 143 |
+
"transformer.h.{bid}.ln_attn", # falcon40b
|
| 144 |
+
"encoder.layer.{bid}.layer_norm_1", # jina-v2-code
|
| 145 |
+
"rwkv.blocks.{bid}.ln2", # rwkv6
|
| 146 |
+
"model.layers.{bid}.ln2", # rwkv7
|
| 147 |
+
),
|
| 148 |
+
|
| 149 |
+
# Attention query-key-value
|
| 150 |
+
MODEL_TENSOR.ATTN_QKV: (
|
| 151 |
+
"gpt_neox.layers.{bid}.attention.query_key_value", # gptneox
|
| 152 |
+
"transformer.h.{bid}.attn.c_attn", # gpt2 qwen jais
|
| 153 |
+
"transformer.blocks.{bid}.attn.Wqkv", # mpt
|
| 154 |
+
"transformer.blocks.{bid}.norm_attn_norm.attn.Wqkv", # dbrx
|
| 155 |
+
"transformer.h.{bid}.self_attention.query_key_value", # falcon
|
| 156 |
+
"h.{bid}.self_attention.query_key_value", # bloom
|
| 157 |
+
"language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon
|
| 158 |
+
"model.layers.{bid}.self_attn.query_key_value", # persimmon
|
| 159 |
+
"h.{bid}.attn.c_attn", # gpt2
|
| 160 |
+
"transformer.h.{bid}.mixer.Wqkv", # phi2
|
| 161 |
+
"encoder.layers.{bid}.attn.Wqkv", # nomic-bert
|
| 162 |
+
"encoder.layers.{bid}.mixer.Wqkv", # jina
|
| 163 |
+
"model.layers.{bid}.self_attn.qkv_proj", # phi3
|
| 164 |
+
"encoder.layers.{bid}.self_attention.query_key_value", # chatglm
|
| 165 |
+
"transformer.layers.{bid}.attn.qkv_proj", # openelm
|
| 166 |
+
"transformer_encoder.{bid}.qkv", # neobert
|
| 167 |
+
),
|
| 168 |
+
|
| 169 |
+
# Attention query
|
| 170 |
+
MODEL_TENSOR.ATTN_Q: (
|
| 171 |
+
"model.layers.{bid}.self_attn.q_proj", # llama-hf nemotron olmoe olmo2 phimoe
|
| 172 |
+
"model.layers.{bid}.self_attn.q_proj_no_perm", # llama-custom
|
| 173 |
+
"layers.{bid}.attention.wq", # llama-pth
|
| 174 |
+
"encoder.layer.{bid}.attention.self.query", # bert
|
| 175 |
+
"transformer.layer.{bid}.attention.q_lin", # distillbert
|
| 176 |
+
"transformer.h.{bid}.attn.q_proj", # gpt-j
|
| 177 |
+
"model.layers.layers.{bid}.self_attn.q_proj", # plamo
|
| 178 |
+
"model.layers.{bid}.attention.wq", # internlm2
|
| 179 |
+
"transformer.decoder_layer.{bid}.multi_head_attention.query",# Grok
|
| 180 |
+
"transformer.h.{bid}.attn.attention.q_proj", # exaone
|
| 181 |
+
"model.layers.{bid}.self_attn.q_proj", # llama4
|
| 182 |
+
),
|
| 183 |
+
|
| 184 |
+
# Attention key
|
| 185 |
+
MODEL_TENSOR.ATTN_K: (
|
| 186 |
+
"model.layers.{bid}.self_attn.k_proj", # llama-hf nemotron olmoe olmo2 phimoe
|
| 187 |
+
"model.layers.{bid}.self_attn.k_proj_no_perm", # llama-custom
|
| 188 |
+
"layers.{bid}.attention.wk", # llama-pth
|
| 189 |
+
"encoder.layer.{bid}.attention.self.key", # bert
|
| 190 |
+
"transformer.layer.{bid}.attention.k_lin", # distillbert
|
| 191 |
+
"transformer.h.{bid}.attn.k_proj", # gpt-j
|
| 192 |
+
"transformer.h.{bid}.attn.k", # refact
|
| 193 |
+
"model.layers.layers.{bid}.self_attn.k_proj", # plamo
|
| 194 |
+
"model.layers.{bid}.attention.wk", # internlm2
|
| 195 |
+
"transformer.decoder_layer.{bid}.multi_head_attention.key",# Grok
|
| 196 |
+
"transformer.h.{bid}.attn.attention.k_proj", # exaone
|
| 197 |
+
"model.layers.{bid}.self_attn.k_proj", # llama4
|
| 198 |
+
),
|
| 199 |
+
|
| 200 |
+
# Attention value
|
| 201 |
+
MODEL_TENSOR.ATTN_V: (
|
| 202 |
+
"model.layers.{bid}.self_attn.v_proj", # llama-hf nemotron olmoe olmo2 phimoe
|
| 203 |
+
"layers.{bid}.attention.wv", # llama-pth
|
| 204 |
+
"encoder.layer.{bid}.attention.self.value", # bert
|
| 205 |
+
"transformer.layer.{bid}.attention.v_lin", # distillbert
|
| 206 |
+
"transformer.h.{bid}.attn.v_proj", # gpt-j
|
| 207 |
+
"transformer.h.{bid}.attn.v", # refact
|
| 208 |
+
"model.layers.layers.{bid}.self_attn.v_proj", # plamo
|
| 209 |
+
"model.layers.{bid}.attention.wv", # internlm2
|
| 210 |
+
"transformer.decoder_layer.{bid}.multi_head_attention.value",# Grok
|
| 211 |
+
"transformer.h.{bid}.attn.attention.v_proj", # exaone
|
| 212 |
+
"model.layers.{bid}.self_attn.v_proj", # llama4
|
| 213 |
+
),
|
| 214 |
+
|
| 215 |
+
# Attention output
|
| 216 |
+
MODEL_TENSOR.ATTN_OUT: (
|
| 217 |
+
"gpt_neox.layers.{bid}.attention.dense", # gptneox
|
| 218 |
+
"transformer.h.{bid}.attn.c_proj", # gpt2 refact qwen jais
|
| 219 |
+
"transformer.blocks.{bid}.attn.out_proj", # mpt
|
| 220 |
+
"transformer.h.{bid}.self_attention.dense", # falcon
|
| 221 |
+
"h.{bid}.self_attention.dense", # bloom
|
| 222 |
+
"model.layers.{bid}.self_attn.o_proj", # llama-hf nemotron olmoe olmo2 phimoe
|
| 223 |
+
"model.layers.{bid}.self_attn.linear_attn", # deci
|
| 224 |
+
"layers.{bid}.attention.wo", # llama-pth
|
| 225 |
+
"encoder.layer.{bid}.attention.output.dense", # bert
|
| 226 |
+
"transformer.layer.{bid}.attention.out_lin", # distillbert
|
| 227 |
+
"transformer.h.{bid}.attn.out_proj", # gpt-j
|
| 228 |
+
"language_model.encoder.layers.{bid}.self_attention.dense", # persimmon
|
| 229 |
+
"model.layers.{bid}.self_attn.dense", # persimmon
|
| 230 |
+
"h.{bid}.attn.c_proj", # gpt2
|
| 231 |
+
"transformer.h.{bid}.mixer.out_proj", # phi2
|
| 232 |
+
"model.layers.layers.{bid}.self_attn.o_proj", # plamo
|
| 233 |
+
"model.layers.{bid}.attention.wo", # internlm2
|
| 234 |
+
"encoder.layers.{bid}.attn.out_proj", # nomic-bert
|
| 235 |
+
"encoder.layers.{bid}.mixer.out_proj", # jina
|
| 236 |
+
"transformer.decoder_layer.{bid}.multi_head_attention.linear", # Grok
|
| 237 |
+
"transformer.blocks.{bid}.norm_attn_norm.attn.out_proj", # dbrx
|
| 238 |
+
"encoder.layers.{bid}.self_attention.dense", # chatglm
|
| 239 |
+
"transformer.layers.{bid}.attn.out_proj", # openelm
|
| 240 |
+
"transformer.h.{bid}.attn.attention.out_proj", # exaone
|
| 241 |
+
"model.layers.{bid}.self_attn.o_proj", # llama4
|
| 242 |
+
"transformer_encoder.{bid}.wo", # neobert
|
| 243 |
+
),
|
| 244 |
+
|
| 245 |
+
# Attention output norm
|
| 246 |
+
MODEL_TENSOR.ATTN_OUT_NORM: (
|
| 247 |
+
"encoder.layer.{bid}.attention.output.LayerNorm", # bert
|
| 248 |
+
"transformer.layer.{bid}.sa_layer_norm", # distillbert
|
| 249 |
+
"encoder.layers.{bid}.norm1", # nomic-bert
|
| 250 |
+
"transformer.decoder_layer.{bid}.rms_norm_1", # Grok
|
| 251 |
+
"transformer.blocks.{bid}.norm_attn_norm.norm_2", # dbrx
|
| 252 |
+
),
|
| 253 |
+
|
| 254 |
+
MODEL_TENSOR.ATTN_POST_NORM: (
|
| 255 |
+
"model.layers.{bid}.post_attention_layernorm", # gemma2 olmo2 # ge
|
| 256 |
+
"model.layers.{bid}.post_self_attn_layernorm", # glm-4-0414
|
| 257 |
+
),
|
| 258 |
+
|
| 259 |
+
# Rotary embeddings
|
| 260 |
+
MODEL_TENSOR.ATTN_ROT_EMBD: (
|
| 261 |
+
"model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf
|
| 262 |
+
"layers.{bid}.attention.inner_attention.rope.freqs", # llama-pth
|
| 263 |
+
"model.layers.layers.{bid}.self_attn.rotary_emb.inv_freq", # plamo
|
| 264 |
+
"transformer.h.{bid}.attn.rotary_emb.inv_freq", # codeshell
|
| 265 |
+
),
|
| 266 |
+
|
| 267 |
+
# Feed-forward norm
|
| 268 |
+
MODEL_TENSOR.FFN_NORM: (
|
| 269 |
+
"gpt_neox.layers.{bid}.post_attention_layernorm", # gptneox
|
| 270 |
+
"transformer.h.{bid}.ln_2", # gpt2 refact qwen jais exaone
|
| 271 |
+
"h.{bid}.post_attention_layernorm", # bloom
|
| 272 |
+
"transformer.blocks.{bid}.norm_2", # mpt
|
| 273 |
+
"model.layers.{bid}.post_attention_layernorm", # llama-hf nemotron olmoe phimoe
|
| 274 |
+
"layers.{bid}.ffn_norm", # llama-pth
|
| 275 |
+
"language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon
|
| 276 |
+
"model.layers.{bid}.ln2", # yi
|
| 277 |
+
"h.{bid}.ln_2", # gpt2
|
| 278 |
+
"model.layers.{bid}.ffn_norm", # internlm2
|
| 279 |
+
"transformer.decoder_layer.{bid}.rms_norm_2", # Grok
|
| 280 |
+
"encoder.layers.{bid}.post_attention_layernorm", # chatglm
|
| 281 |
+
"transformer.layers.{bid}.ffn_norm", # openelm
|
| 282 |
+
"model.layers.{bid}.post_attention_layernorm", # llama4
|
| 283 |
+
"transformer_encoder.{bid}.ffn_norm", # neobert
|
| 284 |
+
),
|
| 285 |
+
|
| 286 |
+
# Post feed-forward norm
|
| 287 |
+
MODEL_TENSOR.FFN_PRE_NORM: (
|
| 288 |
+
"model.layers.{bid}.pre_feedforward_layernorm", # gemma2
|
| 289 |
+
),
|
| 290 |
+
|
| 291 |
+
# Post feed-forward norm
|
| 292 |
+
MODEL_TENSOR.FFN_POST_NORM: (
|
| 293 |
+
"model.layers.{bid}.post_feedforward_layernorm", # gemma2 olmo2
|
| 294 |
+
"model.layers.{bid}.post_mlp_layernorm", # glm-4-0414
|
| 295 |
+
),
|
| 296 |
+
|
| 297 |
+
MODEL_TENSOR.FFN_GATE_INP: (
|
| 298 |
+
"layers.{bid}.feed_forward.gate", # mixtral
|
| 299 |
+
"model.layers.{bid}.block_sparse_moe.gate", # mixtral phimoe
|
| 300 |
+
"model.layers.{bid}.mlp.gate", # qwen2moe olmoe
|
| 301 |
+
"transformer.decoder_layer.{bid}.router", # Grok
|
| 302 |
+
"transformer.blocks.{bid}.ffn.router.layer", # dbrx
|
| 303 |
+
"model.layers.{bid}.block_sparse_moe.router.layer", # granitemoe
|
| 304 |
+
"model.layers.{bid}.feed_forward.router", # llama4
|
| 305 |
+
"encoder.layers.{bid}.mlp.router.layer", # nomic-bert-moe
|
| 306 |
+
),
|
| 307 |
+
|
| 308 |
+
MODEL_TENSOR.FFN_GATE_INP_SHEXP: (
|
| 309 |
+
"model.layers.{bid}.mlp.shared_expert_gate", # qwen2moe
|
| 310 |
+
),
|
| 311 |
+
|
| 312 |
+
MODEL_TENSOR.FFN_EXP_PROBS_B: (
|
| 313 |
+
"model.layers.{bid}.mlp.gate.e_score_correction", # deepseek-v3 dots1
|
| 314 |
+
),
|
| 315 |
+
|
| 316 |
+
# Feed-forward up
|
| 317 |
+
MODEL_TENSOR.FFN_UP: (
|
| 318 |
+
"gpt_neox.layers.{bid}.mlp.dense_h_to_4h", # gptneox
|
| 319 |
+
"transformer.h.{bid}.mlp.c_fc", # gpt2 jais
|
| 320 |
+
"transformer.blocks.{bid}.ffn.up_proj", # mpt
|
| 321 |
+
"transformer.h.{bid}.mlp.dense_h_to_4h", # falcon
|
| 322 |
+
"h.{bid}.mlp.dense_h_to_4h", # bloom
|
| 323 |
+
"model.layers.{bid}.mlp.up_proj", # llama-hf refact nemotron olmo2
|
| 324 |
+
"layers.{bid}.feed_forward.w3", # llama-pth
|
| 325 |
+
"encoder.layer.{bid}.intermediate.dense", # bert
|
| 326 |
+
"transformer.layer.{bid}.ffn.lin1", # distillbert
|
| 327 |
+
"transformer.h.{bid}.mlp.fc_in", # gpt-j
|
| 328 |
+
"transformer.h.{bid}.mlp.linear_3", # refact
|
| 329 |
+
"language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon
|
| 330 |
+
"model.layers.{bid}.mlp.dense_h_to_4h", # persimmon
|
| 331 |
+
"transformer.h.{bid}.mlp.w1", # qwen
|
| 332 |
+
"h.{bid}.mlp.c_fc", # gpt2
|
| 333 |
+
"transformer.h.{bid}.mlp.fc1", # phi2
|
| 334 |
+
"model.layers.{bid}.mlp.fc1", # phi2
|
| 335 |
+
"model.layers.{bid}.mlp.gate_up_proj", # phi3 glm-4-0414
|
| 336 |
+
"model.layers.layers.{bid}.mlp.up_proj", # plamo
|
| 337 |
+
"model.layers.{bid}.feed_forward.w3", # internlm2
|
| 338 |
+
"encoder.layers.{bid}.mlp.fc11", # nomic-bert
|
| 339 |
+
"encoder.layers.{bid}.mlp.fc1", # nomic-bert-moe
|
| 340 |
+
"model.layers.{bid}.mlp.c_fc", # starcoder2
|
| 341 |
+
"encoder.layer.{bid}.mlp.gated_layers_v", # jina-bert-v2 (split up/gate, no longer used)
|
| 342 |
+
"encoder.layer.{bid}.mlp.gated_layers", # jina-bert-v2 (GEGLU)
|
| 343 |
+
"encoder.layer.{bid}.mlp.up_gated_layer", # jina-v2-code (GEGLU)
|
| 344 |
+
"model.layers.{bid}.residual_mlp.w3", # arctic
|
| 345 |
+
"encoder.layers.{bid}.mlp.dense_h_to_4h", # chatglm
|
| 346 |
+
"transformer.h.{bid}.mlp.c_fc_1", # exaone
|
| 347 |
+
"model.layers.{bid}.feed_forward.up_proj", # llama4
|
| 348 |
+
"transformer_encoder.{bid}.ffn.w12", # neobert
|
| 349 |
+
),
|
| 350 |
+
|
| 351 |
+
MODEL_TENSOR.FFN_UP_EXP: (
|
| 352 |
+
"layers.{bid}.feed_forward.experts.w3", # mixtral (merged)
|
| 353 |
+
"transformer.decoder_layer.{bid}.moe.linear_v", # Grok (merged)
|
| 354 |
+
"transformer.blocks.{bid}.ffn.experts.mlp.v1", # dbrx
|
| 355 |
+
"model.layers.{bid}.mlp.experts.up_proj", # qwen2moe olmoe (merged)
|
| 356 |
+
"model.layers.{bid}.block_sparse_moe.experts.w3", # phimoe (merged)
|
| 357 |
+
"model.layers.{bid}.feed_forward.experts.up_proj", # llama4
|
| 358 |
+
"encoder.layers.{bid}.mlp.experts.mlp.w1", # nomic-bert-moe
|
| 359 |
+
),
|
| 360 |
+
|
| 361 |
+
MODEL_TENSOR.FFN_UP_SHEXP: (
|
| 362 |
+
"model.layers.{bid}.mlp.shared_expert.up_proj", # qwen2moe
|
| 363 |
+
"model.layers.{bid}.mlp.shared_experts.up_proj", # deepseek deepseek2
|
| 364 |
+
"model.layers.{bid}.feed_forward.shared_expert.up_proj", # llama4
|
| 365 |
+
),
|
| 366 |
+
|
| 367 |
+
# AWQ-activation gate
|
| 368 |
+
MODEL_TENSOR.FFN_ACT: (
|
| 369 |
+
"transformer.blocks.{bid}.ffn.act", # mpt
|
| 370 |
+
),
|
| 371 |
+
|
| 372 |
+
# Feed-forward gate
|
| 373 |
+
MODEL_TENSOR.FFN_GATE: (
|
| 374 |
+
"model.layers.{bid}.mlp.gate_proj", # llama-hf refact olmo2
|
| 375 |
+
"layers.{bid}.feed_forward.w1", # llama-pth
|
| 376 |
+
"transformer.h.{bid}.mlp.w2", # qwen
|
| 377 |
+
"transformer.h.{bid}.mlp.c_fc2", # jais
|
| 378 |
+
"model.layers.layers.{bid}.mlp.gate_proj", # plamo
|
| 379 |
+
"model.layers.{bid}.feed_forward.w1", # internlm2
|
| 380 |
+
"encoder.layers.{bid}.mlp.fc12", # nomic-bert
|
| 381 |
+
"encoder.layer.{bid}.mlp.gated_layers_w", # jina-bert-v2 (split up/gate, no longer used)
|
| 382 |
+
"transformer.h.{bid}.mlp.linear_1", # refact
|
| 383 |
+
"model.layers.{bid}.residual_mlp.w1", # arctic
|
| 384 |
+
"transformer.h.{bid}.mlp.c_fc_0", # exaone
|
| 385 |
+
"model.layers.{bid}.feed_forward.gate_proj", # llama4
|
| 386 |
+
),
|
| 387 |
+
|
| 388 |
+
MODEL_TENSOR.FFN_GATE_EXP: (
|
| 389 |
+
"layers.{bid}.feed_forward.experts.w1", # mixtral (merged)
|
| 390 |
+
"transformer.decoder_layer.{bid}.moe.linear", # Grok (merged)
|
| 391 |
+
"transformer.blocks.{bid}.ffn.experts.mlp.w1", # dbrx
|
| 392 |
+
"model.layers.{bid}.mlp.experts.gate_proj", # qwen2moe olmoe (merged)
|
| 393 |
+
"model.layers.{bid}.block_sparse_moe.experts.w1", # phimoe (merged)
|
| 394 |
+
"model.layers.{bid}.feed_forward.experts.gate_proj", # llama4
|
| 395 |
+
),
|
| 396 |
+
|
| 397 |
+
MODEL_TENSOR.FFN_GATE_SHEXP: (
|
| 398 |
+
"model.layers.{bid}.mlp.shared_expert.gate_proj", # qwen2moe
|
| 399 |
+
"model.layers.{bid}.mlp.shared_experts.gate_proj", # deepseek deepseek2
|
| 400 |
+
"model.layers.{bid}.feed_forward.shared_expert.gate_proj", # llama4
|
| 401 |
+
),
|
| 402 |
+
|
| 403 |
+
# Feed-forward down
|
| 404 |
+
MODEL_TENSOR.FFN_DOWN: (
|
| 405 |
+
"gpt_neox.layers.{bid}.mlp.dense_4h_to_h", # gptneox
|
| 406 |
+
"transformer.h.{bid}.mlp.c_proj", # gpt2 refact qwen jais
|
| 407 |
+
"transformer.blocks.{bid}.ffn.down_proj", # mpt
|
| 408 |
+
"transformer.h.{bid}.mlp.dense_4h_to_h", # falcon
|
| 409 |
+
"h.{bid}.mlp.dense_4h_to_h", # bloom
|
| 410 |
+
"model.layers.{bid}.mlp.down_proj", # llama-hf nemotron olmo2
|
| 411 |
+
"layers.{bid}.feed_forward.w2", # llama-pth
|
| 412 |
+
"encoder.layer.{bid}.output.dense", # bert
|
| 413 |
+
"transformer.layer.{bid}.ffn.lin2", # distillbert
|
| 414 |
+
"transformer.h.{bid}.mlp.fc_out", # gpt-j
|
| 415 |
+
"language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon
|
| 416 |
+
"model.layers.{bid}.mlp.dense_4h_to_h", # persimmon
|
| 417 |
+
"h.{bid}.mlp.c_proj", # gpt2
|
| 418 |
+
"transformer.h.{bid}.mlp.fc2", # phi2
|
| 419 |
+
"model.layers.{bid}.mlp.fc2", # phi2
|
| 420 |
+
"model.layers.layers.{bid}.mlp.down_proj", # plamo
|
| 421 |
+
"model.layers.{bid}.feed_forward.w2", # internlm2
|
| 422 |
+
"encoder.layers.{bid}.mlp.fc2", # nomic-bert
|
| 423 |
+
"model.layers.{bid}.mlp.c_proj", # starcoder2
|
| 424 |
+
"encoder.layer.{bid}.mlp.wo", # jina-bert-v2
|
| 425 |
+
"transformer.layers.{bid}.ffn.proj_2", # openelm
|
| 426 |
+
"model.layers.{bid}.residual_mlp.w2", # arctic
|
| 427 |
+
"encoder.layer.{bid}.mlp.down_layer", # jina-bert-v2
|
| 428 |
+
"encoder.layers.{bid}.mlp.dense_4h_to_h", # chatglm
|
| 429 |
+
"model.layers.h.{bid}.mlp.c_proj", # exaone
|
| 430 |
+
"model.layers.{bid}.feed_forward.down_proj", # llama4
|
| 431 |
+
"transformer_encoder.{bid}.ffn.w3", # neobert
|
| 432 |
+
),
|
| 433 |
+
|
| 434 |
+
MODEL_TENSOR.FFN_DOWN_EXP: (
|
| 435 |
+
"layers.{bid}.feed_forward.experts.w2", # mixtral (merged)
|
| 436 |
+
"transformer.decoder_layer.{bid}.moe.linear_1", # Grok (merged)
|
| 437 |
+
"transformer.blocks.{bid}.ffn.experts.mlp.w2", # dbrx
|
| 438 |
+
"model.layers.{bid}.mlp.experts.down_proj", # qwen2moe olmoe (merged)
|
| 439 |
+
"model.layers.{bid}.block_sparse_moe.output_linear", # granitemoe
|
| 440 |
+
"model.layers.{bid}.block_sparse_moe.experts.w2", # phimoe (merged)
|
| 441 |
+
"model.layers.{bid}.feed_forward.experts.down_proj", # llama4
|
| 442 |
+
"encoder.layers.{bid}.mlp.experts.mlp.w2", # nomic-bert-moe
|
| 443 |
+
),
|
| 444 |
+
|
| 445 |
+
MODEL_TENSOR.FFN_DOWN_SHEXP: (
|
| 446 |
+
"model.layers.{bid}.mlp.shared_expert.down_proj", # qwen2moe
|
| 447 |
+
"model.layers.{bid}.mlp.shared_experts.down_proj", # deepseek deepseek2
|
| 448 |
+
"model.layers.{bid}.feed_forward.shared_expert.down_proj", # llama4
|
| 449 |
+
"model.layers.{bid}.shared_mlp.output_linear", # granitemoe
|
| 450 |
+
),
|
| 451 |
+
|
| 452 |
+
MODEL_TENSOR.ATTN_Q_NORM: (
|
| 453 |
+
"language_model.encoder.layers.{bid}.self_attention.q_layernorm",
|
| 454 |
+
"model.layers.{bid}.self_attn.q_layernorm", # persimmon
|
| 455 |
+
"model.layers.{bid}.self_attn.q_norm", # cohere olmoe chameleon olmo2
|
| 456 |
+
"transformer.blocks.{bid}.attn.q_ln", # sea-lion
|
| 457 |
+
"encoder.layer.{bid}.attention.self.layer_norm_q", # jina-bert-v2
|
| 458 |
+
"transformer.layers.{bid}.attn.q_norm", # openelm
|
| 459 |
+
),
|
| 460 |
+
|
| 461 |
+
MODEL_TENSOR.ATTN_K_NORM: (
|
| 462 |
+
"language_model.encoder.layers.{bid}.self_attention.k_layernorm",
|
| 463 |
+
"model.layers.{bid}.self_attn.k_layernorm", # persimmon
|
| 464 |
+
"model.layers.{bid}.self_attn.k_norm", # cohere olmoe chameleon olmo2
|
| 465 |
+
"transformer.blocks.{bid}.attn.k_ln", # sea-lion
|
| 466 |
+
"encoder.layer.{bid}.attention.self.layer_norm_k", # jina-bert-v2
|
| 467 |
+
"transformer.layers.{bid}.attn.k_norm", # openelm
|
| 468 |
+
),
|
| 469 |
+
|
| 470 |
+
MODEL_TENSOR.ROPE_FREQS: (
|
| 471 |
+
"language_model.encoder.layers.{bid}.self_attention.rotary_emb.inv_freq", # persimmon
|
| 472 |
+
),
|
| 473 |
+
|
| 474 |
+
MODEL_TENSOR.LAYER_OUT_NORM: (
|
| 475 |
+
"encoder.layer.{bid}.output.LayerNorm", # bert
|
| 476 |
+
"transformer.layer.{bid}.output_layer_norm", # distillbert
|
| 477 |
+
"encoder.layers.{bid}.norm2", # nomic-bert
|
| 478 |
+
"transformer.decoder_layer.{bid}.rms_norm_3", # Grok
|
| 479 |
+
"encoder.layer.{bid}.mlp.layernorm", # jina-bert-v2
|
| 480 |
+
"encoder.layer.{bid}.layer_norm_2" # jina-v2-code
|
| 481 |
+
),
|
| 482 |
+
|
| 483 |
+
MODEL_TENSOR.SSM_IN: (
|
| 484 |
+
"model.layers.{bid}.in_proj",
|
| 485 |
+
"backbone.layers.{bid}.mixer.in_proj",
|
| 486 |
+
),
|
| 487 |
+
|
| 488 |
+
MODEL_TENSOR.SSM_CONV1D: (
|
| 489 |
+
"model.layers.{bid}.conv1d",
|
| 490 |
+
"backbone.layers.{bid}.mixer.conv1d",
|
| 491 |
+
),
|
| 492 |
+
|
| 493 |
+
MODEL_TENSOR.SSM_X: (
|
| 494 |
+
"model.layers.{bid}.x_proj",
|
| 495 |
+
"backbone.layers.{bid}.mixer.x_proj",
|
| 496 |
+
),
|
| 497 |
+
|
| 498 |
+
MODEL_TENSOR.SSM_DT: (
|
| 499 |
+
"model.layers.{bid}.dt_proj",
|
| 500 |
+
"backbone.layers.{bid}.mixer.dt_proj",
|
| 501 |
+
),
|
| 502 |
+
|
| 503 |
+
MODEL_TENSOR.SSM_A: (
|
| 504 |
+
"model.layers.{bid}.A_log",
|
| 505 |
+
"backbone.layers.{bid}.mixer.A_log",
|
| 506 |
+
),
|
| 507 |
+
|
| 508 |
+
MODEL_TENSOR.SSM_D: (
|
| 509 |
+
"model.layers.{bid}.D",
|
| 510 |
+
"backbone.layers.{bid}.mixer.D",
|
| 511 |
+
),
|
| 512 |
+
|
| 513 |
+
MODEL_TENSOR.SSM_OUT: (
|
| 514 |
+
"model.layers.{bid}.out_proj",
|
| 515 |
+
"backbone.layers.{bid}.mixer.out_proj",
|
| 516 |
+
),
|
| 517 |
+
|
| 518 |
+
MODEL_TENSOR.TIME_MIX_W0: (
|
| 519 |
+
"model.layers.{bid}.attention.w0", # rwkv7
|
| 520 |
+
),
|
| 521 |
+
|
| 522 |
+
MODEL_TENSOR.TIME_MIX_W1: (
|
| 523 |
+
"rwkv.blocks.{bid}.attention.time_maa_w1", # rwkv6
|
| 524 |
+
"model.layers.{bid}.self_attn.time_maa_w1", # rwkv6qwen2
|
| 525 |
+
"model.layers.{bid}.attention.w1", # rwkv7
|
| 526 |
+
),
|
| 527 |
+
|
| 528 |
+
MODEL_TENSOR.TIME_MIX_W2: (
|
| 529 |
+
"rwkv.blocks.{bid}.attention.time_maa_w2", # rwkv6
|
| 530 |
+
"model.layers.{bid}.self_attn.time_maa_w2", # rwkv6qwen2
|
| 531 |
+
"model.layers.{bid}.attention.w2", # rwkv7
|
| 532 |
+
),
|
| 533 |
+
|
| 534 |
+
MODEL_TENSOR.TIME_MIX_A0: (
|
| 535 |
+
"model.layers.{bid}.attention.a0", # rwkv7
|
| 536 |
+
),
|
| 537 |
+
|
| 538 |
+
MODEL_TENSOR.TIME_MIX_A1: (
|
| 539 |
+
"model.layers.{bid}.attention.a1", # rwkv7
|
| 540 |
+
),
|
| 541 |
+
|
| 542 |
+
MODEL_TENSOR.TIME_MIX_A2: (
|
| 543 |
+
"model.layers.{bid}.attention.a2", # rwkv7
|
| 544 |
+
),
|
| 545 |
+
|
| 546 |
+
MODEL_TENSOR.TIME_MIX_V0: (
|
| 547 |
+
"model.layers.{bid}.attention.v0", # rwkv7
|
| 548 |
+
),
|
| 549 |
+
|
| 550 |
+
MODEL_TENSOR.TIME_MIX_V1: (
|
| 551 |
+
"model.layers.{bid}.attention.v1", # rwkv7
|
| 552 |
+
),
|
| 553 |
+
|
| 554 |
+
MODEL_TENSOR.TIME_MIX_V2: (
|
| 555 |
+
"model.layers.{bid}.attention.v2", # rwkv7
|
| 556 |
+
),
|
| 557 |
+
|
| 558 |
+
MODEL_TENSOR.TIME_MIX_G1: (
|
| 559 |
+
"model.layers.{bid}.attention.g1", # rwkv7
|
| 560 |
+
),
|
| 561 |
+
|
| 562 |
+
MODEL_TENSOR.TIME_MIX_G2: (
|
| 563 |
+
"model.layers.{bid}.attention.g2", # rwkv7
|
| 564 |
+
),
|
| 565 |
+
|
| 566 |
+
MODEL_TENSOR.TIME_MIX_K_K: (
|
| 567 |
+
"model.layers.{bid}.attention.k_k", # rwkv7
|
| 568 |
+
),
|
| 569 |
+
|
| 570 |
+
MODEL_TENSOR.TIME_MIX_K_A: (
|
| 571 |
+
"model.layers.{bid}.attention.k_a", # rwkv7
|
| 572 |
+
),
|
| 573 |
+
|
| 574 |
+
MODEL_TENSOR.TIME_MIX_R_K: (
|
| 575 |
+
"model.layers.{bid}.attention.r_k", # rwkv7
|
| 576 |
+
),
|
| 577 |
+
|
| 578 |
+
MODEL_TENSOR.TIME_MIX_LERP_X: (
|
| 579 |
+
"rwkv.blocks.{bid}.attention.time_maa_x", # rwkv6
|
| 580 |
+
"model.layers.{bid}.self_attn.time_maa_x", # rwkv6qwen2
|
| 581 |
+
),
|
| 582 |
+
|
| 583 |
+
MODEL_TENSOR.TIME_MIX_LERP_K: (
|
| 584 |
+
"rwkv.blocks.{bid}.attention.time_maa_k", # rwkv6
|
| 585 |
+
"model.layers.{bid}.self_attn.time_maa_k", # rwkv6qwen2
|
| 586 |
+
),
|
| 587 |
+
|
| 588 |
+
MODEL_TENSOR.TIME_MIX_LERP_V: (
|
| 589 |
+
"rwkv.blocks.{bid}.attention.time_maa_v", # rwkv6
|
| 590 |
+
"model.layers.{bid}.self_attn.time_maa_v", # rwkv6qwen2
|
| 591 |
+
),
|
| 592 |
+
|
| 593 |
+
MODEL_TENSOR.TIME_MIX_LERP_R: (
|
| 594 |
+
"rwkv.blocks.{bid}.attention.time_maa_r", # rwkv6
|
| 595 |
+
"model.layers.{bid}.self_attn.time_maa_r", # rwkv6qwen2
|
| 596 |
+
),
|
| 597 |
+
|
| 598 |
+
MODEL_TENSOR.TIME_MIX_LERP_G: (
|
| 599 |
+
"rwkv.blocks.{bid}.attention.time_maa_g", # rwkv6
|
| 600 |
+
"model.layers.{bid}.self_attn.time_maa_g", # rwkv6qwen2
|
| 601 |
+
),
|
| 602 |
+
|
| 603 |
+
MODEL_TENSOR.TIME_MIX_LERP_W: (
|
| 604 |
+
"rwkv.blocks.{bid}.attention.time_maa_w", # rwkv6
|
| 605 |
+
"model.layers.{bid}.self_attn.time_maa_w", # rwkv6qwen2
|
| 606 |
+
),
|
| 607 |
+
|
| 608 |
+
MODEL_TENSOR.TIME_MIX_FIRST: (
|
| 609 |
+
"rwkv.blocks.{bid}.attention.time_faaaa", # rwkv6
|
| 610 |
+
),
|
| 611 |
+
|
| 612 |
+
MODEL_TENSOR.TIME_MIX_DECAY: (
|
| 613 |
+
"rwkv.blocks.{bid}.attention.time_decay", # rwkv6
|
| 614 |
+
"model.layers.{bid}.self_attn.time_decay", # rwkv6qwen2
|
| 615 |
+
),
|
| 616 |
+
|
| 617 |
+
MODEL_TENSOR.TIME_MIX_DECAY_W1: (
|
| 618 |
+
"rwkv.blocks.{bid}.attention.time_decay_w1", # rwkv6
|
| 619 |
+
"model.layers.{bid}.self_attn.time_decay_w1", # rwkv6qwen2
|
| 620 |
+
),
|
| 621 |
+
|
| 622 |
+
MODEL_TENSOR.TIME_MIX_DECAY_W2: (
|
| 623 |
+
"rwkv.blocks.{bid}.attention.time_decay_w2", # rwkv6
|
| 624 |
+
"model.layers.{bid}.self_attn.time_decay_w2", # rwkv6qwen2
|
| 625 |
+
),
|
| 626 |
+
|
| 627 |
+
MODEL_TENSOR.TIME_MIX_KEY: (
|
| 628 |
+
"rwkv.blocks.{bid}.attention.key", # rwkv6
|
| 629 |
+
"model.layers.{bid}.self_attn.k_proj", # rwkv6qwen2
|
| 630 |
+
"model.layers.{bid}.attention.key", # rwkv7
|
| 631 |
+
"model.layers.{bid}.attention.k_proj", # rwkv7
|
| 632 |
+
),
|
| 633 |
+
|
| 634 |
+
MODEL_TENSOR.TIME_MIX_VALUE: (
|
| 635 |
+
"rwkv.blocks.{bid}.attention.value", # rwkv6
|
| 636 |
+
"model.layers.{bid}.self_attn.v_proj", # rwkv6qwen2
|
| 637 |
+
"model.layers.{bid}.attention.value", # rwkv7
|
| 638 |
+
"model.layers.{bid}.attention.v_proj", # rwkv7
|
| 639 |
+
),
|
| 640 |
+
|
| 641 |
+
MODEL_TENSOR.TIME_MIX_RECEPTANCE: (
|
| 642 |
+
"rwkv.blocks.{bid}.attention.receptance", # rwkv6
|
| 643 |
+
"model.layers.{bid}.self_attn.q_proj", # rwkv6qwen2
|
| 644 |
+
"model.layers.{bid}.attention.receptance", # rwkv7
|
| 645 |
+
"model.layers.{bid}.attention.r_proj", # rwkv7
|
| 646 |
+
),
|
| 647 |
+
|
| 648 |
+
MODEL_TENSOR.TIME_MIX_GATE: (
|
| 649 |
+
"rwkv.blocks.{bid}.attention.gate", # rwkv6
|
| 650 |
+
"model.layers.{bid}.self_attn.gate", # rwkv6qwen2
|
| 651 |
+
),
|
| 652 |
+
|
| 653 |
+
MODEL_TENSOR.TIME_MIX_LN: (
|
| 654 |
+
"rwkv.blocks.{bid}.attention.ln_x", # rwkv6
|
| 655 |
+
"model.layers.{bid}.attention.ln_x" # rwkv7
|
| 656 |
+
),
|
| 657 |
+
|
| 658 |
+
MODEL_TENSOR.TIME_MIX_OUTPUT: (
|
| 659 |
+
"rwkv.blocks.{bid}.attention.output", # rwkv6
|
| 660 |
+
"model.layers.{bid}.self_attn.o_proj", # rwkv6qwen2
|
| 661 |
+
"model.layers.{bid}.attention.output", # rwkv7
|
| 662 |
+
"model.layers.{bid}.attention.o_proj", # rwkv7
|
| 663 |
+
),
|
| 664 |
+
|
| 665 |
+
MODEL_TENSOR.CHANNEL_MIX_LERP_K: (
|
| 666 |
+
"rwkv.blocks.{bid}.feed_forward.time_maa_k", # rwkv6
|
| 667 |
+
"model.layers.{bid}.feed_forward.x_k", # rwkv7
|
| 668 |
+
),
|
| 669 |
+
|
| 670 |
+
MODEL_TENSOR.CHANNEL_MIX_LERP_R: (
|
| 671 |
+
"rwkv.blocks.{bid}.feed_forward.time_maa_r", # rwkv6
|
| 672 |
+
),
|
| 673 |
+
|
| 674 |
+
MODEL_TENSOR.CHANNEL_MIX_KEY: (
|
| 675 |
+
"rwkv.blocks.{bid}.feed_forward.key", # rwkv6
|
| 676 |
+
"model.layers.{bid}.feed_forward.key", # rwkv7
|
| 677 |
+
),
|
| 678 |
+
|
| 679 |
+
MODEL_TENSOR.CHANNEL_MIX_RECEPTANCE: (
|
| 680 |
+
"rwkv.blocks.{bid}.feed_forward.receptance", # rwkv6
|
| 681 |
+
),
|
| 682 |
+
|
| 683 |
+
MODEL_TENSOR.CHANNEL_MIX_VALUE: (
|
| 684 |
+
"rwkv.blocks.{bid}.feed_forward.value", # rwkv6
|
| 685 |
+
"model.layers.{bid}.feed_forward.value", # rwkv7
|
| 686 |
+
),
|
| 687 |
+
|
| 688 |
+
MODEL_TENSOR.ATTN_Q_A: (
|
| 689 |
+
"model.layers.{bid}.self_attn.q_a_proj", # deepseek2
|
| 690 |
+
),
|
| 691 |
+
|
| 692 |
+
MODEL_TENSOR.ATTN_Q_B: (
|
| 693 |
+
"model.layers.{bid}.self_attn.q_b_proj", # deepseek2
|
| 694 |
+
),
|
| 695 |
+
|
| 696 |
+
MODEL_TENSOR.ATTN_KV_A_MQA: (
|
| 697 |
+
"model.layers.{bid}.self_attn.kv_a_proj_with_mqa", # deepseek2
|
| 698 |
+
),
|
| 699 |
+
|
| 700 |
+
MODEL_TENSOR.ATTN_KV_B: (
|
| 701 |
+
"model.layers.{bid}.self_attn.kv_b_proj", # deepseek2
|
| 702 |
+
),
|
| 703 |
+
|
| 704 |
+
MODEL_TENSOR.ATTN_K_B: (
|
| 705 |
+
"model.layers.{bid}.self_attn.k_b_proj", # deepseek2
|
| 706 |
+
),
|
| 707 |
+
|
| 708 |
+
MODEL_TENSOR.ATTN_V_B: (
|
| 709 |
+
"model.layers.{bid}.self_attn.v_b_proj", # deepseek2
|
| 710 |
+
),
|
| 711 |
+
|
| 712 |
+
MODEL_TENSOR.ATTN_Q_A_NORM: (
|
| 713 |
+
"model.layers.{bid}.self_attn.q_a_layernorm", # deepseek2
|
| 714 |
+
),
|
| 715 |
+
|
| 716 |
+
MODEL_TENSOR.ATTN_KV_A_NORM: (
|
| 717 |
+
"model.layers.{bid}.self_attn.kv_a_layernorm", # deepseek2
|
| 718 |
+
),
|
| 719 |
+
|
| 720 |
+
MODEL_TENSOR.ATTN_SUB_NORM: (
|
| 721 |
+
"model.layers.{bid}.self_attn.inner_attn_ln", # bitnet
|
| 722 |
+
),
|
| 723 |
+
|
| 724 |
+
MODEL_TENSOR.FFN_SUB_NORM: (
|
| 725 |
+
"model.layers.{bid}.mlp.ffn_layernorm", # bitnet
|
| 726 |
+
),
|
| 727 |
+
|
| 728 |
+
MODEL_TENSOR.DEC_ATTN_NORM: (
|
| 729 |
+
"decoder.block.{bid}.layer.0.layer_norm", # t5
|
| 730 |
+
),
|
| 731 |
+
|
| 732 |
+
MODEL_TENSOR.DEC_ATTN_Q: (
|
| 733 |
+
"decoder.block.{bid}.layer.0.SelfAttention.q", # t5
|
| 734 |
+
),
|
| 735 |
+
|
| 736 |
+
MODEL_TENSOR.DEC_ATTN_K: (
|
| 737 |
+
"decoder.block.{bid}.layer.0.SelfAttention.k", # t5
|
| 738 |
+
),
|
| 739 |
+
|
| 740 |
+
MODEL_TENSOR.DEC_ATTN_V: (
|
| 741 |
+
"decoder.block.{bid}.layer.0.SelfAttention.v", # t5
|
| 742 |
+
),
|
| 743 |
+
|
| 744 |
+
MODEL_TENSOR.DEC_ATTN_OUT: (
|
| 745 |
+
"decoder.block.{bid}.layer.0.SelfAttention.o", # t5
|
| 746 |
+
),
|
| 747 |
+
|
| 748 |
+
MODEL_TENSOR.DEC_ATTN_REL_B: (
|
| 749 |
+
"decoder.block.{bid}.layer.0.SelfAttention.relative_attention_bias", # t5
|
| 750 |
+
),
|
| 751 |
+
|
| 752 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_NORM: (
|
| 753 |
+
"decoder.block.{bid}.layer.1.layer_norm", # t5
|
| 754 |
+
),
|
| 755 |
+
|
| 756 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_Q: (
|
| 757 |
+
"decoder.block.{bid}.layer.1.EncDecAttention.q", # t5
|
| 758 |
+
),
|
| 759 |
+
|
| 760 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_K: (
|
| 761 |
+
"decoder.block.{bid}.layer.1.EncDecAttention.k", # t5
|
| 762 |
+
),
|
| 763 |
+
|
| 764 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_V: (
|
| 765 |
+
"decoder.block.{bid}.layer.1.EncDecAttention.v", # t5
|
| 766 |
+
),
|
| 767 |
+
|
| 768 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_OUT: (
|
| 769 |
+
"decoder.block.{bid}.layer.1.EncDecAttention.o", # t5
|
| 770 |
+
),
|
| 771 |
+
|
| 772 |
+
MODEL_TENSOR.DEC_CROSS_ATTN_REL_B: (
|
| 773 |
+
"decoder.block.{bid}.layer.1.EncDecAttention.relative_attention_bias", # t5
|
| 774 |
+
),
|
| 775 |
+
|
| 776 |
+
MODEL_TENSOR.DEC_FFN_NORM: (
|
| 777 |
+
"decoder.block.{bid}.layer.2.layer_norm", # t5
|
| 778 |
+
),
|
| 779 |
+
|
| 780 |
+
MODEL_TENSOR.DEC_FFN_GATE: (
|
| 781 |
+
"decoder.block.{bid}.layer.2.DenseReluDense.wi_0", # flan-t5
|
| 782 |
+
),
|
| 783 |
+
|
| 784 |
+
MODEL_TENSOR.DEC_FFN_UP: (
|
| 785 |
+
"decoder.block.{bid}.layer.2.DenseReluDense.wi", # t5
|
| 786 |
+
"decoder.block.{bid}.layer.2.DenseReluDense.wi_1", # flan-t5
|
| 787 |
+
),
|
| 788 |
+
|
| 789 |
+
MODEL_TENSOR.DEC_FFN_DOWN: (
|
| 790 |
+
"decoder.block.{bid}.layer.2.DenseReluDense.wo", # t5
|
| 791 |
+
),
|
| 792 |
+
|
| 793 |
+
MODEL_TENSOR.DEC_OUTPUT_NORM: (
|
| 794 |
+
"decoder.final_layer_norm", # t5
|
| 795 |
+
),
|
| 796 |
+
|
| 797 |
+
MODEL_TENSOR.ENC_ATTN_NORM: (
|
| 798 |
+
"encoder.block.{bid}.layer.0.layer_norm", # t5
|
| 799 |
+
),
|
| 800 |
+
|
| 801 |
+
MODEL_TENSOR.ENC_ATTN_Q: (
|
| 802 |
+
"encoder.block.{bid}.layer.0.SelfAttention.q", # t5
|
| 803 |
+
),
|
| 804 |
+
|
| 805 |
+
MODEL_TENSOR.ENC_ATTN_K: (
|
| 806 |
+
"encoder.block.{bid}.layer.0.SelfAttention.k", # t5
|
| 807 |
+
),
|
| 808 |
+
|
| 809 |
+
MODEL_TENSOR.ENC_ATTN_V: (
|
| 810 |
+
"encoder.block.{bid}.layer.0.SelfAttention.v", # t5
|
| 811 |
+
),
|
| 812 |
+
|
| 813 |
+
MODEL_TENSOR.ENC_ATTN_OUT: (
|
| 814 |
+
"encoder.block.{bid}.layer.0.SelfAttention.o", # t5
|
| 815 |
+
),
|
| 816 |
+
|
| 817 |
+
MODEL_TENSOR.ENC_ATTN_REL_B: (
|
| 818 |
+
"encoder.block.{bid}.layer.0.SelfAttention.relative_attention_bias", # t5
|
| 819 |
+
),
|
| 820 |
+
|
| 821 |
+
MODEL_TENSOR.ENC_FFN_NORM: (
|
| 822 |
+
"encoder.block.{bid}.layer.1.layer_norm", # t5
|
| 823 |
+
),
|
| 824 |
+
|
| 825 |
+
MODEL_TENSOR.ENC_FFN_GATE: (
|
| 826 |
+
"encoder.block.{bid}.layer.1.DenseReluDense.wi_0", # flan-t5
|
| 827 |
+
),
|
| 828 |
+
|
| 829 |
+
MODEL_TENSOR.ENC_FFN_UP: (
|
| 830 |
+
"encoder.block.{bid}.layer.1.DenseReluDense.wi", # t5
|
| 831 |
+
"encoder.block.{bid}.layer.1.DenseReluDense.wi_1", # flan-t5
|
| 832 |
+
),
|
| 833 |
+
|
| 834 |
+
MODEL_TENSOR.ENC_FFN_DOWN: (
|
| 835 |
+
"encoder.block.{bid}.layer.1.DenseReluDense.wo", # t5
|
| 836 |
+
),
|
| 837 |
+
|
| 838 |
+
############################################################################
|
| 839 |
+
# TODO: these do not belong to block_mappings_cfg - move them to mappings_cfg
|
| 840 |
+
MODEL_TENSOR.ENC_OUTPUT_NORM: (
|
| 841 |
+
"encoder.final_layer_norm", # t5
|
| 842 |
+
"layer_norm", # neobert
|
| 843 |
+
),
|
| 844 |
+
|
| 845 |
+
MODEL_TENSOR.CLS: (
|
| 846 |
+
"classifier", # jina
|
| 847 |
+
"classifier.dense", # roberta
|
| 848 |
+
"pre_classifier", # distillbert
|
| 849 |
+
"dense", # neobert
|
| 850 |
+
),
|
| 851 |
+
|
| 852 |
+
MODEL_TENSOR.CLS_OUT: (
|
| 853 |
+
"classifier.out_proj", # roberta
|
| 854 |
+
),
|
| 855 |
+
#############################################################################
|
| 856 |
+
|
| 857 |
+
MODEL_TENSOR.CONVNEXT_DW: (
|
| 858 |
+
"backbone.convnext.{bid}.dwconv", # wavtokenizer
|
| 859 |
+
),
|
| 860 |
+
|
| 861 |
+
MODEL_TENSOR.CONVNEXT_NORM: (
|
| 862 |
+
"backbone.convnext.{bid}.norm", # wavtokenizer
|
| 863 |
+
),
|
| 864 |
+
|
| 865 |
+
MODEL_TENSOR.CONVNEXT_PW1: (
|
| 866 |
+
"backbone.convnext.{bid}.pwconv1", # wavtokenizer
|
| 867 |
+
),
|
| 868 |
+
|
| 869 |
+
MODEL_TENSOR.CONVNEXT_PW2: (
|
| 870 |
+
"backbone.convnext.{bid}.pwconv2", # wavtokenizer
|
| 871 |
+
),
|
| 872 |
+
|
| 873 |
+
MODEL_TENSOR.CONVNEXT_GAMMA: (
|
| 874 |
+
"backbone.convnext.{bid}.gamma", # wavtokenizer
|
| 875 |
+
),
|
| 876 |
+
|
| 877 |
+
MODEL_TENSOR.POSNET_CONV1: (
|
| 878 |
+
"backbone.posnet.{bid}.conv1", # wavtokenizer
|
| 879 |
+
),
|
| 880 |
+
|
| 881 |
+
MODEL_TENSOR.POSNET_CONV2: (
|
| 882 |
+
"backbone.posnet.{bid}.conv2", # wavtokenizer
|
| 883 |
+
),
|
| 884 |
+
|
| 885 |
+
MODEL_TENSOR.POSNET_NORM: (
|
| 886 |
+
"backbone.posnet.{bid}.norm", # wavtokenizer
|
| 887 |
+
),
|
| 888 |
+
|
| 889 |
+
MODEL_TENSOR.POSNET_NORM1: (
|
| 890 |
+
"backbone.posnet.{bid}.norm1", # wavtokenizer
|
| 891 |
+
),
|
| 892 |
+
|
| 893 |
+
MODEL_TENSOR.POSNET_NORM2: (
|
| 894 |
+
"backbone.posnet.{bid}.norm2", # wavtokenizer
|
| 895 |
+
),
|
| 896 |
+
|
| 897 |
+
MODEL_TENSOR.POSNET_ATTN_NORM: (
|
| 898 |
+
"backbone.posnet.{bid}.norm", # wavtokenizer
|
| 899 |
+
),
|
| 900 |
+
|
| 901 |
+
MODEL_TENSOR.POSNET_ATTN_Q: (
|
| 902 |
+
"backbone.posnet.{bid}.q", # wavtokenizer
|
| 903 |
+
),
|
| 904 |
+
|
| 905 |
+
MODEL_TENSOR.POSNET_ATTN_K: (
|
| 906 |
+
"backbone.posnet.{bid}.k", # wavtokenizer
|
| 907 |
+
),
|
| 908 |
+
|
| 909 |
+
MODEL_TENSOR.POSNET_ATTN_V: (
|
| 910 |
+
"backbone.posnet.{bid}.v", # wavtokenizer
|
| 911 |
+
),
|
| 912 |
+
|
| 913 |
+
MODEL_TENSOR.POSNET_ATTN_OUT: (
|
| 914 |
+
"backbone.posnet.{bid}.proj_out", # wavtokenizer
|
| 915 |
+
),
|
| 916 |
+
|
| 917 |
+
#############################################################################
|
| 918 |
+
## Vision encoder
|
| 919 |
+
|
| 920 |
+
MODEL_TENSOR.V_MMPROJ: (
|
| 921 |
+
"multi_modal_projector.linear_{bid}",
|
| 922 |
+
"visual.merger.mlp.{bid}", # qwen2vl
|
| 923 |
+
),
|
| 924 |
+
|
| 925 |
+
MODEL_TENSOR.V_MMPROJ_FC: (
|
| 926 |
+
"model.connector.modality_projection.proj", # SmolVLM
|
| 927 |
+
),
|
| 928 |
+
|
| 929 |
+
MODEL_TENSOR.V_MMPROJ_MLP: (
|
| 930 |
+
"model.mm_projector.mlp.mlp.{bid}",
|
| 931 |
+
"vision_model.vision_adapter.mlp.fc{bid}", # llama 4
|
| 932 |
+
"mlp1.{bid}", # InternVL
|
| 933 |
+
),
|
| 934 |
+
|
| 935 |
+
MODEL_TENSOR.V_MMPROJ_PEG: (
|
| 936 |
+
"model.mm_projector.peg.peg.{bid}",
|
| 937 |
+
),
|
| 938 |
+
|
| 939 |
+
MODEL_TENSOR.V_ENC_EMBD_CLS: (
|
| 940 |
+
"vision_tower.vision_model.embeddings.class_embedding",
|
| 941 |
+
"vision_model.class_embedding", # llama 4
|
| 942 |
+
),
|
| 943 |
+
|
| 944 |
+
MODEL_TENSOR.V_ENC_EMBD_PATCH: (
|
| 945 |
+
"vision_tower.vision_model.embeddings.patch_embedding",
|
| 946 |
+
"vpm.embeddings.patch_embedding",
|
| 947 |
+
"model.vision_model.embeddings.patch_embedding", # SmolVLM
|
| 948 |
+
"vision_tower.patch_conv", # pixtral
|
| 949 |
+
"vision_model.patch_embedding.linear", # llama 4
|
| 950 |
+
"visual.patch_embed.proj", # qwen2vl
|
| 951 |
+
),
|
| 952 |
+
|
| 953 |
+
MODEL_TENSOR.V_ENC_EMBD_POS: (
|
| 954 |
+
"vision_tower.vision_model.embeddings.position_embedding",
|
| 955 |
+
"vpm.embeddings.position_embedding",
|
| 956 |
+
"model.vision_model.embeddings.position_embedding", # SmolVLM
|
| 957 |
+
"vision_model.positional_embedding_vlm", # llama 4
|
| 958 |
+
),
|
| 959 |
+
|
| 960 |
+
MODEL_TENSOR.V_ENC_ATTN_Q: (
|
| 961 |
+
"vision_tower.vision_model.encoder.layers.{bid}.self_attn.q_proj",
|
| 962 |
+
"vpm.encoder.layers.{bid}.self_attn.q_proj",
|
| 963 |
+
"model.vision_model.encoder.layers.{bid}.self_attn.q_proj", # SmolVLM
|
| 964 |
+
"vision_model.model.layers.{bid}.self_attn.q_proj", # llama4
|
| 965 |
+
"vision_tower.transformer.layers.{bid}.attention.q_proj", # pixtral
|
| 966 |
+
"visual.blocks.{bid}.attn.q", # qwen2vl, generated
|
| 967 |
+
),
|
| 968 |
+
|
| 969 |
+
MODEL_TENSOR.V_ENC_ATTN_Q_NORM: (
|
| 970 |
+
"vision_tower.vision_model.encoder.layers.{bid}.attn.q_norm", # InternVL
|
| 971 |
+
),
|
| 972 |
+
|
| 973 |
+
MODEL_TENSOR.V_ENC_ATTN_K: (
|
| 974 |
+
"vision_tower.vision_model.encoder.layers.{bid}.self_attn.k_proj",
|
| 975 |
+
"vpm.encoder.layers.{bid}.self_attn.k_proj",
|
| 976 |
+
"model.vision_model.encoder.layers.{bid}.self_attn.k_proj", # SmolVLM
|
| 977 |
+
"vision_model.model.layers.{bid}.self_attn.k_proj", # llama4
|
| 978 |
+
"vision_tower.transformer.layers.{bid}.attention.k_proj", # pixtral
|
| 979 |
+
"visual.blocks.{bid}.attn.k", # qwen2vl, generated
|
| 980 |
+
),
|
| 981 |
+
|
| 982 |
+
MODEL_TENSOR.V_ENC_ATTN_K_NORM: (
|
| 983 |
+
"vision_tower.vision_model.encoder.layers.{bid}.attn.k_norm", # InternVL
|
| 984 |
+
),
|
| 985 |
+
|
| 986 |
+
MODEL_TENSOR.V_ENC_ATTN_V: (
|
| 987 |
+
"vision_tower.vision_model.encoder.layers.{bid}.self_attn.v_proj",
|
| 988 |
+
"vpm.encoder.layers.{bid}.self_attn.v_proj",
|
| 989 |
+
"model.vision_model.encoder.layers.{bid}.self_attn.v_proj", # SmolVLM
|
| 990 |
+
"vision_model.model.layers.{bid}.self_attn.v_proj", # llama4
|
| 991 |
+
"vision_tower.transformer.layers.{bid}.attention.v_proj", # pixtral
|
| 992 |
+
"visual.blocks.{bid}.attn.v", # qwen2vl, generated
|
| 993 |
+
),
|
| 994 |
+
|
| 995 |
+
MODEL_TENSOR.V_ENC_INPUT_NORM: (
|
| 996 |
+
"vision_tower.vision_model.encoder.layers.{bid}.layer_norm1",
|
| 997 |
+
"vision_tower.vision_model.encoder.layers.{bid}.norm1", # InternVL
|
| 998 |
+
"vpm.encoder.layers.{bid}.layer_norm1",
|
| 999 |
+
"model.vision_model.encoder.layers.{bid}.layer_norm1", # SmolVLM
|
| 1000 |
+
"vision_tower.transformer.layers.{bid}.attention_norm", # pixtral
|
| 1001 |
+
"vision_model.model.layers.{bid}.input_layernorm", # llama4
|
| 1002 |
+
"visual.blocks.{bid}.norm1", # qwen2vl
|
| 1003 |
+
),
|
| 1004 |
+
|
| 1005 |
+
MODEL_TENSOR.V_ENC_ATTN_O: (
|
| 1006 |
+
"vision_tower.vision_model.encoder.layers.{bid}.self_attn.out_proj",
|
| 1007 |
+
"vision_tower.vision_model.encoder.layers.{bid}.attn.proj", # InternVL
|
| 1008 |
+
"vpm.encoder.layers.{bid}.self_attn.out_proj",
|
| 1009 |
+
"model.vision_model.encoder.layers.{bid}.self_attn.out_proj", # SmolVLM
|
| 1010 |
+
"vision_model.model.layers.{bid}.self_attn.o_proj", # llama4
|
| 1011 |
+
"vision_tower.transformer.layers.{bid}.attention.o_proj", # pixtral
|
| 1012 |
+
"visual.blocks.{bid}.attn.proj", # qwen2vl
|
| 1013 |
+
),
|
| 1014 |
+
|
| 1015 |
+
MODEL_TENSOR.V_ENC_POST_ATTN_NORM: (
|
| 1016 |
+
"vision_tower.vision_model.encoder.layers.{bid}.layer_norm2",
|
| 1017 |
+
"vision_tower.vision_model.encoder.layers.{bid}.norm2", # InternVL
|
| 1018 |
+
"vpm.encoder.layers.{bid}.layer_norm2",
|
| 1019 |
+
"model.vision_model.encoder.layers.{bid}.layer_norm2", # SmolVLM
|
| 1020 |
+
"vision_model.model.layers.{bid}.post_attention_layernorm", # llama4
|
| 1021 |
+
"vision_tower.transformer.layers.{bid}.ffn_norm", # pixtral
|
| 1022 |
+
"visual.blocks.{bid}.norm2", # qwen2vl
|
| 1023 |
+
),
|
| 1024 |
+
|
| 1025 |
+
MODEL_TENSOR.V_ENC_FFN_UP: (
|
| 1026 |
+
"vision_tower.vision_model.encoder.layers.{bid}.mlp.fc1",
|
| 1027 |
+
"vpm.encoder.layers.{bid}.mlp.fc1",
|
| 1028 |
+
"model.vision_model.encoder.layers.{bid}.mlp.fc1", # SmolVLM, gemma3
|
| 1029 |
+
"vision_tower.transformer.layers.{bid}.feed_forward.up_proj", # pixtral
|
| 1030 |
+
"vision_model.model.layers.{bid}.mlp.fc1", # llama4
|
| 1031 |
+
"visual.blocks.{bid}.mlp.fc1", # qwen2vl
|
| 1032 |
+
"visual.blocks.{bid}.mlp.up_proj", # qwen2.5vl
|
| 1033 |
+
),
|
| 1034 |
+
|
| 1035 |
+
MODEL_TENSOR.V_ENC_FFN_GATE: (
|
| 1036 |
+
"vision_tower.transformer.layers.{bid}.feed_forward.gate_proj", # pixtral
|
| 1037 |
+
"visual.blocks.{bid}.mlp.gate_proj", # qwen2.5vl
|
| 1038 |
+
),
|
| 1039 |
+
|
| 1040 |
+
MODEL_TENSOR.V_ENC_FFN_DOWN: (
|
| 1041 |
+
"vision_tower.vision_model.encoder.layers.{bid}.mlp.fc2",
|
| 1042 |
+
"vpm.encoder.layers.{bid}.mlp.fc2",
|
| 1043 |
+
"model.vision_model.encoder.layers.{bid}.mlp.fc2", # SmolVLM, gemma3
|
| 1044 |
+
"vision_tower.transformer.layers.{bid}.feed_forward.down_proj", # pixtral
|
| 1045 |
+
"vision_model.model.layers.{bid}.mlp.fc2", # llama4
|
| 1046 |
+
"visual.blocks.{bid}.mlp.fc2", # qwen2vl
|
| 1047 |
+
"visual.blocks.{bid}.mlp.down_proj", # qwen2.5vl
|
| 1048 |
+
),
|
| 1049 |
+
|
| 1050 |
+
MODEL_TENSOR.V_LAYER_SCALE_1: (
|
| 1051 |
+
"vision_tower.vision_model.encoder.layers.{bid}.ls1", # InternVL
|
| 1052 |
+
),
|
| 1053 |
+
|
| 1054 |
+
MODEL_TENSOR.V_LAYER_SCALE_2: (
|
| 1055 |
+
"vision_tower.vision_model.encoder.layers.{bid}.ls2", # InternVL
|
| 1056 |
+
),
|
| 1057 |
+
|
| 1058 |
+
MODEL_TENSOR.V_PRE_NORM: (
|
| 1059 |
+
"vision_tower.vision_model.pre_layrnorm",
|
| 1060 |
+
"vision_tower.ln_pre", # pixtral
|
| 1061 |
+
"vision_model.layernorm_pre", # llama4
|
| 1062 |
+
),
|
| 1063 |
+
|
| 1064 |
+
MODEL_TENSOR.V_POST_NORM: (
|
| 1065 |
+
"vision_tower.vision_model.post_layernorm",
|
| 1066 |
+
"model.vision_model.post_layernorm", # SmolVLM
|
| 1067 |
+
"vision_model.layernorm_post", # llama4
|
| 1068 |
+
"visual.merger.ln_q", # qwen2vl
|
| 1069 |
+
),
|
| 1070 |
+
|
| 1071 |
+
MODEL_TENSOR.V_MM_INP_PROJ: (
|
| 1072 |
+
"multi_modal_projector.mm_input_projection",
|
| 1073 |
+
),
|
| 1074 |
+
|
| 1075 |
+
MODEL_TENSOR.V_MM_INP_NORM: (
|
| 1076 |
+
"multi_modal_projector.norm",
|
| 1077 |
+
),
|
| 1078 |
+
|
| 1079 |
+
MODEL_TENSOR.V_MM_SOFT_EMB_NORM: (
|
| 1080 |
+
"multi_modal_projector.mm_soft_emb_norm",
|
| 1081 |
+
),
|
| 1082 |
+
|
| 1083 |
+
MODEL_TENSOR.V_RESMPL_POS_EMBD_K: (
|
| 1084 |
+
"resampler.pos_embed_k",
|
| 1085 |
+
),
|
| 1086 |
+
|
| 1087 |
+
MODEL_TENSOR.V_RESMPL_ATTN_Q: (
|
| 1088 |
+
"resampler.attn.in_proj_q", # tensor generated from resampler.attn.in_proj
|
| 1089 |
+
),
|
| 1090 |
+
|
| 1091 |
+
MODEL_TENSOR.V_RESMPL_ATTN_K: (
|
| 1092 |
+
"resampler.attn.in_proj_k", # tensor generated from resampler.attn.in_proj
|
| 1093 |
+
),
|
| 1094 |
+
|
| 1095 |
+
MODEL_TENSOR.V_RESMPL_ATTN_V: (
|
| 1096 |
+
"resampler.attn.in_proj_v", # tensor generated from resampler.attn.in_proj
|
| 1097 |
+
),
|
| 1098 |
+
|
| 1099 |
+
MODEL_TENSOR.V_RESMPL_ATTN_OUT: (
|
| 1100 |
+
"resampler.attn.out_proj",
|
| 1101 |
+
),
|
| 1102 |
+
|
| 1103 |
+
MODEL_TENSOR.V_RESMPL_KV: (
|
| 1104 |
+
"resampler.kv_proj",
|
| 1105 |
+
),
|
| 1106 |
+
|
| 1107 |
+
MODEL_TENSOR.V_RESMPL_POST_NORM: (
|
| 1108 |
+
"resampler.ln_post",
|
| 1109 |
+
),
|
| 1110 |
+
|
| 1111 |
+
MODEL_TENSOR.V_RESMPL_KV_NORM: (
|
| 1112 |
+
"resampler.ln_kv",
|
| 1113 |
+
),
|
| 1114 |
+
|
| 1115 |
+
MODEL_TENSOR.V_RESMPL_Q_NORM: (
|
| 1116 |
+
"resampler.ln_q",
|
| 1117 |
+
),
|
| 1118 |
+
|
| 1119 |
+
MODEL_TENSOR.V_RESMPL_PROJ: (
|
| 1120 |
+
"resampler.proj",
|
| 1121 |
+
),
|
| 1122 |
+
|
| 1123 |
+
MODEL_TENSOR.V_RESMPL_QUERY: (
|
| 1124 |
+
"resampler.query",
|
| 1125 |
+
),
|
| 1126 |
+
|
| 1127 |
+
MODEL_TENSOR.V_TOK_EMBD_IMG_BREAK: (
|
| 1128 |
+
"v.token_embd.img_break", # for pixtral, this is a generated vector
|
| 1129 |
+
),
|
| 1130 |
+
|
| 1131 |
+
MODEL_TENSOR.V_MM_PATCH_MERGER: (
|
| 1132 |
+
"multi_modal_projector.patch_merger.merging_layer", # mistral small 3.1
|
| 1133 |
+
),
|
| 1134 |
+
|
| 1135 |
+
# audio (mtmd)
|
| 1136 |
+
|
| 1137 |
+
MODEL_TENSOR.A_ENC_EMBD_POS: (
|
| 1138 |
+
"audio_tower.embed_positions", # ultravox
|
| 1139 |
+
),
|
| 1140 |
+
|
| 1141 |
+
MODEL_TENSOR.A_ENC_CONV1D: (
|
| 1142 |
+
"audio_tower.conv{bid}", # ultravox
|
| 1143 |
+
),
|
| 1144 |
+
|
| 1145 |
+
MODEL_TENSOR.A_PRE_NORM: (),
|
| 1146 |
+
|
| 1147 |
+
MODEL_TENSOR.A_POST_NORM: (
|
| 1148 |
+
"audio_tower.layer_norm", # ultravox
|
| 1149 |
+
"audio_tower.ln_post", # qwen2omni
|
| 1150 |
+
),
|
| 1151 |
+
|
| 1152 |
+
MODEL_TENSOR.A_ENC_ATTN_Q: (
|
| 1153 |
+
"audio_tower.layers.{bid}.self_attn.q_proj", # ultravox
|
| 1154 |
+
),
|
| 1155 |
+
|
| 1156 |
+
MODEL_TENSOR.A_ENC_ATTN_K: (
|
| 1157 |
+
"audio_tower.layers.{bid}.self_attn.k_proj", # ultravox
|
| 1158 |
+
),
|
| 1159 |
+
|
| 1160 |
+
MODEL_TENSOR.A_ENC_ATTN_V: (
|
| 1161 |
+
"audio_tower.layers.{bid}.self_attn.v_proj", # ultravox
|
| 1162 |
+
),
|
| 1163 |
+
|
| 1164 |
+
MODEL_TENSOR.A_ENC_INPUT_NORM: (
|
| 1165 |
+
"audio_tower.layers.{bid}.self_attn_layer_norm", # ultravox
|
| 1166 |
+
),
|
| 1167 |
+
|
| 1168 |
+
MODEL_TENSOR.A_ENC_OUTPUT: (
|
| 1169 |
+
"audio_tower.layers.{bid}.self_attn.out_proj", # ultravox
|
| 1170 |
+
),
|
| 1171 |
+
|
| 1172 |
+
MODEL_TENSOR.A_ENC_OUTPUT_NORM: (
|
| 1173 |
+
"audio_tower.layers.{bid}.final_layer_norm", # ultravox
|
| 1174 |
+
),
|
| 1175 |
+
|
| 1176 |
+
MODEL_TENSOR.A_ENC_FFN_UP: (
|
| 1177 |
+
"audio_tower.layers.{bid}.fc1", # ultravox
|
| 1178 |
+
),
|
| 1179 |
+
|
| 1180 |
+
MODEL_TENSOR.A_ENC_FFN_GATE: (),
|
| 1181 |
+
|
| 1182 |
+
MODEL_TENSOR.A_ENC_FFN_DOWN: (
|
| 1183 |
+
"audio_tower.layers.{bid}.fc2", # ultravox
|
| 1184 |
+
),
|
| 1185 |
+
|
| 1186 |
+
# note: some tensors below has "audio." pseudo-prefix, to prevent conflicts with vision tensors
|
| 1187 |
+
# this prefix is added in the conversion code in modify_tensors()
|
| 1188 |
+
|
| 1189 |
+
MODEL_TENSOR.A_MMPROJ: (
|
| 1190 |
+
"audio.multi_modal_projector.linear_{bid}", # ultravox
|
| 1191 |
+
),
|
| 1192 |
+
|
| 1193 |
+
MODEL_TENSOR.A_MMPROJ_FC: (
|
| 1194 |
+
"audio.multi_modal_projector.linear", # qwen2audio
|
| 1195 |
+
"audio_tower.proj", # qwen2omni
|
| 1196 |
+
),
|
| 1197 |
+
|
| 1198 |
+
MODEL_TENSOR.A_MM_NORM_PRE: (
|
| 1199 |
+
"audio.multi_modal_projector.ln_pre", # ultravox
|
| 1200 |
+
),
|
| 1201 |
+
|
| 1202 |
+
MODEL_TENSOR.A_MM_NORM_MID: (
|
| 1203 |
+
"audio.multi_modal_projector.ln_mid", # ultravox
|
| 1204 |
+
),
|
| 1205 |
+
}
|
| 1206 |
+
|
| 1207 |
+
# architecture-specific block mappings
# Per-architecture overrides layered on top of the generic block_mappings_cfg:
# for the listed MODEL_ARCH, these tensors come from different original
# checkpoint names than the generic table assumes.
arch_block_mappings_cfg: dict[MODEL_ARCH, dict[MODEL_TENSOR, tuple[str, ...]]] = {
    MODEL_ARCH.ARCTIC: {
        # Arctic swaps the roles of the two post-attention norms relative to
        # the generic llama-style layout.
        MODEL_TENSOR.FFN_NORM: (
            "model.layers.{bid}.residual_layernorm",
        ),
        MODEL_TENSOR.FFN_NORM_EXP: (
            "model.layers.{bid}.post_attention_layernorm",
        ),
    },
}
|
| 1218 |
+
|
| 1219 |
+
# Flattened lookup built in __init__: original checkpoint tensor name
# (or canonical gguf name) -> (tensor enum, formatted gguf tensor name).
mapping: dict[str, tuple[MODEL_TENSOR, str]]
|
| 1220 |
+
|
| 1221 |
+
def __init__(self, arch: MODEL_ARCH, n_blocks: int):
    """Build the flattened tensor-name lookup for one model.

    Parameters
    ----------
    arch:
        Target model architecture; tensors not listed in
        MODEL_TENSORS[arch] are skipped.
    n_blocks:
        Number of transformer blocks; per-block patterns are expanded
        once for every block id in range(n_blocks).
    """
    self.mapping = {}
    # Non-block (global) tensors: map both the canonical gguf name and
    # every known original checkpoint name to (tensor, gguf name).
    for tensor, keys in self.mappings_cfg.items():
        if tensor not in MODEL_TENSORS[arch]:
            continue
        tensor_name = TENSOR_NAMES[tensor]
        self.mapping[tensor_name] = (tensor, tensor_name)
        for key in keys:
            self.mapping[key] = (tensor, tensor_name)
    # Merge per-arch overrides into a *local* copy.  The original code
    # called self.block_mappings_cfg.update(...), which mutated the
    # class-level dict and leaked one architecture's overrides into every
    # TensorNameMap constructed afterwards for other architectures.
    block_mappings = dict(self.block_mappings_cfg)
    if arch in self.arch_block_mappings_cfg:
        block_mappings.update(self.arch_block_mappings_cfg[arch])
    # Per-block tensors: expand the {bid} placeholder for each block id.
    for bid in range(n_blocks):
        for tensor, keys in block_mappings.items():
            if tensor not in MODEL_TENSORS[arch]:
                continue

            tensor_name = TENSOR_NAMES[tensor].format(bid = bid)
            self.mapping[tensor_name] = (tensor, tensor_name)
            for key in keys:
                key = key.format(bid = bid)
                self.mapping[key] = (tensor, tensor_name)
|
| 1242 |
+
|
| 1243 |
+
def get_type_and_name(self, key: str, try_suffixes: Sequence[str] = ()) -> tuple[MODEL_TENSOR, str] | None:
    """Resolve *key* to (tensor enum, gguf name), or None if unknown.

    The key is first looked up verbatim; failing that, each suffix in
    *try_suffixes* (e.g. ".weight", ".bias") is stripped in turn and the
    remainder is looked up, with the suffix re-appended to the result.
    """
    hit = self.mapping.get(key)
    if hit is not None:
        return hit
    for suffix in try_suffixes:
        if not key.endswith(suffix):
            continue
        hit = self.mapping.get(key[:-len(suffix)])
        if hit is not None:
            tensor, name = hit
            return tensor, name + suffix
    return None
|
| 1253 |
+
|
| 1254 |
+
def get_name(self, key: str, try_suffixes: Sequence[str] = ()) -> str | None:
    """Return just the mapped gguf tensor name for *key*, or None."""
    match = self.get_type_and_name(key, try_suffixes = try_suffixes)
    return None if match is None else match[1]
|
| 1259 |
+
|
| 1260 |
+
def get_type(self, key: str, try_suffixes: Sequence[str] = ()) -> MODEL_TENSOR | None:
    """Return just the tensor enum mapped to *key*, or None."""
    match = self.get_type_and_name(key, try_suffixes = try_suffixes)
    return None if match is None else match[0]
|
| 1265 |
+
|
| 1266 |
+
def __getitem__(self, key: str) -> str:
    """Return the mapped gguf tensor name for *key*.

    Raises
    ------
    KeyError
        If *key* is not a known tensor name.
    """
    # The original wrapped this in try/except KeyError only to re-raise
    # KeyError(key) — but a plain dict lookup already raises exactly
    # KeyError(key), so the wrapper added nothing but a chained traceback.
    return self.mapping[key][1]
|
| 1271 |
+
|
| 1272 |
+
def __contains__(self, key: str) -> bool:
    """True if *key* is a recognized original or gguf tensor name."""
    found = key in self.mapping
    return found
|
| 1274 |
+
|
| 1275 |
+
def __repr__(self) -> str:
    """Debug representation: the full underlying mapping dict."""
    return f"{self.mapping!r}"
|
| 1277 |
+
|
| 1278 |
+
|
| 1279 |
+
def get_tensor_name_map(arch: MODEL_ARCH, n_blocks: int) -> TensorNameMap:
    """Build a TensorNameMap for *arch* covering *n_blocks* layers."""
    tensor_map = TensorNameMap(arch, n_blocks)
    return tensor_map
|
lib/python3.13/site-packages/isympy.py
ADDED
|
@@ -0,0 +1,342 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Python shell for SymPy.
|
| 3 |
+
|
| 4 |
+
This is just a normal Python shell (IPython shell if you have the
|
| 5 |
+
IPython package installed), that executes the following commands for
|
| 6 |
+
the user:
|
| 7 |
+
|
| 8 |
+
>>> from __future__ import division
|
| 9 |
+
>>> from sympy import *
|
| 10 |
+
>>> x, y, z, t = symbols('x y z t')
|
| 11 |
+
>>> k, m, n = symbols('k m n', integer=True)
|
| 12 |
+
>>> f, g, h = symbols('f g h', cls=Function)
|
| 13 |
+
>>> init_printing()
|
| 14 |
+
|
| 15 |
+
So starting 'isympy' is equivalent to starting Python (or IPython) and
|
| 16 |
+
executing the above commands by hand. It is intended for easy and quick
|
| 17 |
+
experimentation with SymPy. isympy is a good way to use SymPy as an
|
| 18 |
+
interactive calculator. If you have IPython and Matplotlib installed, then
|
| 19 |
+
interactive plotting is enabled by default.
|
| 20 |
+
|
| 21 |
+
COMMAND LINE OPTIONS
|
| 22 |
+
--------------------
|
| 23 |
+
|
| 24 |
+
-c CONSOLE, --console=CONSOLE
|
| 25 |
+
|
| 26 |
+
Use the specified shell (Python or IPython) shell as the console
|
| 27 |
+
backend instead of the default one (IPython if present, Python
|
| 28 |
+
otherwise), e.g.:
|
| 29 |
+
|
| 30 |
+
$isympy -c python
|
| 31 |
+
|
| 32 |
+
CONSOLE must be one of 'ipython' or 'python'
|
| 33 |
+
|
| 34 |
+
-p PRETTY, --pretty PRETTY
|
| 35 |
+
|
| 36 |
+
Setup pretty-printing in SymPy. When pretty-printing is enabled,
|
| 37 |
+
expressions can be printed with Unicode or ASCII. The default is
|
| 38 |
+
to use pretty-printing (with Unicode if the terminal supports it).
|
| 39 |
+
When this option is 'no', expressions will not be pretty-printed
|
| 40 |
+
and ASCII will be used:
|
| 41 |
+
|
| 42 |
+
$isympy -p no
|
| 43 |
+
|
| 44 |
+
PRETTY must be one of 'unicode', 'ascii', or 'no'
|
| 45 |
+
|
| 46 |
+
-t TYPES, --types=TYPES
|
| 47 |
+
|
| 48 |
+
Setup the ground types for the polys. By default, gmpy ground types
|
| 49 |
+
are used if gmpy2 or gmpy is installed, otherwise it falls back to python
|
| 50 |
+
ground types, which are a little bit slower. You can manually
|
| 51 |
+
choose python ground types even if gmpy is installed (e.g., for
|
| 52 |
+
testing purposes):
|
| 53 |
+
|
| 54 |
+
$isympy -t python
|
| 55 |
+
|
| 56 |
+
TYPES must be one of 'gmpy', 'gmpy1' or 'python'
|
| 57 |
+
|
| 58 |
+
Note that the ground type gmpy1 is primarily intended for testing; it
|
| 59 |
+
forces the use of gmpy version 1 even if gmpy2 is available.
|
| 60 |
+
|
| 61 |
+
This is the same as setting the environment variable
|
| 62 |
+
SYMPY_GROUND_TYPES to the given ground type (e.g.,
|
| 63 |
+
SYMPY_GROUND_TYPES='gmpy')
|
| 64 |
+
|
| 65 |
+
The ground types can be determined interactively from the variable
|
| 66 |
+
sympy.polys.domains.GROUND_TYPES.
|
| 67 |
+
|
| 68 |
+
-o ORDER, --order ORDER
|
| 69 |
+
|
| 70 |
+
Setup the ordering of terms for printing. The default is lex, which
|
| 71 |
+
orders terms lexicographically (e.g., x**2 + x + 1). You can choose
|
| 72 |
+
other orderings, such as rev-lex, which will use reverse
|
| 73 |
+
lexicographic ordering (e.g., 1 + x + x**2):
|
| 74 |
+
|
| 75 |
+
$isympy -o rev-lex
|
| 76 |
+
|
| 77 |
+
ORDER must be one of 'lex', 'rev-lex', 'grlex', 'rev-grlex',
|
| 78 |
+
'grevlex', 'rev-grevlex', 'old', or 'none'.
|
| 79 |
+
|
| 80 |
+
Note that for very large expressions, ORDER='none' may speed up
|
| 81 |
+
printing considerably but the terms will have no canonical order.
|
| 82 |
+
|
| 83 |
+
-q, --quiet
|
| 84 |
+
|
| 85 |
+
Print only Python's and SymPy's versions to stdout at startup.
|
| 86 |
+
|
| 87 |
+
-d, --doctest
|
| 88 |
+
|
| 89 |
+
Use the same format that should be used for doctests. This is
|
| 90 |
+
equivalent to -c python -p no.
|
| 91 |
+
|
| 92 |
+
-C, --no-cache
|
| 93 |
+
|
| 94 |
+
Disable the caching mechanism. Disabling the cache may slow certain
|
| 95 |
+
operations down considerably. This is useful for testing the cache,
|
| 96 |
+
or for benchmarking, as the cache can result in deceptive timings.
|
| 97 |
+
|
| 98 |
+
This is equivalent to setting the environment variable
|
| 99 |
+
SYMPY_USE_CACHE to 'no'.
|
| 100 |
+
|
| 101 |
+
-a, --auto-symbols (requires at least IPython 0.11)
|
| 102 |
+
|
| 103 |
+
Automatically create missing symbols. Normally, typing a name of a
|
| 104 |
+
Symbol that has not been instantiated first would raise NameError,
|
| 105 |
+
but with this option enabled, any undefined name will be
|
| 106 |
+
automatically created as a Symbol.
|
| 107 |
+
|
| 108 |
+
Note that this is intended only for interactive, calculator style
|
| 109 |
+
usage. In a script that uses SymPy, Symbols should be instantiated
|
| 110 |
+
at the top, so that it's clear what they are.
|
| 111 |
+
|
| 112 |
+
This will not override any names that are already defined, which
|
| 113 |
+
includes the single character letters represented by the mnemonic
|
| 114 |
+
QCOSINE (see the "Gotchas and Pitfalls" document in the
|
| 115 |
+
documentation). You can delete existing names by executing "del
|
| 116 |
+
name". If a name is defined, typing "'name' in dir()" will return True.
|
| 117 |
+
|
| 118 |
+
The Symbols that are created using this have default assumptions.
|
| 119 |
+
If you want to place assumptions on symbols, you should create them
|
| 120 |
+
using symbols() or var().
|
| 121 |
+
|
| 122 |
+
Finally, this only works in the top level namespace. So, for
|
| 123 |
+
example, if you define a function in isympy with an undefined
|
| 124 |
+
Symbol, it will not work.
|
| 125 |
+
|
| 126 |
+
See also the -i and -I options.
|
| 127 |
+
|
| 128 |
+
-i, --int-to-Integer (requires at least IPython 0.11)
|
| 129 |
+
|
| 130 |
+
Automatically wrap int literals with Integer. This makes it so that
|
| 131 |
+
things like 1/2 will come out as Rational(1, 2), rather than 0.5. This
|
| 132 |
+
works by preprocessing the source and wrapping all int literals with
|
| 133 |
+
Integer. Note that this will not change the behavior of int literals
|
| 134 |
+
assigned to variables, and it also won't change the behavior of functions
|
| 135 |
+
that return int literals.
|
| 136 |
+
|
| 137 |
+
If you want an int, you can wrap the literal in int(), e.g. int(3)/int(2)
|
| 138 |
+
gives 1.5 (with division imported from __future__).
|
| 139 |
+
|
| 140 |
+
-I, --interactive (requires at least IPython 0.11)
|
| 141 |
+
|
| 142 |
+
This is equivalent to --auto-symbols --int-to-Integer. Future options
|
| 143 |
+
designed for ease of interactive use may be added to this.
|
| 144 |
+
|
| 145 |
+
-D, --debug
|
| 146 |
+
|
| 147 |
+
Enable debugging output. This is the same as setting the
|
| 148 |
+
environment variable SYMPY_DEBUG to 'True'. The debug status is set
|
| 149 |
+
in the variable SYMPY_DEBUG within isympy.
|
| 150 |
+
|
| 151 |
+
-- IPython options
|
| 152 |
+
|
| 153 |
+
Additionally you can pass command line options directly to the IPython
|
| 154 |
+
interpreter (the standard Python shell is not supported). However you
|
| 155 |
+
need to add the '--' separator between two types of options, e.g. the
|
| 156 |
+
startup banner option and the colors option. You need to enter the
|
| 157 |
+
options as required by the version of IPython that you are using, too:
|
| 158 |
+
|
| 159 |
+
in IPython 0.11,
|
| 160 |
+
|
| 161 |
+
$isympy -q -- --colors=NoColor
|
| 162 |
+
|
| 163 |
+
or older versions of IPython,
|
| 164 |
+
|
| 165 |
+
$isympy -q -- -colors NoColor
|
| 166 |
+
|
| 167 |
+
See also isympy --help.
|
| 168 |
+
"""
|
| 169 |
+
|
| 170 |
+
import os
|
| 171 |
+
import sys
|
| 172 |
+
|
| 173 |
+
# DO NOT IMPORT SYMPY HERE! Or the setting of the sympy environment variables
|
| 174 |
+
# by the command line will break.
|
| 175 |
+
|
| 176 |
+
def main() -> None:
|
| 177 |
+
from argparse import ArgumentParser, RawDescriptionHelpFormatter
|
| 178 |
+
|
| 179 |
+
VERSION = None
|
| 180 |
+
if '--version' in sys.argv:
|
| 181 |
+
# We cannot import sympy before this is run, because flags like -C and
|
| 182 |
+
# -t set environment variables that must be set before SymPy is
|
| 183 |
+
# imported. The only thing we need to import it for is to get the
|
| 184 |
+
# version, which only matters with the --version flag.
|
| 185 |
+
import sympy
|
| 186 |
+
VERSION = sympy.__version__
|
| 187 |
+
|
| 188 |
+
usage = 'isympy [options] -- [ipython options]'
|
| 189 |
+
parser = ArgumentParser(
|
| 190 |
+
usage=usage,
|
| 191 |
+
description=__doc__,
|
| 192 |
+
formatter_class=RawDescriptionHelpFormatter,
|
| 193 |
+
)
|
| 194 |
+
|
| 195 |
+
parser.add_argument('--version', action='version', version=VERSION)
|
| 196 |
+
|
| 197 |
+
parser.add_argument(
|
| 198 |
+
'-c', '--console',
|
| 199 |
+
dest='console',
|
| 200 |
+
action='store',
|
| 201 |
+
default=None,
|
| 202 |
+
choices=['ipython', 'python'],
|
| 203 |
+
metavar='CONSOLE',
|
| 204 |
+
help='select type of interactive session: ipython | python; defaults '
|
| 205 |
+
'to ipython if IPython is installed, otherwise python')
|
| 206 |
+
|
| 207 |
+
parser.add_argument(
|
| 208 |
+
'-p', '--pretty',
|
| 209 |
+
dest='pretty',
|
| 210 |
+
action='store',
|
| 211 |
+
default=None,
|
| 212 |
+
metavar='PRETTY',
|
| 213 |
+
choices=['unicode', 'ascii', 'no'],
|
| 214 |
+
help='setup pretty printing: unicode | ascii | no; defaults to '
|
| 215 |
+
'unicode printing if the terminal supports it, otherwise ascii')
|
| 216 |
+
|
| 217 |
+
parser.add_argument(
|
| 218 |
+
'-t', '--types',
|
| 219 |
+
dest='types',
|
| 220 |
+
action='store',
|
| 221 |
+
default=None,
|
| 222 |
+
metavar='TYPES',
|
| 223 |
+
choices=['gmpy', 'gmpy1', 'python'],
|
| 224 |
+
help='setup ground types: gmpy | gmpy1 | python; defaults to gmpy if gmpy2 '
|
| 225 |
+
'or gmpy is installed, otherwise python')
|
| 226 |
+
|
| 227 |
+
parser.add_argument(
|
| 228 |
+
'-o', '--order',
|
| 229 |
+
dest='order',
|
| 230 |
+
action='store',
|
| 231 |
+
default=None,
|
| 232 |
+
metavar='ORDER',
|
| 233 |
+
choices=['lex', 'grlex', 'grevlex', 'rev-lex', 'rev-grlex', 'rev-grevlex', 'old', 'none'],
|
| 234 |
+
help='setup ordering of terms: [rev-]lex | [rev-]grlex | [rev-]grevlex | old | none; defaults to lex')
|
| 235 |
+
|
| 236 |
+
parser.add_argument(
|
| 237 |
+
'-q', '--quiet',
|
| 238 |
+
dest='quiet',
|
| 239 |
+
action='store_true',
|
| 240 |
+
default=False,
|
| 241 |
+
help='print only version information at startup')
|
| 242 |
+
|
| 243 |
+
parser.add_argument(
|
| 244 |
+
'-d', '--doctest',
|
| 245 |
+
dest='doctest',
|
| 246 |
+
action='store_true',
|
| 247 |
+
default=False,
|
| 248 |
+
help='use the doctest format for output (you can just copy and paste it)')
|
| 249 |
+
|
| 250 |
+
parser.add_argument(
|
| 251 |
+
'-C', '--no-cache',
|
| 252 |
+
dest='cache',
|
| 253 |
+
action='store_false',
|
| 254 |
+
default=True,
|
| 255 |
+
help='disable caching mechanism')
|
| 256 |
+
|
| 257 |
+
parser.add_argument(
|
| 258 |
+
'-a', '--auto-symbols',
|
| 259 |
+
dest='auto_symbols',
|
| 260 |
+
action='store_true',
|
| 261 |
+
default=False,
|
| 262 |
+
help='automatically construct missing symbols')
|
| 263 |
+
|
| 264 |
+
parser.add_argument(
|
| 265 |
+
'-i', '--int-to-Integer',
|
| 266 |
+
dest='auto_int_to_Integer',
|
| 267 |
+
action='store_true',
|
| 268 |
+
default=False,
|
| 269 |
+
help="automatically wrap int literals with Integer")
|
| 270 |
+
|
| 271 |
+
parser.add_argument(
|
| 272 |
+
'-I', '--interactive',
|
| 273 |
+
dest='interactive',
|
| 274 |
+
action='store_true',
|
| 275 |
+
default=False,
|
| 276 |
+
help="equivalent to -a -i")
|
| 277 |
+
|
| 278 |
+
parser.add_argument(
|
| 279 |
+
'-D', '--debug',
|
| 280 |
+
dest='debug',
|
| 281 |
+
action='store_true',
|
| 282 |
+
default=False,
|
| 283 |
+
help='enable debugging output')
|
| 284 |
+
|
| 285 |
+
(options, ipy_args) = parser.parse_known_args()
|
| 286 |
+
if '--' in ipy_args:
|
| 287 |
+
ipy_args.remove('--')
|
| 288 |
+
|
| 289 |
+
if not options.cache:
|
| 290 |
+
os.environ['SYMPY_USE_CACHE'] = 'no'
|
| 291 |
+
|
| 292 |
+
if options.types:
|
| 293 |
+
os.environ['SYMPY_GROUND_TYPES'] = options.types
|
| 294 |
+
|
| 295 |
+
if options.debug:
|
| 296 |
+
os.environ['SYMPY_DEBUG'] = str(options.debug)
|
| 297 |
+
|
| 298 |
+
if options.doctest:
|
| 299 |
+
options.pretty = 'no'
|
| 300 |
+
options.console = 'python'
|
| 301 |
+
|
| 302 |
+
session = options.console
|
| 303 |
+
|
| 304 |
+
if session is not None:
|
| 305 |
+
ipython = session == 'ipython'
|
| 306 |
+
else:
|
| 307 |
+
try:
|
| 308 |
+
import IPython # noqa: F401
|
| 309 |
+
ipython = True
|
| 310 |
+
except ImportError:
|
| 311 |
+
if not options.quiet:
|
| 312 |
+
from sympy.interactive.session import no_ipython
|
| 313 |
+
print(no_ipython)
|
| 314 |
+
ipython = False
|
| 315 |
+
|
| 316 |
+
args = {
|
| 317 |
+
'pretty_print': True,
|
| 318 |
+
'use_unicode': None,
|
| 319 |
+
'use_latex': None,
|
| 320 |
+
'order': None,
|
| 321 |
+
'argv': ipy_args,
|
| 322 |
+
}
|
| 323 |
+
|
| 324 |
+
if options.pretty == 'unicode':
|
| 325 |
+
args['use_unicode'] = True
|
| 326 |
+
elif options.pretty == 'ascii':
|
| 327 |
+
args['use_unicode'] = False
|
| 328 |
+
elif options.pretty == 'no':
|
| 329 |
+
args['pretty_print'] = False
|
| 330 |
+
|
| 331 |
+
if options.order is not None:
|
| 332 |
+
args['order'] = options.order
|
| 333 |
+
|
| 334 |
+
args['quiet'] = options.quiet
|
| 335 |
+
args['auto_symbols'] = options.auto_symbols or options.interactive
|
| 336 |
+
args['auto_int_to_Integer'] = options.auto_int_to_Integer or options.interactive
|
| 337 |
+
|
| 338 |
+
from sympy.interactive import init_session
|
| 339 |
+
init_session(ipython, **args)
|
| 340 |
+
|
| 341 |
+
if __name__ == "__main__":
|
| 342 |
+
main()
|
lib/python3.13/site-packages/lark-1.2.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
uv
|
lib/python3.13/site-packages/lark-1.2.2.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright © 2017 Erez Shinan
|
| 2 |
+
|
| 3 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 4 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 5 |
+
the Software without restriction, including without limitation the rights to
|
| 6 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
| 7 |
+
the Software, and to permit persons to whom the Software is furnished to do so,
|
| 8 |
+
subject to the following conditions:
|
| 9 |
+
|
| 10 |
+
The above copyright notice and this permission notice shall be included in all
|
| 11 |
+
copies or substantial portions of the Software.
|
| 12 |
+
|
| 13 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 14 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
| 15 |
+
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
| 16 |
+
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
| 17 |
+
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
| 18 |
+
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
lib/python3.13/site-packages/lark-1.2.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: lark
|
| 3 |
+
Version: 1.2.2
|
| 4 |
+
Summary: a modern parsing library
|
| 5 |
+
Author-email: Erez Shinan <erezshin@gmail.com>
|
| 6 |
+
License: MIT
|
| 7 |
+
Project-URL: Homepage, https://github.com/lark-parser/lark
|
| 8 |
+
Project-URL: Download, https://github.com/lark-parser/lark/tarball/master
|
| 9 |
+
Keywords: Earley,LALR,parser,parsing,ast
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Programming Language :: Python :: 3
|
| 13 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 14 |
+
Classifier: Topic :: Text Processing :: General
|
| 15 |
+
Classifier: Topic :: Text Processing :: Linguistic
|
| 16 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 17 |
+
Requires-Python: >=3.8
|
| 18 |
+
Description-Content-Type: text/markdown
|
| 19 |
+
License-File: LICENSE
|
| 20 |
+
Provides-Extra: atomic_cache
|
| 21 |
+
Requires-Dist: atomicwrites ; extra == 'atomic_cache'
|
| 22 |
+
Provides-Extra: interegular
|
| 23 |
+
Requires-Dist: interegular <0.4.0,>=0.3.1 ; extra == 'interegular'
|
| 24 |
+
Provides-Extra: nearley
|
| 25 |
+
Requires-Dist: js2py ; extra == 'nearley'
|
| 26 |
+
Provides-Extra: regex
|
| 27 |
+
Requires-Dist: regex ; extra == 'regex'
|
| 28 |
+
|
| 29 |
+
Lark is a modern general-purpose parsing library for Python.
|
| 30 |
+
With Lark, you can parse any context-free grammar, efficiently, with very little code.
|
| 31 |
+
Main Features:
|
| 32 |
+
- Builds a parse-tree (AST) automagically, based on the structure of the grammar
|
| 33 |
+
- Earley parser
|
| 34 |
+
- Can parse all context-free grammars
|
| 35 |
+
- Full support for ambiguous grammars
|
| 36 |
+
- LALR(1) parser
|
| 37 |
+
- Fast and light, competitive with PLY
|
| 38 |
+
- Can generate a stand-alone parser
|
| 39 |
+
- CYK parser, for highly ambiguous grammars
|
| 40 |
+
- EBNF grammar
|
| 41 |
+
- Unicode fully supported
|
| 42 |
+
- Automatic line & column tracking
|
| 43 |
+
- Standard library of terminals (strings, numbers, names, etc.)
|
| 44 |
+
- Import grammars from Nearley.js
|
| 45 |
+
- Extensive test suite
|
| 46 |
+
- And much more!
|
| 47 |
+
Since version 1.2, only Python versions 3.8 and up are supported.
|
lib/python3.13/site-packages/lark-1.2.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
lark-1.2.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
|
| 2 |
+
lark-1.2.2.dist-info/LICENSE,sha256=Lu5g9S1OETV7-J5ysDTQUOKF5H_aE2HlZi-zIu4n13E,1055
|
| 3 |
+
lark-1.2.2.dist-info/METADATA,sha256=S-69HuNJr0ktlvb7J5XE48ghb_6ahYn8ksdW9HcB-d0,1831
|
| 4 |
+
lark-1.2.2.dist-info/RECORD,,
|
| 5 |
+
lark-1.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
lark-1.2.2.dist-info/WHEEL,sha256=HiCZjzuy6Dw0hdX5R3LCFPDmFS4BWl8H-8W39XfmgX4,91
|
| 7 |
+
lark-1.2.2.dist-info/entry_points.txt,sha256=WXYg_uCUdFlxQDPUhli3HFah37bNNFQfXLdzCqsacGI,61
|
| 8 |
+
lark-1.2.2.dist-info/top_level.txt,sha256=dyS6jg8hCHHkXWvsfcIMO8rjlv_bdzAxiE0lkkzJ5hk,5
|
| 9 |
+
lark/__init__.py,sha256=bc0tK7h7XwHA-Y4vVeJoNIqSMA-MHVTihq8yy795WXo,744
|
| 10 |
+
lark/__pyinstaller/__init__.py,sha256=_PpFm44f_mwHlCpvYgv9ZgubLfNDc3PlePVir4sxRfI,182
|
| 11 |
+
lark/__pyinstaller/hook-lark.py,sha256=5aFHiZWVHPRdHT8qnb4kW4JSOql5GusHodHR25_q9sU,599
|
| 12 |
+
lark/ast_utils.py,sha256=jwn44ocNQhZGbfcFsEZnwi_gGvPbNgzjQ-0RuEtwDzI,2117
|
| 13 |
+
lark/common.py,sha256=M9-CFAUP3--OkftyyWjke-Kc1-pQMczT1MluHCFwdy4,3008
|
| 14 |
+
lark/exceptions.py,sha256=g76ygMPfSMl6ukKqFAZVpR2EAJTOOdyfJ_ALXc_MCR8,10939
|
| 15 |
+
lark/grammar.py,sha256=DR17QSLSKCRhMOqx2UQh4n-Ywu4CD-wjdQxtuM8OHkY,3665
|
| 16 |
+
lark/grammars/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 17 |
+
lark/grammars/common.lark,sha256=FV9xGIPiPqHRM4ULAxP6jApXRTVsSwbOe697I9s7DLs,885
|
| 18 |
+
lark/grammars/lark.lark,sha256=nq1NTZYqm_DPI2mjRIlpd3ZcxPjGhapA4GUzkcfBTQs,1541
|
| 19 |
+
lark/grammars/python.lark,sha256=WMakTkpzCqOd0jUjYONI3LOnSy2KRN9NoL9pFtAZYCI,10641
|
| 20 |
+
lark/grammars/unicode.lark,sha256=d9YCz0XWimdl4F8M5YCptavBcFG9D58Yd4aMwxjYtEI,96
|
| 21 |
+
lark/indenter.py,sha256=L5uNDYUMNrk4ZTWKmW0Tu-H-3GGErLOHygMC32N_twE,4221
|
| 22 |
+
lark/lark.py,sha256=_IHWmTxt43kfd9eYVtwx58zEWWSFAq9_gKH7Oeu5PZs,28184
|
| 23 |
+
lark/lexer.py,sha256=OwgQPCpQ-vUi-2aeZztsydd4DLkEgCbZeucvEPvHFi4,24037
|
| 24 |
+
lark/load_grammar.py,sha256=WYZDxyO6omhA8NKyMjSckfAMwVKuIMF3liiYXE_-kHo,53946
|
| 25 |
+
lark/parse_tree_builder.py,sha256=jT_3gCEkBGZoTXAWSnhMn1kRuJILWB-E7XkUciYNHI4,14412
|
| 26 |
+
lark/parser_frontends.py,sha256=mxMXxux2hkfTfE859wuVp4-Fr1no6YVEUt8toDjEdPQ,10165
|
| 27 |
+
lark/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 28 |
+
lark/parsers/cyk.py,sha256=c3GLk3kq23Xwb8MqUOjvivwP488KJY6NUWgxqeR5980,12192
|
| 29 |
+
lark/parsers/earley.py,sha256=03sW9vfBkcH4NR72EBt8HkndDKSVSH3IdRnDulXWy24,15117
|
| 30 |
+
lark/parsers/earley_common.py,sha256=e2e6NrNucw-WMiNV8HqQ_TpGx6P7v_S8f5aEcF0Tkqo,1620
|
| 31 |
+
lark/parsers/earley_forest.py,sha256=w4JTb4tVMewue8dL-gCO96-Uo0wd4BbQUfSfIhr7txY,31332
|
| 32 |
+
lark/parsers/grammar_analysis.py,sha256=rQ4Sn9EP8gjXGTZXEiWLW0KByPPpeKpN5hSIQZgNl3I,7141
|
| 33 |
+
lark/parsers/lalr_analysis.py,sha256=DGHFk2tIluIyeFEVFfsMRU77DVbd598IJnUUOXO04yo,12207
|
| 34 |
+
lark/parsers/lalr_interactive_parser.py,sha256=LsgfT1gdne8pXHTCsN6bl6zD6Pdh2dDqp1rIWOzp7Yw,5757
|
| 35 |
+
lark/parsers/lalr_parser.py,sha256=6U8jP1AlUsuGxgJBWMq15WuGuyaolsLPevcf8HZ_zZk,4586
|
| 36 |
+
lark/parsers/lalr_parser_state.py,sha256=QZ12p4CtvcvFAIKIqkeDBJYgEU3ntQllBJDYXb419ls,3793
|
| 37 |
+
lark/parsers/xearley.py,sha256=DboXMNtuN0G-SXrrDm5zgUDUekz85h0Rih2PRvcf1LM,7825
|
| 38 |
+
lark/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 39 |
+
lark/reconstruct.py,sha256=s7CevBXchUG_fe2otdAITxIaSXCEIiSjy4Sbh5QC0hs,3763
|
| 40 |
+
lark/tools/__init__.py,sha256=FeKYmVUjXSt-vlQm2ktyWkcxaOCTOkZnHD_kOUWjUuA,2469
|
| 41 |
+
lark/tools/nearley.py,sha256=QaLYdW6mYQdDq8JKMisV3lvPqzF0wPgu8q8BtsSA33g,6265
|
| 42 |
+
lark/tools/serialize.py,sha256=nwt46LNxkDm0T_Uh9k2wS4fcfgvZQ2dy4-YC_aKhTQk,965
|
| 43 |
+
lark/tools/standalone.py,sha256=6eXDqBuzZSpE5BGZm_Fh6X5yRhAPYxNVyl2aUU3ABzA,5627
|
| 44 |
+
lark/tree.py,sha256=aWWHMazid8bbJanhmCjK9XK2jRFJ6N6WmlwXJGTsz28,8522
|
| 45 |
+
lark/tree_matcher.py,sha256=jHdZJggn405SXmPpGf9U9HLrrsfP4eNNZaj267UTB00,6003
|
| 46 |
+
lark/tree_templates.py,sha256=sSnfw1m8txAkJOYhcQrooG7xajVyVplunzTnNsxY720,6139
|
| 47 |
+
lark/utils.py,sha256=3qd1-c0YgHYklvx1hA28qF7N_Ty1Zz6TbtCFMzQanNk,11270
|
| 48 |
+
lark/visitors.py,sha256=VJ3T1m8p78MwXJotpOAvn06mYEqKyuIlhsAF51U-a3w,21422
|
lib/python3.13/site-packages/lark-1.2.2.dist-info/REQUESTED
ADDED
|
File without changes
|
lib/python3.13/site-packages/lark-1.2.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (72.2.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
lib/python3.13/site-packages/lark-1.2.2.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
lark
|