Bleak commited on
Commit
a5a3a6c
·
verified ·
1 Parent(s): 885d64d

Upload 37 files

Browse files
Files changed (38) hide show
  1. .gitattributes +24 -0
  2. Dockerfile +57 -0
  3. entrypoint.sh +9 -0
  4. llama-b6795-bin-ubuntu-x64/build/bin/LICENSE +21 -0
  5. llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-curl +9 -0
  6. llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-httplib +21 -0
  7. llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-jsonhpp +21 -0
  8. llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-linenoise +26 -0
  9. llama-b6795-bin-ubuntu-x64/build/bin/libggml-base.so +3 -0
  10. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-alderlake.so +3 -0
  11. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-haswell.so +3 -0
  12. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-icelake.so +3 -0
  13. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sandybridge.so +3 -0
  14. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sapphirerapids.so +3 -0
  15. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-skylakex.so +3 -0
  16. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sse42.so +3 -0
  17. llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-x64.so +3 -0
  18. llama-b6795-bin-ubuntu-x64/build/bin/libggml-rpc.so +3 -0
  19. llama-b6795-bin-ubuntu-x64/build/bin/libggml.so +0 -0
  20. llama-b6795-bin-ubuntu-x64/build/bin/libllama.so +3 -0
  21. llama-b6795-bin-ubuntu-x64/build/bin/libmtmd.so +3 -0
  22. llama-b6795-bin-ubuntu-x64/build/bin/llama-batched-bench +3 -0
  23. llama-b6795-bin-ubuntu-x64/build/bin/llama-bench +3 -0
  24. llama-b6795-bin-ubuntu-x64/build/bin/llama-cli +3 -0
  25. llama-b6795-bin-ubuntu-x64/build/bin/llama-gemma3-cli +0 -0
  26. llama-b6795-bin-ubuntu-x64/build/bin/llama-gguf-split +0 -0
  27. llama-b6795-bin-ubuntu-x64/build/bin/llama-imatrix +3 -0
  28. llama-b6795-bin-ubuntu-x64/build/bin/llama-llava-cli +0 -0
  29. llama-b6795-bin-ubuntu-x64/build/bin/llama-minicpmv-cli +0 -0
  30. llama-b6795-bin-ubuntu-x64/build/bin/llama-mtmd-cli +3 -0
  31. llama-b6795-bin-ubuntu-x64/build/bin/llama-perplexity +3 -0
  32. llama-b6795-bin-ubuntu-x64/build/bin/llama-quantize +3 -0
  33. llama-b6795-bin-ubuntu-x64/build/bin/llama-qwen2vl-cli +0 -0
  34. llama-b6795-bin-ubuntu-x64/build/bin/llama-run +3 -0
  35. llama-b6795-bin-ubuntu-x64/build/bin/llama-server +3 -0
  36. llama-b6795-bin-ubuntu-x64/build/bin/llama-tokenize +3 -0
  37. llama-b6795-bin-ubuntu-x64/build/bin/llama-tts +3 -0
  38. llama-b6795-bin-ubuntu-x64/build/bin/rpc-server +3 -0
.gitattributes CHANGED
@@ -33,3 +33,27 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-base.so filter=lfs diff=lfs merge=lfs -text
37
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-alderlake.so filter=lfs diff=lfs merge=lfs -text
38
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-haswell.so filter=lfs diff=lfs merge=lfs -text
39
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-icelake.so filter=lfs diff=lfs merge=lfs -text
40
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sandybridge.so filter=lfs diff=lfs merge=lfs -text
41
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sapphirerapids.so filter=lfs diff=lfs merge=lfs -text
42
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-skylakex.so filter=lfs diff=lfs merge=lfs -text
43
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sse42.so filter=lfs diff=lfs merge=lfs -text
44
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-x64.so filter=lfs diff=lfs merge=lfs -text
45
+ llama-b6795-bin-ubuntu-x64/build/bin/libggml-rpc.so filter=lfs diff=lfs merge=lfs -text
46
+ llama-b6795-bin-ubuntu-x64/build/bin/libllama.so filter=lfs diff=lfs merge=lfs -text
47
+ llama-b6795-bin-ubuntu-x64/build/bin/libmtmd.so filter=lfs diff=lfs merge=lfs -text
48
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-batched-bench filter=lfs diff=lfs merge=lfs -text
49
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-bench filter=lfs diff=lfs merge=lfs -text
50
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-cli filter=lfs diff=lfs merge=lfs -text
51
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-imatrix filter=lfs diff=lfs merge=lfs -text
52
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-mtmd-cli filter=lfs diff=lfs merge=lfs -text
53
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-perplexity filter=lfs diff=lfs merge=lfs -text
54
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-quantize filter=lfs diff=lfs merge=lfs -text
55
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-run filter=lfs diff=lfs merge=lfs -text
56
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-server filter=lfs diff=lfs merge=lfs -text
57
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-tokenize filter=lfs diff=lfs merge=lfs -text
58
+ llama-b6795-bin-ubuntu-x64/build/bin/llama-tts filter=lfs diff=lfs merge=lfs -text
59
+ llama-b6795-bin-ubuntu-x64/build/bin/rpc-server filter=lfs diff=lfs merge=lfs -text
Dockerfile ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Ollama-API By BleakPrestiger
# Single-stage image: Ubuntu base + prebuilt llama.cpp binaries + a GGUF
# model baked in at build time. The server is launched by entrypoint.sh
# at container run time (NOT during the build — a backgrounded process in
# a RUN step dies as soon as that layer finishes building).
FROM ubuntu:latest

WORKDIR /app

# Copy the whole build context (llama.cpp binaries, entrypoint.sh, ...)
# into /app. NOTE: the original `COPY ..` is invalid — COPY requires both
# a source and a destination.
COPY . .

# Install runtime tools AND python3/pip first — the bare ubuntu image
# ships with neither, so `pip install` must come after this step.
RUN apt-get update && apt-get upgrade -y && apt-get install -y \
        curl \
        wget \
        gnupg \
        python3 \
        python3-pip \
    && rm -rf /var/lib/apt/lists/*

# huggingface_hub is needed only for the model download below.
# --break-system-packages is required on recent Ubuntu (PEP 668).
RUN pip3 install --break-system-packages huggingface_hub

# Download the model during the build process (lands in /app).
RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='Qwen/Qwen3-1.7B-GGUF', filename='Qwen3-1.7B-Q8_0.gguf', local_dir='./')"

# Mark the server binary executable. Do NOT start it here — see note at top.
RUN chmod +x /app/llama-b6795-bin-ubuntu-x64/build/bin/llama-server

# Install the entry point script. The original commented this COPY out but
# still ran `chmod +x /entrypoint.sh`, which fails: the context copy puts
# the script at /app/entrypoint.sh, not /entrypoint.sh.
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

# Set the entry point script as the default command
ENTRYPOINT ["/entrypoint.sh"]
#CMD ["ollama", "serve"]

# Expose the server port (entrypoint.sh must bind llama-server to 7860).
EXPOSE 7860

# --- Retained commented-out alternatives from the original file ---

# Add NVIDIA package repositories
#RUN curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
#&& echo "deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://nvidia.github.io/libnvidia-container/stable/deb/ $(. /etc/os-release; echo $UBUNTU_CODENAME) main" > /etc/apt/sources.list.d/nvidia-container-toolkit.list

# Install NVIDIA container toolkit (Check for any updated methods or URLs for Ubuntu jammy)
#RUN apt-get update && apt-get install -y nvidia-container-toolkit || true

# Install application
#RUN curl https://ollama.ai/install.sh | sh
# Below is to fix embedding bug as per
# RUN curl -fsSL https://ollama.com/install.sh | sed 's#https://ollama.com/download#https://github.com/jmorganca/ollama/releases/download/v0.1.29#' | sh

# Create the directory and give appropriate permissions
#RUN mkdir -p /.ollama && chmod 777 /.ollama

#WORKDIR /.ollama

# Set the model as an environment variable (this can be overridden)
#ENV model=${model}
entrypoint.sh ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash

# Container entry point: start the llama.cpp HTTP server and keep the
# container alive for as long as the server runs.

echo "Starting Llama-Cpp server"
sleep 10

# Fixes vs. the original command line:
#  * --top-k was passed twice (64 and 0.95); the 0.95 was clearly meant
#    to be --top-p (top-k takes an integer, top-p a probability).
#  * The model lives in /app (downloaded there at build time), so it must
#    be referenced by absolute path after cd-ing into the bin directory.
#  * --host 0.0.0.0 --port 7860 so the server is reachable from outside
#    the container on the port the Dockerfile EXPOSEs (the default bind
#    of 127.0.0.1 is unreachable from the host).
cd llama-b6795-bin-ubuntu-x64/build/bin && chmod +x ./llama-server && \
  ./llama-server \
    --model /app/Qwen3-1.7B-Q8_0.gguf \
    --host 0.0.0.0 \
    --port 7860 \
    --ctx-size 32767 \
    --temp 1.0 \
    --top-k 64 \
    --top-p 0.95 \
    --min-p 0.0 \
    --log-file llama.log &

# Block until the backgrounded server exits so PID 1 stays alive.
wait
llama-b6795-bin-ubuntu-x64/build/bin/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2023-2024 The ggml authors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-curl ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ Copyright (c) 1996 - 2025, Daniel Stenberg, daniel@haxx.se, and many contributors, see the THANKS file.
2
+
3
+ All rights reserved.
4
+
5
+ Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
6
+
7
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8
+
9
+ Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization of the copyright holder.
llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-httplib ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2017 yhirose
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-jsonhpp ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2013-2025 Niels Lohmann
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
llama-b6795-bin-ubuntu-x64/build/bin/LICENSE-linenoise ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright (c) 2010-2014, Salvatore Sanfilippo <antirez at gmail dot com>
2
+ Copyright (c) 2010-2013, Pieter Noordhuis <pcnoordhuis at gmail dot com>
3
+ Copyright (c) 2025, Eric Curtin <ericcurtin17 at gmail dot com>
4
+
5
+ All rights reserved.
6
+
7
+ Redistribution and use in source and binary forms, with or without
8
+ modification, are permitted provided that the following conditions are met:
9
+
10
+ * Redistributions of source code must retain the above copyright notice,
11
+ this list of conditions and the following disclaimer.
12
+
13
+ * Redistributions in binary form must reproduce the above copyright notice,
14
+ this list of conditions and the following disclaimer in the documentation
15
+ and/or other materials provided with the distribution.
16
+
17
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
18
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
21
+ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
24
+ ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
26
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
llama-b6795-bin-ubuntu-x64/build/bin/libggml-base.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a54db8db9ceb2c9f0dd06f1d5426722d867bba0adcb2117f8fc118c8ebb68b68
3
+ size 732912
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-alderlake.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a60c08cb296cb24e222a220aacd6da92153905b2400d9b7c6bc2ae314308c80a
3
+ size 898088
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-haswell.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:43c3aacfcd1c059f79c55500c461ebad66d90f97a6a2a7f01aa79f19a9338928
3
+ size 902184
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-icelake.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4856733fb03f94a3e8322e047e075e0dbee04b482d0c9a9087a5665a7b6682cc
3
+ size 1030784
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sandybridge.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c3c596ed7164d676f646858c311c30b138e1cac10872f5ceeb28a2504dcc9ad0
3
+ size 842336
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sapphirerapids.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f9e506d741f6587f762a923146cbc99e55a95be3f2cbfa8e92b0bdc2b611b433
3
+ size 1294008
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-skylakex.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7db970948fad0442a17c769acf76fa13d80b3a81c9e2b092038799c98e8ca732
3
+ size 1030784
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-sse42.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3111a3da185cfbf9fdb55bbb21c185c70433ffc29db0a247544b86a3178cc20d
3
+ size 647232
llama-b6795-bin-ubuntu-x64/build/bin/libggml-cpu-x64.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:14ed7504d6386c89110a1467b75f78e0c1e767395e4d4facada55c753792a6a5
3
+ size 643280
llama-b6795-bin-ubuntu-x64/build/bin/libggml-rpc.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e8b72d9069dd76f863e0ea59f322c38611080e1e084d92d5542b0f8cab40e028
3
+ size 122760
llama-b6795-bin-ubuntu-x64/build/bin/libggml.so ADDED
Binary file (54.6 kB). View file
 
llama-b6795-bin-ubuntu-x64/build/bin/libllama.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d59750db191a206b980db6cfaa5f8d3ff01198426fc085ab1ed05e76f31d9f9b
3
+ size 2619816
llama-b6795-bin-ubuntu-x64/build/bin/libmtmd.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d686883f12a674a9ddbb5ebee41615a26446f4f057da232e92c59f12f782fc16
3
+ size 790872
llama-b6795-bin-ubuntu-x64/build/bin/llama-batched-bench ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:844c7449d89ba4ed8560fb6bd7c1c46cf984b5f0e9f77596bef2ad5fa0a3fd60
3
+ size 2483336
llama-b6795-bin-ubuntu-x64/build/bin/llama-bench ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46c6af7ab543eb4870212b04389cb248c18830e668a91355717ac7d1695c691c
3
+ size 527584
llama-b6795-bin-ubuntu-x64/build/bin/llama-cli ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a00914f3d4ee0a1f3e287274c48a3358e5b0d7caeab92a9c91c74a95ba9a4803
3
+ size 2523112
llama-b6795-bin-ubuntu-x64/build/bin/llama-gemma3-cli ADDED
Binary file (16.9 kB). View file
 
llama-b6795-bin-ubuntu-x64/build/bin/llama-gguf-split ADDED
Binary file (48.2 kB). View file
 
llama-b6795-bin-ubuntu-x64/build/bin/llama-imatrix ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:09817c0b66ad5dba8a39274bb8d8a3b35057d2a127d7f77f27ed9c74a7f43d2a
3
+ size 2582792
llama-b6795-bin-ubuntu-x64/build/bin/llama-llava-cli ADDED
Binary file (16.9 kB). View file
 
llama-b6795-bin-ubuntu-x64/build/bin/llama-minicpmv-cli ADDED
Binary file (16.9 kB). View file
 
llama-b6795-bin-ubuntu-x64/build/bin/llama-mtmd-cli ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2602497cdff3ab9307292111e7000378e9a5e95461ff2068954c8887ae9a134d
3
+ size 2506432
llama-b6795-bin-ubuntu-x64/build/bin/llama-perplexity ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5d4bb1f3ed979b60de3192e278fa4b654afcb00b5349268d0740bde096a24d30
3
+ size 2576736
llama-b6795-bin-ubuntu-x64/build/bin/llama-quantize ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e299255f83e7e1c91b7f18be6570012571c219bcc326fb61eebc48da1af2f6d
3
+ size 372560
llama-b6795-bin-ubuntu-x64/build/bin/llama-qwen2vl-cli ADDED
Binary file (16.9 kB). View file
 
llama-b6795-bin-ubuntu-x64/build/bin/llama-run ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fc745f23a5788d1b18d8fa08e0d3f316fb22a23ebd470d3f72932b74df4b1f8b
3
+ size 2009184
llama-b6795-bin-ubuntu-x64/build/bin/llama-server ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:beb5ec4869c4aa5559815d1786d010a756a773a4c3ccef6bdd18222cef4b1789
3
+ size 4328720
llama-b6795-bin-ubuntu-x64/build/bin/llama-tokenize ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9388697b1bb98eefc6b5c95d4e4150fd245b6bbd4289241c36e415dd7a9d18b0
3
+ size 324080
llama-b6795-bin-ubuntu-x64/build/bin/llama-tts ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:04884bf2228fd78c1f3feb3604b46b98c05ed23642fecd6b1ce7eb6b3dd5a231
3
+ size 2591840
llama-b6795-bin-ubuntu-x64/build/bin/rpc-server ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c4c4974823037421c0157695d918607387c62cb509844384ee2c8f47ec0901c7
3
+ size 190224