Spaces:
Sleeping
Sleeping
Build llama.cpp statically to avoid shared library dependencies
Browse files · Dockerfile (+7 additions, −1 deletion)
Dockerfile
CHANGED
|
@@ -17,12 +17,13 @@ RUN apt-get update && apt-get install -y \
|
|
| 17 |
# Compile llama.cpp from source (for translation feature)
|
| 18 |
# This ensures compatibility with the container's architecture
|
| 19 |
# Disable CURL since we don't need it for local GGUF model inference
|
|
|
|
| 20 |
RUN cd /tmp && \
|
| 21 |
git clone --depth 1 --branch master https://github.com/ggerganov/llama.cpp.git && \
|
| 22 |
cd llama.cpp && \
|
| 23 |
mkdir build && \
|
| 24 |
cd build && \
|
| 25 |
- cmake .. -DCMAKE_BUILD_TYPE=Release -DLLAMA_CURL=OFF && \
|
| 26 |
# Build the llama-cli target (the command-line interface we need) \
|
| 27 |
cmake --build . --config Release --target llama-cli -j$(nproc) && \
|
| 28 |
# Find the binary (it might be in different locations) \
|
|
@@ -40,6 +41,11 @@ RUN cd /tmp && \
|
|
| 40 |
exit 1; \
|
| 41 |
fi && \
|
| 42 |
chmod 755 /usr/local/bin/llama-main && \
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 43 |
cd / && \
|
| 44 |
rm -rf /tmp/llama.cpp && \
|
| 45 |
echo "✅ llama.cpp compiled and installed to /usr/local/bin/llama-main"
|
|
|
|
| 17 |
# Compile llama.cpp from source (for translation feature)
|
| 18 |
# This ensures compatibility with the container's architecture
|
| 19 |
# Disable CURL since we don't need it for local GGUF model inference
|
| 20 |
+ # Build statically linked to avoid shared library dependencies
|
| 21 |
RUN cd /tmp && \
|
| 22 |
git clone --depth 1 --branch master https://github.com/ggerganov/llama.cpp.git && \
|
| 23 |
cd llama.cpp && \
|
| 24 |
mkdir build && \
|
| 25 |
cd build && \
|
| 26 |
+ cmake .. -DCMAKE_BUILD_TYPE=Release -DLLAMA_CURL=OFF -DBUILD_SHARED_LIBS=OFF && \
|
| 27 |
# Build the llama-cli target (the command-line interface we need) \
|
| 28 |
cmake --build . --config Release --target llama-cli -j$(nproc) && \
|
| 29 |
# Find the binary (it might be in different locations) \
|
|
|
|
| 41 |
exit 1; \
|
| 42 |
fi && \
|
| 43 |
chmod 755 /usr/local/bin/llama-main && \
|
| 44 |
+ # Also copy any shared libraries if they exist (fallback) \
|
| 45 |
+ if [ -f libllama.so ]; then \
|
| 46 |
+ cp libllama.so /usr/local/lib/ && \
|
| 47 |
+ ldconfig; \
|
| 48 |
+ fi && \
|
| 49 |
cd / && \
|
| 50 |
rm -rf /tmp/llama.cpp && \
|
| 51 |
echo "✅ llama.cpp compiled and installed to /usr/local/bin/llama-main"
|