Commit: Update Dockerfile (fix build error)
Changed files: Dockerfile (+63 −18)
Dockerfile
CHANGED
|
@@ -1,18 +1,63 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Base image: pinned Ubuntu LTS tag — glibc-based, matching the prebuilt
# llama.cpp "ubuntu-x64" release binaries installed below.
FROM ubuntu:22.04

# Everything below (downloads, build/bin tree, server CWD) lives here.
# WORKDIR creates the directory if it does not exist.
WORKDIR /workspaces/empty
|
| 6 |
+
|
| 7 |
+
# Install runtime and tooling dependencies in one layer:
#   - wget/unzip:        fetch and unpack the llama.cpp release archive
#   - ca-certificates:   TLS trust for the HTTPS download; must be explicit
#                        because --no-install-recommends drops it (it is only
#                        a Recommends of wget)
#   - curl:              available for health probes / debugging of the server
#   - libgomp1, libopenblas0 and the core runtime libs: shared objects the
#     prebuilt llama.cpp binaries link against
# --no-install-recommends keeps the image small; the apt lists are removed in
# the same layer so the package cache never persists in the image. Packages
# are sorted alphabetically for diffability.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        libc6 \
        libgcc-s1 \
        libgomp1 \
        libopenblas0 \
        libstdc++6 \
        unzip \
        wget \
    && rm -rf /var/lib/apt/lists/*
|
| 18 |
+
|
| 19 |
+
# Steps 1-3: fetch the pinned llama.cpp precompiled release (b5452), unpack
# it, move the binaries into build/bin, and delete the archive — all in ONE
# layer. Splitting these across RUNs (as before) bakes the zip into an early
# layer forever: a `rm` in a later layer cannot shrink the image.
# The release zip has shipped both as a top-level directory and as flat
# files, so both layouts are handled.
RUN wget https://github.com/ggml-org/llama.cpp/releases/download/b5452/llama-b5452-bin-ubuntu-x64.zip && \
    unzip llama-b5452-bin-ubuntu-x64.zip && \
    mkdir -p /workspaces/empty/build/bin && \
    if [ -d "llama-b5452-bin-ubuntu-x64" ]; then \
        mv llama-b5452-bin-ubuntu-x64/* /workspaces/empty/build/bin/; \
        rmdir llama-b5452-bin-ubuntu-x64; \
    else \
        find . -maxdepth 1 -name "*llama*" -type f -executable -exec mv {} /workspaces/empty/build/bin/ \; ; \
    fi && \
    rm -f llama-b5452-bin-ubuntu-x64.zip
|
| 33 |
+
|
| 34 |
+
# Ensure the extracted llama.cpp binaries are executable (zip extraction does
# not always preserve the execute bit).
RUN chmod +x /workspaces/empty/build/bin/*

# Steps 4 & 5: put the binaries on PATH and make the shared libraries shipped
# alongside them (libllama.so etc.) resolvable by the dynamic linker.
ENV PATH="/workspaces/empty/build/bin:${PATH}"
# Set directly rather than appending ":${LD_LIBRARY_PATH}": the base image
# does not define LD_LIBRARY_PATH, so appending leaves a trailing ":" that
# the dynamic linker interprets as the current working directory — an
# unintended and potentially unsafe search path.
ENV LD_LIBRARY_PATH="/workspaces/empty/build/bin"

# Defensive cleanup of the release archive. This is a no-op when the fetch
# layer already removed it; note a deletion here can never shrink an earlier
# layer, which is why the fetch step cleans up after itself.
RUN rm -f llama-b5452-bin-ubuntu-x64.zip
|
| 43 |
+
|
| 44 |
+
# Fallback cache location at the filesystem root. World-writable 777 is kept
# from the original — presumably so the container still works when the runtime
# injects an arbitrary non-root UID (e.g. Hugging Face Spaces / OpenShift)
# that cannot write under /home/llama. NOTE(review): confirm this is the
# intent before tightening the permissions.
RUN mkdir -p /.cache/llama.cpp && \
    chmod -R 777 /.cache

# Dedicated unprivileged system account for running the server, owning its
# own cache tree under /home/llama.
RUN groupadd -r llama && useradd -r -g llama llama && \
    mkdir -p /home/llama/.cache/llama.cpp && \
    chown -R llama:llama /home/llama

# Direct XDG-aware tools (including llama.cpp's model cache) at the llama
# user's cache directory.
ENV XDG_CACHE_HOME=/home/llama/.cache
|
| 55 |
+
|
| 56 |
+
# Run everything from here on as the unprivileged user.
USER llama

# Documentation of the server's HTTP port (does not publish it; use
# `docker run -p 8080:8080`). 8080 > 1024, so the non-root user can bind it.
EXPOSE 8080

# Cheap liveness probe against llama-server's /health endpoint. The long
# start period allows for the model download on first start.
HEALTHCHECK --interval=30s --timeout=5s --start-period=120s --retries=3 \
    CMD curl -fsS http://localhost:8080/health || exit 1

# Step 6: launch llama-server (exec form, so it is PID 1 and receives
# SIGTERM). --host 0.0.0.0 is required: the server's default bind address is
# 127.0.0.1, which is unreachable through Docker port publishing. The model
# is pulled from Hugging Face (-hf) on first start and cached under
# XDG_CACHE_HOME.
CMD ["llama-server", "-hf", "ggml-org/SmolVLM-500M-Instruct-GGUF", "--host", "0.0.0.0", "--port", "8080"]
|