File size: 1,297 Bytes
9f8b738
 
 
 
 
 
 
 
 
 
 
 
 
5f73a13
 
 
0498b9a
 
 
 
 
 
 
 
 
 
 
 
 
 
5f73a13
0498b9a
5f73a13
0498b9a
 
 
 
 
9f8b738
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
# Base image tag is parameterized so builds can pin an exact release
# (e.g. --build-arg OLLAMA_TAG=0.5.7); the default preserves prior behavior.
ARG OLLAMA_TAG=latest
FROM ollama/ollama:${OLLAMA_TAG}

# curl is used both to download ngrok and for the readiness/health probes.
# --no-install-recommends and the apt-list cleanup keep the layer small;
# wget is no longer needed since curl handles the download below.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Download and install ngrok v3 (stable channel, linux/amd64).
# curl -f makes an HTTP 4xx/5xx fail the build instead of silently
# extracting an HTML error page.
RUN curl -fsSL https://bin.equinox.io/c/bNyj1mQVY4c/ngrok-v3-stable-linux-amd64.tgz \
    -o /tmp/ngrok.tgz \
    && tar -xzf /tmp/ngrok.tgz -C /usr/local/bin \
    && rm /tmp/ngrok.tgz

# WORKDIR creates /app if missing — no separate mkdir needed.
WORKDIR /app

# Startup script, written via a BuildKit COPY heredoc instead of the fragile
# echo-with-\n-escapes approach (which depends on the shell's echo builtin
# interpreting backslash escapes).
#
# SECURITY: the ngrok authtoken is read from the NGROK_AUTHTOKEN environment
# variable at *runtime* so no credential is ever baked into an image layer.
# Run with: docker run -e NGROK_AUTHTOKEN=... -p 11434:11434 <image>
COPY <<'EOF' /app/start_services.sh
#!/bin/bash
set -u

# Start the Ollama server in the background.
ollama serve &

# Wait (up to 60s) for the API to answer before pulling models.
echo "Waiting for Ollama to start..."
if ! timeout 60 bash -c 'until curl -s http://localhost:11434/api/tags > /dev/null 2>&1; do sleep 1; done'; then
  echo "Error: Ollama failed to start within 60 seconds" >&2
  exit 1
fi

# Pull the TinyLlama model so it is available before the tunnel opens.
echo "Pulling TinyLlama model..."
ollama pull tinyllama

# Refuse to start a broken tunnel if no token was supplied.
if [ -z "${NGROK_AUTHTOKEN:-}" ]; then
  echo "Error: NGROK_AUTHTOKEN environment variable is not set" >&2
  exit 1
fi

# Configure the token (ngrok v3 command) and open the tunnel.
echo "Starting Ngrok tunnel..."
ngrok config add-authtoken "$NGROK_AUTHTOKEN"
ngrok http 11434 --host-header="localhost:11434" &

# Keep the container alive while the background services run.
wait
EOF

# Make the script executable.
RUN chmod +x /app/start_services.sh

# Documentation only (does not publish the port): Ollama's API listens here.
EXPOSE 11434

# Cheap liveness probe against the Ollama API; generous start period because
# the startup script also pulls a model before steady state.
HEALTHCHECK --interval=30s --timeout=3s --start-period=60s --retries=3 \
  CMD curl -fsS http://localhost:11434/api/tags || exit 1

# The ollama/ollama base image sets ENTRYPOINT ["/bin/ollama"], so the
# original CMD ["/app/start_services.sh"] was handed to that binary as an
# argument ("ollama /app/start_services.sh") and never executed. Resetting
# ENTRYPOINT runs our supervisor script directly as PID 1.
ENTRYPOINT ["/app/start_services.sh"]