Spaces:
Build error
Build error
update
Browse files- start.sh +16 -17
- start_bkp.sh +22 -0
start.sh
CHANGED
|
@@ -1,22 +1,21 @@
|
|
| 1 |
#!/bin/bash
# start.sh — launch the Ollama server, wait until its HTTP API answers,
# pull the llama3 model, then run the Python web UI in the foreground.

echo "Starting Ollama server..."
ollama serve &

# Wait for the Ollama server to be ready.
# NOTE: Ollama has no /api/status endpoint — probing it 404s forever and
# the loop never exits. GET /api/tags is the lightweight liveness probe.
echo "Waiting for Ollama server to be ready..."
retries=30
until curl -sSf http://localhost:11434/api/tags > /dev/null 2>&1; do
  retries=$((retries - 1))
  if [ "$retries" -le 0 ]; then
    # Bounded wait: fail loudly instead of spinning forever if the
    # server never comes up.
    echo "Ollama server did not become ready in time" >&2
    exit 1
  fi
  echo "Waiting for Ollama server to start..."
  sleep 2
done

echo "Ollama server is ready."

# Pull the required model
echo "Pulling llama3 model..."
ollama pull llama3

# Start the web UI
echo "Starting web UI..."
python run.py
|
start_bkp.sh
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# start_bkp.sh — backup startup script: run Ollama in the background,
# pull the llama3 model, then start the Python application.
# Starting server
echo "Starting server"
#ollama serve &
#sleep 1
#ollama pull llama3
#python run.py
# Start Ollama in the background.
ollama serve &
# Record Process ID.
pid=$!
# Pause for Ollama to start.
sleep 5
echo "🔴 Retrieve LLAMA3 model..."
ollama pull llama3
echo "🟢 Done!"
# Start the Python application FIRST: 'ollama serve' is a long-running
# server that never exits on its own, so a 'wait $pid' placed before the
# app launch would block forever and run.py would never start.
echo "Starting Python application..."
python ./run.py
# Wait for the Ollama process to finish (after the app exits).
wait "$pid"