#!/bin/bash
# Build script for MedAI Processing with dynamic local/cloud mode support
printf '%s\n' "🏗️ MedAI Processing Build Script"
printf '%s\n' "=================================="
#######################################
# Prompt the user for a yes/no answer, re-asking until valid input arrives.
# Arguments: $1 - prompt text (shown with a "(y/n): " suffix)
# Returns:   0 for yes, 1 for no (also 1 if stdin is exhausted, so callers
#            in non-interactive runs get "no" instead of an infinite loop)
#######################################
prompt_yes_no() {
    local prompt="$1"
    local response
    while true; do
        # -r: keep backslashes literal; on read failure (EOF) treat as "no"
        # rather than looping forever on an empty response.
        read -r -p "$prompt (y/n): " response || return 1
        case "$response" in
            [Yy]* ) return 0;;
            [Nn]* ) return 1;;
            * ) echo "Please answer yes (y) or no (n).";;
        esac
    done
}
# Determine runtime mode: accept "local"/"cloud" as $1, otherwise ask.
if [ "$1" = "local" ]; then
    MODE="local"
elif [ "$1" = "cloud" ]; then
    MODE="cloud"
else
    # Interactive mode selection
    echo "Please select the runtime mode:"
    echo "1) Local mode (MedAlpaca-13b) - No API costs, complete privacy, heavy dependencies"
    echo "2) Cloud mode (NVIDIA/Gemini APIs) - Faster processing, requires API keys, lightweight"
    echo ""
    while true; do
        # -r: keep backslashes literal; abort instead of looping forever if
        # stdin is exhausted (e.g. the script is run non-interactively).
        if ! read -r -p "Enter your choice (1 or 2): " choice; then
            echo "No input available; pass 'local' or 'cloud' as an argument." >&2
            exit 1
        fi
        case "$choice" in
            1) MODE="local"; break;;
            2) MODE="cloud"; break;;
            *) echo "Please enter 1 for local mode or 2 for cloud mode.";;
        esac
    done
fi
echo ""
echo "Selected mode: $MODE"
echo ""
# Ask if user wants to build/rebuild the Docker image
if prompt_yes_no "Would you like to build/rebuild the Docker image?"; then
    echo ""
    # Select build arguments per mode; the image tag encodes the mode so the
    # run step can find the matching image later.
    if [ "$MODE" = "local" ]; then
        echo "🏠 Building in LOCAL mode (MedAlpaca-13b)..."
        echo " - Installing local LLM dependencies (torch-cuda, accelerate, bitsandbytes, etc.)"
        echo " - Excluding cloud API dependencies (google-genai, google-auth, etc.)"
        build_args=(--build-arg IS_LOCAL=true -t medai-processing:local)
    else
        echo "☁️ Building in CLOUD mode (NVIDIA/Gemini APIs)..."
        echo " - Installing cloud API dependencies (google-genai, google-auth, etc.)"
        echo " - Excluding heavy local LLM dependencies (torch-cuda, accelerate, bitsandbytes, etc.)"
        build_args=(--build-arg IS_LOCAL=false -t medai-processing:cloud)
    fi
    # Test the build command directly instead of inspecting $? afterwards —
    # $? is easily clobbered by any statement inserted in between.
    if docker build "${build_args[@]}" .; then
        echo ""
        echo "✅ Build completed successfully!"
    else
        echo ""
        echo "❌ Build failed! Please check the error messages above."
        exit 1
    fi
else
    echo "⏭️ Skipping Docker build..."
fi
echo ""
# Ask if user wants to run the container
if prompt_yes_no "Would you like to run the Docker container now?"; then
    echo ""
    echo "🚀 Starting Docker container..."
    # Select the image and base run arguments for the chosen mode.
    # Arguments are collected in a bash array instead of a string + eval:
    # a token containing spaces or quotes would break the eval'd string, and
    # eval on user-typed input is a shell-injection hazard.
    if [ "$MODE" = "local" ]; then
        IMAGE_NAME="medai-processing:local"
        run_args=(run -p 7860:7860 -e IS_LOCAL=true)
    else
        IMAGE_NAME="medai-processing:cloud"
        run_args=(run -p 7860:7860 -e IS_LOCAL=false)
    fi
    # Fail fast: verify the image exists BEFORE prompting for secrets.
    if ! docker image inspect "$IMAGE_NAME" >/dev/null 2>&1; then
        echo "❌ Error: Docker image '$IMAGE_NAME' not found!"
        echo " Please build the image first by running this script and choosing 'yes' for building."
        exit 1
    fi
    # Collect credentials for the selected mode.
    if [ "$MODE" = "local" ]; then
        echo "🔑 Please provide your Hugging Face token:"
        read -r -p "HF_TOKEN: " hf_token
        if [ -n "$hf_token" ]; then
            # Hugging Face tokens conventionally start with 'hf_'; warn but proceed.
            if [[ "$hf_token" =~ ^hf_ ]]; then
                run_args+=(-e "HF_TOKEN=$hf_token")
            else
                echo "⚠️ Warning: HF token should start with 'hf_'. Adding anyway..."
                run_args+=(-e "HF_TOKEN=$hf_token")
            fi
        else
            echo "⚠️ Warning: No HF_TOKEN provided. Model download may fail."
        fi
    else
        echo "🔑 Please provide your API keys:"
        read -r -p "NVIDIA_API_1: " nvidia_key
        read -r -p "GEMINI_API_1: " gemini_key
        if [ -n "$nvidia_key" ]; then
            run_args+=(-e "NVIDIA_API_1=$nvidia_key")
        fi
        if [ -n "$gemini_key" ]; then
            run_args+=(-e "GEMINI_API_1=$gemini_key")
        fi
        if [ -z "$nvidia_key" ] && [ -z "$gemini_key" ]; then
            echo "⚠️ Warning: No API keys provided. Processing may fail."
        fi
    fi
    run_args+=("$IMAGE_NAME")
    # Log the exact command; %q shell-quotes each argument so the printed
    # line is copy-paste safe even when tokens contain special characters.
    printf 'Running: docker'
    printf ' %q' "${run_args[@]}"
    printf '\n\n'
    docker "${run_args[@]}"
else
    echo ""
    echo "📋 Manual run command:"
    if [ "$MODE" = "local" ]; then
        echo " docker run -p 7860:7860 -e IS_LOCAL=true -e HF_TOKEN=your_token_here medai-processing:local"
    else
        echo " docker run -p 7860:7860 -e IS_LOCAL=false -e NVIDIA_API_1=your_key -e GEMINI_API_1=your_key medai-processing:cloud"
    fi
fi