# Hugging Face Spaces entrypoint script (Space status at capture time: Paused)
set -e

# --- Persistence & cache locations -------------------------------------------
# The Space's filesystem is effectively read-only outside /tmp, so every
# writable path (app data, HF caches, uploads, static assets) lives there.
export DATA_DIR="/tmp/open-webui-data"
export HF_STORAGE_REPO="${HF_STORAGE_REPO:-nxdev-org/open-webui-storage}"
export SYNC_INTERVAL="${SYNC_INTERVAL:-300}"   # seconds between periodic HF syncs

# Point every HuggingFace / transformers cache at the same writable dir.
export HF_HOME="/tmp/hf_cache"
export HUGGINGFACE_HUB_CACHE="/tmp/hf_cache"
export TRANSFORMERS_CACHE="/tmp/hf_cache"
export SENTENCE_TRANSFORMERS_HOME="/tmp/hf_cache"

# Override Open WebUI's default (read-only) static/upload locations.
export STATIC_DIR="/tmp/static"
export UPLOAD_DIR="/tmp/uploads"

echo "Starting Open WebUI with HF Dataset persistence..."
echo "Data directory: $DATA_DIR"
echo "HF Repository: $HF_STORAGE_REPO"
echo "HF Cache: $HF_HOME"
# Install uv via its standalone installer into a writable prefix (/tmp/bin).
# UV_UNMANAGED_INSTALL puts the binary there directly, skipping PATH mangling.
mkdir -p /tmp/bin
export UV_CACHE_DIR="/tmp/uv_cache"
curl -LsSf https://astral.sh/uv/install.sh \
  | env UV_UNMANAGED_INSTALL="/tmp/bin" UV_CACHE_DIR="/tmp/uv_cache" sh
#source /tmp/bin/env
export PATH="/tmp/bin:$PATH"

# npm's cache must also live under /tmp (HOME is not reliably writable here).
export NPM_CONFIG_CACHE=/tmp/.npm_cache
# Caddy resolves its config under $XDG_CONFIG_HOME and its data/locks under
# $XDG_DATA_HOME (the ~/.local/share equivalent); point both at /tmp so the
# unprivileged app user can write them regardless of who created the dirs.
export XDG_CONFIG_HOME=/tmp/.config
export XDG_DATA_HOME=/tmp/.local/share
mkdir -p "${XDG_CONFIG_HOME}/caddy" \
         "${XDG_DATA_HOME}/caddy/locks"

# Pre-create the npm cache dir; uv creates UV_CACHE_DIR on its own.
mkdir -p "${NPM_CONFIG_CACHE}"

# Shell helpers (provides gh_install, used later) and nvm-managed node.
# NOTE(review): these hard-fail under `set -e` if the files are missing —
# assumed to be baked into the image.
source "${HOME}/add_bash_util.sh"
source "${NVM_DIR}/nvm.sh"
node -v && npm -v
# Create an isolated virtualenv with uv and install the HF sync dependencies.
uv venv /tmp/.venv
source /tmp/.venv/bin/activate
uv pip install --no-cache-dir \
  huggingface_hub \
  datasets
# Create all writable directories up front.
mkdir -p "$DATA_DIR" "$HF_HOME" "$STATIC_DIR" "$UPLOAD_DIR"

# Static assets ship inside the read-only image; copy them somewhere writable.
# Best-effort by design: a partial/empty copy must not abort startup.
if [ -d "/app/backend/open_webui/static" ]; then
  echo "Copying static files to writable location..."
  cp -r /app/backend/open_webui/static/* "$STATIC_DIR/" 2>/dev/null || true
fi

# Smoke-test write permissions on the data dir (warn only, don't abort).
if touch "$DATA_DIR/test" 2>/dev/null; then
  rm -f "$DATA_DIR/test"
  echo "Data directory is writable"
else
  echo "Warning: Data directory may not be writable"
fi
# HF_TOKEN gates all sync functionality (uploads need write access to the repo).
if [ -z "$HF_TOKEN" ]; then
  echo "Warning: HF_TOKEN not set. Sync functionality will be limited."
else
  echo "HF_TOKEN is set, proceeding with sync..."
fi

# Restore previously-synced state from the HF dataset before the app starts.
# NOTE(review): `set -e` is active, so a failing download aborts startup —
# sync_storage.py is assumed to exit 0 on a fresh/empty repo; confirm.
echo "Syncing data from Hugging Face..."
python3 "${HOME}/sync_storage.py" download
# Graceful-shutdown handler: push a final snapshot to HF (when a token is
# available), reap the background jobs, and exit cleanly.
cleanup() {
  echo "Shutting down gracefully..."
  if [ -n "${HF_TOKEN:-}" ]; then
    echo "Uploading final data state..."
    python3 "${HOME}/sync_storage.py" upload
  fi
  # PIDs may be unset if we die before the services start; guard with :- .
  kill "${SYNC_PID:-}" 2>/dev/null || true
  kill "${WEBUI_PID:-}" 2>/dev/null || true
  exit 0
}

# Run cleanup on container stop (SIGTERM) and Ctrl-C (SIGINT).
trap cleanup SIGTERM SIGINT
# Periodic push of local state to the HF dataset. Without a token the function
# just idles so the process tree keeps the same shape either way.
background_sync() {
  if [ -n "${HF_TOKEN:-}" ]; then
    while true; do
      sleep "$SYNC_INTERVAL"
      echo "Periodic sync to Hugging Face..."
      # Guarded so one transient upload failure doesn't kill the loop
      # (the script runs under `set -e`).
      python3 "${HOME}/sync_storage.py" upload \
        || echo "Warning: periodic sync failed" >&2
    done
  else
    echo "Skipping background sync - no HF_TOKEN"
    while true; do
      sleep 3600
    done
  fi
}

# Start background sync and remember its PID for cleanup().
background_sync &
SYNC_PID=$!
# Start Open WebUI
echo "Starting Open WebUI..."

# Generate a random per-boot secret key if none was provided (sessions reset
# across restarts unless WEBUI_SECRET_KEY is pinned via env).
export WEBUI_SECRET_KEY="${WEBUI_SECRET_KEY:-$(openssl rand -hex 32)}"

# Launch Open WebUI in the background, outside the uv virtualenv. The whole
# group is backgrounded, so `deactivate` runs in the subshell and the parent
# shell keeps its venv. Fix over the original `deactivate && start.sh`: the
# app still starts even if `deactivate` is unavailable or fails.
{ deactivate 2>/dev/null || true; /app/backend/start.sh; } &
WEBUI_PID=$!

# Intentionally no `wait $WEBUI_PID` here — more services are started below.
# --- readeck -------------------------------------------------------------
mkdir -p "$DATA_DIR/readeck-data"

# Resolve the latest linux-amd64 release binary URL from the Codeberg API.
readeck_url=$(curl -X 'GET' 'https://codeberg.org/api/v1/repos/readeck/readeck/releases/latest' -H 'accept: application/json' \
  | jq -r '.assets[] | .browser_download_url | select(. | endswith("linux-amd64"))')
echo "download readeck from $readeck_url"
wget -q "$readeck_url" -O "$DATA_DIR/readeck-data/readeck"
chmod a+x "$DATA_DIR/readeck-data/readeck"
cp "${HOME}/readeck.toml" "$DATA_DIR/readeck-data/readeck.toml"
echo "Starting readeck..."
# The cd runs inside the backgrounded subshell; parent cwd is unchanged.
cd "$DATA_DIR/readeck-data" && ./readeck serve -config ./readeck.toml &

# --- caddy ---------------------------------------------------------------
# gh_install comes from add_bash_util.sh (sourced earlier).
gh_install caddyserver/caddy linux_amd64.tar.gz /tmp/caddy.tar.gz
mkdir -p /tmp/caddy
tar -xzf /tmp/caddy.tar.gz -C /tmp/caddy
# Fetch the Gemini proxy/gateway projects.
git clone https://github.com/waxz/Gemini-CLI-2-API.git /tmp/Gemini-CLI-2-API -b waxz-patch-1
git clone https://github.com/waxz/gemini-cli-openai /tmp/gemini-cli-openai
git clone https://github.com/snailyp/gemini-balance.git /tmp/gemini-balance

# gemini-balance is a Python app; install its deps into the shared venv.
source /tmp/.venv/bin/activate
uv pip install -r /tmp/gemini-balance/requirements.txt
# Render Gemini-CLI-2-API's config from env. The heredoc delimiter is
# unquoted on purpose so ${VAR:-} defaults expand; tee echoes the result to
# the log for debugging. mkdir -p added for robustness if the clone step
# was skipped or failed.
mkdir -p /tmp/Gemini-CLI-2-API
cat << EOF | tee /tmp/Gemini-CLI-2-API/config.json
{
    "REQUIRED_API_KEY": "${REQUIRED_API_KEY:-}",
    "SERVER_PORT": 3000,
    "HOST": "localhost",
    "MODEL_PROVIDER": "gemini-cli-oauth",
    "OPENAI_API_KEY": "${OPENAI_API_KEY:-}",
    "OPENAI_BASE_URL": "https://api.openai.com/v1",
    "CLAUDE_API_KEY": "${CLAUDE_API_KEY:-}",
    "CLAUDE_BASE_URL": "https://api.anthropic.com/v1",
    "PROJECT_ID": "${PROJECT_ID:-}",
    "PROMPT_LOG_MODE": "console",
    "GEMINI_OAUTH_CREDS_FILE_PATH": "/tmp/gemini_oauth_creds.json"
}
EOF
# Write Gemini OAuth credentials from env for the CLI proxy.
# NOTE(review): expiry_date is a hardcoded epoch-ms timestamp; consumers are
# presumably expected to refresh via refresh_token — confirm.
cat << EOF | tee /tmp/gemini_oauth_creds.json
{
  "access_token": "${GEMINI_OAUTH_ACCESS_TOKEN:-}",
  "refresh_token": "${GEMINI_OAUTH_REFRESH_TOKEN:-}",
  "scope": "https://www.googleapis.com/auth/cloud-platform",
  "token_type": "Bearer",
  "expiry_date": 1753880406425
}
EOF
# Render gemini-balance's .env from this process's environment. The heredoc
# delimiter is unquoted so ${...} references expand; everything else (incl.
# the upstream Chinese comments) is the app's own config format and is kept
# verbatim. mkdir -p added for robustness if the clone step failed.
mkdir -p /tmp/gemini-balance
cat << EOF | tee /tmp/gemini-balance/.env
# 数据库配置
DATABASE_TYPE=mysql
#SQLITE_DATABASE=default_db
MYSQL_HOST=${MYSQL_HOST}
#MYSQL_SOCKET=/run/mysqld/mysqld.sock
MYSQL_PORT=${MYSQL_PORT}
MYSQL_USER=${MYSQL_USER}
MYSQL_PASSWORD=${MYSQL_PASSWORD}
MYSQL_DATABASE=${MYSQL_DATABASE}
API_KEYS=${GEMINI_API_KEYS}
ALLOWED_TOKENS=["${REQUIRED_API_KEY}"]
AUTH_TOKEN=${REQUIRED_API_KEY}
# For Vertex AI Platform API Keys
VERTEX_API_KEYS=["AQ.Abxxxxxxxxxxxxxxxxxxx"]
# For Vertex AI Platform Express API Base URL
VERTEX_EXPRESS_BASE_URL=https://aiplatform.googleapis.com/v1beta1/publishers/google
TEST_MODEL=gemini-1.5-flash
THINKING_MODELS=["gemini-2.5-flash-preview-04-17"]
THINKING_BUDGET_MAP={"gemini-2.5-flash-preview-04-17": 4000}
IMAGE_MODELS=["gemini-2.0-flash-exp"]
SEARCH_MODELS=["gemini-2.0-flash-exp","gemini-2.0-pro-exp"]
FILTERED_MODELS=["gemini-1.0-pro-vision-latest", "gemini-pro-vision", "chat-bison-001", "text-bison-001", "embedding-gecko-001"]
# 是否启用网址上下文,默认启用
URL_CONTEXT_ENABLED=false
URL_CONTEXT_MODELS=["gemini-2.5-pro","gemini-2.5-flash","gemini-2.5-flash-lite","gemini-2.0-flash","gemini-2.0-flash-live-001"]
TOOLS_CODE_EXECUTION_ENABLED=false
SHOW_SEARCH_LINK=true
SHOW_THINKING_PROCESS=true
BASE_URL=https://generativelanguage.googleapis.com/v1beta
MAX_FAILURES=10
MAX_RETRIES=3
CHECK_INTERVAL_HOURS=1
TIMEZONE=Asia/Shanghai
# 请求超时时间(秒)
TIME_OUT=300
# 代理服务器配置 (支持 http 和 socks5)
# 示例: PROXIES=["http://user:pass@host:port", "socks5://host:port"]
PROXIES=[]
# 对同一个API_KEY使用代理列表中固定的IP策略
PROXIES_USE_CONSISTENCY_HASH_BY_API_KEY=true
#########################image_generate 相关配置###########################
PAID_KEY=AIzaSyxxxxxxxxxxxxxxxxxxx
CREATE_IMAGE_MODEL=imagen-3.0-generate-002
UPLOAD_PROVIDER=smms
SMMS_SECRET_TOKEN=XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
PICGO_API_KEY=xxxx
CLOUDFLARE_IMGBED_URL=https://xxxxxxx.pages.dev/upload
CLOUDFLARE_IMGBED_AUTH_CODE=xxxxxxxxx
CLOUDFLARE_IMGBED_UPLOAD_FOLDER=
##########################################################################
#########################stream_optimizer 相关配置########################
STREAM_OPTIMIZER_ENABLED=false
STREAM_MIN_DELAY=0.016
STREAM_MAX_DELAY=0.024
STREAM_SHORT_TEXT_THRESHOLD=10
STREAM_LONG_TEXT_THRESHOLD=50
STREAM_CHUNK_SIZE=5
##########################################################################
######################### 日志配置 #######################################
# 日志级别 (debug, info, warning, error, critical),默认为 info
LOG_LEVEL=info
# 是否开启自动删除错误日志
AUTO_DELETE_ERROR_LOGS_ENABLED=true
# 自动删除多少天前的错误日志 (1, 7, 30)
AUTO_DELETE_ERROR_LOGS_DAYS=7
# 是否开启自动删除请求日志
AUTO_DELETE_REQUEST_LOGS_ENABLED=false
# 自动删除多少天前的请求日志 (1, 7, 30)
AUTO_DELETE_REQUEST_LOGS_DAYS=30
##########################################################################
# 假流式配置 (Fake Streaming Configuration)
# 是否启用假流式输出
FAKE_STREAM_ENABLED=True
# 假流式发送空数据的间隔时间(秒)
FAKE_STREAM_EMPTY_DATA_INTERVAL_SECONDS=5
# 安全设置 (JSON 字符串格式)
# 注意:这里的示例值可能需要根据实际模型支持情况调整
SAFETY_SETTINGS=[{"category": "HARM_CATEGORY_HARASSMENT", "threshold": "OFF"}, {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "OFF"}, {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "OFF"}, {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "OFF"}, {"category": "HARM_CATEGORY_CIVIC_INTEGRITY", "threshold": "BLOCK_NONE"}]
URL_NORMALIZATION_ENABLED=false
# tts配置
TTS_MODEL=gemini-2.5-flash-preview-tts
TTS_VOICE_NAME=Zephyr
TTS_SPEED=normal
#########################Files API 相关配置########################
# 是否启用文件过期自动清理
FILES_CLEANUP_ENABLED=true
# 文件过期清理间隔(小时)
FILES_CLEANUP_INTERVAL_HOURS=1
# 是否启用用户文件隔离(每个用户只能看到自己上传的文件)
FILES_USER_ISOLATION_ENABLED=true
##########################################################################
EOF
# Render the wrangler dev-vars template for gemini-cli-openai from env.
# mkdir -p added for robustness if the clone step failed.
mkdir -p /tmp/gemini-cli-openai
cat << EOF | tee /tmp/gemini-cli-openai/.dev.vars.test
# Required: OAuth2 credentials JSON from Gemini CLI authentication
GCP_SERVICE_ACCOUNT=${GCP_SERVICE_ACCOUNT}
# Optional: Google Cloud Project ID (auto-discovered if not set)
GEMINI_PROJECT_ID=${GEMINI_PROJECT_ID}
# Optional: API key for authentication (if not set, API is public)
# When set, clients must include "Authorization: Bearer <your-api-key>" header
# Example: sk-1234567890abcdef1234567890abcdef
OPENAI_API_KEY=${REQUIRED_API_KEY}
EOF
# Decrypt the per-account wrangler env files (passphrase is read from the
# GEMINI_AUTH_ENC_PASS environment variable by crypt.sh).
"${HOME}/crypt.sh" decrypt "${HOME}/gemini/enc.dev.var.x" -e GEMINI_AUTH_ENC_PASS -o /tmp/gemini-cli-openai/.dev.var.x
"${HOME}/crypt.sh" decrypt "${HOME}/gemini/enc.dev.var.j" -e GEMINI_AUTH_ENC_PASS -o /tmp/gemini-cli-openai/.dev.var.j
"${HOME}/crypt.sh" decrypt "${HOME}/gemini/enc.dev.var.w" -e GEMINI_AUTH_ENC_PASS -o /tmp/gemini-cli-openai/.dev.var.w
"${HOME}/crypt.sh" decrypt "${HOME}/gemini/enc.dev.var.l" -e GEMINI_AUTH_ENC_PASS -o /tmp/gemini-cli-openai/.dev.var.l

# Start the API gateways. Each `cd X && cmd &` backgrounds the whole list in
# a subshell, so the parent's cwd is untouched by the backgrounded ones.
cd /tmp/Gemini-CLI-2-API && npm install && node src/api-server.js &
cd /tmp/gemini-balance && uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload &

# gemini-cli-openai: install deps in the foreground, activate account "j".
cd /tmp/gemini-cli-openai && npm i
cp /tmp/gemini-cli-openai/.dev.var.j /tmp/gemini-cli-openai/.dev.vars
echo "check /tmp/gemini-cli-openai/.dev.vars"
# SECURITY(review): this dumps the decrypted credentials into the container
# logs — remove once debugging is done.
cat /tmp/gemini-cli-openai/.dev.vars
cd /tmp/gemini-cli-openai && npx wrangler dev --port 8881 --host 0.0.0.0 &
# cd /tmp/gemini-cli-openai && npm i && npx wrangler dev --port 8882 --host 0.0.0.0 --env j &
# cd /tmp/gemini-cli-openai && npm i && npx wrangler dev --port 8883 --host 0.0.0.0 --env w &
# cd /tmp/gemini-cli-openai && npm i && npx wrangler dev --port 8884 --host 0.0.0.0 --env l &

# Caddy is the anchor process: the container lives as long as it runs.
/tmp/caddy/caddy run --config "${HOME}/Caddyfile" &
MAIN_PID=$!
wait "$MAIN_PID"