# daili / entrypoint.sh
# Author: pjpjq
# fix(proxy): 避免启动时覆盖持久化配置 (avoid overwriting persisted config at startup)
# Commit: 26fe5d7
# NOTE(review): these header lines precede the shebang below; `#!/bin/sh` only
# takes effect as an interpreter line when it is the first line of the file —
# this header should be removed (or moved below the shebang) before deployment.
#!/bin/sh
# Entrypoint for the daili CLI proxy gateway.
# Renders the gateway config, optionally restores persisted state (generic
# object store / Cloudflare R2 / Hugging Face / plain URL snapshots),
# optionally starts a Shadowsocks upstream bridge via sing-box, then
# supervises the gateway binary and periodically backs usage state up.
set -eu
# Gateway listen address. An empty HOST is written into the config as-is.
APP_PORT="${PORT:-8317}"
APP_HOST="${HOST:-}"
# Base writable directory; all persistent state lives under it.
AUTH_BASE="${WRITABLE_PATH:-/tmp}"
AUTH_DIR="${AUTH_BASE}/.cli-proxy-api"
# API keys accepted by the gateway. NOTE(review): "1111" is a weak default —
# deployments should always set API_KEY explicitly.
API_KEY_VALUE="${API_KEY:-1111}"
EXTRA_API_KEYS_VALUE="${EXTRA_API_KEYS:-}"
# Management password falls back to the primary API key.
MGMT_KEY_VALUE="${MANAGEMENT_PASSWORD:-$API_KEY_VALUE}"
# Outbound proxy for the gateway; "direct" disables proxying.
# PROXY_URL_WAS_SET records whether the operator set PROXY_URL at all (even
# to empty) so the SS bridge only overrides it when left at the default.
PROXY_URL_VALUE="${PROXY_URL:-direct}"
PROXY_URL_WAS_SET="${PROXY_URL+x}"
# Local usage-statistics snapshot path plus background-loop cadences (seconds).
USAGE_SNAPSHOT_PATH="${USAGE_SNAPSHOT_PATH:-${AUTH_BASE}/usage-state/usage-export.json}"
USAGE_AUTOSAVE_INTERVAL="${USAGE_AUTOSAVE_INTERVAL:-300}"
OBJECTSTORE_SYNC_INTERVAL="${OBJECTSTORE_SYNC_INTERVAL:-60}"
CLEANUP_INTERVAL="${CLEANUP_INTERVAL:-60}"
CLEANUP_ENABLED="${CLEANUP_ENABLED:-1}"
# Snapshot backend 1: plain HTTP(S) URLs (read and write may differ).
USAGE_SNAPSHOT_URL="${USAGE_SNAPSHOT_URL:-}"
USAGE_SNAPSHOT_READ_URL="${USAGE_SNAPSHOT_READ_URL:-$USAGE_SNAPSHOT_URL}"
USAGE_SNAPSHOT_WRITE_URL="${USAGE_SNAPSHOT_WRITE_URL:-$USAGE_SNAPSHOT_URL}"
USAGE_SNAPSHOT_WRITE_METHOD="${USAGE_SNAPSHOT_WRITE_METHOD:-PUT}"
USAGE_SNAPSHOT_AUTHORIZATION="${USAGE_SNAPSHOT_AUTHORIZATION:-}"
# Snapshot backend 2: Cloudflare R2 via the `mc` client. Account/bucket fall
# back to the OPENAI_AUTH_R2_* variables used elsewhere in the deployment.
USAGE_R2_ACCOUNT_ID="${USAGE_R2_ACCOUNT_ID:-${OPENAI_AUTH_R2_ACCOUNT_ID:-}}"
USAGE_R2_BUCKET="${USAGE_R2_BUCKET:-${OPENAI_AUTH_R2_BUCKET:-}}"
USAGE_R2_KEY="${USAGE_R2_KEY:-usage-state/daili/usage-export.json}"
USAGE_R2_ENDPOINT="${USAGE_R2_ENDPOINT:-}"
R2_ACCESS_KEY_VALUE="${R2_ACCESS_KEY_ID:-}"
R2_SECRET_KEY_VALUE="${R2_SECRET_ACCESS_KEY:-}"
MC_CONFIG_DIR="${MC_CONFIG_DIR:-${AUTH_BASE}/.mc}"
# Snapshot backend 3: Hugging Face. Respect an explicitly set USAGE_HF_TOKEN
# (even if empty); otherwise prefer R2 when it is fully configured, and only
# fall back to the generic HF_TOKEN as a last resort.
if [ "${USAGE_HF_TOKEN+x}" = "x" ]; then
  USAGE_HF_TOKEN="${USAGE_HF_TOKEN-}"
elif [ -n "$USAGE_R2_ACCOUNT_ID" ] && [ -n "$USAGE_R2_BUCKET" ] && [ -n "$R2_ACCESS_KEY_VALUE" ] && [ -n "$R2_SECRET_KEY_VALUE" ]; then
  # Prefer the existing R2 snapshot backend and avoid implicitly reusing HF_TOKEN.
  USAGE_HF_TOKEN=""
else
  USAGE_HF_TOKEN="${HF_TOKEN:-}"
fi
USAGE_HF_REPO_ID="${USAGE_HF_REPO_ID:-pjpjq/daili-usage-state}"
USAGE_HF_PATH="${USAGE_HF_PATH:-usage-export.json}"
USAGE_HF_ROTATE_INTERVAL="${USAGE_HF_ROTATE_INTERVAL:-3600}"
USAGE_HF_ROTATE_KEEP="${USAGE_HF_ROTATE_KEEP:-24}"
# Model-price bootstrap inputs consumed by model_price_bootstrap.py.
MODEL_PRICES_SOURCE_URL="${MODEL_PRICES_SOURCE_URL:-https://zhanzhong.zeabur.app/api/pricing}"
MODEL_PRICES_OUTPUT_PATH="${MODEL_PRICES_OUTPUT_PATH:-${AUTH_BASE}/usage-state/model-prices.json}"
MODEL_PRICES_FETCH_TIMEOUT="${MODEL_PRICES_FETCH_TIMEOUT:-20}"
MANAGEMENT_HTML_PATH="${MANAGEMENT_HTML_PATH:-${AUTH_BASE}/static/management.html}"
# Gateway binary/config. GATEWAY_CONFIG_PATH is switched to the object-store
# mirror's config when restore_objectstore_state succeeds.
CONFIG_PATH="/opt/daili/config.yaml"
GATEWAY_CONFIG_PATH="$CONFIG_PATH"
GATEWAY_BIN="/usr/local/bin/daili-gateway"
# PIDs of supervised processes/loops; empty until started.
GATEWAY_PID=""
AUTOSAVE_PID=""
OBJECTSTORE_SYNC_PID=""
R2_ALIAS_NAME="daili-r2"
# Generic S3-compatible object store used to mirror config + auth files.
OBJECTSTORE_ENDPOINT_VALUE="${OBJECTSTORE_ENDPOINT:-}"
OBJECTSTORE_ACCESS_KEY_VALUE="${OBJECTSTORE_ACCESS_KEY:-}"
OBJECTSTORE_SECRET_KEY_VALUE="${OBJECTSTORE_SECRET_KEY:-}"
OBJECTSTORE_BUCKET_VALUE="${OBJECTSTORE_BUCKET:-}"
OBJECTSTORE_LOCAL_BASE="${OBJECTSTORE_LOCAL_PATH:-$AUTH_BASE}"
OBJECTSTORE_MIRROR_ROOT="${OBJECTSTORE_LOCAL_BASE}/objectstore"
OBJECTSTORE_CONFIG_PATH="${OBJECTSTORE_MIRROR_ROOT}/config/config.yaml"
OBJECTSTORE_AUTH_DIR="${OBJECTSTORE_MIRROR_ROOT}/auths"
OBJECTSTORE_ALIAS_NAME="daili-objectstore"
CLEANUP_PID=""
# Optional Shadowsocks upstream bridge (sing-box) exposing a local SOCKS5 port.
UPSTREAM_BRIDGE_PID=""
UPSTREAM_SOCKS_HOST="${UPSTREAM_SOCKS_HOST:-127.0.0.1}"
UPSTREAM_SOCKS_PORT="${UPSTREAM_SOCKS_PORT:-1080}"
UPSTREAM_SINGBOX_CONFIG_PATH="${UPSTREAM_SINGBOX_CONFIG_PATH:-${AUTH_BASE}/upstream-proxy/sing-box.json}"
SINGBOX_BIN="/usr/local/bin/sing-box"
yaml_escape() {
  # Escape a value for embedding in a double-quoted YAML scalar:
  # backslashes are doubled first, then double quotes are backslash-escaped.
  printf '%s' "$1" | sed -e 's/\\/\\\\/g' -e 's/"/\\"/g'
}
render_api_keys_yaml() {
  # Emit one YAML sequence item per unique API key. Keys come from the
  # primary API key plus EXTRA_API_KEYS (comma- or newline-separated);
  # blanks and duplicates are dropped, first-seen order is preserved.
  # Values pass through the environment so no shell quoting can corrupt them.
  API_KEY_VALUE_ENV="$API_KEY_VALUE" EXTRA_API_KEYS_ENV="$EXTRA_API_KEYS_VALUE" /usr/local/bin/python3 - <<'PY'
import os
keys = []
seen = set()
for chunk in (os.environ.get("API_KEY_VALUE_ENV", ""), os.environ.get("EXTRA_API_KEYS_ENV", "")):
    # Commas and newlines are both accepted as key separators.
    for part in chunk.replace(",", "\n").splitlines():
        key = part.strip()
        if not key or key in seen:
            continue
        seen.add(key)
        keys.append(key)
def esc(text: str) -> str:
    # Escape for a double-quoted YAML scalar (backslashes first, then quotes).
    return text.replace("\\", "\\\\").replace('"', '\\"')
for key in keys:
    # The leading space indents each item under the "api-keys:" mapping key.
    print(f' - "{esc(key)}"')
PY
}
active_auth_dir() {
  # Print the auth directory that matches the config the gateway runs with:
  # the object-store mirror's auth dir when its config is active, otherwise
  # the default local auth dir.
  case "$GATEWAY_CONFIG_PATH" in
    "$OBJECTSTORE_CONFIG_PATH") printf '%s' "$OBJECTSTORE_AUTH_DIR" ;;
    *) printf '%s' "$AUTH_DIR" ;;
  esac
}
write_config_to_path() {
  # Render the gateway YAML config at $1 with auth-dir $2, creating both
  # directories first. All interpolated scalars are YAML-escaped; the
  # api-keys list is pre-rendered by render_api_keys_yaml.
  target_path="$1"
  target_auth_dir="$2"
  mkdir -p "$target_auth_dir"
  mkdir -p "$(dirname "$target_path")"
  API_KEYS_YAML=$(render_api_keys_yaml)
  MGMT_KEY_ESC=$(yaml_escape "$MGMT_KEY_VALUE")
  HOST_ESC=$(yaml_escape "$APP_HOST")
  PROXY_URL_ESC=$(yaml_escape "$PROXY_URL_VALUE")
  # The heredoc below is the literal config file; its indentation is YAML
  # structure, so keep it flush with the document, not the shell code.
  cat >"$target_path" <<EOF
host: "$HOST_ESC"
port: $APP_PORT
auth-dir: "$target_auth_dir"
api-keys:
$API_KEYS_YAML
remote-management:
  allow-remote: true
  secret-key: "$MGMT_KEY_ESC"
usage-statistics-enabled: true
proxy-url: "$PROXY_URL_ESC"
EOF
}
write_config() {
  # Render the baseline gateway config at the default CONFIG_PATH, using
  # whichever auth directory is currently active.
  target_dir="$(active_auth_dir)"
  write_config_to_path "$CONFIG_PATH" "$target_dir"
}
rewrite_active_gateway_config() {
  # Re-render the config only when the gateway still runs off the default
  # path; a restored object-store config is left untouched (this is the
  # "do not clobber persisted config at startup" guarantee).
  if [ "$GATEWAY_CONFIG_PATH" = "$CONFIG_PATH" ]; then
    write_config_to_path "$GATEWAY_CONFIG_PATH" "$(active_auth_dir)"
  fi
}
has_ss_upstream_secret() {
  # Succeed when any of the Shadowsocks upstream secret variables is set
  # and non-empty.
  for candidate in "${SS_UPSTREAM_URL:-}" "${SS_URL:-}" "${SS_UPSTREAM_URL_B64:-}" "${SS_URL_B64:-}" "${SS_UPSTREAM_URL_FILE:-}"; do
    if [ -n "$candidate" ]; then
      return 0
    fi
  done
  return 1
}
wait_for_upstream_bridge() {
  # Block until the bridge's SOCKS TCP port accepts connections, polling
  # every 0.25s for up to 20 seconds. Returns 0 once reachable, 1 on timeout.
  env UPSTREAM_SOCKS_HOST="$UPSTREAM_SOCKS_HOST" UPSTREAM_SOCKS_PORT="$UPSTREAM_SOCKS_PORT" \
  /usr/local/bin/python3 - <<'PY'
import os
import socket
import sys
import time
host = os.environ["UPSTREAM_SOCKS_HOST"]
port = int(os.environ["UPSTREAM_SOCKS_PORT"])
deadline = time.time() + 20
while time.time() < deadline:
    sock = socket.socket()
    sock.settimeout(1)
    try:
        sock.connect((host, port))
    except OSError:
        # Not listening yet; back off briefly and retry.
        time.sleep(0.25)
    else:
        sock.close()
        sys.exit(0)
    finally:
        # Runs on every path (including after sys.exit's SystemExit); the
        # second close on the success path is tolerated via OSError.
        try:
            sock.close()
        except OSError:
            pass
sys.exit(1)
PY
}
start_upstream_bridge() {
  # When a Shadowsocks upstream secret is configured: write a sing-box
  # config, start the bridge in the background, and wait for its SOCKS port.
  # If PROXY_URL was not explicitly set (or is "direct"), point the gateway
  # at the local bridge afterwards. No-op without a secret; returns 1 when
  # the binary is missing or the bridge never becomes ready.
  has_ss_upstream_secret || return 0
  [ -x "$SINGBOX_BIN" ] || {
    echo "sing-box binary not found at $SINGBOX_BIN" >&2
    return 1
  }
  echo "starting Shadowsocks upstream bridge on ${UPSTREAM_SOCKS_HOST}:${UPSTREAM_SOCKS_PORT}" >&2
  env \
  UPSTREAM_SOCKS_HOST="$UPSTREAM_SOCKS_HOST" \
  UPSTREAM_SOCKS_PORT="$UPSTREAM_SOCKS_PORT" \
  UPSTREAM_SINGBOX_CONFIG_PATH="$UPSTREAM_SINGBOX_CONFIG_PATH" \
  /usr/local/bin/python3 /opt/daili/ss_bridge.py write-config
  # Validate the generated config before launching (fails fast under set -e).
  "$SINGBOX_BIN" check -c "$UPSTREAM_SINGBOX_CONFIG_PATH" >/dev/null
  "$SINGBOX_BIN" run -c "$UPSTREAM_SINGBOX_CONFIG_PATH" &
  UPSTREAM_BRIDGE_PID="$!"
  if ! wait_for_upstream_bridge; then
    echo "Shadowsocks upstream bridge failed to become ready" >&2
    return 1
  fi
  # socks5h so the bridge also performs DNS resolution.
  if [ "$PROXY_URL_WAS_SET" != "x" ] || [ "$PROXY_URL_VALUE" = "direct" ]; then
    PROXY_URL_VALUE="socks5h://${UPSTREAM_SOCKS_HOST}:${UPSTREAM_SOCKS_PORT}"
  fi
}
wait_for_management() {
  # Poll the local management API until it answers HTTP 200, one attempt per
  # second for up to 60 attempts. Returns 0 when ready, 1 on timeout.
  i=0
  while [ "$i" -lt 60 ]; do
    # `|| true` keeps a refused connection from killing the script under set -e.
    code=$(curl -s -o /dev/null -w '%{http_code}' -H "X-Management-Key: $MGMT_KEY_VALUE" "http://127.0.0.1:${APP_PORT}/v0/management/usage" || true)
    if [ "$code" = "200" ]; then
      return 0
    fi
    i=$((i + 1))
    sleep 1
  done
  return 1
}
snapshot_mode() {
  # Print which usage-snapshot backend is configured, in priority order:
  # "url" > "r2" > "hf" > "local" (no remote backend).
  if [ -n "$USAGE_SNAPSHOT_READ_URL" ] || [ -n "$USAGE_SNAPSHOT_WRITE_URL" ]; then
    printf '%s' "url"
  elif [ -n "$USAGE_R2_ACCOUNT_ID" ] && [ -n "$USAGE_R2_BUCKET" ] && [ -n "$R2_ACCESS_KEY_VALUE" ] && [ -n "$R2_SECRET_KEY_VALUE" ]; then
    printf '%s' "r2"
  elif [ -n "$USAGE_HF_TOKEN" ] && [ -n "$USAGE_HF_REPO_ID" ]; then
    printf '%s' "hf"
  else
    printf '%s' "local"
  fi
}
objectstore_enabled() {
  # Print "1" when endpoint, bucket, access key and secret key are all set;
  # otherwise print "0". Callers compare the output, not the exit status.
  if [ -n "$OBJECTSTORE_ENDPOINT_VALUE" ] && [ -n "$OBJECTSTORE_BUCKET_VALUE" ] \
    && [ -n "$OBJECTSTORE_ACCESS_KEY_VALUE" ] && [ -n "$OBJECTSTORE_SECRET_KEY_VALUE" ]; then
    printf '%s' "1"
  else
    printf '%s' "0"
  fi
}
ensure_objectstore_config() {
  # Run the objectstore_sync helper with action "$1" ("restore" or "sync"),
  # passing every object-store setting through the environment. All helper
  # output is discarded; the helper's exit status becomes this function's.
  # Returns 1 immediately when the object store is not fully configured.
  [ "$(objectstore_enabled)" = "1" ] || return 1
  mkdir -p "$MC_CONFIG_DIR"
  OBJECTSTORE_ALIAS="$OBJECTSTORE_ALIAS_NAME" \
  OBJECTSTORE_ENDPOINT="$OBJECTSTORE_ENDPOINT_VALUE" \
  OBJECTSTORE_ACCESS_KEY="$OBJECTSTORE_ACCESS_KEY_VALUE" \
  OBJECTSTORE_SECRET_KEY="$OBJECTSTORE_SECRET_KEY_VALUE" \
  OBJECTSTORE_BUCKET="$OBJECTSTORE_BUCKET_VALUE" \
  OBJECTSTORE_ROOT="$OBJECTSTORE_MIRROR_ROOT" \
  OBJECTSTORE_CONFIG_FALLBACK="$CONFIG_PATH" \
  MC_CONFIG_DIR="$MC_CONFIG_DIR" \
  MANAGEMENT_PASSWORD="$MGMT_KEY_VALUE" \
  PORT="$APP_PORT" \
  /usr/local/bin/python3 /opt/daili/objectstore_sync.py "$1" >/dev/null 2>&1
}
restore_objectstore_state() {
  # Pull persisted config/auth state from the object store and, on success,
  # switch the gateway to the mirrored config path. Returns 1 when the
  # object store is unconfigured or the restore fails.
  if [ "$(objectstore_enabled)" != "1" ]; then
    return 1
  fi
  ensure_objectstore_config restore || return 1
  GATEWAY_CONFIG_PATH="$OBJECTSTORE_CONFIG_PATH"
}
sync_objectstore_state() {
  # Best-effort push of local state to the object store. A failed sync is
  # logged but never aborts the script (the `if !` condition shields the
  # helper's status from set -e); always returns 0.
  [ "$(objectstore_enabled)" = "1" ] || return 0
  if ! ensure_objectstore_config sync; then
    echo "objectstore sync failed" >&2
  fi
}
ensure_r2_config() {
  # Register (or refresh) the `mc` alias for the R2 bucket endpoint. Only
  # applies when the snapshot backend is R2. Proxy variables are stripped so
  # mc always talks to R2 directly; mc output is discarded.
  [ "$(snapshot_mode)" = "r2" ] || return 1
  mkdir -p "$MC_CONFIG_DIR"
  endpoint="$USAGE_R2_ENDPOINT"
  if [ -z "$endpoint" ]; then
    # Default Cloudflare R2 S3 endpoint derived from the account id.
    endpoint="https://${USAGE_R2_ACCOUNT_ID}.r2.cloudflarestorage.com"
  fi
  env -u HTTP_PROXY -u HTTPS_PROXY -u ALL_PROXY -u http_proxy -u https_proxy -u all_proxy \
  MC_CONFIG_DIR="$MC_CONFIG_DIR" \
  mc alias set "$R2_ALIAS_NAME" "$endpoint" "$R2_ACCESS_KEY_VALUE" "$R2_SECRET_KEY_VALUE" --api S3v4 --path on >/dev/null 2>&1
}
download_r2_snapshot() {
  # Fetch the usage snapshot from R2 into USAGE_SNAPSHOT_PATH if the remote
  # object exists. Best-effort: a missing object or failed copy leaves any
  # existing local snapshot untouched, and the function still returns 0.
  [ "$(snapshot_mode)" = "r2" ] || return 0
  ensure_r2_config || return 0
  target="${R2_ALIAS_NAME}/${USAGE_R2_BUCKET}/${USAGE_R2_KEY}"
  # Download to a temp file first so a partial copy never clobbers the
  # current snapshot; mv installs it atomically on success.
  tmp_path="${USAGE_SNAPSHOT_PATH}.download"
  mkdir -p "$(dirname "$USAGE_SNAPSHOT_PATH")"
  # set +e so mc failures can be inspected instead of aborting under set -e.
  set +e
  env -u HTTP_PROXY -u HTTPS_PROXY -u ALL_PROXY -u http_proxy -u https_proxy -u all_proxy \
  MC_CONFIG_DIR="$MC_CONFIG_DIR" \
  mc stat "$target" >/dev/null 2>&1
  exists="$?"
  if [ "$exists" -eq 0 ]; then
    env -u HTTP_PROXY -u HTTPS_PROXY -u ALL_PROXY -u http_proxy -u https_proxy -u all_proxy \
    MC_CONFIG_DIR="$MC_CONFIG_DIR" \
    mc cp "$target" "$tmp_path" >/dev/null 2>&1
    status="$?"
  else
    # No remote object yet: nothing to restore, treated as success.
    status=0
  fi
  set -e
  if [ "$exists" -eq 0 ] && [ "$status" -eq 0 ]; then
    mv "$tmp_path" "$USAGE_SNAPSHOT_PATH"
  else
    rm -f "$tmp_path"
  fi
}
upload_r2_snapshot() {
  # Push the local usage snapshot to R2. Skipped when R2 is not the active
  # backend or the snapshot file is empty/missing; an upload failure is
  # logged but never aborts the script.
  [ "$(snapshot_mode)" = "r2" ] || return 0
  [ -s "$USAGE_SNAPSHOT_PATH" ] || return 0
  ensure_r2_config || return 0
  target="${R2_ALIAS_NAME}/${USAGE_R2_BUCKET}/${USAGE_R2_KEY}"
  # set +e so the mc status can be inspected instead of tripping set -e.
  set +e
  env -u HTTP_PROXY -u HTTPS_PROXY -u ALL_PROXY -u http_proxy -u https_proxy -u all_proxy \
  MC_CONFIG_DIR="$MC_CONFIG_DIR" \
  mc cp "$USAGE_SNAPSHOT_PATH" "$target" >/dev/null 2>&1
  status="$?"
  set -e
  if [ "$status" -ne 0 ]; then
    echo "usage snapshot upload failed to r2://${USAGE_R2_BUCKET}/${USAGE_R2_KEY}" >&2
  fi
}
download_hf_snapshot() {
  # Best-effort restore of the usage snapshot from the Hugging Face repo via
  # the hf_snapshot helper; proxies are stripped and all failures ignored.
  [ "$(snapshot_mode)" = "hf" ] || return 0
  env -u HTTP_PROXY -u HTTPS_PROXY -u ALL_PROXY -u http_proxy -u https_proxy -u all_proxy \
  USAGE_HF_TOKEN="$USAGE_HF_TOKEN" \
  USAGE_HF_REPO_ID="$USAGE_HF_REPO_ID" \
  USAGE_HF_PATH="$USAGE_HF_PATH" \
  USAGE_SNAPSHOT_PATH="$USAGE_SNAPSHOT_PATH" \
  /usr/local/bin/python3 /opt/daili/hf_snapshot.py download >/dev/null 2>&1 || true
}
upload_hf_snapshot() {
  # Best-effort upload of the usage snapshot to Hugging Face. $1 ("1"/"0",
  # default "0") is passed as USAGE_HF_FORCE_UPLOAD — presumably it bypasses
  # the rotation throttle; confirm against hf_snapshot.py. Skipped when HF
  # is not the active backend or the snapshot is empty; failures ignored.
  force_upload="${1:-0}"
  [ "$(snapshot_mode)" = "hf" ] || return 0
  [ -s "$USAGE_SNAPSHOT_PATH" ] || return 0
  env -u HTTP_PROXY -u HTTPS_PROXY -u ALL_PROXY -u http_proxy -u https_proxy -u all_proxy \
  USAGE_HF_TOKEN="$USAGE_HF_TOKEN" \
  USAGE_HF_REPO_ID="$USAGE_HF_REPO_ID" \
  USAGE_HF_PATH="$USAGE_HF_PATH" \
  USAGE_HF_ROTATE_INTERVAL="$USAGE_HF_ROTATE_INTERVAL" \
  USAGE_HF_ROTATE_KEEP="$USAGE_HF_ROTATE_KEEP" \
  USAGE_HF_FORCE_UPLOAD="$force_upload" \
  USAGE_SNAPSHOT_PATH="$USAGE_SNAPSHOT_PATH" \
  /usr/local/bin/python3 /opt/daili/hf_snapshot.py upload >/dev/null 2>&1 || true
}
download_remote_snapshot() {
  # Restore the usage snapshot over plain HTTP(S). HTTP 200 installs the
  # file atomically; 404 means "no snapshot yet" and is silent; other codes
  # log a warning. Transport errors never abort the script (returns 0).
  [ "$(snapshot_mode)" = "url" ] || return 0
  [ -n "$USAGE_SNAPSHOT_READ_URL" ] || return 0
  tmp_path="${USAGE_SNAPSHOT_PATH}.download"
  mkdir -p "$(dirname "$USAGE_SNAPSHOT_PATH")"
  # set +e so curl's status can be inspected instead of tripping set -e.
  set +e
  if [ -n "$USAGE_SNAPSHOT_AUTHORIZATION" ]; then
    code=$(curl --noproxy '*' -sS -L -o "$tmp_path" -w '%{http_code}' \
      -H "Authorization: $USAGE_SNAPSHOT_AUTHORIZATION" \
      "$USAGE_SNAPSHOT_READ_URL")
  else
    code=$(curl --noproxy '*' -sS -L -o "$tmp_path" -w '%{http_code}' "$USAGE_SNAPSHOT_READ_URL")
  fi
  # $? is curl's exit status: the assignment preserves the command
  # substitution's status, and it is the last command of the if either way.
  status="$?"
  set -e
  if [ "$status" -ne 0 ]; then
    rm -f "$tmp_path"
    echo "usage snapshot download failed from ${USAGE_SNAPSHOT_READ_URL}" >&2
    return 0
  fi
  if [ "$code" = "200" ]; then
    mv "$tmp_path" "$USAGE_SNAPSHOT_PATH"
    return 0
  fi
  rm -f "$tmp_path"
  if [ "$code" != "404" ]; then
    echo "usage snapshot download returned HTTP ${code} from ${USAGE_SNAPSHOT_READ_URL}" >&2
  fi
}
upload_remote_snapshot() {
  # Push the usage snapshot to USAGE_SNAPSHOT_WRITE_URL with the configured
  # HTTP method (PUT by default). Anything other than a 2xx response (or a
  # curl failure) is logged; the function itself never aborts the script.
  [ "$(snapshot_mode)" = "url" ] || return 0
  [ -n "$USAGE_SNAPSHOT_WRITE_URL" ] || return 0
  [ -s "$USAGE_SNAPSHOT_PATH" ] || return 0
  # set +e so curl's status can be inspected instead of tripping set -e.
  set +e
  if [ -n "$USAGE_SNAPSHOT_AUTHORIZATION" ]; then
    code=$(curl --noproxy '*' -sS -o /dev/null -w '%{http_code}' -X "$USAGE_SNAPSHOT_WRITE_METHOD" \
      -H 'Content-Type: application/json' \
      -H "Authorization: $USAGE_SNAPSHOT_AUTHORIZATION" \
      --data-binary "@${USAGE_SNAPSHOT_PATH}" \
      "$USAGE_SNAPSHOT_WRITE_URL")
  else
    code=$(curl --noproxy '*' -sS -o /dev/null -w '%{http_code}' -X "$USAGE_SNAPSHOT_WRITE_METHOD" \
      -H 'Content-Type: application/json' \
      --data-binary "@${USAGE_SNAPSHOT_PATH}" \
      "$USAGE_SNAPSHOT_WRITE_URL")
  fi
  status="$?"
  set -e
  # When status is non-zero the short-circuit skips the numeric comparisons,
  # so a non-numeric $code cannot break the test.
  if [ "$status" -ne 0 ] || [ "$code" -lt 200 ] || [ "$code" -ge 300 ]; then
    echo "usage snapshot upload failed (status=${status}, http=${code}) to ${USAGE_SNAPSHOT_WRITE_URL}" >&2
  fi
}
backup_usage() {
  # Export current usage stats from the running gateway and push them to
  # whichever snapshot backend is configured. "$1" == "force" propagates a
  # forced HF upload. No-op when the gateway is not running or the
  # management API never becomes reachable.
  force_upload="0"
  if [ "${1:-}" = "force" ]; then
    force_upload="1"
  fi
  [ -n "$GATEWAY_PID" ] || return 0
  kill -0 "$GATEWAY_PID" 2>/dev/null || return 0
  wait_for_management || return 0
  tmp_path="${USAGE_SNAPSHOT_PATH}.tmp"
  mkdir -p "$(dirname "$USAGE_SNAPSHOT_PATH")"
  # Export into a temp file so a failed export never truncates the snapshot.
  if curl -fsS -H "X-Management-Key: $MGMT_KEY_VALUE" "http://127.0.0.1:${APP_PORT}/v0/management/usage/export" >"$tmp_path"; then
    mv "$tmp_path" "$USAGE_SNAPSHOT_PATH"
    # Each uploader gates itself on snapshot_mode, so at most one acts.
    upload_r2_snapshot
    upload_hf_snapshot "$force_upload"
    upload_remote_snapshot
  else
    rm -f "$tmp_path"
  fi
}
refresh_model_prices() {
  # Best-effort refresh of the local model-price table (and the management
  # HTML page) via the bootstrap helper; all failures are silently ignored.
  env \
  MODEL_PRICES_SOURCE_URL="$MODEL_PRICES_SOURCE_URL" \
  MODEL_PRICES_OUTPUT_PATH="$MODEL_PRICES_OUTPUT_PATH" \
  MODEL_PRICES_FETCH_TIMEOUT="$MODEL_PRICES_FETCH_TIMEOUT" \
  MANAGEMENT_HTML_PATH="$MANAGEMENT_HTML_PATH" \
  WRITABLE_PATH="$AUTH_BASE" \
  /usr/local/bin/python3 /opt/daili/model_price_bootstrap.py >/dev/null 2>&1 || true
}
restore_usage() {
  # Pull the latest usage snapshot (each downloader gates itself on
  # snapshot_mode, so at most one acts) and import it into the running
  # gateway once the management API is reachable. No-op when no snapshot
  # file materializes.
  download_r2_snapshot
  download_hf_snapshot
  download_remote_snapshot
  [ -s "$USAGE_SNAPSHOT_PATH" ] || return 0
  wait_for_management || return 0
  curl -fsS -X POST \
    -H "X-Management-Key: $MGMT_KEY_VALUE" \
    -H "Content-Type: application/json" \
    --data-binary "@${USAGE_SNAPSHOT_PATH}" \
    "http://127.0.0.1:${APP_PORT}/v0/management/usage/import" >/dev/null
}
start_autosave_loop() {
  # Background loop: refresh pricing and back usage stats up every
  # USAGE_AUTOSAVE_INTERVAL seconds. Disabled when the interval is empty or 0.
  if [ -z "$USAGE_AUTOSAVE_INTERVAL" ] || [ "$USAGE_AUTOSAVE_INTERVAL" = "0" ]; then
    return 0
  fi
  (
    while :; do
      sleep "$USAGE_AUTOSAVE_INTERVAL"
      refresh_model_prices
      backup_usage
    done
  ) &
  AUTOSAVE_PID="$!"
}
start_objectstore_sync_loop() {
  # Background loop: mirror local state to the object store every
  # OBJECTSTORE_SYNC_INTERVAL seconds. Disabled when the store is not
  # configured or the interval is empty/0.
  [ "$(objectstore_enabled)" = "1" ] || return 0
  if [ -z "$OBJECTSTORE_SYNC_INTERVAL" ] || [ "$OBJECTSTORE_SYNC_INTERVAL" = "0" ]; then
    return 0
  fi
  (
    while :; do
      sleep "$OBJECTSTORE_SYNC_INTERVAL"
      sync_objectstore_state
    done
  ) &
  OBJECTSTORE_SYNC_PID="$!"
}
run_cleanup_invalid_auths() {
  # Run the auth-cleanup helper against the live gateway. Only runs when
  # cleanup is enabled, the gateway process is alive, and its management
  # API is reachable. `|| true` keeps helper failures from tripping set -e.
  [ "$CLEANUP_ENABLED" = "1" ] || return 0
  [ -n "$GATEWAY_PID" ] || return 0
  kill -0 "$GATEWAY_PID" 2>/dev/null || return 0
  wait_for_management || return 0
  MANAGEMENT_PASSWORD="$MGMT_KEY_VALUE" PORT="$APP_PORT" \
  OBJECTSTORE_ENDPOINT="$OBJECTSTORE_ENDPOINT_VALUE" \
  OBJECTSTORE_BUCKET="$OBJECTSTORE_BUCKET_VALUE" \
  OBJECTSTORE_ACCESS_KEY="$OBJECTSTORE_ACCESS_KEY_VALUE" \
  OBJECTSTORE_SECRET_KEY="$OBJECTSTORE_SECRET_KEY_VALUE" \
  OBJECTSTORE_LOCAL_PATH="$OBJECTSTORE_LOCAL_BASE" \
  MC_CONFIG_DIR="$MC_CONFIG_DIR" \
  /usr/local/bin/python3 /opt/daili/cleanup_invalid_auths.py 2>&1 || true
}
start_cleanup_loop() {
  # Background loop: purge invalid auth files every CLEANUP_INTERVAL seconds.
  # Disabled when cleanup is off or the interval is empty/0.
  [ "$CLEANUP_ENABLED" = "1" ] || return 0
  if [ -z "$CLEANUP_INTERVAL" ] || [ "$CLEANUP_INTERVAL" = "0" ]; then
    return 0
  fi
  (
    while :; do
      sleep "$CLEANUP_INTERVAL"
      run_cleanup_invalid_auths
    done
  ) &
  CLEANUP_PID="$!"
}
stop_background_jobs() {
  # Terminate and reap every helper process this script may have spawned.
  # Each PID is only acted on when non-empty and still alive; kill/wait
  # failures are swallowed so shutdown always makes progress.
  for job_pid in "$AUTOSAVE_PID" "$OBJECTSTORE_SYNC_PID" "$CLEANUP_PID" "$UPSTREAM_BRIDGE_PID"; do
    if [ -n "$job_pid" ] && kill -0 "$job_pid" 2>/dev/null; then
      kill "$job_pid" 2>/dev/null || true
      wait "$job_pid" 2>/dev/null || true
    fi
  done
}
shutdown() {
  # Graceful teardown in dependency order: stop helper loops first, flush a
  # final usage backup and object-store sync, then stop the gateway itself.
  stop_background_jobs
  backup_usage force
  sync_objectstore_state
  [ -n "$GATEWAY_PID" ] || return 0
  if kill -0 "$GATEWAY_PID" 2>/dev/null; then
    kill "$GATEWAY_PID" 2>/dev/null || true
    wait "$GATEWAY_PID" 2>/dev/null || true
  fi
}
# Flush state and exit cleanly on SIGINT/SIGTERM.
trap 'shutdown; exit 0' INT TERM
start_upstream_bridge
write_config
# Prefer config/auth state mirrored from the object store when available;
# otherwise keep the freshly rendered local config.
restore_objectstore_state || true
rewrite_active_gateway_config
# Clear proxy variables (forcing direct OAuth connections) and R2 auth
# variables so the Go binary does not trigger its own object-store bootstrap.
unset HTTP_PROXY HTTPS_PROXY ALL_PROXY http_proxy https_proxy all_proxy
unset OPENAI_AUTH_R2_ACCOUNT_ID OPENAI_AUTH_R2_BUCKET OPENAI_AUTH_R2_PREFIX R2_ACCESS_KEY_ID R2_SECRET_ACCESS_KEY
env -u OBJECTSTORE_ENDPOINT -u OBJECTSTORE_ACCESS_KEY -u OBJECTSTORE_SECRET_KEY -u OBJECTSTORE_BUCKET -u OBJECTSTORE_LOCAL_PATH \
  -u objectstore_endpoint -u objectstore_access_key -u objectstore_secret_key -u objectstore_bucket -u objectstore_local_path \
  "$GATEWAY_BIN" -config "$GATEWAY_CONFIG_PATH" &
GATEWAY_PID="$!"
restore_usage || true
refresh_model_prices
start_autosave_loop
start_objectstore_sync_loop
start_cleanup_loop
# BUGFIX: guard `wait` against `set -e`. Previously a bare `wait` followed by
# status="$?" meant that when the gateway exited non-zero, set -e killed the
# script immediately and the final backup/sync below never ran. Capture the
# status in the same command instead, and still exit with it at the end.
status=0
wait "$GATEWAY_PID" || status="$?"
stop_background_jobs
refresh_model_prices
backup_usage force
sync_objectstore_state
exit "$status"