Lenson committed on
Commit
124c1a0
·
1 Parent(s): 581d2f1

feat: add Supabase Storage sync for config persistence

Browse files

- Add download/upload functions using Supabase Storage API
- Sync config/ and data/ directories
- Default bucket: codex-proxy
- Remove datasets config (not supported by Docker Spaces)

Files changed (2) hide show
  1. .hf/setup.sh +130 -6
  2. hf_space_metadata.yaml +0 -2
.hf/setup.sh CHANGED
@@ -5,21 +5,145 @@ echo "========================================"
5
  echo " Codex Proxy - HF Spaces Setup"
6
  echo "========================================"
7
 
8
- # HF Spaces automatically mounts datasets to /data
9
- # Data persistence is handled by HF infrastructure
 
 
10
 
11
  # Set HF Space environment variables if available
12
  if [ -n "$HF_SPACE_ID" ]; then
13
  echo "Running on Hugging Face Spaces: $HF_SPACE_ID"
14
  fi
15
 
16
- # Ensure directories exist (data mounted by HF if configured)
17
  mkdir -p /app/data
18
  mkdir -p /app/config
19
 
20
- # Change to app directory
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  cd /app
22
 
23
- # Start the server
24
- echo "Starting Codex Proxy..."
25
  exec node dist/index.js
 
5
  echo " Codex Proxy - HF Spaces Setup"
6
  echo "========================================"
7
 
8
# --- Supabase Storage settings (all overridable via environment) ---
# REST endpoint of the Supabase project used for persistence.
SUPABASE_URL="${SUPABASE_URL:-https://mgcfsinockiucyvptluv.supabase.co}"
# API key; must be supplied (e.g. as an HF Spaces secret) for sync to run.
SUPABASE_KEY="${SUPABASE_KEY:-}"
# Storage bucket that holds the synced config/ and data/ trees.
SUPABASE_BUCKET="${SUPABASE_BUCKET:-codex-proxy}"
12
 
13
# Announce the Space ID when running on Hugging Face Spaces.
if [ -n "$HF_SPACE_ID" ]; then
  echo "Running on Hugging Face Spaces: $HF_SPACE_ID"
fi

# Make sure the persisted directories exist before any sync touches them.
mkdir -p /app/data /app/config

echo "[1/3] Starting Supabase sync..."
23
+
24
+ # ===== Supabase Storage Functions =====
25
+
26
# Pull previously-synced files from the Supabase Storage bucket into /app.
# Globals (read): SUPABASE_URL, SUPABASE_KEY, SUPABASE_BUCKET.
# Returns 1 when no key is configured or the bucket listing fails/errs.
download_from_supabase() {
  echo "Downloading from Supabase Storage..."
  if [ -z "$SUPABASE_KEY" ]; then
    echo "SUPABASE_KEY not set, skipping download"
    return 1
  fi

  # List every object in the bucket (recursive, capped at 100 entries).
  # Declaration split from assignment so curl's exit status is not masked.
  local remote_list
  remote_list=$(curl -s -X POST \
    -H "Authorization: Bearer $SUPABASE_KEY" \
    -H "Content-Type: application/json" \
    -d '{"prefix":"","search":"","limit":100,"offsets":[],"sortBy":{"column":"name","order":"asc"},"recursive":true}' \
    "$SUPABASE_URL/storage/v1/object/list/$SUPABASE_BUCKET")

  # Match the quoted JSON key so a file merely *named* "error" does not
  # trigger a false positive (the old bare grep for error did).
  if [ -z "$remote_list" ] || printf '%s' "$remote_list" | grep -q '"error"'; then
    echo "No remote data found or error, starting fresh"
    return 1
  fi

  # Pass credentials to Python via the environment instead of interpolating
  # them into the script source: interpolation broke (and was injectable)
  # whenever SUPABASE_KEY/URL contained quotes or shell metacharacters.
  printf '%s' "$remote_list" | \
    SUPABASE_URL="$SUPABASE_URL" SUPABASE_KEY="$SUPABASE_KEY" \
    SUPABASE_BUCKET="$SUPABASE_BUCKET" python3 -c '
import json, sys, os, urllib.request

data = json.load(sys.stdin)
base = os.environ["SUPABASE_URL"]
key = os.environ["SUPABASE_KEY"]
bucket = os.environ["SUPABASE_BUCKET"]
app_dir = "/app"

for item in data:
    # Entries without an id are folders; skip them.
    if not item.get("id"):
        continue
    remote_path = item["name"]
    # Skip root-level files: only config/ and data/ subtrees are synced.
    if "/" not in remote_path:
        continue
    local_path = os.path.join(app_dir, remote_path)
    os.makedirs(os.path.dirname(local_path), exist_ok=True)

    url = f"{base}/storage/v1/object/{bucket}/{remote_path}"
    req = urllib.request.Request(url, headers={"Authorization": f"Bearer {key}"})
    try:
        with urllib.request.urlopen(req) as resp, open(local_path, "wb") as f:
            f.write(resp.read())
        print(f"Downloaded: {remote_path}")
    except Exception as e:
        # Best-effort sync: report and continue with the next object.
        print(f"Failed: {remote_path} - {e}")
'
  echo "Download complete"
}
74
+
75
# Push every file under /app/config and /app/data to the Supabase bucket.
# Files over ~5 MB are skipped. Returns 1 (silently) when no key is set.
# Globals (read): SUPABASE_URL, SUPABASE_KEY, SUPABASE_BUCKET.
upload_to_supabase() {
  if [ -z "$SUPABASE_KEY" ]; then
    return 1
  fi

  # The two directory loops were identical copy-paste; factored out.
  _upload_tree /app/config
  _upload_tree /app/data
}

# Helper: upload one directory tree to the bucket (no-op if it is absent).
_upload_tree() {
  local dir="$1"
  [ -d "$dir" ] || return 0

  find "$dir" -type f | while IFS= read -r file; do
    # Bucket path mirrors the path relative to /app (e.g. config/foo.json);
    # anchored prefix strip replaces the old unanchored sed substitution.
    local relative_path="${file#/app/}"

    # stat -f%z is BSD/macOS, stat -c%s is GNU coreutils; try both.
    local size
    size=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null)

    # Guard: an empty size (both stat forms failed) used to make the
    # numeric -gt test itself error; skip such files explicitly.
    if [ -z "$size" ]; then
      echo "Skipping unreadable file: $file"
      continue
    fi
    if [ "$size" -gt 5000000 ]; then
      echo "Skipping large file: $file ($size bytes)"
      continue
    fi

    curl -s -X POST \
      -H "Authorization: Bearer $SUPABASE_KEY" \
      -H "Content-Type: application/octet-stream" \
      --data-binary @"$file" \
      "$SUPABASE_URL/storage/v1/object/$SUPABASE_BUCKET/$relative_path" && \
      echo "Uploaded: $relative_path"
  done
}
120
+
121
# Start persistence: one initial download, then a detached loop that
# uploads local state every 3 minutes. Returns 1 when no key is set.
start_supabase_sync() {
  echo "Starting Supabase sync..."

  if [ -z "$SUPABASE_KEY" ]; then
    echo "SUPABASE_KEY not set. Set it in HF Spaces secrets."
    return 1
  fi

  # Seed local state from the bucket before the upload loop begins.
  # NOTE(review): the script tail also calls download_from_supabase and
  # backgrounds this whole function — the duplicate download looks
  # unintentional; confirm before simplifying.
  download_from_supabase

  # Periodic uploader, detached so setup can continue to exec the server.
  while :; do
    printf '[%s] Uploading to Supabase...\n' "$(date)"
    upload_to_supabase
    echo "Upload complete, waiting 3 minutes..."
    sleep 180
  done &

  echo "Supabase sync background process started (PID: $!)"
}
140
+
141
# Launch the sync supervisor in the background so startup is not blocked.
# NOTE(review): start_supabase_sync already performs an initial download
# and backgrounds its own loop, so the '&' here plus the foreground
# download below duplicate work — confirm intent before removing either.
start_supabase_sync &

echo "[2/3] Downloading config from Supabase..."
# Blocking download so config is in place before the server boots.
download_from_supabase

echo "[3/3] Starting Codex Proxy..."
cd /app

# Replace the shell with the Node server (keeps it as the container's
# foreground process for HF Spaces lifecycle/signals).
exec node dist/index.js
hf_space_metadata.yaml CHANGED
@@ -7,6 +7,4 @@ sdk: docker
7
  app_port: 7860
8
  pinned: false
9
  license: mit
10
- datasets:
11
- - lenson78/codex-proxy-data
12
  ---
 
7
  app_port: 7860
8
  pinned: false
9
  license: mit
 
 
10
  ---