ford442 committed on
Commit
f4cc550
·
verified ·
1 Parent(s): 81511a9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +90 -17
app.py CHANGED
@@ -104,18 +104,18 @@ async def list_gcs_folder(folder: str = Query(..., description="Folder name, e.g
104
  # This handles cases where user types "song" but folder is "songs/"
105
  config = STORAGE_MAP.get(folder)
106
  prefix = config["folder"] if config else f"{folder}/"
107
-
108
  try:
109
  # 2. Run GCS List Blobs in a thread (to keep server fast)
110
  def _fetch_blobs():
111
  # 'delimiter' makes it behave like a folder (doesn't show sub-sub-files)
112
  blobs = bucket.list_blobs(prefix=prefix, delimiter="/")
113
-
114
  file_list = []
115
  for blob in blobs:
116
  # Remove the folder prefix (e.g. "songs/beat1.json" -> "beat1.json")
117
  name = blob.name.replace(prefix, "")
118
- if name and name != "":
119
  file_list.append({
120
  "filename": name,
121
  "size": blob.size,
@@ -129,7 +129,7 @@ async def list_gcs_folder(folder: str = Query(..., description="Folder name, e.g
129
 
130
  except Exception as e:
131
  raise HTTPException(status_code=500, detail=str(e))
132
-
133
  # --- MODELS ---
134
  class ItemPayload(BaseModel):
135
  name: str
@@ -159,7 +159,7 @@ def _write_json_sync(blob_path, data):
159
  blob = bucket.blob(blob_path)
160
  # Upload as JSON string with correct content type
161
  blob.upload_from_string(
162
- json.dumps(data),
163
  content_type='application/json'
164
  )
165
 
@@ -210,9 +210,9 @@ async def upload_item(payload: ItemPayload):
210
  "date": date_str,
211
  "type": item_type,
212
  "description": payload.description,
213
- "filename": filename
214
  }
215
-
216
  # Add meta to the actual data file too
217
  payload.data["_cloud_meta"] = meta
218
 
@@ -235,6 +235,79 @@ async def upload_item(payload: ItemPayload):
235
  except Exception as e:
236
  raise HTTPException(500, f"Upload failed: {str(e)}")
237
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
238
  # --- 3. FETCH JSON ITEM ---
239
  @app.get("/api/songs/{item_id}")
240
  async def get_item(item_id: str, type: Optional[str] = Query(None)):
@@ -244,11 +317,11 @@ async def get_item(item_id: str, type: Optional[str] = Query(None)):
244
  for t in search_types:
245
  config = STORAGE_MAP.get(t)
246
  filepath = f"{config['folder']}{item_id}.json"
247
-
248
  # Check existence efficiently
249
  blob = bucket.blob(filepath)
250
  exists = await run_io(blob.exists)
251
-
252
  if exists:
253
  data = await run_io(blob.download_as_text)
254
  return json.loads(data)
@@ -279,7 +352,7 @@ async def upload_sample(file: UploadFile = File(...), author: str = Form(...), d
279
  try:
280
  # 1. Stream Upload to GCS
281
  blob = bucket.blob(full_path)
282
-
283
  # GCS Python client doesn't support async streaming upload easily out of the box,
284
  # but upload_from_file is efficient.
285
  # We wrap the spooled temp file from FastAPI
@@ -293,7 +366,7 @@ async def upload_sample(file: UploadFile = File(...), author: str = Form(...), d
293
 
294
  await run_io(_update_idx)
295
  await cache.delete("library:sample")
296
-
297
  return {"success": True, "id": sample_id}
298
  except Exception as e:
299
  raise HTTPException(500, str(e))
@@ -301,11 +374,11 @@ async def upload_sample(file: UploadFile = File(...), author: str = Form(...), d
301
  @app.get("/api/samples/{sample_id}")
302
  async def get_sample(sample_id: str):
303
  config = STORAGE_MAP["sample"]
304
-
305
  # 1. Lookup in Index (to get original filename/extension)
306
  idx = await run_io(_read_json_sync, config["index"])
307
  entry = next((i for i in idx if i["id"] == sample_id), None)
308
-
309
  if not entry:
310
  raise HTTPException(404, "Sample not found in index")
311
 
@@ -336,19 +409,19 @@ async def sync_gcs_storage():
336
  Scans Google Cloud Storage to rebuild JSON indexes based on actual files.
337
  """
338
  report = {}
339
-
340
  async with INDEX_LOCK:
341
  for item_type, config in STORAGE_MAP.items():
342
  if item_type == "default": continue
343
 
344
  added = 0
345
  removed = 0
346
-
347
  try:
348
  # 1. List ALL objects in this folder prefix
349
  # prefix="songs/" returns "songs/123.json", "songs/456.json", etc.
350
  blobs = await run_io(lambda: list(bucket.list_blobs(prefix=config["folder"])))
351
-
352
  # Filter out the index file itself
353
  actual_files = []
354
  for b in blobs:
@@ -359,7 +432,7 @@ async def sync_gcs_storage():
359
 
360
  # 2. Get Current Index
361
  index_data = await run_io(_read_json_sync, config["index"])
362
-
363
  # 3. Compare
364
  index_map = {item["filename"]: item for item in index_data}
365
  disk_set = set(actual_files)
 
104
  # This handles cases where user types "song" but folder is "songs/"
105
  config = STORAGE_MAP.get(folder)
106
  prefix = config["folder"] if config else f"{folder}/"
107
+
108
  try:
109
  # 2. Run GCS List Blobs in a thread (to keep server fast)
110
  def _fetch_blobs():
111
  # 'delimiter' makes it behave like a folder (doesn't show sub-sub-files)
112
  blobs = bucket.list_blobs(prefix=prefix, delimiter="/")
113
+
114
  file_list = []
115
  for blob in blobs:
116
  # Remove the folder prefix (e.g. "songs/beat1.json" -> "beat1.json")
117
  name = blob.name.replace(prefix, "")
118
+ if name and name != "":
119
  file_list.append({
120
  "filename": name,
121
  "size": blob.size,
 
129
 
130
  except Exception as e:
131
  raise HTTPException(status_code=500, detail=str(e))
132
+
133
  # --- MODELS ---
134
  class ItemPayload(BaseModel):
135
  name: str
 
159
  blob = bucket.blob(blob_path)
160
  # Upload as JSON string with correct content type
161
  blob.upload_from_string(
162
+ json.dumps(data),
163
  content_type='application/json'
164
  )
165
 
 
210
  "date": date_str,
211
  "type": item_type,
212
  "description": payload.description,
213
+ "filename": filename
214
  }
215
+
216
  # Add meta to the actual data file too
217
  payload.data["_cloud_meta"] = meta
218
 
 
235
  except Exception as e:
236
  raise HTTPException(500, f"Upload failed: {str(e)}")
237
 
238
# --- 2.5 UPDATE JSON (PUT) ---
@app.put("/api/songs/{item_id}")
async def update_item(item_id: str, payload: ItemPayload):
    """Overwrite an existing JSON item in GCS and refresh its index entry.

    Mirrors the upload flow: the payload is written to
    ``{folder}/{item_id}.json`` (overwriting any previous version), the
    item's entry in the per-type index file is replaced and moved to the
    top, and the listing cache is cleared.

    Args:
        item_id: The item's id; by convention the stored object is
            ``{item_id}.json`` inside the type's folder.
        payload: New name/author/description/type and the full ``data``
            document to store.

    Returns:
        ``{"success": True, "id": item_id, "action": "updated"}``.

    Raises:
        HTTPException: 500 if the GCS write or index update fails.
    """
    # Fall back to "song" when the payload carries an unknown type,
    # matching the behavior of the upload endpoint.
    item_type = payload.type if payload.type in STORAGE_MAP else "song"
    config = STORAGE_MAP[item_type]

    # Storage convention for this app: the object is named {id}.json.
    filename = f"{item_id}.json"
    # FIX: the previous version built the path as
    # f"{config['folder']}(unknown)", so every update was written to a
    # literal "(unknown)" object while the index pointed at {id}.json.
    full_path = f"{config['folder']}{filename}"

    # "date" is treated as last-updated so edited items bubble to the
    # top of listings (the original creation date is not preserved).
    date_str = datetime.now().strftime("%Y-%m-%d")

    new_meta = {
        "id": item_id,
        "name": payload.name,
        "author": payload.author,
        "date": date_str,
        "type": item_type,
        "description": payload.description,
        "filename": filename
    }

    # Embed the metadata in the data file itself, same as upload_item.
    payload.data["_cloud_meta"] = new_meta

    async with INDEX_LOCK:
        try:
            # 1. Overwrite the data file. (We deliberately do not 404 on
            #    a missing object: PUT here is an upsert, and an extra
            #    existence round-trip to GCS would double the latency.)
            await run_io(_write_json_sync, full_path, payload.data)

            # 2. Rewrite the index: drop any stale entry for this id and
            #    insert the fresh metadata at position 0.
            def _update_index_logic():
                current = _read_json_sync(config["index"])
                if not isinstance(current, list):
                    current = []
                existing_index = next(
                    (i for i, item in enumerate(current) if item.get("id") == item_id),
                    -1,
                )
                if existing_index != -1:
                    current.pop(existing_index)
                current.insert(0, new_meta)
                _write_json_sync(config["index"], current)

            await run_io(_update_index_logic)

            # 3. Invalidate cached listings so clients see the update.
            await cache.clear()
            return {"success": True, "id": item_id, "action": "updated"}

        except Exception as e:
            raise HTTPException(500, f"Update failed: {str(e)}")
309
+
310
+
311
  # --- 3. FETCH JSON ITEM ---
312
  @app.get("/api/songs/{item_id}")
313
  async def get_item(item_id: str, type: Optional[str] = Query(None)):
 
317
  for t in search_types:
318
  config = STORAGE_MAP.get(t)
319
  filepath = f"{config['folder']}{item_id}.json"
320
+
321
  # Check existence efficiently
322
  blob = bucket.blob(filepath)
323
  exists = await run_io(blob.exists)
324
+
325
  if exists:
326
  data = await run_io(blob.download_as_text)
327
  return json.loads(data)
 
352
  try:
353
  # 1. Stream Upload to GCS
354
  blob = bucket.blob(full_path)
355
+
356
  # GCS Python client doesn't support async streaming upload easily out of the box,
357
  # but upload_from_file is efficient.
358
  # We wrap the spooled temp file from FastAPI
 
366
 
367
  await run_io(_update_idx)
368
  await cache.delete("library:sample")
369
+
370
  return {"success": True, "id": sample_id}
371
  except Exception as e:
372
  raise HTTPException(500, str(e))
 
374
  @app.get("/api/samples/{sample_id}")
375
  async def get_sample(sample_id: str):
376
  config = STORAGE_MAP["sample"]
377
+
378
  # 1. Lookup in Index (to get original filename/extension)
379
  idx = await run_io(_read_json_sync, config["index"])
380
  entry = next((i for i in idx if i["id"] == sample_id), None)
381
+
382
  if not entry:
383
  raise HTTPException(404, "Sample not found in index")
384
 
 
409
  Scans Google Cloud Storage to rebuild JSON indexes based on actual files.
410
  """
411
  report = {}
412
+
413
  async with INDEX_LOCK:
414
  for item_type, config in STORAGE_MAP.items():
415
  if item_type == "default": continue
416
 
417
  added = 0
418
  removed = 0
419
+
420
  try:
421
  # 1. List ALL objects in this folder prefix
422
  # prefix="songs/" returns "songs/123.json", "songs/456.json", etc.
423
  blobs = await run_io(lambda: list(bucket.list_blobs(prefix=config["folder"])))
424
+
425
  # Filter out the index file itself
426
  actual_files = []
427
  for b in blobs:
 
432
 
433
  # 2. Get Current Index
434
  index_data = await run_io(_read_json_sync, config["index"])
435
+
436
  # 3. Compare
437
  index_map = {item["filename"]: item for item in index_data}
438
  disk_set = set(actual_files)