Spaces:
Paused
Paused
hunyuan model
Browse files- main.py +2 -2
- routers/user_models.py +216 -18
- services/hunyuan_service.py +60 -0
- subscription_integration_example.py +0 -193
- utils/pubsub.py +0 -52
- utils/worker.py +0 -96
main.py
CHANGED
|
@@ -230,7 +230,7 @@ async def sync_supabase_user(request: SyncUserRequest, supabase_token: str = Dep
|
|
| 230 |
# Initialize credits for new user
|
| 231 |
supabase.from_("User_Credit_Account").insert({
|
| 232 |
"user_id": supabase_user["id"],
|
| 233 |
-
"num_of_available_gens":
|
| 234 |
}).execute()
|
| 235 |
logging.info("User credits initialized successfully")
|
| 236 |
except Exception as create_error:
|
|
@@ -300,7 +300,7 @@ async def complete_profile(request: CompleteProfileRequest, current_user: User =
|
|
| 300 |
if not credit.data:
|
| 301 |
supabase.from_("User_Credit_Account").insert({
|
| 302 |
"user_id": current_user.id,
|
| 303 |
-
"num_of_available_gens":
|
| 304 |
}).execute()
|
| 305 |
return {"message": "Profile completed and credits initialized."}
|
| 306 |
|
|
|
|
| 230 |
# Initialize credits for new user
|
| 231 |
supabase.from_("User_Credit_Account").insert({
|
| 232 |
"user_id": supabase_user["id"],
|
| 233 |
+
"num_of_available_gens": 0
|
| 234 |
}).execute()
|
| 235 |
logging.info("User credits initialized successfully")
|
| 236 |
except Exception as create_error:
|
|
|
|
| 300 |
if not credit.data:
|
| 301 |
supabase.from_("User_Credit_Account").insert({
|
| 302 |
"user_id": current_user.id,
|
| 303 |
+
"num_of_available_gens": 0
|
| 304 |
}).execute()
|
| 305 |
return {"message": "Profile completed and credits initialized."}
|
| 306 |
|
routers/user_models.py
CHANGED
|
@@ -1,16 +1,13 @@
|
|
| 1 |
-
from fastapi import APIRouter, Depends, HTTPException,
|
| 2 |
from fastapi.responses import JSONResponse
|
| 3 |
from auth import get_current_active_user, User, supabase
|
| 4 |
import logging
|
| 5 |
import httpx
|
| 6 |
import os
|
| 7 |
-
import json
|
| 8 |
from typing import Optional, Dict, Any
|
| 9 |
-
from sse_starlette.sse import EventSourceResponse
|
| 10 |
-
import asyncio
|
| 11 |
from pydantic import BaseModel
|
| 12 |
-
from utils.pubsub import pubsub as inmem_pubsub
|
| 13 |
import base64
|
|
|
|
| 14 |
|
| 15 |
router = APIRouter(
|
| 16 |
prefix="/user/models",
|
|
@@ -18,18 +15,6 @@ router = APIRouter(
|
|
| 18 |
dependencies=[Depends(get_current_active_user)]
|
| 19 |
)
|
| 20 |
|
| 21 |
-
"""
|
| 22 |
-
Endpoints already taken:
|
| 23 |
-
get user/models
|
| 24 |
-
get user/models/{task_id}
|
| 25 |
-
delete user/models/{generated_model_id}
|
| 26 |
-
post user/models/{task_id}/refresh
|
| 27 |
-
post user/models/{generated_model_id}/refresh
|
| 28 |
-
post user/models/refresh/{generated_model_id}
|
| 29 |
-
delete user/models/{generated_model_id}
|
| 30 |
-
"""
|
| 31 |
-
|
| 32 |
-
# Example endpoint - you can add new user/models endpoints here
|
| 33 |
@router.get("/progress_update/{generated_model_id}")
|
| 34 |
async def refresh_generated_model(generated_model_id: str, current_user: User = Depends(get_current_active_user)):
|
| 35 |
"""
|
|
@@ -51,6 +36,24 @@ async def refresh_generated_model(generated_model_id: str, current_user: User =
|
|
| 51 |
generation_type = prompts_config.get("generation_type")
|
| 52 |
should_texture = prompts_config.get("should_texture", False)
|
| 53 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 54 |
meshy_task_id = generated_model.get("meshy_api_job_id")
|
| 55 |
if not meshy_task_id:
|
| 56 |
raise HTTPException(status_code=400, detail="Meshy task id missing for this model")
|
|
@@ -428,6 +431,65 @@ async def _process_text_to_3d_background(
|
|
| 428 |
except Exception as ex:
|
| 429 |
logging.error(f"Background processing failed for text-to-3d model {generated_model_id}: {ex}")
|
| 430 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 431 |
# Background task for image-to-3d processing
|
| 432 |
async def _process_image_to_3d_background(generated_model_id: int, payload: Dict[str, Any], generation_type: str):
|
| 433 |
"""Background task to handle Meshy API call for image-to-3d"""
|
|
@@ -552,6 +614,67 @@ async def text_to_3d(prompt: TextPrompt, background_tasks: BackgroundTasks, curr
|
|
| 552 |
logging.error(f"Failed to create initial model DB record: {ex}")
|
| 553 |
raise HTTPException(status_code=500, detail=f"Failed to start generation: {ex}")
|
| 554 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 555 |
|
| 556 |
@router.post("/image-to-3d/upload")
|
| 557 |
async def image_to_3d_upload(
|
|
@@ -564,7 +687,7 @@ async def image_to_3d_upload(
|
|
| 564 |
|
| 565 |
Returns immediately after creating the database record and encoding the image.
|
| 566 |
Meshy API call happens in the background.
|
| 567 |
-
"""
|
| 568 |
|
| 569 |
# 1. Credit check and decrement – texture generation costs 3 credits
|
| 570 |
credit_cost = 3 if should_texture else 1
|
|
@@ -718,6 +841,81 @@ async def multi_image_to_3d_upload(
|
|
| 718 |
logging.error(f"Failed to create initial model DB record: {ex}")
|
| 719 |
raise HTTPException(status_code=500, detail=f"Failed to start generation: {ex}")
|
| 720 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 721 |
@router.delete("/{generated_model_id}")
|
| 722 |
async def delete_model(
|
| 723 |
generated_model_id: str,
|
|
|
|
| 1 |
+
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks, UploadFile, File, Form
|
| 2 |
from fastapi.responses import JSONResponse
|
| 3 |
from auth import get_current_active_user, User, supabase
|
| 4 |
import logging
|
| 5 |
import httpx
|
| 6 |
import os
|
|
|
|
| 7 |
from typing import Optional, Dict, Any
|
|
|
|
|
|
|
| 8 |
from pydantic import BaseModel
|
|
|
|
| 9 |
import base64
|
| 10 |
+
from services.hunyuan_service import _hunyuan_image_to_3d
|
| 11 |
|
| 12 |
router = APIRouter(
|
| 13 |
prefix="/user/models",
|
|
|
|
| 15 |
dependencies=[Depends(get_current_active_user)]
|
| 16 |
)
|
| 17 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
@router.get("/progress_update/{generated_model_id}")
|
| 19 |
async def refresh_generated_model(generated_model_id: str, current_user: User = Depends(get_current_active_user)):
|
| 20 |
"""
|
|
|
|
| 36 |
generation_type = prompts_config.get("generation_type")
|
| 37 |
should_texture = prompts_config.get("should_texture", False)
|
| 38 |
|
| 39 |
+
# Special handling for Hunyuan generation (doesn't use Meshy API)
|
| 40 |
+
if generation_type == "hunyuan_image_to_3d":
|
| 41 |
+
return {
|
| 42 |
+
"task_id": generated_model_id,
|
| 43 |
+
"status": generated_model.get("status", "IN_PROGRESS"),
|
| 44 |
+
"progress": 100 if generated_model.get("status") == "COMPLETED" else 50,
|
| 45 |
+
"model_urls": None, # Hunyuan files are stored in Model_Files table
|
| 46 |
+
"thumbnail_url": None,
|
| 47 |
+
"texture_urls": None,
|
| 48 |
+
"created_at": generated_model.get("created_at"),
|
| 49 |
+
"started_at": generated_model.get("created_at"),
|
| 50 |
+
"finished_at": generated_model.get("updated_at") if generated_model.get("status") == "COMPLETED" else None,
|
| 51 |
+
"task_error": None,
|
| 52 |
+
"database_updated": True,
|
| 53 |
+
"generation_type": "hunyuan_image_to_3d",
|
| 54 |
+
"message": "Hunyuan generation completed. Check Model_Files table for 3D mesh file." if generated_model.get("status") == "COMPLETED" else "Hunyuan generation in progress..."
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
meshy_task_id = generated_model.get("meshy_api_job_id")
|
| 58 |
if not meshy_task_id:
|
| 59 |
raise HTTPException(status_code=400, detail="Meshy task id missing for this model")
|
|
|
|
| 431 |
except Exception as ex:
|
| 432 |
logging.error(f"Background processing failed for text-to-3d model {generated_model_id}: {ex}")
|
| 433 |
|
| 434 |
+
# CREDITS ARE NOT DECREMENTED HERE / DECREMENT BEFORE CALLING THIS FUNCTION
async def _process_hunyuan_image_to_3d_background(generated_model_id: int, image_url: str, user_id: str) -> None:
    """Background task: run the Hunyuan image-to-3d job, download the mesh, persist it.

    On success, marks the Generated_Models row COMPLETED and inserts the mesh
    into Model_Files. All failures are logged and swallowed — this is
    best-effort background work with no caller to report to.
    """
    try:
        # _hunyuan_image_to_3d is a blocking (requests-based) call; run it in a
        # worker thread so the event loop is not stalled for the whole job.
        import asyncio
        hunyuan_response = await asyncio.to_thread(_hunyuan_image_to_3d, image_url)

        if not hunyuan_response:
            logging.error(f"Hunyuan API failed for model {generated_model_id}")
            return

        # Replicate nests results under "output"; fall back to a flat "mesh" key.
        mesh_url = hunyuan_response.get("output", {}).get("mesh") if "output" in hunyuan_response else hunyuan_response.get("mesh")

        if not mesh_url:
            logging.error(f"No mesh URL found in Hunyuan response for model {generated_model_id}")
            return

        # Download the mesh file.
        async with httpx.AsyncClient(timeout=60.0) as client:
            mesh_response = await client.get(mesh_url)

        if mesh_response.status_code != 200:
            logging.error(f"Failed to download mesh file from {mesh_url}: {mesh_response.status_code}")
            return

        mesh_data = mesh_response.content

        # Derive a file name from the URL; default to .glb when no extension is present.
        file_name = mesh_url.split("/")[-1] if "/" in mesh_url else f"hunyuan_model_{generated_model_id}.glb"
        if "." not in file_name:
            file_name += ".glb"

        file_format = file_name.split(".")[-1].lower()
        file_size = len(mesh_data)

        # Mark generation complete and keep the raw Hunyuan response for auditing.
        supabase.from_("Generated_Models").update({
            "status": "COMPLETED",
            # NOTE(review): "now()" is sent as a literal string — confirm
            # PostgREST/the column coerces it to a timestamp as intended.
            "updated_at": "now()",
            "prompts_and_models_config": hunyuan_response,
        }).eq("generated_model_id", generated_model_id).execute()

        # Insert the mesh file into Model_Files.
        # NOTE(review): model_data is raw bytes — confirm the supabase client can
        # serialize it (a bytea column may require base64/hex encoding instead).
        supabase.from_("Model_Files").insert({
            "user_id": user_id,
            "generated_model_id": generated_model_id,
            "model_data": mesh_data,
            "file_name": file_name,
            "file_format": file_format,
            "file_size": file_size,
            "metadata": f"Hunyuan3D generated mesh file. Original URL: {mesh_url}",
            "is_preview_file": False,
        }).execute()

        logging.info(f"Successfully completed Hunyuan image-to-3d generation for model {generated_model_id}")

    except Exception as ex:
        logging.error(f"Background processing failed for Hunyuan image-to-3d model {generated_model_id}: {ex}")
|
| 492 |
+
|
| 493 |
# Background task for image-to-3d processing
|
| 494 |
async def _process_image_to_3d_background(generated_model_id: int, payload: Dict[str, Any], generation_type: str):
|
| 495 |
"""Background task to handle Meshy API call for image-to-3d"""
|
|
|
|
| 614 |
logging.error(f"Failed to create initial model DB record: {ex}")
|
| 615 |
raise HTTPException(status_code=500, detail=f"Failed to start generation: {ex}")
|
| 616 |
|
| 617 |
+
@router.post("/image-to-3d")
async def image_to_3d(
    background_tasks: BackgroundTasks,
    image_url: str,
    current_user: User = Depends(get_current_active_user),
):
    """
    Create a Hunyuan3D Image-to-3D generation job using Replicate API.

    Returns immediately after creating the database record. Hunyuan API call
    and mesh file download happen in the background.
    """

    # Credit check and decrement - Hunyuan generation costs 2 credits
    await _check_and_decrement_credits(current_user.id, 2)

    # Insert initial DB record and return immediately
    try:
        insert_res = supabase.from_("Generated_Models").insert({
            "status": "IN_PROGRESS",
            "user_id": current_user.id,
            "meshy_api_job_id": None,  # not a Meshy job
            "model_name": f"Hunyuan 3D from {image_url.split('/')[-1] if '/' in image_url else 'image'}",
            "prompts_and_models_config": {
                "generation_type": "hunyuan_image_to_3d",
                "input_image_url": image_url,
                "status": "initializing",
                "stage": "processing_image",
            },
        }).execute()
        generated_model_id = insert_res.data[0]["generated_model_id"] if insert_res.data else None

        if not generated_model_id:
            raise HTTPException(status_code=500, detail="Failed to create model record")

        # Schedule the (slow) Hunyuan call to run after the response is sent.
        background_tasks.add_task(
            _process_hunyuan_image_to_3d_background,
            generated_model_id,
            image_url,
            current_user.id,
        )

        # Return immediately with explicit headers
        response_data = {
            "generated_model_id": generated_model_id,
            "status": "initializing",
            "input_image_url": image_url,
            "message": "Hunyuan 3D generation started. Use the progress_update endpoint to check status."
        }

        logging.info(f"Returning response for hunyuan image-to-3d: {response_data}")

        response = JSONResponse(content=response_data, status_code=200)
        # Allowed header; avoids disallowed connection-specific headers under HTTP/2
        response.headers["Cache-Control"] = "no-cache"
        return response

    except HTTPException:
        # Fix: don't re-wrap a deliberate HTTPException (e.g. the 500 above)
        # in the generic handler below — propagate it as raised.
        raise
    except Exception as ex:
        logging.error(f"Failed to create initial model DB record: {ex}")
        raise HTTPException(status_code=500, detail=f"Failed to start generation: {ex}")
|
| 678 |
|
| 679 |
@router.post("/image-to-3d/upload")
|
| 680 |
async def image_to_3d_upload(
|
|
|
|
| 687 |
|
| 688 |
Returns immediately after creating the database record and encoding the image.
|
| 689 |
Meshy API call happens in the background.
|
| 690 |
+
""":
|
| 691 |
|
| 692 |
# 1. Credit check and decrement – texture generation costs 3 credits
|
| 693 |
credit_cost = 3 if should_texture else 1
|
|
|
|
| 841 |
logging.error(f"Failed to create initial model DB record: {ex}")
|
| 842 |
raise HTTPException(status_code=500, detail=f"Failed to start generation: {ex}")
|
| 843 |
|
| 844 |
+
@router.get("/{generated_model_id}/file")
async def get_model_file(
    generated_model_id: str,
    current_user: User = Depends(get_current_active_user)
):
    """
    Get the model file info for a generated model belonging to the current user.
    """
    try:
        # Ownership check first — 404 on any mismatch.
        owned_model = (
            supabase.from_("Generated_Models")
            .select("generated_model_id, user_id, model_name")
            .eq("generated_model_id", generated_model_id)
            .eq("user_id", current_user.id)
            .single()
            .execute()
        )
        if not owned_model.data:
            raise HTTPException(status_code=404, detail="Model not found or you do not have permission to access it.")

        # Fetch the single file row — metadata columns only, no binary payload.
        file_row = (
            supabase.from_("Model_Files")
            .select("model_file_id, file_name, file_format, file_size, metadata, is_preview_file, created_at")
            .eq("generated_model_id", generated_model_id)
            .eq("user_id", current_user.id)
            .single()
            .execute()
        )
        if not file_row.data:
            raise HTTPException(status_code=404, detail="No model file found for this generated model.")

        return {
            "generated_model_id": generated_model_id,
            "model_name": owned_model.data.get("model_name"),
            "file": file_row.data
        }

    except HTTPException:
        # Propagate deliberate HTTP errors untouched.
        raise
    except Exception as e:
        logging.error(f"Failed to get model file for {generated_model_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
|
| 877 |
+
|
| 878 |
+
@router.get("/{generated_model_id}/download")
async def download_model_file(
    generated_model_id: str,
    current_user: User = Depends(get_current_active_user)
):
    """
    Download the model file for a generated model.
    """
    try:
        # Ownership check first — 404 on any mismatch.
        owner_check = (
            supabase.from_("Generated_Models")
            .select("generated_model_id, user_id, model_name")
            .eq("generated_model_id", generated_model_id)
            .eq("user_id", current_user.id)
            .single()
            .execute()
        )
        if not owner_check.data:
            raise HTTPException(status_code=404, detail="Model not found or you do not have permission to access it.")

        # Pull the full row, including the binary payload.
        stored_file = (
            supabase.from_("Model_Files")
            .select("*")
            .eq("generated_model_id", generated_model_id)
            .eq("user_id", current_user.id)
            .single()
            .execute()
        )
        if not stored_file.data:
            raise HTTPException(status_code=404, detail="No model file found for this generated model.")

        record = stored_file.data

        from fastapi.responses import Response

        # Send the stored bytes back as a download attachment.
        return Response(
            content=record["model_data"],
            media_type="application/octet-stream",
            headers={
                "Content-Disposition": f"attachment; filename={record['file_name']}",
                "Content-Length": str(record["file_size"])
            }
        )

    except HTTPException:
        # Propagate deliberate HTTP errors untouched.
        raise
    except Exception as e:
        logging.error(f"Failed to download model file for {generated_model_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
|
| 918 |
+
|
| 919 |
@router.delete("/{generated_model_id}")
|
| 920 |
async def delete_model(
|
| 921 |
generated_model_id: str,
|
services/hunyuan_service.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from auth import supabase
|
| 2 |
+
from fastapi import HTTPException
|
| 3 |
+
import requests
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
REPLICATE_API_TOKEN = os.getenv("REPLICATE_API_TOKEN")
|
| 7 |
+
|
| 8 |
+
def _credit_check_decrement(user_id: str, cost: int = 1) -> bool:
    """Verify the user has at least `cost` credits and deduct them.

    Raises HTTPException(403) when the user has no credit account or not
    enough credits; returns True after a successful deduction.

    NOTE(review): read-then-update is not atomic — two concurrent requests
    could both pass the check. Confirm whether a DB-side decrement is needed.
    """
    user_credit = (
        supabase.from_("User_Credit_Account")
        .select("num_of_available_gens")
        .eq("user_id", user_id)
        .single()
        .execute()
    )

    # No account row at all -> treat as forbidden.
    # Fix: removed unreachable `return False` that followed each raise.
    if not user_credit.data:
        raise HTTPException(status_code=403, detail="No credit account")

    available_credits = user_credit.data["num_of_available_gens"]

    if available_credits < cost:
        raise HTTPException(status_code=403, detail="Not enough credits")

    supabase.from_("User_Credit_Account").update({"num_of_available_gens": available_credits - cost}).eq("user_id", user_id).execute()

    return True
|
| 31 |
+
|
| 32 |
+
def _hunyuan_image_to_3d(image_url: str) -> dict:
    """Create a Hunyuan3D prediction on Replicate for the given image URL.

    Blocking call (uses `requests`); run it off the event loop from async code.
    Returns the Replicate prediction JSON. Raises HTTPException on a missing
    token or a non-success Replicate response.
    """
    if not REPLICATE_API_TOKEN:
        raise HTTPException(status_code=500, detail="REPLICATE_API_TOKEN not configured")

    payload = {
        "version": "b1b9449a1277e10402781c5d41eb30c0a0683504fb23fab591ca9dfc2aabe1cb",
        "input": {
            "image": image_url,
            "steps": 50,
            "guidance_scale": 5.5,
            "octree_resolution": 256,
            "remove_background": True,
        }
    }

    response = requests.post(
        "https://api.replicate.com/v1/predictions",
        headers={
            "Authorization": f"Bearer {REPLICATE_API_TOKEN}",
            "Content-Type": "application/json",
            # Hold the connection open until the prediction settles (Replicate sync mode).
            "Prefer": "wait"
        },
        json=payload,
        timeout=120,  # fix: no timeout could hang a worker forever on a stuck upstream
    )

    # Fix: Replicate returns 201 Created for a new prediction and 200 when
    # "Prefer: wait" completes synchronously — both are success, not just 200.
    if response.status_code not in (200, 201):
        raise HTTPException(status_code=response.status_code, detail=f"Replicate API error: {response.text}")

    return response.json()
|
subscription_integration_example.py
DELETED
|
@@ -1,193 +0,0 @@
|
|
| 1 |
-
# Example: Integrating Subscriptions with Existing Credit System
|
| 2 |
-
# Add these functions to your main.py or create a separate utils module
|
| 3 |
-
|
| 4 |
-
from datetime import datetime, timedelta
|
| 5 |
-
from auth import supabase
|
| 6 |
-
import logging
|
| 7 |
-
|
| 8 |
-
async def check_user_access_permissions(user_id: str) -> dict:
|
| 9 |
-
"""
|
| 10 |
-
Enhanced function to check if user can access AI generation features.
|
| 11 |
-
Checks both one-time credits and active subscriptions.
|
| 12 |
-
"""
|
| 13 |
-
try:
|
| 14 |
-
# Check for active subscription first
|
| 15 |
-
subscription_record = supabase.from_("User_Subscriptions").select("*").eq("user_id", user_id).eq("status", "active").execute()
|
| 16 |
-
|
| 17 |
-
has_active_subscription = bool(subscription_record.data)
|
| 18 |
-
|
| 19 |
-
# Get current credits
|
| 20 |
-
credit_record = supabase.from_("User_Credit_Account").select("num_of_available_gens").eq("user_id", user_id).execute()
|
| 21 |
-
current_credits = credit_record.data[0]["num_of_available_gens"] if credit_record.data else 0
|
| 22 |
-
|
| 23 |
-
# Determine access level
|
| 24 |
-
if has_active_subscription:
|
| 25 |
-
subscription = subscription_record.data[0]
|
| 26 |
-
return {
|
| 27 |
-
"can_generate": True,
|
| 28 |
-
"access_type": "subscription",
|
| 29 |
-
"subscription_plan": subscription["plan"],
|
| 30 |
-
"credits": current_credits,
|
| 31 |
-
"subscription_active": True,
|
| 32 |
-
"period_end": subscription["current_period_end"]
|
| 33 |
-
}
|
| 34 |
-
elif current_credits > 0:
|
| 35 |
-
return {
|
| 36 |
-
"can_generate": True,
|
| 37 |
-
"access_type": "credits",
|
| 38 |
-
"subscription_plan": None,
|
| 39 |
-
"credits": current_credits,
|
| 40 |
-
"subscription_active": False,
|
| 41 |
-
"period_end": None
|
| 42 |
-
}
|
| 43 |
-
else:
|
| 44 |
-
return {
|
| 45 |
-
"can_generate": False,
|
| 46 |
-
"access_type": None,
|
| 47 |
-
"subscription_plan": None,
|
| 48 |
-
"credits": 0,
|
| 49 |
-
"subscription_active": False,
|
| 50 |
-
"period_end": None
|
| 51 |
-
}
|
| 52 |
-
|
| 53 |
-
except Exception as e:
|
| 54 |
-
logging.error(f"Error checking user access: {str(e)}")
|
| 55 |
-
return {
|
| 56 |
-
"can_generate": False,
|
| 57 |
-
"access_type": None,
|
| 58 |
-
"subscription_plan": None,
|
| 59 |
-
"credits": 0,
|
| 60 |
-
"subscription_active": False,
|
| 61 |
-
"period_end": None,
|
| 62 |
-
"error": str(e)
|
| 63 |
-
}
|
| 64 |
-
|
| 65 |
-
async def check_and_decrement_credits_enhanced(user_id: str) -> bool:
|
| 66 |
-
"""
|
| 67 |
-
Enhanced version of your existing credit checking function.
|
| 68 |
-
Prioritizes subscription access over one-time credits.
|
| 69 |
-
"""
|
| 70 |
-
try:
|
| 71 |
-
access_info = await check_user_access_permissions(user_id)
|
| 72 |
-
|
| 73 |
-
if not access_info["can_generate"]:
|
| 74 |
-
return False
|
| 75 |
-
|
| 76 |
-
if access_info["access_type"] == "subscription":
|
| 77 |
-
# User has active subscription - allow generation without decrementing credits
|
| 78 |
-
# Credits accumulate for subscription users as a bonus
|
| 79 |
-
logging.info(f"User {user_id} using subscription access ({access_info['subscription_plan']})")
|
| 80 |
-
return True
|
| 81 |
-
|
| 82 |
-
elif access_info["access_type"] == "credits":
|
| 83 |
-
# User using one-time credits - decrement as before
|
| 84 |
-
current_credits = access_info["credits"]
|
| 85 |
-
if current_credits > 0:
|
| 86 |
-
new_credits = current_credits - 1
|
| 87 |
-
supabase.from_("User_Credit_Account").update({"num_of_available_gens": new_credits}).eq("user_id", user_id).execute()
|
| 88 |
-
logging.info(f"User {user_id} used 1 credit. Remaining: {new_credits}")
|
| 89 |
-
return True
|
| 90 |
-
|
| 91 |
-
return False
|
| 92 |
-
|
| 93 |
-
except Exception as e:
|
| 94 |
-
logging.error(f"Error in credit check: {str(e)}")
|
| 95 |
-
return False
|
| 96 |
-
|
| 97 |
-
# New endpoint to add to main.py
|
| 98 |
-
from fastapi import Depends
|
| 99 |
-
from auth import get_current_active_user, User
|
| 100 |
-
|
| 101 |
-
@app.get("/user/access-info", tags=["User"])
|
| 102 |
-
async def get_user_access_info(current_user: User = Depends(get_current_active_user)):
|
| 103 |
-
"""
|
| 104 |
-
Get comprehensive user access information including subscription and credits.
|
| 105 |
-
"""
|
| 106 |
-
access_info = await check_user_access_permissions(current_user.id)
|
| 107 |
-
return access_info
|
| 108 |
-
|
| 109 |
-
@app.get("/user/subscription-status", tags=["User"])
|
| 110 |
-
async def get_subscription_status(current_user: User = Depends(get_current_active_user)):
|
| 111 |
-
"""
|
| 112 |
-
Get detailed subscription status for the user.
|
| 113 |
-
"""
|
| 114 |
-
try:
|
| 115 |
-
# Get subscription details
|
| 116 |
-
subscription_record = supabase.from_("User_Subscriptions").select("*").eq("user_id", current_user.id).order("created_at", desc=True).limit(1).execute()
|
| 117 |
-
|
| 118 |
-
if not subscription_record.data:
|
| 119 |
-
return {
|
| 120 |
-
"has_subscription": False,
|
| 121 |
-
"subscription": None,
|
| 122 |
-
"next_billing_date": None,
|
| 123 |
-
"can_upgrade": True
|
| 124 |
-
}
|
| 125 |
-
|
| 126 |
-
subscription = subscription_record.data[0]
|
| 127 |
-
|
| 128 |
-
# Get payment history
|
| 129 |
-
payment_history = supabase.from_("Subscription_Payment_History").select("*").eq("user_id", current_user.id).order("payment_date", desc=True).limit(5).execute()
|
| 130 |
-
|
| 131 |
-
return {
|
| 132 |
-
"has_subscription": subscription["status"] in ["active", "cancel_at_period_end"],
|
| 133 |
-
"subscription": subscription,
|
| 134 |
-
"next_billing_date": subscription["current_period_end"],
|
| 135 |
-
"can_upgrade": subscription["status"] == "active",
|
| 136 |
-
"recent_payments": payment_history.data,
|
| 137 |
-
"subscription_plan_details": SUBSCRIPTION_PLANS.get(subscription["plan"]) if subscription["plan"] in SUBSCRIPTION_PLANS else None
|
| 138 |
-
}
|
| 139 |
-
|
| 140 |
-
except Exception as e:
|
| 141 |
-
logging.error(f"Error getting subscription status: {str(e)}")
|
| 142 |
-
raise HTTPException(status_code=500, detail="Failed to get subscription status")
|
| 143 |
-
|
| 144 |
-
# Usage tracking for subscription users
|
| 145 |
-
async def track_subscription_usage(user_id: str, feature_used: str):
|
| 146 |
-
"""
|
| 147 |
-
Track usage for subscription users (for analytics and potential usage limits).
|
| 148 |
-
"""
|
| 149 |
-
try:
|
| 150 |
-
# Get user's active subscription
|
| 151 |
-
subscription_record = supabase.from_("User_Subscriptions").select("*").eq("user_id", user_id).eq("status", "active").execute()
|
| 152 |
-
|
| 153 |
-
if subscription_record.data:
|
| 154 |
-
subscription_id = subscription_record.data[0]["stripe_subscription_id"]
|
| 155 |
-
|
| 156 |
-
# Log usage (you might want to create a separate Usage_Tracking table)
|
| 157 |
-
usage_data = {
|
| 158 |
-
"user_id": user_id,
|
| 159 |
-
"subscription_id": subscription_id,
|
| 160 |
-
"feature_used": feature_used,
|
| 161 |
-
"timestamp": datetime.now().isoformat(),
|
| 162 |
-
"billing_period_start": subscription_record.data[0]["current_period_start"],
|
| 163 |
-
"billing_period_end": subscription_record.data[0]["current_period_end"]
|
| 164 |
-
}
|
| 165 |
-
|
| 166 |
-
# You can log this to a separate table or your existing logging system
|
| 167 |
-
logging.info(f"Subscription usage tracked: {usage_data}")
|
| 168 |
-
|
| 169 |
-
except Exception as e:
|
| 170 |
-
logging.error(f"Error tracking subscription usage: {str(e)}")
|
| 171 |
-
|
| 172 |
-
# Updated version of your existing generation endpoints
|
| 173 |
-
# Replace your existing check_and_decrement_credits calls with check_and_decrement_credits_enhanced
|
| 174 |
-
|
| 175 |
-
# Example for one of your endpoints:
|
| 176 |
-
@app.post("/req-img-to-3d-enhanced", tags=["Image-to-3D"])
|
| 177 |
-
async def req_img_to_3d_enhanced(
|
| 178 |
-
image: UploadFile = File(...),
|
| 179 |
-
settings: Settings = Depends(get_settings),
|
| 180 |
-
current_user: User = Depends(get_current_active_user)
|
| 181 |
-
):
|
| 182 |
-
"""
|
| 183 |
-
Enhanced version that works with both subscriptions and credits.
|
| 184 |
-
"""
|
| 185 |
-
# Check access permissions
|
| 186 |
-
if not await check_and_decrement_credits_enhanced(current_user.id):
|
| 187 |
-
raise HTTPException(status_code=402, detail="Insufficient credits or no active subscription")
|
| 188 |
-
|
| 189 |
-
# Track usage for subscription users
|
| 190 |
-
await track_subscription_usage(current_user.id, "image_to_3d")
|
| 191 |
-
|
| 192 |
-
# Rest of your existing logic...
|
| 193 |
-
# ... existing image processing code ...
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
utils/pubsub.py
DELETED
|
@@ -1,52 +0,0 @@
|
|
| 1 |
-
from __future__ import annotations
|
| 2 |
-
|
| 3 |
-
import asyncio
|
| 4 |
-
from collections import defaultdict
|
| 5 |
-
from typing import Dict, List, Any
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
class InMemoryPubSub:
    """Tiny single-process publish/subscribe broker.

    A subscriber is just an ``asyncio.Queue``; any number of queues may be
    attached to one *task_id* (e.g. several browser tabs watching the same
    SSE stream), and every published message (a JSON-serialisable dict) is
    delivered to each of them.

    NOTE: state lives in this process only — when running several
    Uvicorn/Gunicorn workers the events are not shared between them. Use a
    proper external broker (Redis, NATS, …) for multi-process deployments.
    """

    def __init__(self) -> None:
        # task_id -> list of subscriber queues; the lock guards mutations.
        self._subscribers: Dict[str, List[asyncio.Queue]] = defaultdict(list)
        self._lock = asyncio.Lock()

    async def publish(self, task_id: str, message: Any) -> None:
        """Deliver *message* to every queue currently listening on *task_id*."""
        # Snapshot under the lock so a concurrent (un)subscribe cannot
        # mutate the list while we fan the message out.
        async with self._lock:
            targets = list(self._subscribers.get(task_id, []))
        for q in targets:
            await q.put(message)

    async def subscribe(self, task_id: str) -> asyncio.Queue:
        """Create, register and return a fresh queue bound to *task_id*."""
        q: asyncio.Queue = asyncio.Queue()
        async with self._lock:
            self._subscribers[task_id].append(q)
        return q

    async def unsubscribe(self, task_id: str, queue: asyncio.Queue) -> None:
        """Detach *queue* from *task_id* and drop the key once it empties."""
        async with self._lock:
            listeners = self._subscribers.get(task_id)
            if listeners is not None and queue in listeners:
                listeners.remove(queue)
                if not listeners:
                    del self._subscribers[task_id]
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
# Global singleton used across the code-base. Note: per-process only —
# each Uvicorn/Gunicorn worker gets its own independent instance.
pubsub = InMemoryPubSub()

# Explicit public API of this module.
__all__ = ["pubsub", "InMemoryPubSub"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
utils/worker.py
DELETED
|
@@ -1,96 +0,0 @@
|
|
| 1 |
-
import os, asyncio, logging
|
| 2 |
-
import httpx
|
| 3 |
-
from auth import supabase # re-use existing Supabase client
|
| 4 |
-
from utils.pubsub import pubsub as inmem_pubsub
|
| 5 |
-
|
| 6 |
-
# ---------------------------------------------------------------------------
|
| 7 |
-
# Real Meshy Text-to-3D polling task – executed as FastAPI BackgroundTask
|
| 8 |
-
# ---------------------------------------------------------------------------
|
| 9 |
-
|
| 10 |
-
def _update_model_record(generated_model_id: int, status: str, payload: dict) -> None:
    """Best-effort update of the *Generated_Models* row to a terminal *status*.

    DB failures are logged and swallowed deliberately: the SSE subscribers
    have already been notified via pub/sub, and crashing the background
    task here would gain nothing.
    """
    try:
        supabase.from_("Generated_Models").update({
            "prompts_and_models_config": payload,
            "status": status,
            "updated_at": "now()",
        }).eq("generated_model_id", generated_model_id).execute()
    except Exception as exc:
        logging.warning(f"Failed to update DB (status={status}): {exc}")


async def poll_meshy_task(generated_model_id: int, meshy_task_id: str):
    """Poll Meshy API every few seconds, publish progress to in-memory pub/sub and update DB.

    Parameters
    ----------
    generated_model_id : int
        Primary-key id of our *Generated_Models* row. Used as the public
        task identifier for SSE consumers.
    meshy_task_id : str
        UUID returned by Meshy when the generation request was created.
    """

    meshy_api_key = os.getenv("MESHY_API_KEY")
    if not meshy_api_key:
        logging.error("MESHY_API_KEY env-var missing – cannot poll Meshy API")
        return

    headers = {"Authorization": f"Bearer {meshy_api_key}"}

    last_progress = -1  # to avoid spamming identical values

    async with httpx.AsyncClient(timeout=30.0) as client:
        while True:
            try:
                resp = await client.get(
                    f"https://api.meshy.ai/openapi/v2/text-to-3d/{meshy_task_id}",
                    headers=headers,
                )
            except Exception as exc:
                # Network hiccup — back off and retry rather than abort.
                logging.warning(f"Meshy API request failed: {exc}; retrying in 30s")
                await asyncio.sleep(30)
                continue

            if resp.status_code != 200:
                # Meshy returned an error – notify subscribers, persist the
                # failure and abort. (Fix: previously the DB row was left in
                # a pending state on this path, unlike the FAILED branch.)
                error_msg = f"Meshy API error {resp.status_code}"
                await inmem_pubsub.publish(str(generated_model_id), {
                    "taskId": str(generated_model_id),
                    "progress": max(last_progress, 0),
                    "status": "failed",
                    "error": error_msg,
                })
                _update_model_record(generated_model_id, "FAILED", {"error": error_msg})
                break

            payload = resp.json()
            status = payload.get("status", "UNKNOWN")
            progress = payload.get("progress", 0)

            # Publish progress updates (skip duplicates, but always emit
            # terminal states so subscribers never miss completion).
            if progress != last_progress or status in ("SUCCEEDED", "FAILED"):
                await inmem_pubsub.publish(str(generated_model_id), {
                    "taskId": str(generated_model_id),
                    "progress": progress,
                    "status": status.lower(),
                    "model_urls": payload.get("model_urls"),
                    "thumbnail_url": payload.get("thumbnail_url"),
                })
                last_progress = progress

            # Handle terminal states: persist outcome and stop polling.
            if status == "SUCCEEDED":
                _update_model_record(generated_model_id, "COMPLETED", payload)
                break

            if status == "FAILED":
                _update_model_record(generated_model_id, "FAILED", payload)
                break

            # Sleep between polls
            await asyncio.sleep(5)
|
| 94 |
-
|
| 95 |
-
# Provide explicit __all__ for clarity; the polling coroutine is this
# module's only public entry point.
__all__ = ["poll_meshy_task"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|