Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -517,4 +517,460 @@ def save_to_oci_bucket(image, text_content, story_title, page_number, file_type=
|
|
| 517 |
|
| 518 |
print(f"π¨ OCI API Response: {response.status_code}")
|
| 519 |
|
| 520 |
-
if response.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 517 |
|
| 518 |
print(f"π¨ OCI API Response: {response.status_code}")
|
| 519 |
|
| 520 |
+
if response.status_code == 200:
|
| 521 |
+
result = response.json()
|
| 522 |
+
if result['status'] == 'success':
|
| 523 |
+
return result.get('file_url', 'Unknown URL')
|
| 524 |
+
else:
|
| 525 |
+
raise Exception(f"OCI API Error: {result.get('message', 'Unknown error')}")
|
| 526 |
+
else:
|
| 527 |
+
raise Exception(f"HTTP Error: {response.status_code}")
|
| 528 |
+
|
| 529 |
+
except Exception as e:
|
| 530 |
+
raise Exception(f"OCI upload failed: {str(e)}")
|
| 531 |
+
|
| 532 |
+
# JOB MANAGEMENT FUNCTIONS
|
| 533 |
+
def create_job(story_request: StorybookRequest) -> str:
    """Register a new storybook-generation job and return its id.

    Pre-computes the character templates/reference sheet from the incoming
    n8n request and seeds a fresh record in the in-memory job_storage.
    """
    new_id = str(uuid.uuid4())

    # Process character descriptions from n8n
    templates = process_character_descriptions(story_request.characters)
    references = generate_character_reference_sheet(story_request.characters)

    record = {
        "status": JobStatus.PENDING,
        "progress": 0,
        "message": "Job created and queued",
        "request": story_request.dict(),
        "result": None,
        "created_at": time.time(),
        "updated_at": time.time(),
        "pages": [],
        "character_templates": templates,
        "character_references": references,
    }
    job_storage[new_id] = record

    print(f"π Created job {new_id} for story: {story_request.story_title}")
    print(f"π₯ Processed {len(templates)} characters from n8n request")

    return new_id
|
| 557 |
+
|
| 558 |
+
def update_job_status(job_id: str, status: JobStatus, progress: int, message: str, result=None):
    """Update a job's record in job_storage and notify the optional webhook.

    Args:
        job_id: Identifier returned by create_job.
        status: New JobStatus for the job.
        progress: Completion percentage, 0-100.
        message: Human-readable progress message.
        result: Optional result payload; stored on the job when not None.

    Returns:
        bool: True if the job exists and was updated, False for unknown ids.
    """
    if job_id not in job_storage:
        return False

    job_storage[job_id].update({
        "status": status,
        "progress": progress,
        "message": message,
        "updated_at": time.time()
    })

    # BUG FIX: explicit None check — the old truthiness test (`if result:`)
    # silently dropped falsy-but-valid payloads such as an empty dict.
    if result is not None:
        job_storage[job_id]["result"] = result

    # Send webhook notification if callback URL exists
    job_data = job_storage[job_id]
    request_data = job_data["request"]

    if request_data.get("callback_url"):
        try:
            callback_url = request_data["callback_url"]

            # Enhanced callback data
            callback_data = {
                "job_id": job_id,
                "status": status.value,
                "progress": progress,
                "message": message,
                "story_title": request_data["story_title"],
                "total_scenes": len(request_data["scenes"]),
                "total_characters": len(request_data["characters"]),
                "timestamp": time.time(),
                "source": "huggingface-storybook-generator",
                "estimated_time_remaining": calculate_remaining_time(job_id, progress)
            }

            # Add result data for completed jobs
            if status == JobStatus.COMPLETED and result:
                callback_data["result"] = {
                    "total_pages": result.get("total_pages", 0),
                    "generation_time": result.get("generation_time", 0),
                    "oci_bucket_url": result.get("oci_bucket_url", ""),
                    "pages_generated": result.get("generated_pages", 0),
                    "characters_used": result.get("characters_used", 0)
                }

            # Add current scene info for processing jobs
            if status == JobStatus.PROCESSING:
                scenes = request_data["scenes"]
                total_scenes = len(scenes)
                # BUG FIX: the old formula `progress // (100 // total_scenes)`
                # raised ZeroDivisionError for stories with 0 scenes and for
                # stories with more than 100 scenes (100 // n == 0).
                # Compute the step first and skip the extras when undefined.
                progress_step = 100 // total_scenes if total_scenes else 0
                if progress_step > 0:
                    current_scene = progress // progress_step + 1
                    callback_data["current_scene"] = current_scene
                    callback_data["total_scenes"] = total_scenes
                    if current_scene <= total_scenes:
                        scene_visual = scenes[current_scene - 1]["visual"]
                        callback_data["scene_description"] = scene_visual[:100] + "..."

                        # Add characters in current scene
                        if "characters_present" in scenes[current_scene - 1]:
                            callback_data["characters_in_scene"] = scenes[current_scene - 1]["characters_present"]

            headers = {
                'Content-Type': 'application/json',
                'User-Agent': 'Storybook-Generator/1.0'
            }

            response = requests.post(
                callback_url,
                json=callback_data,
                headers=headers,
                timeout=30
            )

            print(f"π’ Callback sent: Status {response.status_code}")

        except Exception as e:
            # Best-effort notification: a webhook failure must never break
            # the generation pipeline, so log and continue.
            print(f"β οΈ Callback failed: {str(e)}")

    return True
|
| 635 |
+
|
| 636 |
+
def calculate_remaining_time(job_id, progress):
    """Calculate estimated time remaining"""
    # No progress yet: nothing to extrapolate from.
    if progress == 0:
        return "Calculating..."

    record = job_storage.get(job_id)
    if not record:
        return "Unknown"

    elapsed = time.time() - record["created_at"]
    if progress <= 0:
        return "Unknown"

    # Linear extrapolation: total time scales with inverse of progress.
    projected_total = (elapsed / progress) * 100
    left = projected_total - elapsed
    minutes, seconds = int(left // 60), int(left % 60)
    return f"{minutes}m {seconds}s"
|
| 652 |
+
|
| 653 |
+
# ENHANCED BACKGROUND TASK WITH DYNAMIC CHARACTER CONSISTENCY
|
| 654 |
+
def generate_storybook_background(job_id: str):
    """Background task to generate complete storybook with dynamic character consistency.

    Runs outside the request cycle (scheduled via BackgroundTasks): for each
    scene it generates an image, uploads image and text to the OCI bucket,
    and pushes progress into job_storage via update_job_status. Any failure
    marks the job FAILED and stops; nothing is returned.
    """
    try:
        # Rehydrate the request that create_job stored as a plain dict.
        job_data = job_storage[job_id]
        story_request_data = job_data["request"]
        story_request = StorybookRequest(**story_request_data)
        character_templates = job_data["character_templates"]

        print(f"π¬ Starting DYNAMIC storybook generation for job {job_id}")
        print(f"π Story: {story_request.story_title}")
        print(f"π₯ Characters: {len(story_request.characters)} (from n8n)")
        print(f"π Scenes: {len(story_request.scenes)}")
        print(f"π± Consistency seed: {story_request.consistency_seed}")

        # Log character details
        for char in story_request.characters:
            print(f" - {char.name}: {char.description[:50]}...")

        update_job_status(job_id, JobStatus.PROCESSING, 5, "Starting storybook generation with dynamic character consistency...")

        total_scenes = len(story_request.scenes)
        generated_pages = []
        start_time = time.time()

        # NOTE(review): if total_scenes == 0 the progress division below would
        # raise ZeroDivisionError — presumably upstream validation guarantees
        # at least one scene; confirm against the API endpoint.
        for i, scene in enumerate(story_request.scenes):
            # Progress is mapped onto the 5%-95% band; the endpoints are
            # reserved for the start/completion status updates.
            progress = 5 + int((i / total_scenes) * 90)

            # Extract characters for this scene
            characters_present = []
            if hasattr(scene, 'characters_present') and scene.characters_present:
                characters_present = scene.characters_present
            else:
                # Fallback: extract from visual description using available characters
                available_chars = [char.name for char in story_request.characters]
                characters_present = extract_characters_from_visual(scene.visual, available_chars)

            update_job_status(
                job_id,
                JobStatus.PROCESSING,
                progress,
                f"Generating page {i+1}/{total_scenes} with {len(characters_present)} characters: {scene.visual[:50]}..."
            )

            try:
                print(f"πΌοΈ Generating page {i+1} with characters: {characters_present}")

                # Generate consistent image using dynamic character templates
                image = generate_consistent_image(
                    scene.visual,
                    story_request.model_choice,
                    story_request.style,
                    characters_present,
                    character_templates,
                    i + 1,
                    story_request.consistency_seed
                )

                # Save IMAGE to OCI bucket
                image_url = save_to_oci_bucket(
                    image,
                    "",  # No text for image
                    story_request.story_title,
                    i + 1,
                    "image"
                )

                # Save TEXT to OCI bucket
                text_url = save_to_oci_bucket(
                    None,  # No image for text
                    scene.text,
                    story_request.story_title,
                    i + 1,
                    "text"
                )

                # Store page data
                page_data = {
                    "page_number": i + 1,
                    "image_url": image_url,
                    "text_url": text_url,
                    "text_content": scene.text,
                    "visual_description": scene.visual,
                    "characters_present": characters_present,
                    "prompt_used": f"Dynamic consistent generation with {len(characters_present)} characters"
                }
                generated_pages.append(page_data)

                print(f"β Page {i+1} completed - Characters: {characters_present}")

            except Exception as e:
                # Fail-fast policy: a single bad page aborts the whole job
                # rather than delivering a partial book.
                error_msg = f"Failed to generate page {i+1}: {str(e)}"
                print(f"β {error_msg}")
                update_job_status(job_id, JobStatus.FAILED, 0, error_msg)
                return

        # Complete the job
        generation_time = time.time() - start_time

        # Result payload surfaced to the status endpoint and webhook callback.
        result = {
            "story_title": story_request.story_title,
            "total_pages": total_scenes,
            "characters_used": len(story_request.characters),
            "generated_pages": len(generated_pages),
            "generation_time": round(generation_time, 2),
            "folder_path": f"stories/{story_request.story_title}",
            "oci_bucket_url": f"https://oci.com/stories/{story_request.story_title}",
            "consistency_seed": story_request.consistency_seed,
            "character_names": [char.name for char in story_request.characters],
            "pages": generated_pages,
            "file_structure": {
                "images": [f"page_{i+1:03d}.png" for i in range(total_scenes)],
                "texts": [f"page_{i+1:03d}.txt" for i in range(total_scenes)]
            }
        }

        update_job_status(
            job_id,
            JobStatus.COMPLETED,
            100,
            f"π Storybook completed! {len(generated_pages)} pages with {len(story_request.characters)} dynamic characters created in {generation_time:.2f}s.",
            result
        )

        print(f"π DYNAMIC Storybook generation finished for job {job_id}")
        print(f"π Saved to: stories/{story_request.story_title} in OCI bucket")
        print(f"π₯ Dynamic character consistency maintained for {len(story_request.characters)} characters across {total_scenes} scenes")

    except Exception as e:
        # Catch-all so the background worker never dies silently: record the
        # failure on the job so pollers/webhooks can see it.
        error_msg = f"Dynamic story generation failed: {str(e)}"
        print(f"β {error_msg}")
        update_job_status(job_id, JobStatus.FAILED, 0, error_msg)
|
| 785 |
+
|
| 786 |
+
# FASTAPI ENDPOINTS (for n8n)
|
| 787 |
+
@app.post("/api/generate-storybook")
async def generate_storybook(request: dict, background_tasks: BackgroundTasks):
    """Main endpoint for n8n integration - generates complete storybook with dynamic character consistency.

    Accepts the raw n8n JSON payload, normalizes optional fields, validates it
    into a StorybookRequest, creates a job, and schedules the background
    generation task. Returns an immediate acknowledgement payload; callers
    poll /api/job-status/{job_id} or receive the webhook callback.

    Raises:
        HTTPException 400: when story_title or scenes are missing.
        HTTPException 500: for any other failure during job setup.
    """
    try:
        print(f"π₯ Received n8n request for story: {request.get('story_title', 'Unknown')}")

        # Add consistency seed if not provided
        if 'consistency_seed' not in request or not request['consistency_seed']:
            request['consistency_seed'] = random.randint(1000, 9999)
            print(f"π± Generated consistency seed: {request['consistency_seed']}")

        # Ensure characters have required fields
        if 'characters' in request:
            for char in request['characters']:
                if 'visual_prompt' not in char or not char['visual_prompt']:
                    # Generate visual prompt from description if not provided
                    char['visual_prompt'] = ""
                if 'key_features' not in char:
                    char['key_features'] = []

        # Convert to Pydantic model
        story_request = StorybookRequest(**request)

        # Validate required fields
        if not story_request.story_title or not story_request.scenes:
            raise HTTPException(status_code=400, detail="story_title and scenes are required")

        # Create job immediately
        job_id = create_job(story_request)

        # Start background processing (runs independently of HF idle)
        background_tasks.add_task(generate_storybook_background, job_id)

        # Immediate response for n8n
        response_data = {
            "status": "success",
            "message": "Storybook generation with dynamic character consistency started successfully",
            "job_id": job_id,
            "story_title": story_request.story_title,
            "total_scenes": len(story_request.scenes),
            "total_characters": len(story_request.characters),
            "character_names": [char.name for char in story_request.characters],
            "consistency_seed": story_request.consistency_seed,
            "callback_url": story_request.callback_url,
            "estimated_time_seconds": len(story_request.scenes) * 35,
            "timestamp": datetime.now().isoformat()
        }

        print(f"β Job {job_id} started with dynamic character consistency for: {story_request.story_title}")

        return response_data

    except HTTPException:
        # BUG FIX: the broad handler below used to swallow the 400 validation
        # error above and re-raise it as a generic 500 ("API Error: 400: ...").
        # Let HTTPExceptions propagate to FastAPI unchanged.
        raise
    except Exception as e:
        error_msg = f"API Error: {str(e)}"
        print(f"β {error_msg}")
        raise HTTPException(status_code=500, detail=error_msg)
|
| 843 |
+
|
| 844 |
+
@app.get("/api/job-status/{job_id}")
async def get_job_status_endpoint(job_id: str):
    """Check job status"""
    record = job_storage.get(job_id)
    if not record:
        raise HTTPException(status_code=404, detail="Job not found")

    # Project the stored record onto the response model.
    return JobStatusResponse(
        job_id=job_id,
        status=record["status"],
        progress=record["progress"],
        message=record["message"],
        result=record["result"],
        created_at=record["created_at"],
        updated_at=record["updated_at"],
    )
|
| 860 |
+
|
| 861 |
+
@app.get("/api/health")
async def api_health():
    """Health check endpoint for n8n"""
    # Snapshot of service state: loaded models, known fallback templates,
    # and the configured OCI endpoint.
    payload = {
        "status": "healthy",
        "service": "storybook-generator",
        "timestamp": datetime.now().isoformat(),
        "active_jobs": len(job_storage),
        "models_loaded": list(model_cache.keys()),
        "fallback_templates": list(FALLBACK_CHARACTER_TEMPLATES.keys()),
        "oci_api_connected": OCI_API_BASE_URL,
    }
    return payload
|
| 873 |
+
|
| 874 |
+
@app.get("/api/local-images")
async def get_local_images():
    """API endpoint to get locally saved test images"""
    # Delegate straight to the shared storage helper.
    return get_local_storage_info()
|
| 879 |
+
|
| 880 |
+
@app.delete("/api/local-images/{filename:path}")
async def delete_local_image_api(filename: str):
    """API endpoint to delete a local image.

    Args:
        filename: Path (relative to PERSISTENT_IMAGE_DIR) taken verbatim from
            the URL; the ":path" converter allows slashes, so it is untrusted.

    Returns:
        dict with "status" ("success"/"error") and a "message".
    """
    try:
        # SECURITY FIX: the filename comes straight from the URL and was
        # joined unchecked, so "../../..." could delete files outside the
        # image directory. Resolve both paths and require containment.
        base_dir = os.path.abspath(PERSISTENT_IMAGE_DIR)
        filepath = os.path.abspath(os.path.join(base_dir, filename))
        if os.path.commonpath([base_dir, filepath]) != base_dir:
            return {"status": "error", "message": "Invalid filename"}

        success, message = delete_local_image(filepath)
        return {"status": "success" if success else "error", "message": message}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
| 889 |
+
|
| 890 |
+
# MISSING HELPER FUNCTIONS FOR GRADIO INTERFACE
|
| 891 |
+
def delete_current_image(filepath):
    """Delete the currently displayed image"""
    # Nothing selected in the UI — refresh the gallery and report.
    if not filepath:
        return "β No image to delete", None, None, refresh_local_images()

    ok, detail = delete_local_image(filepath)
    gallery = refresh_local_images()

    if not ok:
        return f"β {detail}", None, "Delete failed", gallery
    return f"β {detail}", None, "Image deleted successfully!", gallery
|
| 904 |
+
|
| 905 |
+
def clear_all_images():
    """Delete all local images"""
    try:
        info = get_local_storage_info()
        # Count only the deletions that the helper reports as successful.
        removed = sum(
            1
            for entry in info.get("images", [])
            if delete_local_image(entry["path"])[0]
        )
        return f"β Deleted {removed} images", refresh_local_images()
    except Exception as e:
        return f"β Error: {str(e)}", refresh_local_images()
|
| 921 |
+
|
| 922 |
+
# Enhanced Gradio interface with dynamic character testing
|
| 923 |
+
def create_gradio_interface():
|
| 924 |
+
"""Create Gradio interface with dynamic character consistency features"""
|
| 925 |
+
|
| 926 |
+
def generate_test_image_with_characters(prompt, model_choice, style_choice, character_names_text):
|
| 927 |
+
"""Generate a single image for testing character consistency"""
|
| 928 |
+
try:
|
| 929 |
+
if not prompt.strip():
|
| 930 |
+
return None, "β Please enter a prompt", None
|
| 931 |
+
|
| 932 |
+
# Parse character names from text input
|
| 933 |
+
character_names = [name.strip() for name in character_names_text.split(",") if name.strip()]
|
| 934 |
+
|
| 935 |
+
print(f"π¨ Generating test image with prompt: {prompt}")
|
| 936 |
+
print(f"π₯ Character names: {character_names}")
|
| 937 |
+
|
| 938 |
+
# Create dynamic character templates for testing
|
| 939 |
+
character_templates = {}
|
| 940 |
+
for char_name in character_names:
|
| 941 |
+
character_templates[char_name] = {
|
| 942 |
+
"visual_prompt": f"{char_name}, distinctive appearance, consistent features",
|
| 943 |
+
"key_features": ["consistent appearance", "maintain features"],
|
| 944 |
+
"consistency_keywords": f"consistent {char_name}"
|
| 945 |
+
}
|
| 946 |
+
|
| 947 |
+
# Enhance the prompt with character consistency
|
| 948 |
+
enhanced_prompt, negative_prompt = enhance_prompt_with_characters(
|
| 949 |
+
prompt, character_names, character_templates, style_choice, 1
|
| 950 |
+
)
|
| 951 |
+
|
| 952 |
+
# Generate the image
|
| 953 |
+
image = generate_consistent_image(
|
| 954 |
+
prompt,
|
| 955 |
+
model_choice,
|
| 956 |
+
style_choice,
|
| 957 |
+
character_names,
|
| 958 |
+
character_templates,
|
| 959 |
+
1
|
| 960 |
+
)
|
| 961 |
+
|
| 962 |
+
# Save to local storage
|
| 963 |
+
filepath, filename = save_image_to_local(image, prompt, style_choice)
|
| 964 |
+
|
| 965 |
+
character_info = f"π₯ Characters: {', '.join(character_names)}" if character_names else "π₯ No specific characters"
|
| 966 |
+
|
| 967 |
+
status_msg = f"""β
Success! Generated: {prompt}
|
| 968 |
+
{character_info}
|
| 969 |
+
π¨ Enhanced prompt: {enhanced_prompt[:200]}...
|
| 970 |
+
π **Local file:** {filename if filename else 'Not saved'}"""
|
| 971 |
+
|
| 972 |
+
return image, status_msg, filepath
|
| 973 |
+
|
| 974 |
+
except Exception as e:
|
| 975 |
+
error_msg = f"β Generation failed: {str(e)}"
|
| 976 |
+
|