yukee1992 committed on
Commit
1d24354
·
verified ·
1 Parent(s): c0034ac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +90 -89
app.py CHANGED
@@ -9,12 +9,11 @@ from datetime import datetime
9
  import re
10
  import time
11
  from typing import List, Optional
12
- from fastapi import FastAPI, HTTPException, BackgroundTasks
13
  from pydantic import BaseModel
14
  import gc
15
  import psutil
16
  import threading
17
- from concurrent.futures import ThreadPoolExecutor, as_completed
18
 
19
  # External OCI API URL
20
  OCI_API_BASE_URL = "https://yukee1992-oci-story-book.hf.space"
@@ -74,9 +73,6 @@ model_lock = threading.Lock()
74
  # Character consistency tracking
75
  character_seeds = {}
76
 
77
- # Thread pool for parallel processing
78
- executor = ThreadPoolExecutor(max_workers=2)
79
-
80
  def monitor_memory():
81
  try:
82
  process = psutil.Process()
@@ -129,19 +125,18 @@ print("πŸš€ Initializing Storybook Generator...")
129
  load_model("dreamshaper-8")
130
  print("βœ… Model loaded and ready!")
131
 
132
- # CRITICAL: PROMPT OPTIMIZATION THAT ACTUALLY WORKS
133
  def optimize_prompt(scene_visual, characters, style="childrens_book", page_number=1):
134
  """
135
- Create a prompt that FITS within 77 tokens while preserving the ESSENCE
136
  """
137
- # Extract ONLY the most critical information
138
  character_essence = ""
139
  if characters:
140
  char_descriptors = []
141
  for char in characters:
142
  desc = char.get('description', '') if isinstance(char, dict) else getattr(char, 'description', '')
143
 
144
- # Extract ONLY: species + color + 1 key feature
145
  import re
146
  species_match = re.search(r'(rabbit|hedgehog|bird|dog|cat|fox|bear|dragon|human|girl|boy)', desc, re.IGNORECASE)
147
  species = species_match.group(1) if species_match else "character"
@@ -149,7 +144,6 @@ def optimize_prompt(scene_visual, characters, style="childrens_book", page_numbe
149
  color_match = re.search(r'(white|black|brown|blue|red|green|yellow|golden|pink)', desc, re.IGNORECASE)
150
  color = color_match.group(1) if color_match else ""
151
 
152
- # Find one key feature
153
  key_feature = ""
154
  if 'glasses' in desc.lower(): key_feature = "with glasses"
155
  elif 'dress' in desc.lower(): key_feature = "in dress"
@@ -157,16 +151,16 @@ def optimize_prompt(scene_visual, characters, style="childrens_book", page_numbe
157
 
158
  char_descriptors.append(f"{color} {species} {key_feature}".strip())
159
 
160
- character_essence = f"Features: {', '.join(char_descriptors)}. "
161
 
162
- # Compress scene description to MAX 40 words
163
  scene_words = scene_visual.split()
164
- if len(scene_words) > 40:
165
- scene_compressed = ' '.join(scene_words[:40])
166
  else:
167
  scene_compressed = scene_visual
168
 
169
- # Style context (very brief)
170
  style_context = {
171
  "childrens_book": "children's book illustration",
172
  "realistic": "photorealistic",
@@ -174,14 +168,14 @@ def optimize_prompt(scene_visual, characters, style="childrens_book", page_numbe
174
  "anime": "anime style"
175
  }.get(style, "children's book illustration")
176
 
177
- # Build the final prompt (GUARANTEED to fit 77 tokens)
178
  continuity = f"Scene {page_number}: " if page_number > 1 else ""
179
  final_prompt = f"{continuity}{scene_compressed}. {character_essence}{style_context}. masterpiece, best quality"
180
 
181
- # Ensure it's under 60 words
182
  words = final_prompt.split()
183
- if len(words) > 60:
184
- final_prompt = ' '.join(words[:60])
185
 
186
  print(f"πŸ“ Optimized prompt: {final_prompt}")
187
  print(f"πŸ“ Length: {len(final_prompt.split())} words")
@@ -189,7 +183,7 @@ def optimize_prompt(scene_visual, characters, style="childrens_book", page_numbe
189
  return final_prompt
190
 
191
  def enhance_prompt(scene_visual, characters, style="childrens_book", page_number=1):
192
- """Create optimized prompt that WILL work"""
193
  main_prompt = optimize_prompt(scene_visual, characters, style, page_number)
194
 
195
  negative_prompt = (
@@ -225,10 +219,10 @@ def save_complete_storybook_page(image, story_title, sequence_number, scene_text
225
  }
226
  text_response = requests.post(text_api_url, data=text_data, timeout=30)
227
 
228
- return f"βœ… Page {sequence_number}: Image & Text saved"
229
 
230
  except Exception as e:
231
- return f"❌ Save failed: {str(e)}"
232
 
233
  def get_character_seed(story_title, character_name, page_number):
234
  if story_title not in character_seeds:
@@ -243,12 +237,9 @@ def get_character_seed(story_title, character_name, page_number):
243
 
244
  return character_seeds[story_title][seed_key]
245
 
246
- def generate_single_page(scene_data, story_title, sequence_number, characters, model_choice, style):
247
- """Generate a single page - isolated for error handling"""
248
  try:
249
- scene_visual = scene_data.get('visual', '')
250
- scene_text = scene_data.get('text', '')
251
-
252
  print(f"πŸ”„ Generating page {sequence_number}...")
253
 
254
  enhanced_prompt, negative_prompt = enhance_prompt(
@@ -266,67 +257,42 @@ def generate_single_page(scene_data, story_title, sequence_number, characters, m
266
  main_char_seed = get_character_seed(story_title, main_char_name, sequence_number)
267
  generator.manual_seed(main_char_seed)
268
 
269
- # Generate with current pipe (already loaded)
270
  global current_pipe
271
  image = current_pipe(
272
  prompt=enhanced_prompt,
273
  negative_prompt=negative_prompt,
274
- num_inference_steps=20, # Faster generation
275
  guidance_scale=7.0,
276
- width=512, # Smaller for speed
277
  height=512,
278
  generator=generator
279
  ).images[0]
280
 
281
- save_status = save_complete_storybook_page(image, story_title, sequence_number, scene_text)
 
282
 
283
- return {
284
- "success": True,
285
- "page_number": sequence_number,
286
- "image": image,
287
- "status": save_status
288
- }
289
 
290
  except Exception as e:
291
- return {
292
- "success": False,
293
- "page_number": sequence_number,
294
- "error": f"Generation failed: {str(e)}"
295
- }
296
 
297
- # FastAPI endpoint - OPTIMIZED
298
  @app.post("/api/generate-storybook", response_model=StorybookResponse)
299
  async def api_generate_storybook(request: StorybookRequest):
300
- """API endpoint that WON'T timeout"""
301
  try:
302
  print(f"πŸ“š Received request: {request.story_title}")
303
  print(f"πŸ“– Pages: {len(request.scenes)}")
 
304
 
305
- # IMMEDIATE response to n8n to prevent timeout
306
- response_data = {
307
- "status": "processing",
308
- "story_title": request.story_title,
309
- "total_pages": len(request.scenes),
310
- "characters_used": len(request.characters),
311
- "generated_pages": 0,
312
- "generation_time": 0,
313
- "message": "Processing started in background",
314
- "folder_path": f"storybook-library/stories/{request.story_title.replace(' ', '_')}/",
315
- "pages": []
316
- }
317
-
318
- # Start background processing
319
- background_tasks = BackgroundTasks()
320
- background_tasks.add_task(process_storybook_background, request)
321
-
322
- return response_data
323
-
324
- except Exception as e:
325
- raise HTTPException(status_code=500, detail=f"Request failed: {str(e)}")
326
-
327
- def process_storybook_background(request):
328
- """Background processing to avoid timeouts"""
329
- try:
330
  start_time = time.time()
331
 
332
  # Load model ONCE
@@ -340,14 +306,15 @@ def process_storybook_background(request):
340
  "description": char.description
341
  })
342
 
343
- results = []
344
  status_messages = []
345
 
346
- # Process each page SEQUENTIALLY (better for memory)
347
  for i, scene in enumerate(request.scenes, 1):
348
  try:
349
- result = generate_single_page(
350
- {"visual": scene.visual, "text": scene.text},
 
351
  request.story_title,
352
  i,
353
  characters_dict,
@@ -355,20 +322,18 @@ def process_storybook_background(request):
355
  request.style
356
  )
357
 
358
- if result["success"]:
359
- results.append(result)
360
- status_messages.append(f"Page {i}: {result['status']}")
361
- print(f"βœ… Page {i} completed successfully")
362
  else:
363
- status_messages.append(f"Page {i}: {result['error']}")
364
- print(f"❌ Page {i} failed: {result['error']}")
365
 
366
  # Clean memory after each page
367
  cleanup_memory()
368
 
369
- # Add small delay to prevent resource exhaustion
370
  if i < len(request.scenes):
371
- time.sleep(2)
372
 
373
  except Exception as e:
374
  error_msg = f"Page {i} failed: {str(e)}"
@@ -376,10 +341,35 @@ def process_storybook_background(request):
376
  print(f"❌ {error_msg}")
377
 
378
  total_time = time.time() - start_time
379
- print(f"βœ… Background processing completed in {total_time:.2f} seconds")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
380
 
381
  except Exception as e:
382
- print(f"❌ Background processing failed: {str(e)}")
 
 
383
 
384
  @app.get("/api/health")
385
  async def health_check():
@@ -397,16 +387,27 @@ with gr.Blocks(title="Storybook Generator", theme="soft") as demo:
397
  gr.Markdown("# πŸ“š Storybook Generator")
398
 
399
  with gr.Row():
400
- story_title = gr.Textbox(label="Story Title")
401
- prompt_input = gr.Textbox(label="Scene Description", lines=3)
402
- generate_btn = gr.Button("Generate")
403
  output_image = gr.Image()
404
  status = gr.Textbox()
405
 
 
 
 
 
 
 
 
 
 
 
 
 
 
406
  generate_btn.click(
407
- fn=lambda p, t: generate_single_page(
408
- {"visual": p, "text": ""}, t, 1, [], "dreamshaper-8", "childrens_book"
409
- ),
410
  inputs=[prompt_input, story_title],
411
  outputs=[output_image, status]
412
  )
 
9
  import re
10
  import time
11
  from typing import List, Optional
12
+ from fastapi import FastAPI, HTTPException
13
  from pydantic import BaseModel
14
  import gc
15
  import psutil
16
  import threading
 
17
 
18
  # External OCI API URL
19
  OCI_API_BASE_URL = "https://yukee1992-oci-story-book.hf.space"
 
73
  # Character consistency tracking
74
  character_seeds = {}
75
 
 
 
 
76
  def monitor_memory():
77
  try:
78
  process = psutil.Process()
 
125
  load_model("dreamshaper-8")
126
  print("βœ… Model loaded and ready!")
127
 
128
+ # PROMPT OPTIMIZATION
129
  def optimize_prompt(scene_visual, characters, style="childrens_book", page_number=1):
130
  """
131
+ Create a prompt that FITS within 77 tokens
132
  """
133
+ # Extract character essence
134
  character_essence = ""
135
  if characters:
136
  char_descriptors = []
137
  for char in characters:
138
  desc = char.get('description', '') if isinstance(char, dict) else getattr(char, 'description', '')
139
 
 
140
  import re
141
  species_match = re.search(r'(rabbit|hedgehog|bird|dog|cat|fox|bear|dragon|human|girl|boy)', desc, re.IGNORECASE)
142
  species = species_match.group(1) if species_match else "character"
 
144
  color_match = re.search(r'(white|black|brown|blue|red|green|yellow|golden|pink)', desc, re.IGNORECASE)
145
  color = color_match.group(1) if color_match else ""
146
 
 
147
  key_feature = ""
148
  if 'glasses' in desc.lower(): key_feature = "with glasses"
149
  elif 'dress' in desc.lower(): key_feature = "in dress"
 
151
 
152
  char_descriptors.append(f"{color} {species} {key_feature}".strip())
153
 
154
+ character_essence = f"Characters: {', '.join(char_descriptors)}. "
155
 
156
+ # Compress scene description
157
  scene_words = scene_visual.split()
158
+ if len(scene_words) > 30:
159
+ scene_compressed = ' '.join(scene_words[:30])
160
  else:
161
  scene_compressed = scene_visual
162
 
163
+ # Style context
164
  style_context = {
165
  "childrens_book": "children's book illustration",
166
  "realistic": "photorealistic",
 
168
  "anime": "anime style"
169
  }.get(style, "children's book illustration")
170
 
171
+ # Build final prompt
172
  continuity = f"Scene {page_number}: " if page_number > 1 else ""
173
  final_prompt = f"{continuity}{scene_compressed}. {character_essence}{style_context}. masterpiece, best quality"
174
 
175
+ # Ensure it's under 55 words for safety
176
  words = final_prompt.split()
177
+ if len(words) > 55:
178
+ final_prompt = ' '.join(words[:55])
179
 
180
  print(f"πŸ“ Optimized prompt: {final_prompt}")
181
  print(f"πŸ“ Length: {len(final_prompt.split())} words")
 
183
  return final_prompt
184
 
185
  def enhance_prompt(scene_visual, characters, style="childrens_book", page_number=1):
186
+ """Create optimized prompt"""
187
  main_prompt = optimize_prompt(scene_visual, characters, style, page_number)
188
 
189
  negative_prompt = (
 
219
  }
220
  text_response = requests.post(text_api_url, data=text_data, timeout=30)
221
 
222
+ return True, f"βœ… Page {sequence_number}: Image & Text saved"
223
 
224
  except Exception as e:
225
+ return False, f"❌ Save failed: {str(e)}"
226
 
227
  def get_character_seed(story_title, character_name, page_number):
228
  if story_title not in character_seeds:
 
237
 
238
  return character_seeds[story_title][seed_key]
239
 
240
+ def generate_single_page(scene_visual, scene_text, story_title, sequence_number, characters, model_choice, style):
241
+ """Generate a single page"""
242
  try:
 
 
 
243
  print(f"πŸ”„ Generating page {sequence_number}...")
244
 
245
  enhanced_prompt, negative_prompt = enhance_prompt(
 
257
  main_char_seed = get_character_seed(story_title, main_char_name, sequence_number)
258
  generator.manual_seed(main_char_seed)
259
 
260
+ # Generate image
261
  global current_pipe
262
  image = current_pipe(
263
  prompt=enhanced_prompt,
264
  negative_prompt=negative_prompt,
265
+ num_inference_steps=20,
266
  guidance_scale=7.0,
267
+ width=512,
268
  height=512,
269
  generator=generator
270
  ).images[0]
271
 
272
+ # Save to OCI
273
+ success, save_status = save_complete_storybook_page(image, story_title, sequence_number, scene_text)
274
 
275
+ if success:
276
+ print(f"βœ… Page {sequence_number} completed successfully")
277
+ return True, save_status
278
+ else:
279
+ print(f"❌ Page {sequence_number} save failed: {save_status}")
280
+ return False, save_status
281
 
282
  except Exception as e:
283
+ error_msg = f"❌ Page {sequence_number} generation failed: {str(e)}"
284
+ print(error_msg)
285
+ return False, error_msg
 
 
286
 
287
+ # FastAPI endpoint - SYNCHRONOUS VERSION
288
  @app.post("/api/generate-storybook", response_model=StorybookResponse)
289
  async def api_generate_storybook(request: StorybookRequest):
290
+ """Synchronous API endpoint that actually works on Hugging Face"""
291
  try:
292
  print(f"πŸ“š Received request: {request.story_title}")
293
  print(f"πŸ“– Pages: {len(request.scenes)}")
294
+ print(f"πŸ‘€ Characters: {len(request.characters)}")
295
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
296
  start_time = time.time()
297
 
298
  # Load model ONCE
 
306
  "description": char.description
307
  })
308
 
309
+ generated_count = 0
310
  status_messages = []
311
 
312
+ # Process each page SEQUENTIALLY
313
  for i, scene in enumerate(request.scenes, 1):
314
  try:
315
+ success, message = generate_single_page(
316
+ scene.visual,
317
+ scene.text,
318
  request.story_title,
319
  i,
320
  characters_dict,
 
322
  request.style
323
  )
324
 
325
+ if success:
326
+ generated_count += 1
327
+ status_messages.append(f"Page {i}: {message}")
 
328
  else:
329
+ status_messages.append(f"Page {i}: {message}")
 
330
 
331
  # Clean memory after each page
332
  cleanup_memory()
333
 
334
+ # Add small delay between pages
335
  if i < len(request.scenes):
336
+ time.sleep(1)
337
 
338
  except Exception as e:
339
  error_msg = f"Page {i} failed: {str(e)}"
 
341
  print(f"❌ {error_msg}")
342
 
343
  total_time = time.time() - start_time
344
+
345
+ # Create response
346
+ response_data = {
347
+ "status": "success" if generated_count > 0 else "partial",
348
+ "story_title": request.story_title,
349
+ "total_pages": len(request.scenes),
350
+ "characters_used": len(request.characters),
351
+ "generated_pages": generated_count,
352
+ "generation_time": round(total_time, 2),
353
+ "message": "\n".join(status_messages),
354
+ "folder_path": f"storybook-library/stories/{request.story_title.replace(' ', '_')}/",
355
+ "pages": [
356
+ {
357
+ "page_number": i+1,
358
+ "image_file": f"page_{i+1:03d}_{request.story_title.replace(' ', '_')}.png",
359
+ "text_file": f"page_{i+1:03d}_{request.story_title.replace(' ', '_')}.txt"
360
+ } for i in range(len(request.scenes))
361
+ ]
362
+ }
363
+
364
+ print(f"βœ… Generation completed in {total_time:.2f} seconds")
365
+ print(f"πŸ“Š Generated {generated_count}/{len(request.scenes)} pages")
366
+
367
+ return response_data
368
 
369
  except Exception as e:
370
+ error_msg = f"Storybook generation failed: {str(e)}"
371
+ print(f"❌ {error_msg}")
372
+ raise HTTPException(status_code=500, detail=error_msg)
373
 
374
  @app.get("/api/health")
375
  async def health_check():
 
387
  gr.Markdown("# πŸ“š Storybook Generator")
388
 
389
  with gr.Row():
390
+ story_title = gr.Textbox(label="Story Title", value="Test Story")
391
+ prompt_input = gr.Textbox(label="Scene Description", lines=3, value="A beautiful sunset over mountains")
392
+ generate_btn = gr.Button("Generate Test Page")
393
  output_image = gr.Image()
394
  status = gr.Textbox()
395
 
396
+ def generate_test_page(prompt, title):
397
+ try:
398
+ success, message = generate_single_page(
399
+ prompt, "", title, 1, [], "dreamshaper-8", "childrens_book"
400
+ )
401
+ if success:
402
+ # For demo, return a placeholder since we can't easily get the image back
403
+ return None, message
404
+ else:
405
+ return None, message
406
+ except Exception as e:
407
+ return None, f"Error: {str(e)}"
408
+
409
  generate_btn.click(
410
+ fn=generate_test_page,
 
 
411
  inputs=[prompt_input, story_title],
412
  outputs=[output_image, status]
413
  )