MogensR committed on
Commit
b369c04
·
verified ·
1 Parent(s): 7a982a5

Update processing/ai_background.py

Browse files
Files changed (1) hide show
  1. processing/ai_background.py +74 -23
processing/ai_background.py CHANGED
@@ -1,7 +1,7 @@
1
  #!/usr/bin/env python3
2
  """
3
- AI Background Generator Module
4
- Handles Stable Diffusion background generation with proper dependency management.
5
  """
6
 
7
  import os
@@ -11,18 +11,21 @@
11
  import logging
12
  from pathlib import Path
13
  from typing import Optional
 
 
14
 
15
  logger = logging.getLogger(__name__)
16
 
17
  class AIBackgroundGenerator:
18
  """
19
- Stable Diffusion background generator with dependency isolation.
20
  """
21
 
22
  def __init__(self, temp_dir: Optional[str] = None):
23
  self.temp_dir = temp_dir or tempfile.gettempdir()
24
  self.available = False
25
  self.error_message = None
 
26
  self._check_dependencies()
27
 
28
  def _check_dependencies(self):
@@ -30,36 +33,78 @@ def _check_dependencies(self):
30
  try:
31
  import torch
32
  if not torch.cuda.is_available():
33
- self.error_message = "CUDA not available - AI background generation requires GPU"
 
34
  return
35
 
36
  # Test diffusers import
37
  from diffusers import StableDiffusionPipeline, StableDiffusionImg2ImgPipeline
38
 
39
  self.available = True
40
- logger.info("AI Background Generator: Dependencies OK")
41
 
42
  except ImportError as e:
43
  if "torch.library" in str(e) and "custom_op" in str(e):
44
- self.error_message = (
45
- "PyTorch/Diffusers version mismatch. Please update:\n"
46
- "pip install --upgrade torch diffusers transformers accelerate"
47
- )
48
  else:
49
- self.error_message = f"Missing dependencies: {e}"
50
- logger.warning(f"AI Background Generator unavailable: {self.error_message}")
 
51
  except Exception as e:
52
- self.error_message = f"Unexpected error checking dependencies: {e}"
53
  logger.error(f"AI Background Generator error: {self.error_message}")
 
54
 
55
  def is_available(self) -> bool:
56
- """Check if AI background generation is available."""
57
- return self.available
58
 
59
  def get_error_message(self) -> Optional[str]:
60
  """Get error message if dependencies are not available."""
61
  return self.error_message
62
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
  def generate_background(
64
  self,
65
  width: int,
@@ -73,22 +118,27 @@ def generate_background(
73
  seed: Optional[int] = None,
74
  ) -> str:
75
  """
76
- Generate AI background image.
77
 
78
  Returns:
79
  Path to generated background image
80
 
81
  Raises:
82
- RuntimeError: If dependencies are not available or generation fails
83
  """
84
- if not self.available:
85
- raise RuntimeError(f"AI Background not available: {self.error_message}")
86
 
 
 
 
 
 
 
87
  try:
88
  # Import here to avoid issues if dependencies not available
89
  import torch
90
  from diffusers import StableDiffusionPipeline, StableDiffusionImg2ImgPipeline
91
  from PIL import Image
 
92
 
93
  device = "cuda" if torch.cuda.is_available() else "cpu"
94
  torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
@@ -99,7 +149,7 @@ def generate_background(
99
  seed = random.randint(0, 2**31 - 1)
100
  generator.manual_seed(seed)
101
 
102
- logger.info(f"Generating {width}x{height} background: '{prompt}' (seed: {seed})")
103
 
104
  # Choose pipeline based on whether we have an init image
105
  if init_image_path and os.path.exists(init_image_path):
@@ -165,7 +215,7 @@ def generate_background(
165
  # Save result
166
  output_path = os.path.join(
167
  self.temp_dir,
168
- f"ai_bg_{int(os.times().elapsed)}_{seed:08x}.jpg"
169
  )
170
  result.save(output_path, quality=95, optimize=True)
171
 
@@ -176,12 +226,13 @@ def generate_background(
176
  except Exception:
177
  pass
178
 
179
- logger.info(f"AI background generated: {output_path}")
180
  return output_path
181
 
182
  except Exception as e:
183
- logger.error(f"AI background generation failed: {e}")
184
- raise RuntimeError(f"Background generation failed: {e}")
 
185
 
186
 
187
  # Convenience function for easy import
 
1
  #!/usr/bin/env python3
2
  """
3
+ AI Background Generator Module - Updated to handle dependency conflicts
4
+ Handles Stable Diffusion background generation with graceful fallbacks.
5
  """
6
 
7
  import os
 
11
  import logging
12
  from pathlib import Path
13
  from typing import Optional
14
+ import numpy as np
15
+ import cv2
16
 
17
  logger = logging.getLogger(__name__)
18
 
19
  class AIBackgroundGenerator:
20
  """
21
+ AI background generator with dependency isolation and themed fallbacks.
22
  """
23
 
24
  def __init__(self, temp_dir: Optional[str] = None):
25
  self.temp_dir = temp_dir or tempfile.gettempdir()
26
  self.available = False
27
  self.error_message = None
28
+ self.use_fallback = False
29
  self._check_dependencies()
30
 
31
  def _check_dependencies(self):
 
33
  try:
34
  import torch
35
  if not torch.cuda.is_available():
36
+ self.error_message = "CUDA not available - using themed gradient fallback"
37
+ self.use_fallback = True
38
  return
39
 
40
  # Test diffusers import
41
  from diffusers import StableDiffusionPipeline, StableDiffusionImg2ImgPipeline
42
 
43
  self.available = True
44
+ logger.info("AI Background Generator: Full Stable Diffusion available")
45
 
46
  except ImportError as e:
47
  if "torch.library" in str(e) and "custom_op" in str(e):
48
+ self.error_message = "PyTorch/Diffusers version mismatch - using themed gradient fallback"
49
+ logger.warning(f"Diffusers unavailable due to dependency conflict: {e}")
 
 
50
  else:
51
+ self.error_message = f"Missing dependencies - using themed gradient fallback: {e}"
52
+ logger.warning(f"AI Background Generator using fallback: {self.error_message}")
53
+ self.use_fallback = True
54
  except Exception as e:
55
+ self.error_message = f"Unexpected error - using themed gradient fallback: {e}"
56
  logger.error(f"AI Background Generator error: {self.error_message}")
57
+ self.use_fallback = True
58
 
59
  def is_available(self) -> bool:
60
+ """Check if AI background generation is available (includes fallback)."""
61
+ return self.available or self.use_fallback
62
 
63
  def get_error_message(self) -> Optional[str]:
64
  """Get error message if dependencies are not available."""
65
  return self.error_message
66
 
67
+ def _make_vertical_gradient(self, width: int, height: int, c1: tuple, c2: tuple) -> np.ndarray:
68
+ """Create a vertical gradient image."""
69
+ top = np.array(c1, dtype=np.float32)
70
+ bot = np.array(c2, dtype=np.float32)
71
+ rows = np.linspace(top, bot, num=max(1, height), dtype=np.float32)
72
+ grad = np.repeat(rows[:, None, :], repeats=max(1, width), axis=1)
73
+ return np.clip(grad, 0, 255).astype(np.uint8)
74
+
75
+ def _generate_themed_gradient(self, width: int, height: int, prompt: str, seed: Optional[int] = None) -> str:
76
+ """Generate themed gradient based on prompt analysis."""
77
+ prompt_lower = prompt.lower()
78
+
79
+ # Analyze prompt for appropriate colors
80
+ if any(word in prompt_lower for word in ["blue", "ocean", "sky", "water", "corporate"]):
81
+ colors = ((173, 216, 230), (0, 191, 255)) # Blue gradient
82
+ elif any(word in prompt_lower for word in ["green", "nature", "forest", "plant", "garden"]):
83
+ colors = ((34, 139, 34), (144, 238, 144)) # Green gradient
84
+ elif any(word in prompt_lower for word in ["warm", "office", "professional", "beige", "neutral"]):
85
+ colors = ((240, 240, 245), (200, 200, 210)) # Warm office
86
+ elif any(word in prompt_lower for word in ["dark", "night", "black", "studio", "dramatic"]):
87
+ colors = ((20, 20, 25), (40, 40, 45)) # Dark professional
88
+ elif any(word in prompt_lower for word in ["sunset", "orange", "pink", "warm"]):
89
+ colors = ((255, 182, 193), (255, 105, 180)) # Sunset pink
90
+ elif any(word in prompt_lower for word in ["purple", "violet", "magenta"]):
91
+ colors = ((147, 112, 219), (216, 191, 216)) # Purple gradient
92
+ elif any(word in prompt_lower for word in ["red", "burgundy", "wine"]):
93
+ colors = ((220, 20, 60), (255, 182, 193)) # Red gradient
94
+ else:
95
+ colors = ((40, 40, 48), (96, 96, 112)) # Default slate
96
+
97
+ # Create gradient
98
+ grad = self._make_vertical_gradient(width, height, colors[0], colors[1])
99
+
100
+ # Save result
101
+ seed_str = f"_{seed:08x}" if seed else ""
102
+ output_path = os.path.join(self.temp_dir, f"themed_bg_{int(time.time())}{seed_str}.jpg")
103
+ cv2.imwrite(output_path, grad)
104
+
105
+ logger.info(f"Generated themed gradient for prompt: '{prompt}' -> {colors}")
106
+ return output_path
107
+
108
  def generate_background(
109
  self,
110
  width: int,
 
118
  seed: Optional[int] = None,
119
  ) -> str:
120
  """
121
+ Generate AI background image with fallback to themed gradients.
122
 
123
  Returns:
124
  Path to generated background image
125
 
126
  Raises:
127
+ RuntimeError: If both AI generation and fallback fail
128
  """
 
 
129
 
130
+ # Use fallback if AI generation unavailable
131
+ if self.use_fallback or not self.available:
132
+ logger.info(f"Using themed gradient fallback for: '{prompt}'")
133
+ return self._generate_themed_gradient(width, height, prompt, seed)
134
+
135
+ # Try full AI generation
136
  try:
137
  # Import here to avoid issues if dependencies not available
138
  import torch
139
  from diffusers import StableDiffusionPipeline, StableDiffusionImg2ImgPipeline
140
  from PIL import Image
141
+ import time
142
 
143
  device = "cuda" if torch.cuda.is_available() else "cpu"
144
  torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
 
149
  seed = random.randint(0, 2**31 - 1)
150
  generator.manual_seed(seed)
151
 
152
+ logger.info(f"Generating {width}x{height} AI background: '{prompt}' (seed: {seed})")
153
 
154
  # Choose pipeline based on whether we have an init image
155
  if init_image_path and os.path.exists(init_image_path):
 
215
  # Save result
216
  output_path = os.path.join(
217
  self.temp_dir,
218
+ f"ai_bg_{int(time.time())}_{seed:08x}.jpg"
219
  )
220
  result.save(output_path, quality=95, optimize=True)
221
 
 
226
  except Exception:
227
  pass
228
 
229
+ logger.info(f"AI background generated successfully: {output_path}")
230
  return output_path
231
 
232
  except Exception as e:
233
+ logger.warning(f"AI generation failed: {e}, falling back to themed gradient")
234
+ # Fall back to themed gradient on any error
235
+ return self._generate_themed_gradient(width, height, prompt, seed)
236
 
237
 
238
  # Convenience function for easy import