Flulike99 committed on
Commit
551e4bd
Β·
1 Parent(s): 96d1435
Files changed (1) hide show
  1. app.py +14 -25
app.py CHANGED
@@ -17,18 +17,16 @@ secret_model = os.environ.get("MODEL_PATH")
17
  # δ»ŽηŽ―ε’ƒε˜ι‡θŽ·ε–εŸΊη‘€ζ¨‘εž‹θ·―εΎ„
18
  BASE_MODEL = os.environ.get("BASE_MODEL_ID")
19
 
20
- try:
21
- from cascade.condition import Condition
22
- from cascade.generate import generate
23
- from cascade.lora_controller import set_lora_scale
24
- FLUX_AVAILABLE = True
25
- except ImportError as e:
26
- print(f"Warning: FLUX components not available: {e}")
27
- FLUX_AVAILABLE = False
28
 
29
  from huggingface_hub import hf_hub_download
30
  from safetensors.torch import load_file
31
 
 
 
 
32
  # θͺθ¨ΌγƒˆγƒΌγ‚―ンを使ってフゑむルをダウンロード
33
  model_path = hf_hub_download(
34
  repo_id="Cascade-Inc/private_model",
@@ -49,6 +47,9 @@ def get_gpu_memory_gb() -> float:
49
  return torch.cuda.get_device_properties(0).total_memory / 1024**3
50
 
51
  def init_pipeline_if_needed():
 
 
 
52
 
53
  print("πŸš€ Initializing pipeline...")
54
 
@@ -92,8 +93,9 @@ def init_pipeline_if_needed():
92
  print("🎨 Loading Cascade weights...")
93
  _pipe.load_lora_weights(MODEL_PATH, adapter_name=ADAPTER_NAME)
94
  _pipe.set_adapters([ADAPTER_NAME])
95
- pipe = _pipe
96
  print("βœ… Pipeline initialized successfully!")
 
97
 
98
  def _to_pil_rgba(img: Any) -> Image.Image:
99
  """Convert various inputs to PIL RGBA image"""
@@ -252,24 +254,11 @@ def apply_style(image: Image.Image, style: str, width: int = 1024, height: int =
252
  return styled_image
253
 
254
  def generate_background_local(styled_image: Image.Image, prompt: str, steps: int = 10, width: int = 1024, height: int = 1024) -> Image.Image:
255
- """Generate background using local FLUX model"""
256
  width = int(width)
257
  height = int(height)
258
-
259
- if not FLUX_AVAILABLE:
260
- # Return a simple gradient background if FLUX is not available
261
- if styled_image is None:
262
- return Image.new("RGB", (width, height), (200, 200, 255))
263
- styled_image = _center_subject_on_canvas(styled_image, width, height)
264
- # Create a simple colored background
265
- bg = Image.new("RGB", (width, height), (200, 220, 255))
266
- if styled_image.mode == "RGBA":
267
- bg.paste(styled_image, (0, 0), styled_image)
268
- else:
269
- bg.paste(styled_image, (0, 0))
270
- return bg
271
 
272
- init_pipeline_if_needed()
273
 
274
  if styled_image is None:
275
  return Image.new("RGB", (width, height), (255, 255, 255))
@@ -291,7 +280,7 @@ def generate_background_local(styled_image: Image.Image, prompt: str, steps: int
291
 
292
  with set_lora_scale([ADAPTER_NAME], scale=3.0):
293
  result_img = generate(
294
- #pipe,
295
  model_config=model_config,
296
  prompt=prompt.strip() if prompt else "",
297
  conditions=[condition],
 
17
  # δ»ŽηŽ―ε’ƒε˜ι‡θŽ·ε–εŸΊη‘€ζ¨‘εž‹θ·―εΎ„
18
  BASE_MODEL = os.environ.get("BASE_MODEL_ID")
19
 
20
+ from cascade.condition import Condition
21
+ from cascade.generate import generate
22
+ from cascade.lora_controller import set_lora_scale
 
 
 
 
 
23
 
24
  from huggingface_hub import hf_hub_download
25
  from safetensors.torch import load_file
26
 
27
+ # Global pipeline variable
28
+ _global_pipe = None
29
+
30
  # θͺθ¨ΌγƒˆγƒΌγ‚―ンを使ってフゑむルをダウンロード
31
  model_path = hf_hub_download(
32
  repo_id="Cascade-Inc/private_model",
 
47
  return torch.cuda.get_device_properties(0).total_memory / 1024**3
48
 
49
  def init_pipeline_if_needed():
50
+ global _global_pipe
51
+ if _global_pipe is not None:
52
+ return _global_pipe
53
 
54
  print("πŸš€ Initializing pipeline...")
55
 
 
93
  print("🎨 Loading Cascade weights...")
94
  _pipe.load_lora_weights(MODEL_PATH, adapter_name=ADAPTER_NAME)
95
  _pipe.set_adapters([ADAPTER_NAME])
96
+ _global_pipe = _pipe
97
  print("βœ… Pipeline initialized successfully!")
98
+ return _global_pipe
99
 
100
  def _to_pil_rgba(img: Any) -> Image.Image:
101
  """Convert various inputs to PIL RGBA image"""
 
254
  return styled_image
255
 
256
  def generate_background_local(styled_image: Image.Image, prompt: str, steps: int = 10, width: int = 1024, height: int = 1024) -> Image.Image:
257
+ """Generate background using local model"""
258
  width = int(width)
259
  height = int(height)
 
 
 
 
 
 
 
 
 
 
 
 
 
260
 
261
+ pipe = init_pipeline_if_needed()
262
 
263
  if styled_image is None:
264
  return Image.new("RGB", (width, height), (255, 255, 255))
 
280
 
281
  with set_lora_scale([ADAPTER_NAME], scale=3.0):
282
  result_img = generate(
283
+ pipe=pipe,
284
  model_config=model_config,
285
  prompt=prompt.strip() if prompt else "",
286
  conditions=[condition],