Toy committed on
Commit
7e5d515
·
1 Parent(s): b9ba091

Make it run locally with new sdxl-base model

Browse files
.env ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # Hugging Face Configuration
2
+ # Point Hugging Face cache to external SSD using official environment variables
3
+ # This will create /Volumes/extssd/huggingface/hub and /Volumes/extssd/huggingface/datasets
4
+ HF_HOME=/Volumes/extssd/huggingface
5
+
6
+ # Model configuration (keep existing values)
7
+ MODEL_ID=stabilityai/stable-diffusion-xl-base-1.0
Makefile CHANGED
@@ -38,7 +38,17 @@ clean: ## Clean up cache files
38
  find . -type f -name "*.pyc" -delete
39
  find . -type f -name "*.pyo" -delete
40
 
41
- dev: ## Start development server
 
 
 
42
  uv run python app.py
43
 
 
 
 
 
 
 
 
44
  all: install setup quality test ## Run complete setup and checks
 
38
  find . -type f -name "*.pyc" -delete
39
  find . -type f -name "*.pyo" -delete
40
 
41
+ dev: ## Start development server (with external SSD cache)
42
+ ./run.sh
43
+
44
+ dev-internal: ## Start development server (with internal cache)
45
  uv run python app.py
46
 
47
+ test-cache: ## Test external SSD cache configuration
48
+ @if [ -d "/Volumes/extssd" ]; then \
49
+ export HF_HOME="/Volumes/extssd/huggingface" && uv run python test_external_cache.py; \
50
+ else \
51
+ echo "❌ External SSD not found at /Volumes/extssd"; \
52
+ fi
53
+
54
  all: install setup quality test ## Run complete setup and checks
app.py CHANGED
@@ -8,6 +8,10 @@ import sys
8
  import traceback
9
 
10
  import gradio as gr
 
 
 
 
11
 
12
  # Add src directory to path for imports
13
  src_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "src")
 
8
  import traceback
9
 
10
  import gradio as gr
11
+ from dotenv import load_dotenv
12
+
13
+ # Load environment variables from .env file
14
+ load_dotenv()
15
 
16
  # Add src directory to path for imports
17
  src_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "src")
download_models.sh CHANGED
@@ -5,6 +5,17 @@
5
 
6
  echo "🌸 Downloading Flowerfy models using Hugging Face CLI..."
7
 
 
 
 
 
 
 
 
 
 
 
 
8
  # Check if huggingface-hub is installed
9
  if ! command -v hf &> /dev/null; then
10
  echo "πŸ“¦ Installing huggingface-hub CLI..."
 
5
 
6
  echo "🌸 Downloading Flowerfy models using Hugging Face CLI..."
7
 
8
+ # Configure Hugging Face to use external SSD cache
9
+ if [ -d "/Volumes/extssd" ]; then
10
+ export HF_HOME="/Volumes/extssd/huggingface"
11
+ echo "βœ… Using external SSD cache at: $HF_HOME"
12
+ # Ensure directory exists
13
+ mkdir -p "$HF_HOME/hub"
14
+ else
15
+ echo "⚠️ External SSD not found at /Volumes/extssd"
16
+ echo " Models will be downloaded to default cache: ~/.cache/huggingface/hub"
17
+ fi
18
+
19
  # Check if huggingface-hub is installed
20
  if ! command -v hf &> /dev/null; then
21
  echo "πŸ“¦ Installing huggingface-hub CLI..."
pyproject.toml CHANGED
@@ -9,6 +9,7 @@ dependencies = [
9
  "diffusers>=0.35.1",
10
  "gradio>=5.44.0",
11
  "pillow>=11.3.0",
 
12
  "scikit-learn>=1.7.1",
13
  "torch>=2.8.0",
14
  "torchvision>=0.23.0",
 
9
  "diffusers>=0.35.1",
10
  "gradio>=5.44.0",
11
  "pillow>=11.3.0",
12
+ "python-dotenv>=1.0.0",
13
  "scikit-learn>=1.7.1",
14
  "torch>=2.8.0",
15
  "torchvision>=0.23.0",
run.sh ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+
3
+ # Flowerfy Startup Script with External SSD Model Support
4
+ # This script sets up Hugging Face cache directories and launches the app
5
+
6
+ echo "🌸 Starting Flowerfy with external SSD model cache..."
7
+
8
+ # Check if external SSD is mounted
9
+ if [ ! -d "/Volumes/extssd" ]; then
10
+ echo "❌ Error: External SSD not found at /Volumes/extssd"
11
+ echo "Please ensure your external SSD is mounted and try again."
12
+ exit 1
13
+ fi
14
+
15
+ # Create cache directories if they don't exist
16
+ mkdir -p /Volumes/extssd/huggingface/hub
17
+
18
+ # Export Hugging Face environment variables (using official HF_HOME method)
19
+ export HF_HOME="/Volumes/extssd/huggingface"
20
+
21
+ echo "βœ… Cache directories configured:"
22
+ echo " HF_HOME: $HF_HOME"
23
+ echo " Models will be cached at: $HF_HOME/hub"
24
+ echo " Datasets will be cached at: $HF_HOME/datasets"
25
+
26
+ # Launch the application with hot reload
27
+ echo "πŸš€ Launching Flowerfy with hot reload..."
28
+ uv run gradio app.py
src/core/config.py CHANGED
@@ -14,9 +14,18 @@ class AppConfig:
14
 
15
  def _setup_device(self):
16
  """Setup device configuration for PyTorch."""
17
- self.device = "cuda" if torch.cuda.is_available() else "cpu"
18
- self.dtype = torch.float16 if self.device == "cuda" else torch.float32
19
- self.clf_device = 0 if torch.cuda.is_available() else -1
 
 
 
 
 
 
 
 
 
20
 
21
  @property
22
  def is_cuda_available(self):
 
14
 
15
  def _setup_device(self):
16
  """Setup device configuration for PyTorch."""
17
+ if torch.cuda.is_available():
18
+ self.device = "cuda"
19
+ self.dtype = torch.float16
20
+ self.clf_device = 0
21
+ elif hasattr(torch.backends, 'mps') and torch.backends.mps.is_available():
22
+ self.device = "mps"
23
+ self.dtype = torch.float16
24
+ self.clf_device = 0
25
+ else:
26
+ self.device = "cpu"
27
+ self.dtype = torch.float32
28
+ self.clf_device = -1
29
 
30
  @property
31
  def is_cuda_available(self):
src/core/constants.py CHANGED
@@ -2,6 +2,10 @@
2
 
3
  import os
4
 
 
 
 
 
5
  # Model configuration
6
  DEFAULT_MODEL_ID = os.getenv("MODEL_ID", "stabilityai/stable-diffusion-xl-base-1.0")
7
  FALLBACK_MODEL_ID = "black-forest-labs/FLUX.1-schnell"
 
2
 
3
  import os
4
 
5
+ # External storage configuration
6
+ # If using external SSD, models will be cached at /Volumes/extssd/huggingface/hub
7
+ # This is configured via environment variables (see .env file and run.sh script)
8
+
9
  # Model configuration
10
  DEFAULT_MODEL_ID = os.getenv("MODEL_ID", "stabilityai/stable-diffusion-xl-base-1.0")
11
  FALLBACK_MODEL_ID = "black-forest-labs/FLUX.1-schnell"
src/services/models/image_generation.py CHANGED
@@ -3,6 +3,7 @@
3
  from typing import Optional
4
 
5
  import torch
 
6
  from diffusers import AutoPipelineForText2Image, FluxPipeline
7
  from PIL import Image
8
 
@@ -110,7 +111,21 @@ class ImageGenerationService:
110
  max_sequence_length=512, # FLUX parameter for text encoding
111
  )
112
 
113
- return result.images[0]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
114
 
115
  def get_model_info(self) -> str:
116
  """Get information about the currently loaded model."""
 
3
  from typing import Optional
4
 
5
  import torch
6
+ import numpy as np
7
  from diffusers import AutoPipelineForText2Image, FluxPipeline
8
  from PIL import Image
9
 
 
111
  max_sequence_length=512, # FLUX parameter for text encoding
112
  )
113
 
114
+ # Validate and clean the image before returning
115
+ image = result.images[0]
116
+
117
+ # Convert to numpy array to check for invalid values
118
+ img_array = np.array(image)
119
+
120
+ # Check for NaN or inf values and replace them
121
+ if np.any(np.isnan(img_array)) or np.any(np.isinf(img_array)):
122
+ print("⚠️ Warning: Image contains invalid values (NaN/inf), cleaning...")
123
+ img_array = np.nan_to_num(img_array, nan=0.0, posinf=255.0, neginf=0.0)
124
+ # Ensure values are in valid range [0, 255]
125
+ img_array = np.clip(img_array, 0, 255).astype(np.uint8)
126
+ image = Image.fromarray(img_array)
127
+
128
+ return image
129
 
130
  def get_model_info(self) -> str:
131
  """Get information about the currently loaded model."""
test_external_cache.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """Test script to verify external SSD cache configuration works correctly."""
3
+
4
+ import os
5
+ import sys
6
+ from pathlib import Path
7
+
8
+ def test_cache_configuration():
9
+ """Test that the external cache configuration is working."""
10
+
11
+ print("πŸ§ͺ Testing External SSD Cache Configuration")
12
+ print("=" * 50)
13
+
14
+ # Check if external SSD is mounted
15
+ external_path = Path("/Volumes/extssd")
16
+ if not external_path.exists():
17
+ print("❌ External SSD not found at /Volumes/extssd")
18
+ return False
19
+
20
+ print("βœ… External SSD is mounted")
21
+
22
+ # Check if HF_HOME is set correctly
23
+ hf_home = os.environ.get("HF_HOME")
24
+ expected_hf_home = "/Volumes/extssd/huggingface"
25
+
26
+ if hf_home != expected_hf_home:
27
+ print(f"⚠️ HF_HOME not set correctly. Expected: {expected_hf_home}, Got: {hf_home}")
28
+ print(" Set HF_HOME with: export HF_HOME=/Volumes/extssd/huggingface")
29
+ return False
30
+
31
+ print(f"βœ… HF_HOME correctly set to: {hf_home}")
32
+
33
+ # Check if cache directories exist
34
+ hub_cache = Path(hf_home) / "hub"
35
+ if not hub_cache.exists():
36
+ print(f"❌ Hub cache directory not found at: {hub_cache}")
37
+ return False
38
+
39
+ print(f"βœ… Hub cache directory exists at: {hub_cache}")
40
+
41
+ # Check if models are present
42
+ model_count = len(list(hub_cache.glob("models--*")))
43
+ print(f"βœ… Found {model_count} models in cache")
44
+
45
+ # Test importing Hugging Face libraries and check their cache detection
46
+ try:
47
+ from huggingface_hub import HfFolder
48
+ from transformers import AutoTokenizer
49
+ from diffusers import DiffusionPipeline
50
+
51
+ print("βœ… Hugging Face libraries imported successfully")
52
+
53
+ # Test a small model to verify cache is working
54
+ print("πŸ”„ Testing cache with a small model (this may take a moment)...")
55
+
56
+ # This should use the external cache
57
+ tokenizer = AutoTokenizer.from_pretrained("openai/clip-vit-base-patch32")
58
+
59
+ print("βœ… Successfully loaded model from cache")
60
+
61
+ # Check if the model files are in the expected location
62
+ clip_path = hub_cache / "models--openai--clip-vit-base-patch32"
63
+ if clip_path.exists():
64
+ print(f"βœ… Model files found in external cache at: {clip_path}")
65
+ else:
66
+ print(f"⚠️ Model files not found at expected location: {clip_path}")
67
+ return False
68
+
69
+ return True
70
+
71
+ except Exception as e:
72
+ print(f"❌ Error loading model: {e}")
73
+ return False
74
+
75
+ def main():
76
+ """Main test function."""
77
+ # Load .env file if available
78
+ env_file = Path(__file__).parent / ".env"
79
+ if env_file.exists():
80
+ print("πŸ“ Loading .env file...")
81
+ from dotenv import load_dotenv
82
+ load_dotenv()
83
+ else:
84
+ print("⚠️ No .env file found, using system environment variables")
85
+
86
+ success = test_cache_configuration()
87
+
88
+ print("\n" + "=" * 50)
89
+ if success:
90
+ print("πŸŽ‰ All tests passed! External SSD cache is working correctly.")
91
+ print("You can now run the application with: ./run.sh")
92
+ else:
93
+ print("❌ Some tests failed. Please check the configuration.")
94
+ sys.exit(1)
95
+
96
+ if __name__ == "__main__":
97
+ main()
uv.lock CHANGED
@@ -266,6 +266,7 @@ dependencies = [
266
  { name = "diffusers" },
267
  { name = "gradio" },
268
  { name = "pillow" },
 
269
  { name = "scikit-learn" },
270
  { name = "torch" },
271
  { name = "torchvision" },
@@ -285,6 +286,7 @@ requires-dist = [
285
  { name = "diffusers", specifier = ">=0.35.1" },
286
  { name = "gradio", specifier = ">=5.44.0" },
287
  { name = "pillow", specifier = ">=11.3.0" },
 
288
  { name = "scikit-learn", specifier = ">=1.7.1" },
289
  { name = "torch", specifier = ">=2.8.0" },
290
  { name = "torchvision", specifier = ">=0.23.0" },
@@ -1032,6 +1034,15 @@ wheels = [
1032
  { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
1033
  ]
1034
 
 
 
 
 
 
 
 
 
 
1035
  [[package]]
1036
  name = "python-multipart"
1037
  version = "0.0.20"
 
266
  { name = "diffusers" },
267
  { name = "gradio" },
268
  { name = "pillow" },
269
+ { name = "python-dotenv" },
270
  { name = "scikit-learn" },
271
  { name = "torch" },
272
  { name = "torchvision" },
 
286
  { name = "diffusers", specifier = ">=0.35.1" },
287
  { name = "gradio", specifier = ">=5.44.0" },
288
  { name = "pillow", specifier = ">=11.3.0" },
289
+ { name = "python-dotenv", specifier = ">=1.0.0" },
290
  { name = "scikit-learn", specifier = ">=1.7.1" },
291
  { name = "torch", specifier = ">=2.8.0" },
292
  { name = "torchvision", specifier = ">=0.23.0" },
 
1034
  { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
1035
  ]
1036
 
1037
+ [[package]]
1038
+ name = "python-dotenv"
1039
+ version = "1.1.1"
1040
+ source = { registry = "https://pypi.org/simple" }
1041
+ sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
1042
+ wheels = [
1043
+ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
1044
+ ]
1045
+
1046
  [[package]]
1047
  name = "python-multipart"
1048
  version = "0.0.20"