Spaces:
Running on Zero
Running on Zero
Fix gradio version, model repo, and remove rmbg-v2
Browse files
- Bump gradio to 5.47.0 and pin all deps from reference project
- Update sdk_version to 5.47.0 in README
- Switch HF_REPO_ID to tellurion/sdxl
- Fetch available models dynamically via list_repo_files()
- Change default_mask_extractor to ISNet (non-gated)
- Remove rmbg-v2 (gated briaai/RMBG-2.0 repo, inaccessible without token)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
- README.md +1 -1
- backend/__init__.py +2 -2
- backend/appfunc.py +14 -17
- preprocessor/__init__.py +0 -1
- requirements.txt +20 -18
README.md
CHANGED
|
@@ -4,7 +4,7 @@ emoji: 🎨
|
|
| 4 |
colorFrom: purple
|
| 5 |
colorTo: pink
|
| 6 |
sdk: gradio
|
| 7 |
-
sdk_version: "
|
| 8 |
python_version: "3.10"
|
| 9 |
app_file: app.py
|
| 10 |
pinned: false
|
|
|
|
| 4 |
colorFrom: purple
|
| 5 |
colorTo: pink
|
| 6 |
sdk: gradio
|
| 7 |
+
sdk_version: "5.47.0"
|
| 8 |
python_version: "3.10"
|
| 9 |
app_file: app.py
|
| 10 |
pinned: false
|
backend/__init__.py
CHANGED
|
@@ -11,6 +11,6 @@ __all__ = [
|
|
| 11 |
|
| 12 |
|
| 13 |
default_line_extractor = "lineart_keras"
|
| 14 |
-
default_mask_extractor = "rmbg-v2"
|
| 15 |
-
mask_extractor_list = ["none", "ISNet", "rmbg-v2", "BiRefNet", "BiRefNet_HR"]
|
| 16 |
line_extractor_list = ["lineart", "lineart_denoise", "lineart_keras", "lineart_sk"]
|
|
|
|
| 11 |
|
| 12 |
|
| 13 |
default_line_extractor = "lineart_keras"
|
| 14 |
+
default_mask_extractor = "ISNet"
|
| 15 |
+
mask_extractor_list = ["none", "ISNet", "BiRefNet", "BiRefNet_HR"]
|
| 16 |
line_extractor_list = ["lineart", "lineart_denoise", "lineart_keras", "lineart_sk"]
|
backend/appfunc.py
CHANGED
|
@@ -4,7 +4,7 @@ import traceback
|
|
| 4 |
import gradio as gr
|
| 5 |
import os.path as osp
|
| 6 |
|
| 7 |
-
from huggingface_hub import hf_hub_download
|
| 8 |
|
| 9 |
from omegaconf import OmegaConf
|
| 10 |
from refnet.util import instantiate_from_config
|
|
@@ -22,15 +22,9 @@ smask_extractor = create_model("ISNet-sketch").cpu()
|
|
| 22 |
MAXM_INT32 = 429496729
|
| 23 |
|
| 24 |
# HuggingFace model repository
|
| 25 |
-
HF_REPO_ID = "tellurion/
|
| 26 |
MODEL_CACHE_DIR = "models"
|
| 27 |
|
| 28 |
-
# Model registry: filename -> model_type
|
| 29 |
-
MODEL_REGISTRY = {
|
| 30 |
-
"sdxl.safetensors": "sdxl",
|
| 31 |
-
"xlv2.safetensors": "xlv2",
|
| 32 |
-
}
|
| 33 |
-
|
| 34 |
model_types = ["sdxl", "xlv2"]
|
| 35 |
|
| 36 |
'''
|
|
@@ -39,8 +33,12 @@ model_types = ["sdxl", "xlv2"]
|
|
| 39 |
|
| 40 |
|
| 41 |
def get_available_models():
|
| 42 |
-
"""
|
| 43 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
|
| 45 |
|
| 46 |
def download_model(filename):
|
|
@@ -104,13 +102,12 @@ def load_model(ckpt_name):
|
|
| 104 |
config_root = "configs/inference"
|
| 105 |
|
| 106 |
try:
|
| 107 |
-
# Determine model type from
|
| 108 |
-
new_model_type =
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
break
|
| 114 |
|
| 115 |
if model_type != new_model_type or not "model" in globals():
|
| 116 |
if "model" in globals() and exists(model):
|
|
|
|
| 4 |
import gradio as gr
|
| 5 |
import os.path as osp
|
| 6 |
|
| 7 |
+
from huggingface_hub import hf_hub_download, list_repo_files
|
| 8 |
|
| 9 |
from omegaconf import OmegaConf
|
| 10 |
from refnet.util import instantiate_from_config
|
|
|
|
| 22 |
MAXM_INT32 = 429496729
|
| 23 |
|
| 24 |
# HuggingFace model repository
|
| 25 |
+
HF_REPO_ID = "tellurion/sdxl"
|
| 26 |
MODEL_CACHE_DIR = "models"
|
| 27 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
model_types = ["sdxl", "xlv2"]
|
| 29 |
|
| 30 |
'''
|
|
|
|
| 33 |
|
| 34 |
|
| 35 |
def get_available_models():
|
| 36 |
+
"""Fetch available .safetensors files from HuggingFace Hub."""
|
| 37 |
+
try:
|
| 38 |
+
files = list_repo_files(HF_REPO_ID)
|
| 39 |
+
return [f for f in files if f.endswith(".safetensors")]
|
| 40 |
+
except Exception:
|
| 41 |
+
return []
|
| 42 |
|
| 43 |
|
| 44 |
def download_model(filename):
|
|
|
|
| 102 |
config_root = "configs/inference"
|
| 103 |
|
| 104 |
try:
|
| 105 |
+
# Determine model type from filename prefix
|
| 106 |
+
new_model_type = ""
|
| 107 |
+
for key in model_types:
|
| 108 |
+
if ckpt_name.startswith(key):
|
| 109 |
+
new_model_type = key
|
| 110 |
+
break
|
|
|
|
| 111 |
|
| 112 |
if model_type != new_model_type or not "model" in globals():
|
| 113 |
if "model" in globals() and exists(model):
|
preprocessor/__init__.py
CHANGED
|
@@ -43,7 +43,6 @@ remote_model_dict = {
|
|
| 43 |
}
|
| 44 |
|
| 45 |
BiRefNet_dict = {
|
| 46 |
-
"rmbg-v2": ("briaai/RMBG-2.0", 1024),
|
| 47 |
"BiRefNet": ("ZhengPeng7/BiRefNet", 1024),
|
| 48 |
"BiRefNet_HR": ("ZhengPeng7/BiRefNet_HR", 2048)
|
| 49 |
}
|
|
|
|
| 43 |
}
|
| 44 |
|
| 45 |
BiRefNet_dict = {
|
|
|
|
| 46 |
"BiRefNet": ("ZhengPeng7/BiRefNet", 1024),
|
| 47 |
"BiRefNet_HR": ("ZhengPeng7/BiRefNet_HR", 2048)
|
| 48 |
}
|
requirements.txt
CHANGED
|
@@ -1,19 +1,21 @@
|
|
| 1 |
-
accelerate
|
| 2 |
-
diffusers
|
| 3 |
-
einops
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
|
|
|
|
|
|
| 9 |
opencv-python-headless
|
| 10 |
-
pillow
|
| 11 |
-
safetensors
|
| 12 |
-
timm
|
| 13 |
-
tokenizers
|
| 14 |
-
torch
|
| 15 |
-
torchdiffeq
|
| 16 |
-
torchsde
|
| 17 |
-
torchvision
|
| 18 |
-
tqdm
|
| 19 |
-
transformers
|
|
|
|
| 1 |
+
accelerate==1.10.1
|
| 2 |
+
diffusers==0.35.1
|
| 3 |
+
einops==0.8.1
|
| 4 |
+
gradio==5.47.0
|
| 5 |
+
gradio-client==1.13.2
|
| 6 |
+
huggingface-hub==0.35.1
|
| 7 |
+
kornia==0.8.1
|
| 8 |
+
numpy==2.2.6
|
| 9 |
+
omegaconf==2.3.0
|
| 10 |
+
open-clip-torch==2.24.0
|
| 11 |
opencv-python-headless
|
| 12 |
+
pillow==11.3.0
|
| 13 |
+
safetensors==0.6.2
|
| 14 |
+
timm==1.0.20
|
| 15 |
+
tokenizers==0.22.1
|
| 16 |
+
torch==2.8.0
|
| 17 |
+
torchdiffeq==0.2.5
|
| 18 |
+
torchsde==0.2.6
|
| 19 |
+
torchvision==0.23.0
|
| 20 |
+
tqdm==4.67.1
|
| 21 |
+
transformers==4.57.0
|