# FLUX.1-dev + LoRA Gradio demo (Hugging Face Space app file)
import gradio as gr
import torch
from diffusers import FluxPipeline
from safetensors.torch import load_file
import os
# CONFIG — the Hugging Face token is read from the HF_TOKEN environment variable
HF_TOKEN = os.getenv('HF_TOKEN')  # None when unset; auth to the gated model will then fail
HF_MODEL = "black-forest-labs/FLUX.1-dev"  # gated base model — requires an accepted license
LORA_FILE = "./lora/20.safetensors"  # local LoRA weights, optional (guarded below)
# LOAD PIPELINE WITH AUTH
# Loads FLUX.1-dev in half precision onto the GPU; aborts the script with a
# nonzero exit status if the model cannot be fetched or initialized.
try:
    pipe = FluxPipeline.from_pretrained(
        HF_MODEL,
        torch_dtype=torch.float16,  # fp16 to fit the large model in GPU memory
        use_safetensors=True,
        token=HF_TOKEN,  # `use_auth_token` is deprecated in recent diffusers/huggingface_hub
    ).to("cuda")
    print("Model loaded successfully.")
except Exception as e:
    print(f"Error loading model: {e}")
    # exit() is the interactive-shell helper; SystemExit(1) signals failure to callers/CI.
    raise SystemExit(1)
# LOAD LORA
# Optionally apply the local LoRA: skip silently when the file is absent,
# log (but don't abort) when loading fails.
if os.path.exists(LORA_FILE):
    try:
        state_dict = load_file(LORA_FILE, device="cuda")
        pipe.load_lora_weights(state_dict)
        pipe.fuse_lora(lora_scale=1.0)  # bake the adapter into the base weights
        print("LoRA loaded successfully.")
    except Exception as e:
        print(f"Error loading LoRA: {e}")
# GENERATE
def generate(prompt, seed=42):
    """Run the FLUX pipeline on *prompt* with a reproducible seed.

    Returns the generated PIL image, or None if generation fails
    (the error is printed to the console for the Space logs).
    """
    rng = torch.Generator("cuda").manual_seed(int(seed))
    try:
        image = pipe(
            prompt,
            generator=rng,
            num_inference_steps=28,
            height=1024,
            width=1024,
        ).images[0]
    except Exception as e:
        print(f"Error during image generation: {e}")
        return None
    print("Image generated successfully.")
    return image
# GRADIO
# Minimal UI: prompt + seed in, one image out.
with gr.Blocks() as demo:
    gr.Markdown("# 🎨 FLUX.1 + My LoRA")
    prompt = gr.Textbox(label="Prompt", value="portrait of san, realistic, 8k")
    seed = gr.Number(label="Seed", value=42)
    output = gr.Image()
    generate_btn = gr.Button("Generate")
    generate_btn.click(generate, [prompt, seed], output)

if __name__ == "__main__":
    demo.launch()