# Bl4ckSpaces — Update app.py — commit 669b255 (verified)
# (Hugging Face Hub page header captured along with the file; not part of the app.)
import gradio as gr
from gradio_client import Client
import random
import time
import os
# --- 1. CONFIGURATION ---
TARGET_URL = "https://black-forest-labs-flux-2-dev.hf.space"

# SECURITY NOTE(review): hardcoding API tokens in source is a credential leak —
# anyone who can read this file owns these accounts, and the "hf_" + "..." split
# only defeats naive scanners, not a human reader. Prefer supplying tokens via
# the HF_TOKENS environment variable (comma-separated). The inline list below
# is kept only as a backward-compatible fallback; rotate/revoke these tokens.
_FALLBACK_TOKENS = [
    "hf_" + "PiRCDDtPcPFMLWkTkVaZmzoleHOunXnLIA", "hf_" + "BHvZXGICstaktSwycmwNmzHGrTNmKxnlRZ",
    "hf_" + "ZdgawyTPzXIpwhnRYIteUKSMsWnEDtGKtM", "hf_" + "nMiFYAFsINxAJWPwiCQlaunmdgmrcxKoaT",
    "hf_" + "PccpUIbTckCiafwErDLkRlsvqhgtfZaBHL", "hf_" + "faGyXBPfBkaHXDMUSJtxEggonhhZbomFIz",
    "hf_" + "SndsPaRWsevDXCgZcSjTUlBYUJqOkSfFmn", "hf_" + "CqobFdUpeVCeuhUaiuXwvdczBUmoUHXRGa",
    "hf_" + "JKCQYUhhHPPkpucegqkNSyureLdXpmeXRF", "hf_" + "tBYfslUwHNiNMufzwAYIlrDVovEWmOQulC",
    "hf_" + "LKLdrdUxyUyKODSUthmqHXqDMfHrQueera", "hf_" + "ivSBboJYQVcifWkCNcOTOnxUQrZOtOglnU",
]

# Environment override wins when present and non-empty; otherwise fall back.
HF_TOKENS = [
    t.strip() for t in os.environ.get("HF_TOKENS", "").split(",") if t.strip()
] or _FALLBACK_TOKENS
def get_valid_token():
    """Return one token picked uniformly at random from the shared pool."""
    pool = HF_TOKENS
    return pool[random.randrange(len(pool))]
# --- 2. GENERATION LOGIC ---
def process_generation(prompt, width, height, guidance, steps, seed):
    """
    Proxy a single generation request to the remote Flux Space.

    Flow (exactly as in the reference example):
    1. pick a token from the pool,
    2. attach it as an Authorization header,
    3. call client.predict with api_name='/infer'.

    Parameters
    ----------
    prompt : str      text prompt forwarded verbatim
    width, height     pixel dimensions (coerced to int)
    guidance          guidance scale (coerced to float)
    steps             inference step count (coerced to int)
    seed              RNG seed; -1 means "randomize on the server"

    Returns
    -------
    The image result from the Space (first element when the Space returns a
    tuple/list), or None when every retry attempt has failed.
    """
    max_retries = 5

    # Coerce inputs up front so a bad type fails fast, before any network call.
    width = int(width)
    height = int(height)
    guidance = float(guidance)
    steps = int(steps)
    seed = int(seed)

    for attempt in range(max_retries):
        try:
            current_token = get_valid_token()
            print(f"🚀 Generating with token ending ...{current_token[-5:]}")
            # KEY: client with a manually injected Authorization header,
            # so each attempt can use a different token.
            client = Client(
                TARGET_URL,
                headers={"Authorization": f"Bearer {current_token}"},
            )
            # Fire the request at the Flux Space.
            result = client.predict(
                prompt=prompt,
                seed=seed if seed != -1 else 0,
                randomize_seed=(seed == -1),
                width=width,
                height=height,
                guidance_scale=guidance,
                num_inference_steps=steps,
                api_name="/infer",
            )
            # The Space may return a tuple/list; the image is the first item.
            if isinstance(result, (list, tuple)):
                return result[0]
            return result
        except Exception as e:
            err_msg = str(e)
            print(f"❌ Error (Attempt {attempt+1}): {err_msg}")
            # Quota/queue errors mean this token is exhausted — the next loop
            # iteration picks a fresh one. Any other error is retried too, on
            # the assumption it may be a transient network glitch.
            if "429" in err_msg or "quota" in err_msg.lower() or "queue" in err_msg.lower():
                print("🔄 Token limit, switching token...")
            # Don't waste a sleep after the final failed attempt.
            if attempt + 1 < max_retries:
                time.sleep(1)
    return None  # all attempts failed
# --- 3. UI BLOCKS (workaround for the 'allow_flagging' error) ---
# Uses gr.Blocks as in the reference example, NOT gr.Interface.
with gr.Blocks() as app:
    # Hidden inputs that back the API endpoint (not meant for interactive use).
    with gr.Row(visible=False):
        prompt = gr.Textbox(label="Prompt")
        width = gr.Number(value=1024, label="Width")
        height = gr.Number(value=1024, label="Height")
        guidance = gr.Number(value=3.5, label="Guidance")
        steps = gr.Number(value=28, label="Steps")
        seed = gr.Number(value=-1, label="Seed")

    # Output image component.
    out_image = gr.Image(label="Result")

    # Trigger button (hidden is fine — only the click event matters).
    btn_gen = gr.Button("Generate", visible=False)

    # --- API ENDPOINT DEFINITION ---
    # api_name="generate" exposes the endpoint at /api/generate.
    btn_gen.click(
        process_generation,
        inputs=[prompt, width, height, guidance, steps, seed],
        outputs=out_image,
        api_name="generate"
    )

    # Extra: a visible banner so the GUI shows something when opened directly.
    # The app is primarily intended to be driven through the API.
    with gr.Row():
        gr.Markdown("## Flux Load Balancer API Ready")
# Launch
if __name__ == "__main__":
    # queue(max_size=20) bounds pending requests; bind on all interfaces at
    # port 7860 (the standard Hugging Face Spaces port).
    app.queue(max_size=20).launch(server_name="0.0.0.0", server_port=7860)