|
|
import gradio as gr |
|
|
from gradio_client import Client |
|
|
import random |
|
|
import time |
|
|
import os |
|
|
|
|
|
|
|
|
# Remote Hugging Face Space that actually runs the FLUX model.
TARGET_URL = "https://black-forest-labs-flux-2-dev.hf.space"

# SECURITY: the original file hardcoded a dozen Hugging Face API tokens
# directly in source (obfuscated with "hf_" + "..." concatenation to dodge
# secret scanners). Credentials must never live in source control — any
# token that was committed here should be considered leaked and revoked.
#
# Tokens are now supplied via the HF_TOKENS environment variable as a
# comma-separated list, e.g.:
#   export HF_TOKENS="hf_aaa...,hf_bbb...,hf_ccc..."
HF_TOKENS = [
    token.strip()
    for token in os.environ.get("HF_TOKENS", "").split(",")
    if token.strip()
]

if not HF_TOKENS:
    # Fail loudly at startup rather than mysteriously at request time.
    print("⚠ HF_TOKENS env var is empty — set a comma-separated token list.")
|
|
|
|
|
def get_valid_token():
    """Pick one token, uniformly at random, from the rotation pool."""
    pool = HF_TOKENS
    return pool[random.randrange(len(pool))]
|
|
|
|
|
|
|
|
def process_generation(prompt, width, height, guidance, steps, seed):
    """Generate an image by proxying the request to the remote FLUX Space.

    Flow per attempt:
      1. Pick a random token from the rotation pool.
      2. Build a gradio client authorized with that token.
      3. Call the Space's '/infer' endpoint.

    Args:
        prompt: Text prompt for the image.
        width: Output width in pixels.
        height: Output height in pixels.
        guidance: Guidance scale for the diffusion model.
        steps: Number of inference steps.
        seed: RNG seed; -1 asks the server to randomize the seed.

    Returns:
        The generated image (first element when the endpoint returns a
        list/tuple), or None if every retry fails.
    """
    max_retries = 5

    # Gradio Number components deliver floats; coerce to the exact types
    # the remote endpoint expects.
    width = int(width)
    height = int(height)
    guidance = float(guidance)
    steps = int(steps)
    seed = int(seed)

    for attempt in range(max_retries):
        try:
            current_token = get_valid_token()
            print(f"π Generating with token ending ...{current_token[-5:]}")

            # Fresh client per attempt so a new token takes effect.
            client = Client(
                TARGET_URL,
                headers={"Authorization": f"Bearer {current_token}"}
            )

            result = client.predict(
                prompt=prompt,
                # seed=-1 is our sentinel for "randomize"; the endpoint
                # still needs a concrete seed value alongside the flag.
                seed=seed if seed != -1 else 0,
                randomize_seed=(seed == -1),
                width=width,
                height=height,
                guidance_scale=guidance,
                num_inference_steps=steps,
                api_name="/infer"
            )

            # Some endpoints return (image, seed); keep only the image.
            if isinstance(result, (list, tuple)):
                return result[0]
            return result

        except Exception as e:
            err_msg = str(e)
            print(f"β Error (Attempt {attempt+1}): {err_msg}")

            # Rate-limit / quota / queue errors: the next attempt will
            # rotate to a different random token anyway; just log it.
            # (Removed a dead `else: pass` branch from the original.)
            if "429" in err_msg or "quota" in err_msg.lower() or "queue" in err_msg.lower():
                print("π Token limit, switching token...")

            # Brief pause before retrying, for every failure class.
            time.sleep(1)

    # All retries exhausted.
    return None
|
|
|
|
|
|
|
|
|
|
|
# Headless-style Gradio app: the real interface is the named API endpoint
# ("generate"), not the visual page — hence every control is hidden.
with gr.Blocks() as app:

    # Input components are created inside a hidden Row; they exist only so
    # the click handler below has typed inputs exposed over the API.
    with gr.Row(visible=False):
        prompt = gr.Textbox(label="Prompt")
        width = gr.Number(value=1024, label="Width")
        height = gr.Number(value=1024, label="Height")
        guidance = gr.Number(value=3.5, label="Guidance")
        steps = gr.Number(value=28, label="Steps")
        seed = gr.Number(value=-1, label="Seed")

    # Output slot for the generated image (visible in the page).
    out_image = gr.Image(label="Result")

    # Hidden trigger; clients invoke it via the API rather than the UI.
    btn_gen = gr.Button("Generate", visible=False)

    # api_name="generate" exposes this handler at /api/generate so
    # gradio_client callers can reach process_generation directly.
    btn_gen.click(
        process_generation,
        inputs=[prompt, width, height, guidance, steps, seed],
        outputs=out_image,
        api_name="generate"
    )

    # The only visible element: a status banner for humans who open the page.
    with gr.Row():
        gr.Markdown("## Flux Load Balancer API Ready")
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Queue caps pending requests at 20; bind on all interfaces on the
    # conventional Gradio/HF Spaces port 7860.
    app.queue(max_size=20).launch(server_name="0.0.0.0", server_port=7860)
|
|
|