# app.py — FLUX.1 Kontext image editor (Gradio UI + HF Inference Providers)
# Origin: Hugging Face Space by pikapool1994, commit 3b56cc0 ("Update app.py", verified)
import io
import gradio as gr
from huggingface_hub import InferenceClient
from PIL import Image
def edit_image(hf_token, input_image, prompt):
    """Edit an image with a natural-language instruction via FLUX.1 Kontext.

    Args:
        hf_token: Hugging Face access token (may be None/empty if the user
            left the textbox blank — validated below).
        input_image: PIL.Image.Image uploaded by the user, or None.
        prompt: Natural-language edit instruction (may be None/empty).

    Returns:
        (edited_image | None, status_message): a PIL image and "Done!" on
        success, or (None, <human-readable error>) on validation/API failure.
    """
    # Gradio can hand us None (not just "") for a cleared textbox, so guard
    # with `or ""` before stripping to avoid an AttributeError.
    token = (hf_token or "").strip()
    if not token:
        return None, "Please enter your HF token."
    if input_image is None:
        return None, "Please upload an image."
    instruction = (prompt or "").strip()
    if not instruction:
        return None, "Please enter an edit instruction."
    try:
        client = InferenceClient(provider="fal-ai", api_key=token)
        # Serialize the upload as PNG (lossless) for the API payload.
        buf = io.BytesIO()
        input_image.save(buf, format="PNG")
        result = client.image_to_image(
            buf.getvalue(),
            prompt=instruction,
            model="black-forest-labs/FLUX.1-Kontext-dev",
        )
        return result, "Done!"
    except Exception as e:
        # Surface auth/API/network failures in the UI instead of crashing.
        return None, f"Error: {e}"
# Build the Gradio UI: token setup accordion, then input column (image +
# instruction + button) beside the output column (edited image + status).
with gr.Blocks(title="FLUX Kontext Image Editor") as demo:
    gr.Markdown("# FLUX.1 Kontext Image Editor")
    gr.Markdown("Edit images with natural language. Fast results in ~10 seconds.")

    # One-time setup: the user supplies their own HF token.
    with gr.Accordion("Setup - Enter your HF Token", open=True):
        gr.Markdown("""
        1. Get your token at [huggingface.co/settings/tokens](https://huggingface.co/settings/tokens)
        2. Enable **Inference Providers** permission
        3. Accept model license at [FLUX.1 Kontext dev](https://huggingface.co/black-forest-labs/FLUX.1-Kontext-dev)
        4. Paste token below
        """)
        token_box = gr.Textbox(label="HF Token", placeholder="hf_...", type="password")

    with gr.Row():
        with gr.Column():
            source_image = gr.Image(type="pil", label="Upload Image")
            instruction_box = gr.Textbox(
                label="Edit Instruction",
                placeholder="e.g. make the sky look like a sunset",
                lines=2,
            )
            edit_button = gr.Button("Edit Image", variant="primary")
        with gr.Column():
            result_image = gr.Image(label="Edited Image")
            status_box = gr.Textbox(label="Status", interactive=False)

    # Wire the button to the editing function.
    edit_button.click(
        fn=edit_image,
        inputs=[token_box, source_image, instruction_box],
        outputs=[result_image, status_box],
    )

if __name__ == "__main__":
    demo.launch()