KeerthiVM committed on
Commit
89c9f22
·
unverified ·
1 Parent(s): 9e7c301

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -0
app.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Gradio front-end that upscales images via the Hugging Face Inference API."""

import io
import os
import time

import gradio as gr
from huggingface_hub import InferenceClient
from PIL import Image

# Model and token are configurable through the environment; the token may be
# absent, in which case the client hits the public endpoint anonymously.
HF_MODEL_ID = os.getenv("HF_MODEL_ID", "stabilityai/stable-diffusion-x4-upscaler")
HF_TOKEN = os.getenv("HF_TOKEN")
client = InferenceClient(token=HF_TOKEN)
def call_inference_image(fileobj, scale: int = 2):
    """Upscale an uploaded image through the HF Inference API.

    Args:
        fileobj: The Gradio upload — a filesystem path, a tempfile wrapper
            exposing ``.name``, or a binary file-like object.
        scale: Requested upscale factor from the UI slider.
            NOTE(review): the raw-bytes image endpoint does not take extra
            parameters, so the factor is not forwarded; the x4 upscaler
            presumably has a fixed factor — confirm before exposing models
            that honor a ``scale`` parameter.

    Returns:
        tuple: ``(PIL.Image.Image, latency)`` when the API returns raw image
        bytes, otherwise ``(str(response), latency)``; ``latency`` is a
        string such as ``"0.123s"``.
    """
    start = time.time()
    # Accept the three upload shapes Gradio may hand us.
    if isinstance(fileobj, (str, os.PathLike)):
        with open(fileobj, "rb") as fh:
            img_bytes = fh.read()
    elif hasattr(fileobj, "name"):
        # Bug fix: the original `open(...).read()` leaked the file handle.
        with open(fileobj.name, "rb") as fh:
            img_bytes = fh.read()
    else:
        img_bytes = fileobj.read()
    # Bug fix: InferenceClient has no `invoke` method; `post` is the generic
    # raw-request call against a hosted model.
    res = client.post(data=img_bytes, model=HF_MODEL_ID)
    latency = time.time() - start
    if isinstance(res, (bytes, bytearray)):
        return Image.open(io.BytesIO(res)), f"{latency:.3f}s"
    return str(res), f"{latency:.3f}s"
# UI layout: an input row (image + slider), two outputs, and one action button.
with gr.Blocks() as demo:
    gr.Markdown("# API-based Upscaler (calls HF Inference API)")
    with gr.Row():
        # NOTE(review): `type="file"` is not a valid value in Gradio 4.x
        # (valid: "numpy"/"pil"/"filepath") — confirm the pinned gradio
        # version still supports it.
        image_input = gr.Image(type="file", label="Upload image")
        scale_input = gr.Slider(2, 8, value=2, step=1, label="Scale (requested)")
    result_image = gr.Image(label="Result")
    latency_box = gr.Textbox(label="Latency")
    upscale_btn = gr.Button("Upscale via API")
    upscale_btn.click(
        call_inference_image,
        inputs=[image_input, scale_input],
        outputs=[result_image, latency_box],
    )
if __name__ == "__main__":
    # Bind all interfaces on port 7860 — the conventional HF Spaces setup.
    demo.launch(server_name="0.0.0.0", server_port=7860)