# app1/app.py
# Provenance: Hugging Face Space upload by Kev09 ("Update app.py", commit 9733bbf)
import gradio as gr
import numpy as np
import cv2
from PIL import Image
from controlnet_aux import OpenposeDetector
from gradio_client import Client
from diffusers import StableDiffusionControlNetPipeline, ControlNetModel, UniPCMultistepScheduler
import torch
# --- ControlNet (remote inference via the hysts/ControlNet-v1-1 Space) ---
# NOTE: `Client` is already imported at the top of the file; the duplicate
# `from gradio_client import Client` that used to sit here has been removed.
client = Client("https://hysts-controlnet-v1-1.hf.space/")

# Run the OpenPose preprocessor + ControlNet pipeline on a sample image.
# `result` is consumed below when building the Gradio UI (gr.Image(result)),
# so it must stay defined at module level.
result = client.predict(
    # str (filepath or URL) — input image
    "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",
    "Howdy!",    # str — 'Prompt'
    "Howdy!",    # str — 'Additional prompt'
    "Howdy!",    # str — 'Negative prompt'
    1,           # int — 'Number of images' (1..1)
    256,         # int — 'Image resolution' (256..768)
    128,         # int — 'Preprocess resolution' (128..512)
    1,           # int — 'Number of steps' (1..100)
    0.1,         # float — 'Guidance scale' (0.1..30.0)
    0,           # int — 'Seed' (0..2147483647)
    "Openpose",  # str — 'Preprocessor' radio choice
    api_name="/openpose",
)
print("type(result)", type(result))
print(result)
# --- Gradio UI ---
# Two tabs; only "Lion" is populated with content for now. Debug prints that
# inspected the gr.Image component object (not actual pixel data) were removed.
with gr.Blocks() as demo:
    with gr.Tab("Lion"):
        # Static input preview; "Capture.PNG" must exist next to this script.
        img1 = gr.Image("Capture.PNG")
        # Display the remote ControlNet result computed at import time above.
        imgout = gr.Image(result)
        btn1 = gr.Button("bout")
        # TODO: wire the button once a `prediction` function exists:
        # btn1.click(fn=prediction, inputs=img1, outputs=imgout)
    with gr.Tab("Tiger"):
        # Placeholder tab — no behavior yet.
        gr.Image()
        gr.Button("ebout")

demo.launch()