File size: 1,853 Bytes
8aa4e0b
 
 
 
 
 
 
 
 
1a08e8a
8aa4e0b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65a748c
 
8aa4e0b
 
 
 
 
 
 
9733bbf
8aa4e0b
 
 
65a748c
8aa4e0b
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import gradio as gr
import numpy as np
import cv2
from PIL import Image
from controlnet_aux import OpenposeDetector
from gradio_client import Client
from diffusers import StableDiffusionControlNetPipeline, ControlNetModel, UniPCMultistepScheduler
import torch
#CONTROLNET




# Call the hosted ControlNet v1.1 demo's OpenPose endpoint once.
# NOTE(review): this runs at import time and requires network access to the
# Hugging Face Space; consider wrapping it in a function so the UI below can
# start without a live connection.
# (`Client` is already imported at the top of the file — the duplicate
# `from gradio_client import Client` that was here has been removed.)
client = Client("https://hysts-controlnet-v1-1.hf.space/")
result = client.predict(
    "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",  # input image (filepath or URL)
    "Howdy!",      # prompt
    "Howdy!",      # additional prompt
    "Howdy!",      # negative prompt
    1,             # number of images (range 1-1)
    256,           # image resolution (range 256-768)
    128,           # preprocess resolution (range 128-512)
    1,             # number of steps (range 1-100)
    0.1,           # guidance scale (range 0.1-30.0)
    0,             # seed (range 0-2147483647)
    "Openpose",    # preprocessor choice (Radio component)
    api_name="/openpose",
)
# Debug output: inspect what the remote endpoint actually returns.
print("type(result)", type(result))
print(result)


    
    
    

# Two-tab demo UI.  The "Lion" tab shows a local source image, an image
# component seeded with the remote OpenPose `result` from above, and a
# button whose click handler is currently disabled; the "Tiger" tab is a
# bare placeholder.
with gr.Blocks() as demo:
    with gr.Tab("Lion"):
        source_image = gr.Image("Capture.PNG")
        # Debug prints: these show the gr.Image *component object*, not
        # pixel data — presumably left in while exploring the API.
        print("IMG1", source_image)
        print(type(source_image))
        print(type(np.array(source_image)))
        # NOTE(review): `result` comes from client.predict above; whether
        # gr.Image accepts that value directly depends on the endpoint's
        # return shape — confirm at runtime.
        pose_output = gr.Image(result)

        run_button = gr.Button("bout")
    #    run_button.click(fn=prediction, inputs=source_image, outputs=pose_output)
    with gr.Tab("Tiger"):
        gr.Image()
        gr.Button("ebout")


demo.launch()