# NOTE: Scraped Hugging Face Spaces page header (status "Sleeping", file size
# 1,473 bytes, commit hashes, line-number gutter) removed — it was not Python
# code and would not parse.
import fal_client
from src.utils import numpy_to_base64
from src.helpers import resize_image
from src.deepl import detect_and_translate
def fal_ipadapter_api(input_image, ip_image, seg_prompt):
    """Run the cloth IP-Adapter ComfyUI workflow on fal.ai and return the result URL.

    Args:
        input_image: Base image, passed through ``numpy_to_base64`` (presumably a
            numpy array — TODO confirm against callers).
        ip_image: IP-Adapter reference image, also base64-encoded before upload.
        seg_prompt: Segmentation prompt; run through ``detect_and_translate``
            before being handed to the GroundingDINO+SAM node.

    Returns:
        str: URL of the first image produced by workflow node ``"20"``
        (assumed to be the workflow's output/save node — confirm against the
        ComfyUI graph).
    """
    handler = fal_client.submit(
        "comfy/JarvisSan22/cloth_ipadapter",
        arguments={
            "loadimage_1": numpy_to_base64(input_image),
            "loadimage_2": numpy_to_base64(ip_image),
            # Key is the exact node title in the ComfyUI workflow — do not edit.
            "groundingdinosamsegment (segment anything)_prompt": detect_and_translate(seg_prompt),
        },
    )
    # Blocks until the remote workflow finishes.
    result = handler.get()
    return result["outputs"]["20"]["images"][0]["url"]
def fal_faceswap_api(input_image, face_image):
    """Swap the face from *face_image* onto *input_image* via fal.ai face-swap.

    Both images are passed through ``resize_image`` before upload
    (presumably to bound request payload size — TODO confirm).

    Args:
        input_image: Target (base) image; base64-encoded before upload.
        face_image: Source face image; base64-encoded before upload.

    Returns:
        str: URL of the swapped result image.
    """
    input_image = resize_image(input_image)
    face_image = resize_image(face_image)
    handler = fal_client.submit(
        "fal-ai/face-swap",
        arguments={
            "base_image_url": numpy_to_base64(input_image),
            "swap_image_url": numpy_to_base64(face_image),
        },
    )
    # Blocks until the remote job finishes.
    result = handler.get()
    return result["image"]["url"]