chips committed on
Commit
0158ac0
·
1 Parent(s): 7b7082d

adding some outputs

Browse files
Files changed (2) hide show
  1. app.py +5 -2
  2. base_generator.py +3 -2
app.py CHANGED
@@ -18,8 +18,10 @@ def greet_json():
18
  #endpoints related to base image generation
19
 
20
  @app.post("/makeBaseImage")
21
- async def make_base_image(image: UploadFile = File(...)):
22
- return("something")
 
 
23
 
24
  # Function related to virtual outfit try on
25
 
@@ -27,6 +29,7 @@ async def make_base_image(image: UploadFile = File(...)):
27
 
28
 
29
  def combine_garment_images(Upper_garment: UploadFile = File(...), Lower_garment: UploadFile = File(...)):
 
30
  result = base_generator.create_image()
31
  return(result)
32
 
 
18
  #endpoints related to base image generation
19
 
20
@app.post("/makeBaseImage")
async def make_base_image(character_lora: str, character_keyword: str, outfit_desc: str, pose_id: int):
    """Generate a base character image via the ComfyICU workflow.

    Args:
        character_lora: Filename of the character LoRA weights to load.
        character_keyword: Trigger keyword identifying the character in the prompt.
        outfit_desc: Free-text description of the outfit to render.
        pose_id: Identifier of the pose reference to use.

    Returns:
        The run object/response produced by base_generator.create_image.
    """
    # NOTE(review): prefer the logging module over print for server-side tracing.
    print("make base image")
    result = base_generator.create_image(character_lora, character_keyword, outfit_desc, pose_id)
    # Idiom: `return` is a statement, not a function call.
    return result
25
 
26
  # Function related to virtual outfit try on
27
 
 
29
 
30
 
31
def combine_garment_images(Upper_garment: UploadFile = File(...), Lower_garment: UploadFile = File(...)):
    """Combine an upper and a lower garment image for virtual try-on.

    Args:
        Upper_garment: Uploaded image of the upper garment.
        Lower_garment: Uploaded image of the lower garment.

    Returns:
        The result produced by base_generator.create_image.

    NOTE(review): neither uploaded file is currently used, and
    base_generator.create_image() is invoked with no arguments even though
    create_image elsewhere in this commit requires four positional
    parameters — this call will raise TypeError at runtime. Confirm the
    intended arguments (and whether this function needs an @app route
    decorator) before shipping.
    """
    print("combine garment images")
    result = base_generator.create_image()
    # Idiom: `return` is a statement, not a function call.
    return result
35
 
base_generator.py CHANGED
@@ -9,8 +9,7 @@ import requests
9
 
10
 
11
  def run_workflow(body):
12
-
13
-
14
  url = "https://comfy.icu/api/v1/workflows/" + body['workflow_id'] + "/runs"
15
  headers = {
16
  "accept": "application/json",
@@ -23,6 +22,7 @@ def run_workflow(body):
23
 
24
  def create_image(character_lora, character_keyword, outfit_desc, pose_id):
25
  seed = random.randint(0, 1000000)
 
26
  prompt = {"56": {"_meta": {"title": "CLIP Text Encode (Prompt)"}, "inputs": {"clip": ["365", 1], "text": ""}, "class_type": "CLIPTextEncode"}, "159": {"_meta": {"title": "Load VAE"}, "inputs": {"vae_name": "flux1-ae.safetensors"}, "class_type": "VAELoader"}, "175": {"_meta": {"title": "Apply ControlNet"}, "inputs": {"vae": ["159", 0], "image": ["369", 0], "negative": ["56", 0], "positive": ["199", 0], "strength": 0.7000000000000001, "control_net": ["260", 0], "end_percent": 0.5, "start_percent": 0}, "class_type": "ControlNetApplyAdvanced"}, "199": {"_meta": {"title": "CLIP Text Encode (Prompt)"}, "inputs": {"clip": ["365", 1], "text": f"Fashion model {character_keyword} wearing {outfit_desc}. posing in front of white background"}, "class_type": "CLIPTextEncode"}, "260": {"_meta": {"title": "Load ControlNet Model"}, "inputs": {"control_net_name": "flux.1-dev-controlnet-union.safetensors"}, "class_type": "ControlNetLoader"}, "263": {"_meta": {"title": "Save Image"}, "inputs": {"images": ["311", 0], "filename_prefix": "ControlNet"}, "class_type": "SaveImage"}, "307": {"_meta": {"title": "FluxGuidance"}, "inputs": {"guidance": 3.5, "conditioning": ["175", 0]}, "class_type": "FluxGuidance"}, "308": {"_meta": {"title": "KSampler"}, "inputs": {"cfg": 1, "seed": seed, "model": ["365", 0], "steps": 20, "denoise": 1, "negative": ["335", 0], "positive": ["307", 0], "scheduler": "simple", "latent_image": ["344", 0], "sampler_name": "euler"}, "class_type": "KSampler"}, "310": {"_meta": {"title": "DualCLIPLoader"}, "inputs": {"type": "flux", "device": "default", "clip_name1": "t5xxl_fp8_e4m3fn.safetensors", "clip_name2": "clip_l.safetensors"}, "class_type": "DualCLIPLoader"}, "311": {"_meta": {"title": "VAE Decode"}, "inputs": {"vae": ["159", 0], "samples": ["308", 0]}, "class_type": "VAEDecode"}, "335": {"_meta": {"title": "FluxGuidance"}, "inputs": {"guidance": 3.5, "conditioning": ["175", 1]}, "class_type": "FluxGuidance"}, "344": {"_meta": {"title": "Empty Latent Image"}, 
"inputs": {"width": 544, "height": 960, "batch_size": 1}, "class_type": "EmptyLatentImage"}, "363": {"_meta": {"title": "Load Diffusion Model"}, "inputs": {"unet_name": "flux1-dev-fp8-e4m3fn.safetensors", "weight_dtype": "fp8_e4m3fn"}, "class_type": "UNETLoader"}, "365": {"_meta": {"title": "Load LoRA"}, "inputs": {"clip": ["310", 0], "model": ["363", 0], "lora_name": character_lora, "strength_clip": 0.99, "strength_model": 0.84}, "class_type": "LoraLoader"}, "369": {"_meta": {"title": "Load Image"}, "inputs": {"image": "Pose_Female_Front_full_standing_02.webp_00001_.png", "upload": "image"}, "class_type": "LoadImage"}}
27
 
28
  files = {"/input/Pose_Female_Front_full_standing_02.webp_00001_.png": "https://comfy.icu/api/v1/view/workflows/SqG44yXRdRzxGQmfWwlSt/input/Pose_Female_Front_full_standing_02.webp_00001_.png", "/models/loras/7Jd1cwsai241yWWSPDW_k_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/lion/7Jd1cwsai241yWWSPDW_k_pytorch_lora_weights.safetensors", "/models/loras/N5sJtK8XVftjPlIj3idOB_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/monkey/N5sJtK8XVftjPlIj3idOB_pytorch_lora_weights.safetensors", "/models/loras/xVhN3ierb8IFqGRNOQpBT_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/tiger/xVhN3ierb8IFqGRNOQpBT_pytorch_lora_weights.safetensors", "/models/loras/yOA2a06KWEMR-ewq3j8io_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/rabbit/yOA2a06KWEMR-ewq3j8io_pytorch_lora_weights.safetensors"};
@@ -31,6 +31,7 @@ def create_image(character_lora, character_keyword, outfit_desc, pose_id):
31
 
32
  run = run_workflow({"workflow_id": workflow_id, "prompt": prompt, "files": files})
33
  print(run)
 
34
 
35
  if __name__ == "__main__":
36
  character_lora = "yOA2a06KWEMR-ewq3j8io_pytorch_lora_weights.safetensors"
 
9
 
10
 
11
  def run_workflow(body):
12
+ print("run workflow")
 
13
  url = "https://comfy.icu/api/v1/workflows/" + body['workflow_id'] + "/runs"
14
  headers = {
15
  "accept": "application/json",
 
22
 
23
  def create_image(character_lora, character_keyword, outfit_desc, pose_id):
24
  seed = random.randint(0, 1000000)
25
+ print(f"seed: {seed}")
26
  prompt = {"56": {"_meta": {"title": "CLIP Text Encode (Prompt)"}, "inputs": {"clip": ["365", 1], "text": ""}, "class_type": "CLIPTextEncode"}, "159": {"_meta": {"title": "Load VAE"}, "inputs": {"vae_name": "flux1-ae.safetensors"}, "class_type": "VAELoader"}, "175": {"_meta": {"title": "Apply ControlNet"}, "inputs": {"vae": ["159", 0], "image": ["369", 0], "negative": ["56", 0], "positive": ["199", 0], "strength": 0.7000000000000001, "control_net": ["260", 0], "end_percent": 0.5, "start_percent": 0}, "class_type": "ControlNetApplyAdvanced"}, "199": {"_meta": {"title": "CLIP Text Encode (Prompt)"}, "inputs": {"clip": ["365", 1], "text": f"Fashion model {character_keyword} wearing {outfit_desc}. posing in front of white background"}, "class_type": "CLIPTextEncode"}, "260": {"_meta": {"title": "Load ControlNet Model"}, "inputs": {"control_net_name": "flux.1-dev-controlnet-union.safetensors"}, "class_type": "ControlNetLoader"}, "263": {"_meta": {"title": "Save Image"}, "inputs": {"images": ["311", 0], "filename_prefix": "ControlNet"}, "class_type": "SaveImage"}, "307": {"_meta": {"title": "FluxGuidance"}, "inputs": {"guidance": 3.5, "conditioning": ["175", 0]}, "class_type": "FluxGuidance"}, "308": {"_meta": {"title": "KSampler"}, "inputs": {"cfg": 1, "seed": seed, "model": ["365", 0], "steps": 20, "denoise": 1, "negative": ["335", 0], "positive": ["307", 0], "scheduler": "simple", "latent_image": ["344", 0], "sampler_name": "euler"}, "class_type": "KSampler"}, "310": {"_meta": {"title": "DualCLIPLoader"}, "inputs": {"type": "flux", "device": "default", "clip_name1": "t5xxl_fp8_e4m3fn.safetensors", "clip_name2": "clip_l.safetensors"}, "class_type": "DualCLIPLoader"}, "311": {"_meta": {"title": "VAE Decode"}, "inputs": {"vae": ["159", 0], "samples": ["308", 0]}, "class_type": "VAEDecode"}, "335": {"_meta": {"title": "FluxGuidance"}, "inputs": {"guidance": 3.5, "conditioning": ["175", 1]}, "class_type": "FluxGuidance"}, "344": {"_meta": {"title": "Empty Latent Image"}, 
"inputs": {"width": 544, "height": 960, "batch_size": 1}, "class_type": "EmptyLatentImage"}, "363": {"_meta": {"title": "Load Diffusion Model"}, "inputs": {"unet_name": "flux1-dev-fp8-e4m3fn.safetensors", "weight_dtype": "fp8_e4m3fn"}, "class_type": "UNETLoader"}, "365": {"_meta": {"title": "Load LoRA"}, "inputs": {"clip": ["310", 0], "model": ["363", 0], "lora_name": character_lora, "strength_clip": 0.99, "strength_model": 0.84}, "class_type": "LoraLoader"}, "369": {"_meta": {"title": "Load Image"}, "inputs": {"image": "Pose_Female_Front_full_standing_02.webp_00001_.png", "upload": "image"}, "class_type": "LoadImage"}}
27
 
28
  files = {"/input/Pose_Female_Front_full_standing_02.webp_00001_.png": "https://comfy.icu/api/v1/view/workflows/SqG44yXRdRzxGQmfWwlSt/input/Pose_Female_Front_full_standing_02.webp_00001_.png", "/models/loras/7Jd1cwsai241yWWSPDW_k_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/lion/7Jd1cwsai241yWWSPDW_k_pytorch_lora_weights.safetensors", "/models/loras/N5sJtK8XVftjPlIj3idOB_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/monkey/N5sJtK8XVftjPlIj3idOB_pytorch_lora_weights.safetensors", "/models/loras/xVhN3ierb8IFqGRNOQpBT_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/tiger/xVhN3ierb8IFqGRNOQpBT_pytorch_lora_weights.safetensors", "/models/loras/yOA2a06KWEMR-ewq3j8io_pytorch_lora_weights.safetensors": "https://v3.fal.media/files/rabbit/yOA2a06KWEMR-ewq3j8io_pytorch_lora_weights.safetensors"};
 
31
 
32
  run = run_workflow({"workflow_id": workflow_id, "prompt": prompt, "files": files})
33
  print(run)
34
+ return(run)
35
 
36
  if __name__ == "__main__":
37
  character_lora = "yOA2a06KWEMR-ewq3j8io_pytorch_lora_weights.safetensors"