ZhouwqZJ committed on
Commit
2da8cbf
·
1 Parent(s): b3cd8f4

modified: app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -14
app.py CHANGED
@@ -10,7 +10,7 @@ from einops import rearrange
10
  #from fire import Fire
11
  from PIL import ExifTags, Image
12
  from safetensors.torch import load_file, save_file
13
-
14
 
15
  import torch
16
  import torch.nn.functional as F
@@ -116,7 +116,7 @@ class FluxEditor:
116
  return img, gr_gallery
117
 
118
 
119
-
120
  @torch.inference_mode()
121
  def generate_image(
122
  self,
@@ -239,6 +239,7 @@ class FluxEditor:
239
  return img, history_gallery
240
 
241
 
 
242
  @torch.inference_mode()
243
  def edit(self, init_image, source_prompt, target_prompt, editing_strategy, denoise_strategy, num_steps, guidance, attn_guidance_start_block, inject_step, init_image_2=None):
244
 
@@ -531,16 +532,16 @@ def create_demo(model_name: str, device: str = "cuda" if torch.cuda.is_available
531
  return demo
532
 
533
 
534
- if __name__ == "__main__":
535
- import argparse
536
- parser = argparse.ArgumentParser(description="Flux")
537
- parser.add_argument("--name", type=str, default="flux-dev", choices=list(configs.keys()), help="Model name")
538
- parser.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu", help="Device to use")
539
- parser.add_argument("--offload", action="store_true", help="Offload model to CPU when not in use")
540
- parser.add_argument("--share", action="store_true", help="Create a public link to your demo")
541
- parser.add_argument("--port", type=int, default=9090)
542
- args = parser.parse_args()
543
 
544
- demo = create_demo(args.name, args.device, args.offload)
545
- #demo.launch(server_name='0.0.0.0', share=args.share, server_port=args.port)
546
- demo.launch(share=True)
 
 
 
 
 
 
 
 
 
 
10
  #from fire import Fire
11
  from PIL import ExifTags, Image
12
  from safetensors.torch import load_file, save_file
13
+ import spaces
14
 
15
  import torch
16
  import torch.nn.functional as F
 
116
  return img, gr_gallery
117
 
118
 
119
+ @spaces.GPU(duration=120)
120
  @torch.inference_mode()
121
  def generate_image(
122
  self,
 
239
  return img, history_gallery
240
 
241
 
242
+ @spaces.GPU(duration=120)
243
  @torch.inference_mode()
244
  def edit(self, init_image, source_prompt, target_prompt, editing_strategy, denoise_strategy, num_steps, guidance, attn_guidance_start_block, inject_step, init_image_2=None):
245
 
 
532
  return demo
533
 
534
 
 
 
 
 
 
 
 
 
 
535
 
536
+ import argparse
537
+ parser = argparse.ArgumentParser(description="Flux")
538
+ parser.add_argument("--name", type=str, default="flux-dev", choices=list(configs.keys()), help="Model name")
539
+ parser.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu", help="Device to use")
540
+ parser.add_argument("--offload", action="store_true", help="Offload model to CPU when not in use")
541
+ parser.add_argument("--share", action="store_true", help="Create a public link to your demo")
542
+ parser.add_argument("--port", type=int, default=9090)
543
+ args = parser.parse_args()
544
+
545
+ demo = create_demo(args.name, args.device, args.offload)
546
+ #demo.launch(server_name='0.0.0.0', share=args.share, server_port=args.port)
547
+ demo.launch(share=True)