concauu committed on
Commit
b5751ae
·
verified ·
1 Parent(s): 3fe0b36

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -5
app.py CHANGED
@@ -1,7 +1,7 @@
1
  from PIL import Image
2
  import gradio as gr
3
  import torch
4
- from diffusers import FluxPipeline
5
  from groq import Groq # Import the Groq library
6
  from cryptography.fernet import Fernet
7
  from huggingface_hub import login
@@ -25,10 +25,7 @@ def get_hf_token(encrypted_token):
25
  groq_client = Groq(api_key="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")
26
  decrypted_token = get_hf_token("gAAAAABn3GfShExoJd50nau3B5ZJNiQ9dRD1ACO3XXMwVaIQMkmi59cL-MKGr6SYnsB0E2gGITJG2j29Ar9yjaZP-EC6hHsCBmwKSj4aFtTor9_n0_NdMBv1GtlxZRmwnQwriB-Xr94e")
27
  login(token=decrypted_token)
28
- pipe = FluxPipeline.from_pretrained(
29
- "black-forest-labs/FLUX.1-dev",
30
- torch_dtype=torch.bfloat16
31
- )
32
  pipe.enable_model_cpu_offload() #save some VRAM by offloading the model to CPU. Remove this if you have enough GPU power
33
 
34
  def enhance_prompt(user_prompt):
 
1
  from PIL import Image
2
  import gradio as gr
3
  import torch
4
+ from diffusers import DiffusionPipeline
5
  from groq import Groq # Import the Groq library
6
  from cryptography.fernet import Fernet
7
  from huggingface_hub import login
 
25
  groq_client = Groq(api_key="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")
26
  decrypted_token = get_hf_token("gAAAAABn3GfShExoJd50nau3B5ZJNiQ9dRD1ACO3XXMwVaIQMkmi59cL-MKGr6SYnsB0E2gGITJG2j29Ar9yjaZP-EC6hHsCBmwKSj4aFtTor9_n0_NdMBv1GtlxZRmwnQwriB-Xr94e")
27
  login(token=decrypted_token)
28
+ pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell")
 
 
 
29
  pipe.enable_model_cpu_offload() #save some VRAM by offloading the model to CPU. Remove this if you have enough GPU power
30
 
31
  def enhance_prompt(user_prompt):