Spanicin committed on
Commit
ebb14dd
·
verified ·
1 Parent(s): a10a589

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -13
app.py CHANGED
@@ -17,23 +17,31 @@ app.config['temp_response'] = None
17
  app.config['generation_thread'] = None
18
 
19
 
20
- def initialize_model():
21
- global pipe
22
- try:
23
- print('Downloading the model weights')
24
- device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
25
- xm = load_model('transmitter', device=device)
26
- model = load_model('text300M', device=device)
27
- diffusion = diffusion_from_config(load_config('diffusion'))
28
- return device, xm, model, diffusion
29
- except Exception as e:
30
- print(f"Error downloading the model: {e}")
31
- return jsonify({"error": f"Failed to download model: {str(e)}"}), 500
32
 
33
  def generate_image_gif(prompt):
34
  global pipe
35
  if pipe is None:
36
- device, xm, model, diffusion = initialize_model()
 
 
 
 
 
 
 
 
37
 
38
  try:
39
  batch_size = 1
 
17
  app.config['generation_thread'] = None
18
 
19
 
20
+ # def initialize_model():
21
+ # global pipe
22
+ # try:
23
+ # print('Downloading the model weights')
24
+ # device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
25
+ # xm = load_model('transmitter', device=device)
26
+ # model = load_model('text300M', device=device)
27
+ # diffusion = diffusion_from_config(load_config('diffusion'))
28
+ # return device, xm, model, diffusion
29
+ # except Exception as e:
30
+ # print(f"Error downloading the model: {e}")
31
+ # return jsonify({"error": f"Failed to download model: {str(e)}"}), 500
32
 
33
  def generate_image_gif(prompt):
34
  global pipe
35
  if pipe is None:
36
+ try:
37
+ print('Downloading the model weights')
38
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
39
+ xm = load_model('transmitter', device=device)
40
+ model = load_model('text300M', device=device)
41
+ diffusion = diffusion_from_config(load_config('diffusion'))
42
+ pipe = 'Model loaded'
43
+ except Exception as e:
44
+ print(f"Error downloading the model: {e}")
45
 
46
  try:
47
  batch_size = 1