himanshu-skid19 committed on
Commit
ee280e1
·
1 Parent(s): 72360bb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -172,7 +172,7 @@ def p_sample_loop(model, shape):
172
  img = torch.randn(shape, device=device)
173
  imgs = []
174
 
175
- for i in tqdm(reversed(range(0, timesteps)), desc='sampling loop time step', total=10):
176
  img = p_sample(model, img, torch.full((b,), i, device=device, dtype=torch.long), 3)
177
  imgs.append(img.cpu().numpy())
178
  return imgs
@@ -191,7 +191,7 @@ model.load_state_dict(torch.load("new_linear_model_1090.pt", map_location=torch.
191
 
192
  if(st.button("Click to generate image")):
193
  samples = sample(model, image_size=img_size, batch_size=64, channels=3)
194
- for i in range(10):
195
  reverse_transforms = transforms.Compose([
196
  transforms.Lambda(lambda t: (t + 1) / 2),
197
  transforms.Lambda(lambda t: t.permute(1, 2, 0)), # CHW to HWC
 
172
  img = torch.randn(shape, device=device)
173
  imgs = []
174
 
175
+ for i in tqdm(reversed(range(0, timesteps)), desc='sampling loop time step', total=1):
176
  img = p_sample(model, img, torch.full((b,), i, device=device, dtype=torch.long), 3)
177
  imgs.append(img.cpu().numpy())
178
  return imgs
 
191
 
192
  if(st.button("Click to generate image")):
193
  samples = sample(model, image_size=img_size, batch_size=64, channels=3)
194
+ for i in range(1):
195
  reverse_transforms = transforms.Compose([
196
  transforms.Lambda(lambda t: (t + 1) / 2),
197
  transforms.Lambda(lambda t: t.permute(1, 2, 0)), # CHW to HWC