# NOTE(review): the lines that stood here ("text / stringlengths / 1 / 93.6k")
# were dataset-export residue (a table header), not part of the source; removed.
# Command-line flags for the evaluation scripts below.
parser.add_argument('--test_fixed_size', type=int, default=-1)  # patch edge length for patched testing; -1 means whole-image testing
parser.add_argument('--test_batch_size', type=int, default=1)  # batch size used when running the network at test time
parser.add_argument('--epoch', type=int)  # checkpoint epoch to evaluate — presumably selects a saved model; confirm against caller
parser.add_argument('--name', type=str)  # experiment/run name — presumably used to locate checkpoints/outputs; confirm against caller
parser.add_argument('--split', type=str)  # dataset split to evaluate (e.g. val/test) — TODO confirm accepted values
def test_model_origin(net, data_loader, save_output=False, save_path=None, test_fixed_size=-1, test_batch_size=1, gpus=None):
    """Evaluate ``net`` on whole images and report counting errors.

    Args:
        net: model; called as ``net(data)`` and expected to return a
            density-map tensor whose sum is the estimated count.
        data_loader: yields blobs via ``get_loader(batch_size)`` and
            supports ``len()`` (number of test images).
        save_output: when True, each blob additionally carries the ground-truth
            density map, and the input image / gt map / estimated map are
            written to ``save_path``.
        save_path: output directory for the saved images (only used when
            ``save_output`` is True).
        test_fixed_size: unused here; kept so the signature matches
            ``test_model_patches``.
        test_batch_size: batch size handed to the data loader.
        gpus: list of device ids; only its length is used, to pace the
            progress log. May be None (treated as a single device).

    Returns:
        Tuple ``(mae, mse, detail)``: mean absolute error, root mean squared
        error, and a per-image text report.
    """
    timer = Timer()
    timer.tic()
    net.eval()
    mae = 0.0
    mse = 0.0
    detail = ''
    if save_output:
        print(save_path)
    # Original code did ``len(gpus)`` unconditionally, crashing when gpus
    # kept its default of None; fall back to 1 in that case.
    num_gpus = len(gpus) if gpus else 1
    for i, blob in enumerate(data_loader.get_loader(test_batch_size)):
        if (i * num_gpus + 1) % 100 == 0:
            print("testing %d" % (i + 1))
        if save_output:
            index, fname, data, mask, gt_dens, gt_count = blob
        else:
            index, fname, data, mask, gt_count = blob
        with torch.no_grad():
            dens = net(data)
        if save_output:
            # Undo ImageNet mean/std normalization before writing the image.
            image = data.squeeze_().mul_(torch.Tensor([0.229, 0.224, 0.225]).view(3, 1, 1))\
                .add_(torch.Tensor([0.485, 0.456, 0.406]).view(3, 1, 1)).data.cpu().numpy()
            dgen.save_image(image.transpose((1, 2, 0)) * 255.0, save_path, fname[0].split('.')[0] + "_0_img.png")
            gt_dens = gt_dens.data.cpu().numpy()
            density_map = dens.data.cpu().numpy()
            dgen.save_density_map(gt_dens.squeeze(), save_path, fname[0].split('.')[0] + "_1_gt.png")
            dgen.save_density_map(density_map.squeeze(), save_path, fname[0].split('.')[0] + "_2_et.png")
            # (removed dead code: the original also computed gt_dens.sum()
            # into an unused local before this del)
            del gt_dens
        gt_count = gt_count.item()
        et_count = dens.sum().item()
        # Free the large tensors eagerly to keep peak memory down.
        del data, dens
        detail += "index: {}; fname: {}; gt: {}; et: {};\n".format(i, fname[0].split('.')[0], gt_count, et_count)
        mae += abs(gt_count - et_count)
        mse += (gt_count - et_count) * (gt_count - et_count)
    mae = mae / len(data_loader)
    mse = np.sqrt(mse / len(data_loader))
    duration = timer.toc(average=False)
    print("testing time: %d" % duration)
    return mae, mse, detail
def test_model_patches(net, data_loader, save_output=False, save_path=None, test_fixed_size=-1, test_batch_size=1, gpus=None):
timer = Timer()
timer.tic()
net.eval()
mae = 0.0
mse = 0.0
detail = ''
if save_output:
print save_path
for i, blob in enumerate(data_loader.get_loader(1)):
if (i + 1) % 10 == 0:
print "testing %d" % (i + 1)
if save_output:
index, fname, data, mask, gt_dens, gt_count = blob
else:
index, fname, data, mask, gt_count = blob
data = data.squeeze_()
if len(data.shape) == 3:
'image small than crop size'
data = data.unsqueeze_(dim=0)
mask = mask.squeeze_()
num_patch = len(data)
batches = zip([i * test_batch_size for i in range(num_patch // test_batch_size + int(num_patch % test_batch_size != 0))],
[(i + 1) * test_batch_size for i in range(num_patch // test_batch_size)] + [num_patch])
with torch.no_grad():
dens_patch = []
for batch in batches:
bat = data[slice(*batch)]
dens = net(bat).cpu()
dens_patch += [dens]
if args.test_fixed_size != -1:
H, W = mask.shape
_, _, fixed_size = data[0].shape
assert args.test_fixed_size == fixed_size
density_map = torch.zeros((H, W))
for dens_slice, (x, y) in zip(itertools.chain(*dens_patch), itertools.product(range(W / fixed_size), range(H / fixed_size))):
density_map[y * fixed_size:(y + 1) * fixed_size, x * fixed_size:(x + 1) * fixed_size] = dens_slice
H = mask.sum(dim=0).max().item()
W = mask.sum(dim=1).max().item()
density_map = density_map.masked_select(mask).view(H, W)
else:
density_map = dens_patch[0]
gt_count = gt_count.item()