Amould committed on
Commit
01b1eda
·
verified ·
1 Parent(s): af4e700

Update codes.py

Browse files
Files changed (1) hide show
  1. codes.py +59 -0
codes.py CHANGED
@@ -88,6 +88,65 @@ def load_image_pil_accelerated(image_path, dim=128):
88
  return tensor
89
 
90
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91
  def preprocess_image(image_path, dim = 128):
92
  img = torch.zeros([1,3,dim,dim])
93
  img[0] = load_image_pil_accelerated(image_path, dim)
 
88
  return tensor
89
 
90
 
91
def workaround_matrix(Affine_mtrx0, acc=2):
    """Convert between a correct affine matrix and its torch-compatible form.

    Inverts the batch of matrices via ``inv_AM`` and scales the translation
    column by ``acc``.  Use ``acc=2`` to obtain the torch-compatible matrix
    from a correct one (needed for transforming an image); use ``acc=0.5``
    to recover the correct matrix from a torch-compatible one.
    """
    adjusted = inv_AM(Affine_mtrx0)
    # Only the translation column (index 2) is rescaled; the linear part stays.
    adjusted[:, :, 2] = adjusted[:, :, 2] * acc
    return adjusted
97
+
98
def standarize_point(d, dim=128, flip=False):
    """Map a pixel coordinate into the normalized [-0.5, 0.5) range.

    The coordinate is optionally negated first (``flip=True``), then divided
    by the image size ``dim`` and shifted down by 0.5.
    """
    value = -d if flip else d
    return value / dim - 0.5
102
+
103
def destandarize_point(d, dim=128, flip=False):
    """Map a normalized coordinate back to pixel units.

    Optionally negates the input first (``flip=True``), then shifts it up by
    0.5 and rescales by the image size ``dim``.
    """
    value = -d if flip else d
    return dim * (value + 0.5)
107
+
108
def generate_standard_elips(N_samples=100, a=1, b=1):
    """Sample points tracing an ellipse centred at 0 with base radius 0.25.

    ``a`` and ``b`` scale the x- and y-extent respectively.  Roughly half the
    points come from the upper arc (x ascending) and the rest from the lower
    arc (x descending), so the concatenated sequence runs around the contour.
    Returns a pair of 1-D tensors ``(x, y)`` of total length ``N_samples``.

    NOTE(review): for a < 1 the sqrt argument can go negative near the ends,
    yielding NaNs — confirm callers keep a >= 1.
    """
    radius = 0.25
    center = 0
    n_upper = int(N_samples / 2 - 1)
    n_lower = N_samples - n_upper
    # Upper arc: x ascending, positive sqrt branch.
    upper_x = torch.sort(
        torch.distributions.uniform.Uniform(center - radius, center + radius)
        .sample([n_upper])).values
    upper_y = center + b * torch.sqrt(radius ** 2 - ((upper_x - center) / a) ** 2)
    # Lower arc: x descending, negative sqrt branch, closing the contour.
    lower_x = torch.sort(
        torch.distributions.uniform.Uniform(center - radius, center + radius)
        .sample([n_lower]), descending=True).values
    lower_y = center - b * torch.sqrt(radius ** 2 - ((lower_x - center) / a) ** 2)
    return torch.cat([upper_x, lower_x]), torch.cat([upper_y, lower_y])
122
+
123
def transform_standard_points(Affine_mat, x, y):
    """Apply an affine matrix to a set of 2-D points.

    Stacks the coordinates into homogeneous form (x, y, 1), multiplies by
    ``Affine_mat`` (moved to CPU and detached first), and returns the
    transformed x and y rows as a tuple of tensors.
    """
    homogeneous = torch.ones([3, x.shape[0]])
    homogeneous[0, :] = x
    homogeneous[1, :] = y
    matrix = Affine_mat.to('cpu').detach()
    transformed = torch.matmul(matrix, homogeneous)
    return transformed[0], transformed[1]
131
+
132
def wrap_points(img, x_source, y_source, l=1):
    """Mark each point on the image with a black square, in place.

    For every (x, y) pair a square of half-width ``l`` is zeroed in the NCHW
    tensor ``img``; ``x_source`` indexes dim 2 and ``y_source`` dim 3 —
    NOTE(review): confirm this row/column convention matches the callers.
    Returns the (mutated) image tensor.

    Fix: the slice start is clamped to 0.  Previously ``x0 - l`` could go
    negative for border points, and Python's negative-index wrap-around made
    the slice empty (or select the wrong region), so those points were never
    painted.
    """
    for i in range(len(y_source)):
        x0 = int(x_source[i])
        y0 = int(y_source[i])
        # max(..., 0) keeps slice starts non-negative; the upper bound may
        # exceed the image size safely (Python slices clip it).
        img[:, :, max(x0 - l, 0):x0 + l, max(y0 - l, 0):y0 + l] = 0
    return img
138
+
139
+
140
def wrap_imge_cropped(Affine_mtrx, source_img, dim1=224, dim2=128):
    """Warp ``source_img`` with ``Affine_mtrx``, padding then center-cropping.

    The image is zero-padded from ``dim2`` up to ``dim1`` so content pushed
    outward by the warp is not lost, resampled bilinearly through an affine
    grid, and finally center-cropped back to ``dim2`` x ``dim2``.
    """
    pad_width = int((dim1 - dim2) / 2)
    padded = torch.nn.ZeroPad2d(pad_width)(source_img)
    sampling_grid = torch.nn.functional.affine_grid(
        Affine_mtrx, size=padded.shape, align_corners=False)
    warped = torch.nn.functional.grid_sample(
        padded, grid=sampling_grid,
        mode='bilinear', padding_mode='zeros', align_corners=False)
    # Crop back to the working resolution around the centre.
    return torchvision.transforms.CenterCrop((dim2, dim2))(warped)
147
+
148
+
149
+
150
  def preprocess_image(image_path, dim = 128):
151
  img = torch.zeros([1,3,dim,dim])
152
  img[0] = load_image_pil_accelerated(image_path, dim)