Spaces:
Running
on
Zero
Running
on
Zero
Ahsen Khaliq
committed on
Commit
·
fa3cf6d
1
Parent(s):
7578aa8
switch versions
Browse files
app.py
CHANGED
|
@@ -1,6 +1,7 @@
|
|
| 1 |
import os
|
| 2 |
|
| 3 |
os.system("wget https://github.com/Sxela/ArcaneGAN/releases/download/v0.3/ArcaneGANv0.3.jit")
|
|
|
|
| 4 |
os.system("pip -qq install facenet_pytorch")
|
| 5 |
|
| 6 |
|
|
@@ -116,12 +117,6 @@ def proc_pil_img(input_image, model):
|
|
| 116 |
output_image = PIL.Image.fromarray(output_image)
|
| 117 |
return output_image
|
| 118 |
|
| 119 |
-
|
| 120 |
-
|
| 121 |
-
model_path = './ArcaneGANv0.3.jit'
|
| 122 |
-
|
| 123 |
-
model = torch.jit.load(model_path,map_location='cpu').to('cpu').float().eval().cpu()
|
| 124 |
-
|
| 125 |
def fit(img,maxsize=512):
|
| 126 |
maxdim = max(*img.size)
|
| 127 |
if maxdim>maxsize:
|
|
@@ -131,9 +126,11 @@ def fit(img,maxsize=512):
|
|
| 131 |
img = img.resize(size)
|
| 132 |
return img
|
| 133 |
|
| 134 |
-
|
| 135 |
-
|
| 136 |
-
|
|
|
|
|
|
|
| 137 |
im = scale_by_face_size(im, target_face=300, max_res=1_500_000, max_upscale=2)
|
| 138 |
res = proc_pil_img(im, model)
|
| 139 |
return res
|
|
@@ -144,7 +141,8 @@ article = "<div style='text-align: center;'>ArcaneGan by <a href='https://twitte
|
|
| 144 |
|
| 145 |
gr.Interface(
|
| 146 |
process,
|
| 147 |
-
gr.inputs.Image(type="pil", label="Input",shape=(256,256)),
|
|
|
|
| 148 |
gr.outputs.Image(type="pil", label="Output"),
|
| 149 |
title=title,
|
| 150 |
description=description,
|
|
|
|
import os

# Fetch the TorchScript checkpoints once; skip when the file is already on
# disk so Space restarts don't re-download ~300 MB each time.
if not os.path.exists("ArcaneGANv0.3.jit"):
    os.system("wget https://github.com/Sxela/ArcaneGAN/releases/download/v0.3/ArcaneGANv0.3.jit")
if not os.path.exists("ArcaneGANv0.2.jit"):
    os.system("wget https://github.com/Sxela/ArcaneGAN/releases/download/v0.2/ArcaneGANv0.2.jit")
# NOTE(review): runtime pip install via os.system is fragile — prefer listing
# facenet_pytorch in requirements.txt; kept for behavior compatibility.
os.system("pip -qq install facenet_pytorch")
|
|
|
|
| 117 |
output_image = PIL.Image.fromarray(output_image)
|
| 118 |
return output_image
|
| 119 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 120 |
def fit(img,maxsize=512):
|
| 121 |
maxdim = max(*img.size)
|
| 122 |
if maxdim>maxsize:
|
|
|
|
| 126 |
img = img.resize(size)
|
| 127 |
return img
|
| 128 |
|
| 129 |
# Checkpoint path per UI choice; any value other than 'version 0.3' falls
# back to v0.2, matching the original if/else.
_MODEL_PATHS = {
    'version 0.3': './ArcaneGANv0.3.jit',
    'version 0.2': './ArcaneGANv0.2.jit',
}
# Loaded models, keyed like _MODEL_PATHS — torch.jit.load on every request
# was the dominant per-call cost, so each checkpoint is deserialized once.
_MODEL_CACHE = {}

def process(im, version):
    """Stylize a PIL image with the ArcaneGAN checkpoint for *version*.

    Args:
        im: input PIL.Image.
        version: 'version 0.3' selects the v0.3 checkpoint; any other value
            (e.g. 'version 0.2') selects v0.2.

    Returns:
        The stylized PIL.Image produced by proc_pil_img.
    """
    key = version if version == 'version 0.3' else 'version 0.2'
    model = _MODEL_CACHE.get(key)
    if model is None:
        # map_location='cpu' already places all tensors on CPU; the original
        # redundant .to('cpu')....cpu() chain is collapsed.
        model = torch.jit.load(_MODEL_PATHS[key], map_location='cpu').float().eval().cpu()
        _MODEL_CACHE[key] = model
    im = scale_by_face_size(im, target_face=300, max_res=1_500_000, max_upscale=2)
    res = proc_pil_img(im, model)
    return res
|
|
|
|
| 141 |
|
| 142 |
gr.Interface(
|
| 143 |
process,
|
| 144 |
+
[gr.inputs.Image(type="pil", label="Input",shape=(256,256)),gr.inputs.Radio(choices=['version 0.2','version 0.3'], type="value", default='version 0.3', label='version')
|
| 145 |
+
],
|
| 146 |
gr.outputs.Image(type="pil", label="Output"),
|
| 147 |
title=title,
|
| 148 |
description=description,
|