Aaron Vattay committed on
Commit
cb3141b
·
1 Parent(s): 6e2ca06

Add application file

Browse files
Files changed (2) hide show
  1. app.py +46 -0
  2. upscaling.pth +3 -0
app.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import torch.nn as nn
3
+ from PIL import Image
4
+ from torchvision import transforms
5
+ import os
6
+ import gradio as gr
7
# Pick the best available accelerator, falling back to CPU so the app
# also runs on machines without CUDA or Apple-silicon (MPS) support.
# The original hardcoded "mps", which raises on non-Apple hardware.
if torch.cuda.is_available():
    device = torch.device("cuda")
elif torch.backends.mps.is_available():
    device = torch.device("mps")
else:
    device = torch.device("cpu")
8
class UPSC(nn.Module):
    """Single-image super-resolution network (ESPCN-style sub-pixel conv).

    Two feature-extraction convolutions are followed by a convolution that
    expands the channel count to ``3 * scale**2``; ``nn.PixelShuffle`` then
    rearranges those channels into spatial dimensions, producing an RGB
    output ``scale`` times larger than the input.

    Args:
        scale: Upscaling factor. Defaults to 3, matching the pretrained
            ``upscaling.pth`` weights; other values build a fresh model.
    """

    def __init__(self, scale: int = 3):
        super().__init__()
        self.model = nn.Sequential(
            # Feature extraction on the 3-channel RGB input.
            nn.Conv2d(in_channels=3, out_channels=64, kernel_size=5, padding=2),
            nn.ReLU(),
            nn.Conv2d(in_channels=64, out_channels=32, kernel_size=3, padding=1),
            nn.ReLU(),
            # This convolution outputs scale**2 * 3 channels so that
            # PixelShuffle can rearrange them into spatial dimensions.
            nn.Conv2d(in_channels=32, out_channels=3 * scale * scale,
                      kernel_size=3, padding=1),
            nn.PixelShuffle(scale),
        )

    def forward(self, x):
        """Upscale a (N, 3, H, W) tensor to (N, 3, scale*H, scale*W)."""
        return self.model(x)
23
+
24
# Build the network on the chosen device and restore the trained weights.
# weights_only=True keeps torch.load from unpickling arbitrary objects.
model = UPSC().to(device)
state_dict = torch.load("upscaling.pth", weights_only=True)
model.load_state_dict(state_dict)
model.eval()  # inference mode: no dropout/batch-norm updates
27
+
28
def task(img):
    """Upscale a PIL image with the pretrained super-resolution model.

    Args:
        img: Input PIL image (any mode; normalized to RGB below).

    Returns:
        A PIL image produced by the network from a 256x256 RGB input.
    """
    # The conv stack expects exactly 3 channels, but Gradio may deliver
    # RGBA (PNG with alpha) or grayscale images; without this conversion
    # ToTensor() yields 4 or 1 channels and the first Conv2d raises.
    img = img.convert("RGB")

    preprocess = transforms.Compose([
        transforms.Resize((256, 256)),  # match training input size
        transforms.ToTensor(),
    ])

    # Add a batch dimension and move to the model's device.
    lr_tensor = preprocess(img).unsqueeze(0).to(device)

    with torch.no_grad():  # inference only; skip gradient bookkeeping
        sr_tensor = model(lr_tensor)

    # Remove batch dimension, clamp to the valid [0, 1] range,
    # and convert back to PIL for Gradio to display.
    return transforms.ToPILImage()(sr_tensor.squeeze(0).clamp(0, 1))
42
# Build and launch the Gradio UI: one image in, one upscaled image out.
demo = gr.Interface(
    fn=task,
    inputs=gr.Image(type="pil"),
    outputs=gr.Image(type="pil"),
    title="Image Upscaling",
    description="Upload an image to upscale it using a neural network.",
)
demo.launch(share=True)
upscaling.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a8b27d159a451b1fac7efc1d1e3b2828dfafeea2695d344249df6a4cbf312f1b
3
+ size 127260