Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
Update app.py
Browse files
app.py
CHANGED
|
@@ -4,11 +4,12 @@ from kornia.core import Tensor
|
|
| 4 |
from kornia.contrib import ImageStitcher
|
| 5 |
import kornia.feature as KF
|
| 6 |
import torch
|
|
|
|
| 7 |
|
| 8 |
-
def inference(file_1, file_2):
|
| 9 |
-
img_1: Tensor = K.io.load_image(file_1
|
| 10 |
img_1 = img_1[None] # 1xCxHxW / fp32 / [0, 1]
|
| 11 |
-
img_2: Tensor = K.io.load_image(file_2
|
| 12 |
img_2 = img_2[None] # 1xCxHxW / fp32 / [0, 1]
|
| 13 |
|
| 14 |
IS = ImageStitcher(KF.LoFTR(pretrained='outdoor'), estimator='ransac')
|
|
@@ -16,34 +17,20 @@ def inference(file_1, file_2):
|
|
| 16 |
result = IS(img_1, img_2)
|
| 17 |
|
| 18 |
return K.tensor_to_image(result[0])
|
| 19 |
-
|
| 20 |
|
| 21 |
examples = [
|
| 22 |
-
['examples/foto1B.jpg',
|
| 23 |
-
'examples/foto1A.jpg'],
|
| 24 |
-
]
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
inputs = [
|
| 28 |
-
gr.inputs.Image(type='file', label='Input Image'),
|
| 29 |
-
gr.inputs.Image(type='file', label='Input Image'),
|
| 30 |
]
|
| 31 |
|
| 32 |
-
|
| 33 |
-
gr.
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
examples=examples,
|
| 45 |
-
cache_examples=True,
|
| 46 |
-
live=True,
|
| 47 |
-
theme='huggingface',
|
| 48 |
-
)
|
| 49 |
-
demo_app.launch()
|
|
|
|
| 4 |
from kornia.contrib import ImageStitcher
|
| 5 |
import kornia.feature as KF
|
| 6 |
import torch
|
| 7 |
+
import numpy as np
|
| 8 |
|
| 9 |
+
def inference(file_1, file_2):
    """Stitch two images into a single panorama.

    Uses LoFTR (pretrained 'outdoor' weights) for feature matching and
    RANSAC for homography estimation via kornia's ImageStitcher.

    Args:
        file_1: path to the first input image.
        file_2: path to the second input image.

    Returns:
        The stitched panorama as an HxWxC numpy image
        (via ``K.tensor_to_image``).
    """
    # Load as RGB float32 in [0, 1], then add the batch dim -> 1xCxHxW.
    img_1: Tensor = K.io.load_image(file_1, K.io.ImageLoadType.RGB32)[None]
    img_2: Tensor = K.io.load_image(file_2, K.io.ImageLoadType.RGB32)[None]

    stitcher = ImageStitcher(KF.LoFTR(pretrained='outdoor'), estimator='ransac')

    # Pure inference: disable autograd bookkeeping to save memory/compute.
    # NOTE(review): the diff rendering hides one original line between the
    # stitcher construction and its call — confirm nothing else ran there.
    with torch.inference_mode():
        result = stitcher(img_1, img_2)

    return K.tensor_to_image(result[0])
|
|
|
|
| 20 |
|
| 21 |
# Sample image pair pre-loaded into the UI (paths relative to the app root).
examples = [
    ['examples/foto1B.jpg', 'examples/foto1A.jpg'],
]
|
| 24 |
|
| 25 |
+
# Gradio UI: two input images side by side, a stitch button, and the result.
with gr.Blocks(theme='huggingface') as demo_app:
    gr.Markdown("# Image Stitching using Kornia and LoFTR")
    with gr.Row():
        # type='filepath' is required: inference() calls K.io.load_image,
        # which expects a path on disk. The gr.Image default (type='numpy')
        # would pass an ndarray and crash on every click.
        input_image1 = gr.Image(type='filepath', label="Input Image 1")
        input_image2 = gr.Image(type='filepath', label="Input Image 2")
    # NOTE(review): original indentation was lost in the diff rendering —
    # the output may have sat inside the Row; placed below it here.
    output_image = gr.Image(label="Output Image")
    stitch_button = gr.Button("Stitch Images")
    stitch_button.click(fn=inference, inputs=[input_image1, input_image2], outputs=output_image)
    gr.Examples(examples=examples, inputs=[input_image1, input_image2])

if __name__ == "__main__":
    demo_app.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|