diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..a5e35336b7d5c4032f9a0dc602e2612a7c43f1c8 Binary files /dev/null and b/.DS_Store differ diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..4dde94dc4bd58a2455d0753f45d20915fe1644d2 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,19 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +background.jpg filter=lfs diff=lfs merge=lfs -text +flagged/image/5ad4b2887a6e21ed21ad368cb12b06241a8c06b4/tmpir4uhzvh.jpg filter=lfs diff=lfs merge=lfs -text +flagged/image/c60b52ea5b844eb28fc764fc77bee9bd47ce39a9/tmp_dno9712.jpg filter=lfs diff=lfs merge=lfs -text +flagged/image/fc61756a7b0971534c5b49be71caf17d819f35ad/tmpv0t201p6.jpg filter=lfs diff=lfs merge=lfs -text +flagged/output/68a659c360681de00197f50432d89fbd98efdefc/tmp_cajstfi.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:34:30[[:space:]]@2021-02-03/144/test/mark_new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:34:30[[:space:]]@2021-02-03/144/test/new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:52:58[[:space:]]@2021-02-03/144/test/mark_new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:52:58[[:space:]]@2021-02-03/144/test/new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:55:35[[:space:]]@2021-02-03/144/test/mark_new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:55:35[[:space:]]@2021-02-03/144/test/new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:56:55[[:space:]]@2021-02-03/144/test/mark_new_image_1.png filter=lfs diff=lfs merge=lfs -text 
+flat/2023-07-13/2023-07-13[[:space:]]18:56:55[[:space:]]@2021-02-03/144/test/new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:59:53[[:space:]]@2021-02-03/144/test/mark_new_image_1.png filter=lfs diff=lfs merge=lfs -text +flat/2023-07-13/2023-07-13[[:space:]]18:59:53[[:space:]]@2021-02-03/144/test/new_image_1.png filter=lfs diff=lfs merge=lfs -text +testdata/new_image_1.jpg filter=lfs diff=lfs merge=lfs -text diff --git a/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl b/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl new file mode 100644 index 0000000000000000000000000000000000000000..12af13be4d991fc52c4afae20f025c809414706a --- /dev/null +++ b/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2afacdaa13ab08049326e835064ecf6ae6873a10ecc3e2b8d4b4aa486e9b3c20 +size 159331175 diff --git a/ICDAR2021/2021-02-03 16:15:55/143/model_parameter b/ICDAR2021/2021-02-03 16:15:55/143/model_parameter new file mode 100644 index 0000000000000000000000000000000000000000..739eac7750ae92645dc7ee131b51a4d746a2a7b7 --- /dev/null +++ b/ICDAR2021/2021-02-03 16:15:55/143/model_parameter @@ -0,0 +1,5 @@ +Download the model parameters from the link below: + +Google: https://drive.google.com/file/d/1rKS03wHMoAXp8uR1h3z7NAVoBY84NZlp/view?usp=sharing + +Baidu: https://pan.baidu.com/s/1eSoiE3LoSQmTIL-SpbzPRw?pwd=rngi password: rngi diff --git a/README.md b/README.md index 9ae39dbf56f47efee0a313dbd3bafd09a8ace075..933a964a9ec2f05b12d6b5fc5e85572d62b47db8 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,56 @@ --- title: Source -emoji: 🏢 -colorFrom: red -colorTo: purple +app_file: demo.py sdk: gradio sdk_version: 3.40.1 -app_file: app.py -pinned: false --- +# Description +Due to work, more information will be released. 
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +# Performance +**Execution Time when getting a 1024x960 corrected image.** +| Method | Time | +| ---- | ---- | +| TPS | - | +| Interpolation | - | + + +# Testing +1、Download model parameter and source codes + +2、Resize the input image into 992x992 + +3、Running + +- In GPU 0: +`python test.py --data_path_test=./your/test/data/path/ --parallel 0 --schema test --batch_size 1` + +- In CPU: +`python test.py --data_path_test=./your/test/data/path/ --parallel None --schema test --batch_size 1` + +# Training +a) Download training data in [here](https://github.com/gwxie/Document-Dewarping-with-Control-Points/tree/main/Source/dataset/fiducial1024). + +b) Run `python train.py --data_path_train=./your/train/data/path/ --data_path_validate=./your/validate/data/path/ --data_path_test=./your/test/data/path/ --batch_size 32 --schema train --parallel 01` + +# Use your Dataset +The training dataset can be synthesised using the [scripts](https://github.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points). + +# Q&A +1. Q:How to adjust the output image resolution? +A: Adjusting the ‘flat_shap’ or 'output_shape' in utilsV4.py. +https://github.com/gwxie/Document-Dewarping-with-Control-Points/blob/4c74853b0eb93f7c6006a774c2eb42c64f363531/Source/utilsV4.py#L99 +or +https://github.com/gwxie/Document-Dewarping-with-Control-Points/blob/4c74853b0eb93f7c6006a774c2eb42c64f363531/Source/utilsV4.py#L162 + +2. Q:How to open '*.gw' files? +https://github.com/gwxie/Document-Dewarping-with-Control-Points/blob/0f4e9ac577fb001a719fb63b05cfa915fe3c9866/Source/dataloader.py#L146 +'im' is input images; 'lbl' is control points; 'segment' is the intervals of points between the horizontal and vertical directions; + +3. Q:How to train the model again with 61 points? 
+https://github.com/gwxie/Document-Dewarping-with-Control-Points/blob/1ad92be2995ee7f4ecdd04157350f2b44ecbd7e9/Source/dataloader.py#L185-L186 +https://github.com/gwxie/Document-Dewarping-with-Control-Points/blob/1ad92be2995ee7f4ecdd04157350f2b44ecbd7e9/Source/dataloader.py#L74-L75 +set self.col_gap = 0; self.row_gap = 0 diff --git a/__pycache__/dataloader.cpython-39.pyc b/__pycache__/dataloader.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c600a2a5c6a714d6fd0fad049f0998d9381a60ef Binary files /dev/null and b/__pycache__/dataloader.cpython-39.pyc differ diff --git a/__pycache__/network.cpython-39.pyc b/__pycache__/network.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2573b4bf425fdf18b53ec68cebb96336d8ebdece Binary files /dev/null and b/__pycache__/network.cpython-39.pyc differ diff --git a/__pycache__/test.cpython-39.pyc b/__pycache__/test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6044f933e712d7357f40fceb1244b19fefdcaca6 Binary files /dev/null and b/__pycache__/test.cpython-39.pyc differ diff --git a/__pycache__/tpsV2.cpython-39.pyc b/__pycache__/tpsV2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..235f06423362d4f8be2efb4d4536b4eca3747d5c Binary files /dev/null and b/__pycache__/tpsV2.cpython-39.pyc differ diff --git a/__pycache__/utilsV4.cpython-39.pyc b/__pycache__/utilsV4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88220d2412c298e367eb2e6be61aa7a9765134d0 Binary files /dev/null and b/__pycache__/utilsV4.cpython-39.pyc differ diff --git a/background.jpg b/background.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cdba78d51e68e99d918d2cd54e4fff3c480061cf --- /dev/null +++ b/background.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb445003762f9445e95790e189dac9d1785d4915eecb155fccb70d8b23bafac5 +size 4915452 diff --git 
a/css.css b/css.css new file mode 100644 index 0000000000000000000000000000000000000000..f64dc4d79fdecc536050323d2e004eb53a23116a --- /dev/null +++ b/css.css @@ -0,0 +1,3 @@ +body { + background-color: #bd1414 !important; +} diff --git a/dataloader.py b/dataloader.py new file mode 100644 index 0000000000000000000000000000000000000000..a4a48d48002a781a94fa362fd1e2ccbbc2ca34db --- /dev/null +++ b/dataloader.py @@ -0,0 +1,189 @@ +import os +import pickle +from os.path import join as pjoin +import collections +import json +import torch +import numpy as np + +import re +import cv2 + +from torch.utils import data + + +def get_data_path(name): + """Extract path to data from config file. + + Args: + name (str): The name of the dataset. + + Returns: + (str): The path to the root directory containing the dataset. + """ + with open('../xgw/segmentation/config.json') as f: + js = f.read() + # js = open('config.json').read() + data = json.loads(js) + return os.path.expanduser(data[name]['data_path']) + +def getDatasets(dir): + return os.listdir(dir) +''' +Resize the input image into 1024x960 (zooming in or out along the longest side and keeping the aspect ration, then filling zero for padding. 
) +''' +def resize_image(origin_img, long_edge=1024, short_edge=960): + # long_edge, short_edge = 2048, 1920 + # long_edge, short_edge = 1024, 960 + # long_edge, short_edge = 512, 480 + + im_lr = origin_img.shape[0] + im_ud = origin_img.shape[1] + new_img = np.zeros([long_edge, short_edge, 3], dtype=np.uint8) + new_shape = new_img.shape[:2] + if im_lr > im_ud: + img_shrink, base_img_shrink = long_edge, long_edge + im_ud = int(im_ud / im_lr * base_img_shrink) + im_ud += 32-im_ud%32 + im_ud = min(im_ud, short_edge) + im_lr = img_shrink + origin_img = cv2.resize(origin_img, (im_ud, im_lr), interpolation=cv2.INTER_CUBIC) + new_img[:, (new_shape[1]-im_ud)//2:new_shape[1]-(new_shape[1]-im_ud)//2] = origin_img + # mask = np.full(new_shape, 255, dtype='uint8') + # mask[:, (new_shape[1] - im_ud) // 2:new_shape[1] - (new_shape[1] - im_ud) // 2] = 0 + else: + img_shrink, base_img_shrink = short_edge, short_edge + im_lr = int(im_lr / im_ud * base_img_shrink) + im_lr += 32-im_lr%32 + im_lr = min(im_lr, long_edge) + im_ud = img_shrink + origin_img = cv2.resize(origin_img, (im_ud, im_lr), interpolation=cv2.INTER_CUBIC) + new_img[(new_shape[0] - im_lr) // 2:new_shape[0] - (new_shape[0] - im_lr) // 2, :] = origin_img + return new_img + +class PerturbedDatastsForFiducialPoints_pickle_color_v2_v2(data.Dataset): + def __init__(self, root, split='1-1', img_shrink=None, is_return_img_name=False, preproccess=False): + self.root = os.path.expanduser(root) + self.split = split + self.img_shrink = img_shrink + self.is_return_img_name = is_return_img_name + self.preproccess = preproccess + # self.mean = np.array([104.00699, 116.66877, 122.67892]) + self.images = collections.defaultdict(list) + self.labels = collections.defaultdict(list) + self.row_gap = 1 # value:0, 1, 2; POINTS NUM: 61, 31, 21 + self.col_gap = 1 + datasets = ['validate', 'test', 'train'] + + if self.split == 'test' or self.split == 'eval': + img_file_list = getDatasets(os.path.join(self.root)) + self.images[self.split] = 
img_file_list + # self.images[self.split] = sorted(img_file_list, key=lambda num: ( + # int(re.match(r'(\d+)_(\d+)( copy.png)', num, re.IGNORECASE).group(1)), int(re.match(r'(\d+)_(\d+)( copy.png)', num, re.IGNORECASE).group(2)))) + elif self.split in datasets: + img_file_list = [] + img_file_list_ = getDatasets(os.path.join(self.root, 'color')) + for id_ in img_file_list_: + img_file_list.append(id_.rstrip()) + + self.images[self.split] = sorted(img_file_list, key=lambda num: ( + re.match(r'(\w+\d*)_(\d+)_(\d+)_(\w+)', num, re.IGNORECASE).group(1), int(re.match(r'(\w+\d*)_(\d+)_(\d+)_(\w+)', num, re.IGNORECASE).group(2)) + , int(re.match(r'(\w+\d*)_(\d+)_(\d+)_(\w+)', num, re.IGNORECASE).group(3)), re.match(r'(\w+\d*)_(\d+)_(\d+)_(\w+)', num, re.IGNORECASE).group(4))) + else: + raise Exception('load data error') + # self.checkImg() + + def checkImg(self): + if self.split == 'validate': + for im_name in self.images[self.split]: + # if 'SinglePage' in im_name: + im_path = pjoin(self.root, self.split, 'color', im_name) + try: + with open(im_path, 'rb') as f: + perturbed_data = pickle.load(f) + + im_shape = perturbed_data.shape + except: + print(im_name) + # os.remove(im_path) + + def __len__(self): + return len(self.images[self.split]) + + def __getitem__(self, item): + if self.split == 'test': + im_name = self.images[self.split][item] + im_path = pjoin(self.root, im_name) + + im = cv2.imread(im_path, flags=cv2.IMREAD_COLOR) + + im = self.resize_im(im) + im = self.transform_im(im) + + if self.is_return_img_name: + return im, im_name + return im + elif self.split == 'eval': + im_name = self.images[self.split][item] + im_path = pjoin(self.root, im_name) + + img = cv2.imread(im_path, flags=cv2.IMREAD_COLOR) + + im = self.resize_im(img) + im = self.transform_im(im) + + if self.is_return_img_name: + return im, im_name + return im, img + # return im, img, im_name + + else: + im_name = self.images[self.split][item] + + im_path = pjoin(self.root, 'color', im_name) + + with 
open(im_path, 'rb') as f: + perturbed_data = pickle.load(f) + + im = perturbed_data.get('image') + lbl = perturbed_data.get('fiducial_points') + segment = perturbed_data.get('segment') + + im = self.resize_im(im) + im = im.transpose(2, 0, 1) + + lbl = self.resize_lbl(lbl) + lbl, segment = self.fiducal_points_lbl(lbl, segment) + lbl = lbl.transpose(2, 0, 1) + + im = torch.from_numpy(im) + lbl = torch.from_numpy(lbl).float() + segment = torch.from_numpy(segment).float() + + if self.is_return_img_name: + return im, lbl, segment, im_name + + return im, lbl, segment + + def transform_im(self, im): + im = im.transpose(2, 0, 1) + im = torch.from_numpy(im).float() + + return im + + def resize_im(self, im): + im = cv2.resize(im, (992, 992), interpolation=cv2.INTER_LINEAR) + # im = cv2.resize(im, (496, 496), interpolation=cv2.INTER_LINEAR) + return im + + def resize_lbl(self, lbl): + lbl = lbl/[960, 1024]*[992, 992] + # lbl = lbl/[960, 1024]*[496, 496] + return lbl + + def fiducal_points_lbl(self, fiducial_points, segment): + + fiducial_point_gaps = [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 30, 60] # POINTS NUM: 61, 31, 21, 16, 13, 11, 7, 6, 5, 4, 3, 2 + fiducial_points = fiducial_points[::fiducial_point_gaps[self.row_gap], ::fiducial_point_gaps[self.col_gap], :] + segment = segment * [fiducial_point_gaps[self.col_gap], fiducial_point_gaps[self.row_gap]] + return fiducial_points, segment diff --git a/dataset/.DS_Store b/dataset/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..ff1ab27826dc93923c4d92ba58ae800390ffe0f3 Binary files /dev/null and b/dataset/.DS_Store differ diff --git a/dataset/fiducial1024/README.md b/dataset/fiducial1024/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d44418961aae68b69c93869891015949b47d59b2 --- /dev/null +++ b/dataset/fiducial1024/README.md @@ -0,0 +1,11 @@ +# Download training Data + +Link: https://pan.baidu.com/s/1bZ9B7oZuxsMNRDZGz89Mbg?pwd=9fck + +Password: 9fck + + +# unzip +```bash 
+cat fiducial1024.tar.gz_0* > fiducial1024.tar.gz | tar -zxv +``` diff --git a/demo.py b/demo.py new file mode 100644 index 0000000000000000000000000000000000000000..3999b7f87fd73de25eea477feff03f759d8607e1 --- /dev/null +++ b/demo.py @@ -0,0 +1,65 @@ +import gradio as gr +from PIL import Image +import os +import subprocess +import glob + + +def get_latest_file(directory): + # 获取所有的文件和文件夹 + all_items = glob.glob(os.path.join(directory, '*')) + + # 分离文件和文件夹 + folders = [item for item in all_items if os.path.isdir(item)] + files = [item for item in all_items if os.path.isfile(item)] + + # 先检查文件夹,如果有文件夹,则返回最新的文件夹 + if folders: + latest_folder = max(folders, key=os.path.getmtime) + return latest_folder + + # 如果没有文件夹,那么检查文件,并返回最新的文件 + elif files: + latest_file = max(files, key=os.path.getmtime) + return latest_file + + # 如果没有文件和文件夹,那么返回None + else: + return None + +def save_image(image): + # 指定目录 + directory = '/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/testdata' + + # 检查目录是否存在,不存在则创建 + if not os.path.exists(directory): + os.makedirs(directory) + + # 保存图片到指定目录 + image.save(os.path.join(directory, 'new_image_1.jpg')) + +def process_image(image): + # 使用PIL.Image将numpy数组转换回图像 + pil_image = Image.fromarray(image) + + # 将图像保存到指定目录 + save_image(pil_image) + + program_path = '/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/test.py' + subprocess.run(['python3', program_path, '--data_path_test=./testdata']) + + directory_image = '/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat' # 你的目录路径 + latest_file = get_latest_file(get_latest_file(directory_image)) + image_path = f'{latest_file}/144/test/new_image_1.jpg' + new_image = Image.open(image_path) + return new_image + +iface = gr.Interface(fn=process_image, + inputs="image", + outputs="image", + title="Document dewarping platform", + description="This page recognises wrinkled documents and processes them into flat ones, 
you can upload your local images and download the processed files.", + allow_flagging="never" + ) +iface.launch(share = True) + diff --git a/flagged/image/5ad4b2887a6e21ed21ad368cb12b06241a8c06b4/tmpir4uhzvh.jpg b/flagged/image/5ad4b2887a6e21ed21ad368cb12b06241a8c06b4/tmpir4uhzvh.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ed7c52deca75755b924e330e2372289abddfb1f7 --- /dev/null +++ b/flagged/image/5ad4b2887a6e21ed21ad368cb12b06241a8c06b4/tmpir4uhzvh.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c16cad49e3d06f57cd8f137f4da0ca1d3fc20963419cef0229467bdc274fb51 +size 1111412 diff --git a/flagged/image/c60b52ea5b844eb28fc764fc77bee9bd47ce39a9/tmp_dno9712.jpg b/flagged/image/c60b52ea5b844eb28fc764fc77bee9bd47ce39a9/tmp_dno9712.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4bcecb94e4916c130a06c5405ed01feff4cfb271 --- /dev/null +++ b/flagged/image/c60b52ea5b844eb28fc764fc77bee9bd47ce39a9/tmp_dno9712.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e35e0291ba0ca3ae4f7d060c61cc79bafc6aec3a6e5de069460aef191c9f063e +size 5761738 diff --git a/flagged/image/fc61756a7b0971534c5b49be71caf17d819f35ad/tmpv0t201p6.jpg b/flagged/image/fc61756a7b0971534c5b49be71caf17d819f35ad/tmpv0t201p6.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ed7c52deca75755b924e330e2372289abddfb1f7 --- /dev/null +++ b/flagged/image/fc61756a7b0971534c5b49be71caf17d819f35ad/tmpv0t201p6.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c16cad49e3d06f57cd8f137f4da0ca1d3fc20963419cef0229467bdc274fb51 +size 1111412 diff --git a/flagged/log.csv b/flagged/log.csv new file mode 100644 index 0000000000000000000000000000000000000000..f1d70ccdcab810d6c3e1a295b01af16e78be403e --- /dev/null +++ b/flagged/log.csv @@ -0,0 +1,6 @@ +image,output,flag,username,timestamp +,,,,2023-07-12 18:59:12.763581 +/Users/lixiangfang/Desktop/Surf 
code/Document-Dewarping-with-Control-Points/Source/flagged/image/c60b52ea5b844eb28fc764fc77bee9bd47ce39a9/tmp_dno9712.jpg,/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flagged/output/68a659c360681de00197f50432d89fbd98efdefc/tmp_cajstfi.png,,,2023-07-13 18:34:59.129831 +/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flagged/image/fc61756a7b0971534c5b49be71caf17d819f35ad/tmpv0t201p6.jpg,/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flagged/output/020e114a87a9ea1d41251128026eaf06751a1803/tmpa50oe7qi.png,,,2023-08-23 22:08:00.276130 +/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flagged/image/5ad4b2887a6e21ed21ad368cb12b06241a8c06b4/tmpir4uhzvh.jpg,/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flagged/output/870c2f8157465905417c545291ec216d6c9e9d1e/tmpgkg2h7fc.png,,,2023-08-23 22:08:23.861292 +,,,,2023-08-23 22:09:54.461844 diff --git a/flagged/output/020e114a87a9ea1d41251128026eaf06751a1803/tmpa50oe7qi.png b/flagged/output/020e114a87a9ea1d41251128026eaf06751a1803/tmpa50oe7qi.png new file mode 100644 index 0000000000000000000000000000000000000000..4b55f482d983c3b1d84780e363a8078358adc674 Binary files /dev/null and b/flagged/output/020e114a87a9ea1d41251128026eaf06751a1803/tmpa50oe7qi.png differ diff --git a/flagged/output/68a659c360681de00197f50432d89fbd98efdefc/tmp_cajstfi.png b/flagged/output/68a659c360681de00197f50432d89fbd98efdefc/tmp_cajstfi.png new file mode 100644 index 0000000000000000000000000000000000000000..c589036effc6db8b87f2bbead277213cf1b78776 --- /dev/null +++ b/flagged/output/68a659c360681de00197f50432d89fbd98efdefc/tmp_cajstfi.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dbe3cff43302f3bee6fd5e38b5fc45a8f70af7862c7256318d234596408d12c5 +size 1052025 diff --git a/flagged/output/870c2f8157465905417c545291ec216d6c9e9d1e/tmpgkg2h7fc.png 
b/flagged/output/870c2f8157465905417c545291ec216d6c9e9d1e/tmpgkg2h7fc.png new file mode 100644 index 0000000000000000000000000000000000000000..4b55f482d983c3b1d84780e363a8078358adc674 Binary files /dev/null and b/flagged/output/870c2f8157465905417c545291ec216d6c9e9d1e/tmpgkg2h7fc.png differ diff --git a/flat/2023-07-11/2023-07-11 15:21:45 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-11/2023-07-11 15:21:45 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..6225db0d61c2c20779e2777b5d6990ab8c244cd4 --- /dev/null +++ b/flat/2023-07-11/2023-07-11 15:21:45 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./test1.jpg', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) diff --git a/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-11/2023-07-11 15:27:11 
@2021-02-03/144/test/test1.jpg b/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..f119c3faddef14816ddb0fd4009ebfb783ef26b6 --- /dev/null +++ b/flat/2023-07-11/2023-07-11 15:27:11 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.533 diff --git a/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03/144/test/mark_test1.jpg differ diff --git 
a/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03/144/test/test1.jpg b/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..45858e63c1e4c627c76869c85265f68d616bb887 --- /dev/null +++ b/flat/2023-07-11/2023-07-11 17:10:10 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.614 diff --git a/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-12/2023-07-12 14:53:08 
@2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03/144/test/test1.jpg b/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..6dea8bfc5895501d05daaa0b0fbffce60dc1cb68 --- /dev/null +++ b/flat/2023-07-12/2023-07-12 14:53:08 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.535 diff --git a/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and 
b/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03/144/test/test1.jpg b/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..ec5f543b9a7c17ed28e6b5e2f6b5f865da9aaff8 --- /dev/null +++ b/flat/2023-07-12/2023-07-12 14:59:01 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.410 diff --git a/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/mark_new_image_1.png b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/mark_new_image_1.png new file mode 100644 index 
0000000000000000000000000000000000000000..354723e18cb63949df1ef59edd5f54a5df1d6acb Binary files /dev/null and b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/mark_new_image_1.png differ diff --git a/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/new_image_1.png b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..f5cf462dff2879dca63ca0c7c708e2f6b338ac3a Binary files /dev/null and b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/new_image_1.png differ diff --git a/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/test1.jpg b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..58d7bc69f21ee859c720ede9cfda3586475c9a84 --- /dev/null +++ b/flat/2023-07-12/2023-07-12 19:01:14 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf 
code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.356 diff --git a/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/mark_new_image_1.png b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/mark_new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..91af6bb421318f3364a949c6c214e3fbc2bc6b9d Binary files /dev/null and b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/mark_new_image_1.png differ diff --git a/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/new_image_1.png b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..7cd77f8e72da10605a560319773831372fdefc08 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/new_image_1.png differ diff --git a/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/test1.jpg b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/test1.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..02fd85072a936d8d28f94e9c5778f2928ddbbe40 --- /dev/null +++ b/flat/2023-07-13/2023-07-13 17:09:54 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.125 diff --git a/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/mark_new_image_1.png b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/mark_new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..8d95630a3c56f8c1af69b317c2fe017079dadd4a --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/mark_new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:ff7ae9fddd3d55f728913313d55be86376582b15112babc8148a929daa8a67c2 +size 1937027 diff --git a/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/new_image_1.png b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..4efe5041a424c645547cccef60254f3ba946eb6f --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:706d85799ebc9ae3b4a33769e256d2e9cb2851fcb9c7bf6f8118b250a7d7f35f +size 1718692 diff --git a/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/test1.jpg b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..794dd9f24cbb36d939486e03fce811b717203cb4 --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:34:30 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf 
code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.794 diff --git a/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..58377aa23c8fc0c756fe982d3d94362f09f44017 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_new_image_1.png b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..8d95630a3c56f8c1af69b317c2fe017079dadd4a --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff7ae9fddd3d55f728913313d55be86376582b15112babc8148a929daa8a67c2 +size 1937027 diff --git a/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/mark_test1.jpg differ diff --git 
a/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c9437357aa11363ee0d7903ab98de2beeff560bc Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/new_image_1.png b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..4efe5041a424c645547cccef60254f3ba946eb6f --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:706d85799ebc9ae3b4a33769e256d2e9cb2851fcb9c7bf6f8118b250a7d7f35f +size 1718692 diff --git a/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/test1.jpg b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..7612f33adb3fb99a78c99df2c3c175aa2affe563 --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:52:58 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), 
data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 10.761 diff --git a/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..58377aa23c8fc0c756fe982d3d94362f09f44017 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_new_image_1.png b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..8d95630a3c56f8c1af69b317c2fe017079dadd4a --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff7ae9fddd3d55f728913313d55be86376582b15112babc8148a929daa8a67c2 +size 1937027 diff --git a/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:55:35 
@2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c9437357aa11363ee0d7903ab98de2beeff560bc Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/new_image_1.png b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..4efe5041a424c645547cccef60254f3ba946eb6f --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:706d85799ebc9ae3b4a33769e256d2e9cb2851fcb9c7bf6f8118b250a7d7f35f +size 1718692 diff --git a/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/test1.jpg b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..8614ec8ba795c29851e28292428e2adab94234d1 --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:55:35 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf 
code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.925 diff --git a/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..58377aa23c8fc0c756fe982d3d94362f09f44017 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_new_image_1.png b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..8d95630a3c56f8c1af69b317c2fe017079dadd4a --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff7ae9fddd3d55f728913313d55be86376582b15112babc8148a929daa8a67c2 +size 1937027 diff --git a/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 
index 0000000000000000000000000000000000000000..c9437357aa11363ee0d7903ab98de2beeff560bc Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/new_image_1.png b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..4efe5041a424c645547cccef60254f3ba946eb6f --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:706d85799ebc9ae3b4a33769e256d2e9cb2851fcb9c7bf6f8118b250a7d7f35f +size 1718692 diff --git a/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/test1.jpg b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..a64c473c95c2b9857d7fa81e60878ee397ba8927 --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:56:55 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', 
output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.694 diff --git a/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..58377aa23c8fc0c756fe982d3d94362f09f44017 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_new_image_1.png b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..8d95630a3c56f8c1af69b317c2fe017079dadd4a --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff7ae9fddd3d55f728913313d55be86376582b15112babc8148a929daa8a67c2 +size 1937027 diff --git a/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_test1.jpg b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..23c808e8b83e264212a1514a69d33d22201a3744 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/mark_test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c9437357aa11363ee0d7903ab98de2beeff560bc Binary files /dev/null and 
b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/new_image_1.png b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/new_image_1.png new file mode 100644 index 0000000000000000000000000000000000000000..4efe5041a424c645547cccef60254f3ba946eb6f --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/new_image_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:706d85799ebc9ae3b4a33769e256d2e9cb2851fcb9c7bf6f8118b250a7d7f35f +size 1718692 diff --git a/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/test1.jpg b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/test1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..32b6eb9e0d1b9c1a86e81132d0f61fd3baa28425 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03/144/test/test1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..3b4c2e0debecb5ecc38a7cd0dc4b799281c0b98d --- /dev/null +++ b/flat/2023-07-13/2023-07-13 18:59:53 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), 
resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.858 diff --git a/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9d29da34f43c601288e0c1846235c349b27f6c83 Binary files /dev/null and b/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9a2c79c74d40f9d6fdcb587f0a0a91e12c7e2b4d Binary files /dev/null and b/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..0a283b2cc856a0b087083275c27e0baa315e3761 --- /dev/null +++ b/flat/2023-07-13/2023-07-13 20:43:28 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', 
output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.177 diff --git a/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2bd388700d8d82584406c27b3c244c6c2c550a27 Binary files /dev/null and b/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..56cbf3cc487a0b8d3b074e539fcbb3c86d36a1d0 Binary files /dev/null and b/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..ce6599918b7bfd6587e2a767a2aa65cd1ceed6b7 --- /dev/null +++ b/flat/2023-07-25/2023-07-25 14:24:59 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf 
code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.482 diff --git a/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..429803c4967c3be260e3cbe900504fa63ca68cb5 Binary files /dev/null and b/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..131ac7248efb92f8dd6aaa1ca41de043bbeef26e Binary files /dev/null and b/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..428e4ba55414e3b24e98b0a49e2238241c0b8114 --- /dev/null +++ b/flat/2023-08-01/2023-08-01 14:22:40 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf 
code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 13.989 diff --git a/flat/2023-08-22/2023-08-22 17:15:05 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-22/2023-08-22 17:15:05 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..57347c4b5b0a5afd4ea9cfdc124fccf97529db5c Binary files /dev/null and b/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c827f941004258b7b00f18146f93b14c95e0188f Binary files /dev/null and b/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 
0000000000000000000000000000000000000000..9b3b3c68f5ad65fc3785e5057fb41ebee7a827ad --- /dev/null +++ b/flat/2023-08-22/2023-08-22 17:17:19 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.076 diff --git a/flat/2023-08-22/2023-08-22 17:19:30 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-22/2023-08-22 17:19:30 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2cf387b8aa531b2a940e31f996d94570879742c0 Binary files /dev/null and b/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..8ce2b874f12973c4b954f9c63b13c9720f114683 Binary files /dev/null and b/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..e25ee7edc9786d7618e6f53cdd8623539c6b1890 --- /dev/null +++ b/flat/2023-08-22/2023-08-22 17:20:46 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.360 diff --git a/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2a04934ad4cacc519dbee8e5dcbf6a437eda6561 Binary files /dev/null and b/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03/144/test/new_image_1.jpg 
b/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a1b1876a5e2c2157ed5b2d196a9eb036cb90dc3c Binary files /dev/null and b/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..3bb54535b4dd0fec963bbf965731cbfe5feb6a43 --- /dev/null +++ b/flat/2023-08-22/2023-08-22 17:24:42 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 9.783 diff --git a/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2cf387b8aa531b2a940e31f996d94570879742c0 Binary files /dev/null and b/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git 
a/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8ce2b874f12973c4b954f9c63b13c9720f114683 Binary files /dev/null and b/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..b51dc5df8901ab1ebf4b7af0cabae21a65efdee8 --- /dev/null +++ b/flat/2023-08-23/2023-08-23 18:48:01 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.674 diff --git a/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4403e1545477c52ea8abb4abee921ed2808e63c9 Binary files /dev/null and 
b/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ca8fc960d86129835b627f37ae465483fe1492bf Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..b1f237d8defa19a6e0212d0f2efe21e999c9ac46 --- /dev/null +++ b/flat/2023-08-23/2023-08-23 22:07:07 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 10.438 diff --git a/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..4403e1545477c52ea8abb4abee921ed2808e63c9 Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ca8fc960d86129835b627f37ae465483fe1492bf Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..0556d39871fb6c0a9b64c13d7c49f26831138e94 --- /dev/null +++ b/flat/2023-08-23/2023-08-23 22:07:19 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.175 diff --git a/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-23/2023-08-23 
22:23:19 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2cf387b8aa531b2a940e31f996d94570879742c0 Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8ce2b874f12973c4b954f9c63b13c9720f114683 Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..545fd8095bc8ef0a046859de4b5a1ddac92235a4 --- /dev/null +++ b/flat/2023-08-23/2023-08-23 22:23:19 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', parallel=None) +test time : 8.344 diff --git a/flat/2023-08-23/2023-08-23 
22:26:35 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f3338bea6afcba09947b353e2fe0a5d03296f2bb Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4354a5b3071f833552b014dfccdbd3b8ea793373 Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..775b12ed0165483f36514b6034d1946efa673ba6 --- /dev/null +++ b/flat/2023-08-23/2023-08-23 22:26:35 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl'), batch_size=1, schema='test', 
parallel=None) +test time : 8.409 diff --git a/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03/144/test/mark_new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03/144/test/mark_new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f3338bea6afcba09947b353e2fe0a5d03296f2bb Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03/144/test/mark_new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03/144/test/new_image_1.jpg b/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03/144/test/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4354a5b3071f833552b014dfccdbd3b8ea793373 Binary files /dev/null and b/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03/144/test/new_image_1.jpg differ diff --git a/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03_Document-Dewarping-with-Control-Points.log b/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03_Document-Dewarping-with-Control-Points.log new file mode 100644 index 0000000000000000000000000000000000000000..5208b9e78f94d71d112e07d4dcb058b1244afc02 --- /dev/null +++ b/flat/2023-08-23/2023-08-23 22:26:49 @2021-02-03_Document-Dewarping-with-Control-Points.log @@ -0,0 +1,2 @@ +Namespace(arch='Document-Dewarping-with-Control-Points', img_shrink=None, n_epoch=300, optimizer='adam', l_rate=0.0002, print_freq=60, data_path_train=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/color'), data_path_validate=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/dataset/fiducial1024/fiducial1024_v1/validate'), data_path_test='./testdata', output_path=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/flat'), resume=PosixPath('/Users/lixiangfang/Desktop/Surf code/Document-Dewarping-with-Control-Points/Source/ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 
class Losses(object):
    """Loss collection for fiducial-(control-)point regression.

    All convolution kernels operate on 2-channel (x, y) displacement maps
    with groups=2, so each coordinate channel is filtered independently.

    Fixes vs. original:
      * Kernels/buffers were created with an unconditional ``.cuda(args_gpu)``,
        which crashes on CPU-only hosts; they are now placed on
        ``cuda:args_gpu`` only when CUDA is available.
      * The deprecated ``size_average`` flag of the functional losses is
        mapped onto the modern ``reduction`` argument internally; all public
        method signatures are unchanged.
    """

    def __init__(self, classify_size_average=True, args_gpu=0):
        # classify_size_average: True -> 'mean' reduction for loss_fn_l1_loss.
        self.classify_size_average = classify_size_average
        self.args_gpu = args_gpu
        # Fall back to CPU when no GPU is present.
        device = torch.device('cuda:%d' % args_gpu) if torch.cuda.is_available() else torch.device('cpu')
        self.device = device

        self.kernel_r = torch.ones(2, 1, 3, 3, device=device)

        # 9x9 "cross" kernel: 17 ones along the centre row and centre column.
        cross_17 = torch.zeros(9, 9)
        cross_17[4, :] = 1.
        cross_17[:, 4] = 1.
        self.kernel_cross_17 = cross_17.expand(2, 1, 9, 9).contiguous().to(device)

        # 3x3 "plus" kernel: 5 ones (centre + 4-neighbourhood).
        plus = torch.zeros(3, 3)
        plus[1, :] = 1.
        plus[:, 1] = 1.
        self.kernel = plus.expand(2, 1, 3, 3).contiguous().to(device)

        # Finite-difference kernels along height (2x1) and width (1x2).
        self.kernel_2_1 = torch.tensor([[[[1.], [-1.]]], [[[1.], [-1.]]]], device=device)
        self.kernel_1_2 = torch.tensor([[[[1., -1.]]], [[[1., -1.]]]], device=device)

        self.lambda_ = 0.5

        self.matrices_2 = torch.full((1024, 960), 2, dtype=torch.float, device=device)
        self.matrices_0 = torch.full((1024, 960), 0, dtype=torch.float, device=device)

        # Valid sub-sampling gaps for the fiducial-point grid.
        self.fiducial_point_gaps = [2, 3, 4, 5, 6, 10, 12, 15, 20, 30]
        self.fiducial_point_gaps_v2 = [2, 3, 4, 5, 6, 10, 12, 15, 20, 30, 60]

    @staticmethod
    def _reduction(size_average):
        """Map the legacy size_average flag onto the modern reduction mode."""
        return 'mean' if size_average else 'sum'

    def line_cross(self, input, target, size_average=False):
        """MSE between horizontal and vertical first-order differences of the
        predicted and ground-truth grids ("rectangle" regulariser)."""
        reduction = self._reduction(size_average)
        input_h = F.conv2d(input, self.kernel_1_2, padding=0, groups=2)
        target_h = F.conv2d(target, self.kernel_1_2, padding=0, groups=2)
        loss_h = F.mse_loss(input_h, target_h, reduction=reduction)
        input_v = F.conv2d(input, self.kernel_2_1, padding=0, groups=2)
        target_v = F.conv2d(target, self.kernel_2_1, padding=0, groups=2)
        loss_v = F.mse_loss(input_v, target_v, reduction=reduction)
        return loss_h + loss_v

    def loss_line_cross(self, input, target, size_average=False):
        """Local smoothness (squared) of the residual plus line_cross term.

        Returns (loss_local, loss_rectangles).
        """
        i_t = target - input
        # Compare each residual cell against its own value times 5 vs. the sum
        # over the 5-cell "plus" neighbourhood (replicate-padded borders).
        loss_local = torch.mean(torch.pow(
            F.conv2d(F.pad(i_t, (1, 1, 1, 1), mode='replicate'), self.kernel, padding=0, groups=2) - i_t * 5, 2))
        loss_rectangles = self.line_cross(input, target, size_average)
        return loss_local, loss_rectangles

    def loss_line_cross_l1(self, input, target, size_average=False):
        """L1 variant of loss_line_cross. Returns (loss_local, loss_rectangles)."""
        reduction = self._reduction(size_average)
        i_t = target - input
        loss_local = torch.mean(torch.abs(
            F.conv2d(F.pad(i_t, (1, 1, 1, 1), mode='replicate'), self.kernel, padding=0, groups=2) - i_t * 5))
        input_h = F.conv2d(input, self.kernel_1_2, padding=0, groups=2)
        target_h = F.conv2d(target, self.kernel_1_2, padding=0, groups=2)
        loss_h = F.l1_loss(input_h, target_h, reduction=reduction)
        input_v = F.conv2d(input, self.kernel_2_1, padding=0, groups=2)
        target_v = F.conv2d(target, self.kernel_2_1, padding=0, groups=2)
        loss_v = F.l1_loss(input_v, target_v, reduction=reduction)
        return loss_local, loss_h + loss_v

    def loss_fn4_v5(self, input, target, size_average=False):
        """Composite loss.

        Returns (loss_l1, loss_local, loss_edge, loss_rectangle):
        smooth-L1 term, local-smoothness term, weak supervision on the four
        border rows/columns, and the rectangle regulariser.
        """
        n, c, h, w = input.size()
        reduction = self._reduction(size_average)
        i_t = target - input

        loss_l1 = F.smooth_l1_loss(input, target, reduction=reduction)

        loss_local = torch.mean(torch.pow(
            F.conv2d(F.pad(i_t, (1, 1, 1, 1), mode='replicate'), self.kernel, padding=0, groups=2) - i_t * 5, 2))

        # Weak extra supervision on the outermost rows/columns of the grid.
        loss_edge = (F.mse_loss(input[:, :, 0, :], target[:, :, 0, :], reduction=reduction)
                     + F.mse_loss(input[:, :, h - 1, :], target[:, :, h - 1, :], reduction=reduction)
                     + F.mse_loss(input[:, :, :, 0], target[:, :, :, 0], reduction=reduction)
                     + F.mse_loss(input[:, :, :, w - 1], target[:, :, :, w - 1], reduction=reduction))

        loss_rectangle = self.line_cross(input, target, size_average)

        return loss_l1, loss_local, loss_edge, loss_rectangle

    def loss_fn4_v5_r_4(self, input, target, size_average=False):
        """Smooth-L1 plus local smoothness over a 9x9 cross neighbourhood
        (17 cells). The two trailing zeros keep the return arity identical to
        loss_fn4_v5 so callers can unpack uniformly."""
        i_t = target - input
        loss_l1 = F.smooth_l1_loss(input, target, reduction=self._reduction(size_average))
        loss_local = torch.mean(torch.pow(
            F.conv2d(F.pad(i_t, (4, 4, 4, 4), mode='replicate'), self.kernel_cross_17, padding=0, groups=2) - i_t * 17, 2))
        return loss_l1, loss_local, 0, 0

    def loss_fn_l1_loss(self, input, target):
        """Plain L1 loss; reduction follows classify_size_average."""
        return F.l1_loss(input, target, reduction=self._reduction(self.classify_size_average))
class ResidualBlockWithDilatedV1(nn.Module):
    """Residual block whose two 3x3 convolutions become dilated (rate 3)
    whenever the block neither downsamples (stride == 1) nor sits at the
    top of a stage (is_top is False)."""

    def __init__(self, in_channels, out_channels, BatchNorm, stride=1,
                 downsample=None, is_activation=True, is_top=False, is_dropout=False):
        super(ResidualBlockWithDilatedV1, self).__init__()
        self.stride = stride
        self.is_activation = is_activation
        self.downsample = downsample
        self.is_top = is_top
        # Plain strided 3x3 at stage tops / downsampling blocks, dilated otherwise.
        use_plain = self.stride != 1 or self.is_top
        self.conv1 = (conv3x3(in_channels, out_channels, self.stride)
                      if use_plain else dilation_conv(in_channels, out_channels, dilation=3))
        self.bn1 = BatchNorm(out_channels)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = (conv3x3(out_channels, out_channels)
                      if use_plain else dilation_conv(out_channels, out_channels, dilation=3))
        self.bn2 = BatchNorm(out_channels)

        self.is_dropout = is_dropout
        # Created unconditionally; only used by blocks that opt into dropout.
        self.drop_out = nn.Dropout2d(p=0.2)

    def forward(self, x):
        # Project the shortcut when a downsample module was provided.
        shortcut = x if self.downsample is None else self.downsample(x)
        y = self.bn2(self.conv2(self.relu(self.bn1(self.conv1(x)))))
        y += shortcut
        return self.relu(y)
class ResNetV2StraightV2(nn.Module):
    """Four-stage residual trunk built from ResidualBlockWithDilatedV1.

    Fix vs. original: the default arguments ``block_nums``, ``stride`` and
    ``dropRate`` were shared mutable lists; they are now immutable tuples.
    This is backward compatible because the values are only indexed, never
    mutated, and callers may still pass lists.
    """

    def __init__(self, num_filter, map_num, BatchNorm, block_nums=(3, 4, 6, 3),
                 block=ResidualBlockWithDilatedV1, stride=(1, 2, 2, 2),
                 dropRate=(0.2, 0.2, 0.2, 0.2), is_sub_dropout=False):
        super(ResNetV2StraightV2, self).__init__()
        # Running channel count; advanced by blocklayer() as stages are built.
        self.in_channels = num_filter * map_num[0]
        self.dropRate = dropRate
        self.stride = stride
        self.is_sub_dropout = is_sub_dropout
        # Dropout modules are created up front; blocks opt in via is_sub_dropout.
        self.drop_out = nn.Dropout2d(p=dropRate[0])
        self.drop_out_2 = nn.Dropout2d(p=dropRate[1])
        self.drop_out_3 = nn.Dropout2d(p=dropRate[2])
        self.drop_out_4 = nn.Dropout2d(p=dropRate[3])
        self.relu = nn.ReLU(inplace=True)

        self.block_nums = block_nums
        self.layer1 = self.blocklayer(block, num_filter * map_num[0], self.block_nums[0], BatchNorm, stride=self.stride[0])
        self.layer2 = self.blocklayer(block, num_filter * map_num[1], self.block_nums[1], BatchNorm, stride=self.stride[1])
        self.layer3 = self.blocklayer(block, num_filter * map_num[2], self.block_nums[2], BatchNorm, stride=self.stride[2])
        self.layer4 = self.blocklayer(block, num_filter * map_num[3], self.block_nums[3], BatchNorm, stride=self.stride[3])

    def blocklayer(self, block, out_channels, block_nums, BatchNorm, stride=1):
        """Build one stage: a top block (may downsample) plus identity blocks."""
        downsample = None
        if stride != 1 or self.in_channels != out_channels:
            # Project the residual shortcut when the tensor shape changes.
            downsample = nn.Sequential(
                conv3x3(self.in_channels, out_channels, stride=stride),
                BatchNorm(out_channels))

        layers = [block(self.in_channels, out_channels, BatchNorm, stride,
                        downsample, is_top=True, is_dropout=False)]
        self.in_channels = out_channels
        for _ in range(1, block_nums):
            layers.append(block(out_channels, out_channels, BatchNorm,
                                is_activation=True, is_top=False,
                                is_dropout=self.is_sub_dropout))
        return nn.Sequential(*layers)

    def forward(self, x, is_skip=False):
        """Run the four stages in sequence.

        is_skip is accepted for API compatibility but unused here.
        """
        out = self.layer1(x)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        return out
class FiducialPoints(nn.Module):
    """Thin wrapper: resolves a norm-layer alias and instantiates the given
    backbone architecture, then delegates forward() to it."""

    # String aliases accepted for the BatchNorm argument; a norm class passed
    # directly is used as-is.
    _NORM_LAYERS = {'IN': nn.InstanceNorm2d, 'BN': nn.BatchNorm2d, 'GN': nn.GroupNorm}

    def __init__(self, n_classes, num_filter, architecture, BatchNorm='GN', in_channels=3):
        super(FiducialPoints, self).__init__()
        self.in_channels = in_channels
        self.n_classes = n_classes
        self.num_filter = num_filter
        norm_layer = self._NORM_LAYERS.get(BatchNorm, BatchNorm)
        self.dilated_unet = architecture(self.n_classes, self.num_filter,
                                         norm_layer, in_channels=self.in_channels)

    def forward(self, x, is_softmax=True):
        # Pure delegation to the wrapped backbone.
        return self.dilated_unet(x, is_softmax)
class DilatedResnetForFlatByFiducialPointsS2(nn.Module):
    """Dilated-ResNet backbone with six parallel dilated "bridge" branches.

    forward() returns (out_regress, segment_regress): a dense n_classes-channel
    regression map and a 2-value linear regression from the flattened bridge
    features. NOTE(review): the Linear layer assumes the bridge feature map is
    31x31 spatially, i.e. a fixed input resolution — confirm against callers.
    """

    def __init__(self, n_classes, num_filter, BatchNorm, in_channels=3):
        super(DilatedResnetForFlatByFiducialPointsS2, self).__init__()
        self.in_channels = in_channels
        self.n_classes = n_classes
        self.num_filter = num_filter
        activation = nn.ReLU(inplace=True)

        map_num = [1, 2, 4, 8, 16]

        print("\n------load DilatedResnetForFlatByFiducialPointsS2------\n")

        head_ch = self.num_filter * map_num[0]
        # Two stride-2 convolutions: the input is downsampled 4x before the trunk.
        self.resnet_head = nn.Sequential(
            nn.Conv2d(self.in_channels, head_ch, kernel_size=3, stride=2, padding=1),
            BatchNorm(head_ch),
            activation,
            nn.Conv2d(head_ch, head_ch, kernel_size=3, stride=2, padding=1),
            BatchNorm(head_ch),
            activation,
        )

        self.resnet_down = ResNetV2StraightV2(
            num_filter, map_num, BatchNorm, block_nums=[3, 4, 6, 3],
            block=ResidualBlockWithDilatedV1, dropRate=[0, 0, 0, 0],
            is_sub_dropout=False)

        bridge_ch = self.num_filter * map_num[3]

        def make_bridge(dilations):
            # One dilated conv-BN-act stage per listed dilation rate.
            return nn.Sequential(*(
                dilation_conv_bn_act(bridge_ch, bridge_ch, activation, BatchNorm, dilation=d)
                for d in dilations))

        # Six parallel branches with progressively larger receptive fields.
        self.bridge_1 = make_bridge([1])
        self.bridge_2 = make_bridge([2])
        self.bridge_3 = make_bridge([5])
        self.bridge_4 = make_bridge([8, 3, 2])
        self.bridge_5 = make_bridge([12, 7, 4])
        self.bridge_6 = make_bridge([18, 12, 6])

        concat_ch = self.num_filter * map_num[2]
        # 1x1 conv fuses the six concatenated branch outputs.
        self.bridge_concate = nn.Sequential(
            nn.Conv2d(bridge_ch * 6, concat_ch, kernel_size=1, stride=1, padding=0),
            BatchNorm(concat_ch),
            activation,
        )
        self.out_regress = nn.Sequential(
            nn.Conv2d(concat_ch, head_ch, kernel_size=3, stride=1, padding=1),
            BatchNorm(head_ch),
            nn.PReLU(),
            nn.Conv2d(head_ch, n_classes, kernel_size=3, stride=1, padding=1),
        )

        self.segment_regress = nn.Linear(concat_ch * 31 * 31, 2)

        self._initialize_weights()

    def _initialize_weights(self):
        # Xavier-normal init with a small gain for every (transposed) convolution.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                tinit.xavier_normal_(m.weight, gain=0.2)
            if isinstance(m, nn.ConvTranspose2d):
                assert m.kernel_size[0] == m.kernel_size[1]
                tinit.xavier_normal_(m.weight, gain=0.2)

    def cat(self, trans, down):
        # Channel-wise concatenation helper.
        return torch.cat([trans, down], dim=1)

    def forward(self, x, is_softmax):
        features = self.resnet_down(self.resnet_head(x))

        branches = [self.bridge_1(features), self.bridge_2(features),
                    self.bridge_3(features), self.bridge_4(features),
                    self.bridge_5(features), self.bridge_6(features)]
        bridge = self.bridge_concate(torch.cat(branches, dim=1))

        out_regress = self.out_regress(bridge)
        segment_regress = self.segment_regress(bridge.view(x.size(0), -1))

        return out_regress, segment_regress
architecture=DilatedResnetForFlatByFiducialPointsS2, BatchNorm='BN', in_channels=3) # + + if args.parallel is not None: + device_ids = list(map(int, args.parallel)) + args.device = torch.device('cuda:'+str(device_ids[0])) + + # args.gpu = device_ids[0] + # if args.gpu < 8: + torch.cuda.set_device(args.device) + model = torch.nn.DataParallel(model, device_ids=device_ids) + model.cuda(args.device) + else: + # exit() + args.device = torch.device('cpu') + print('using CPU!') + + + if args.optimizer == 'SGD': + optimizer = torch.optim.SGD(model.parameters(), lr=0.0001, momentum=0.8, weight_decay=1e-12) + elif args.optimizer == 'adam': + optimizer = torch.optim.Adam(model.parameters(), lr=args.l_rate, weight_decay=1e-10) + else: + assert 'please choice optimizer' + exit('error') + + if args.resume is not None: + if os.path.isfile(args.resume): + print("Loading model and optimizer from checkpoint '{}'".format(args.resume)) + if args.parallel is not None: + checkpoint = torch.load(args.resume, map_location=args.device) + model.load_state_dict(checkpoint['model_state']) + else: + checkpoint = torch.load(args.resume, map_location=args.device) + '''cpu''' + model_parameter_dick = {} + for k in checkpoint['model_state']: + model_parameter_dick[k.replace('module.', '')] = checkpoint['model_state'][k] + model.load_state_dict(model_parameter_dick) + + print("Loaded checkpoint '{}' (epoch {})" + .format(args.resume.name, checkpoint['epoch'])) + else: + print("No checkpoint found at '{}'".format(args.resume.name)) + + FlatImg = utils.FlatImg(args=args, path=path, date=date, date_time=date_time, _re_date=_re_date, model=model, \ + reslut_file=reslut_file, n_classes=n_classes, optimizer=optimizer, \ + data_loader=PerturbedDatastsForFiducialPoints_pickle_color_v2_v2, \ + data_path=data_path, data_path_validate=data_path_validate, data_path_test=data_path_test, data_preproccess=False) # , valloaderSet=valloaderSet, v_loaderSet=v_loaderSet + ''' load data ''' + FlatImg.loadTestData() + 
+ epoch = checkpoint['epoch'] if args.resume is not None else 0 + model.eval() + FlatImg.validateOrTestModelV3(epoch, 0, validate_test='t_all') + exit() + + reslut_file.close() + +if __name__ == '__main__': + print(FILE) + print(ROOT) + parser = argparse.ArgumentParser(description='Hyperparams') + parser.add_argument('--arch', nargs='?', type=str, default='Document-Dewarping-with-Control-Points', + help='Architecture') + + parser.add_argument('--img_shrink', nargs='?', type=int, default=None, + help='short edge of the input image') + + parser.add_argument('--n_epoch', nargs='?', type=int, default=300, + help='# of the epochs') + + parser.add_argument('--optimizer', type=str, default='adam', + help='optimization') + + parser.add_argument('--l_rate', nargs='?', type=float, default=0.0002, + help='Learning Rate') + + + parser.add_argument('--print-freq', '-p', default=60, type=int, + metavar='N', help='print frequency (default: 10)') # print frequency + + parser.add_argument('--data_path_train', default=ROOT / 'dataset/fiducial1024/fiducial1024_v1/color/', type=str, + help='the path of train images.') # train image path + + parser.add_argument('--data_path_validate', default=ROOT / 'dataset/fiducial1024/fiducial1024_v1/validate/', type=str, + help='the path of validate images.') # validate image path + + parser.add_argument('--data_path_test', default=ROOT / 'data/', type=str, help='the path of test images.') + + parser.add_argument('--output-path', default=ROOT / 'flat/', type=str, help='the path is used to save output --img or result.') + + parser.add_argument('--resume', default=ROOT / 'ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl', type=str, + help='Path to previous saved model to restart from') + + parser.add_argument('--batch_size', nargs='?', type=int, default=1, + help='Batch Size')#28 + + parser.add_argument('--schema', type=str, default='test', + help='train or test') # train validate + + # 
parser.set_defaults(resume='./ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl') + + parser.add_argument('--parallel', default=None, type=list, + help='choice the gpu id for parallel ') + + args = parser.parse_args() + + if args.resume is not None: + if not os.path.isfile(args.resume): + raise Exception(args.resume+' -- not exist') + + if args.data_path_test is None: + raise Exception('-- No test path') + else: + if not os.path.exists(args.data_path_test): + raise Exception(args.data_path_test+' -- no find') + + global path, date, date_time + date = time.strftime('%Y-%m-%d', time.localtime(time.time())) + date_time = time.strftime(' %H:%M:%S', time.localtime(time.time())) + path = os.path.join(args.output_path, date) + + if not os.path.exists(path): + os.makedirs(path) + + train(args) diff --git a/testdata/new_image_1.jpg b/testdata/new_image_1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ed7c52deca75755b924e330e2372289abddfb1f7 --- /dev/null +++ b/testdata/new_image_1.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c16cad49e3d06f57cd8f137f4da0ca1d3fc20963419cef0229467bdc274fb51 +size 1111412 diff --git a/tpsV2.py b/tpsV2.py new file mode 100644 index 0000000000000000000000000000000000000000..484ebffe2e3d28a01f4ddb74bafa9d3dc8756c3c --- /dev/null +++ b/tpsV2.py @@ -0,0 +1,104 @@ +''' + +''' +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F + +class createThinPlateSplineShapeTransformer(nn.Module): + + def __init__(self, I_r_size, fiducial_num=[31, 31], device=torch.device('cuda:0')): + """ + input: + batch_I: Batch Input Image [batch_size x I_channel_num x I_height x I_width] + I_r_size : (height, width) of the rectified image I_r + fiducial_num : the number of fiducial_points + output: + batch_I_r: rectified image [batch_size x I_channel_num x I_r_height x I_r_width] + """ + 
super(createThinPlateSplineShapeTransformer, self).__init__() + self.f_row_num, self.f_col_num = fiducial_num + self.F = self.f_row_num * self.f_col_num + self.I_r_size = I_r_size + self.device = device + self.estimateTransformation = estimateTransformation(self.F, self.I_r_size, self.device) + + def forward(self, batch_I, batch_F, shap_new=None): + build_P_prime = self.estimateTransformation.build_P_prime(batch_F) + build_P_prime_reshape = build_P_prime.reshape([build_P_prime.size(0), self.I_r_size[0], self.I_r_size[1], 2]) + + if shap_new is None: + batch_I_r = F.grid_sample(batch_I, build_P_prime_reshape, padding_mode='border', align_corners=True) + else: + build_P_prime_reshape = build_P_prime_reshape.transpose(2, 3).transpose(1, 2) + map = F.interpolate(build_P_prime_reshape, shap_new, mode='bilinear', align_corners=True) + map = map.transpose(1, 2).transpose(2, 3) + batch_I_r = F.grid_sample(batch_I, map, padding_mode='border', align_corners=True) + + return batch_I_r + +class estimateTransformation(nn.Module): + + def __init__(self, F, I_r_size, device): + super(estimateTransformation, self).__init__() + self.eps = 1e-6 + self.I_r_height, self.I_r_width = I_r_size + self.F = F + self.C = self._build_C(self.F) + self.P = self._build_P(self.I_r_width, self.I_r_height) + self.device = device + self.register_buffer("inv_delta_C", torch.tensor(self._build_inv_delta_C(self.F, self.C), dtype=torch.float64, device=self.device)) + self.register_buffer("P_hat", torch.tensor(self._build_P_hat(self.F, self.C, self.P), dtype=torch.float64, device=self.device)) + + def _build_C(self, F): + im_x, im_y = np.mgrid[-1:1:complex(31), -1:1:complex(31)] + C = np.stack((im_y,im_x), axis=2).reshape(-1,2) + return C + + def _build_inv_delta_C(self, F, C): + hat_C = np.zeros((F, F), dtype=float) # F x F + for i in range(0, F): + for j in range(i, F): + r = np.linalg.norm(C[i] - C[j]) + hat_C[i, j] = r + hat_C[j, i] = r + np.fill_diagonal(hat_C, 1) + hat_C = (hat_C ** 2) * 
np.log(hat_C ** 2) + delta_C = np.concatenate( # F+3 x F+3 + [ + np.concatenate([np.ones((F, 1)), C, hat_C], axis=1), # F x F+3 + np.concatenate([np.zeros((1, 3)), np.ones((1, F))], axis=1), # 1 x F+3 + np.concatenate([np.zeros((2, 3)), np.transpose(C)], axis=1), # 2 x F+3 + ], + axis=0 + ) + inv_delta_C = np.linalg.inv(delta_C) + return inv_delta_C # F+3 x F+3 + + def _build_P(self, I_r_width, I_r_height): + I_r_grid_x = (np.arange(-I_r_width, I_r_width, 2) + 1.0) / I_r_width # self.I_r_width + I_r_grid_y = (np.arange(-I_r_height, I_r_height, 2) + 1.0) / I_r_height # self.I_r_height + P = np.stack( # self.I_r_width x self.I_r_height x 2 + np.meshgrid(I_r_grid_x, I_r_grid_y), + axis=2 + ) + return P.reshape([-1, 2]) # n (= self.I_r_width x self.I_r_height) x 2 + + def _build_P_hat(self, F, C, P): + n = P.shape[0] # n (= self.I_r_width x self.I_r_height) + P_tile = np.tile(np.expand_dims(P, axis=1), (1, F, 1)) # n x 2 -> n x 1 x 2 -> n x F x 2 + C_tile = np.expand_dims(C, axis=0) # 1 x F x 2 + P_diff = P_tile - C_tile # n x F x 2 + rbf_norm = np.linalg.norm(P_diff, ord=2, axis=2, keepdims=False) # n x F + rbf = 2 * np.multiply(np.square(rbf_norm), np.log(rbf_norm + self.eps)) # n x F + P_hat = np.concatenate([np.ones((n, 1)), P, rbf], axis=1) + return P_hat # n x F+3 + + def build_P_prime(self, batch_C_prime): + batch_size = batch_C_prime.size(0) + batch_C_prime_with_zeros = torch.cat((batch_C_prime, torch.zeros( + batch_size, 3, 2).double().to(self.device)), dim=1) # batch_size x F+3 x 2 + batch_T = torch.matmul(self.inv_delta_C, batch_C_prime_with_zeros) # batch_size x F+3 x 2 + batch_P_prime = torch.matmul(self.P_hat, batch_T) # batch_size x n x 2 + return batch_P_prime # batch_size x n x 2 diff --git a/train.py b/train.py new file mode 100644 index 0000000000000000000000000000000000000000..eedb193c42811d4b5ae93157d29459ea3fbe9487 --- /dev/null +++ b/train.py @@ -0,0 +1,319 @@ +''' +2021/2/3 + +Guowang Xie + +args: + n_epoch:epoch values for training + 
optimizer:various optimization algorithms + l_rate:initial learning rate + resume:the path of trained model parameter after + data_path_train:datasets path for training + data_path_validate:datasets path for validating + data_path_test:datasets path for testing + output-path:output path + batch_size: + schema:test or train + parallel:number of gpus used, like 0, or, 0123 + +''' +import os, sys +import argparse +import torch +from torch.autograd import Variable +import warnings +import time +import re +from pathlib import Path +FILE = Path(__file__).resolve() +ROOT = FILE.parents[0] + +from network import FiducialPoints, DilatedResnetForFlatByFiducialPointsS2 + +# import utilsV3 as utils +import utilsV4 as utils + +from dataloader import PerturbedDatastsForFiducialPoints_pickle_color_v2_v2 + +from loss import Losses + +def train(args): + global _re_date + if args.resume is not None: + re_date = re.compile(r'\d{4}-\d{1,2}-\d{1,2}') + _re_date = re_date.search(str(args.resume)).group(0) + reslut_file = open(path + '/' + date + date_time + ' @' + _re_date + '_' + args.arch + '.log', 'w') + else: + _re_date = None + reslut_file = open(path+'/'+date+date_time+'_'+args.arch+'.log', 'w') + + # Setup Dataloader + data_path = str(args.data_path_train)+'/' + data_path_validate = str(args.data_path_validate)+'/' + data_path_test = str(args.data_path_test)+'/' + + print(args) + print(args, file=reslut_file) + + n_classes = 2 + + model = FiducialPoints(n_classes=n_classes, num_filter=32, architecture=DilatedResnetForFlatByFiducialPointsS2, BatchNorm='BN', in_channels=3) # + + if args.parallel is not None: + device_ids = list(map(int, args.parallel)) + args.device = torch.device('cuda:'+str(device_ids[0])) + + # args.gpu = device_ids[0] + # if args.gpu < 8: + torch.cuda.set_device(args.device) + model = torch.nn.DataParallel(model, device_ids=device_ids) + model.cuda(args.device) + elif args.distributed: + model.cuda() + model = torch.nn.parallel.DistributedDataParallel(model) + 
else: + warnings.warn('no found gpu') + exit() + + if args.optimizer == 'SGD': + optimizer = torch.optim.SGD(model.parameters(), lr=0.0001, momentum=0.8, weight_decay=1e-12) + elif args.optimizer == 'adam': + optimizer = torch.optim.Adam(model.parameters(), lr=args.l_rate, weight_decay=1e-10) + else: + assert 'please choice optimizer' + exit('error') + + if args.resume is not None: + if os.path.isfile(args.resume): + print("Loading model and optimizer from checkpoint '{}'".format(args.resume)) + checkpoint = torch.load(args.resume, map_location=args.device) + + model.load_state_dict(checkpoint['model_state']) + optimizer.load_state_dict(checkpoint['optimizer_state']) + print("Loaded checkpoint '{}' (epoch {})" + .format(args.resume.name, checkpoint['epoch'])) + else: + print("No checkpoint found at '{}'".format(args.resume.name)) + + loss_fun_classes = Losses(classify_size_average=True, args_gpu=args.device) + loss_fun = loss_fun_classes.loss_fn4_v5_r_4 # * + # loss_fun = loss_fun_classes.loss_fn4_v5_r_3 # * + + loss_fun2 = loss_fun_classes.loss_fn_l1_loss + + FlatImg = utils.FlatImg(args=args, path=path, date=date, date_time=date_time, _re_date=_re_date, model=model, \ + reslut_file=reslut_file, n_classes=n_classes, optimizer=optimizer, \ + loss_fn=loss_fun, loss_fn2=loss_fun2, data_loader=PerturbedDatastsForFiducialPoints_pickle_color_v2_v2, \ + data_path=data_path, data_path_validate=data_path_validate, data_path_test=data_path_test, data_preproccess=False) # , valloaderSet=valloaderSet, v_loaderSet=v_loaderSet + ''' load data ''' + FlatImg.loadTestData() + + train_time = AverageMeter() + losses = AverageMeter() + + FlatImg.lambda_loss = 1 + FlatImg.lambda_loss_segment = 0.01 + FlatImg.lambda_loss_a = 0.1 + FlatImg.lambda_loss_b = 0.001 + FlatImg.lambda_loss_c = 0.01 + + scheduler = torch.optim.lr_scheduler.MultiStepLR(FlatImg.optimizer, milestones=[40, 90, 150, 200], gamma=0.5) + + epoch_start = checkpoint['epoch'] if args.resume is not None else 0 + + if 
args.schema == 'train': + trainloader = FlatImg.loadTrainData(data_split='train', is_shuffle=True) + FlatImg.loadValidateAndTestData(is_shuffle=True) + trainloader_len = len(trainloader) + + for epoch in range(epoch_start, args.n_epoch): + + print('* lambda_loss :'+str(FlatImg.lambda_loss)+'\t'+'learning_rate :'+str(optimizer.param_groups[0]['lr'])) + print('* lambda_loss :'+str(FlatImg.lambda_loss)+'\t'+'learning_rate :'+str(optimizer.param_groups[0]['lr']), file=reslut_file) + + begin_train = time.time() + loss_segment_list = 0 + loss_l1_list = 0 + loss_local_list = 0 + loss_edge_list = 0 + loss_rectangles_list = 0 + loss_list = [] + + model.train() + for i, (images, labels, segment) in enumerate(trainloader): + + images = Variable(images) + labels = Variable(labels.cuda(args.device)) + segment = Variable(segment.cuda(args.device)) + + optimizer.zero_grad() + outputs, outputs_segment = FlatImg.model(images, is_softmax=False) + + loss_l1, loss_local, loss_edge, loss_rectangles = loss_fun(outputs, labels, size_average=True) + loss_segment = loss_fun2(outputs_segment, segment) + loss = FlatImg.lambda_loss*(loss_l1 + loss_local*FlatImg.lambda_loss_a + loss_edge*FlatImg.lambda_loss_b + loss_rectangles*FlatImg.lambda_loss_c) + FlatImg.lambda_loss_segment*loss_segment + + losses.update(loss.item()) + loss.backward() + optimizer.step() + + loss_list.append(loss.item()) + loss_segment_list += loss_segment.item() + loss_l1_list += loss_l1.item() + loss_local_list += loss_local.item() + # loss_edge_list += loss_edge.item() + # loss_rectangles_list += loss_rectangles.item() + + if (i + 1) % args.print_freq == 0 or (i + 1) == trainloader_len: + list_len = len(loss_list) + print('[{0}][{1}/{2}]\t\t' + '[{3:.2f} {4:.4f} {5:.2f}]\t' + '[l1:{6:.4f} l:{7:.4f} e:{8:.4f} r:{9:.4f} s:{10:.4f}]\t' + '{loss.avg:.4f}'.format( + epoch + 1, i + 1, trainloader_len, + min(loss_list), sum(loss_list) / list_len, max(loss_list), + loss_l1_list / list_len, loss_local_list / list_len, 
loss_edge_list / list_len, loss_rectangles_list / list_len, loss_segment_list / list_len, + loss=losses)) + print('[{0}][{1}/{2}]\t\t' + '[{3:.2f} {4:.4f} {5:.2f}]\t' + '[l1:{6:.4f} l:{7:.4f} e:{8:.4f} r:{9:.4f} s:{10:.4f}]\t' + '{loss.avg:.4f}'.format( + epoch + 1, i + 1, trainloader_len, + min(loss_list), sum(loss_list) / list_len, max(loss_list), + loss_l1_list / list_len, loss_local_list / list_len, loss_edge_list / list_len, loss_rectangles_list / list_len, loss_segment_list / list_len, + loss=losses), file=reslut_file) + + del loss_list[:] + loss_segment_list = 0 + loss_l1_list = 0 + loss_local_list = 0 + loss_edge_list = 0 + loss_rectangles_list = 0 + FlatImg.saveModel_epoch(epoch) # FlatImg.saveModel(epoch, save_path=path) + + model.eval() + + trian_t = time.time()-begin_train + losses.reset() + + train_time.update(trian_t) + + try: + FlatImg.validateOrTestModelV3(epoch, trian_t, validate_test='v_l4') + FlatImg.validateOrTestModelV3(epoch, 0, validate_test='t') + except: + print(' Error: validate or test') + + try: + scheduler.step() + except: + pass + + print('\n') + elif args.schema == 'validate': + epoch = checkpoint['epoch'] if args.resume is not None else 0 + model.eval() + FlatImg.validateOrTestModelV3(epoch, 0, validate_test='t_all') + exit() + elif args.schema == 'test': + epoch = checkpoint['epoch'] if args.resume is not None else 0 + model.eval() + FlatImg.validateOrTestModelV3(epoch, 0, validate_test='t_all') + exit() + elif args.schema == 'eval': + FlatImg.evalData(is_shuffle=True) + + epoch = checkpoint['epoch'] if args.resume is not None else 0 + model.eval() + FlatImg.evalModelGreyC1(epoch, is_scaling=False) + exit() + + m, s = divmod(train_time.sum, 60) + h, m = divmod(m, 60) + print("All Train Time : %02d:%02d:%02d\n" % (h, m, s)) + print("All Train Time : %02d:%02d:%02d\n" % (h, m, s), file=reslut_file) + + reslut_file.close() + +class AverageMeter(object): + """Computes and stores the average and current value""" + def __init__(self): + 
self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Hyperparams') + parser.add_argument('--arch', nargs='?', type=str, default='Document-Dewarping-with-Control-Points', + help='Architecture') + + parser.add_argument('--img_shrink', nargs='?', type=int, default=None, + help='short edge of the input image') + + parser.add_argument('--n_epoch', nargs='?', type=int, default=300, + help='# of the epochs') + + parser.add_argument('--optimizer', type=str, default='adam', + help='optimization') + + parser.add_argument('--l_rate', nargs='?', type=float, default=0.0002, + help='Learning Rate') + + parser.add_argument('--print-freq', '-p', default=60, type=int, + metavar='N', help='print frequency (default: 10)') # print frequency + + parser.add_argument('--data_path_train', default=ROOT / 'dataset/fiducial1024/fiducial1024_v1/', type=str, + help='the path of train images.') # train image path + + parser.add_argument('--data_path_validate', default=ROOT / 'dataset/fiducial1024/fiducial1024_v1/validate/', type=str, + help='the path of validate images.') # validate image path + + parser.add_argument('--data_path_test', default=ROOT / 'data/', type=str, help='the path of test images.') + + parser.add_argument('--output-path', default=ROOT / 'flat/', type=str, help='the path is used to save output --img or result.') + + parser.add_argument('--resume', default=ROOT / 'ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl', type=str, + help='Path to previous saved model to restart from') + + parser.add_argument('--batch_size', nargs='?', type=int, default=2, + help='Batch Size')#28 + + parser.add_argument('--schema', type=str, default='test', + help='train or test') # train validate + + # 
parser.set_defaults(resume='./ICDAR2021/2021-02-03 16:15:55/143/2021-02-03 16_15_55flat_img_by_fiducial_points-fiducial1024_v1.pkl') + + parser.add_argument('--parallel', default='1', type=list, + help='choice the gpu id for parallel ') + + args = parser.parse_args() + + if args.resume is not None: + if not os.path.isfile(args.resume): + raise Exception(args.resume+' -- not exist') + + if args.data_path_test is None: + raise Exception('-- No test path') + else: + if not os.path.exists(args.data_path_test): + raise Exception(args.data_path_test+' -- no find') + + global path, date, date_time + date = time.strftime('%Y-%m-%d', time.localtime(time.time())) + date_time = time.strftime(' %H:%M:%S', time.localtime(time.time())) + path = os.path.join(args.output_path, date) + + if not os.path.exists(path): + os.makedirs(path) + + train(args) diff --git a/utilsV3.py b/utilsV3.py new file mode 100644 index 0000000000000000000000000000000000000000..705c88abd32a362e09b82e9a14727d6e76e4ffe1 --- /dev/null +++ b/utilsV3.py @@ -0,0 +1,558 @@ +''' +2021/2/3 + +Guowang Xie + +''' +import pickle + +import torch +from torch.utils import data +from torch.autograd import Variable, Function + +import numpy as np + +import sys, os, math + +import cv2 +import time +import re +from multiprocessing import Pool + +import random +import scipy.spatial.qhull as qhull + +from scipy.optimize import fsolve +from scipy.interpolate import griddata + +def adjust_position(x_min, y_min, x_max, y_max, new_shape): + if (new_shape[0] - (x_max - x_min)) % 2 == 0: + f_g_0_0 = (new_shape[0] - (x_max - x_min)) // 2 + f_g_0_1 = f_g_0_0 + else: + f_g_0_0 = (new_shape[0] - (x_max - x_min)) // 2 + f_g_0_1 = f_g_0_0 + 1 + + if (new_shape[1] - (y_max - y_min)) % 2 == 0: + f_g_1_0 = (new_shape[1] - (y_max - y_min)) // 2 + f_g_1_1 = f_g_1_0 + else: + f_g_1_0 = (new_shape[1] - (y_max - y_min)) // 2 + f_g_1_1 = f_g_1_0 + 1 + + # return f_g_0_0, f_g_0_1, f_g_1_0, f_g_1_1 + return f_g_0_0, f_g_1_0, new_shape[0] - 
f_g_0_1, new_shape[1] - f_g_1_1 + +def get_matric_edge(matric): + return np.concatenate((matric[:, 0, :], matric[:, -1, :], matric[0, 1:-1, :], matric[-1, 1:-1, :]), axis=0) + + +class SaveFlatImage(object): + def __init__(self, path, date, date_time, _re_date, data_path_validate, data_path_test, batch_size, preproccess=False): + self.path = path + self.date = date + self.date_time = date_time + self._re_date = _re_date + self.preproccess = preproccess + self.data_path_validate =data_path_validate + self.data_path_test = data_path_test + self.batch_size = batch_size + self.scaling_test_perturbed_img_path = '/lustre/home/gwxie/data/unwarp_new/test/shrink_2048_1920/crop/' + # self.perturbed_test_img_path = '/lustre/home/gwxie/data/unwarp_new/test/new_1024_960/crop/' + # self.perturbed_test_img_path = '/lustre/home/gwxie/data/unwarp_new/test/shrink_1024_960/crop/' + self.perturbed_test_img_path = '/lustre/home/gwxie/data/unwarp_new/test/yin2/' + + def location_mark(self, img, location, color=(0, 0, 255)): + stepSize = 0 + for l in location.astype(np.int64).reshape(-1, 2): + cv2.circle(img, + (l[0] + math.ceil(stepSize / 2), l[1] + math.ceil(stepSize / 2)), 3, color, -1) + return img + + def flatByRegressWithClassiy_fiducial_v1_RGB_AT_show(self, fiducial_points, segment, im_name, epoch, perturbed_img=None, scheme='validate', is_scaling=False): + '''''' + # if (scheme == 'test' or scheme == 'eval') and is_scaling: + # pass + # else: + if scheme == 'test' or scheme == 'eval': + perturbed_img_path = self.data_path_test + im_name + perturbed_img = cv2.imread(perturbed_img_path, flags=cv2.IMREAD_COLOR) + perturbed_img = cv2.resize(perturbed_img, (960, 1024)) + elif scheme == 'validate' and perturbed_img is None: + RGB_name = im_name.replace('gw', 'png') + perturbed_img_path = '/lustre/home/gwxie/data/unwarp_new/train/' + self.data_split + '/validate/png/' + RGB_name + perturbed_img = cv2.imread(perturbed_img_path, flags=cv2.IMREAD_COLOR) + elif perturbed_img is not None: + 
perturbed_img = perturbed_img.transpose(1, 2, 0) + + fiducial_points = fiducial_points / [992, 992] * [960, 1024] + # fiducial_points = fiducial_points / [496, 496] * [960, 1024] + # flat_shape = perturbed_img.shape[:2] + ''' + tps = cv2.createThinPlateSplineShapeTransformer() + edge_padding = 3''' + col_gap = 2 #4 + row_gap = col_gap# col_gap + 1 if col_gap < 6 else col_gap + # fiducial_point_gaps = [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 30, 60] # POINTS NUM: 61, 31, 21, 16, 13, 11, 7, 6, 5, 4, 3, 2 + fiducial_point_gaps = [1, 2, 3, 5, 6, 10, 15, 30] # POINTS NUM: 31, 16, 11, 7, 6, 4, 3, 2 + sshape = fiducial_points[::fiducial_point_gaps[row_gap], ::fiducial_point_gaps[col_gap], :] + segment_h, segment_w = segment * [fiducial_point_gaps[col_gap], fiducial_point_gaps[row_gap]] + fiducial_points_row, fiducial_points_col = sshape.shape[:2] + ''' + im_hight = np.linspace(0, segment_h * (fiducial_points_col - 1), fiducial_points_col, dtype=np.int64) + im_wide = np.linspace(0, segment_w * (fiducial_points_row - 1), fiducial_points_row, dtype=np.int64) + im_y, im_x = np.meshgrid(im_hight, im_wide) + tshape = np.stack((im_x, im_y), axis=2) + ''' + im_x, im_y = np.mgrid[0:(fiducial_points_col - 1):complex(fiducial_points_col), + 0:(fiducial_points_row - 1):complex(fiducial_points_row)] + + tshape = np.stack((im_x, im_y), axis=2) * [segment_w, segment_h] + + ''' + tshape = get_matric_edge(tshape) + sshape = get_matric_edge(sshape) + ''' + tshape = tshape.reshape(-1, 2) + sshape = sshape.reshape(-1, 2) + # perturbed_img_mark = self.location_mark(perturbed_img.copy(), fiducial_points, (0, 0, 255)) + # perturbed_img_mark = self.location_mark(perturbed_img.copy(), sshape, (0, 255, 0)) + + ''' + i_path = os.path.join(self.path, self.date + self.date_time + ' @' + self._re_date, + str(epoch)) if self._re_date is not None else os.path.join(self.path, + self.date + self.date_time, + str(epoch)) + if scheme == 'test': + i_path += '/test' + if not os.path.exists(i_path): + 
os.makedirs(i_path) + im_name = im_name.replace('gw', 'png') + cv2.imwrite(i_path + '/' + im_name, perturbed_img_mark) + # return + ''' + ''' + matches = list() + for i in range(sshape.shape[0]): + matches.append(cv2.DMatch(i, i, 0)) + tps.estimateTransformation(tshape.reshape(1, -1, 2), sshape.reshape(1, -1, 2), matches) + + shrink_paddig = 0 # 2 * edge_padding + x_start, x_end, y_start, y_end = shrink_paddig, segment_h * (fiducial_points_col - 1) - shrink_paddig, shrink_paddig, segment_w * (fiducial_points_row - 1) - shrink_paddig + # flat_img = tps.warpImage(perturbed_img)[0:segment_h * (fiducial_points_col - 1), 0:segment_w * (fiducial_points_row - 1), :] + flat_img = tps.warpImage(perturbed_img)[x_start:x_end, y_start:y_end, :] + # flat_img_mark = self.location_mark(flat_img.copy(), tshape, (0, 255, 0)) + ''' + + output_shape = (segment_h * (fiducial_points_col - 1), segment_w * (fiducial_points_row - 1)) + grid_x, grid_y = np.mgrid[0:output_shape[0] - 1:complex(output_shape[0]), + 0:output_shape[1] - 1:complex(output_shape[1])] + # grid_z = griddata(tshape, sshape, (grid_y, grid_x), method='cubic').astype('float32') + grid_ = griddata(tshape, sshape, (grid_y, grid_x), method='linear').astype('float32') + flat_img = cv2.remap(perturbed_img, grid_[:, :, 0], grid_[:, :, 1], cv2.INTER_CUBIC) + + '''''' + flat_img = flat_img.astype(np.uint8) + + i_path = os.path.join(self.path, self.date + self.date_time + ' @' + self._re_date, + str(epoch)) if self._re_date is not None else os.path.join(self.path, + self.date + self.date_time, + str(epoch)) + '''''' + if scheme == 'eval': + img_figure = cv2.cvtColor(flat_img, cv2.COLOR_RGB2GRAY) + if scheme == 'eval': + i_path += '/eval' + if not os.path.exists(i_path): + os.makedirs(i_path) + # print(im_name) + im_name = im_name.replace(' copy.png', '.jpg') + cv2.imwrite(i_path + '/' + im_name, img_figure) + else: + perturbed_img_mark = self.location_mark(perturbed_img.copy(), sshape, (0, 0, 255)) + + shrink_paddig = 0 # 2 * 
edge_padding + x_start, x_end, y_start, y_end = shrink_paddig, segment_h * (fiducial_points_col - 1) - shrink_paddig, shrink_paddig, segment_w * (fiducial_points_row - 1) - shrink_paddig + + x_ = (perturbed_img_mark.shape[0]-(x_end-x_start))//2 + y_ = (perturbed_img_mark.shape[1]-(y_end-y_start))//2 + + flat_img_new = np.zeros_like(perturbed_img_mark) + flat_img_new[x_:perturbed_img_mark.shape[0] - x_, y_:perturbed_img_mark.shape[1] - y_] = flat_img + img_figure = np.concatenate( + (perturbed_img_mark, flat_img_new), axis=1) + + if scheme == 'test': + i_path += '/test' + if not os.path.exists(i_path): + os.makedirs(i_path) + + im_name = im_name.replace('gw', 'png') + cv2.imwrite(i_path + '/' + im_name, img_figure) + ''' + # img_figure = cv2.cvtColor(flat_img, cv2.COLOR_RGB2GRAY) + # if scheme == 'eval': + i_path += '/eval' + if not os.path.exists(i_path): + os.makedirs(i_path) + # print(im_name) + im_name = im_name.replace(' copy.png', '.jpg') + cv2.imwrite(i_path + '/' + im_name, flat_img) + ''' + def flatByRegressWithClassiy_multiProcessV2(self, pred_fiducial_points, pred_segment, im_name, epoch, process_pool, perturbed_img=None, scheme='validate', is_scaling=False): + # process_pool = Pool(self.batch_size) + for i_val_i in range(pred_fiducial_points.shape[0]): + # self.flatByRegressWithClassiy_fiducial_v1_RGB_AT(pred_fiducial_points[i_val_i], pred_segment[i_val_i], im_name[i_val_i], epoch, None if perturbed_img is None else perturbed_img[i_val_i], scheme, is_scaling) + process_pool.apply_async(func=self.flatByRegressWithClassiy_fiducial_v1_RGB_AT_show, + args=(pred_fiducial_points[i_val_i], pred_segment[i_val_i], im_name[i_val_i], epoch, None if perturbed_img is None else perturbed_img[i_val_i], scheme, is_scaling)) + # process_pool.apply_async(func=self.flatByRegressWithClassiy_fiducial_v1_RGB, + # args=(pred_fiducial_points[i_val_i], pred_segment[i_val_i], im_name[i_val_i], epoch, None if perturbed_img is None else perturbed_img[i_val_i], scheme, is_scaling)) 
+ # process_pool.apply_async(func=self.flatByRegressWithClassiy_triangular_v2_RGB, + # args=(pred_fiducial_points[i_val_i], pred_segment[i_val_i], im_name[i_val_i], epoch, None if perturbed_img is None else perturbed_img[i_val_i], scheme, is_scaling)) + # process_pool.close() + # process_pool.join() + + +class AverageMeter(object): + """Computes and stores the average and current value""" + def __init__(self): + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1, m=1): + self.val = val + self.sum += val * m + self.count += n + self.avg = self.sum / self.count + +class FlatImg(object): + def __init__(self, args, path, date, date_time, _re_date, model,\ + reslut_file, n_classes, optimizer, \ + model_D=None, optimizer_D=None, \ + loss_fn=None, loss_fn2=None, data_loader=None, data_loader_hdf5=None, dataPackage_loader = None, \ + data_path=None, data_path_validate=None, data_path_test=None, data_preproccess=True): #, valloaderSet, v_loaderSet + self.args = args + self.path = path + self.date = date + self.date_time = date_time + self._re_date = _re_date + # self.valloaderSet = valloaderSet + # self.v_loaderSet = v_loaderSet + self.model = model + self.model_D = model_D + self.reslut_file = reslut_file + self.n_classes = n_classes + self.optimizer = optimizer + self.optimizer_D = optimizer_D + self.loss_fn = loss_fn + self.loss_fn2 = loss_fn2 + self.data_loader = data_loader + self.data_loader_hdf5 = data_loader_hdf5 + self.dataPackage_loader = dataPackage_loader + self.data_path = data_path + self.data_path_validate = data_path_validate + self.data_path_test = data_path_test + self.data_preproccess = data_preproccess + self.save_flat_mage = SaveFlatImage(self.path, self.date, self.date_time, self._re_date, self.data_path_validate, self.data_path_test, self.args.batch_size, self.data_preproccess) + + self.validate_loss = AverageMeter() + self.validate_loss_regress = AverageMeter() + 
self.validate_loss_segment = AverageMeter() + self.lambda_loss = 1 + self.lambda_loss_segment = 1 + self.lambda_loss_a = 1 + self.lambda_loss_b = 1 + self.lambda_loss_c = 1 + + def saveDataPackage(self, data_size='640'): + + if not os.path.exists(self.data_path_validate + 'clip' + data_size + '/'): + os.makedirs(self.data_path_validate + 'clip' + data_size + '/') + + if not os.path.exists(self.data_path_validate + 'label' + data_size + '/'): + os.makedirs(self.data_path_validate + 'label' + data_size + '/') + trainloader = self.loadTrainData(data_split=self.data_split, is_shuffle=True) + begin_train = time.time() + for i, (images, labels) in enumerate(trainloader): + with open(self.data_path_validate + 'clip' + data_size + '/' + str(i) + '.im', 'wb') as f: + pickle_perturbed_im = pickle.dumps(images) + f.write(pickle_perturbed_im) + + with open(self.data_path_validate + 'label' + data_size + '/' + str(i) + '.lbl', 'wb') as f: + pickle_perturbed_lbl = pickle.dumps(labels) + f.write(pickle_perturbed_lbl) + + trian_t = time.time() - begin_train + + m, s = divmod(trian_t, 60) + h, m = divmod(m, 60) + print("All Train Time : %02d:%02d:%02d\n" % (h, m, s)) + + def loadTrainData(self, data_split, is_shuffle=True): + train_loader = self.data_loader(self.data_path, split=data_split, img_shrink=self.args.img_shrink, preproccess=self.data_preproccess) + trainloader = data.DataLoader(train_loader, batch_size=self.args.batch_size, num_workers=min(self.args.batch_size, 8), drop_last=True, pin_memory=True, + shuffle=is_shuffle) + return trainloader + + # def loadTrainDataPackage(self, data_split, is_shuffle=True, data_size='640'): + # train_loader = self.dataPackage_loader(self.data_path, split=data_split, data_size=data_size) + # trainloader = data.DataLoader(train_loader, batch_size=1, num_workers=1, shuffle=is_shuffle) + # + # return trainloader + + def loadValidateAndTestData(self, is_shuffle=True, sub_dir='shrink_512/crop/'): + v1_loader = 
self.data_loader(self.data_path_validate, split='validate', img_shrink=self.args.img_shrink, is_return_img_name=True, preproccess=self.data_preproccess) + valloader1 = data.DataLoader(v1_loader, batch_size=self.args.batch_size, num_workers=min(self.args.batch_size, 8), pin_memory=True, \ + shuffle=is_shuffle) + + '''val sets''' + v_loaderSet = { + 'v1_loader': v1_loader, + } + valloaderSet = { + 'valloader1': valloader1, + } + # sub_dir = 'crop/crop/' + + t1_loader = self.data_loader(self.data_path_test, split='test', img_shrink=self.args.img_shrink, is_return_img_name=True) + testloader1 = data.DataLoader(t1_loader, batch_size=self.args.batch_size, num_workers=self.args.batch_size, pin_memory=True, \ + shuffle=False) + + '''test sets''' + t_loaderSet = { + 't1_loader': v1_loader, + } + testloaderSet = { + 'testloader1': testloader1, + } + + self.valloaderSet = valloaderSet + self.v_loaderSet = v_loaderSet + + self.testloaderSet = testloaderSet + self.t_loaderSet = t_loaderSet + # return v_loaderSet, valloaderSet + + def loadTestData(self, is_shuffle=True): + t1_loader = self.data_loader(self.data_path_test, split='test', img_shrink=self.args.img_shrink, + is_return_img_name=True) + testloader1 = data.DataLoader(t1_loader, batch_size=self.args.batch_size, num_workers=self.args.batch_size, + pin_memory=True, shuffle=False) + + '''test sets''' + testloaderSet = { + 'testloader1': testloader1, + } + + self.testloaderSet = testloaderSet + + def evalData(self, is_shuffle=True, sub_dir='shrink_512/crop/'): + eval_loader = self.data_loader(self.data_path_test, split='eval', img_shrink=self.args.img_shrink, is_return_img_name=True) + evalloader = data.DataLoader(eval_loader, batch_size=self.args.batch_size, num_workers=self.args.batch_size, pin_memory=True, \ + shuffle=False) + + self.evalloaderSet = evalloader + # return v_loaderSet, valloaderSet + + def saveModel_epoch(self, epoch): + epoch += 1 + state = {'epoch': epoch, + 'model_state': self.model.state_dict(), + 
'optimizer_state': self.optimizer.state_dict(), # AN ERROR HAS OCCURED + } + i_path = os.path.join(self.path, self.date + self.date_time + ' @' + self._re_date, + str(epoch)) if self._re_date is not None else os.path.join(self.path, self.date + self.date_time, str(epoch)) + if not os.path.exists(i_path): + os.makedirs(i_path) + + if self._re_date is None: + torch.save(state, i_path + '/' + self.date + self.date_time + "{}".format(self.args.arch) + ".pkl") # "./trained_model/{}_{}_best_model.pkl" + else: + torch.save(state, + i_path + '/' + self._re_date + "@" + self.date + self.date_time + "{}".format( + self.args.arch) + ".pkl") + + def evalModelGreyC1(self, epoch, is_scaling=False): + process_pool = Pool(self.args.batch_size*4) + + begin_test = time.time() + with torch.no_grad(): + # for i_val, (images, perturbed_img, im_name) in enumerate(self.evalloaderSet): + for i_val, (images, im_name) in enumerate(self.evalloaderSet): + try: + images = Variable(images) + + outputs, outputs_segment = self.model(images) + # outputs, outputs_segment = self.model(images, is_softmax=True) + + pred_regress = outputs.data.cpu().numpy().transpose(0, 2, 3, 1) + pred_segment = outputs_segment.data.round().int().cpu().numpy() # (4, 1280, 1024) ==outputs.data.argmax(dim=0).cpu().numpy() + + self.save_flat_mage.flatByRegressWithClassiy_multiProcess_eval(pred_regress, + pred_segment, im_name, + epoch + 1, process_pool, + # perturbed_img=perturbed_img, + scheme='eval', + is_scaling=is_scaling) + except: + print('* save image tested error :' + im_name[0]) + process_pool.close() + process_pool.join() + test_time = time.time() - begin_test + + print('test time : {test_time:.3f}'.format( + test_time=test_time)) + + print('test time : {test_time:.3f}'.format( + test_time=test_time), + file=self.reslut_file) + + def validateOrTestModelV3(self, epoch, trian_t, validate_test='v_l2', is_scaling=False): + process_pool = Pool(16)# Pool(self.args.batch_size) + + if validate_test == 'v_l4': + 
loss_segment_list = 0 + loss_overall_list = 0 + loss_local_list = 0 + loss_edge_list = 0 + loss_rectangles_list = 0 + loss_list = [] + + begin_test = time.time() + with torch.no_grad(): + for i_valloader, valloader in enumerate(self.valloaderSet.values()): + for i_val, (images, labels, segment, im_name) in enumerate(valloader): + try: + # save_img_ = random.choices([True, False], weights=[1, 0])[0] + save_img_ = random.choices([True, False], weights=[0.05, 0.95])[0] + # save_img_ = True + + images = Variable(images) + labels = Variable(labels.cuda(self.args.gpu)) + segment = Variable(segment.cuda(self.args.gpu)) + + outputs, outputs_segment = self.model(images) + + loss_overall, loss_local, loss_edge, loss_rectangles = self.loss_fn(outputs, labels, size_average=True) + loss_segment = self.loss_fn2(outputs_segment, segment) + + loss = self.lambda_loss * (loss_overall + loss_local + loss_edge * self.lambda_loss_a + loss_rectangles * self.lambda_loss_b) + self.lambda_loss_segment * loss_segment + # loss = self.lambda_loss * (loss_local + loss_rectangles + loss_edge*self.lambda_loss_a + loss_overall*self.lambda_loss_b) + self.lambda_loss_segment * loss_segment + + pred_regress = outputs.data.cpu().numpy().transpose(0, 2, 3, 1) # (4, 1280, 1024, 2) + pred_segment = outputs_segment.data.round().int().cpu().numpy() # (4, 1280, 1024) ==outputs.data.argmax(dim=0).cpu().numpy() + + if save_img_: + self.save_flat_mage.flatByRegressWithClassiy_multiProcessV2(pred_regress, + pred_segment, im_name, + epoch + 1, process_pool, + perturbed_img=images.numpy(), scheme='validate', is_scaling=is_scaling) + loss_list.append(loss.item()) + loss_segment_list += loss_segment.item() + loss_overall_list += loss_overall.item() + loss_local_list += loss_local.item() + # loss_edge_list += loss_edge.item() + # loss_rectangles_list += loss_rectangles.item() + + except: + print('* save image validated error :'+im_name[0]) + process_pool.close() + process_pool.join() + test_time = time.time() - 
begin_test + + # if always_save_model: + # self.saveModel(epoch, save_path=self.path) + list_len = len(loss_list) + print('train time : {trian_t:.3f}\t' + 'validate time : {test_time:.3f}\t' + '[o:{overall_avg:.4f} l:{local_avg:.4f} e:{edge_avg:.4f} r:{rectangles_avg:.4f}\t' + '[{loss_regress:.4f} {loss_segment:.4f}]\n'.format( + trian_t=trian_t, test_time=test_time, + overall_avg=loss_overall_list / list_len, local_avg=loss_local_list / list_len, edge_avg=loss_edge_list / list_len, rectangles_avg=loss_rectangles_list / list_len, + loss_regress=(loss_overall_list+loss_local_list+loss_edge_list) / list_len, loss_segment=loss_segment_list / list_len)) + print('train time : {trian_t:.3f}\t' + 'validate time : {test_time:.3f}\t' + '[o:{overall_avg:.4f} l:{local_avg:.4f} e:{edge_avg:.4f} r:{rectangles_avg:.4f}\t' + '[{loss_regress:.4f} {loss_segment:.4f}]\n'.format( + trian_t=trian_t, test_time=test_time, + overall_avg=loss_overall_list / list_len, local_avg=loss_local_list / list_len, edge_avg=loss_edge_list / list_len, rectangles_avg=loss_rectangles_list / list_len, + loss_regress=(loss_overall_list+loss_local_list+loss_edge_list) / list_len, loss_segment=loss_segment_list / list_len), file=self.reslut_file) + elif validate_test == 't_all': + begin_test = time.time() + with torch.no_grad(): + for i_valloader, valloader in enumerate(self.testloaderSet.values()): + + for i_val, (images, im_name) in enumerate(valloader): + try: + # save_img_ = True + save_img_ = random.choices([True, False], weights=[1, 0])[0] + # save_img_ = random.choices([True, False], weights=[0.2, 0.8])[0] + + if save_img_: + images = Variable(images) + + outputs, outputs_segment = self.model(images) + # outputs, outputs_segment = self.model(images, is_softmax=True) + + pred_regress = outputs.data.cpu().numpy().transpose(0, 2, 3, 1) + pred_segment = outputs_segment.data.round().int().cpu().numpy() # (4, 1280, 1024) ==outputs.data.argmax(dim=0).cpu().numpy() + + 
self.save_flat_mage.flatByRegressWithClassiy_multiProcessV2(pred_regress, + pred_segment, im_name, + epoch + 1, process_pool, + scheme='test', is_scaling=is_scaling) + except: + print('* save image tested error :' + im_name[0]) + process_pool.close() + process_pool.join() + test_time = time.time() - begin_test + + print('test time : {test_time:.3f}'.format( + test_time=test_time)) + + print('test time : {test_time:.3f}'.format( + test_time=test_time), + file=self.reslut_file) + else: + begin_test = time.time() + with torch.no_grad(): + for i_valloader, valloader in enumerate(self.testloaderSet.values()): + + for i_val, (images, im_name) in enumerate(valloader): + try: + # save_img_ = True + # save_img_ = random.choices([True, False], weights=[1, 0])[0] + save_img_ = random.choices([True, False], weights=[0.4, 0.6])[0] + + if save_img_: + images = Variable(images) + + outputs, outputs_segment = self.model(images) + # outputs, outputs_segment = self.model(images, is_softmax=True) + + pred_regress = outputs.data.cpu().numpy().transpose(0, 2, 3, 1) + pred_segment = outputs_segment.data.round().int().cpu().numpy() # (4, 1280, 1024) ==outputs.data.argmax(dim=0).cpu().numpy() + + self.save_flat_mage.flatByRegressWithClassiy_multiProcessV2(pred_regress, + pred_segment, im_name, + epoch + 1, process_pool, + scheme='test', is_scaling=is_scaling) + except: + print('* save image tested error :' + im_name[0]) + process_pool.close() + process_pool.join() + test_time = time.time() - begin_test + + print('test time : {test_time:.3f}'.format( + test_time=test_time)) + + print('test time : {test_time:.3f}'.format( + test_time=test_time), + file=self.reslut_file) + + diff --git a/utilsV4.py b/utilsV4.py new file mode 100644 index 0000000000000000000000000000000000000000..0723425b72cda1c3930fd1b70ba1f26a909e6e50 --- /dev/null +++ b/utilsV4.py @@ -0,0 +1,488 @@ +''' +Guowang Xie +from utilsV3.py +''' +import torch +from torch.utils import data +from torch.autograd import Variable, 
Function
import numpy as np
import sys, os, math
import cv2
import time
import re
import random
from scipy.interpolate import griddata
from tpsV2 import createThinPlateSplineShapeTransformer


def adjust_position(x_min, y_min, x_max, y_max, new_shape):
    """Center an (x_max-x_min) x (y_max-y_min) box inside ``new_shape``.

    Returns ``(x_start, y_start, x_end, y_end)`` of the centered placement.
    When the leftover space along an axis is odd, the extra pixel is given
    to the far (max) edge.
    """
    if (new_shape[0] - (x_max - x_min)) % 2 == 0:
        pad_x_near = (new_shape[0] - (x_max - x_min)) // 2
        pad_x_far = pad_x_near
    else:
        pad_x_near = (new_shape[0] - (x_max - x_min)) // 2
        pad_x_far = pad_x_near + 1

    if (new_shape[1] - (y_max - y_min)) % 2 == 0:
        pad_y_near = (new_shape[1] - (y_max - y_min)) // 2
        pad_y_far = pad_y_near
    else:
        pad_y_near = (new_shape[1] - (y_max - y_min)) // 2
        pad_y_far = pad_y_near + 1

    return pad_x_near, pad_y_near, new_shape[0] - pad_x_far, new_shape[1] - pad_y_far


def get_matric_edge(matric):
    """Return the border points of a 2-D point grid ``matric`` (rows x cols x 2).

    Order: left column, right column, top-row interior, bottom-row interior.
    """
    return np.concatenate((matric[:, 0, :], matric[:, -1, :], matric[0, 1:-1, :], matric[-1, 1:-1, :]), axis=0)


class SaveFlatImage(object):
    '''
    Post-processing and saving of rectified (flattened) document images.

    Methods:
        flatByRegressWithClassiy_multiProcessV2: dispatches to a post-processing method
        flatByfiducial_TPS: Thin Plate Spline rectification (one image per call)
        flatByfiducial_interpolation: griddata interpolation (one image per call)
    '''

    def __init__(self, path, date, date_time, _re_date, data_path_validate, data_path_test, batch_size,
                 preproccess=False, postprocess='tps_gpu', device=torch.device('cuda:0')):
        self.path = path
        self.date = date
        self.date_time = date_time
        self._re_date = _re_date
        self.preproccess = preproccess
        self.data_path_validate = data_path_validate
        self.data_path_test = data_path_test
        self.batch_size = batch_size
        self.device = device
        # Gap indices into the tables below; index 0 keeps every fiducial point.
        self.col_gap = 0  # 4
        self.row_gap = self.col_gap  # col_gap + 1 if col_gap < 6 else col_gap
        # fiducial_point_gaps[i] sub-samples a 31-point grid down to
        # fiducial_point_num[i] points per axis.
        self.fiducial_point_gaps = [1, 2, 3, 5, 6, 10, 15, 30]  # POINTS NUM: 31, 16, 11, 7, 6, 4, 3, 2
        self.fiducial_point_num = [31, 16, 11, 7, 6, 4, 3, 2]
        self.fiducial_num = self.fiducial_point_num[self.col_gap], self.fiducial_point_num[self.row_gap]
        map_shape = (320, 320)
        self.postprocess = postprocess
        # NOTE(review): the default 'tps_gpu' matches neither branch of the
        # dispatcher below -- callers must pass 'tps' or 'interpolation'
        # explicitly (FlatImg does). Confirm whether 'tps_gpu' is a stale name.
        if self.postprocess == 'tps':
            self.tps = createThinPlateSplineShapeTransformer(map_shape, fiducial_num=self.fiducial_num, device=self.device)

    def location_mark(self, img, location, color=(0, 0, 255)):
        """Draw one filled radius-3 circle per (x, y) point in ``location`` onto ``img``.

        Mutates ``img`` in place and returns it.
        """
        stepSize = 0
        for point in location.astype(np.int64).reshape(-1, 2):
            cv2.circle(img,
                       (point[0] + math.ceil(stepSize / 2), point[1] + math.ceil(stepSize / 2)), 3, color, -1)
        return img

    def flatByfiducial_TPS(self, fiducial_points, segment, im_name, epoch, perturbed_img=None, scheme='validate', is_scaling=False):
        '''
        Rectify one image with the Thin Plate Spline transformer.

        flat_shap controls the output image resolution. Results are written to
        disk only for scheme == 'test' (marked input + rectified output).
        '''
        if scheme == 'test' or scheme == 'eval':
            perturbed_img_path = self.data_path_test + im_name
            perturbed_img = cv2.imread(perturbed_img_path, flags=cv2.IMREAD_COLOR)
            perturbed_img = cv2.resize(perturbed_img, (960, 1024))
        elif scheme == 'validate' and perturbed_img is None:
            RGB_name = im_name.replace('gw', 'png')
            perturbed_img_path = self.data_path_validate + '/png/' + RGB_name
            perturbed_img = cv2.imread(perturbed_img_path, flags=cv2.IMREAD_COLOR)
        elif perturbed_img is not None:
            # CHW tensor data -> HWC image layout.
            perturbed_img = perturbed_img.transpose(1, 2, 0)

        # Normalize predicted points into [0, 1]; 992 is the training map size
        # (assumed -- TODO confirm against the training pipeline).
        fiducial_points = fiducial_points / [992, 992]
        perturbed_img_shape = perturbed_img.shape[:2]

        sshape = fiducial_points[::self.fiducial_point_gaps[self.row_gap], ::self.fiducial_point_gaps[self.col_gap], :]
        flat_shap = segment * [self.fiducial_point_gaps[self.col_gap], self.fiducial_point_gaps[self.row_gap]] * [self.fiducial_point_num[self.col_gap], self.fiducial_point_num[self.row_gap]]
        # flat_shap = perturbed_img_shape
        time_1 = time.time()
        perturbed_img_ = torch.tensor(perturbed_img.transpose(2, 0, 1)[None, :])
        # Map normalized points into the [-1, 1] range the TPS module expects.
        fiducial_points_ = (torch.tensor(fiducial_points.transpose(1, 0, 2).reshape(-1, 2))[None, :] - 0.5) * 2
        rectified = self.tps(perturbed_img_.double().to(self.device), fiducial_points_.to(self.device), list(flat_shap))
        time_2 = time.time()
        time_interval = time_2 - time_1
        print('TPS time: ' + str(time_interval))

        flat_img = rectified[0].cpu().numpy().transpose(1, 2, 0)

        '''save'''
        flat_img = flat_img.astype(np.uint8)

        i_path = os.path.join(self.path, self.date + self.date_time + ' @' + self._re_date,
                              str(epoch)) if self._re_date is not None else os.path.join(self.path,
                                                                                         self.date + self.date_time,
                                                                                         str(epoch))
        '''''' 
        # De-normalize points back to pixel coordinates for visualization.
        perturbed_img_mark = self.location_mark(perturbed_img.copy(), sshape * perturbed_img_shape[::-1], (0, 0, 255))

        if scheme == 'test':
            i_path += '/test'
            os.makedirs(i_path, exist_ok=True)

            im_name = im_name.replace('gw', 'png')
            cv2.imwrite(i_path + '/mark_' + im_name, perturbed_img_mark)
            cv2.imwrite(i_path + '/' + im_name, flat_img)

    def flatByfiducial_interpolation(self, fiducial_points, segment, im_name, epoch, perturbed_img=None, scheme='validate', is_scaling=False):
        '''
        Rectify one image by scattered-data interpolation (scipy griddata +
        cv2.remap). Saves a side-by-side figure (marked input | flattened
        output) only for scheme == 'test'.
        '''
        if scheme == 'test' or scheme == 'eval':
            perturbed_img_path = self.data_path_test + im_name
            perturbed_img = cv2.imread(perturbed_img_path, flags=cv2.IMREAD_COLOR)
            perturbed_img = cv2.resize(perturbed_img, (960, 1024))
        elif scheme == 'validate' and perturbed_img is None:
            RGB_name = im_name.replace('gw', 'png')
            perturbed_img_path = self.data_path_validate + '/png/' + RGB_name
            perturbed_img = cv2.imread(perturbed_img_path, flags=cv2.IMREAD_COLOR)
        elif perturbed_img is not None:
            perturbed_img = perturbed_img.transpose(1, 2, 0)

        # Rescale predicted points from the 992-map to the 960x1024 image.
        fiducial_points = fiducial_points / [992, 992] * [960, 1024]
        col_gap = 2  # 4
        row_gap = col_gap  # col_gap + 1 if col_gap < 6 else col_gap
        fiducial_point_gaps = [1, 2, 3, 5, 6, 10, 15, 30]  # POINTS NUM: 31, 16, 11, 7, 6, 4, 3, 2
        sshape = fiducial_points[::fiducial_point_gaps[row_gap], ::fiducial_point_gaps[col_gap], :]
        segment_h, segment_w = segment * [fiducial_point_gaps[col_gap], fiducial_point_gaps[row_gap]]
        fiducial_points_row, fiducial_points_col = sshape.shape[:2]

        # Regular target grid the source points are mapped onto.
        im_x, im_y = np.mgrid[0:(fiducial_points_col - 1):complex(fiducial_points_col),
                              0:(fiducial_points_row - 1):complex(fiducial_points_row)]

        tshape = np.stack((im_x, im_y), axis=2) * [segment_w, segment_h]

        tshape = tshape.reshape(-1, 2)
        sshape = sshape.reshape(-1, 2)

        output_shape = (segment_h * (fiducial_points_col - 1), segment_w * (fiducial_points_row - 1))
        grid_x, grid_y = np.mgrid[0:output_shape[0] - 1:complex(output_shape[0]),
                                  0:output_shape[1] - 1:complex(output_shape[1])]
        time_1 = time.time()
        # grid_z = griddata(tshape, sshape, (grid_y, grid_x), method='cubic').astype('float32')
        grid_ = griddata(tshape, sshape, (grid_y, grid_x), method='linear').astype('float32')
        flat_img = cv2.remap(perturbed_img, grid_[:, :, 0], grid_[:, :, 1], cv2.INTER_CUBIC)
        time_2 = time.time()
        time_interval = time_2 - time_1
        print('Interpolation time: ' + str(time_interval))
        '''''' 
        flat_img = flat_img.astype(np.uint8)

        i_path = os.path.join(self.path, self.date + self.date_time + ' @' + self._re_date,
                              str(epoch)) if self._re_date is not None else os.path.join(self.path,
                                                                                         self.date + self.date_time,
                                                                                         str(epoch))
        '''''' 
        perturbed_img_mark = self.location_mark(perturbed_img.copy(), sshape, (0, 0, 255))

        shrink_paddig = 0  # 2 * edge_padding
        x_start, x_end, y_start, y_end = shrink_paddig, segment_h * (fiducial_points_col - 1) - shrink_paddig, shrink_paddig, segment_w * (fiducial_points_row - 1) - shrink_paddig

        # Center the flattened image on a canvas the size of the input image.
        x_ = (perturbed_img_mark.shape[0] - (x_end - x_start)) // 2
        y_ = (perturbed_img_mark.shape[1] - (y_end - y_start)) // 2

        flat_img_new = np.zeros_like(perturbed_img_mark)
        flat_img_new[x_:perturbed_img_mark.shape[0] - x_, y_:perturbed_img_mark.shape[1] - y_] = flat_img
        img_figure = np.concatenate(
            (perturbed_img_mark, flat_img_new), axis=1)

        if scheme == 'test':
            i_path += '/test'
            os.makedirs(i_path, exist_ok=True)

            im_name = im_name.replace('gw', 'png')
            cv2.imwrite(i_path + '/' + im_name, img_figure)

    def flatByRegressWithClassiy_multiProcessV2(self, pred_fiducial_points, pred_segment, im_name, epoch, process_pool=None, perturbed_img=None, scheme='validate', is_scaling=False):
        """Dispatch every image of the batch to the configured post-processing.

        ``process_pool`` is kept for backward compatibility with older callers
        but is unused -- both methods run in the current process.
        """
        for i_val_i in range(pred_fiducial_points.shape[0]):
            if self.postprocess == 'tps':
                self.flatByfiducial_TPS(pred_fiducial_points[i_val_i], pred_segment[i_val_i], im_name[i_val_i], epoch, None if perturbed_img is None else perturbed_img[i_val_i], scheme, is_scaling)
            elif self.postprocess == 'interpolation':
                self.flatByfiducial_interpolation(pred_fiducial_points[i_val_i], pred_segment[i_val_i], im_name[i_val_i], epoch, None if perturbed_img is None else perturbed_img[i_val_i], scheme, is_scaling)
            else:
                print('Error: Other postprocess.')
                exit()


class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero the running statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1, m=1):
        """Fold in a new observation.

        ``n`` increments the sample count; ``m`` weights the contribution of
        ``val`` to the running sum (so sum and count may move independently).
        """
        self.val = val
        self.sum += val * m
        self.count += n
        self.avg = self.sum / self.count


class FlatImg(object):
    '''
    Training/evaluation harness: owns the model, loaders and loss weights.

    self.save_flat_mage: initializes the post-processing. Select a method in
    "postprocess_list".
    '''

    def __init__(self, args, path, date, date_time, _re_date, model,
                 reslut_file, n_classes, optimizer,
                 model_D=None, optimizer_D=None,
                 loss_fn=None, loss_fn2=None, data_loader=None, data_loader_hdf5=None, dataPackage_loader=None,
                 data_path=None, data_path_validate=None, data_path_test=None, data_preproccess=True):
        self.args = args
        self.path = path
        self.date = date
        self.date_time = date_time
        self._re_date = _re_date
        self.model = model
        self.model_D = model_D
        self.reslut_file = reslut_file          # open file handle for logging results
        self.n_classes = n_classes
        self.optimizer = optimizer
        self.optimizer_D = optimizer_D
        self.loss_fn = loss_fn                  # regression loss (overall/local/edge/rectangles)
        self.loss_fn2 = loss_fn2                # segment loss
        self.data_loader = data_loader
        self.data_loader_hdf5 = data_loader_hdf5
        self.dataPackage_loader = dataPackage_loader
        self.data_path = data_path
        self.data_path_validate = data_path_validate
        self.data_path_test = data_path_test
        self.data_preproccess = data_preproccess

        # Choose the rectification backend: 'tps' (index 0) or 'interpolation'.
        postprocess_list = ['tps', 'interpolation']
        self.save_flat_mage = SaveFlatImage(self.path, self.date, self.date_time, self._re_date, self.data_path_validate, self.data_path_test, self.args.batch_size, self.data_preproccess, postprocess=postprocess_list[0], device=torch.device(self.args.device))

        self.validate_loss = AverageMeter()
        self.validate_loss_regress = AverageMeter()
        self.validate_loss_segment = AverageMeter()
        # Loss mixing weights (all 1 by default).
        self.lambda_loss = 1
        self.lambda_loss_segment = 1
        self.lambda_loss_a = 1
        self.lambda_loss_b = 1
        self.lambda_loss_c = 1

    def loadTrainData(self, data_split, is_shuffle=True):
        """Build and return the training DataLoader for ``data_split``."""
        train_loader = self.data_loader(self.data_path, split=data_split, img_shrink=self.args.img_shrink, preproccess=self.data_preproccess)
        trainloader = data.DataLoader(train_loader, batch_size=self.args.batch_size, num_workers=min(self.args.batch_size, 8), drop_last=True, pin_memory=True,
                                      shuffle=is_shuffle)
        return trainloader

    def loadValidateAndTestData(self, is_shuffle=True, sub_dir='shrink_512/crop/'):
        """Create both validation and test loaders and store them on self."""
        v1_loader = self.data_loader(self.data_path_validate, split='validate', img_shrink=self.args.img_shrink, is_return_img_name=True, preproccess=self.data_preproccess)
        valloader1 = data.DataLoader(v1_loader, batch_size=self.args.batch_size, num_workers=min(self.args.batch_size, 8), pin_memory=True,
                                     shuffle=is_shuffle)

        '''val sets'''
        v_loaderSet = {
            'v1_loader': v1_loader,
        }
        valloaderSet = {
            'valloader1': valloader1,
        }

        t1_loader = self.data_loader(self.data_path_test, split='test', img_shrink=self.args.img_shrink, is_return_img_name=True)
        testloader1 = data.DataLoader(t1_loader, batch_size=self.args.batch_size, num_workers=self.args.batch_size, pin_memory=True,
                                      shuffle=False)

        '''test sets'''
        # NOTE(review): t1_loader dict stores v1_loader here -- looks like a
        # copy/paste slip, but self.t_loaderSet is never read, so behavior is kept.
        t_loaderSet = {
            't1_loader': v1_loader,
        }
        testloaderSet = {
            'testloader1': testloader1,
        }

        self.valloaderSet = valloaderSet
        self.v_loaderSet = v_loaderSet

        self.testloaderSet = testloaderSet
        self.t_loaderSet = t_loaderSet

    def loadTestData(self, is_shuffle=True):
        """Create only the test loader and store it on self."""
        t1_loader = self.data_loader(self.data_path_test, split='test', img_shrink=self.args.img_shrink,
                                     is_return_img_name=True)
        testloader1 = data.DataLoader(t1_loader, batch_size=self.args.batch_size, num_workers=self.args.batch_size,
                                      shuffle=False)

        '''test sets'''
        testloaderSet = {
            'testloader1': testloader1,
        }

        self.testloaderSet = testloaderSet

    def evalData(self, is_shuffle=True, sub_dir='shrink_512/crop/'):
        """Create the evaluation loader and store it on self."""
        eval_loader = self.data_loader(self.data_path_test, split='eval', img_shrink=self.args.img_shrink, is_return_img_name=True)
        evalloader = data.DataLoader(eval_loader, batch_size=self.args.batch_size, num_workers=self.args.batch_size, pin_memory=True,
                                     shuffle=False)

        self.evalloaderSet = evalloader

    def saveModel_epoch(self, epoch):
        """Save model + optimizer state for ``epoch`` (stored 1-indexed)."""
        epoch += 1
        state = {'epoch': epoch,
                 'model_state': self.model.state_dict(),
                 'optimizer_state': self.optimizer.state_dict(),  # AN ERROR HAS OCCURRED
                 }
        i_path = os.path.join(self.path, self.date + self.date_time + ' @' + self._re_date,
                              str(epoch)) if self._re_date is not None else os.path.join(self.path, self.date + self.date_time, str(epoch))
        os.makedirs(i_path, exist_ok=True)

        if self._re_date is None:
            torch.save(state, i_path + '/' + self.date + self.date_time + "{}".format(self.args.arch) + ".pkl")
        else:
            torch.save(state,
                       i_path + '/' + self._re_date + "@" + self.date + self.date_time + "{}".format(
                           self.args.arch) + ".pkl")

    def validateOrTestModelV3(self, epoch, trian_t, validate_test='v_l2', is_scaling=False):
        """Run a validation ('v_l4') or test pass ('t_all' saves every image,
        anything else samples ~40%) and log timings/losses."""
        if validate_test == 'v_l4':
            self._run_validate_pass(epoch, trian_t, is_scaling)
        elif validate_test == 't_all':
            self._run_test_pass(epoch, (1, 0), is_scaling)
        else:
            self._run_test_pass(epoch, (0.4, 0.6), is_scaling)

    def _run_validate_pass(self, epoch, trian_t, is_scaling):
        """Validation loop: compute losses over the validate set and save a
        small random sample (~5%) of rectified images."""
        loss_segment_list = 0
        loss_overall_list = 0
        loss_local_list = 0
        loss_edge_list = 0
        loss_rectangles_list = 0
        loss_list = []

        begin_test = time.time()
        with torch.no_grad():
            for i_valloader, valloader in enumerate(self.valloaderSet.values()):
                for i_val, (images, labels, segment, im_name) in enumerate(valloader):
                    try:
                        # Save only ~5% of validation images to keep I/O cheap.
                        save_img_ = random.choices([True, False], weights=[0.05, 0.95])[0]

                        images = Variable(images)
                        labels = Variable(labels.cuda(self.args.device))
                        segment = Variable(segment.cuda(self.args.device))

                        outputs, outputs_segment = self.model(images)

                        loss_overall, loss_local, loss_edge, loss_rectangles = self.loss_fn(outputs, labels, size_average=True)
                        loss_segment = self.loss_fn2(outputs_segment, segment)

                        loss = self.lambda_loss * (loss_overall + loss_local + loss_edge * self.lambda_loss_a + loss_rectangles * self.lambda_loss_b) + self.lambda_loss_segment * loss_segment

                        pred_regress = outputs.data.cpu().numpy().transpose(0, 2, 3, 1)  # (B, H, W, 2)
                        pred_segment = outputs_segment.data.round().int().cpu().numpy()  # (B, H, W)

                        if save_img_:
                            self.save_flat_mage.flatByRegressWithClassiy_multiProcessV2(pred_regress,
                                                                                        pred_segment, im_name,
                                                                                        epoch + 1,
                                                                                        perturbed_img=images.numpy(), scheme='validate', is_scaling=is_scaling)
                        loss_list.append(loss.item())
                        loss_segment_list += loss_segment.item()
                        loss_overall_list += loss_overall.item()
                        loss_local_list += loss_local.item()
                    except Exception:
                        print('* save image validated error :' + im_name[0])
        test_time = time.time() - begin_test

        # NOTE(review): if every batch failed, list_len is 0 and the division
        # below raises -- behavior preserved from the original.
        list_len = len(loss_list)
        print('train time : {trian_t:.3f}\t'
              'validate time : {test_time:.3f}\t'
              '[o:{overall_avg:.4f} l:{local_avg:.4f} e:{edge_avg:.4f} r:{rectangles_avg:.4f}\t'
              '[{loss_regress:.4f} {loss_segment:.4f}]\n'.format(
                  trian_t=trian_t, test_time=test_time,
                  overall_avg=loss_overall_list / list_len, local_avg=loss_local_list / list_len, edge_avg=loss_edge_list / list_len, rectangles_avg=loss_rectangles_list / list_len,
                  loss_regress=(loss_overall_list + loss_local_list + loss_edge_list) / list_len, loss_segment=loss_segment_list / list_len))
        print('train time : {trian_t:.3f}\t'
              'validate time : {test_time:.3f}\t'
              '[o:{overall_avg:.4f} l:{local_avg:.4f} e:{edge_avg:.4f} r:{rectangles_avg:.4f}\t'
              '[{loss_regress:.4f} {loss_segment:.4f}]\n'.format(
                  trian_t=trian_t, test_time=test_time,
                  overall_avg=loss_overall_list / list_len, local_avg=loss_local_list / list_len, edge_avg=loss_edge_list / list_len, rectangles_avg=loss_rectangles_list / list_len,
                  loss_regress=(loss_overall_list + loss_local_list + loss_edge_list) / list_len, loss_segment=loss_segment_list / list_len), file=self.reslut_file)

    def _run_test_pass(self, epoch, save_weights, is_scaling):
        """Test loop shared by 't_all' and the default branch; ``save_weights``
        is the (save, skip) probability pair for sampling images to rectify."""
        begin_test = time.time()
        with torch.no_grad():
            for i_valloader, valloader in enumerate(self.testloaderSet.values()):
                for i_val, (images, im_name) in enumerate(valloader):
                    try:
                        save_img_ = random.choices([True, False], weights=list(save_weights))[0]

                        if save_img_:
                            images = Variable(images)

                            outputs, outputs_segment = self.model(images)

                            pred_regress = outputs.data.cpu().numpy().transpose(0, 2, 3, 1)
                            pred_segment = outputs_segment.data.round().int().cpu().numpy()

                            self.save_flat_mage.flatByRegressWithClassiy_multiProcessV2(pred_regress,
                                                                                        pred_segment, im_name,
                                                                                        epoch + 1,
                                                                                        scheme='test', is_scaling=is_scaling)
                    except Exception:
                        print('* save image tested error :' + im_name[0])
        test_time = time.time() - begin_test

        print('test time : {test_time:.3f}'.format(
            test_time=test_time))

        print('test time : {test_time:.3f}'.format(
            test_time=test_time),
            file=self.reslut_file)