| | |
| | """2.7 Code to be deployed 21.02.2025 |
| | |
| | Automatically generated by Colab. |
| | |
| | Original file is located at |
| | https://colab.research.google.com/drive/1RWSQn0GW_KXoHkJLcbYzLAGGyc0tiDWl |
| | """ |
| |
|
| | """## Imports""" |
| |
|
| | import numpy as np |
| | import cv2 |
| | from matplotlib import pyplot as plt |
| | import math |
| | from PIL import Image , ImageDraw, ImageFont , ImageColor |
| | import fitz |
| | import ezdxf as ez |
| | import sys |
| | from ezdxf import units |
| | |
| | from ezdxf.math import OCS, Matrix44, Vec3 |
| | import ezdxf |
| | print(ezdxf.__version__) |
| | import matplotlib.pyplot as plt |
| | from matplotlib.patches import Polygon |
| | from shapely.geometry import Point, Polygon as ShapelyPolygon |
| | from ezdxf.math import Vec2 |
| | import random |
| | import pandas as pd |
| | import google_sheet_Legend |
| | |
| | from ezdxf import bbox |
| | from math import sin, cos, radians |
| | |
| | from ezdxf.colors import aci2rgb |
| | |
| | from collections import Counter |
| |
|
| | import xml.etree.ElementTree as ET |
| | from PyPDF2 import PdfReader, PdfWriter |
| | from PyPDF2.generic import TextStringObject, NameObject, ArrayObject, FloatObject |
| | from PyPDF2.generic import NameObject, TextStringObject, DictionaryObject, FloatObject, ArrayObject, NumberObject |
| |
|
| | from typing import NewType |
| | from ctypes import sizeof |
| | from io import BytesIO |
| |
|
| |
|
| |
|
def normalize_vertices(vertices):
    """Return the vertices as a canonically ordered tuple of tuples.

    Sorting makes the original vertex order irrelevant, so two shapes with
    the same corners compare equal regardless of winding or start point.
    """
    as_tuples = [tuple(v) for v in vertices]
    as_tuples.sort()
    return tuple(as_tuples)
| |
|
def areas_are_similar(area1, area2, tolerance=0.2):
    """Return True when the two areas differ by at most *tolerance*."""
    difference = area1 - area2
    if difference < 0:
        difference = -difference
    return difference <= tolerance
| |
|
| |
|
| | |
| | """Version to be deployed of 3.2 Calculating area/perimeter |
| | |
| | Automatically generated by Colab. |
| | |
| | Original file is located at |
| | https://colab.research.google.com/drive/1XPeCoTBgWSNBYZ3aMKBteP4YG3w4bORs |
| | """ |
| |
|
| |
|
| | """## Notes""" |
| |
|
| | |
'''
This portion converts vertices read from the DXF into pixels, in order to accurately locate shapes in the image and PDF.
Ratio:
    MeasuredMetric * PixelValue / DxfMetric = MeasuredPixel
PixelValue: obtained from the pixel-conversion code; the second number in the bracket represents the perimeter.
DxfMetric: the perimeter measured in Foxit.

Dividing PixelValue by DxfMetric yields a ratio; this is your DXF ratio.
'''
| |
|
| |
|
| | """PDF to image""" |
| |
|
def pdftoimg(datadoc, pdf_content=0):
    """Render the first page of a PDF as an OpenCV BGR image.

    The PDF comes either from raw bytes (``pdf_content``) or from
    ``datadoc``; only page 0 is rasterised.  Returns ``(img, pix)`` where
    ``img`` is a BGR numpy array and ``pix`` the underlying fitz pixmap.
    """
    if pdf_content:
        doc = fitz.open(stream=pdf_content, filetype="pdf")
    else:
        doc = fitz.open('pdf', datadoc)
    pix = doc[0].get_pixmap()
    pil_image = Image.frombytes('RGB', [pix.width, pix.height], pix.samples)
    # OpenCV expects BGR channel order, PIL delivers RGB.
    img = cv2.cvtColor(np.array(pil_image), cv2.COLOR_RGB2BGR)
    print("IMAGE")
    return img, pix
| |
|
| |
|
| | |
# ISO 216 "A" series sheet sizes, expressed as (short side, long side) in inches.
# Consumed by get_paper_size_in_inches() to classify PDF page dimensions.
ISO_SIZES_INCHES = {
    "A0": (33.11, 46.81),
    "A1": (23.39, 33.11),
    "A2": (16.54, 23.39),
    "A3": (11.69, 16.54),
    "A4": (8.27, 11.69),
    "A5": (5.83, 8.27),
    "A6": (4.13, 5.83),
    "A7": (2.91, 4.13),
    "A8": (2.05, 2.91),
    "A9": (1.46, 2.05),
    "A10": (1.02, 1.46)
}
| |
|
def get_paper_size_in_inches(width, height):
    """Match a page's (width, height) in inches against the ISO A-series.

    Orientation-insensitive, with a 0.1 inch tolerance per dimension.
    Returns the size name (e.g. "A3") or "Unknown Size" when nothing fits.
    """
    for name, (short_side, long_side) in ISO_SIZES_INCHES.items():
        portrait = abs(short_side - width) < 0.1 and abs(long_side - height) < 0.1
        landscape = abs(short_side - height) < 0.1 and abs(long_side - width) < 0.1
        if portrait or landscape:
            return name
    return "Unknown Size"
| |
|
def analyze_pdf(datadoc, pdf_content=0):
    """Print every page's size and return the last page's dimensions.

    Opens the PDF from raw bytes (``pdf_content``) or from ``datadoc``,
    prints each page's size in inches with its ISO classification, and
    returns ``(width_inches, height_inches, paper_size)`` for the *last*
    page.

    Fixes over the previous version: the document is now closed even when
    an exception occurs mid-loop, and an empty PDF raises an explicit
    ValueError instead of a confusing NameError on the return line.
    """
    if pdf_content:
        pdf_document = fitz.open(stream=pdf_content, filetype="pdf")
    else:
        pdf_document = fitz.open('pdf', datadoc)
    try:
        if len(pdf_document) == 0:
            raise ValueError("PDF document contains no pages")
        for page_number in range(len(pdf_document)):
            page = pdf_document[page_number]
            rect = page.rect
            width_points, height_points = rect.width, rect.height

            # PDF user space is 72 points per inch.
            width_inches, height_inches = width_points / 72, height_points / 72

            paper_size = get_paper_size_in_inches(width_inches, height_inches)

            print(f"Page {page_number + 1}: {width_inches:.2f} x {height_inches:.2f} inches ({paper_size})")
    finally:
        pdf_document.close()
    return width_inches, height_inches, paper_size
| |
|
| |
|
def get_dxfSize(dxfpath):
    """Return overall extent values of the DXF model space.

    NOTE(review): each returned value is ``extmin + extmax`` for that axis,
    i.e. twice the bounding-box centre coordinate, not the width/height.
    For drawings that start near the origin this approximates the sheet
    size, which is how RetriveRatio() uses it — confirm this is intended.
    """
    doc = ezdxf.readfile(dxfpath)
    msp = doc.modelspace()

    # The cache speeds up repeated bounding-box queries over the same entities.
    cache = bbox.Cache()
    overall_bbox = bbox.extents(msp, cache=cache)
    print("Overall Bounding Box:", overall_bbox)
    print(overall_bbox.extmin[0]+overall_bbox.extmax[0], overall_bbox.extmin[1]+overall_bbox.extmax[1])

    return overall_bbox.extmin[0]+overall_bbox.extmax[0], overall_bbox.extmin[1]+overall_bbox.extmax[1]
| |
|
| |
|
| |
|
def switch_case(argument):
    """Map an ISO paper-size name to its DXF-to-PDF scale factor.

    Unknown names fall back to a ratio of 1.  The chosen ratio is printed
    for debugging before being returned.
    """
    ratios = {
        "A0": 1.27, "A1": 2.54, "A2": 5.08, "A3": 10.16,
        "A4": 20.32, "A5": 40.64, "A6": 81.28, "A7": 162.56,
        "A8": 325.12, "A9": 650.24, "A10": 1300.48,
    }
    ratio = ratios.get(argument, 1)
    print("Final Ratio=", ratio)
    return ratio
| |
|
| |
|
| |
|
| |
|
def RetriveRatio(datadoc, dxfpath, pdf_content=0):
    """Derive the DXF-to-PDF scale factor for a drawing.

    Compares the larger dimension of the PDF page (inches) with the larger
    dimension of the DXF extents; when they are within a plausible range
    of each other the ratio for the detected paper size is used, otherwise
    the ratio defaults to 1.
    """
    if pdf_content:
        width, height, paper_size = analyze_pdf(datadoc, pdf_content)
    else:
        width, height, paper_size = analyze_pdf(datadoc)
    bigger = width if width > height else height

    width_dxf, height_dxf = get_dxfSize(dxfpath)
    bigger_dxf = width_dxf if width_dxf > height_dxf else height_dxf

    # Only trust the paper-size lookup when the DXF and PDF extents are
    # roughly comparable in magnitude.
    if 0.2 < bigger_dxf / bigger < 1.2:
        print("bigger_dxf/bigger", bigger / bigger_dxf)
        FinalRatio = switch_case(paper_size)
    else:
        FinalRatio = 1
    return FinalRatio
| |
|
| |
|
| | """Flips image |
| | DXF origin is at the bottom left while img origin is top left |
| | """ |
| |
|
def flip(img):
    """Flip an image vertically (mirror about the horizontal axis).

    DXF coordinates have their origin at the bottom-left while image
    coordinates start at the top-left, so a vertical flip aligns the two.

    The previous implementation rotated 180 degrees with cv2.warpAffine
    about (width/2, height/2) and then mirrored horizontally — the net
    effect is the same vertical flip, but warpAffine resampled the image,
    introducing a one-pixel shift and zero-filled border artifacts.
    np.flipud performs the flip exactly, with no interpolation.
    """
    return np.ascontiguousarray(np.flipud(img))
| |
|
| |
|
| |
|
def aci_to_rgb(aci):
    """Translate an AutoCAD Color Index (ACI) into an (R, G, B) tuple.

    Indices outside the table (including anything above 255) fall back to
    white (255, 255, 255).
    """
    aci_rgb_map = {
        0: (0, 0, 0), 1: (255, 0, 0), 2: (255, 255, 0), 3: (0, 255, 0), 4: (0, 255, 255),
        5: (0, 0, 255), 6: (255, 0, 255), 7: (255, 255, 255), 8: (65, 65, 65), 9: (128, 128, 128),
        10: (255, 0, 0), 11: (255, 170, 170), 12: (189, 0, 0), 13: (189, 126, 126), 14: (129, 0, 0),
        15: (129, 86, 86), 16: (104, 0, 0), 17: (104, 69, 69), 18: (79, 0, 0), 19: (79, 53, 53),
        20: (255, 63, 0), 21: (255, 191, 170), 22: (189, 46, 0), 23: (189, 141, 126), 24: (129, 31, 0),
        25: (129, 96, 86), 26: (104, 25, 0), 27: (104, 78, 69), 28: (79, 19, 0), 29: (79, 59, 53),
        30: (255, 127, 0), 31: (255, 212, 170), 32: (189, 94, 0), 33: (189, 157, 126), 34: (129, 64, 0),
        35: (129, 107, 86), 36: (104, 52, 0), 37: (104, 86, 69), 38: (79, 39, 0), 39: (79, 66, 53),
        40: (255, 191, 0), 41: (255, 234, 170), 42: (189, 141, 0), 43: (189, 173, 126), 44: (129, 96, 0),
        45: (129, 118, 86), 46: (104, 78, 0), 47: (104, 95, 69), 48: (79, 59, 0), 49: (79, 73, 53),
        50: (255, 255, 0), 51: (255, 255, 170), 52: (189, 189, 0), 53: (189, 189, 126), 54: (129, 129, 0),
        55: (129, 129, 86), 56: (104, 104, 0), 57: (104, 104, 69), 58: (79, 79, 0), 59: (79, 79, 53),
        60: (191, 255, 0), 61: (234, 255, 170), 62: (141, 189, 0), 63: (173, 189, 126), 64: (96, 129, 0),
        65: (118, 129, 86), 66: (78, 104, 0), 67: (95, 104, 69), 68: (59, 79, 0), 69: (73, 79, 53),
        70: (127, 255, 0), 71: (212, 255, 170), 72: (94, 189, 0), 73: (157, 189, 126), 74: (64, 129, 0),
        75: (107, 129, 86), 76: (52, 104, 0), 77: (86, 104, 69), 78: (39, 79, 0), 79: (66, 79, 53),
        80: (63, 255, 0), 81: (191, 255, 170), 82: (46, 189, 0), 83: (141, 189, 126), 84: (31, 129, 0),
        85: (96, 129, 86), 86: (25, 104, 0), 87: (78, 104, 69), 88: (19, 79, 0), 89: (59, 79, 53),
        90: (0, 255, 0), 91: (170, 255, 170), 92: (0, 189, 0), 93: (126, 189, 126), 94: (0, 129, 0),
        95: (86, 129, 86), 96: (0, 104, 0), 97: (69, 104, 69), 98: (0, 79, 0), 99: (53, 79, 53),
        100: (0, 255, 63), 101: (170, 255, 191), 102: (0, 189, 46), 103: (126, 189, 141), 104: (0, 129, 31),
        105: (86, 129, 96), 106: (0, 104, 25), 107: (69, 104, 78), 108: (0, 79, 19), 109: (53, 79, 59),
        110: (0, 255, 127), 111: (170, 255, 212), 112: (0, 189, 94), 113: (126, 189, 157), 114: (0, 129, 64),
        115: (86, 129, 107), 116: (0, 104, 52), 117: (69, 104, 86), 118: (0, 79, 39), 119: (53, 79, 66),
        120: (0, 255, 191), 121: (170, 255, 234), 122: (0, 189, 141), 123: (126, 189, 173), 124: (0, 129, 96),
        125: (86, 129, 118), 126: (0, 104, 78), 127: (69, 104, 95), 128: (0, 79, 59), 129: (53, 79, 73),
        130: (0, 255, 255), 131: (170, 255, 255), 132: (0, 189, 189), 133: (126, 189, 189), 134: (0, 129, 129),
        135: (86, 129, 129), 136: (0, 104, 104), 137: (69, 104, 104), 138: (0, 79, 79), 139: (53, 79, 79),
        140: (0, 191, 255), 141: (170, 234, 255), 142: (0, 141, 189), 143: (126, 173, 189), 144: (0, 96, 129),
        145: (86, 118, 129), 146: (0, 78, 104), 147: (69, 95, 104), 148: (0, 59, 79), 149: (53, 73, 79),
        150: (0, 127, 255), 151: (170, 212, 255), 152: (0, 94, 189), 153: (126, 157, 189), 154: (0, 64, 129),
        155: (86, 107, 129), 156: (0, 52, 104), 157: (69, 86, 104), 158: (0, 39, 79), 159: (53, 66, 79),
        160: (0, 63, 255), 161: (170, 191, 255), 162: (0, 46, 189), 163: (126, 141, 189), 164: (0, 31, 129),
        165: (86, 96, 129), 166: (0, 25, 104), 167: (69, 78, 104), 168: (0, 19, 79), 169: (53, 59, 79),
        170: (0, 0, 255), 171: (170, 170, 255), 172: (0, 0, 189), 173: (126, 126, 189), 174: (0, 0, 129),
        175: (86, 86, 129), 176: (0, 0, 104), 177: (69, 69, 104), 178: (0, 0, 79), 179: (53, 53, 79),
        180: (63, 0, 255), 181: (191, 170, 255), 182: (46, 0, 189), 183: (141, 126, 189), 184: (31, 0, 129),
        185: (96, 86, 129), 186: (25, 0, 104), 187: (78, 69, 104), 188: (19, 0, 79), 189: (59, 53, 79),
        190: (127, 0, 255), 191: (212, 170, 255), 192: (94, 0, 189), 193: (157, 126, 189), 194: (64, 0, 129),
        195: (107, 86, 129), 196: (52, 0, 104), 197: (86, 69, 104), 198: (39, 0, 79), 199: (66, 53, 79),
        200: (191, 0, 255), 201: (234, 170, 255), 202: (141, 0, 189), 203: (173, 126, 189), 204: (96, 0, 129),
        205: (118, 86, 129), 206: (78, 0, 104), 207: (95, 69, 104), 208: (59, 0, 79), 209: (73, 53, 79),
        210: (255, 0, 255), 211: (255, 170, 255), 212: (189, 0, 189), 213: (189, 126, 189), 214: (129, 0, 129),
        215: (129, 86, 129), 216: (104, 0, 104), 217: (104, 69, 104), 218: (79, 0, 79), 219: (79, 53, 79),
        220: (255, 0, 191), 221: (255, 170, 234), 222: (189, 0, 141), 223: (189, 126, 173), 224: (129, 0, 96),
        225: (129, 86, 118), 226: (104, 0, 78), 227: (104, 69, 95), 228: (79, 0, 59), 229: (79, 53, 73),
        230: (255, 0, 127), 231: (255, 170, 212), 232: (189, 0, 94), 233: (189, 126, 157), 234: (129, 0, 64),
        235: (129, 86, 107), 236: (104, 0, 52), 237: (104, 69, 86), 238: (79, 0, 39), 239: (79, 53, 66),
        240: (255, 0, 63), 241: (255, 170, 191), 242: (189, 0, 46), 243: (189, 126, 141), 244: (129, 0, 31),
        245: (129, 86, 96), 246: (104, 0, 25), 247: (104, 69, 78), 248: (79, 0, 19), 249: (79, 53, 59),
        250: (51, 51, 51), 251: (80, 80, 80), 252: (105, 105, 105), 253: (130, 130, 130),
        254: (190, 190, 190), 255: (255, 255, 255),
    }
    return aci_rgb_map.get(aci, (255, 255, 255))
| |
|
| |
|
def int_to_rgb(color_int):
    """Split a packed 24-bit 0xRRGGBB integer into an (R, G, B) tuple."""
    return ((color_int >> 16) & 0xFF, (color_int >> 8) & 0xFF, color_int & 0xFF)
| |
|
| |
|
def get_hatch_color(entity):
    """Resolve the display colour of a DXF entity as an (R, G, B) tuple.

    Resolution order:
      1. explicit 24-bit true colour on the entity,
      2. a valid ACI colour index (1..255),
      3. for colour index 0 (BYBLOCK), the colour of the entity's layer.
    White (255, 255, 255) is the fallback at every step, including for a
    falsy/missing entity.
    """
    white = (255, 255, 255)
    if not entity:
        return white

    # An explicit true colour wins over any indexed colour.
    if entity.dxf.hasattr('true_color'):
        return int_to_rgb(entity.dxf.true_color)

    color_index = entity.dxf.color
    if 1 <= color_index <= 255:
        return aci_to_rgb(color_index)

    # Colour 0: inherit from the entity's layer, if it exists.
    if color_index == 0:
        layer = entity.doc.layers.get(entity.dxf.layer)
        if layer:
            return aci_to_rgb(layer.dxf.color)
        return white

    return white
| |
|
| |
|
| |
|
def point_in_rectangle(point, rect_coords):
    """Return True when *point* lies inside the axis-aligned rectangle.

    ``rect_coords`` is ``((x1, y1), (x2, y2))``; assumes (x1, y1) is the
    lower-left corner and (x2, y2) the upper-right (boundaries inclusive).
    """
    (min_x, min_y), (max_x, max_y) = rect_coords
    px, py = point
    return min_x <= px <= max_x and min_y <= py <= max_y
| |
|
| | from math import sqrt |
| |
|
def euclidean_distance(point1, point2):
    """Return the straight-line distance between two 2-D points."""
    dx = point2[0] - point1[0]
    dy = point2[1] - point1[1]
    return sqrt(dx ** 2 + dy ** 2)
| |
|
def compute_hatch_centroid(hatch):
    """Return the average of all boundary coordinates of a HATCH entity.

    Handles both polyline paths (their vertices) and edge paths (the start
    and end point of each edge).  Returns an ``(x, y)`` tuple, or ``None``
    when the hatch exposes no usable coordinates.
    """
    xs, ys = [], []
    for path in hatch.paths:
        if path.PATH_TYPE == "PolylinePath":
            for vertex in path.vertices:
                xs.append(vertex[0])
                ys.append(vertex[1])
        elif path.PATH_TYPE == "EdgePath":
            for edge in path.edges:
                if hasattr(edge, "start"):
                    xs.append(edge.start[0])
                    ys.append(edge.start[1])
                if hasattr(edge, "end"):
                    xs.append(edge.end[0])
                    ys.append(edge.end[1])
    if xs and ys:
        return (sum(xs) / len(xs), sum(ys) / len(ys))
    return None
| |
|
| | """### Hatched areas""" |
def get_hatched_areas(datadoc,filename,FinalRatio,rotationangle,SearchArray):
    """Extract hatched/filled shapes and legend texts from a DXF file.

    For each search spec in ``SearchArray`` (rows of the form
    ``[prefix, nbs_prefix, length]``) the model space text entities are
    scanned for matches; each match is paired with a nearby hatch colour
    and collected into ``text_with_positions``.  Then every HATCH, SOLID,
    LWPOLYLINE, POLYLINE and SPLINE entity is converted into a polygon
    (scaled by ``FinalRatio``), de-duplicated, and collected into
    ``hatched_areas``.

    NOTE(review): ``datadoc`` and ``rotationangle`` are not referenced in
    this body — confirm whether they can be dropped from the signature.

    Returns:
        (sorted_data, text_with_positions) where sorted_data is
        ``[vertices, area, length-or-perimeter, rgb_color]`` entries
        sorted by area ascending.
    """
    print("SearchArray = ",SearchArray)

    doc = ezdxf.readfile(filename)
    doc.header['$MEASUREMENT'] = 1  # force metric measurement units
    msp = doc.modelspace()
    trial=0          # NOTE(review): unused
    hatched_areas = []
    threshold=0.001  # NOTE(review): unused
    TextFound = 0    # NOTE(review): unused
    j=0              # NOTE(review): unused
    unique_shapes = []  # (normalized_vertices, area) pairs already recorded

    text_with_positions = []
    text_color_mapping = {}  # NOTE(review): unused
    color_palette = [
        (255, 0, 0), (0, 0, 255), (0, 255, 255), (0, 64, 0), (255, 204, 0),
        (255, 128, 64), (255, 0, 128), (255, 128, 192), (128, 128, 255),
        (128, 64, 0), (0, 255, 0), (0, 200, 0), (255, 128, 255), (128, 0, 255),
        (0, 128, 192), (128, 0, 128), (128, 0, 0), (0, 128, 255), (149, 1, 70),
        (255, 182, 128), (222, 48, 71), (240, 0, 112), (255, 0, 255),
        (192, 46, 65), (0, 0, 128), (0, 128, 64), (255, 255, 0), (128, 0, 80),
        (255, 255, 128), (90, 255, 140), (255, 200, 20), (91, 16, 51),
        (90, 105, 138), (114, 10, 138), (36, 82, 78), (225, 105, 190),
        (108, 150, 170), (11, 35, 75), (42, 176, 170), (255, 176, 170),
        (209, 151, 15), (81, 27, 85), (226, 106, 122), (67, 119, 149),
        (159, 179, 140), (159, 179, 30), (255, 85, 198), (255, 27, 85),
        (188, 158, 8), (140, 188, 120), (59, 61, 52), (65, 81, 21),
        (212, 255, 174), (15, 164, 90), (41, 217, 245), (213, 23, 182),
        (11, 85, 169), (78, 153, 239), (0, 66, 141), (64, 98, 232),
        (140, 112, 255), (57, 33, 154), (194, 117, 252), (116, 92, 135),
        (74, 43, 98), (188, 13, 123), (129, 58, 91), (255, 128, 100),
        (171, 122, 145), (255, 98, 98), (222, 48, 77)
    ]
    import re

    text_with_positions = []

    # ------------------------------------------------------------------
    # Phase 1: locate legend texts that match the search specs and pair
    # each with the colour of its nearest hatch.
    # NOTE(review): calculate_distance() is not defined in this portion of
    # the file — presumably it is euclidean_distance (or defined elsewhere
    # in the module); confirm it exists at runtime.
    # ------------------------------------------------------------------
    if(SearchArray):
        for i in range(len(SearchArray)):

            # Case 1: prefix + NBS prefix + exact length all provided.
            if (SearchArray[i][0] and SearchArray[i][1] and SearchArray[i][2]):
                for text_entity in doc.modelspace().query('TEXT MTEXT'):
                    text = text_entity.text.strip() if hasattr(text_entity, 'text') else ""

                    if(text.startswith(SearchArray[i][0]) and len(text)==int(SearchArray[i][2])):
                        position = text_entity.dxf.insert
                        x, y = position.x, position.y

                        # Find the NBS label aligned (same row or column)
                        # with this text; for-else leaves it None if no
                        # aligned label exists.
                        for text_entity in doc.modelspace().query('TEXT MTEXT'):
                            NBS = text_entity.text.strip() if hasattr(text_entity, 'text') else ""
                            if (NBS.startswith(SearchArray[i][1])):
                                positionNBS = text_entity.dxf.insert
                                xNBS, yNBS = positionNBS.x, positionNBS.y

                                if(x == xNBS or y == yNBS):
                                    textNBS=NBS
                                    break
                        else:
                            textNBS = None

                        # Pick the colour of the nearest non-white hatch.
                        nearest_hatch = None
                        min_distance = float('inf')
                        detected_color = (255, 255, 255)

                        for hatch in doc.modelspace().query('HATCH'):
                            if hatch.paths:
                                for path in hatch.paths:
                                    if path.type == 1:  # polyline boundary
                                        vertices = [v[:2] for v in path.vertices]

                                        centroid_x = sum(v[0] for v in vertices) / len(vertices)
                                        centroid_y = sum(v[1] for v in vertices) / len(vertices)
                                        centroid = (centroid_x, centroid_y)

                                        distance = calculate_distance((x, y), centroid)

                                        if distance < min_distance:
                                            min_distance = distance
                                            nearest_hatch = hatch

                                            # Stop once a coloured hatch
                                            # has been found.
                                            current_color = get_hatch_color(hatch)
                                            if current_color != (255, 255, 255):
                                                detected_color = current_color
                                                break

                        text_with_positions.append([text, textNBS, (x, y), detected_color])
                        print("text_with_positions=",text_with_positions)

            # Case 2: prefix + exact length, no NBS prefix.
            elif (SearchArray[i][0] and SearchArray[i][2]):
                for text_entity in doc.modelspace().query('TEXT MTEXT'):
                    text = text_entity.text.strip() if hasattr(text_entity, 'text') else ""

                    if(text.startswith(SearchArray[i][0]) and len(text)==int(SearchArray[i][2])):
                        position = text_entity.dxf.insert
                        x, y = position.x, position.y
                        textNBS = None
                        nearest_hatch = None
                        min_distance = float('inf')
                        detected_color = (255, 255, 255)

                        for hatch in doc.modelspace().query('HATCH'):
                            if hatch.paths:
                                for path in hatch.paths:
                                    if path.type == 1:
                                        vertices = [v[:2] for v in path.vertices]

                                        centroid_x = sum(v[0] for v in vertices) / len(vertices)
                                        centroid_y = sum(v[1] for v in vertices) / len(vertices)
                                        centroid = (centroid_x, centroid_y)

                                        distance = calculate_distance((x, y), centroid)

                                        if distance < min_distance:
                                            min_distance = distance
                                            nearest_hatch = hatch

                                            current_color = get_hatch_color(hatch)
                                            if current_color != (255, 255, 255):
                                                detected_color = current_color
                                                break

                        text_with_positions.append([text, textNBS, (x, y), detected_color])
                        print("text_with_positions=",text_with_positions)

            # Case 3: only the prefix is provided.
            elif(SearchArray[i][0]):
                for text_entity in doc.modelspace().query('TEXT MTEXT'):
                    text = text_entity.text.strip() if hasattr(text_entity, 'text') else ""

                    if(text.startswith(SearchArray[i][0])):
                        position = text_entity.dxf.insert
                        x, y = position.x, position.y
                        textNBS = None
                        nearest_hatch = None
                        min_distance = float('inf')
                        detected_color = (255, 255, 255)

                        for hatch in doc.modelspace().query('HATCH'):
                            if hatch.paths:
                                for path in hatch.paths:
                                    if path.type == 1:
                                        vertices = [v[:2] for v in path.vertices]

                                        centroid_x = sum(v[0] for v in vertices) / len(vertices)
                                        centroid_y = sum(v[1] for v in vertices) / len(vertices)
                                        centroid = (centroid_x, centroid_y)

                                        distance = calculate_distance((x, y), centroid)

                                        if distance < min_distance:
                                            min_distance = distance
                                            nearest_hatch = hatch

                                            current_color = get_hatch_color(hatch)
                                            if current_color != (255, 255, 255):
                                                detected_color = current_color
                                                break

                        text_with_positions.append([text, textNBS, (x, y), detected_color])
                        print("text_with_positions=",text_with_positions)

    # ------------------------------------------------------------------
    # Phase 2: collapse duplicate texts — keep the entry that carries an
    # NBS label when one exists, otherwise the first occurrence.
    # ------------------------------------------------------------------
    grouped = {}
    for entry in text_with_positions:
        key = entry[0]
        grouped.setdefault(key, []).append(entry)

    filtered_results = []
    for key, entries in grouped.items():
        complete = next((entry for entry in entries if entry[1] is not None), None)
        if complete:
            filtered_results.append(complete)
        else:
            filtered_results.append(entries[0])

    text_with_positions=filtered_results

    # ------------------------------------------------------------------
    # Phase 3: collect every filled/outlined shape as a polygon, scaled by
    # FinalRatio, skipping tiny shapes and duplicates.
    # ------------------------------------------------------------------
    for entity in msp:
        if entity.dxftype() == 'HATCH':

            cntPoints=[]  # NOTE(review): unused
            for path in entity.paths:

                vertices = []

                # Polyline boundary path.
                if str(path.type) == 'BoundaryPathType.POLYLINE' or path.type == 1:
                    vertices = [(vertex[0] * FinalRatio, vertex[1] * FinalRatio) for vertex in path.vertices]

                    if len(vertices) > 3:
                        poly = ShapelyPolygon(vertices)
                        minx, miny, maxx, maxy = poly.bounds
                        width = maxx - minx
                        height = maxy - miny

                        # Ignore degenerate / sub-0.2-unit shapes.
                        if (poly.area > 0 and (height > 0.2 or width > 0.2)):

                            # length = larger of the two bbox dimensions.
                            length = height
                            if(width > length):
                                length = width

                            area1 = round(poly.area, 3)
                            perimeter = round(poly.length, 3)

                            normalized_vertices = normalize_vertices(vertices)

                            rgb_color = get_hatch_color(entity)

                            # Duplicate = same vertex set AND similar area.
                            duplicate_found = False
                            for existing_vertices, existing_area in unique_shapes:
                                if normalized_vertices == existing_vertices and areas_are_similar(area1, existing_area):
                                    duplicate_found = True
                                    break

                            if not duplicate_found:

                                unique_shapes.append((normalized_vertices, area1))

                                if length > 0.6:
                                    hatched_areas.append([vertices, area1, length, rgb_color])

                # Edge boundary path: collect each edge's endpoints.
                elif str(path.type) == 'BoundaryPathType.EDGE' or path.type == 2:

                    vert = []
                    for edge in path.edges:
                        x, y = edge.start
                        x1, y1 = edge.end
                        vert.append((x * FinalRatio, y * FinalRatio))
                        vert.append((x1 * FinalRatio, y1 * FinalRatio))

                    poly = ShapelyPolygon(vert)
                    minx, miny, maxx, maxy = poly.bounds
                    width = maxx - minx
                    height = maxy - miny

                    if (poly.area > 0 and (height > 0.2 or width > 0.2)):

                        length = height
                        if(width > length):
                            length = width

                        area1 = round(poly.area, 3)
                        perimeter = round(poly.length, 3)
                        normalized_vertices = normalize_vertices(vert)
                        rgb_color = get_hatch_color(entity)

                        duplicate_found = False
                        for existing_vertices, existing_area in unique_shapes:
                            if normalized_vertices == existing_vertices and areas_are_similar(area1, existing_area):
                                duplicate_found = True
                                break

                        if not duplicate_found:

                            unique_shapes.append((normalized_vertices, area1))

                            if length > 0.6:
                                hatched_areas.append([vert, area1, length, rgb_color])

                else:
                    print(f"Encountered path type: {path.type}")

        elif entity.dxftype() == 'SOLID':

            # A SOLID is defined by its four corner vertices.
            vertices = [entity.dxf.vtx0 * (FinalRatio), entity.dxf.vtx1* (FinalRatio), entity.dxf.vtx2* (FinalRatio), entity.dxf.vtx3* (FinalRatio)]
            poly = ShapelyPolygon(vertices)
            minx, miny, maxx, maxy = poly.bounds

            width = maxx - minx
            height = maxy - miny

            if (poly.area > 0 and (height > 0 and width > 0)):
                area1 = round(poly.area, 3)
                perimeter = round(poly.length, 3)
                normalized_vertices = normalize_vertices(vertices)

                # NOTE(review): unlike HATCH, duplicates here use OR —
                # matching vertices OR a similar area suffices.
                duplicate_found = False
                for existing_vertices, existing_area in unique_shapes:
                    if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
                        duplicate_found = True
                        break

                if not duplicate_found:
                    rgb_color = get_hatch_color(entity)
                    unique_shapes.append((normalized_vertices, area1))
                    hatched_areas.append([vertices, area1, perimeter, rgb_color])

        elif entity.dxftype() == 'LWPOLYLINE':

            vertices = []
            lwpolyline = entity
            points = lwpolyline.get_points()
            flag = 0  # NOTE(review): unused

            for i in range(len(points)):
                vertices.append([points[i][0] * FinalRatio, points[i][1] * FinalRatio])

            if len(vertices) > 3:
                # Only treat the polyline as a shape when it closes back on
                # its start in x or y.
                if vertices[0][0] == vertices[-1][0] or vertices[0][1] == vertices[-1][1]:
                    poly = ShapelyPolygon(vertices)
                    minx, miny, maxx, maxy = poly.bounds

                    width = maxx - minx
                    height = maxy - miny

                    if (poly.area > 0 and (height > 0 and width > 0)):
                        area1 = round(poly.area, 3)
                        perimeter = round(poly.length, 3)
                        normalized_vertices = normalize_vertices(vertices)

                        duplicate_found = False
                        for existing_vertices, existing_area in unique_shapes:
                            if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
                                duplicate_found = True
                                break

                        if not duplicate_found:
                            rgb_color = get_hatch_color(entity)
                            unique_shapes.append((normalized_vertices, area1))
                            hatched_areas.append([vertices, area1, perimeter, rgb_color])

        elif entity.dxftype() == 'POLYLINE':

            flag=0  # NOTE(review): unused
            vertices = [(v.dxf.location.x * (FinalRatio), v.dxf.location.y * (FinalRatio)) for v in entity.vertices]

            if(len(vertices)>3):

                if(vertices[0][0] == vertices[len(vertices)-1][0] or vertices[0][1] == vertices[len(vertices)-1][1]):

                    poly=ShapelyPolygon(vertices)
                    minx, miny, maxx, maxy = poly.bounds

                    width = maxx - minx
                    height = maxy - miny

                    if (poly.area > 0 and (height > 0 and width > 0)):
                        area1 = round(poly.area,3)
                        perimeter = round (poly.length,3)
                        normalized_vertices = normalize_vertices(vertices)

                        duplicate_found = False
                        for existing_vertices, existing_area in unique_shapes:
                            if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
                                duplicate_found = True
                                break

                        if not duplicate_found:
                            rgb_color = get_hatch_color(entity)
                            unique_shapes.append((normalized_vertices, area1))
                            hatched_areas.append([vertices, area1, perimeter, rgb_color])

        elif entity.dxftype() == 'SPLINE':

            # Approximate the spline by the polygon of its control points.
            spline_entity = entity
            vertices = []
            control_points = spline_entity.control_points
            if(len(control_points)>3):
                for i in range(len(control_points)):
                    vertices.append([control_points[i][0]* (FinalRatio),control_points[i][1]* (FinalRatio)])
                poly=ShapelyPolygon(vertices)

                minx, miny, maxx, maxy = poly.bounds

                width = maxx - minx
                height = maxy - miny

                if (poly.area > 0 and (height > 0 and width > 0)):
                    area1 = round(poly.area,3)
                    perimeter = round (poly.length,3)
                    normalized_vertices = normalize_vertices(vertices)

                    duplicate_found = False
                    for existing_vertices, existing_area in unique_shapes:
                        if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
                            duplicate_found = True
                            break

                    if not duplicate_found:
                        rgb_color = get_hatch_color(entity)
                        unique_shapes.append((normalized_vertices, area1))
                        hatched_areas.append([vertices, area1, perimeter, rgb_color])

    # Smallest areas first.
    sorted_data = sorted(hatched_areas, key=lambda x: x[1])
    return sorted_data,text_with_positions
| |
|
| |
|
| | """### Rotate polygon""" |
| |
|
| |
|
| |
|
def rotate_point(point, angle, pdfrotation, width, height, center_point=(0, 0)):
    """Rotate *point* counter-clockwise by *angle* degrees about *center_point*,
    then translate it into page coordinates.

    When the PDF page itself is rotated (``pdfrotation != 0``) the rotated
    point is shifted right by *width*; otherwise it is shifted up by
    *height* (compensating for the DXF/image origin difference).
    """
    theta = radians(angle % 360)

    # Move the rotation centre to the origin, rotate, then translate back.
    rel_x = point[0] - center_point[0]
    rel_y = point[1] - center_point[1]
    rot_x = rel_x * cos(theta) - rel_y * sin(theta)
    rot_y = rel_x * sin(theta) + rel_y * cos(theta)

    if pdfrotation != 0:
        return (rot_x + width + center_point[0], rot_y + center_point[1])
    return (rot_x + center_point[0], rot_y + height + center_point[1])
| |
|
| |
|
def rotate_polygon(polygon, angle, pdfrotation, width, height, center_point=(0, 0)):
    """Rotate every corner of *polygon* counter-clockwise by *angle* degrees.

    Delegates the per-corner work (rotation plus page-offset translation)
    to rotate_point() and returns the corners as a new list.
    """
    return [
        rotate_point(corner, angle, pdfrotation, width, height, center_point)
        for corner in polygon
    ]
| |
|
| | |
| | |
| | |
| | |
| | |
| |
|
| |
|
| |
|
def Create_DF(dxfpath,datadoc,hatched_areas,pdf_content=0):
    """Aggregate hatched shapes into a summary DataFrame.

    Each shape in ``hatched_areas`` is ``[vertices, area, perimeter/length,
    rgb_color]``.  Shapes whose area AND perimeter match an existing row
    exactly (tol is 0) increment that row's 'Occurences' and refresh its
    totals; otherwise a new row is appended.

    NOTE(review): FinalRatio is computed but never used in this body —
    confirm whether the RetriveRatio call (and its side-effect prints) is
    still needed here.
    """
    if pdf_content:
        FinalRatio= RetriveRatio(datadoc,dxfpath,pdf_content)
    else:
        FinalRatio= RetriveRatio(datadoc,dxfpath)

    SimilarAreaDictionary= pd.DataFrame(columns=['Guess','Color','Occurences','Area','Total Area','Perimeter','Total Perimeter','Length','Total Length','Texts','Comments'])

    TotalArea=0
    TotalPerimeter=0
    for shape in hatched_areas:
        area = shape[1]
        perimeter = shape[2]

        TotalArea = area
        TotalPerimeter = perimeter
        tol=0
        # Exact match (tolerance 0) on both area and perimeter identifies
        # a repeat of an already-seen shape.
        condition1 = (SimilarAreaDictionary['Area'] >= area - tol) & (SimilarAreaDictionary['Area'] <= area +tol)
        condition2 = (SimilarAreaDictionary['Perimeter'] >= perimeter -tol) & (SimilarAreaDictionary['Perimeter'] <= perimeter +tol)
        combined_condition = condition1 & condition2

        if any(combined_condition):
            # Existing row: bump the count and recompute the totals.
            index = np.where(combined_condition)[0][0]
            SimilarAreaDictionary.at[index, 'Occurences'] += 1
            SimilarAreaDictionary.at[index, 'Total Area'] = SimilarAreaDictionary.at[index, 'Area'] * SimilarAreaDictionary.at[index, 'Occurences']
            SimilarAreaDictionary.at[index, 'Total Perimeter'] = SimilarAreaDictionary.at[index, 'Perimeter'] * SimilarAreaDictionary.at[index, 'Occurences']
        else:
            TotalArea=area
            TotalPerimeter=perimeter

            new_data = {'Area': area, 'Total Area': TotalArea ,'Perimeter': perimeter, 'Total Perimeter': TotalPerimeter, 'Occurences': 1, 'Color':shape[3],'Comments':''}
            SimilarAreaDictionary = pd.concat([SimilarAreaDictionary, pd.DataFrame([new_data])], ignore_index=True)

    return SimilarAreaDictionary
| | """### Draw on Image and PDF""" |
| |
|
| | |
| |
|
def color_distance(color1, color2):
    """Coarse similarity score between two RGB colours.

    Returns 1 when every one of the first three channels differs by less
    than 20 (colours considered "the same"), otherwise 100.  Callers
    compare the result against a threshold, so only these two magic
    values matter.
    """
    # Debug prints removed: they spammed stdout on every comparison.
    if (abs(color1[0] - color2[0]) < 20 and
            abs(color1[1] - color2[1]) < 20 and
            abs(color1[2] - color2[2]) < 20):
        return 1
    return 100
| | |
| |
|
| | |
def unify_colors(df, threshold=20):
    """Collapse near-identical adjacent colours in df['Color'].

    Walks the rows in order; whenever the following row's colour is within
    *threshold* of the current one (per color_distance), the following row
    is overwritten with the current colour.  Mutates and returns df.
    """
    # Normalise list-valued colours to tuples so comparisons behave.
    df['Color'] = df['Color'].apply(
        lambda value: tuple(value) if isinstance(value, list) else value
    )

    for row in range(len(df) - 1):
        this_color = df.at[row, 'Color']
        following_color = df.at[row + 1, 'Color']
        if color_distance(this_color, following_color) <= threshold:
            df.at[row + 1, 'Color'] = this_color

    return df
| |
|
def normalize_color(color):
    """Convert PDF color (range 0-1) to RGB (range 0-255)."""
    channels = []
    for component in color:
        scaled = round(component * 255)
        # Clamp into the valid byte range.
        channels.append(255 if scaled > 255 else 0 if scaled < 0 else scaled)
    return tuple(channels)
| |
|
| |
|
def color_close_enough(c1, c2, threshold=10):
    """True when each paired channel of c1 and c2 differs by at most threshold."""
    for channel_a, channel_b in zip(c1, c2):
        if abs(channel_a - channel_b) > threshold:
            return False
    return True
| |
|
def adjustannotations(OutputPdfStage1, text_with_positions):
    """Post-process annotations of an in-memory PDF and return new PDF bytes.

    Two fixes are applied to every annotation of every page:
      * "/Line" annotations whose subject contains "Perimeter Measurement"
        get a "/Measure" dictionary (scale factor 1, unit "m") attached and
        are reclassified as "/LineDimension" with subject
        "Length Measurement".
      * "/Line" and "/PolyLine" annotations that carry a colour ("/C") get
        their title ("/T") set from the first entry of text_with_positions
        whose colour is close to the annotation's colour.

    Args:
        OutputPdfStage1: The PDF document as raw bytes (despite the name,
            this is data, not a path).
        text_with_positions: Iterable of (text, NBS, position, color)
            tuples; position is ignored here.

    Returns:
        bytes: The rewritten PDF.
    """
    input_pdf_path = OutputPdfStage1  # NOTE(review): unused local
    output_pdf_path = "Final-WallsAdjusted.pdf"  # only used in the log line below

    pdf_bytes_io = BytesIO(OutputPdfStage1)

    reader = PdfReader(pdf_bytes_io)
    writer = PdfWriter()

    # Copy every page into the writer so annotations can be edited in place.
    writer.append_pages_from_reader(reader)

    # Preserve the original document metadata.
    metadata = reader.metadata
    writer.add_metadata(metadata)

    for page_index, page in enumerate(writer.pages):
        if "/Annots" in page:
            annotations = page["/Annots"]
            for annot_index, annot in enumerate(annotations):
                obj = annot.get_object()

                if obj.get("/Subtype") == "/Line":
                    if "/Subj" in obj and "Perimeter Measurement" in obj["/Subj"]:
                        # Attach a measurement dictionary (scale 1, metres)
                        # and relabel the annotation as a length dimension.
                        obj.update({
                            NameObject("/Measure"): DictionaryObject({
                                NameObject("/Type"): NameObject("/Measure"),
                                NameObject("/L"): DictionaryObject({
                                    NameObject("/G"): FloatObject(1),
                                    NameObject("/U"): TextStringObject("m"),
                                }),
                            }),
                            NameObject("/IT"): NameObject("/LineDimension"),
                            NameObject("/Subj"): TextStringObject("Length Measurement"),
                        })

                if obj.get("/Subtype") in ["/Line", "/PolyLine"] and "/C" in obj:
                    # Match the annotation's stroke colour against the
                    # extracted texts; first close-enough colour wins.
                    annot_color = normalize_color(obj["/C"])
                    matched_entry = next(
                        ((text, NBS) for text, NBS, _, color in text_with_positions if color_close_enough(annot_color, color)),
                        (None, None)
                    )
                    matched_text, matched_nbs = matched_entry

                    # Build "<text> - <NBS>", or whichever part matched.
                    combined_text = ""
                    if matched_text and matched_nbs:
                        combined_text = f"{matched_text} - {matched_nbs}"
                    elif matched_text:
                        combined_text = matched_text
                    elif matched_nbs:
                        combined_text = matched_nbs

                    # "/T" is the annotation title shown by PDF viewers.
                    obj.update({
                        NameObject("/T"): TextStringObject(combined_text),
                    })

    output_pdf_io = BytesIO()
    writer.write(output_pdf_io)
    output_pdf_io.seek(0)

    print(f"Annotations updated and saved to {output_pdf_path}")
    return output_pdf_io.read()
| |
|
def distance(rect1, rect2):
    """Calculate the Euclidean distance between two annotation centers.

    Each rect is a [x0, y0, x1, y1] quadruple; its center is the midpoint
    of the two opposite corners.
    """
    center1_x = (float(rect1[0]) + float(rect1[2])) / 2
    center1_y = (float(rect1[1]) + float(rect1[3])) / 2
    center2_x = (float(rect2[0]) + float(rect2[2])) / 2
    center2_y = (float(rect2[1]) + float(rect2[3])) / 2
    dx = center2_x - center1_x
    dy = center2_y - center1_y
    return math.sqrt(dx ** 2 + dy ** 2)
| |
|
def remove_duplicate_annotations(pdf_path, threshold):
    """Remove one of the duplicate annotations if they are close and have the same color.

    Args:
        pdf_path: The PDF document as raw bytes (NOTE(review): despite the
            name this is data, not a filesystem path — it is fed to BytesIO).
        threshold: Maximum center-to-center distance (PDF units) at which
            two same-coloured annotations count as duplicates.

    Returns:
        bytes: The filtered PDF.
    """
    input_pdf_path = pdf_path  # NOTE(review): unused local
    output_pdf_path = "Filtered-Walls.pdf"  # NOTE(review): unused local

    pdf_bytes_io = BytesIO(pdf_path)

    reader = PdfReader(pdf_bytes_io)
    writer = PdfWriter()

    # Preserve the original document metadata.
    metadata = reader.metadata
    writer.add_metadata(metadata)

    for page_index in range(len(reader.pages)):
        page = reader.pages[page_index]

        if "/Annots" in page:
            annotations = page["/Annots"]
            annots_data = []   # (index, rect, normalised colour) triples
            to_delete = set()  # annotation indices to drop

            # Collect every annotation that has a valid 4-element /Rect
            # and a colour entry.
            for i, annot_ref in enumerate(annotations):
                annot = annot_ref.get_object()
                rect = annot.get("/Rect")
                color = annot.get("/C")

                if rect and color and isinstance(rect, ArrayObject) and len(rect) == 4:
                    norm_color = normalize_color(color)
                    annots_data.append((i, list(rect), norm_color))

            # Pairwise compare: for any close, same-coloured pair, the
            # later annotation is marked for deletion (the first is kept).
            for i, (idx1, rect1, color1) in enumerate(annots_data):
                if idx1 in to_delete:
                    continue
                for j in range(i + 1, len(annots_data)):
                    idx2, rect2, color2 = annots_data[j]
                    if idx2 in to_delete:
                        continue
                    if color_close_enough(color1, color2) and distance(rect1, rect2) < threshold:
                        to_delete.add(idx2)

            # Rebuild the annotation array without the marked duplicates.
            new_annots = [annotations[i] for i in range(len(annotations)) if i not in to_delete]
            page[NameObject("/Annots")] = ArrayObject(new_annots)

        writer.add_page(page)

    output_pdf_io = BytesIO()
    writer.write(output_pdf_io)
    output_pdf_io.seek(0)

    return output_pdf_io.read()
| |
|
| |
|
| |
|
| |
|
| |
|
| |
|
def calculate_distance(p1, p2):
    """Euclidean distance between two 2-D points given as (x, y) pairs."""
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return math.sqrt(dx ** 2 + dy ** 2)
| |
|
| |
|
| |
|
def mainFunctionDrawImgPdf(datadoc, dxfpath, dxfratio, SearchArray, Thickness, pdfpath=0, pdfname=0, pdf_content=0):
    """Draw hatched-area outlines onto the page image and PDF, then build the legend.

    High-level flow (all heavy lifting delegated to project helpers):
      1. Compute the DXF/PDF scale ratio and render the PDF page to an image.
      2. Extract hatched areas + texts from the DXF (get_hatched_areas) and
         summarise them into a DataFrame (Create_DF).
      3. For each hatched polygon: rasterise its contour, detect a dominant
         colour when the recorded colour is white, draw the outline on the
         image and add a polyline annotation along its longest edge.
      4. Rewrite annotations (adjustannotations), drop near-duplicate ones
         (remove_duplicate_annotations), and push the grouped legend to
         Google Sheets.

    Args:
        datadoc: Raw PDF data.
        dxfpath: Path to the DXF file.
        dxfratio: DXF-to-pixel scale factor.
        SearchArray: Search terms forwarded to get_hatched_areas.
        Thickness: Wall thickness used to derive the duplicate-annotation
            distance threshold (falsy -> default threshold of 10).
        pdfpath, pdfname: Forwarded to the Google-Sheets legend builder.
        pdf_content: Optional in-memory PDF content; when truthy it is
            used instead of datadoc by the helpers that accept it.

    Returns:
        (doc2, latestimg, SimilarAreaDictionary, spreadsheetId,
         spreadsheet_url, namepathArr, list1, hatched_areas)
    """
    OutputPdfStage1 = 'BB Trial.pdf'  # NOTE(review): unused local
    if pdf_content:
        FinalRatio = RetriveRatio(datadoc, dxfpath, pdf_content)
    else:
        FinalRatio = RetriveRatio(datadoc, dxfpath)

    # Render the first PDF page to an image and mirror it; drawing happens
    # on the flipped copy (imgg), the original (img) is kept for blending.
    if pdf_content:
        img, pix2 = pdftoimg(datadoc, pdf_content)
    else:
        img, pix2 = pdftoimg(datadoc)
    flipped_horizontal = flip(img)
    allcnts = []  # NOTE(review): never used below
    imgg = flipped_horizontal

    if pdf_content:
        doc = fitz.open(stream=pdf_content, filetype="pdf")
    else:
        doc = fitz.open('pdf', datadoc)
    page2 = doc[0]
    rotationOld = page2.rotation
    derotationMatrix = page2.derotation_matrix

    pix = page2.get_pixmap()
    # Page extent from the mediabox corners (abs() guards negative origins).
    width = abs(page2.mediabox[2]) + abs(page2.mediabox[0])
    height = abs(page2.mediabox[3]) + abs(page2.mediabox[1])
    print('mediabox', width, height)

    if page2.rotation != 0:
        # Rotated page: remember the angle, work in unrotated coordinates.
        rotationangle = page2.rotation
        page2.set_rotation(0)
        ratio = pix.width / img.shape[0]
    else:
        # Unrotated page: a fixed 270-degree angle is assumed downstream.
        # NOTE(review): ratio is computed but never used afterwards.
        ratio = pix.width / img.shape[1]
        rotationangle = 270

    hatched_areas, text_with_positions = get_hatched_areas(datadoc, dxfpath, FinalRatio, rotationangle, SearchArray)
    allshapes = []  # NOTE(review): never used below
    NewColors = []
    if pdf_content:
        SimilarAreaDictionary = Create_DF(dxfpath, datadoc, hatched_areas, pdf_content)
    else:
        SimilarAreaDictionary = Create_DF(dxfpath, datadoc, hatched_areas)
    i = 0             # row index into SimilarAreaDictionary for the current polygon
    flagcolor = 0     # 1 = colour taken from a matched row, 2 = from row i
    ColorCounter = 0  # NOTE(review): never used below
    ColorCheck = []   # colours already assigned, for de-duplication
    deleterows = []   # NOTE(review): never used below

    color_margin = 2  # NOTE(review): never used below

    for polygon in hatched_areas:
        cntPoints = []        # integer pixel contour (for OpenCV)
        cntPoints1 = []       # float contour (for PDF-space maths)
        shapeePerimeter = []
        shapeeArea = []
        Text_Detected = 0     # NOTE(review): never used below

        # Black canvas to rasterise this polygon in isolation.
        blackImgShapes = np.zeros(imgg.shape[:2], dtype="uint8")
        blackImgShapes = cv2.cvtColor(blackImgShapes, cv2.COLOR_GRAY2BGR)

        # Scale DXF vertices into pixel space.
        for vertex in polygon[0]:
            x = (vertex[0]) * dxfratio
            y = (vertex[1]) * dxfratio
            if rotationangle == 0:
                if y < 0:
                    y = y * -1  # mirror negative y for unrotated pages
            cntPoints.append([int(x), int(y)])
            cntPoints1.append([x, y])

        # Filled white mask of the polygon and its bounding box.
        cv2.drawContours(blackImgShapes, [np.array(cntPoints)], -1, ([255, 255, 255]), thickness=-1)
        x, y, w, h = cv2.boundingRect(np.array(cntPoints))

        # Map contour points into de-rotated PDF space; the first point (p2)
        # is appended again at the end to close the ring.
        firstpoint = 0
        for poi in np.array(cntPoints1):
            if firstpoint == 0:
                x2, y2 = poi
                p2 = fitz.Point(x2, y2)
                p2 = p2 * derotationMatrix
                shapeePerimeter.append([p2[0], p2[1]])
                firstpoint = 1
            else:
                x1, y1 = poi
                p1 = fitz.Point(x1, y1)
                p1 = p1 * derotationMatrix
                shapeePerimeter.append([p1[0], p1[1]])

        shapeePerimeter.append([p2[0], p2[1]])
        shapeePerimeter = np.flip(shapeePerimeter, 1)  # swap x/y columns
        shapeePerimeter = rotate_polygon(shapeePerimeter, rotationangle, rotationOld, width, height)

        # Same mapping again for the area polygon.
        # NOTE(review): duplicates the loop above (minus first-point handling).
        for poi in np.array(cntPoints1):
            x1, y1 = poi
            p1 = fitz.Point(x1, y1)
            p1 = p1 * derotationMatrix
            shapeeArea.append([p1[0], p1[1]])

        shapeeArea.append([p2[0], p2[1]])
        shapeeArea = np.flip(shapeeArea, 1)
        shapeeArea = rotate_polygon(shapeeArea, rotationangle, rotationOld, width, height)

        # Find this polygon's summary row by exact area+perimeter match
        # (tol == 0), mirroring the matching logic in Create_DF.
        tol = 0
        condition1 = (SimilarAreaDictionary['Area'] >= polygon[1] - tol) & (SimilarAreaDictionary['Area'] <= polygon[1] + tol)
        condition2 = (SimilarAreaDictionary['Perimeter'] >= polygon[2] - tol) & (SimilarAreaDictionary['Perimeter'] <= polygon[2] + tol)
        combined_condition = condition1 & condition2

        if any(combined_condition):
            flagcolor = 1
            index = np.where(combined_condition)[0][0]
            NewColors = SimilarAreaDictionary.at[index, 'Color']
        else:
            flagcolor = 2
            NewColors = SimilarAreaDictionary.at[i, 'Color']

        # White recorded colour -> sample the actual drawing to find a
        # usable dominant colour inside the polygon.
        if (int(NewColors[0]) == 255 and int(NewColors[1]) == 255 and int(NewColors[2]) == 255):

            WhiteImgFinal = cv2.bitwise_and(blackImgShapes, imgg)

            flipped = flip(WhiteImgFinal)  # NOTE(review): never used below

            imgslice = WhiteImgFinal[y:y + h, x:x + w]

            if (imgslice.shape[0] != 0 and imgslice.shape[1] != 0):
                flippedSlice = flip(imgslice)

                flippedSlice_pil = Image.fromarray(flippedSlice)

                # Tally colours over 100x100 patches of the slice.
                # NOTE(review): this loop variable shadows the outer row
                # counter `i` — likely a latent bug; confirm before reuse.
                patch_size = 100
                patch_colors = []

                for i in range(0, flippedSlice_pil.width, patch_size):
                    for j in range(0, flippedSlice_pil.height, patch_size):
                        patch = flippedSlice_pil.crop((i, j, i + patch_size, j + patch_size))
                        patch_colors += patch.getcolors(patch_size * patch_size)

                # Most frequent colour that is neither near-black nor
                # near-white.
                max_count = 0
                dominant_color = None
                tolerance = 5  # NOTE(review): never used below
                black_threshold = 30
                white_threshold = 225

                for count, color in patch_colors:
                    if not (all(c <= black_threshold for c in color) or all(c >= white_threshold for c in color)):
                        if count > max_count:
                            max_count = count
                            dominant_color = color

                if dominant_color is not None:
                    ColorCheck.append(dominant_color)

                    NewColors = None

                    # Reuse an already-seen colour that is within 20 per
                    # channel; otherwise adopt the dominant colour.
                    # Both branches swap RGB -> BGR component order.
                    for color in ColorCheck:
                        if (abs(color[0] - dominant_color[0]) < 20 and
                                abs(color[1] - dominant_color[1]) < 20 and
                                abs(color[2] - dominant_color[2]) < 20):
                            NewColors = (color[2], color[1], color[0])
                            break
                    else:
                        NewColors = (dominant_color[2], dominant_color[1], dominant_color[0])

                    if NewColors not in ColorCheck:
                        ColorCheck.append(NewColors)

                    # Write the resolved colour back to the summary row.
                    if flagcolor == 1:
                        SimilarAreaDictionary.at[index, 'Color'] = NewColors
                    elif flagcolor == 2:
                        SimilarAreaDictionary.at[i, 'Color'] = NewColors

        # Outline the polygon on the working image (colour given as BGR).
        cv2.drawContours(imgg, [np.array(cntPoints)], -1, ([NewColors[2], NewColors[1], NewColors[0]]), thickness=3)

        # Candidate segments for the perimeter annotation: the first edge,
        # the edge back from the second-to-last point, and the longest
        # stretch within the first half of the ring.
        start_point1 = shapeePerimeter[0]
        end_point1 = shapeePerimeter[1]
        start_point2 = shapeePerimeter[0]
        end_point2 = shapeePerimeter[-2]

        distance1 = calculate_distance(start_point1, end_point1)
        distance2 = calculate_distance(start_point2, end_point2)

        half_index = len(shapeePerimeter) // 2
        half1 = shapeePerimeter[1:half_index + 1]
        half2 = shapeePerimeter[half_index:]

        if len(half1) >= 2:
            half1_distance = sum(calculate_distance(half1[i], half1[i + 1]) for i in range(len(half1) - 1))
        else:
            half1_distance = 0

        # NOTE(review): half2_distance is computed but excluded from the
        # max() below — confirm whether that is intentional.
        if len(half2) >= 2:
            half2_distance = sum(calculate_distance(half2[i], half2[i + 1]) for i in range(len(half2) - 1))
        else:
            half2_distance = 0

        max_distance = max(distance1, distance2, half1_distance)

        if max_distance == distance1:
            chosen_start = start_point1
            chosen_end = end_point1

            points = []
            points.append(chosen_start)
            points.append(chosen_end)
            annot12 = page2.add_polyline_annot(points)

        elif max_distance == distance2:
            chosen_start = start_point2
            chosen_end = end_point2

            points = []
            points.append(chosen_start)
            points.append(chosen_end)

            # NOTE(review): the list is rebuilt twice here in the original.
            points = []
            points.append(chosen_start)
            points.append(chosen_end)
            annot12 = page2.add_polyline_annot(points)

        elif max_distance == half1_distance:
            # Pick the single longest edge within half1.
            max_pair_distance = 0.0
            max_pair_start = None
            max_pair_end = None

            for i in range(len(half1) - 1):
                p_current = half1[i]
                p_next = half1[i + 1]

                dist = calculate_distance(p_current, p_next)

                if dist > max_pair_distance:
                    max_pair_distance = dist
                    max_pair_start = p_current
                    max_pair_end = p_next

            if max_pair_start is not None and max_pair_end is not None:
                points = []
                points.append(max_pair_start)
                points.append(max_pair_end)
                annot12 = page2.add_polyline_annot(points)
            else:
                # NOTE(review): annot12 keeps its previous-iteration value
                # in this path — the style calls below may hit a stale or
                # unbound annotation.
                print("Not enough points in half1 to compute a line.")

        # Style the annotation: stroke in the resolved colour, subject and
        # content record the perimeter in metres.
        annot12.set_border(width=0.8)
        annot12.set_colors(stroke=(int(NewColors[0]) / 255, int(NewColors[1]) / 255, int(NewColors[2]) / 255))
        annot12.set_info(subject='Perimeter Measurement', content=str(polygon[2]) + ' m')
        annot12.set_opacity(0.8)
        annot12.update()

        i += 1

    alpha = 0.8

    # Restore the original page rotation and blend the outlined image over
    # the untouched render.
    page2.set_rotation(rotationOld)
    Correct_img = flip(imgg)

    image_new1 = cv2.addWeighted(Correct_img, alpha, img, 1 - alpha, 0)  # NOTE(review): never used below
    SimilarAreaDictionary = SimilarAreaDictionary.fillna(' ')

    # Drop rows that stayed white (no usable colour was found).
    white_color = (255, 255, 255)
    SimilarAreaDictionary = SimilarAreaDictionary[SimilarAreaDictionary['Color'] != white_color]
    SimilarAreaDictionary.reset_index(drop=True, inplace=True)

    # One legend row per colour; counters/totals summed, the rest taken
    # from the first row of each colour group.
    grouped_df = SimilarAreaDictionary.groupby('Color').agg({
        'Guess': 'first',
        'Occurences': 'sum',
        'Area': 'first',
        'Total Area': 'sum',
        'Perimeter': 'first',
        'Total Perimeter': 'sum',
        'Length': 'first',
        'Total Length': 'sum',
        'Texts': 'first',
        'Comments': 'first'
    }).reset_index()

    # Stage 2: retitle/measure annotations; stage 3: drop near-duplicates
    # using a threshold derived from the wall thickness when available.
    modified_pdf_data = doc.tobytes()
    OutputPdfStage2 = adjustannotations(modified_pdf_data, text_with_positions)

    if (Thickness):
        threshold = math.ceil(float(Thickness) * float(dxfratio))
        print(threshold)
        OutputPdfStage3 = remove_duplicate_annotations(OutputPdfStage2, threshold)
    else:
        OutputPdfStage3 = remove_duplicate_annotations(OutputPdfStage2, threshold=10)

    if pdf_content:
        latestimg, pix = pdftoimg(OutputPdfStage3, pdf_content)
    else:
        latestimg, pix = pdftoimg(OutputPdfStage3)
    doc2 = fitz.open('pdf', OutputPdfStage3)
    if pdf_content:
        gc, spreadsheet_service, spreadsheetId, spreadsheet_url, namepathArr = google_sheet_Legend.legendGoogleSheets(grouped_df, pdfname, pdfpath, pdf_content)
    else:
        gc, spreadsheet_service, spreadsheetId, spreadsheet_url, namepathArr = google_sheet_Legend.legendGoogleSheets(grouped_df, pdfname, pdfpath)
    list1 = pd.DataFrame(columns=['content', 'id', 'subject', 'color'])

    # Collect every coloured annotation of the final PDF into list1
    # (content, id, subject, RGB colour scaled to 0-255).
    for page in doc2:
        for annot in page.annots():
            annot_color = annot.colors
            if annot_color is not None:
                stroke_color = annot_color.get('stroke')
                fill_color = annot_color.get('fill')
                # Prefer the stroke colour when both are present.
                # NOTE(review): `v` stays unbound if neither is set.
                if fill_color:
                    v = 'fill'
                if stroke_color:
                    v = 'stroke'
                x, y, z = int(annot_color.get(v)[0] * 255), int(annot_color.get(v)[1] * 255), int(annot_color.get(v)[2] * 255)
                list1.loc[len(list1)] = [annot.info['content'], annot.info['id'], annot.info['subject'], [x, y, z]]
    print('LISTTT', list1)
    return doc2, latestimg, SimilarAreaDictionary, spreadsheetId, spreadsheet_url, namepathArr, list1, hatched_areas
| |
|
| | |