Marthee commited on
Commit
f76508c
·
verified ·
1 Parent(s): b0ef755

Update Code 2.1 For Tameem.py

Browse files
Files changed (1) hide show
  1. Code 2.1 For Tameem.py +1767 -0
Code 2.1 For Tameem.py CHANGED
@@ -0,0 +1,1767 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """## Imports"""
2
+
3
+ import numpy as np
4
+ import cv2
5
+ from matplotlib import pyplot as plt
6
+ import math
7
+ from PIL import Image , ImageDraw, ImageFont , ImageColor
8
+ import fitz
9
+ import ezdxf as ez
10
+ import sys
11
+ from ezdxf import units
12
+ # from google.colab.patches import cv2_imshow
13
+ from ezdxf.math import OCS, Matrix44, Vec3
14
+ import ezdxf
15
+ print(ezdxf.__version__)
16
+ import matplotlib.pyplot as plt
17
+ from matplotlib.patches import Polygon
18
+ from shapely.geometry import Point, Polygon as ShapelyPolygon
19
+ from ezdxf.math import Vec2
20
+ import random
21
+ import pandas as pd
22
+ import google_sheet_Legend
23
+ # import tsadropboxretrieval
24
+ from ezdxf import bbox
25
+ from math import sin, cos, radians
26
+ # from ezdxf.tools import rgb
27
+ from ezdxf.colors import aci2rgb
28
+ # from ezdxf.math import rgb_from_color
29
+ from collections import Counter
30
+
31
+ import xml.etree.ElementTree as ET
32
+ from PyPDF2 import PdfReader, PdfWriter
33
+ from PyPDF2.generic import TextStringObject, NameObject, ArrayObject, FloatObject
34
+ from PyPDF2.generic import NameObject, TextStringObject, DictionaryObject, FloatObject, ArrayObject, NumberObject
35
+
36
+ from typing import NewType
37
+ from ctypes import sizeof
38
+ from io import BytesIO
39
+
40
+
41
+
42
def normalize_vertices(vertices):
    """Return the vertices as a canonically ordered, hashable tuple.

    Every vertex is converted to a tuple and the whole collection is
    sorted, so two vertex lists describing the same point set compare
    equal regardless of their original ordering.
    """
    as_tuples = (tuple(vertex) for vertex in vertices)
    return tuple(sorted(as_tuples))
45
+
46
def areas_are_similar(area1, area2, tolerance=0.2):
    """Return True when the absolute difference between the two areas
    does not exceed *tolerance*."""
    difference = area1 - area2
    if difference < 0:
        difference = -difference
    return difference <= tolerance
49
+
50
+
51
+ # -*- coding: utf-8 -*-wj
52
+ """Version to be deployed of 3.2 Calculating area/perimeter
53
+ Automatically generated by Colab.
54
+ Original file is located at
55
+ https://colab.research.google.com/drive/1XPeCoTBgWSNBYZ3aMKBteP4YG3w4bORs
56
+ """
57
+
58
+
59
+ """## Notes"""
60
+
61
+ #new approach to get width and height of dxf plan
62
+ '''
63
+ This portion is used to convert vertices read from dxf to pixels in order to accurately locate shapes in the image and pdf
64
+ ratio :
65
+ MeasuredMetric* PixelValue/ DxfMetric = MeasuredPixel
66
+ PixelValue: taken from the pixel conversion code; the second number in the bracket represents the perimeter
67
+ DxfMetric: measured perimeter from foxit
68
+ dividing PixelValue by DxfMetric gives a ratio; this is your DXF ratio
69
+ '''
70
+
71
+
72
+ """PDF to image"""
73
+
74
def pdftoimg(datadoc):
    """Render the first page of an in-memory PDF to an OpenCV BGR image.

    datadoc: raw PDF bytes.
    Returns (img, pix) where img is a numpy BGR array of the rasterized
    first page and pix is the PyMuPDF pixmap it was built from.
    """
    document = fitz.open('pdf', datadoc)
    first_page = document[0]
    pixmap = first_page.get_pixmap()  # rasterize the page
    pil_image = Image.frombytes('RGB', [pixmap.width, pixmap.height], pixmap.samples)
    # OpenCV expects BGR channel order, PIL delivers RGB.
    bgr_image = cv2.cvtColor(np.array(pil_image), cv2.COLOR_RGB2BGR)
    print("IMAGE")
    # cv2_imshow(bgr_image)
    return bgr_image, pixmap
84
+
85
+
86
# Standard ISO A-series paper sizes, as (width, height) in inches in
# portrait orientation.  Consumed by get_paper_size_in_inches() to
# classify a PDF page by its physical dimensions.
ISO_SIZES_INCHES = {
    "A0": (33.11, 46.81),
    "A1": (23.39, 33.11),
    "A2": (16.54, 23.39),
    "A3": (11.69, 16.54),
    "A4": (8.27, 11.69),
    "A5": (5.83, 8.27),
    "A6": (4.13, 5.83),
    "A7": (2.91, 4.13),
    "A8": (2.05, 2.91),
    "A9": (1.46, 2.05),
    "A10": (1.02, 1.46)
}
100
+
101
def get_paper_size_in_inches(width, height):
    """Return the name of the ISO A-series size matching the given page
    dimensions (inches) within 0.1", in either orientation, or the string
    "Unknown Size" when nothing matches."""
    for name, (size_w, size_h) in ISO_SIZES_INCHES.items():
        matches_portrait = abs(size_w - width) < 0.1 and abs(size_h - height) < 0.1
        matches_landscape = abs(size_w - height) < 0.1 and abs(size_h - width) < 0.1
        if matches_portrait or matches_landscape:
            return name
    return "Unknown Size"
107
+
108
def analyze_pdf(datadoc):
    """Print the size of every page of an in-memory PDF and classify each
    against the ISO A-series sizes.

    datadoc: raw PDF bytes.
    Returns (width_inches, height_inches, paper_size) of the LAST page,
    matching the original behaviour.
    Raises ValueError when the document has no pages (the original code
    raised an opaque UnboundLocalError in that case), and now always
    closes the document handle, even if page processing fails.
    """
    pdf_document = fitz.open('pdf', datadoc)
    try:
        if len(pdf_document) == 0:
            raise ValueError("PDF document contains no pages")

        # Iterate through pages and print their sizes
        for page_number in range(len(pdf_document)):
            page = pdf_document[page_number]
            rect = page.rect
            width_points, height_points = rect.width, rect.height

            # PDF user space is 72 points per inch.
            width_inches, height_inches = width_points / 72, height_points / 72

            paper_size = get_paper_size_in_inches(width_inches, height_inches)

            print(f"Page {page_number + 1}: {width_inches:.2f} x {height_inches:.2f} inches ({paper_size})")
    finally:
        # Close the handle even when an exception is raised above.
        pdf_document.close()
    return width_inches, height_inches, paper_size
127
+
128
+
129
def get_dxfSize(dxfpath):
    """Return a characteristic (x, y) extent of the DXF modelspace.

    dxfpath: path of the DXF file on disk.

    NOTE(review): the returned value is extmin + extmax per axis, i.e. the
    SUM of the bounding-box corners, not the width/height (which would be
    extmax - extmin).  The two coincide only when extmin is at the origin —
    confirm this is intentional for RetriveRatio's magnitude comparison.
    """
    doc = ezdxf.readfile(dxfpath)
    msp = doc.modelspace()
    # Create a cache for bounding box calculations
    # Get the overall bounding box for all entities in the modelspace
    cache = bbox.Cache()
    overall_bbox = bbox.extents(msp, cache=cache)
    print("Overall Bounding Box:", overall_bbox)
    print(overall_bbox.extmin[0]+overall_bbox.extmax[0], overall_bbox.extmin[1]+overall_bbox.extmax[1])

    return overall_bbox.extmin[0]+overall_bbox.extmax[0], overall_bbox.extmin[1]+overall_bbox.extmax[1]
141
+
142
+
143
+
144
def switch_case(argument):
    """Map an ISO paper-size name to its scaling ratio.

    Unknown sizes fall back to a neutral ratio of 1.  The chosen ratio is
    also echoed to stdout, exactly as before.
    """
    size_to_ratio = {
        "A0": 1.27,
        "A1": 2.54,
        "A2": 5.08,
        "A3": 10.16,
        "A4": 20.32,
        "A5": 40.64,
        "A6": 81.28,
        "A7": 162.56,
        "A8": 325.12,
        "A9": 650.24,
        "A10": 1300.48,
    }
    # Dictionary lookup with a default stands in for a switch statement.
    ratio = size_to_ratio.get(argument, 1)
    print("Final Ratio=", ratio)
    return ratio
161
+
162
+
163
+
164
+
165
def RetriveRatio(datadoc, dxfpath):
    """Derive the scale ratio between a PDF drawing and its DXF counterpart.

    datadoc: raw PDF bytes; dxfpath: path to the matching DXF file.
    Returns the paper-size ratio from switch_case() when the DXF and PDF
    extents are of comparable magnitude, otherwise a neutral ratio of 1.
    """
    width, height, paper_size = analyze_pdf(datadoc)
    bigger = max(width, height)

    width_dxf, height_dxf = get_dxfSize(dxfpath)
    bigger_dxf = max(width_dxf, height_dxf)

    # Only trust the paper-size ratio when the DXF extent is within
    # (0.2, 1.2) of the PDF extent.
    if 0.2 < bigger_dxf / bigger < 1.2:
        # BUGFIX: the debug label previously read "bigger_dxf/bigger"
        # while the printed value was bigger/bigger_dxf — label now
        # matches the value.
        print("bigger/bigger_dxf", bigger / bigger_dxf)
        FinalRatio = switch_case(paper_size)
    else:
        FinalRatio = 1
    return FinalRatio
188
+
189
+
190
+ """Flips image
191
+ DXF origin is at the bottom left while img origin is top left
192
+ """
193
+
194
def flip(img):
    """Flip the image vertically (DXF's origin is bottom-left, the image's
    is top-left, so the row order must be reversed).

    img: numpy image array (H x W [x C]).
    Returns a new, contiguous array with the rows reversed.

    BUGFIX/simplification: the previous implementation rotated 180° via
    cv2.warpAffine and then mirrored horizontally.  Those two operations
    compose to exactly a vertical flip, but warpAffine resamples the image
    and can introduce interpolation and boundary artifacts.  Pure index
    reversal is lossless and faster.
    """
    # Reverse the row order; .copy() yields a contiguous array like
    # cv2.flip() returned, so downstream code behaves identically.
    return img[::-1].copy()
207
+
208
+
209
+
210
def aci_to_rgb(aci):
    """Translate an AutoCAD Color Index (ACI, 0-255) into an (R, G, B) tuple.

    The mapping is a hand-written copy of the classic AutoCAD 256-colour
    palette.  Any index outside the table (including negative values or
    the BYLAYER sentinel 256) falls back to white.
    """
    aci_rgb_map = {
        0: (0, 0, 0),
        1: (255, 0, 0),
        2: (255, 255, 0),
        3: (0, 255, 0),
        4: (0, 255, 255),
        5: (0, 0, 255),
        6: (255, 0, 255),
        7: (255, 255, 255),
        8: (65, 65, 65),
        9: (128, 128, 128),
        10: (255, 0, 0),
        11: (255, 170, 170),
        12: (189, 0, 0),
        13: (189, 126, 126),
        14: (129, 0, 0),
        15: (129, 86, 86),
        16: (104, 0, 0),
        17: (104, 69, 69),
        18: (79, 0, 0),
        19: (79, 53, 53),
        20: (255, 63, 0),
        21: (255, 191, 170),
        22: (189, 46, 0),
        23: (189, 141, 126),
        24: (129, 31, 0),
        25: (129, 96, 86),
        26: (104, 25, 0),
        27: (104, 78, 69),
        28: (79, 19, 0),
        29: (79, 59, 53),
        30: (255, 127, 0),
        31: (255, 212, 170),
        32: (189, 94, 0),
        33: (189, 157, 126),
        34: (129, 64, 0),
        35: (129, 107, 86),
        36: (104, 52, 0),
        37: (104, 86, 69),
        38: (79, 39, 0),
        39: (79, 66, 53),
        40: (255, 191, 0),
        41: (255, 234, 170),
        42: (189, 141, 0),
        43: (189, 173, 126),
        44: (129, 96, 0),
        45: (129, 118, 86),
        46: (104, 78, 0),
        47: (104, 95, 69),
        48: (79, 59, 0),
        49: (79, 73, 53),
        50: (255, 255, 0),
        51: (255, 255, 170),
        52: (189, 189, 0),
        53: (189, 189, 126),
        54: (129, 129, 0),
        55: (129, 129, 86),
        56: (104, 104, 0),
        57: (104, 104, 69),
        58: (79, 79, 0),
        59: (79, 79, 53),
        60: (191, 255, 0),
        61: (234, 255, 170),
        62: (141, 189, 0),
        63: (173, 189, 126),
        64: (96, 129, 0),
        65: (118, 129, 86),
        66: (78, 104, 0),
        67: (95, 104, 69),
        68: (59, 79, 0),
        69: (73, 79, 53),
        70: (127, 255, 0),
        71: (212, 255, 170),
        72: (94, 189, 0),
        73: (157, 189, 126),
        74: (64, 129, 0),
        75: (107, 129, 86),
        76: (52, 104, 0),
        77: (86, 104, 69),
        78: (39, 79, 0),
        79: (66, 79, 53),
        80: (63, 255, 0),
        81: (191, 255, 170),
        82: (46, 189, 0),
        83: (141, 189, 126),
        84: (31, 129, 0),
        85: (96, 129, 86),
        86: (25, 104, 0),
        87: (78, 104, 69),
        88: (19, 79, 0),
        89: (59, 79, 53),
        90: (0, 255, 0),
        91: (170, 255, 170),
        92: (0, 189, 0),
        93: (126, 189, 126),
        94: (0, 129, 0),
        95: (86, 129, 86),
        96: (0, 104, 0),
        97: (69, 104, 69),
        98: (0, 79, 0),
        99: (53, 79, 53),
        100: (0, 255, 63),
        101: (170, 255, 191),
        102: (0, 189, 46),
        103: (126, 189, 141),
        104: (0, 129, 31),
        105: (86, 129, 96),
        106: (0, 104, 25),
        107: (69, 104, 78),
        108: (0, 79, 19),
        109: (53, 79, 59),
        110: (0, 255, 127),
        111: (170, 255, 212),
        112: (0, 189, 94),
        113: (126, 189, 157),
        114: (0, 129, 64),
        115: (86, 129, 107),
        116: (0, 104, 52),
        117: (69, 104, 86),
        118: (0, 79, 39),
        119: (53, 79, 66),
        120: (0, 255, 191),
        121: (170, 255, 234),
        122: (0, 189, 141),
        123: (126, 189, 173),
        124: (0, 129, 96),
        125: (86, 129, 118),
        126: (0, 104, 78),
        127: (69, 104, 95),
        128: (0, 79, 59),
        129: (53, 79, 73),
        130: (0, 255, 255),
        131: (170, 255, 255),
        132: (0, 189, 189),
        133: (126, 189, 189),
        134: (0, 129, 129),
        135: (86, 129, 129),
        136: (0, 104, 104),
        137: (69, 104, 104),
        138: (0, 79, 79),
        139: (53, 79, 79),
        140: (0, 191, 255),
        141: (170, 234, 255),
        142: (0, 141, 189),
        143: (126, 173, 189),
        144: (0, 96, 129),
        145: (86, 118, 129),
        146: (0, 78, 104),
        147: (69, 95, 104),
        148: (0, 59, 79),
        149: (53, 73, 79),
        150: (0, 127, 255),
        151: (170, 212, 255),
        152: (0, 94, 189),
        153: (126, 157, 189),
        154: (0, 64, 129),
        155: (86, 107, 129),
        156: (0, 52, 104),
        157: (69, 86, 104),
        158: (0, 39, 79),
        159: (53, 66, 79),
        160: (0, 63, 255),
        161: (170, 191, 255),
        162: (0, 46, 189),
        163: (126, 141, 189),
        164: (0, 31, 129),
        165: (86, 96, 129),
        166: (0, 25, 104),
        167: (69, 78, 104),
        168: (0, 19, 79),
        169: (53, 59, 79),
        170: (0, 0, 255),
        171: (170, 170, 255),
        172: (0, 0, 189),
        173: (126, 126, 189),
        174: (0, 0, 129),
        175: (86, 86, 129),
        176: (0, 0, 104),
        177: (69, 69, 104),
        178: (0, 0, 79),
        179: (53, 53, 79),
        180: (63, 0, 255),
        181: (191, 170, 255),
        182: (46, 0, 189),
        183: (141, 126, 189),
        184: (31, 0, 129),
        185: (96, 86, 129),
        186: (25, 0, 104),
        187: (78, 69, 104),
        188: (19, 0, 79),
        189: (59, 53, 79),
        190: (127, 0, 255),
        191: (212, 170, 255),
        192: (94, 0, 189),
        193: (157, 126, 189),
        194: (64, 0, 129),
        195: (107, 86, 129),
        196: (52, 0, 104),
        197: (86, 69, 104),
        198: (39, 0, 79),
        199: (66, 53, 79),
        200: (191, 0, 255),
        201: (234, 170, 255),
        202: (141, 0, 189),
        203: (173, 126, 189),
        204: (96, 0, 129),
        205: (118, 86, 129),
        206: (78, 0, 104),
        207: (95, 69, 104),
        208: (59, 0, 79),
        209: (73, 53, 79),
        210: (255, 0, 255),
        211: (255, 170, 255),
        212: (189, 0, 189),
        213: (189, 126, 189),
        214: (129, 0, 129),
        215: (129, 86, 129),
        216: (104, 0, 104),
        217: (104, 69, 104),
        218: (79, 0, 79),
        219: (79, 53, 79),
        220: (255, 0, 191),
        221: (255, 170, 234),
        222: (189, 0, 141),
        223: (189, 126, 173),
        224: (129, 0, 96),
        225: (129, 86, 118),
        226: (104, 0, 78),
        227: (104, 69, 95),
        228: (79, 0, 59),
        229: (79, 53, 73),
        230: (255, 0, 127),
        231: (255, 170, 212),
        232: (189, 0, 94),
        233: (189, 126, 157),
        234: (129, 0, 64),
        235: (129, 86, 107),
        236: (104, 0, 52),
        237: (104, 69, 86),
        238: (79, 0, 39),
        239: (79, 53, 66),
        240: (255, 0, 63),
        241: (255, 170, 191),
        242: (189, 0, 46),
        243: (189, 126, 141),
        244: (129, 0, 31),
        245: (129, 86, 96),
        246: (104, 0, 25),
        247: (104, 69, 78),
        248: (79, 0, 19),
        249: (79, 53, 59),
        250: (51, 51, 51),
        251: (80, 80, 80),
        252: (105, 105, 105),
        253: (130, 130, 130),
        254: (190, 190, 190),
        255: (255, 255, 255)
    }

    # Default to white if index is invalid or not found
    return aci_rgb_map.get(aci, (255, 255, 255))
472
+
473
+
474
def int_to_rgb(color_int):
    """Split a packed 24-bit 0xRRGGBB integer into an (R, G, B) tuple."""
    blue = color_int & 0xFF
    green = (color_int >> 8) & 0xFF
    red = (color_int >> 16) & 0xFF
    return (red, green, blue)
480
+
481
+
482
def get_hatch_color(entity):
    """Resolve a DXF entity's display colour to an (R, G, B) tuple.

    Resolution order follows DXF colour semantics:
      1. an explicit 24-bit true colour, if present;
      2. an explicit ACI index in 1..255;
      3. BYLAYER (ACI 256) — look the colour up on the entity's layer.
         BUGFIX: the original code tested ``color_index == 0`` for
         BYLAYER, but in DXF 0 means BYBLOCK and 256 means BYLAYER, so
         the layer colour was never resolved for default-coloured
         entities.  Both 0 and 256 now fall back to the owning layer
         (block context is not available here, so BYBLOCK is
         approximated by the layer colour);
      4. white (255, 255, 255) for anything unresolvable, including a
         missing entity or an unknown layer.
    """
    if not entity:
        return (255, 255, 255)

    # 1. Explicit true colour wins over any indexed colour.
    if entity.dxf.hasattr('true_color'):
        return int_to_rgb(entity.dxf.true_color)

    # 2. Explicit ACI index.
    color_index = entity.dxf.color
    if 1 <= color_index <= 255:
        return aci_to_rgb(color_index)

    # 3. BYLAYER (256) or BYBLOCK (0): resolve through the layer table.
    if color_index in (0, 256):
        layer_name = entity.dxf.layer
        layer = entity.doc.layers.get(layer_name)
        if layer:
            return aci_to_rgb(layer.dxf.color)
        return (255, 255, 255)

    # 4. Unhandled colour case — default to white.
    return (255, 255, 255)
521
+
522
+
523
+
524
def point_in_rectangle(point, rect_coords):
    """Return True when *point* lies inside or on the border of the
    axis-aligned rectangle given as ((x1, y1), (x2, y2))."""
    px, py = point
    (left, bottom), (right, top) = rect_coords
    within_x = left <= px <= right
    within_y = bottom <= py <= top
    return within_x and within_y
528
+
529
+ from math import sqrt
530
+
531
def euclidean_distance(point1, point2):
    """Return the straight-line distance between two 2-D points."""
    dx = point2[0] - point1[0]
    dy = point2[1] - point1[1]
    return sqrt(dx ** 2 + dy ** 2)
535
+
536
def compute_hatch_centroid(hatch):
    """Return the arithmetic mean (x, y) of every boundary point of
    *hatch*, or None when no coordinates can be collected.

    Polyline paths contribute all their vertices; edge paths contribute
    the start and end point of each edge that exposes them.
    """
    points = []
    for path in hatch.paths:
        if path.PATH_TYPE == "PolylinePath":
            points.extend((vertex[0], vertex[1]) for vertex in path.vertices)
        elif path.PATH_TYPE == "EdgePath":
            for edge in path.edges:
                if hasattr(edge, "start"):
                    points.append((edge.start[0], edge.start[1]))
                if hasattr(edge, "end"):
                    points.append((edge.end[0], edge.end[1]))
    if not points:
        return None
    count = len(points)
    mean_x = sum(p[0] for p in points) / count
    mean_y = sum(p[1] for p in points) / count
    return (mean_x, mean_y)
555
+
556
+ """### Hatched areas"""
557
+ def get_hatched_areas(datadoc,filename,FinalRatio,rotationangle,SearchArray):
558
+
559
+ print("SearchArray = ",SearchArray)
560
+
561
+ doc = ezdxf.readfile(filename)
562
+ doc.header['$MEASUREMENT'] = 1
563
+ msp = doc.modelspace()
564
+ trial=0
565
+ hatched_areas = []
566
+ threshold=0.001
567
+ TextFound = 0
568
+ j=0
569
+ unique_shapes = []
570
+
571
+
572
+ text_with_positions = []
573
+ text_color_mapping = {}
574
+ color_palette = [
575
+ (255, 0, 0), (0, 0, 255), (0, 255, 255), (0, 64, 0), (255, 204, 0),
576
+ (255, 128, 64), (255, 0, 128), (255, 128, 192), (128, 128, 255),
577
+ (128, 64, 0), (0, 255, 0), (0, 200, 0), (255, 128, 255), (128, 0, 255),
578
+ (0, 128, 192), (128, 0, 128), (128, 0, 0), (0, 128, 255), (149, 1, 70),
579
+ (255, 182, 128), (222, 48, 71), (240, 0, 112), (255, 0, 255),
580
+ (192, 46, 65), (0, 0, 128), (0, 128, 64), (255, 255, 0), (128, 0, 80),
581
+ (255, 255, 128), (90, 255, 140), (255, 200, 20), (91, 16, 51),
582
+ (90, 105, 138), (114, 10, 138), (36, 82, 78), (225, 105, 190),
583
+ (108, 150, 170), (11, 35, 75), (42, 176, 170), (255, 176, 170),
584
+ (209, 151, 15), (81, 27, 85), (226, 106, 122), (67, 119, 149),
585
+ (159, 179, 140), (159, 179, 30), (255, 85, 198), (255, 27, 85),
586
+ (188, 158, 8), (140, 188, 120), (59, 61, 52), (65, 81, 21),
587
+ (212, 255, 174), (15, 164, 90), (41, 217, 245), (213, 23, 182),
588
+ (11, 85, 169), (78, 153, 239), (0, 66, 141), (64, 98, 232),
589
+ (140, 112, 255), (57, 33, 154), (194, 117, 252), (116, 92, 135),
590
+ (74, 43, 98), (188, 13, 123), (129, 58, 91), (255, 128, 100),
591
+ (171, 122, 145), (255, 98, 98), (222, 48, 77)
592
+ ]
593
+ import re
594
+
595
+ text_with_positions = []
596
+ # SearchArray=[["","Wall Type","",""],["","","",""]]
597
+
598
+ # print("SearchArray=",len(SearchArray))
599
+ # print("SearchArray=",len(SearchArray[0]))
600
+ # print("SearchArray=",SearchArray[0][0])
601
+
602
+ if(SearchArray):
603
+ for i in range(len(SearchArray)):
604
+
605
+ if (SearchArray[i][0] and SearchArray[i][1] and SearchArray[i][2]):
606
+ for text_entity in doc.modelspace().query('TEXT MTEXT'):
607
+ text = text_entity.text.strip() if hasattr(text_entity, 'text') else ""
608
+ # if (text.startswith("P") and len(text) == 3) or (text.startswith("I") and len(text) == 3): # Filter for "Wall"
609
+ if(text.startswith(SearchArray[i][0]) and len(text)==int(SearchArray[i][2])):
610
+ position = text_entity.dxf.insert # Extract text position
611
+ x, y = position.x, position.y
612
+
613
+ for text_entity in doc.modelspace().query('TEXT MTEXT'):
614
+ NBS = text_entity.text.strip() if hasattr(text_entity, 'text') else ""
615
+ if (NBS.startswith(SearchArray[i][1])):
616
+ positionNBS = text_entity.dxf.insert # Extract text position
617
+ xNBS, yNBS = positionNBS.x, positionNBS.y
618
+
619
+ if(x == xNBS or y == yNBS):
620
+ textNBS=NBS
621
+ break
622
+
623
+ else:
624
+ textNBS = None
625
+
626
+
627
+
628
+ nearest_hatch = None
629
+ min_distance = float('inf') # Initialize with a very large value
630
+ detected_color = (255, 255, 255) # Default to white
631
+
632
+ # Search for the nearest hatch
633
+ for hatch in doc.modelspace().query('HATCH'): # Query only hatches
634
+ if hatch.paths:
635
+ for path in hatch.paths:
636
+ if path.type == 1: # PolylinePath
637
+ vertices = [v[:2] for v in path.vertices]
638
+ # Calculate the centroid of the hatch
639
+ centroid_x = sum(v[0] for v in vertices) / len(vertices)
640
+ centroid_y = sum(v[1] for v in vertices) / len(vertices)
641
+ centroid = (centroid_x, centroid_y)
642
+
643
+ # Calculate the distance between the text and the hatch centroid
644
+ distance = calculate_distance((x, y), centroid)
645
+
646
+ # Update the nearest hatch if a closer one is found
647
+ if distance < min_distance:
648
+ min_distance = distance
649
+ nearest_hatch = hatch
650
+
651
+ # Get the color of this hatch
652
+ current_color = get_hatch_color(hatch)
653
+ if current_color != (255, 255, 255): # Valid color found
654
+ detected_color = current_color
655
+ break # Stop checking further paths for this hatch
656
+
657
+
658
+ # Append the detected result only once
659
+ text_with_positions.append([text, textNBS, (x, y), detected_color])
660
+ print("text_with_positions=",text_with_positions)
661
+
662
+ elif (SearchArray[i][0] and SearchArray[i][2]):
663
+ for text_entity in doc.modelspace().query('TEXT MTEXT'):
664
+ text = text_entity.text.strip() if hasattr(text_entity, 'text') else ""
665
+ # if (text.startswith("P") and len(text) == 3) or (text.startswith("I") and len(text) == 3): # Filter for "Wall"
666
+ if(text.startswith(SearchArray[i][0]) and len(text)==int(SearchArray[i][2])):
667
+ position = text_entity.dxf.insert # Extract text position
668
+ x, y = position.x, position.y
669
+ textNBS = None
670
+ nearest_hatch = None
671
+ min_distance = float('inf') # Initialize with a very large value
672
+ detected_color = (255, 255, 255) # Default to white
673
+
674
+ # Search for the nearest hatch
675
+ for hatch in doc.modelspace().query('HATCH'): # Query only hatches
676
+ if hatch.paths:
677
+ for path in hatch.paths:
678
+ if path.type == 1: # PolylinePath
679
+ vertices = [v[:2] for v in path.vertices]
680
+ # Calculate the centroid of the hatch
681
+ centroid_x = sum(v[0] for v in vertices) / len(vertices)
682
+ centroid_y = sum(v[1] for v in vertices) / len(vertices)
683
+ centroid = (centroid_x, centroid_y)
684
+
685
+ # Calculate the distance between the text and the hatch centroid
686
+ distance = calculate_distance((x, y), centroid)
687
+
688
+ # Update the nearest hatch if a closer one is found
689
+ if distance < min_distance:
690
+ min_distance = distance
691
+ nearest_hatch = hatch
692
+
693
+ # Get the color of this hatch
694
+ current_color = get_hatch_color(hatch)
695
+ if current_color != (255, 255, 255): # Valid color found
696
+ detected_color = current_color
697
+ break # Stop checking further paths for this hatch
698
+
699
+
700
+ # Append the detected result only once
701
+ text_with_positions.append([text, textNBS, (x, y), detected_color])
702
+ print("text_with_positions=",text_with_positions)
703
+
704
+ elif(SearchArray[i][0]):
705
+ for text_entity in doc.modelspace().query('TEXT MTEXT'):
706
+ text = text_entity.text.strip() if hasattr(text_entity, 'text') else ""
707
+ # if (text.startswith("P") and len(text) == 3) or (text.startswith("I") and len(text) == 3): # Filter for "Wall"
708
+ if(text.startswith(SearchArray[i][0])):
709
+ position = text_entity.dxf.insert # Extract text position
710
+ x, y = position.x, position.y
711
+ textNBS = None
712
+ nearest_hatch = None
713
+ min_distance = float('inf') # Initialize with a very large value
714
+ detected_color = (255, 255, 255) # Default to white
715
+
716
+ # Search for the nearest hatch
717
+ for hatch in doc.modelspace().query('HATCH'): # Query only hatches
718
+ if hatch.paths:
719
+ for path in hatch.paths:
720
+ if path.type == 1: # PolylinePath
721
+ vertices = [v[:2] for v in path.vertices]
722
+ # Calculate the centroid of the hatch
723
+ centroid_x = sum(v[0] for v in vertices) / len(vertices)
724
+ centroid_y = sum(v[1] for v in vertices) / len(vertices)
725
+ centroid = (centroid_x, centroid_y)
726
+
727
+ # Calculate the distance between the text and the hatch centroid
728
+ distance = calculate_distance((x, y), centroid)
729
+
730
+ # Update the nearest hatch if a closer one is found
731
+ if distance < min_distance:
732
+ min_distance = distance
733
+ nearest_hatch = hatch
734
+
735
+ # Get the color of this hatch
736
+ current_color = get_hatch_color(hatch)
737
+ if current_color != (255, 255, 255): # Valid color found
738
+ detected_color = current_color
739
+ break # Stop checking further paths for this hatch
740
+
741
+
742
+ # Append the detected result only once
743
+ text_with_positions.append([text, textNBS, (x, y), detected_color])
744
+ print("text_with_positions=",text_with_positions)
745
+
746
+
747
+
748
+
749
+
750
+
751
+
752
+
753
+ grouped = {}
754
+ for entry in text_with_positions:
755
+ key = entry[0]
756
+ grouped.setdefault(key, []).append(entry)
757
+
758
+ # Filter the groups: if any entry in a group has a non-None Text Nbs, keep only one of those
759
+ filtered_results = []
760
+ for key, entries in grouped.items():
761
+ # Find the first entry with a valid textNBS (non-None)
762
+ complete = next((entry for entry in entries if entry[1] is not None), None)
763
+ if complete:
764
+ filtered_results.append(complete)
765
+ else:
766
+ # If none are complete, you can choose to keep just one entry
767
+ filtered_results.append(entries[0])
768
+
769
+ text_with_positions=filtered_results
770
+
771
+ for entity in msp:
772
+ if entity.dxftype() == 'HATCH':
773
+
774
+ cntPoints=[]
775
+ for path in entity.paths:
776
+
777
+ # path_type = path.type
778
+
779
+ # # Resolve the path type to its name
780
+ # path_type_name = BoundaryPathType(path_type).name
781
+ # print(f"Encountered path type: {path_type_name}")
782
+
783
+ vertices = [] # Reset vertices for each path
784
+
785
+ # print(str(path.type))
786
+
787
+ if str(path.type) == 'BoundaryPathType.POLYLINE' or path.type == 1:
788
+ # if path.type == 2: # Polyline path
789
+ # Handle POLYLINE type HATCH
790
+ vertices = [(vertex[0] * FinalRatio, vertex[1] * FinalRatio) for vertex in path.vertices]
791
+ # print("Hatch Vertices = ",vertices)
792
+
793
+ if len(vertices) > 3:
794
+ poly = ShapelyPolygon(vertices)
795
+ minx, miny, maxx, maxy = poly.bounds
796
+ width = maxx - minx
797
+ height = maxy - miny
798
+
799
+
800
+
801
+
802
+ if (poly.area > 0 and (height > 0.2 or width > 0.2)):
803
+
804
+ length = height
805
+ if(width > length):
806
+ length = width
807
+
808
+ area1 = round(poly.area, 3)
809
+ perimeter = round(poly.length, 3)
810
+ # print("Vertices = ",vertices)
811
+ normalized_vertices = normalize_vertices(vertices)
812
+
813
+ rgb_color = get_hatch_color(entity)
814
+ # print("rgb_color = ",rgb_color)
815
+
816
+ # if(rgb_color == (255, 255, 255)):
817
+ # if(len(text_with_positions)>0):
818
+
819
+ # for text, position, color in text_with_positions:
820
+ # text_position = Point(position[0], position[1])
821
+
822
+ # if poly.contains(text_position):
823
+ # rgb_color = color
824
+ # break
825
+
826
+ duplicate_found = False
827
+ for existing_vertices, existing_area in unique_shapes:
828
+ if normalized_vertices == existing_vertices and areas_are_similar(area1, existing_area):
829
+ duplicate_found = True
830
+ break
831
+
832
+ if not duplicate_found:
833
+ # rgb_color = get_hatch_color(entity) # Assuming this function exists
834
+ unique_shapes.append((normalized_vertices, area1))
835
+
836
+ if length > 0.6:
837
+ hatched_areas.append([vertices, area1, length, rgb_color])
838
+
839
+ elif str(path.type) == 'BoundaryPathType.EDGE' or path.type == 2:
840
+ # elif path.type == 2: # Edge path
841
+ # Handle EDGE type HATCH
842
+ vert = []
843
+ for edge in path.edges:
844
+ x, y = edge.start
845
+ x1, y1 = edge.end
846
+ vert.append((x * FinalRatio, y * FinalRatio))
847
+ vert.append((x1 * FinalRatio, y1 * FinalRatio))
848
+
849
+ poly = ShapelyPolygon(vert)
850
+ minx, miny, maxx, maxy = poly.bounds
851
+ width = maxx - minx
852
+ height = maxy - miny
853
+
854
+ if (poly.area > 0 and (height > 0.2 or width > 0.2)):
855
+
856
+ length = height
857
+ if(width > length):
858
+ length = width
859
+
860
+ area1 = round(poly.area, 3)
861
+ perimeter = round(poly.length, 3)
862
+ normalized_vertices = normalize_vertices(vert)
863
+ rgb_color = get_hatch_color(entity)
864
+ # print("rgb_color = ",rgb_color)
865
+
866
+ # if(rgb_color == (255, 255, 255)):
867
+ # if(len(text_with_positions)>0):
868
+ # for text, position, color in text_with_positions:
869
+ # text_position = Point(position[0], position[1])
870
+
871
+ # if poly.contains(text_position):
872
+ # rgb_color = color
873
+ # break
874
+
875
+
876
+ duplicate_found = False
877
+ for existing_vertices, existing_area in unique_shapes:
878
+ if normalized_vertices == existing_vertices and areas_are_similar(area1, existing_area):
879
+ duplicate_found = True
880
+ break
881
+
882
+ if not duplicate_found:
883
+ # rgb_color = get_hatch_color(entity) # Assuming this function exists
884
+ unique_shapes.append((normalized_vertices, area1))
885
+
886
+ if length > 0.6:
887
+ hatched_areas.append([vert, area1, length, rgb_color])
888
+
889
+ else:
890
+ print(f"Encountered path type: {path.type}")
891
+
892
+ elif entity.dxftype() == 'SOLID':
893
+
894
+
895
+
896
+ vertices = [entity.dxf.vtx0 * (FinalRatio), entity.dxf.vtx1* (FinalRatio), entity.dxf.vtx2* (FinalRatio), entity.dxf.vtx3* (FinalRatio)]
897
+ poly = ShapelyPolygon(vertices)
898
+ minx, miny, maxx, maxy = poly.bounds
899
+
900
+ # Calculate the width and height of the bounding box
901
+ width = maxx - minx
902
+ height = maxy - miny
903
+
904
+ if (poly.area > 0 and (height > 0 and width > 0)):
905
+ area1 = round(poly.area, 3)
906
+ perimeter = round(poly.length, 3)
907
+ normalized_vertices = normalize_vertices(vertices)
908
+
909
+ duplicate_found = False
910
+ for existing_vertices, existing_area in unique_shapes:
911
+ if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
912
+ duplicate_found = True
913
+ break
914
+
915
+ if not duplicate_found:
916
+ rgb_color = get_hatch_color(entity) # Assuming this function exists
917
+ unique_shapes.append((normalized_vertices, area1))
918
+ hatched_areas.append([vertices, area1, perimeter, rgb_color])
919
+
920
+
921
+
922
+ elif entity.dxftype() == 'LWPOLYLINE':
923
+
924
+ vertices = []
925
+ lwpolyline = entity
926
+ points = lwpolyline.get_points()
927
+ flag = 0
928
+
929
+ # Collect vertices and apply the FinalRatio
930
+ for i in range(len(points)):
931
+ vertices.append([points[i][0] * FinalRatio, points[i][1] * FinalRatio])
932
+
933
+ # # Ensure there are more than 3 vertices
934
+ if len(vertices) > 3:
935
+ # Check if the polyline is closed
936
+ if vertices[0][0] == vertices[-1][0] or vertices[0][1] == vertices[-1][1]:
937
+ poly = ShapelyPolygon(vertices)
938
+ minx, miny, maxx, maxy = poly.bounds
939
+
940
+ # Calculate width and height of the bounding box
941
+ width = maxx - minx
942
+ height = maxy - miny
943
+
944
+ # Check area and size constraints
945
+ if (poly.area > 0 and (height > 0 and width > 0)):
946
+ area1 = round(poly.area, 3)
947
+ perimeter = round(poly.length, 3)
948
+ normalized_vertices = normalize_vertices(vertices)
949
+
950
+ duplicate_found = False
951
+ for existing_vertices, existing_area in unique_shapes:
952
+ if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
953
+ duplicate_found = True
954
+ break
955
+
956
+ if not duplicate_found:
957
+ rgb_color = get_hatch_color(entity) # Assuming this function exists
958
+ unique_shapes.append((normalized_vertices, area1))
959
+ hatched_areas.append([vertices, area1, perimeter, rgb_color])
960
+
961
+
962
+
963
+ elif entity.dxftype() == 'POLYLINE':
964
+
965
+ flag=0
966
+ vertices = [(v.dxf.location.x * (FinalRatio), v.dxf.location.y * (FinalRatio)) for v in entity.vertices]
967
+ # print('Vertices:', vertices)
968
+
969
+ if(len(vertices)>3):
970
+
971
+ if(vertices[0][0] == vertices[len(vertices)-1][0] or vertices[0][1] == vertices[len(vertices)-1][1]):
972
+
973
+ poly=ShapelyPolygon(vertices)
974
+ minx, miny, maxx, maxy = poly.bounds
975
+
976
+ # Calculate the width and height of the bounding box
977
+ width = maxx - minx
978
+ height = maxy - miny
979
+
980
+ if (poly.area > 0 and (height > 0 and width > 0)):
981
+ area1 = round(poly.area,3)
982
+ perimeter = round (poly.length,3)
983
+ normalized_vertices = normalize_vertices(vertices)
984
+
985
+ duplicate_found = False
986
+ for existing_vertices, existing_area in unique_shapes:
987
+ if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
988
+ duplicate_found = True
989
+ break
990
+
991
+ if not duplicate_found:
992
+ rgb_color = get_hatch_color(entity) # Assuming this function exists
993
+ unique_shapes.append((normalized_vertices, area1))
994
+ hatched_areas.append([vertices, area1, perimeter, rgb_color])
995
+
996
+
997
+ elif entity.dxftype() == 'SPLINE':
998
+
999
+ spline_entity = entity
1000
+ vertices = []
1001
+ control_points = spline_entity.control_points
1002
+ if(len(control_points)>3):
1003
+ for i in range(len(control_points)):
1004
+ vertices.append([control_points[i][0]* (FinalRatio),control_points[i][1]* (FinalRatio)])
1005
+ poly=ShapelyPolygon(vertices)
1006
+
1007
+ minx, miny, maxx, maxy = poly.bounds
1008
+
1009
+ # Calculate the width and height of the bounding box
1010
+ width = maxx - minx
1011
+ height = maxy - miny
1012
+
1013
+
1014
+ if (poly.area > 0 and (height > 0 and width > 0)):
1015
+ area1 = round(poly.area,3)
1016
+ perimeter = round (poly.length,3)
1017
+ normalized_vertices = normalize_vertices(vertices)
1018
+
1019
+ duplicate_found = False
1020
+ for existing_vertices, existing_area in unique_shapes:
1021
+ if normalized_vertices == existing_vertices or areas_are_similar(area1, existing_area):
1022
+ duplicate_found = True
1023
+ break
1024
+
1025
+ if not duplicate_found:
1026
+ rgb_color = get_hatch_color(entity) # Assuming this function exists
1027
+ unique_shapes.append((normalized_vertices, area1))
1028
+ hatched_areas.append([vertices, area1, perimeter, rgb_color])
1029
+
1030
+
1031
+
1032
+ sorted_data = sorted(hatched_areas, key=lambda x: x[1])
1033
+ return sorted_data,text_with_positions
1034
+
1035
+
1036
+ """### Rotate polygon"""
1037
+
1038
+
1039
+
1040
def rotate_point(point, angle, pdfrotation, width, height, center_point=(0, 0)):
    """Rotate a single (x, y) point counter-clockwise by ``angle`` degrees
    around ``center_point`` and shift it back into page coordinates.

    When ``pdfrotation`` is non-zero the page ``width`` is added to x;
    otherwise the page ``height`` is added to y (matches how the PDF page
    origin moves under rotation).
    """
    theta = radians(angle % 360)
    cos_t, sin_t = cos(theta), sin(theta)

    # Translate so the rotation centre sits at the origin.
    dx = point[0] - center_point[0]
    dy = point[1] - center_point[1]

    # Apply the 2-D rotation matrix.
    rot_x = dx * cos_t - dy * sin_t
    rot_y = dx * sin_t + dy * cos_t

    # Undo the translation, offsetting by page width/height depending on
    # the original PDF rotation.
    if pdfrotation != 0:
        return (rot_x + width + center_point[0], rot_y + center_point[1])
    return (rot_x + center_point[0], rot_y + height + center_point[1])
1059
+
1060
+
1061
def rotate_polygon(polygon, angle, pdfrotation, width, height, center_point=(0, 0)):
    """Rotate every (x, y) corner of ``polygon`` counter-clockwise by
    ``angle`` degrees around ``center_point``.

    Returns a new list of rotated corners; the input is not modified.
    """
    return [
        rotate_point(corner, angle, pdfrotation, width, height, center_point)
        for corner in polygon
    ]
1072
+
1073
# Create a DataFrame containing: color, count (how many times this object is
# found in the plan), area of one of these shapes, total area,
# perimeter, total perimeter, length, total length.
1075
+ #import pandas as pd
1076
+ #SimilarAreaDictionary= pd.DataFrame(columns=['Guess','Color','Occurences','Area','Total Area','Perimeter','Total Perimeter','Length','Total Length','R','G','B'])
1077
# Loop over the hatched areas, count the occurrences of each shape, and build a table from these values.
1078
+
1079
+
1080
+
1081
def Create_DF(dxfpath,datadoc,hatched_areas):
    """Aggregate hatched shapes into a per-shape summary DataFrame.

    Shapes whose area AND perimeter match an existing row exactly (tol is 0)
    are counted as additional occurrences of that row; otherwise a new row
    is appended with the shape's colour.

    Parameters:
        dxfpath: path to the DXF drawing (passed to RetriveRatio).
        datadoc: raw PDF document data (passed to RetriveRatio).
        hatched_areas: list of [vertices, area, perimeter_or_length, rgb_color]
            entries (third element is a length for HATCH shapes and a
            perimeter for the other entity types — see get_hatched_areas).

    Returns:
        pd.DataFrame with columns Guess/Color/Occurences/Area/Total Area/
        Perimeter/Total Perimeter/Length/Total Length/Texts/Comments.
        Columns not filled here ('Guess', 'Length', 'Texts', ...) stay NaN.
    """

    # NOTE(review): FinalRatio is computed but never used below — presumably
    # kept for the commented-out get_hatched_areas call; confirm before removing.
    FinalRatio= RetriveRatio(datadoc,dxfpath)

    # hatched_areas = get_hatched_areas(datadoc,dxfpath,FinalRatio)

    # hatched_areas=remove_duplicate_shapes(new_hatched_areas)

    # SimilarAreaDictionary= pd.DataFrame(columns=['Area', 'Total Area', 'Perimeter', 'Total Perimeter', 'Occurences', 'Color'])
    SimilarAreaDictionary= pd.DataFrame(columns=['Guess','Color','Occurences','Area','Total Area','Perimeter','Total Perimeter','Length','Total Length','Texts','Comments'])

    # (removed: large hard-coded colour palette previously assigned per shape;
    # colours now come from each shape's shape[3] value instead)
    TotalArea=0
    TotalPerimeter=0
    for shape in hatched_areas:
        area = shape[1] # shape area (already scaled to metres)
        perimeter = shape[2] # perimeter (or longest side for HATCH entries)
        TotalArea = area
        TotalPerimeter = perimeter
        # tol == 0 means only EXACT area+perimeter matches are merged.
        tol=0
        condition1 = (SimilarAreaDictionary['Area'] >= area - tol) & (SimilarAreaDictionary['Area'] <= area +tol)
        condition2 = (SimilarAreaDictionary['Perimeter'] >= perimeter -tol) & (SimilarAreaDictionary['Perimeter'] <= perimeter +tol)
        combined_condition = condition1 & condition2

        if any(combined_condition):
            # Same shape seen before: bump the count and recompute the totals
            # from the per-shape value times the new occurrence count.
            index = np.where(combined_condition)[0][0]
            SimilarAreaDictionary.at[index, 'Occurences'] += 1
            SimilarAreaDictionary.at[index, 'Total Area'] = SimilarAreaDictionary.at[index, 'Area'] * SimilarAreaDictionary.at[index, 'Occurences']
            SimilarAreaDictionary.at[index, 'Total Perimeter'] = SimilarAreaDictionary.at[index, 'Perimeter'] * SimilarAreaDictionary.at[index, 'Occurences']
        else:
            # First time this shape is seen: start a fresh row.
            TotalArea=area
            TotalPerimeter=perimeter
            # print("Shape[3]",shape[3])
            new_data = {'Area': area, 'Total Area': TotalArea ,'Perimeter': perimeter, 'Total Perimeter': TotalPerimeter, 'Occurences': 1, 'Color':shape[3],'Comments':''} #add color here and read color to insert in
            SimilarAreaDictionary = pd.concat([SimilarAreaDictionary, pd.DataFrame([new_data])], ignore_index=True)

    # print(SimilarAreaDictionary)
    return SimilarAreaDictionary
1127
+ """### Draw on Image and PDF"""
1128
+
1129
+ # from sklearn.cluster import KMeans
1130
+
1131
def color_distance(color1, color2):
    """Return a coarse similarity score between two RGB colours.

    Despite the name this is not a metric: it returns ``1`` when each of
    the three channels differs by less than 20 (colours considered "the
    same"), and ``100`` otherwise.  Callers (see ``unify_colors``) only
    compare the result against a threshold, so just the two magnitudes
    matter.

    Parameters:
        color1, color2: indexable RGB triples (0-255 per channel).

    Returns:
        int: 1 for near-identical colours, 100 otherwise.
    """
    # Fixed: removed six leftover debug print() calls that spammed stdout
    # on every comparison; the comparison logic is unchanged.
    if (abs(color1[0] - color2[0]) < 20 and
            abs(color1[1] - color2[1]) < 20 and
            abs(color1[2] - color2[2]) < 20):
        return 1
    return 100
1143
+ # return np.sqrt(sum((a - b) ** 2 for a, b in zip(color1, color2)))
1144
+
1145
+ # Unify colors within a distance threshold
1146
def unify_colors(df, threshold=20):
    """Merge visually similar consecutive colours in ``df['Color']``.

    Colours are first coerced from lists to tuples.  Each row's colour is
    then compared with the next row's; when ``color_distance`` reports a
    value within ``threshold`` the next row inherits the current colour,
    so chains of near-duplicates collapse to one colour.

    The DataFrame is modified in place and also returned.
    """
    # Lists are unhashable and compare awkwardly; normalise to tuples first.
    df['Color'] = df['Color'].apply(lambda c: tuple(c) if isinstance(c, list) else c)

    last_row = len(df) - 1
    for row in range(last_row):
        current = df.at[row, 'Color']
        # Propagate the current colour forward when the next one is close.
        if color_distance(current, df.at[row + 1, 'Color']) <= threshold:
            df.at[row + 1, 'Color'] = current

    return df
1161
+
1162
def normalize_color(color):
    """Convert a PDF colour (components in 0-1) to an 8-bit RGB tuple."""
    channels = []
    for component in color:
        value = round(component * 255)
        # Clamp into the valid 8-bit range.
        if value < 0:
            value = 0
        elif value > 255:
            value = 255
        channels.append(value)
    return tuple(channels)
1165
+
1166
+
1167
+
1168
+
1169
def adjustannotations(OutputPdfStage1,text_with_positions):
    """Post-process measurement annotations in an in-memory PDF.

    Two adjustments are applied to every page's annotations:
      * '/Line' annotations whose subject contains 'Perimeter Measurement'
        get a /Measure dictionary (unit 'm'), intent '/LineDimension' and
        a new subject 'Length Measurement'.
      * '/Line' and '/PolyLine' annotations carrying a colour (/C) get a
        title (/T) built from the entry in ``text_with_positions`` whose
        colour matches the annotation's normalised RGB colour.

    Parameters:
        OutputPdfStage1: raw PDF bytes to adjust.
        text_with_positions: iterable of (text, NBS, position, color)
            4-tuples; color is an 8-bit RGB tuple.
            # NOTE(review): an earlier commented block in get_hatched_areas
            # unpacked 3-tuples (text, position, color) — confirm the
            # current producer always emits 4-tuples.

    Returns:
        bytes: the adjusted PDF.
    """
    input_pdf_path = OutputPdfStage1
    output_pdf_path = "Final-WallsAdjusted.pdf"

    # Load the input PDF from bytes (no file I/O).
    pdf_bytes_io = BytesIO(OutputPdfStage1)

    reader = PdfReader(pdf_bytes_io)
    writer = PdfWriter()

    # Append all pages to the writer
    writer.append_pages_from_reader(reader)

    # Add metadata (optional)
    metadata = reader.metadata
    writer.add_metadata(metadata)

    for page_index, page in enumerate(writer.pages):
        if "/Annots" in page:
            annotations = page["/Annots"]
            for annot_index, annot in enumerate(annotations):
                obj = annot.get_object()

                # print("obj", obj)
                # print(obj.get("/IT"))

                if obj.get("/Subtype") == "/Line":
                    # Only retag line annotations previously created as
                    # perimeter measurements (see mainFunctionDrawImgPdf).
                    # if "/Contents" in obj and "m" in obj["/Contents"]:
                    if "/Subj" in obj and "Perimeter Measurement" in obj["/Subj"]:
                        obj.update({
                            NameObject("/Measure"): DictionaryObject({
                                NameObject("/Type"): NameObject("/Measure"),
                                NameObject("/L"): DictionaryObject({
                                    NameObject("/G"): FloatObject(1),
                                    NameObject("/U"): TextStringObject("m"), # Unit of measurement
                                }),

                            }),
                            NameObject("/IT"): NameObject("/LineDimension"), # Use more distinctive name
                            NameObject("/Subj"): TextStringObject("Length Measurement"),
                        })
                        # print(obj)

                if obj.get("/Subtype") in ["/Line", "/PolyLine"] and "/C" in obj:
                    # Match the annotation to a text entry by exact
                    # normalised-colour equality.
                    annot_color = normalize_color(obj["/C"])
                    matched_entry = next(
                        ((text, NBS) for text,NBS, _, color in text_with_positions if annot_color == color),
                        (None, None)
                    )
                    # print("matched_entry = ",matched_entry)
                    matched_text, matched_nbs = matched_entry

                    # Combine whichever parts were found into the /T title.
                    combined_text = ""
                    if matched_text and matched_nbs:
                        combined_text = f"{matched_text} - {matched_nbs}"
                    elif matched_text:
                        combined_text = matched_text
                    elif matched_nbs:
                        combined_text = matched_nbs

                    obj.update({
                        NameObject("/T"): TextStringObject(combined_text), # Custom text for "Comment" column
                    })

    # Serialise the adjusted document back to bytes.
    output_pdf_io = BytesIO()
    writer.write(output_pdf_io)
    output_pdf_io.seek(0)

    print(f"Annotations updated and saved to {output_pdf_path}")
    return output_pdf_io.read()
1245
+
1246
def distance(rect1, rect2):
    """Euclidean distance between the centres of two annotation rectangles.

    Each rect is (x0, y0, x1, y1); components may be PDF number objects,
    hence the float() coercion.
    """
    def center(rect):
        # Midpoint of the rectangle's diagonal.
        return ((float(rect[0]) + float(rect[2])) / 2,
                (float(rect[1]) + float(rect[3])) / 2)

    (cx1, cy1) = center(rect1)
    (cx2, cy2) = center(rect2)
    return math.sqrt((cx2 - cx1) ** 2 + (cy2 - cy1) ** 2)
1251
+
1252
def remove_duplicate_annotations(pdf_path, threshold):
    """Remove duplicate annotations that are close together and share a colour.

    For each page, annotations with both a /Rect and a colour (/C) are
    collected; when two have equal normalised colours and centre-to-centre
    distance below ``threshold`` (PDF points), the later one is dropped.

    Parameters:
        pdf_path: raw PDF bytes (despite the name — it is fed to BytesIO).
        threshold: maximum centre distance for two annotations to count
            as duplicates.

    Returns:
        bytes: the de-duplicated PDF.
    """

    input_pdf_path = pdf_path
    output_pdf_path = "Filtered-Walls.pdf"

    # Load the input PDF from bytes.
    pdf_bytes_io = BytesIO(pdf_path)

    reader = PdfReader(pdf_bytes_io)
    writer = PdfWriter()

    # Pages are added one by one below (after filtering), so the bulk
    # append is intentionally disabled.
    # writer.append_pages_from_reader(reader)

    # Add metadata (optional)
    metadata = reader.metadata
    writer.add_metadata(metadata)

    for page_index in range(len(reader.pages)):
        page = reader.pages[page_index]

        if "/Annots" in page:
            annotations = page["/Annots"]
            annots_data = []
            to_delete = set()

            # Extract annotation positions and colors
            for annot_index, annot_ref in enumerate(annotations):
                annot = annot_ref.get_object()

                if "/Rect" in annot and "/C" in annot:
                    rect = annot["/Rect"]
                    if isinstance(rect, ArrayObject): # Ensure rect is a list
                        rect = list(rect)

                    color = normalize_color(annot["/C"])
                    annots_data.append((annot_index, rect, color))

            # Pairwise comparison; already-deleted indices are skipped so
            # one survivor is kept per cluster of duplicates.
            for i, (idx1, rect1, color1) in enumerate(annots_data):
                if idx1 in to_delete:
                    continue
                for j, (idx2, rect2, color2) in enumerate(annots_data[i+1:], start=i+1):
                    if idx2 in to_delete:
                        continue
                    if color1 == color2 and distance(rect1, rect2) < threshold:
                        to_delete.add(idx2) # Mark second annotation for deletion

            # Rebuild the page's annotation array without the duplicates.
            new_annotations = [annotations[i] for i in range(len(annotations)) if i not in to_delete]
            page[NameObject("/Annots")] = ArrayObject(new_annotations)

        writer.add_page(page)

    # Serialise the filtered document back to bytes.
    output_pdf_io = BytesIO()
    writer.write(output_pdf_io)
    output_pdf_io.seek(0)

    return output_pdf_io.read()
1312
+
1313
+
1314
+
1315
+
1316
+
1317
+
1318
def calculate_distance(p1, p2):
    """Euclidean distance between two (x, y) points."""
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return math.sqrt(dx ** 2 + dy ** 2)
1320
+
1321
def compare_annotations_paths(filtered_pdf_path: str, page2_pdf_path: str) -> str:
    """
    Compare annotations between two PDFs given as file paths.

    Every annotation in the filtered PDF that does not intersect at least
    one annotation on the corresponding page of the second PDF is deleted.
    The surviving document is written to 'compared output.pdf'.

    Fixed: the docstring previously sat AFTER the first statement (making
    it a dead string expression, not a docstring), documented a
    non-existent ``output_path`` parameter, and the signature was
    annotated ``-> None`` although the function returns the output path.
    Document cleanup is now guaranteed via try/finally.

    Parameters:
        filtered_pdf_path (str): Path to the filtered PDF.
        page2_pdf_path (str): Path to the second PDF used for comparison.

    Returns:
        str: Path of the written output PDF ('compared output.pdf').
    """
    # NOTE(review): callers must pass file paths; mainFunctionDrawImgPdf
    # appears to pass raw bytes and a fitz page instead — verify call site.
    output_path = 'compared output.pdf'

    # Read both PDFs into memory as bytes.
    with open(filtered_pdf_path, 'rb') as f1:
        filtered_bytes = f1.read()
    with open(page2_pdf_path, 'rb') as f2:
        page2_bytes = f2.read()

    # Open both PDFs from in-memory bytes.
    filtered_doc = fitz.open(stream=filtered_bytes, filetype="pdf")
    page2_doc = fitz.open(stream=page2_bytes, filetype="pdf")

    try:
        # Iterate only up to the shorter document.
        num_pages = min(len(filtered_doc), len(page2_doc))

        for i in range(num_pages):
            f_page = filtered_doc[i]
            p2_page = page2_doc[i]

            # Get annotation lists (or empty).
            f_annots = list(f_page.annots()) or []
            p2_annots = list(p2_page.annots()) or []

            # Delete any annotation in f_page with no intersection.
            for annot in f_annots:
                if not any(annot.rect.intersects(a.rect) for a in p2_annots):
                    f_page.delete_annot(annot)

        # Serialize the filtered document and persist it to output_path.
        result_bytes = filtered_doc.write()
        with open(output_path, 'wb') as out_f:
            out_f.write(result_bytes)
    finally:
        # Always release the fitz documents, even if filtering fails.
        filtered_doc.close()
        page2_doc.close()

    return output_path
1372
+
1373
+
1374
+ def mainFunctionDrawImgPdf(datadoc,dxfpath, dxfratio,SearchArray,Thickness,selected_walls,pdfpath=0,pdfname=0):
1375
+ OutputPdfStage1='BB Trial.pdf'
1376
+ FinalRatio= RetriveRatio(datadoc,dxfpath)
1377
+
1378
+ # hatched_areas = get_hatched_areas(datadoc,dxfpath,FinalRatio)
1379
+ # hatched_areas=remove_duplicate_shapes(new_hatched_areas)
1380
+
1381
+ img,pix2=pdftoimg(datadoc)
1382
+ flipped_horizontal=flip(img)
1383
+ allcnts = []
1384
+ imgg = flipped_horizontal
1385
+ # imgtransparent1=imgg.copy()
1386
+ doc = fitz.open('pdf',datadoc)
1387
+ page2 = doc[0]
1388
+ pageNew = doc[0]
1389
+ rotationOld=page2.rotation
1390
+ derotationMatrix=page2.derotation_matrix
1391
+ # print("Derotation Matrix = ",derotationMatrix)
1392
+ pix=page2.get_pixmap()
1393
+ width=abs(page2.mediabox[2])+abs(page2.mediabox[0])
1394
+ height=abs(page2.mediabox[3])+abs(page2.mediabox[1])
1395
+ print('mediabox', width , height)
1396
+
1397
+
1398
+
1399
+
1400
+
1401
+
1402
+ if page2.rotation!=0:
1403
+
1404
+ rotationangle = page2.rotation
1405
+ page2.set_rotation(0)
1406
+ ratio = pix.width/ img.shape[0]
1407
+ else:
1408
+ ratio = pix.width/ img.shape[1]
1409
+ rotationangle = 270
1410
+
1411
+ hatched_areas,text_with_positions = get_hatched_areas(datadoc,dxfpath,FinalRatio,rotationangle,SearchArray)
1412
+ allshapes=[]
1413
+ # Iterate through each polygon in metric units
1414
+ NewColors = []
1415
+ SimilarAreaDictionary=Create_DF(dxfpath,datadoc,hatched_areas)
1416
+ i=0
1417
+ flagcolor = 0
1418
+ ColorCounter = 0
1419
+ ColorCheck=[]
1420
+ deleterows = []
1421
+
1422
+
1423
+ # def color_distance(color1, color2):
1424
+ # return np.sqrt(sum((a - b) ** 2 for a, b in zip(color1, color2)))
1425
+
1426
+ color_margin = 2 # Define margin threshold
1427
+
1428
+ for polygon in hatched_areas:
1429
+ cntPoints = []
1430
+ cntPoints1 = []
1431
+ shapeePerimeter = []
1432
+ shapeeArea = []
1433
+ Text_Detected = 0
1434
+
1435
+ blackImgShapes = np.zeros(imgg.shape[:2], dtype="uint8")
1436
+ blackImgShapes= cv2.cvtColor(blackImgShapes, cv2.COLOR_GRAY2BGR)
1437
+
1438
+ # Convert each vertex from metric to pixel coordinates
1439
+ for vertex in polygon[0]:
1440
+ x = (vertex[0]) *dxfratio
1441
+ y = (vertex[1]) *dxfratio
1442
+ if rotationangle==0:
1443
+ if y<0:
1444
+ y=y*-1
1445
+ cntPoints.append([int(x), int(y)])
1446
+ cntPoints1.append([x, y])
1447
+
1448
+ cv2.drawContours(blackImgShapes, [np.array(cntPoints)], -1, ([255,255,255]), thickness=-1)
1449
+ x, y, w, h = cv2.boundingRect(np.array(cntPoints))
1450
+ firstpoint = 0
1451
+ for poi in np.array(cntPoints1):
1452
+ if firstpoint == 0:
1453
+ x2, y2 = poi
1454
+ p2 = fitz.Point(x2,y2)
1455
+ # p1 = fitz.Point(x1,y1)
1456
+ p2=p2*derotationMatrix
1457
+ shapeePerimeter.append([p2[0],p2[1]])
1458
+ firstpoint = 1
1459
+ else:
1460
+ x1, y1 = poi
1461
+ p1 = fitz.Point(x1,y1)
1462
+ # p1 = fitz.Point(x1,y1)
1463
+ p1=p1*derotationMatrix
1464
+ # print("P1 = ",p1)
1465
+ shapeePerimeter.append([p1[0],p1[1]])
1466
+
1467
+ shapeePerimeter.append([p2[0],p2[1]])
1468
+ shapeePerimeter=np.flip(shapeePerimeter,1)
1469
+ shapeePerimeter=rotate_polygon(shapeePerimeter,rotationangle,rotationOld,width,height)
1470
+
1471
+ for poi in np.array(cntPoints1):
1472
+ x1, y1 = poi
1473
+ p1 = fitz.Point(x1,y1)
1474
+ # p1 = fitz.Point(x1,y1)
1475
+ p1=p1*derotationMatrix
1476
+ # print("P1 = ",p1)
1477
+ shapeeArea.append([p1[0],p1[1]])
1478
+
1479
+ shapeeArea.append([p2[0],p2[1]])
1480
+ shapeeArea=np.flip(shapeeArea,1)
1481
+ shapeeArea=rotate_polygon(shapeeArea,rotationangle,rotationOld,width,height)
1482
+
1483
+ tol=0
1484
+ condition1 = (SimilarAreaDictionary['Area'] >= polygon[1] - tol) & (SimilarAreaDictionary['Area'] <= polygon[1] +tol)
1485
+ condition2 = (SimilarAreaDictionary['Perimeter'] >= polygon[2] -tol) & (SimilarAreaDictionary['Perimeter'] <= polygon[2] +tol)
1486
+ combined_condition = condition1 & condition2
1487
+ # print("combined_condition = ",combined_condition)
1488
+
1489
+ if any(combined_condition):
1490
+
1491
+ flagcolor = 1
1492
+ index = np.where(combined_condition)[0][0]
1493
+ # print(SimilarAreaDictionary.at[index, 'Color'])
1494
+ NewColors=SimilarAreaDictionary.at[index, 'Color']
1495
+
1496
+ else:
1497
+ flagcolor = 2
1498
+ NewColors=SimilarAreaDictionary.at[i, 'Color']
1499
+ # flagcolor = 2
1500
+
1501
+ # cv2.drawContours(imgg, [np.array(cntPoints)], -1, (NewColors), thickness=2)
1502
+ # print("new color = ",NewColors)
1503
+ # print("New Colors = ",NewColors)
1504
+ # if img is not None or img.shape[0] != 0 or img.shape[1] != 0:
1505
+ if(int(NewColors[0])==255 and int(NewColors[1])==255 and int(NewColors[2])==255):
1506
+
1507
+ WhiteImgFinal = cv2.bitwise_and(blackImgShapes,imgg)
1508
+ # print("length = ",WhiteImgFinal.shape[0])
1509
+ # print("width = ",WhiteImgFinal.shape[1])
1510
+ flipped=flip(WhiteImgFinal)
1511
+ # print("Flipped")
1512
+ # cv2_imshow(flipped)
1513
+
1514
+ imgslice = WhiteImgFinal[y:y+h, x:x+w]
1515
+ # print("length slice = ",imgslice.shape[0])
1516
+ # print("width slice = ",imgslice.shape[1])
1517
+ if(imgslice.shape[0] != 0 and imgslice.shape[1] != 0):
1518
+ flippedSlice=flip(imgslice)
1519
+ # print("Sliced & Flipped")
1520
+ # cv2_imshow(flippedSlice)
1521
+
1522
+ # Convert flippedSlice to PIL for color extraction
1523
+ flippedSlice_pil = Image.fromarray(flippedSlice)
1524
+
1525
+ # Define patch size for color sampling (e.g., 10x10 pixels)
1526
+ patch_size = 100
1527
+ patch_colors = []
1528
+
1529
+ # Loop through patches in the image
1530
+ for i in range(0, flippedSlice_pil.width, patch_size):
1531
+ for j in range(0, flippedSlice_pil.height, patch_size):
1532
+ # Crop a patch from the original image
1533
+ patch = flippedSlice_pil.crop((i, j, i + patch_size, j + patch_size))
1534
+ patch_colors += patch.getcolors(patch_size * patch_size)
1535
+
1536
+ # Calculate the dominant color from all patches
1537
+ max_count = 0
1538
+ dominant_color = None
1539
+ tolerance = 5
1540
+ black_threshold = 30 # Max RGB value for a color to be considered "black"
1541
+ white_threshold = 225 # Min RGB value for a color to be considered "white"
1542
+
1543
+ for count, color in patch_colors:
1544
+ # Exclude colors within the black and white ranges
1545
+ if not (all(c <= black_threshold for c in color) or all(c >= white_threshold for c in color)):
1546
+ # Update if the current color has a higher count than previous max
1547
+ if count > max_count:
1548
+ max_count = count
1549
+ dominant_color = color
1550
+
1551
+ # print("Dominant Color =", dominant_color)
1552
+
1553
+ # Append dominant color to ColorCheck and update NewColors
1554
+ if dominant_color is not None:
1555
+ ColorCheck.append(dominant_color)
1556
+
1557
+ NewColors = None # Initialize NewColors
1558
+
1559
+ for color in ColorCheck:
1560
+ # Check if the current color is within the tolerance
1561
+ # print("color = ",color)
1562
+ # print("dominant_color = ",dominant_color)
1563
+ if (abs(color[0] - dominant_color[0]) < 20 and
1564
+ abs(color[1] - dominant_color[1]) < 20 and
1565
+ abs(color[2] - dominant_color[2]) < 20):
1566
+ NewColors = (color[2], color[1], color[0]) # Set the new color
1567
+ break
1568
+ else:
1569
+ # If no color in ColorCheck meets the tolerance, use the dominant color
1570
+ NewColors = (dominant_color[2], dominant_color[1], dominant_color[0])
1571
+ # break
1572
+
1573
+ # Avoid appending `dominant_color` again unnecessarily
1574
+ if NewColors not in ColorCheck:
1575
+ ColorCheck.append(NewColors)
1576
+
1577
+ if flagcolor == 1:
1578
+ SimilarAreaDictionary.at[index, 'Color'] = NewColors
1579
+ # # print(f"Updated Color at index {index} with {NewColors}.")
1580
+ elif flagcolor == 2:
1581
+ SimilarAreaDictionary.at[i, 'Color'] = NewColors
1582
+ # print("New Colors = ",NewColors)
1583
+ cv2.drawContours(imgg, [np.array(cntPoints)], -1, ([NewColors[2],NewColors[1],NewColors[0]]), thickness=3)
1584
+
1585
+
1586
+
1587
+
1588
+ start_point1 = shapeePerimeter[0]
1589
+ end_point1 = shapeePerimeter[1]
1590
+ start_point2 = shapeePerimeter[0]
1591
+ end_point2 = shapeePerimeter[-2]
1592
+
1593
+ distance1 = calculate_distance(start_point1, end_point1)
1594
+ distance2 = calculate_distance(start_point2, end_point2)
1595
+
1596
+
1597
+
1598
+ # Divide the shapePerimeter into two halves
1599
+ half_index = len(shapeePerimeter) // 2
1600
+ # half1 = shapeePerimeter[1:half_index+1]
1601
+ # half2 = shapeePerimeter[half_index:]
1602
+ half1 = shapeePerimeter[1:half_index]
1603
+ half2 = shapeePerimeter[half_index:-1]
1604
+
1605
+
1606
+
1607
+ # Calculate distances for the halves
1608
+ if len(half1) >= 2:
1609
+ half1_distance = sum(calculate_distance(half1[i], half1[i + 1]) for i in range(len(half1) - 1))
1610
+ else:
1611
+ half1_distance = 0
1612
+
1613
+ if len(half2) >= 2:
1614
+ half2_distance = sum(calculate_distance(half2[i], half2[i + 1]) for i in range(len(half2) - 1))
1615
+ else:
1616
+ half2_distance = 0
1617
+
1618
+ max_distance = max(distance1, distance2, half1_distance)
1619
+
1620
+ if max_distance == distance1:
1621
+ # Draw the line annotation for distance1
1622
+ chosen_start = start_point1
1623
+ chosen_end = end_point1
1624
+ annot12 = page2.add_line_annot(chosen_start, chosen_end)
1625
+ elif max_distance == distance2:
1626
+ # Draw the line annotation for distance2
1627
+ chosen_start = start_point2
1628
+ chosen_end = end_point2
1629
+ annot12 = page2.add_line_annot(chosen_start, chosen_end)
1630
+ elif max_distance == half1_distance:
1631
+ # Draw the polyline annotation for half1
1632
+ annot12 = page2.add_polyline_annot(half1)
1633
+ # else: # max_distance == half2_distance
1634
+ # # Draw the polyline annotation for half2
1635
+ # annot12 = page2.add_polyline_annot(half2)
1636
+
1637
+
1638
+
1639
+ annot12.set_border(width=0.8)
1640
+ annot12.set_colors(stroke=(int(NewColors[0])/255,int(NewColors[1])/255,int(NewColors[2])/255))
1641
+ # annot12.set_info(content=str(polygon[2])+' m',subject='Perimeter Measurement', title="ADR Team")
1642
+ annot12.set_info(subject='Perimeter Measurement',content=str(polygon[2])+' m')
1643
+ annot12.set_opacity(0.8)
1644
+ annot12.update()
1645
+
1646
+
1647
+ i += 1
1648
+ alpha = 0.8 # Transparency factor.
1649
+
1650
+ page2.set_rotation(rotationOld)
1651
+ Correct_img=flip(imgg)
1652
+
1653
+ image_new1 = cv2.addWeighted(Correct_img, alpha, img, 1 - alpha, 0)
1654
+ SimilarAreaDictionary = SimilarAreaDictionary.fillna(' ')
1655
+
1656
+ # Define white color to filter out
1657
+ white_color = (255, 255, 255)
1658
+
1659
+ # Delete rows where 'Guess' equals white_color
1660
+ SimilarAreaDictionary = SimilarAreaDictionary[SimilarAreaDictionary['Color'] != white_color]
1661
+
1662
+ # Reset the index to update row numbering
1663
+ SimilarAreaDictionary.reset_index(drop=True, inplace=True)
1664
+
1665
+
1666
+ grouped_df = SimilarAreaDictionary.groupby('Color').agg({
1667
+ 'Guess': 'first',
1668
+ 'Occurences': 'sum', # Sum of occurrences for each color
1669
+ 'Area':'first',
1670
+ 'Total Area': 'sum', # Sum of areas for each color
1671
+ 'Perimeter':'first',
1672
+ 'Total Perimeter': 'sum', # Sum of perimeters for each color
1673
+ 'Length':'first',
1674
+ 'Total Length': 'sum', # Sum of lengths for each color
1675
+ 'Texts': 'first', # Keep the first occurrence of 'Texts'
1676
+ 'Comments': 'first' # Keep the first occurrence of 'Comments'
1677
+
1678
+ }).reset_index()
1679
+
1680
+ # doc.save(OutputPdfStage1)
1681
+ # OutputPdfStage2=adjustannotations(OutputPdfStage1,text_with_positions)
1682
+ modified_pdf_data = doc.tobytes()
1683
+ OutputPdfStage2=adjustannotations(modified_pdf_data,text_with_positions)
1684
+
1685
+ # threshold = math.ceil(float(Thickness) * float(dxfratio) )
1686
+ # print(threshold)
1687
+ OutputPdfStage3 = remove_duplicate_annotations(OutputPdfStage2,threshold=10)
1688
+
1689
+
1690
+
1691
+ walls= selected_walls
1692
+
1693
+ # Convert each wall's list of points to a NumPy array
1694
+ # walls_array = [np.array(wall, dtype=np.int32) for wall in walls]
1695
+
1696
+ walls_array = [
1697
+ np.array(list(filter(lambda pt: pt is not Ellipsis, wall)), dtype=np.int32)
1698
+ for wall in walls
1699
+ ]
1700
+
1701
+
1702
+ derotationMatrix=pageNew.derotation_matrix
1703
+
1704
+ shapeePoints=[]
1705
+ firstpoint = 0
1706
+
1707
+ # # Create an empty image or load your existing image
1708
+ # img = np.zeros((pix.height, pix.width, 3), dtype=np.uint8) # Replace 'height' and 'width' with your image dimensions
1709
+
1710
+ # Draw each wall contour on the image
1711
+ for wall in walls_array:
1712
+ shapeePoints=[]
1713
+ # cv2.drawContours(img, [wall], -1, (255, 0, 0), thickness=4)
1714
+ # print(wall)
1715
+ for poi in wall:
1716
+ if firstpoint == 0:
1717
+ x2, y2 = poi
1718
+ p2 = fitz.Point(x2,y2)
1719
+ # p1 = fitz.Point(x1,y1)
1720
+ p2=p2*derotationMatrix
1721
+ shapeePoints.append([p2[0],p2[1]])
1722
+ firstpoint = 1
1723
+ else:
1724
+ x1, y1 = poi
1725
+ p1 = fitz.Point(x1,y1)
1726
+ # p1 = fitz.Point(x1,y1)
1727
+ p1=p1*derotationMatrix
1728
+ # print("P1 = ",p1)
1729
+ shapeePoints.append([p1[0],p1[1]])
1730
+
1731
+ annot122 = pageNew.add_polyline_annot(shapeePoints)
1732
+ annot122.set_border(width=0.8)
1733
+ annot122.set_colors(stroke=(1,0,0))
1734
+ # annot12.set_info(content=str(polygon[2])+' m',subject='Perimeter Measurement', title="ADR Team")
1735
+ # annot12.set_info(subject='Perimeter Measurement',content=str(polygon[2])+' m')
1736
+ annot122.set_opacity(0.8)
1737
+ annot122.update()
1738
+
1739
+
1740
+ doc.save("pageNew.pdf")
1741
+
1742
+ OutputPdfStage4=compare_annotations_paths(OutputPdfStage3,pageNew)
1743
+
1744
+ latestimg,pix=pdftoimg(OutputPdfStage3)
1745
+ doc2 =fitz.open('pdf',OutputPdfStage3)
1746
+ gc,spreadsheet_service,spreadsheetId, spreadsheet_url , namepathArr=google_sheet_Legend.legendGoogleSheets(grouped_df , pdfname,pdfpath)
1747
+ list1=pd.DataFrame(columns=['content', 'id', 'subject','color'])
1748
+
1749
+ # for page in doc:
1750
+ for page in doc2:
1751
+ # Iterate through annotations on the page
1752
+ for annot in page.annots():
1753
+ # Get the color of the annotation
1754
+ annot_color = annot.colors
1755
+ if annot_color is not None:
1756
+ # annot_color is a dictionary with 'stroke' and 'fill' keys
1757
+ stroke_color = annot_color.get('stroke') # Border color
1758
+ fill_color = annot_color.get('fill') # Fill color
1759
+ if fill_color:
1760
+ v='fill'
1761
+ # print('fill')
1762
+ if stroke_color:
1763
+ v='stroke'
1764
+ x,y,z=int(annot_color.get(v)[0]*255),int(annot_color.get(v)[1]*255),int(annot_color.get(v)[2]*255)
1765
+ list1.loc[len(list1)] =[annot.info['content'],annot.info['id'],annot.info['subject'],[x,y,z]]
1766
+ print('LISTTT',list1)
1767
+ return doc2,latestimg, SimilarAreaDictionary ,spreadsheetId, spreadsheet_url , namepathArr , list1,hatched_areas