nathbns committed on
Commit
9b4f4f7
·
verified ·
1 Parent(s): 91eb20c

Upload 11 files

Browse files
Files changed (11) hide show
  1. app.py +82 -0
  2. data/laps_models/laps.h5 +3 -0
  3. deps/__init__.py +3 -0
  4. deps/geometry.py +1165 -0
  5. deps/laps.py +26 -0
  6. laps.py +137 -0
  7. llr.py +308 -0
  8. preprocess.py +50 -0
  9. requirements.txt +10 -0
  10. rescale.py +49 -0
  11. slid.py +212 -0
app.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import numpy as np
3
+ from preprocess import preprocess_image_from_array
4
+
5
+
6
def process_image(image):
    """Convert an uploaded image to RGB and run the chessboard preprocessing.

    Args:
        image: PIL Image or numpy array as delivered by Gradio (may be None).

    Returns:
        The preprocessed image as a numpy array, or None when no image was
        given or any processing error occurred.
    """
    if image is None:
        return None

    try:
        # PIL Images expose a 'mode' attribute; numpy arrays do not.
        arr = np.array(image) if hasattr(image, 'mode') else image

        # Normalize to a 3-channel RGB array.
        if arr.ndim == 2:
            # Grayscale: replicate the single channel three times.
            arr = np.stack([arr] * 3, axis=-1)
        elif arr.shape[2] == 4:
            # RGBA: drop the alpha channel.
            arr = arr[:, :, :3]

        return preprocess_image_from_array(arr)
    except Exception as e:
        print(f"Erreur lors du traitement: {e}")
        import traceback
        traceback.print_exc()
        return None
45
+
46
+
47
+ # Create Gradio interface
48
# Build the Gradio UI: input image + button on the left, processed output on
# the right; processing runs on button click and automatically on upload.
with gr.Blocks(title="Preprocessing d'Échiquier") as demo:
    gr.Markdown("""
    # Preprocessing d'Images d'Échiquier

    Cette application applique le preprocessing d'images d'échiquier pour convertir
    une photo prise sous un angle arbitraire en une projection 2D.

    Uploadez une image d'échiquier et obtenez la version préprocessée.
    """)

    with gr.Row():
        with gr.Column():
            input_image = gr.Image(label="Image d'entrée", type="numpy")
            process_btn = gr.Button("Traiter l'image", variant="primary")

        with gr.Column():
            output_image = gr.Image(label="Image préprocessée", type="numpy")

    # Manual trigger via the button.
    process_btn.click(
        fn=process_image,
        inputs=[input_image],
        outputs=[output_image]
    )

    # Auto-process when image is uploaded
    input_image.change(
        fn=process_image,
        inputs=[input_image],
        outputs=[output_image]
    )


if __name__ == "__main__":
    # Bind to all interfaces on the standard Gradio port (container friendly).
    demo.launch(server_name="0.0.0.0", server_port=7860)
82
+
data/laps_models/laps.h5 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b1ed18c9410b0fa3ad837e6311eafd68df1e635241afdb7d7c04a819efe0619f
3
+ size 651444
deps/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ from . import geometry
2
+ from . import laps
3
+
deps/geometry.py ADDED
@@ -0,0 +1,1165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ https://github.com/ideasman42/isect_segments-bentley_ottmann
3
+ """
4
+
5
+ # BentleyOttmann sweep-line implementation
6
+ # (for finding all intersections in a set of line segments)
7
+
8
__all__ = (
    "isect_segments",
    "isect_polygon",

    # for testing only (correct but slow)
    "isect_segments__naive",
    "isect_polygon__naive",
)

# ----------------------------------------------------------------------------
# Main Poly Intersection

# Defines to change behavior.
#
# Whether to ignore intersections of line segments when both
# their end points form the intersection point.
USE_IGNORE_SEGMENT_ENDINGS = True

USE_DEBUG = False  # FIXME

USE_VERBOSE = False

# checks we should NOT need,
# but do them in case we find a test-case that fails.
USE_PARANOID = False

# Support vertical segments,
# (the bentley-ottmann method doesn't support this).
# We use the term 'START_VERTICAL' for a vertical segment,
# to differentiate it from START/END/INTERSECTION
USE_VERTICAL = True
# end defines!
# ------------

# ---------
# Constants
X, Y = 0, 1  # tuple indices for readability
EPS = 1e-10  # tolerance used for float comparisons in the sweep
EPS_SQ = EPS * EPS
INF = float("inf")
48
+
49
+
50
class Event:
    """A sweep-line event: the START/END of a segment, an INTERSECTION
    point, or (when USE_VERTICAL) the start of a vertical segment."""

    __slots__ = (
        "type",
        "point",
        "segment",

        # this is just cache,
        # we may remove or calculate slope on the fly
        "slope",
        "span",
    ) + (() if not USE_DEBUG else (
        # debugging only
        "other",
        "in_sweep",
    ))

    class Type:
        # Event kinds; numeric order matters for per-point bucket handling.
        END = 0
        INTERSECTION = 1
        START = 2
        if USE_VERTICAL:
            START_VERTICAL = 3

    def __init__(self, type, point, segment, slope):
        assert(isinstance(point, tuple))
        self.type = type
        self.point = point
        self.segment = segment

        # will be None for INTERSECTION
        self.slope = slope
        if segment is not None:
            # Horizontal extent of the segment, cached for interpolation.
            self.span = segment[1][X] - segment[0][X]

        if USE_DEBUG:
            self.other = None
            self.in_sweep = False

    def is_vertical(self):
        # True when both endpoints share the same X coordinate.
        return self.segment[0][X] == self.segment[1][X]

    def y_intercept_x(self, x: float):
        """Y coordinate of this segment at sweep position *x*
        (clamped to the endpoints); None for vertical segments."""
        # vertical events only for comparison (above_all check)
        # never added into the binary-tree its self
        if USE_VERTICAL:
            if self.is_vertical():
                return None

        if x <= self.segment[0][X]:
            return self.segment[0][Y]
        elif x >= self.segment[1][X]:
            return self.segment[1][Y]

        # use the largest to avoid float precision error with nearly vertical lines.
        delta_x0 = x - self.segment[0][X]
        delta_x1 = self.segment[1][X] - x
        if delta_x0 > delta_x1:
            ifac = delta_x0 / self.span
            fac = 1.0 - ifac
        else:
            fac = delta_x1 / self.span
            ifac = 1.0 - fac
        assert(fac <= 1.0)
        return (self.segment[0][Y] * fac) + (self.segment[1][Y] * ifac)

    @staticmethod
    def Compare(sweep_line, this, that):
        """Three-way ordering of two events at the sweep line's current X:
        primarily by Y intercept, with slope and endpoint tie-breakers."""
        if this is that:
            return 0
        if USE_DEBUG:
            if this.other is that:
                return 0
        current_point_x = sweep_line._current_event_point_x
        ipthis = this.y_intercept_x(current_point_x)
        ipthat = that.y_intercept_x(current_point_x)
        # print(ipthis, ipthat)
        if USE_VERTICAL:
            # Vertical segments have no intercept; fall back to event point Y.
            if ipthis is None:
                ipthis = this.point[Y]
            if ipthat is None:
                ipthat = that.point[Y]

        delta_y = ipthis - ipthat

        assert((delta_y < 0.0) == (ipthis < ipthat))
        # NOTE, VERY IMPORTANT TO USE EPSILON HERE!
        # otherwise w/ float precision errors we get incorrect comparisons
        # can get very strange & hard to debug output without this.
        if abs(delta_y) > EPS:
            return -1 if (delta_y < 0.0) else 1
        else:
            # Same intercept (within EPS): order by slope; the ordering flips
            # depending on whether we are just before or after the event point.
            this_slope = this.slope
            that_slope = that.slope
            if this_slope != that_slope:
                if sweep_line._before:
                    return -1 if (this_slope > that_slope) else 1
                else:
                    return 1 if (this_slope > that_slope) else -1

            # Final tie-breakers: compare endpoint X coordinates.
            delta_x_p1 = this.segment[0][X] - that.segment[0][X]
            if delta_x_p1 != 0.0:
                return -1 if (delta_x_p1 < 0.0) else 1

            delta_x_p2 = this.segment[1][X] - that.segment[1][X]
            if delta_x_p2 != 0.0:
                return -1 if (delta_x_p2 < 0.0) else 1

            return 0

    def __repr__(self):
        return ("Event(0x%x, s0=%r, s1=%r, p=%r, type=%d, slope=%r)" % (
            id(self),
            self.segment[0], self.segment[1],
            self.point,
            self.type,
            self.slope,
        ))
167
+
168
+
169
class SweepLine:
    """State of the Bentley-Ottmann sweep: the ordered set of segments
    currently crossing the sweep line, plus all intersections found."""

    __slots__ = (
        # A map holding all intersection points mapped to the Events
        # that form these intersections.
        # {Point: set(Event, ...), ...}
        "intersections",
        "queue",

        # Events (sorted set of ordered events, no values)
        #
        # note: START & END events are considered the same so checking if an event is in the tree
        # will return true if its opposite side is found.
        # This is essential for the algorithm to work, and why we don't explicitly remove START events.
        # Instead, the END events are never added to the current sweep, and removing them also removes the start.
        "_events_current_sweep",
        # The point of the current Event.
        "_current_event_point_x",
        # A flag to indicate if we're slightly before or after the line.
        "_before",
    )

    def __init__(self):
        self.intersections = {}

        self._current_event_point_x = None
        self._events_current_sweep = RBTree(cmp=Event.Compare, cmp_data=self)
        self._before = True

    def get_intersections(self):
        # All intersection points found so far (keys of the map).
        return list(self.intersections.keys())

    # Checks if an intersection exists between two Events 'a' and 'b'.
    def _check_intersection(self, a: Event, b: Event):
        # Return immediately in case either of the events is null, or
        # if one of them is an INTERSECTION event.
        if ((a is None or b is None) or
                (a.type == Event.Type.INTERSECTION) or
                (b.type == Event.Type.INTERSECTION)):

            return

        if a is b:
            return

        # Get the intersection point between 'a' and 'b'.
        p = isect_seg_seg_v2_point(
            a.segment[0], a.segment[1],
            b.segment[0], b.segment[1])

        # No intersection exists.
        if p is None:
            return

        # If the intersection is formed by both the segment endings, AND
        # USE_IGNORE_SEGMENT_ENDINGS is true,
        # return from this method.
        if USE_IGNORE_SEGMENT_ENDINGS:
            if ((len_squared_v2v2(p, a.segment[0]) < EPS_SQ or
                 len_squared_v2v2(p, a.segment[1]) < EPS_SQ) and
                    (len_squared_v2v2(p, b.segment[0]) < EPS_SQ or
                     len_squared_v2v2(p, b.segment[1]) < EPS_SQ)):

                return

        # Add the intersection.
        events_for_point = self.intersections.pop(p, set())
        is_new = len(events_for_point) == 0
        events_for_point.add(a)
        events_for_point.add(b)
        self.intersections[p] = events_for_point

        # If the intersection occurs to the right of the sweep line, OR
        # if the intersection is on the sweep line and it's above the
        # current event-point, add it as a new Event to the queue.
        if is_new and p[X] >= self._current_event_point_x:
            event_isect = Event(Event.Type.INTERSECTION, p, None, None)
            self.queue.offer(p, event_isect)

    def _sweep_to(self, p):
        # Advance the sweep line to the X coordinate of point *p*.
        if p[X] == self._current_event_point_x:
            # happens in rare cases,
            # we can safely ignore
            return

        self._current_event_point_x = p[X]

    def insert(self, event):
        """Insert a (non-vertical) START event into the current sweep."""
        assert(event not in self._events_current_sweep)
        assert(event.type != Event.Type.START_VERTICAL)
        if USE_DEBUG:
            assert(event.in_sweep == False)
            assert(event.other.in_sweep == False)

        self._events_current_sweep.insert(event, None)

        if USE_DEBUG:
            event.in_sweep = True
            event.other.in_sweep = True

    def remove(self, event):
        """Remove *event* from the sweep; return True if it was present."""
        try:
            self._events_current_sweep.remove(event)
            if USE_DEBUG:
                assert(event.in_sweep == True)
                assert(event.other.in_sweep == True)
                event.in_sweep = False
                event.other.in_sweep = False
            return True
        except KeyError:
            if USE_DEBUG:
                assert(event.in_sweep == False)
                assert(event.other.in_sweep == False)
            return False

    def above(self, event):
        # Immediate neighbor above *event* in the sweep order (or None).
        return self._events_current_sweep.succ_key(event, None)

    def below(self, event):
        # Immediate neighbor below *event* in the sweep order (or None).
        return self._events_current_sweep.prev_key(event, None)

    '''
    def above_all(self, event):
        while True:
            event = self.above(event)
            if event is None:
                break
            yield event
    '''

    def above_all(self, event):
        # Iterate all events above *event* in ascending sweep order.
        # assert(event not in self._events_current_sweep)
        return self._events_current_sweep.key_slice(event, None, reverse=False)

    def handle(self, p, events_current):
        """Process one bucket of same-type events sharing point *p*."""
        if len(events_current) == 0:
            return
        # done already
        # self._sweep_to(events_current[0])
        assert(p[0] == self._current_event_point_x)

        if not USE_IGNORE_SEGMENT_ENDINGS:
            # Segments meeting at the same point all intersect there.
            if len(events_current) > 1:
                for i in range(0, len(events_current) - 1):
                    for j in range(i + 1, len(events_current)):
                        self._check_intersection(
                            events_current[i], events_current[j])

        for e in events_current:
            self.handle_event(e)

    def handle_event(self, event):
        """Dispatch a single event by type (START / END / INTERSECTION /
        START_VERTICAL), updating the sweep and probing new neighbors."""
        t = event.type
        if t == Event.Type.START:
            # print("  START")
            self._before = False
            self.insert(event)

            e_above = self.above(event)
            e_below = self.below(event)

            self._check_intersection(event, e_above)
            self._check_intersection(event, e_below)
            if USE_PARANOID:
                self._check_intersection(e_above, e_below)

        elif t == Event.Type.END:
            # print("  END")
            self._before = True

            e_above = self.above(event)
            e_below = self.below(event)

            self.remove(event)

            # Removing a segment makes its former neighbors adjacent.
            self._check_intersection(e_above, e_below)
            if USE_PARANOID:
                self._check_intersection(event, e_above)
                self._check_intersection(event, e_below)

        elif t == Event.Type.INTERSECTION:
            # print("  INTERSECTION")
            self._before = True
            event_set = self.intersections[event.point]
            # note: events_current aren't sorted.
            reinsert_stack = []  # Stack
            for e in event_set:
                # If we the Event was not already removed,
                # we want to insert it later on.
                if self.remove(e):
                    reinsert_stack.append(e)
            self._before = False

            # Insert all Events that we were able to remove.
            while reinsert_stack:
                e = reinsert_stack.pop()

                self.insert(e)

                e_above = self.above(e)
                e_below = self.below(e)

                self._check_intersection(e, e_above)
                self._check_intersection(e, e_below)
                if USE_PARANOID:
                    self._check_intersection(e_above, e_below)
        elif (USE_VERTICAL and
              (t == Event.Type.START_VERTICAL)):

            # just check sanity
            assert(event.segment[0][X] == event.segment[1][X])
            assert(event.segment[0][Y] <= event.segment[1][Y])

            # In this case we only need to find all segments in this span.
            y_above_max = event.segment[1][Y]

            # self.insert(event)
            for e_above in self.above_all(event):
                if e_above.type == Event.Type.START_VERTICAL:
                    continue
                y_above = e_above.y_intercept_x(
                    self._current_event_point_x)
                if USE_IGNORE_SEGMENT_ENDINGS:
                    if y_above >= y_above_max:
                        break
                else:
                    if y_above > y_above_max:
                        break

                # We know this intersects,
                # so we could use a faster function now:
                # ix = (self._current_event_point_x, y_above)
                # ...however best use existing functions
                # since it does all sanity checks on endpoints... etc.
                self._check_intersection(event, e_above)

            # self.remove(event)
405
+
406
+
407
class EventQueue:
    """Priority queue of pending sweep events, keyed by point and
    bucketed by event type."""

    __slots__ = (
        # note: we only ever pop_min, this could use a 'heap' structure.
        # The sorted map holding the points -> event list
        # [Point: Event] (tree)
        "events_scan",
    )

    def __init__(self, segments, line: SweepLine):
        self.events_scan = RBTree()
        # segments = [s for s in segments if s[0][0] != s[1][0] and s[0][1] != s[1][1]]

        for s in segments:
            assert(s[0][X] <= s[1][X])

            slope = slope_v2v2(*s)

            if s[0] == s[1]:
                # Degenerate zero-length segment: no events.
                pass
            elif USE_VERTICAL and (s[0][X] == s[1][X]):
                # Vertical segments get a single special event.
                e_start = Event(Event.Type.START_VERTICAL, s[0], s, slope)

                if USE_DEBUG:
                    e_start.other = e_start  # FAKE, avoid error checking

                self.offer(s[0], e_start)
            else:
                e_start = Event(Event.Type.START, s[0], s, slope)
                e_end = Event(Event.Type.END, s[1], s, slope)

                if USE_DEBUG:
                    e_start.other = e_end
                    e_end.other = e_start

                self.offer(s[0], e_start)
                self.offer(s[1], e_end)

        # Back-link so the sweep line can enqueue INTERSECTION events.
        line.queue = self

    def offer(self, p, e: Event):
        """
        Offer a new event ``e`` at point ``p`` in this queue.
        """
        # One bucket list per event type (indexed by Event.Type value).
        existing = self.events_scan.setdefault(
            p, ([], [], [], []) if USE_VERTICAL else
            ([], [], []))
        # Can use double linked-list for easy insertion at beginning/end
        '''
        if e.type == Event.Type.END:
            existing.insert(0, e)
        else:
            existing.append(e)
        '''

        existing[e.type].append(e)

    # return a set of events
    def poll(self):
        """
        Get, and remove, the first (lowest) item from this queue.

        :return: the first (lowest) item from this queue.
        :rtype: Point, Event pair.
        """
        assert(len(self.events_scan) != 0)
        p, events_current = self.events_scan.pop_min()
        return p, events_current
474
+
475
+
476
def isect_segments(segments) -> list:
    """Return all intersection points among *segments* (Bentley-Ottmann)."""
    # Normalize every segment so its first point compares lowest
    # (X first, then Y — the Y tie-break handles vertical segments).
    ordered = []
    for seg in segments:
        a, b = seg[0], seg[1]
        ordered.append((a, b) if a <= b else (b, a))

    sweep = SweepLine()
    event_queue = EventQueue(ordered, sweep)

    # Consume events left-to-right until the queue is exhausted.
    while len(event_queue.events_scan) > 0:
        if USE_VERBOSE:
            print(len(event_queue.events_scan), sweep._current_event_point_x)
        point, buckets = event_queue.poll()
        for bucket in buckets:
            if bucket:
                sweep._sweep_to(point)
                sweep.handle(point, bucket)

    return sweep.get_intersections()
498
+
499
+
500
def isect_polygon(points) -> list:
    """Return the self-intersection points of the closed polygon *points*."""
    count = len(points)
    # Build one edge per vertex, wrapping the last vertex back to the first.
    edges = []
    for i, p in enumerate(points):
        edges.append((tuple(p), tuple(points[(i + 1) % count])))
    return isect_segments(edges)
506
+
507
+
508
+ # ----------------------------------------------------------------------------
509
+ # 2D math utilities
510
+
511
+
512
def slope_v2v2(p1, p2):
    """Slope of the segment p1->p2; +inf/-inf for vertical segments
    (the sign encodes whether the segment points up or down in Y)."""
    run = p2[0] - p1[0]
    if run == 0:
        return float("inf") if p1[1] < p2[1] else float("-inf")
    return (p2[1] - p1[1]) / run
520
+
521
+
522
def sub_v2v2(a, b):
    """Component-wise 2D vector subtraction: a - b."""
    ax, ay = a[0], a[1]
    bx, by = b[0], b[1]
    return (ax - bx, ay - by)
526
+
527
+
528
def dot_v2v2(a, b):
    """2D dot product of vectors a and b."""
    return a[0] * b[0] + a[1] * b[1]
532
+
533
+
534
def len_squared_v2v2(a, b):
    """Squared Euclidean distance between 2D points a and b."""
    dx = a[0] - b[0]
    dy = a[1] - b[1]
    return dx * dx + dy * dy
537
+
538
+
539
def line_point_factor_v2(p, l1, l2, default=0.0):
    """Parametric position of point *p* projected onto the line l1->l2.

    0.0 corresponds to l1 and 1.0 to l2; *default* is returned when the
    line is degenerate (l1 == l2).
    """
    ux = l2[0] - l1[0]
    uy = l2[1] - l1[1]
    denom = ux * ux + uy * uy
    if denom == 0.0:
        return default
    hx = p[0] - l1[0]
    hy = p[1] - l1[1]
    return (ux * hx + uy * hy) / denom
544
+
545
+
546
def isect_seg_seg_v2_point(v1, v2, v3, v4, bias=0.0):
    """Intersection point of segments (v1, v2) and (v3, v4), or None.

    Returns None for parallel/collinear segments (zero cross product) and
    when the line-line intersection falls outside either segment (subject
    to *bias*, a tolerance on the parametric range [0, 1]).
    """
    # Only for predictability and hashable point when same input is given
    if v1 > v2:
        v1, v2 = v2, v1
    if v3 > v4:
        v3, v4 = v4, v3

    if (v1, v2) > (v3, v4):
        v1, v2, v3, v4 = v3, v4, v1, v2

    # Cross product of the two direction vectors; zero means parallel.
    div = (v2[0] - v1[0]) * (v4[1] - v3[1]) - (v2[1] - v1[1]) * (v4[0] - v3[0])
    if div == 0.0:
        return None

    # Line-line intersection via the determinant formula.
    vi = (((v3[0] - v4[0]) *
           (v1[0] * v2[1] - v1[1] * v2[0]) - (v1[0] - v2[0]) *
           (v3[0] * v4[1] - v3[1] * v4[0])) / div,
          ((v3[1] - v4[1]) *
           (v1[0] * v2[1] - v1[1] * v2[0]) - (v1[1] - v2[1]) *
           (v3[0] * v4[1] - v3[1] * v4[0])) / div,
          )

    # Reject intersections outside the first segment's parametric range.
    fac = line_point_factor_v2(vi, v1, v2, default=-1.0)
    if fac < 0.0 - bias or fac > 1.0 + bias:
        return None

    # ... and outside the second segment's range.
    fac = line_point_factor_v2(vi, v3, v4, default=-1.0)
    if fac < 0.0 - bias or fac > 1.0 + bias:
        return None

    # vi = round(vi[X], 8), round(vi[Y], 8)
    return vi
578
+
579
+
580
+ # ----------------------------------------------------------------------------
581
+ # Simple naive line intersect, (for testing only)
582
+
583
+
584
def isect_segments__naive(segments) -> list:
    """
    Brute force O(n2) version of ``isect_segments`` for test validation.
    """
    found = []

    # Normalize each segment so its first point is the leftmost in X.
    normalized = []
    for s in segments:
        if s[0][X] <= s[1][X]:
            normalized.append((s[0], s[1]))
        else:
            normalized.append((s[1], s[0]))

    total = len(normalized)

    # Test every unordered pair exactly once.
    for i in range(total):
        a0, a1 = normalized[i]
        for j in range(i + 1, total):
            b0, b1 = normalized[j]
            # Skip pairs that share an endpoint.
            if a0 not in (b0, b1) and a1 not in (b0, b1):
                p = isect_seg_seg_v2_point(a0, a1, b0, b1)
                if p is not None:
                    # USE_IGNORE_SEGMENT_ENDINGS handled already
                    found.append(p)

    return found
609
+
610
+
611
def isect_polygon__naive(points) -> list:
    """
    Brute force O(n2) version of ``isect_polygon`` for test validation.
    """
    isect = []

    n = len(points)

    # Test every unordered pair of polygon edges once.
    for i in range(n):
        a0, a1 = points[i], points[(i + 1) % n]
        for j in range(i + 1, n):
            b0, b1 = points[j], points[(j + 1) % n]
            # Skip edge pairs that share a vertex.
            if a0 not in (b0, b1) and a1 not in (b0, b1):
                ix = isect_seg_seg_v2_point(a0, a1, b0, b1)
                if ix is not None:

                    # Optionally discard intersections formed purely by
                    # the endpoints of both edges.
                    if USE_IGNORE_SEGMENT_ENDINGS:
                        if ((len_squared_v2v2(ix, a0) < EPS_SQ or
                             len_squared_v2v2(ix, a1) < EPS_SQ) and
                                (len_squared_v2v2(ix, b0) < EPS_SQ or
                                 len_squared_v2v2(ix, b1) < EPS_SQ)):
                            continue

                    isect.append(ix)

    return isect
637
+
638
+
639
+ # ----------------------------------------------------------------------------
640
+ # Inline Libs
641
+ #
642
+ # bintrees: 2.0.2, extracted from:
643
+ # http://pypi.python.org/pypi/bintrees
644
+ #
645
+ # - Removed unused functions, such as slicing and range iteration.
646
+ # - Added 'cmp' and and 'cmp_data' arguments,
647
+ # so we can define our own comparison that takes an arg.
648
+ # Needed for sweep-line.
649
+ # - Added support for 'default' arguments for prev_item/succ_item,
650
+ # so we can avoid exception handling.
651
+
652
+ # -------
653
+ # ABCTree
654
+
655
from operator import attrgetter

# Unique sentinel distinguishing "no default supplied" from a None default.
_sentinel = object()
657
+
658
+
659
+ class _ABCTree(object):
660
    def __init__(self, items=None, cmp=None, cmp_data=None):
        """T.__init__(...) initializes T; see T.__class__.__doc__ for signature"""
        self._root = None
        self._count = 0
        if cmp is None:
            # Default comparator: natural ordering; cmp_data is ignored.
            def cmp(cmp_data, a, b):
                if a < b:
                    return -1
                elif a > b:
                    return 1
                else:
                    return 0
        self._cmp = cmp
        self._cmp_data = cmp_data
        if items is not None:
            self.update(items)
676
+
677
    def clear(self):
        """T.clear() -> None. Remove all items from T."""
        def _clear(node):
            # Post-order traversal, freeing every node.
            if node is not None:
                _clear(node.left)
                _clear(node.right)
                node.free()
        _clear(self._root)
        self._count = 0
        self._root = None
687
+
688
    @property
    def count(self):
        """Get items count."""
        return self._count
692
+
693
+ def get_value(self, key):
694
+ node = self._root
695
+ while node is not None:
696
+ cmp = self._cmp(self._cmp_data, key, node.key)
697
+ if cmp == 0:
698
+ return node.value
699
+ elif cmp < 0:
700
+ node = node.left
701
+ else:
702
+ node = node.right
703
+ raise KeyError(str(key))
704
+
705
    def pop_item(self):
        """T.pop_item() -> (k, v), remove and return some (key, value) pair as a
        2-tuple; but raise KeyError if T is empty.
        """
        if self.is_empty():
            raise KeyError("pop_item(): tree is empty")
        # Descend to an arbitrary leaf and remove it.
        node = self._root
        while True:
            if node.left is not None:
                node = node.left
            elif node.right is not None:
                node = node.right
            else:
                break
        key = node.key
        value = node.value
        self.remove(key)
        return key, value
    popitem = pop_item  # for compatibility to dict()
724
+
725
+ def min_item(self):
726
+ """Get item with min key of tree, raises ValueError if tree is empty."""
727
+ if self.is_empty():
728
+ raise ValueError("Tree is empty")
729
+ node = self._root
730
+ while node.left is not None:
731
+ node = node.left
732
+ return node.key, node.value
733
+
734
+ def max_item(self):
735
+ """Get item with max key of tree, raises ValueError if tree is empty."""
736
+ if self.is_empty():
737
+ raise ValueError("Tree is empty")
738
+ node = self._root
739
+ while node.right is not None:
740
+ node = node.right
741
+ return node.key, node.value
742
+
743
    def succ_item(self, key, default=_sentinel):
        """Get successor (k,v) pair of key, raises KeyError if key is max key
        or key does not exist. optimized for pypy.
        """
        # removed graingets version, because it was little slower on CPython and much slower on pypy
        # this version runs about 4x faster with pypy than the Cython version
        # Note: Code sharing of succ_item() and ceiling_item() is possible, but has always a speed penalty.
        node = self._root
        succ_node = None
        while node is not None:
            cmp = self._cmp(self._cmp_data, key, node.key)
            if cmp == 0:
                break
            elif cmp < 0:
                # Going left: this node is a successor candidate.
                if (succ_node is None) or self._cmp(self._cmp_data, node.key, succ_node.key) < 0:
                    succ_node = node
                node = node.left
            else:
                node = node.right

        if node is None:  # stay at dead end
            if default is _sentinel:
                raise KeyError(str(key))
            return default
        # found node of key
        if node.right is not None:
            # find smallest node of right subtree
            node = node.right
            while node.left is not None:
                node = node.left
            if succ_node is None:
                succ_node = node
            elif self._cmp(self._cmp_data, node.key, succ_node.key) < 0:
                succ_node = node
        elif succ_node is None:  # given key is biggest in tree
            if default is _sentinel:
                raise KeyError(str(key))
            return default
        return succ_node.key, succ_node.value
782
+
783
    def prev_item(self, key, default=_sentinel):
        """Get predecessor (k,v) pair of key, raises KeyError if key is min key
        or key does not exist. optimized for pypy.
        """
        # removed graingets version, because it was little slower on CPython and much slower on pypy
        # this version runs about 4x faster with pypy than the Cython version
        # Note: Code sharing of prev_item() and floor_item() is possible, but has always a speed penalty.
        node = self._root
        prev_node = None

        while node is not None:
            cmp = self._cmp(self._cmp_data, key, node.key)
            if cmp == 0:
                break
            elif cmp < 0:
                node = node.left
            else:
                # Going right: this node is a predecessor candidate.
                if (prev_node is None) or self._cmp(self._cmp_data, prev_node.key, node.key) < 0:
                    prev_node = node
                node = node.right

        if node is None:  # stay at dead end (None)
            if default is _sentinel:
                raise KeyError(str(key))
            return default
        # found node of key
        if node.left is not None:
            # find biggest node of left subtree
            node = node.left
            while node.right is not None:
                node = node.right
            if prev_node is None:
                prev_node = node
            elif self._cmp(self._cmp_data, prev_node.key, node.key) < 0:
                prev_node = node
        elif prev_node is None:  # given key is smallest in tree
            if default is _sentinel:
                raise KeyError(str(key))
            return default
        return prev_node.key, prev_node.value
823
+
824
    def __repr__(self):
        """T.__repr__(...) <==> repr(x)"""
        # e.g. "RBTree({1: 'a', 2: 'b'})"
        tpl = "%s({%s})" % (self.__class__.__name__, '%s')
        return tpl % ", ".join(("%r: %r" % item for item in self.items()))
828
+
829
+ def __contains__(self, key):
830
+ """k in T -> True if T has a key k, else False"""
831
+ try:
832
+ self.get_value(key)
833
+ return True
834
+ except KeyError:
835
+ return False
836
+
837
+ def __len__(self):
838
+ """T.__len__() <==> len(x)"""
839
+ return self.count
840
+
841
+ def is_empty(self):
842
+ """T.is_empty() -> False if T contains any items else True"""
843
+ return self.count == 0
844
+
845
    def set_default(self, key, default=None):
        """T.set_default(k[,d]) -> T.get(k,d), also set T[k]=d if k not in T"""
        try:
            return self.get_value(key)
        except KeyError:
            # Key absent: store the default and hand it back.
            self.insert(key, default)
            return default
    setdefault = set_default  # for compatibility to dict()
853
+
854
    def get(self, key, default=None):
        """T.get(k[,d]) -> T[k] if k in T, else d. d defaults to None."""
        try:
            return self.get_value(key)
        except KeyError:
            return default
860
+
861
    def pop(self, key, *args):
        """T.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised
        """
        if len(args) > 1:
            raise TypeError("pop expected at most 2 arguments, got %d" % (1 + len(args)))
        try:
            value = self.get_value(key)
            self.remove(key)
            return value
        except KeyError:
            # No default supplied: propagate the KeyError.
            if len(args) == 0:
                raise
            else:
                return args[0]
876
+
877
+ def prev_key(self, key, default=_sentinel):
878
+ """Get predecessor to key, raises KeyError if key is min key
879
+ or key does not exist.
880
+ """
881
+ item = self.prev_item(key, default)
882
+ return default if item is default else item[0]
883
+
884
+ def succ_key(self, key, default=_sentinel):
885
+ """Get successor to key, raises KeyError if key is max key
886
+ or key does not exist.
887
+ """
888
+ item = self.succ_item(key, default)
889
+ return default if item is default else item[0]
890
+
891
+ def pop_min(self):
892
+ """T.pop_min() -> (k, v), remove item with minimum key, raise ValueError
893
+ if T is empty.
894
+ """
895
+ item = self.min_item()
896
+ self.remove(item[0])
897
+ return item
898
+
899
+ def pop_max(self):
900
+ """T.pop_max() -> (k, v), remove item with maximum key, raise ValueError
901
+ if T is empty.
902
+ """
903
+ item = self.max_item()
904
+ self.remove(item[0])
905
+ return item
906
+
907
    def min_key(self):
        """Get min key of tree, raises ValueError if tree is empty. """
        # Delegates to min_item(), which performs the leftmost-node walk.
        return self.min_item()[0]
910
+
911
    def max_key(self):
        """Get max key of tree, raises ValueError if tree is empty. """
        # Delegates to max_item(), which performs the rightmost-node walk.
        return self.max_item()[0]
914
+
915
    def key_slice(self, start_key, end_key, reverse=False):
        """T.key_slice(start_key, end_key) -> key iterator:
        start_key <= key < end_key.

        Yields keys in ascending order if reverse is False else in descending order.
        """
        # Thin projection over iter_items(): drop the values.
        return (k for k, v in self.iter_items(start_key, end_key, reverse=reverse))
922
+
923
    def iter_items(self, start_key=None, end_key=None, reverse=False):
        """Iterate over the (key, value) items of the associated tree
        in ascending order; if *reverse* is True, iterate in descending
        order. *reverse* defaults to False.

        Returns an empty list (not a generator) when the tree is empty.
        """
        # optimized iterator (reduced method calls) - faster on CPython but slower on pypy

        if self.is_empty():
            return []
        if reverse:
            return self._iter_items_backward(start_key, end_key)
        else:
            return self._iter_items_forward(start_key, end_key)
935
+
936
+ def _iter_items_forward(self, start_key=None, end_key=None):
937
+ for item in self._iter_items(left=attrgetter("left"), right=attrgetter("right"),
938
+ start_key=start_key, end_key=end_key):
939
+ yield item
940
+
941
+ def _iter_items_backward(self, start_key=None, end_key=None):
942
+ for item in self._iter_items(left=attrgetter("right"), right=attrgetter("left"),
943
+ start_key=start_key, end_key=end_key):
944
+ yield item
945
+
946
    def _iter_items(self, left=attrgetter("left"), right=attrgetter("right"), start_key=None, end_key=None):
        """Iterative in-order traversal yielding (key, value) pairs.

        The *left*/*right* accessors are injected so the same loop serves both
        directions (swapped accessors give reverse order). Only keys for which
        the range predicate holds are yielded, but the whole tree is walked.
        """
        node = self._root
        stack = []                # explicit stack replaces recursion
        go_left = True
        in_range = self._get_in_range_func(start_key, end_key)

        while True:
            if left(node) is not None and go_left:
                # Descend as far "left" as possible, remembering ancestors.
                stack.append(node)
                node = left(node)
            else:
                if in_range(node.key):
                    yield node.key, node.value
                if right(node) is not None:
                    # Visit the "right" subtree next, starting at its leftmost node.
                    node = right(node)
                    go_left = True
                else:
                    if not len(stack):
                        return  # all done
                    # Backtrack to the nearest unvisited ancestor; do not
                    # re-descend left on the way back up.
                    node = stack.pop()
                    go_left = False
967
+
968
+ def _get_in_range_func(self, start_key, end_key):
969
+ if start_key is None and end_key is None:
970
+ return lambda x: True
971
+ else:
972
+ if start_key is None:
973
+ start_key = self.min_key()
974
+ if end_key is None:
975
+ return (lambda x: self._cmp(self._cmp_data, start_key, x) <= 0)
976
+ else:
977
+ return (lambda x: self._cmp(self._cmp_data, start_key, x) <= 0 and
978
+ self._cmp(self._cmp_data, x, end_key) < 0)
979
+
980
+
981
+ # ------
982
+ # RBTree
983
+
984
class Node(object):
    """Internal object, represents a tree node.

    Children are addressable both by attribute (left/right) and by
    index (node[0]/node[1]) so rotation code can stay direction-agnostic.
    """
    __slots__ = ['key', 'value', 'red', 'left', 'right']

    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value
        self.red = True          # freshly inserted nodes start red
        self.left = None
        self.right = None

    def free(self):
        """Drop all references held by this node."""
        self.key = None
        self.value = None
        self.left = None
        self.right = None

    def __getitem__(self, key):
        """N[0] -> left child, N[1] -> right child."""
        if key == 0:
            return self.left
        return self.right

    def __setitem__(self, key, value):
        """N[0] = c sets the left child, N[1] = c sets the right child."""
        if key == 0:
            self.left = value
        else:
            self.right = value
1011
+
1012
+
1013
class RBTree(_ABCTree):
    """
    RBTree implements a balanced binary tree with a dict-like interface.

    Top-down red-black algorithm in the style of Julienne Walker's (jsw)
    tutorial: rebalancing happens on the way down, so no parent pointers
    or post-insertion fix-up pass are needed.

    see: http://en.wikipedia.org/wiki/Red_black_tree
    """
    @staticmethod
    def is_red(node):
        # None children count as black.
        if (node is not None) and node.red:
            return True
        else:
            return False

    @staticmethod
    def jsw_single(root, direction):
        # Single rotation toward `direction` (0=left child rises for
        # direction=1, and vice versa); recolors root red, new root black.
        other_side = 1 - direction
        save = root[other_side]
        root[other_side] = save[direction]
        save[direction] = root
        root.red = True
        save.red = False
        return save

    @staticmethod
    def jsw_double(root, direction):
        # Double rotation: first rotate the child the other way, then
        # rotate root toward `direction`.
        other_side = 1 - direction
        root[other_side] = RBTree.jsw_single(root[other_side], other_side)
        return RBTree.jsw_single(root, direction)

    def _new_node(self, key, value):
        """Create a new tree node."""
        self._count += 1
        return Node(key, value)

    def insert(self, key, value):
        """T.insert(key, value) <==> T[key] = value, insert key, value into tree."""
        if self._root is None:  # Empty tree case
            self._root = self._new_node(key, value)
            self._root.red = False  # make root black
            return

        head = Node()  # False tree root
        grand_parent = None
        grand_grand_parent = head
        parent = None  # parent
        direction = 0
        last = 0

        # Set up helpers
        grand_grand_parent.right = self._root
        node = grand_grand_parent.right
        # Search down the tree
        while True:
            if node is None:  # Insert new node at the bottom
                node = self._new_node(key, value)
                parent[direction] = node
            elif RBTree.is_red(node.left) and RBTree.is_red(node.right):  # Color flip
                node.red = True
                node.left.red = False
                node.right.red = False

            # Fix red violation (red node with red parent) with a rotation
            # hung off the great-grandparent.
            if RBTree.is_red(node) and RBTree.is_red(parent):
                direction2 = 1 if grand_grand_parent.right is grand_parent else 0
                if node is parent[last]:
                    grand_grand_parent[direction2] = RBTree.jsw_single(grand_parent, 1 - last)
                else:
                    grand_grand_parent[direction2] = RBTree.jsw_double(grand_parent, 1 - last)

            # Stop if found
            if self._cmp(self._cmp_data, key, node.key) == 0:
                node.value = value  # set new value for key
                break

            last = direction
            direction = 0 if (self._cmp(self._cmp_data, key, node.key) < 0) else 1
            # Update helpers
            if grand_parent is not None:
                grand_grand_parent = grand_parent
            grand_parent = parent
            parent = node
            node = node[direction]

        self._root = head.right  # Update root
        self._root.red = False  # make root black

    def remove(self, key):
        """T.remove(key) <==> del T[key], remove item <key> from tree."""
        if self._root is None:
            raise KeyError(str(key))
        head = Node()  # False tree root
        node = head
        node.right = self._root
        parent = None
        grand_parent = None
        found = None  # Found item
        direction = 1

        # Search and push a red down: the walk continues past the matching
        # node to its in-order neighbor, which is physically unlinked below.
        while node[direction] is not None:
            last = direction

            # Update helpers
            grand_parent = parent
            parent = node
            node = node[direction]

            direction = 1 if (self._cmp(self._cmp_data, node.key, key) < 0) else 0

            # Save found node
            if self._cmp(self._cmp_data, key, node.key) == 0:
                found = node

            # Push the red node down
            if not RBTree.is_red(node) and not RBTree.is_red(node[direction]):
                if RBTree.is_red(node[1 - direction]):
                    parent[last] = RBTree.jsw_single(node, direction)
                    parent = parent[last]
                elif not RBTree.is_red(node[1 - direction]):
                    sibling = parent[1 - last]
                    if sibling is not None:
                        if (not RBTree.is_red(sibling[1 - last])) and (not RBTree.is_red(sibling[last])):
                            # Color flip
                            parent.red = False
                            sibling.red = True
                            node.red = True
                        else:
                            direction2 = 1 if grand_parent.right is parent else 0
                            if RBTree.is_red(sibling[last]):
                                grand_parent[direction2] = RBTree.jsw_double(parent, last)
                            elif RBTree.is_red(sibling[1-last]):
                                grand_parent[direction2] = RBTree.jsw_single(parent, last)
                            # Ensure correct coloring
                            grand_parent[direction2].red = True
                            node.red = True
                            grand_parent[direction2].left.red = False
                            grand_parent[direction2].right.red = False

        # Replace and remove if found: copy the neighbor's payload into the
        # matching node, then splice the neighbor (at most one child) out.
        if found is not None:
            found.key = node.key
            found.value = node.value
            parent[int(parent.right is node)] = node[int(node.left is None)]
            node.free()
            self._count -= 1

        # Update root and make it black
        self._root = head.right
        if self._root is not None:
            self._root.red = False
        if not found:
            raise KeyError(str(key))
1165
+
deps/laps.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import *

# Fallback architecture for the LAPS lattice-point classifier; the trained
# weights normally come from data/laps_models/laps.h5 (see laps.py), so a
# model built here has *untrained* weights unless weights are loaded later.

# input: 21x21x1 binarized edge patch; 441 = 21 * 21
model = Sequential()
model.add(Dense(441, input_shape=(21, 21, 1)))

# H(2): two conv stages of 3x3 -> 2x2 -> 1x1 ELU convolutions,
# each followed by 2x2 max-pooling and batch norm
for i in range(2):
    for j in [3, 2, 1]:
        model.add(Conv2D(16, j, activation='elu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(BatchNormalization())

# F(128): fully-connected head
model.add(Dense(128, activation='elu'))
model.add(Dropout(0.5))
model.add(Flatten())

# output: 2-way softmax (lattice point vs. not)
model.add(Dense(2, activation='softmax'))
model.compile(RMSprop(learning_rate=0.001),
              loss='categorical_crossentropy',
              metrics=['categorical_accuracy'])
26
+
laps.py ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Code and weights taken from
2
+ # https://github.com/maciejczyzewski/neural-chessboard/
3
+
4
+ import deps
5
+
6
+ import numpy as np
7
+ import cv2
8
+ import collections
9
+ import scipy
10
+ import scipy.cluster
11
+ import tensorflow as tf
12
+ import os
13
+
14
# Try to load the complete model from .h5 file
model_h5_path = "data/laps_models/laps.h5"
try:
    # compile=False avoids deserializing the (possibly version-incompatible)
    # optimizer state stored in the .h5; we recompile below.
    NEURAL_MODEL = tf.keras.models.load_model(model_h5_path, compile=False)
    # Recompile with current optimizer
    from tensorflow.keras.optimizers import RMSprop
    NEURAL_MODEL.compile(RMSprop(learning_rate=0.001),
                         loss='categorical_crossentropy',
                         metrics=['categorical_accuracy'])
except Exception as e:
    print(f"Warning: Could not load model from {model_h5_path}: {e}")
    # Fallback to creating model from deps.laps
    # NOTE(review): the fallback model is freshly initialized (untrained),
    # so detection quality will be poor on this code path.
    from deps.laps import model as NEURAL_MODEL
27
+
28
+
29
def laps_intersections(lines):
    '''Find all intersections between the given line segments.'''
    # Normalize each line to a pair of (x, y) tuples for the sweep-line helper.
    segments = []
    for p, q in lines:
        segments.append([(p[0], p[1]), (q[0], q[1])])
    return deps.geometry.isect_segments(segments)
33
+
34
+
35
def laps_cluster(points, max_dist=10):
    """Merge near-duplicate points.

    Single-linkage hierarchical clustering groups points closer than
    max_dist; each group is replaced by its mean (x, y) tuple.
    """
    distances = scipy.spatial.distance.pdist(points)
    linkage = scipy.cluster.hierarchy.single(distances)
    labels = scipy.cluster.hierarchy.fcluster(linkage, max_dist, 'distance')

    groups = collections.defaultdict(list)
    for label, point in zip(labels, points):
        groups[label].append(point)

    merged = []
    for group in groups.values():
        group = np.array(group)
        merged.append((np.mean(group[:, 0]), np.mean(group[:, 1])))
    return merged
48
+
49
+
50
def laps_detector(img):
    """Decide whether a small image patch contains a chessboard lattice point.

    Returns (is_lattice_point, score): score is 1 when the geometric fast
    path fires, otherwise the network's softmax output vector.

    Fixes vs. original: removed dead `global NC_LAYER` (name is never
    defined anywhere), and the unused `hashid`/`imgd` locals.
    """
    # Normalize the patch: grayscale -> Otsu binarization -> edges -> 21x21.
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    img = cv2.threshold(img, 0, 255, cv2.THRESH_OTSU)[1]
    img = cv2.Canny(img, 0, 255)
    img = cv2.resize(img, (21, 21), interpolation=cv2.INTER_CUBIC)

    # Binarized 21x21 edge map shaped as a single network input sample.
    X = [np.where(img > int(255/2), 1, 0).ravel()]
    X = X[0].reshape([-1, 21, 21, 1])

    img = cv2.dilate(img, None)
    mask = cv2.copyMakeBorder(img, top=1, bottom=1, left=1, right=1,
                              borderType=cv2.BORDER_CONSTANT, value=[255, 255, 255])
    mask = cv2.bitwise_not(mask)
    i = 0
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                   cv2.CHAIN_APPROX_NONE)

    _c = np.zeros((23, 23, 3), np.uint8)  # debug visualization canvas

    # Geometric fast path: a clean lattice point splits the patch into
    # exactly four small quadrilateral regions around the crossing.
    for cnt in contours:
        (x, y), radius = cv2.minEnclosingCircle(cnt)
        approx = cv2.approxPolyDP(cnt, 0.1*cv2.arcLength(cnt, True), True)
        if len(approx) == 4 and radius < 14:
            cv2.drawContours(_c, [cnt], 0, (0, 255, 0), 1)
            i += 1
        else:
            cv2.drawContours(_c, [cnt], 0, (0, 0, 255), 1)

    if i == 4:
        return (True, 1)

    # Fall back to the neural network; accept only confident positives.
    pred = NEURAL_MODEL.predict(X)
    a, b = pred[0][0], pred[0][1]
    t = a > b and b < 0.03 and a > 0.975

    # decision
    if t:
        return (True, pred[0])
    else:
        return (False, pred[0])
99
+
100
+ ################################################################################
101
+
102
+
103
def LAPS(img, lines, size=10):
    """Lattice-point search: classify every line intersection with
    laps_detector() and return the surviving points, clustered so that
    near-duplicate detections collapse into single mean points.

    size controls the half-width of the patch cropped around each
    intersection for classification.
    """
    __points, points = laps_intersections(lines), []

    for pt in __points:
        # pixels are in integers
        pt = list(map(int, pt))

        # size of our analysis area
        # NOTE(review): the window is asymmetric by one pixel on x vs y
        # (size+1 on opposite sides) — presumably intentional; confirm.
        lx1 = max(0, int(pt[0]-size-1))
        lx2 = max(0, int(pt[0]+size))
        ly1 = max(0, int(pt[1]-size))
        ly2 = max(0, int(pt[1]+size+1))

        # cropping for detector
        dimg = img[ly1:ly2, lx1:lx2]
        dimg_shape = np.shape(dimg)

        # not valid (empty crop at the image border)
        if dimg_shape[0] <= 0 or dimg_shape[1] <= 0:
            continue

        # use neural network
        re_laps = laps_detector(dimg)
        if not re_laps[0]:
            continue

        # add if okay (reject off-image intersections)
        if pt[0] < 0 or pt[1] < 0:
            continue
        points += [pt]
    # merge near-duplicate detections into single mean points
    points = laps_cluster(points)

    return points
137
+
llr.py ADDED
@@ -0,0 +1,308 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Code taken from
2
+ # https://github.com/maciejczyzewski/neural-chessboard/
3
+
4
+ from laps import laps_intersections, laps_cluster
5
+ from slid import slid_tendency
6
+ import scipy
7
+ import cv2
8
+ import pyclipper
9
+ import numpy as np
10
+ import matplotlib.path
11
+ import matplotlib.pyplot as plt
12
+ import matplotlib.path as mplPath
13
+ import collections
14
+ import itertools
15
+ import random
16
+ import math
17
+ import sklearn.cluster
18
+ from copy import copy
19
+ na = np.array
20
+
21
+
22
+ ################################################################################
23
+
24
+
25
def llr_normalize(points):
    """Truncate every (x, y) pair to integer coordinates."""
    result = []
    for a, b in points:
        result.append([int(a), int(b)])
    return result
26
+
27
+
28
def llr_correctness(points, shape):
    """Keep only points inside an image of the given shape.

    shape follows numpy convention: (height, width, ...).
    """
    height, width = shape[0], shape[1]
    inside = []
    for pt in points:
        if 0 <= pt[0] <= width and 0 <= pt[1] <= height:
            inside.append(pt)
    return inside
37
+
38
+
39
def llr_unique(a):
    """Deduplicate, keeping the first occurrence of each value in order.

    Works on unhashable elements (e.g. lists of points), hence the
    membership test against the output list instead of a set.
    """
    seen = []
    for x in a:
        if x not in seen:
            seen.append(x)
    return seen
44
+
45
+
46
def llr_polysort(pts):
    """Sort points clockwise around their centroid, in place; return pts."""
    cx = sum(p[0] for p in pts) / len(pts)
    cy = sum(p[1] for p in pts) / len(pts)

    def angle(p):
        # polar angle about the centroid, normalized to [0, 2*pi)
        return (math.atan2(p[0] - cx, p[1] - cy) + 2 * math.pi) % (2 * math.pi)

    pts.sort(key=angle)
    return pts
56
+
57
+
58
def llr_polyscore(cnt, pts, cen, alfa=5, beta=2):
    """Score a candidate board quadrilateral.

    cnt  -- 4 corner points of the candidate polygon
    pts  -- detected lattice points
    cen  -- expected centroid of the lattice points
    alfa -- estimated square edge length; beta -- point-count tolerance

    Returns the score R (see Eq. 11 / Sec. 3.4 of the paper) or 0 when
    the candidate is rejected outright.
    """
    # NOTE(review): these four assignments are dead — a/b/c/d are
    # reassigned as edge segments further below.
    a = cnt[0]
    b = cnt[1]
    c = cnt[2]
    d = cnt[3]

    # Reject quads that are too small to hold a chessboard.
    area = cv2.contourArea(cnt)
    t2 = area < (4 * alfa * alfa) * 5
    if t2:
        return 0

    gamma = alfa/1.5

    # Grow the quad slightly and count how many lattice points fall inside;
    # reject if too many points (beyond the beta tolerance) are left out.
    pco = pyclipper.PyclipperOffset()
    pco.AddPath(cnt, pyclipper.JT_MITER, pyclipper.ET_CLOSEDPOLYGON)
    pcnt = matplotlib.path.Path(pco.Execute(gamma)[0])  # FIXME: alfa/1.5
    wtfs = pcnt.contains_points(pts)
    pts_in = min(np.count_nonzero(wtfs), 49)
    t1 = pts_in < min(len(pts), 49) - 2 * beta - 1
    if t1:
        return 0

    A = pts_in   # number of captured lattice points (capped at 49 = 7x7)
    B = area     # quad area

    # Perpendicular distance from point x to the infinite line l1,
    # dx being the precomputed segment length.
    def nln(l1, x, dx): return \
        np.linalg.norm(np.cross(na(l1[1])-na(l1[0]),
                                na(l1[0])-na(x)))/dx
    # Collect the lattice points that were inside the grown quad.
    pcnt_in = []
    i = 0
    for pt in wtfs:
        if pt:
            pcnt_in += [pts[i]]
        i += 1

    def __convex_approx(points, alfa=0.001):
        # Convex hull of the captured points (alfa is unused here).
        hull = scipy.spatial.ConvexHull(na(points)).vertices
        cnt = na([points[pt] for pt in hull])
        return cnt

    cnt_in = __convex_approx(na(pcnt_in))

    # Centroid of the captured points' hull...
    points = cnt_in
    x = [p[0] for p in points]
    y = [p[1] for p in points]
    cen2 = (sum(x) / len(points),
            sum(y) / len(points))

    # ...compared with the expected centroid: G penalizes off-center quads.
    G = np.linalg.norm(na(cen)-na(cen2))

    """
    cnt_in = __convex_approx(na(pcnt_in))
    S = cv2.contourArea(na(cnt_in))
    if S < B: E += abs(S - B)
    cnt_in = __convex_approx(na(list(cnt_in)+list(cnt)))
    S = cv2.contourArea(na(cnt_in))
    if S > B: E += abs(S - B)
    """

    # The four edges of the quad. NOTE(review): `d` is shadowed again
    # below as a segment length inside the loop.
    a = [cnt[0], cnt[1]]
    b = [cnt[1], cnt[2]]
    c = [cnt[2], cnt[3]]
    d = [cnt[3], cnt[0]]
    lns = [a, b, c, d]
    # E: mean distance of near-edge hull points to their edge;
    # F: how many hull points lie near an edge at all.
    E = 0
    F = 0
    for l in lns:
        d = np.linalg.norm(na(l[0])-na(l[1]))
        for p in cnt_in:
            r = nln(l, p, d)
            if r < gamma:
                E += r
                F += 1
    if F == 0:
        return 0
    E /= F

    if B == 0 or A == 0:
        return 0

    # See Eq.11 and Sec.3.4 in the paper

    C = 1+(E/A)**(1/3)
    D = 1+(G/A)**(1/5)
    R = (A**4)/((B**2) * C * D)

    # print(R*(10**12), A, "|", B, C, D, "|", E, G)

    return R
147
+
148
+ ################################################################################
149
+
150
+ # LAPS, SLID
151
+
152
+
153
def LLR(img, points, lines):
    """Lattice-Line Reconstruction: from detected lattice points and lines,
    find the four corner points of the chessboard's inner 7x7 lattice.

    img    -- the (resized) image, used only for its shape
    points -- lattice points from LAPS()
    lines  -- line segments from the SLID detector

    Raises StopIteration (via next()) if no candidate polygon scores > 0.
    """
    old = points  # NOTE(review): unused — kept as in the original

    def __convex_approx(points, alfa=0.01):
        # Convex hull simplified with approxPolyDP, flattened to [x, y] pairs.
        hull = scipy.spatial.ConvexHull(na(points)).vertices
        cnt = na([points[pt] for pt in hull])
        approx = cv2.approxPolyDP(cnt, alfa *
                                  cv2.arcLength(cnt, True), True)
        return llr_normalize(itertools.chain(*approx))

    # Memoize pairwise distances — the same segment endpoints recur often.
    __cache = {}

    def __dis(a, b):
        idx = hash("__dis" + str(a) + str(b))
        if idx in __cache:
            return __cache[idx]
        __cache[idx] = np.linalg.norm(na(a)-na(b))
        return __cache[idx]

    # Perpendicular distance from point x to the infinite line l1 (dx = |l1|).
    def nln(l1, x, dx): return \
        np.linalg.norm(np.cross(na(l1[1])-na(l1[0]),
                                na(l1[0])-na(x)))/dx

    # pregroup[0]: near-vertical candidate lines, pregroup[1]: near-horizontal.
    pregroup = [[], []]
    S = {}  # score -> candidate polygon

    points = llr_correctness(llr_normalize(points), img.shape)

    # Keep the densest DBSCAN cluster of lattice points (drops stray
    # detections far from the board) when enough points are available.
    __points = {}
    points = llr_polysort(points)
    __max, __points_max = 0, []
    # alfa estimates one square's edge length: board area / 49 squares.
    alfa = math.sqrt(cv2.contourArea(na(points))/49)
    X = sklearn.cluster.DBSCAN(eps=alfa*4).fit(points)
    for i in range(len(points)):
        __points[i] = []
    for i in range(len(points)):
        if X.labels_[i] != -1:
            __points[X.labels_[i]] += [points[i]]
    for i in range(len(points)):
        if len(__points[i]) > __max:
            __max = len(__points[i])
            __points_max = __points[i]
    if len(__points) > 0 and len(points) > 49/2:
        points = __points_max
    # print(X.labels_)

    ring = __convex_approx(llr_polysort(points))  # NOTE(review): unused

    n = len(points)
    beta = n*(5/100)  # 5% tolerance on missing points
    alfa = math.sqrt(cv2.contourArea(na(points))/49)

    x = [p[0] for p in points]
    y = [p[1] for p in points]
    centroid = (sum(x) / len(points),
                sum(y) / len(points))

    # print(alfa, beta, centroid)

    def __v(l):
        # Extend a near-vertical line to the top/bottom image borders and
        # score the two half-image polygons it cuts off.
        y_0, x_0 = l[0][0], l[0][1]
        y_1, x_1 = l[1][0], l[1][1]

        x_2 = 0
        t = (x_0-x_2)/(x_0-x_1+0.0001)  # +0.0001 avoids division by zero
        a = [int((1-t)*x_0+t*x_1), int((1-t)*y_0+t*y_1)][::-1]

        x_2 = img.shape[0]
        t = (x_0-x_2)/(x_0-x_1+0.0001)
        b = [int((1-t)*x_0+t*x_1), int((1-t)*y_0+t*y_1)][::-1]

        poly1 = llr_polysort([[0, 0], [0, img.shape[0]], a, b])
        s1 = llr_polyscore(na(poly1), points, centroid, beta=beta, alfa=alfa/2)
        poly2 = llr_polysort([a, b,
                              [img.shape[1], 0], [img.shape[1], img.shape[0]]])
        s2 = llr_polyscore(na(poly2), points, centroid, beta=beta, alfa=alfa/2)

        return [a, b], s1, s2

    def __h(l):
        # Same as __v() for a near-horizontal line and the left/right borders.
        x_0, y_0 = l[0][0], l[0][1]
        x_1, y_1 = l[1][0], l[1][1]

        x_2 = 0
        t = (x_0-x_2)/(x_0-x_1+0.0001)
        a = [int((1-t)*x_0+t*x_1), int((1-t)*y_0+t*y_1)]

        x_2 = img.shape[1]
        t = (x_0-x_2)/(x_0-x_1+0.0001)
        b = [int((1-t)*x_0+t*x_1), int((1-t)*y_0+t*y_1)]

        poly1 = llr_polysort([[0, 0], [img.shape[1], 0], a, b])
        s1 = llr_polyscore(na(poly1), points, centroid, beta=beta, alfa=alfa/2)
        poly2 = llr_polysort([a, b,
                              [0, img.shape[0]], [img.shape[1], img.shape[0]]])
        s2 = llr_polyscore(na(poly2), points, centroid, beta=beta, alfa=alfa/2)

        return [a, b], s1, s2

    # Keep lines that pass near a lattice point but not through the centroid
    # (i.e. plausible board *edges*), binned by orientation.
    for l in lines:
        for p in points:
            t1 = nln(l, p, __dis(*l)) < alfa
            t2 = nln(l, centroid, __dis(*l)) > alfa * 2.5

            if t1 and t2:
                tx, ty = l[0][0]-l[1][0], l[0][1]-l[1][1]
                if abs(tx) < abs(ty):
                    ll, s1, s2 = __v(l)
                    o = 0
                else:
                    ll, s1, s2 = __h(l)
                    o = 1
                if s1 == 0 and s2 == 0:
                    continue
                pregroup[o] += [ll]

    pregroup[0] = llr_unique(pregroup[0])
    pregroup[1] = llr_unique(pregroup[1])

    # Try every pair of vertical edges against every pair of horizontal
    # edges; the best-scoring convex quadrilateral wins.
    # print("---------------------")
    # print(pregroup)
    for v in itertools.combinations(pregroup[0], 2):
        for h in itertools.combinations(pregroup[1], 2):
            poly = laps_intersections([v[0], v[1], h[0], h[1]])
            poly = llr_correctness(poly, img.shape)
            if len(poly) != 4:
                continue
            poly = na(llr_polysort(llr_normalize(poly)))
            if not cv2.isContourConvex(poly):
                continue
            # Negate the score so the OrderedDict sort puts the best first.
            # print("Poly:", -llr_polyscore(poly, points, centroid,
            #                              beta=beta, alfa=alfa/2))
            S[-llr_polyscore(poly, points, centroid,
                             beta=beta, alfa=alfa/2)] = poly

    # print(bool(S))
    S = collections.OrderedDict(sorted(S.items()))
    K = next(iter(S))
    # print("key --", K)
    four_points = llr_normalize(S[K])

    # print("POINTS:", len(points))
    # print("LINES:", len(lines))

    return four_points
298
+
299
+
300
def llr_pad(four_points, img):
    """Expand the detected inner-lattice quadrilateral outward by a fixed
    60-pixel miter offset, so the crop includes the outer board border.

    img is unused but kept for interface compatibility with existing callers.
    Returns the offset polygon as a list of points.

    Fix vs. original: pco.Execute(60) was computed twice, with the first
    result (`padded`) discarded — now computed once.
    """
    pco = pyclipper.PyclipperOffset()
    pco.AddPath(four_points, pyclipper.JT_MITER, pyclipper.ET_CLOSEDPOLYGON)

    # 60,70/75 is best (with buffer/for debug purpose)
    return pco.Execute(60)[0]
308
+
preprocess.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Preprocessing function adapted for Gradio
2
+ # This script preprocesses original pictures and turns them into 2D-projections.
3
+
4
+ import numpy as np
5
+ import cv2
6
+ from matplotlib import pyplot as plt
7
+
8
+ from rescale import *
9
+ from slid import detect_lines
10
+ from laps import LAPS
11
+ from llr import LLR, llr_pad
12
+
13
+
14
def preprocess_image_from_array(img_array):
    '''
    Preprocesses an image from a numpy array (RGB format) into a flat
    2D projection of the chessboard.

    Args:
        img_array: numpy array of the image in RGB format

    Returns:
        Preprocessed (cropped + perspective-warped) image as a numpy
        array in RGB format.

    Fix vs. original: the bare `except:` around crop() is narrowed to
    `except Exception` so KeyboardInterrupt/SystemExit are not swallowed.
    '''
    # Gradio delivers RGB, while the OpenCV-based pipeline expects BGR.
    if len(img_array.shape) == 3 and img_array.shape[2] == 3:
        res = img_array[..., ::-1]  # RGB to BGR
    else:
        res = img_array.copy()

    # Crop twice, just like Czyzewski et al. did
    for _ in range(2):
        img, _, scale = image_resize(res)
        lines = detect_lines(img)
        lattice_points = LAPS(img, lines)
        # Sometimes LLR() or llr_pad() will produce an error. In this case,
        # the picture needs to be retaken
        inner_points = LLR(img, lattice_points, lines)
        four_points = llr_pad(inner_points, img)  # padcrop

        try:
            res = crop(res, four_points, scale)
        except Exception:
            # Padded polygon fell outside the image — fall back to the
            # unpadded inner corners.
            print("WARNING: couldn't crop around outer points")
            res = crop(res, inner_points, scale)

    # Convert BGR back to RGB for display
    return res[..., ::-1]
50
+
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ gradio>=4.0.0
2
+ keras>=2.0.0
3
+ matplotlib>=3.0.0
4
+ numpy>=1.20.0
5
+ opencv-contrib-python>=4.5.0
6
+ scipy>=1.7.0
7
+ tensorflow>=2.0.0
8
+ pyclipper>=1.2.0
9
+ scikit-learn>=1.0.0
10
+
rescale.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import numpy as np
3
+ import cv2
4
+ import math
5
+ arr = np.array
6
+
7
+
8
+ def image_scale(pts, scale):
9
+ """scale to original image size"""
10
+ def __loop(x, y): return [x[0] * y, x[1] * y]
11
+ return list(map(functools.partial(__loop, y=1/scale), pts))
12
+
13
+
14
def image_resize(img, height=500):
    """Resize the image to a normalized area of height**2 pixels,
    preserving aspect ratio.

    Returns (resized_image, resized_shape, scale_factor).
    """
    target_area = float(height * height)
    h, w = np.shape(img)[0], np.shape(img)[1]
    scale = math.sqrt(target_area / float(h * w))
    resized = cv2.resize(img, (int(w * scale), int(h * scale)))
    return resized, np.shape(resized), scale
24
+
25
+
26
def image_transform(img, points, square_length=150):
    """crop original image using perspective warp

    The four corner points are rotated so that the corner closest to the
    image origin maps to (0, 0), then warped onto a square board of
    8 * square_length pixels.
    """
    board_length = square_length * 8

    # Index of the corner nearest the origin (first one wins on ties,
    # matching the original scan order).
    nearest = min(range(len(points)),
                  key=lambda i: np.linalg.norm(arr(points[i])))
    ordered = points[nearest:] + points[:nearest]

    pts1 = np.float32(ordered)
    pts2 = np.float32([[0, 0], [board_length, 0],
                       [board_length, board_length], [0, board_length]])
    M = cv2.getPerspectiveTransform(pts1, pts2)
    return cv2.warpPerspective(img, M, (board_length, board_length))
42
+
43
+
44
def crop(img, pts, scale):
    """crop using 4 points transform

    Scales the detected points back to original-image coordinates, then
    perspective-warps the board region to a square.
    """
    return image_transform(img, image_scale(pts, scale))
49
+
slid.py ADDED
@@ -0,0 +1,212 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # My implementation of the SLID module from
2
+ # https://github.com/maciejczyzewski/neural-chessboard/
3
+
4
+ from typing import Tuple
5
+ import numpy as np
6
+ import cv2
7
+
8
+
9
arr = np.array  # short module-level alias for numpy's array constructor
# Four parameters are taken from the original code and
# correspond to four possible cases that need correction:
# low light, overexposure, underexposure, and blur.
# Each entry is [clipLimit, tileGridSize, iterations] as consumed by
# slid_clahe() via pSLID().
CLAHE_PARAMS = [[3, (2, 6), 5],  # @1
                [3, (6, 2), 5],  # @2
                [5, (3, 3), 5],  # @3
                [0, (0, 0), 0]]  # EE (limit 0: no CLAHE, no morphology)
17
+
18
+
19
def slid_clahe(img, limit=2, grid=(3, 3), iters=5):
    """repair using CLAHE algorithm (adaptive histogram equalization)"""
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # apply() is stateless, so one CLAHE object can serve every iteration
    clahe = cv2.createCLAHE(clipLimit=limit, tileGridSize=grid)
    for _ in range(iters):
        gray = clahe.apply(gray)
    # limit == 0 marks the "EE" pass-through config: skip the closing step
    if limit != 0:
        gray = cv2.morphologyEx(gray, cv2.MORPH_CLOSE,
                                np.ones((10, 10), np.uint8))
    return gray
29
+
30
+
31
def slid_detector(img, alfa=150, beta=2):
    """detect lines using Hough algorithm"""
    # NOTE(review): alfa is accepted for API compatibility but unused.
    raw = cv2.HoughLinesP(img, rho=1, theta=np.pi/360*beta,
                          threshold=40, minLineLength=50,
                          maxLineGap=15)  # [40, 40, 10]
    if raw is None:
        return []
    # Normalize to [[x1, y1], [x2, y2]] pairs of plain ints.
    detected = []
    for x1, y1, x2, y2 in np.reshape(raw, (-1, 4)):
        detected.append([[int(x1), int(y1)], [int(x2), int(y2)]])
    return detected
41
+
42
+
43
def slid_canny(img, sigma=0.25):
    """apply Canny edge detector (automatic thresh)"""
    # Thresholds are derived from the median intensity of the *input*,
    # computed before any smoothing is applied.
    med = np.median(img)
    smoothed = cv2.GaussianBlur(cv2.medianBlur(img, 5), (7, 7), 2)
    lower = int(max(0, (1.0 - sigma) * med))
    upper = int(min(255, (1.0 + sigma) * med))
    return cv2.Canny(smoothed, lower, upper)
51
+
52
+
53
def pSLID(img, thresh=150):
    """find all lines using different settings"""
    # Run the full CLAHE -> Canny -> Hough pipeline once per parameter set
    # and pool every detected segment.
    segments = []
    for limit, grid, iters in CLAHE_PARAMS:
        repaired = slid_clahe(img, limit=limit, grid=grid, iters=iters)
        segments += list(slid_detector(slid_canny(repaired), thresh))
    return segments
64
+
65
+
66
# Module-level scratch list: SLID() resets it on entry and analyze()
# appends every sampled point of each merged group into it.
all_points = []
67
+
68
+
69
def SLID(img, segments):
    """Merge raw Hough segments into full candidate lines.

    Groups mutually "similar" segments (per Sec. 3.2.2 of Czyzewski et al.)
    with a union-find structure, then fits one straight line through each
    group's sampled points.

    img: not read here; kept for API symmetry with pSLID().
    segments: list of [[x1, y1], [x2, y2]] segments from pSLID().
    Returns the merged lines in the same [[x, y], [x, y]] format.
    """
    global all_points
    all_points = []

    # pregroup: segments bucketed as (more vertical, more horizontal)
    # group: set-root hash -> member segment hashes
    # hashmap: segment hash -> segment; raw_lines: merged output
    pregroup, group, hashmap, raw_lines = [[], []], {}, {}, []

    dists = {}

    def dist(a, b):
        # Memoized Euclidean distance, keyed by a hash of the stringified
        # endpoints (hash collisions are theoretically possible but ignored).
        h = hash("dist"+str(a)+str(b))
        if h not in dists:
            dists[h] = np.linalg.norm(arr(a)-arr(b))
        return dists[h]

    parents = {}

    def find(x):
        # Union-find "find" with path compression.
        if x not in parents:
            parents[x] = x
        if parents[x] != x:
            parents[x] = find(parents[x])
        return parents[x]

    def union(a, b):
        # Merge two disjoint sets and pool their member hashes.
        par_a = find(a)
        par_b = find(b)
        parents[par_a] = par_b
        group[par_b] |= group[par_a]

    def height(line, pt):
        # Perpendicular distance from pt to the infinite line through
        # line[0]-line[1]: |cross product| / segment length.
        v = np.cross(arr(line[1])-arr(line[0]), arr(pt)-arr(line[0]))
        # Using dist() to speed up distance look-up since the 2-norm
        # is used many times
        return np.linalg.norm(v)/dist(line[1], line[0])

    def are_similar(l1, l2):
        '''See Sec.3.2.2 in Czyzewski et al.'''
        a = dist(l1[0], l1[1])
        b = dist(l2[0], l2[1])

        # Heights of each segment's endpoints above the other's line
        x1 = height(l2, l1[0])
        x2 = height(l2, l1[1])
        y1 = height(l1, l2[0])
        y2 = height(l1, l2[1])

        # Both segments already lie on (numerically) the same line
        if x1 < 1e-8 and x2 < 1e-8 and y1 < 1e-8 and y2 < 1e-8:
            return True

        gamma = 0.25 * (x1+x2+y1+y2)

        # NOTE(review): this area-based derivation of t_delta is dead code —
        # the value is unconditionally overwritten with 0.0625 just below.
        img_width = 500
        img_height = 282
        p = 0.
        A = img_width*img_height
        w = np.pi/2 / np.sqrt(np.sqrt(A))
        t_delta = p*w
        t_delta = 0.0625

        delta = (a+b) * t_delta

        return (a/gamma > delta) and (b/gamma > delta)

    def generate_line(a, b, n):
        # n points evenly sampled along segment a->b (endpoint b excluded).
        points = []
        for i in range(n):
            x = a[0] + (b[0] - a[0]) * (i/n)
            y = a[1] + (b[1] - a[1]) * (i/n)
            points += [[int(x), int(y)]]
        return points

    def analyze(group):
        # Fit one line through all sampled points of a merged group, then
        # stretch it to a length derived from the enclosing-circle radius.
        global all_points
        points = []
        for idx in group:
            points += generate_line(*hashmap[idx], 10)
        _, radius = cv2.minEnclosingCircle(arr(points))
        w = radius * np.pi / 2
        vx, vy, cx, cy = cv2.fitLine(arr(points), cv2.DIST_L2, 0, 0.01, 0.01)
        all_points += points
        return [[int(cx-vx*w), int(cy-vy*w)], [int(cx+vx*w), int(cy+vy*w)]]

    for l in segments:
        h = hash(str(l))
        # Initialize the line as its own singleton set
        hashmap[h] = l
        group[h] = set([h])
        parents[h] = h

        wid = l[0][0] - l[1][0]
        hei = l[0][1] - l[1][1]

        # Divide lines into more horizontal vs more vertical
        # to speed up comparison later
        if abs(wid) < abs(hei):
            pregroup[0].append(l)
        else:
            pregroup[1].append(l)

    for lines in pregroup:
        for i in range(len(lines)):
            l1 = lines[i]
            h1 = hash(str(l1))
            # We're looking for the root line of each disjoint set
            if parents[h1] != h1:
                continue
            for j in range(i+1, len(lines)):
                l2 = lines[j]
                h2 = hash(str(l2))
                if parents[h2] != h2:
                    continue
                if are_similar(l1, l2):
                    # Merge lines into a single disjoint set
                    union(h1, h2)

    # Emit one fitted line per surviving disjoint-set root
    for h in group:
        if parents[h] != h:
            continue
        raw_lines += [analyze(group[h])]

    return raw_lines
193
+
194
+
195
def slid_tendency(raw_lines, s=4):
    """Stretch each line outward by blending its endpoints (factor s)."""
    def blend(p, q, s):
        return int(p * (1 + s) / 2 + q * (1 - s) / 2)

    stretched = []
    for a, b in raw_lines:
        # Endpoints are updated in place and sequentially: b is blended
        # against the ALREADY-updated a, matching the original order.
        a[0] = blend(a[0], b[0], s)
        a[1] = blend(a[1], b[1], s)
        b[0] = blend(b[0], a[0], s)
        b[1] = blend(b[1], a[1], s)
        stretched.append([a, b])
    return stretched
205
+
206
+
207
def detect_lines(img):
    """Full pipeline: raw segments -> merged groups -> stretched lines."""
    return slid_tendency(SLID(img, pSLID(img)))
212
+