infinex committed on
Commit
30a0963
·
verified ·
1 Parent(s): 6da4d9d

Uploading dataset files from the local data folder.

Browse files
Files changed (1) hide show
  1. chunking.txt +55 -23
chunking.txt CHANGED
@@ -269,31 +269,63 @@ class DocumentChunkingService:
269
  all_raw_groups.extend(response_groups)
270
  return all_raw_groups
271
 
272
def _resolve_overlaps(self, raw_groups: List[List[int]], all_lines_map: Dict[int, Line]) -> List[SemanticGroup]:
    """Merge raw line-number groups that share members into disjoint clusters.

    Uses a union-find (disjoint-set) structure over every known line
    number: any two lines that appear together in some raw group end up
    in the same cluster, transitively across groups.

    Args:
        raw_groups: Candidate groupings of line numbers. Groups may
            overlap and may contain line numbers absent from
            ``all_lines_map`` (these are silently dropped).
        all_lines_map: Every valid line number mapped to its Line; its
            keys define the universe being clustered.

    Returns:
        ``SemanticGroup`` objects sorted by their smallest line number;
        together they partition the keys of ``all_lines_map``.
    """
    parent = {line_num: line_num for line_num in all_lines_map.keys()}

    def find(i: int) -> int:
        # Iterative path-halving. The previous recursive implementation
        # (`parent[i] = find(parent[i])`) could raise RecursionError on
        # long parent chains in large documents.
        while parent[i] != i:
            parent[i] = parent[parent[i]]
            i = parent[i]
        return i

    def union(i: int, j: int) -> None:
        root_i = find(i)
        root_j = find(j)
        if root_i != root_j:
            parent[root_j] = root_i

    for group in raw_groups:
        if not group:
            continue
        # Drop out-of-range / hallucinated line numbers before unioning.
        valid_group = [g for g in group if g in parent]
        if not valid_group:
            continue
        first = valid_group[0]
        for other in valid_group[1:]:
            union(first, other)

    # Gather every line under its root representative.
    clusters: Dict[int, Set[int]] = {}
    for line_num in all_lines_map.keys():
        clusters.setdefault(find(line_num), set()).add(line_num)

    return sorted(
        (SemanticGroup(lines) for lines in clusters.values()),
        key=lambda g: min(g.line_numbers),
    )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
297
 
298
  def _finalize_chunk(self, content: str, line_numbers: List[int], parent_id: Optional[str] = None) -> List[Dict[str, Any]]:
299
  count = self._count_tokens(content)
 
269
  all_raw_groups.extend(response_groups)
270
  return all_raw_groups
271
 
272
def _resolve_overlaps(self, raw_groups: List[List[int]], all_lines_map: Dict[int, Line]) -> List[SemanticGroup]:
    """Merge raw line-number groups whose line *ranges* overlap.

    Standard 'Merge Intervals' algorithm: each raw group is reduced to
    its [min, max] line span; spans that touch or overlap are merged and
    their line-number sets combined. Unlike membership-based merging,
    two groups merge whenever their ranges overlap even if they share no
    exact line — e.g. spans [84, 795] and [788, 887] merge because
    788 <= 795.

    NOTE(fix): restored the ``self`` parameter and the private name
    ``_resolve_overlaps`` — the committed version defined a bare
    ``resolve_overlaps(raw_groups, ...)`` inside the class, so calling
    it as a method would have bound the instance to ``raw_groups``.

    Args:
        raw_groups: Candidate groupings of line numbers; entries not
            present in ``all_lines_map`` are dropped.
        all_lines_map: Every valid line number mapped to its Line.

    Returns:
        ``SemanticGroup`` objects sorted by their smallest line number.
    """
    # 1. Reduce each raw group to a mutable [start, end, lines] span,
    #    keeping only line numbers that actually exist. Plain lists are
    #    used (instead of mutating GroupInterval instances in place) so
    #    merging never depends on that type being mutable.
    spans: List[list] = []
    for group in raw_groups:
        valid_lines = {g for g in group if g in all_lines_map}
        if valid_lines:
            spans.append([min(valid_lines), max(valid_lines), valid_lines])

    if not spans:
        return []

    # 2. Sort by start line so any overlapping spans become adjacent.
    spans.sort(key=lambda s: s[0])

    # 3. Single pass: either extend the previous span or open a new one.
    merged: List[list] = [spans[0]]
    for start, end, lines in spans[1:]:
        last = merged[-1]
        if start <= last[1]:
            # Overlap (or exact touch): widen the span and pool lines.
            last[1] = max(last[1], end)
            last[2].update(lines)
        else:
            merged.append([start, end, lines])

    # 4. Wrap merged clusters; every cluster is non-empty by
    #    construction, so min() is always safe.
    return sorted(
        (SemanticGroup(lines) for _, _, lines in merged),
        key=lambda g: min(g.line_numbers),
    )
328
+
329
 
330
  def _finalize_chunk(self, content: str, line_numbers: List[int], parent_id: Optional[str] = None) -> List[Dict[str, Any]]:
331
  count = self._count_tokens(content)