jason122490 committed on
Commit
67fea8f
·
verified ·
1 Parent(s): 2d82643

Update ICCAD.py

Browse files
Files changed (1) hide show
  1. ICCAD.py +157 -139
ICCAD.py CHANGED
@@ -39,8 +39,9 @@ class ICCAD_Config(datasets.BuilderConfig):
39
  patchify: bool = False
40
  # transform
41
  use_resize: bool = True
42
- img_size: int = 288
43
- dist_type: str = 'edt'
 
44
  use_i_map: bool = True
45
  use_r_map: bool = True
46
  use_r_dist: bool = True
@@ -62,12 +63,15 @@ def _transform(self, img, max_pool=False):
62
  img = patchify(img, patch_size=(img_size, img_size, img.shape[-1]), step=img_size)
63
  img = img.reshape((-1, img_size, img_size, img.shape[-1])).transpose((0, 3, 1, 2))
64
  elif self.config.use_resize:
65
- img = A.Resize(img_size, img_size, interpolation=cv2.INTER_AREA)(image=img)["image"]
66
- img = img.transpose((2, 0, 1))
67
- elif max_pool:
68
- img = torch.from_numpy(img).unsqueeze(0)
69
- img = F.adaptive_max_pool2d(img, img_size)
70
- img = img.squeeze(0).numpy()
 
 
 
71
  img = img.transpose((2, 0, 1))
72
 
73
  return img
@@ -138,7 +142,8 @@ def get_image(self, data):
138
  get_dist(self, data["R_map_88"]) * get_blur(data["I_map"], ksize=49),
139
  get_dist(self, data["R_map_89"]) * get_blur(data["I_map"], ksize=49),
140
  get_dist(self, data["R_map_99"]) * get_blur(data["I_map"], ksize=49),
141
- ] if self.config.use_power_map else []), axis=2))
 
142
  }
143
 
144
  def get_IR_map_H(Via_map, I_map):
@@ -297,13 +302,23 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
297
  "H": datasets.Value("int32"),
298
  "W": datasets.Value("int32"),
299
  "image": datasets.Array4D((None, in_chans, img_size, img_size), 'float32'),
 
300
  })
301
- else:
302
  features = datasets.Features({
303
  "data_idx": datasets.Value("string"),
304
  "H": datasets.Value("int32"),
305
  "W": datasets.Value("int32"),
306
  "image": datasets.Array3D((in_chans, img_size, img_size), 'float32'),
 
 
 
 
 
 
 
 
 
307
  })
308
 
309
  return datasets.DatasetInfo(
@@ -316,68 +331,45 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
316
  fake_cur = []
317
  fake_pdn = []
318
  fake_dist = []
319
- fake_irdrop = []
320
  fake_netlist = []
321
 
322
  real_idx = []
323
  real_cur = []
324
  real_pdn = []
325
  real_dist = []
326
- real_irdrop = []
327
  real_netlist = []
328
 
329
- if self.config.use_BeGAN:
330
- BeGAN_01_idx = []
331
- BeGAN_01_cur = []
332
- BeGAN_01_pdn = []
333
- BeGAN_01_dist = []
334
- BeGAN_01_irdrop = []
335
- BeGAN_01_netlist = []
336
-
337
- BeGAN_02_idx = []
338
- BeGAN_02_cur = []
339
- BeGAN_02_pdn = []
340
- BeGAN_02_dist = []
341
- BeGAN_02_irdrop = []
342
- BeGAN_02_netlist = []
343
-
344
 
345
  # Download images
346
- fake_data_files = os.path.join(dl_manager.download_and_extract(_URLS["fake_data_url"]), "fake-circuit-data_20230623")
347
  real_data_files = os.path.join(dl_manager.download_and_extract(_URLS["real_data_url"]), "real-circuit-data_20230615")
348
-
349
- fake_path_files = sorted(glob.glob(os.path.join(fake_data_files, "*.sp")))
350
  real_path_files = sorted(glob.glob(os.path.join(real_data_files, "*")))
351
 
352
- if self.config.use_BeGAN:
 
 
 
 
353
  BeGAN_01_data_files = os.path.join(dl_manager.download_and_extract(_URLS["BeGAN_01_data_url"]), "BeGAN-ver01")
354
  BeGAN_01_path_files = sorted(glob.glob(os.path.join(BeGAN_01_data_files, "*.sp")))
355
 
356
  BeGAN_02_data_files = os.path.join(dl_manager.download_and_extract(_URLS["BeGAN_02_data_url"]), "BeGAN-ver02")
357
  BeGAN_02_path_files = sorted(glob.glob(os.path.join(BeGAN_02_data_files, "*.sp")))
358
 
359
- # for fake
360
- for path in fake_path_files:
361
- data_idx = os.path.basename(path).split(".")[0]
362
- fake_idx.append(data_idx)
363
- data_path = glob.glob(os.path.join(os.path.dirname(path), data_idx + "*.*"))
364
-
365
- for data in data_path:
366
- if "current.csv" in os.path.basename(data):
367
- fake_cur.append(data)
368
- elif "eff_dist.csv" in os.path.basename(data):
369
- fake_dist.append(data)
370
- elif "ir_drop.csv" in os.path.basename(data):
371
- fake_irdrop.append(data)
372
- elif "pdn_density.csv" in os.path.basename(data):
373
- fake_pdn.append(data)
374
- elif ".sp" in os.path.basename(data):
375
- fake_netlist.append(data)
376
- else:
377
- raise AssertionError(os.path.basename(data), "fake data path error")
378
-
379
- assert len(fake_idx) == len(fake_cur) == len(fake_dist) == len(fake_irdrop) == len(fake_pdn) == len(fake_netlist), f"{(len(fake_idx), len(fake_cur), len(fake_dist), len(fake_irdrop), len(fake_pdn), len(fake_netlist))} fake data length not the same"
380
-
381
  # for real
382
  for path in real_path_files:
383
  data_idx = os.path.basename(path)
@@ -390,16 +382,40 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
390
  elif "eff_dist_map.csv" in os.path.basename(data):
391
  real_dist.append(data)
392
  elif "ir_drop_map.csv" in os.path.basename(data):
393
- real_irdrop.append(data)
394
  elif "pdn_density.csv" in os.path.basename(data):
395
  real_pdn.append(data)
396
  elif "netlist.sp" in os.path.basename(data):
397
  real_netlist.append(data)
398
  else:
399
  raise AssertionError(os.path.basename(data), "real data path error")
400
-
401
- assert len(real_idx) == len(real_cur) == len(real_dist) == len(real_irdrop) == len(real_pdn) == len(real_netlist), f"{(len(real_idx), len(real_cur), len(real_dist), len(real_irdrop), len(real_pdn), len(real_netlist))} real data length not the same"
402
- if self.config.use_BeGAN:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
403
  # for BeGAN-ver01
404
  for path in BeGAN_01_path_files:
405
  data_idx = os.path.basename(path).split(".")[0]
@@ -412,7 +428,7 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
412
  elif "eff_dist.csv" in os.path.basename(data):
413
  BeGAN_01_dist.append(data)
414
  elif "ir_drop_map.csv" in os.path.basename(data):
415
- BeGAN_01_irdrop.append(data)
416
  elif "pdn_density.csv" in os.path.basename(data):
417
  BeGAN_01_pdn.append(data)
418
  elif ".sp" in os.path.basename(data):
@@ -420,7 +436,7 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
420
  else:
421
  raise AssertionError(os.path.basename(data), "BeGAN-ver01 data path error")
422
 
423
- assert len(BeGAN_01_idx) == len(BeGAN_01_cur) == len(BeGAN_01_dist) == len(BeGAN_01_irdrop) == len(BeGAN_01_pdn) == len(BeGAN_01_netlist), f"{(len(BeGAN_01_idx), len(BeGAN_01_cur), len(BeGAN_01_dist), len(BeGAN_01_irdrop), len(BeGAN_01_pdn), len(BeGAN_01_netlist))} BeGAN-ver02 data length not the same"
424
 
425
  # for BeGAN-ver02
426
  for path in BeGAN_02_path_files:
@@ -434,7 +450,7 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
434
  elif "eff_dist.csv" in os.path.basename(data):
435
  BeGAN_02_dist.append(data)
436
  elif "voltage.csv" in os.path.basename(data):
437
- BeGAN_02_irdrop.append(data)
438
  elif "regions.csv" in os.path.basename(data):
439
  BeGAN_02_pdn.append(data)
440
  elif ".sp" in os.path.basename(data):
@@ -442,60 +458,61 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
442
  else:
443
  raise AssertionError(os.path.basename(data), "BeGAN-ver01 data path error")
444
 
445
- assert len(BeGAN_02_idx) == len(BeGAN_02_cur) == len(BeGAN_02_dist) == len(BeGAN_02_irdrop) == len(BeGAN_02_pdn) == len(BeGAN_02_netlist), f"{(len(BeGAN_02_idx), len(BeGAN_02_cur), len(BeGAN_02_dist), len(BeGAN_02_irdrop), len(BeGAN_02_pdn), len(BeGAN_02_netlist))} BeGAN-ver01 data length not the same"
446
-
447
- return [datasets.SplitGenerator(
448
- name=datasets.Split("real"),
449
- gen_kwargs={
450
- "data_idx": real_idx,
451
- "current": real_cur,
452
- "pdn_density": real_pdn,
453
- "eff_dist": real_dist,
454
- "ir_drop": real_irdrop,
455
- "netlist": real_netlist,
456
- })
457
- ] if self.config.test_mode else [
458
- datasets.SplitGenerator(
459
- name=datasets.Split("fake"),
460
- gen_kwargs={
461
- "data_idx": fake_idx,
462
- "current": fake_cur,
463
- "pdn_density": fake_pdn,
464
- "eff_dist": fake_dist,
465
- "ir_drop": fake_irdrop,
466
- "netlist": fake_netlist,
467
- })
468
- ] + [datasets.SplitGenerator(
469
- name=datasets.Split("real"),
470
- gen_kwargs={
471
- "data_idx": real_idx,
472
- "current": real_cur,
473
- "pdn_density": real_pdn,
474
- "eff_dist": real_dist,
475
- "ir_drop": real_irdrop,
476
- "netlist": real_netlist,
477
- })
478
- ] + ([datasets.SplitGenerator(
479
- name=datasets.Split("BeGAN_01"),
480
- gen_kwargs={
481
- "data_idx": BeGAN_01_idx,
482
- "current": BeGAN_01_cur,
483
- "pdn_density": BeGAN_01_pdn,
484
- "eff_dist": BeGAN_01_dist,
485
- "ir_drop": BeGAN_01_irdrop,
486
- "netlist": BeGAN_01_netlist,
487
- }),
488
- datasets.SplitGenerator(
489
- name=datasets.Split("BeGAN_02"),
490
- gen_kwargs={
491
- "data_idx": BeGAN_02_idx,
492
- "current": BeGAN_02_cur,
493
- "pdn_density": BeGAN_02_pdn,
494
- "eff_dist": BeGAN_02_dist,
495
- "ir_drop": BeGAN_02_irdrop,
496
- "netlist": BeGAN_02_netlist,
497
- })
498
- ] if self.config.use_BeGAN else [])
 
499
 
500
 
501
  def _generate_examples(self, data_idx, current, pdn_density, eff_dist, ir_drop, netlist):
@@ -761,16 +778,16 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
761
  else:
762
  raise AssertionError(_data_idx, "R map layer not found", row[0], row[3])
763
 
764
- # clean not connect
765
- # R_map_99[:, ~V_map.any(axis=0)] = 0
766
- # R_map_89[:, ~V_map.any(axis=0)] = 0
767
-
768
  # clean wrong via
769
  R_map_14[(R_map_11 == 0) | (R_map_44 == 0)] = 0
770
  R_map_47[(R_map_44 == 0) | (R_map_77 == 0)] = 0
771
  R_map_78[(R_map_77 == 0) | (R_map_88 == 0)] = 0
772
  R_map_89[(R_map_88 == 0) | (R_map_99 == 0)] = 0
773
-
 
 
 
 
774
 
775
  # I
776
  df_I = df[df["type"].str.contains("I")]
@@ -830,29 +847,30 @@ class ICCAD_Dataset(datasets.GeneratorBasedBuilder):
830
 
831
 
832
  # resize
833
- R_map_11 = resize(R_map_11) / (H * W) * (_H * _W)
834
- R_map_14 = resize(R_map_14) / (H * W) * (_H * _W)
835
- R_map_44 = resize(R_map_44) / (H * W) * (_H * _W)
836
- R_map_47 = resize(R_map_47) / (H * W) * (_H * _W)
837
- R_map_77 = resize(R_map_77) / (H * W) * (_H * _W)
838
- R_map_78 = resize(R_map_78) / (H * W) * (_H * _W)
839
- R_map_88 = resize(R_map_88) / (H * W) * (_H * _W)
840
- R_map_89 = resize(R_map_89) / (H * W) * (_H * _W)
841
- R_map_99 = resize(R_map_99) / (H * W) * (_H * _W)
842
- I_map = resize(I_map) / (H * W) * (_H * _W)
843
- V_map = resize(V_map) / (H * W) * (_H * _W)
844
  if self.config.use_multi_dist:
845
- V_multi_dist = resize(1 / V_multi_dist) / math.sqrt(H * W) * math.sqrt(_H * _W)
846
- R_multi_dist_14 = resize(1 / R_multi_dist_14) / math.sqrt(H * W) * math.sqrt(_H * _W)
847
- R_multi_dist_47 = resize(1 / R_multi_dist_47) / math.sqrt(H * W) * math.sqrt(_H * _W)
848
- R_multi_dist_78 = resize(1 / R_multi_dist_78) / math.sqrt(H * W) * math.sqrt(_H * _W)
849
- R_multi_dist_89 = resize(1 / R_multi_dist_89) / math.sqrt(H * W) * math.sqrt(_H * _W)
850
  if self.config.use_ir_map:
851
- IR_map_14 = resize(IR_map_14, max_pool=False) / (H * W) * (_H * _W) / (H * W) * (_H * _W)
852
- IR_map_47 = resize(IR_map_47, max_pool=False) / (H * W) * (_H * _W) / (H * W) * (_H * _W)
853
- IR_map_78 = resize(IR_map_78, max_pool=False) / (H * W) * (_H * _W) / (H * W) * (_H * _W)
854
- IR_map_89 = resize(IR_map_89, max_pool=False) / (H * W) * (_H * _W) / (H * W) * (_H * _W)
855
 
 
856
 
857
  yield _idx, get_image(self, {
858
  **{
 
39
  patchify: bool = False
40
  # transform
41
  use_resize: bool = True
42
+ img_size: int = 384
43
+ dist_type: str = 'cdt'
44
+ clean_connection: bool = True
45
  use_i_map: bool = True
46
  use_r_map: bool = True
47
  use_r_dist: bool = True
 
63
  img = patchify(img, patch_size=(img_size, img_size, img.shape[-1]), step=img_size)
64
  img = img.reshape((-1, img_size, img_size, img.shape[-1])).transpose((0, 3, 1, 2))
65
  elif self.config.use_resize:
66
+ if max_pool:
67
+ img = torch.from_numpy(img).unsqueeze(0)
68
+ img = F.adaptive_max_pool2d(img, img_size)
69
+ img = img.squeeze(0).numpy()
70
+ img = img.transpose((2, 0, 1))
71
+ else:
72
+ img = A.Resize(img_size, img_size, interpolation=cv2.INTER_AREA)(image=img)["image"]
73
+ img = img.transpose((2, 0, 1))
74
+ else:
75
  img = img.transpose((2, 0, 1))
76
 
77
  return img
 
142
  get_dist(self, data["R_map_88"]) * get_blur(data["I_map"], ksize=49),
143
  get_dist(self, data["R_map_89"]) * get_blur(data["I_map"], ksize=49),
144
  get_dist(self, data["R_map_99"]) * get_blur(data["I_map"], ksize=49),
145
+ ] if self.config.use_power_map else []), axis=2)),
146
+ "ir_drop": data["ir_drop"],
147
  }
148
 
149
  def get_IR_map_H(Via_map, I_map):
 
302
  "H": datasets.Value("int32"),
303
  "W": datasets.Value("int32"),
304
  "image": datasets.Array4D((None, in_chans, img_size, img_size), 'float32'),
305
+ "ir_drop": datasets.Image(),
306
  })
307
+ elif self.config.use_resize:
308
  features = datasets.Features({
309
  "data_idx": datasets.Value("string"),
310
  "H": datasets.Value("int32"),
311
  "W": datasets.Value("int32"),
312
  "image": datasets.Array3D((in_chans, img_size, img_size), 'float32'),
313
+ "ir_drop": datasets.Image(),
314
+ })
315
+ else:
316
+ features = datasets.Features({
317
+ "data_idx": datasets.Value("string"),
318
+ "H": datasets.Value("int32"),
319
+ "W": datasets.Value("int32"),
320
+ "image": datasets.Array3D((in_chans, 1, -1), 'float32'),
321
+ "ir_drop": datasets.Image(),
322
  })
323
 
324
  return datasets.DatasetInfo(
 
331
  fake_cur = []
332
  fake_pdn = []
333
  fake_dist = []
334
+ fake_ir_drop = []
335
  fake_netlist = []
336
 
337
  real_idx = []
338
  real_cur = []
339
  real_pdn = []
340
  real_dist = []
341
+ real_ir_drop = []
342
  real_netlist = []
343
 
344
+ BeGAN_01_idx = []
345
+ BeGAN_01_cur = []
346
+ BeGAN_01_pdn = []
347
+ BeGAN_01_dist = []
348
+ BeGAN_01_ir_drop = []
349
+ BeGAN_01_netlist = []
350
+
351
+ BeGAN_02_idx = []
352
+ BeGAN_02_cur = []
353
+ BeGAN_02_pdn = []
354
+ BeGAN_02_dist = []
355
+ BeGAN_02_ir_drop = []
356
+ BeGAN_02_netlist = []
 
 
357
 
358
  # Download images
 
359
  real_data_files = os.path.join(dl_manager.download_and_extract(_URLS["real_data_url"]), "real-circuit-data_20230615")
 
 
360
  real_path_files = sorted(glob.glob(os.path.join(real_data_files, "*")))
361
 
362
+ if not self.config.test_mode:
363
+ fake_data_files = os.path.join(dl_manager.download_and_extract(_URLS["fake_data_url"]), "fake-circuit-data_20230623")
364
+ fake_path_files = sorted(glob.glob(os.path.join(fake_data_files, "*.sp")))
365
+
366
+ if self.config.use_BeGAN and not self.config.test_mode:
367
  BeGAN_01_data_files = os.path.join(dl_manager.download_and_extract(_URLS["BeGAN_01_data_url"]), "BeGAN-ver01")
368
  BeGAN_01_path_files = sorted(glob.glob(os.path.join(BeGAN_01_data_files, "*.sp")))
369
 
370
  BeGAN_02_data_files = os.path.join(dl_manager.download_and_extract(_URLS["BeGAN_02_data_url"]), "BeGAN-ver02")
371
  BeGAN_02_path_files = sorted(glob.glob(os.path.join(BeGAN_02_data_files, "*.sp")))
372
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
373
  # for real
374
  for path in real_path_files:
375
  data_idx = os.path.basename(path)
 
382
  elif "eff_dist_map.csv" in os.path.basename(data):
383
  real_dist.append(data)
384
  elif "ir_drop_map.csv" in os.path.basename(data):
385
+ real_ir_drop.append(data)
386
  elif "pdn_density.csv" in os.path.basename(data):
387
  real_pdn.append(data)
388
  elif "netlist.sp" in os.path.basename(data):
389
  real_netlist.append(data)
390
  else:
391
  raise AssertionError(os.path.basename(data), "real data path error")
392
+
393
+ assert len(real_idx) == len(real_cur) == len(real_dist) == len(real_ir_drop) == len(real_pdn) == len(real_netlist), f"{(len(real_idx), len(real_cur), len(real_dist), len(real_ir_drop), len(real_pdn), len(real_netlist))} real data length not the same"
394
+
395
+ # for fake
396
+ if not self.config.test_mode:
397
+ for path in fake_path_files:
398
+ data_idx = os.path.basename(path).split(".")[0]
399
+ fake_idx.append(data_idx)
400
+ data_path = glob.glob(os.path.join(os.path.dirname(path), data_idx + "*.*"))
401
+
402
+ for data in data_path:
403
+ if "current.csv" in os.path.basename(data):
404
+ fake_cur.append(data)
405
+ elif "eff_dist.csv" in os.path.basename(data):
406
+ fake_dist.append(data)
407
+ elif "ir_drop.csv" in os.path.basename(data):
408
+ fake_ir_drop.append(data)
409
+ elif "pdn_density.csv" in os.path.basename(data):
410
+ fake_pdn.append(data)
411
+ elif ".sp" in os.path.basename(data):
412
+ fake_netlist.append(data)
413
+ else:
414
+ raise AssertionError(os.path.basename(data), "fake data path error")
415
+
416
+ assert len(fake_idx) == len(fake_cur) == len(fake_dist) == len(fake_ir_drop) == len(fake_pdn) == len(fake_netlist), f"{(len(fake_idx), len(fake_cur), len(fake_dist), len(fake_ir_drop), len(fake_pdn), len(fake_netlist))} fake data length not the same"
417
+
418
+ if self.config.use_BeGAN and not self.config.test_mode:
419
  # for BeGAN-ver01
420
  for path in BeGAN_01_path_files:
421
  data_idx = os.path.basename(path).split(".")[0]
 
428
  elif "eff_dist.csv" in os.path.basename(data):
429
  BeGAN_01_dist.append(data)
430
  elif "ir_drop_map.csv" in os.path.basename(data):
431
+ BeGAN_01_ir_drop.append(data)
432
  elif "pdn_density.csv" in os.path.basename(data):
433
  BeGAN_01_pdn.append(data)
434
  elif ".sp" in os.path.basename(data):
 
436
  else:
437
  raise AssertionError(os.path.basename(data), "BeGAN-ver01 data path error")
438
 
439
+ assert len(BeGAN_01_idx) == len(BeGAN_01_cur) == len(BeGAN_01_dist) == len(BeGAN_01_ir_drop) == len(BeGAN_01_pdn) == len(BeGAN_01_netlist), f"{(len(BeGAN_01_idx), len(BeGAN_01_cur), len(BeGAN_01_dist), len(BeGAN_01_ir_drop), len(BeGAN_01_pdn), len(BeGAN_01_netlist))} BeGAN-ver02 data length not the same"
440
 
441
  # for BeGAN-ver02
442
  for path in BeGAN_02_path_files:
 
450
  elif "eff_dist.csv" in os.path.basename(data):
451
  BeGAN_02_dist.append(data)
452
  elif "voltage.csv" in os.path.basename(data):
453
+ BeGAN_02_ir_drop.append(data)
454
  elif "regions.csv" in os.path.basename(data):
455
  BeGAN_02_pdn.append(data)
456
  elif ".sp" in os.path.basename(data):
 
458
  else:
459
  raise AssertionError(os.path.basename(data), "BeGAN-ver01 data path error")
460
 
461
+ assert len(BeGAN_02_idx) == len(BeGAN_02_cur) == len(BeGAN_02_dist) == len(BeGAN_02_ir_drop) == len(BeGAN_02_pdn) == len(BeGAN_02_netlist), f"{(len(BeGAN_02_idx), len(BeGAN_02_cur), len(BeGAN_02_dist), len(BeGAN_02_ir_drop), len(BeGAN_02_pdn), len(BeGAN_02_netlist))} BeGAN-ver01 data length not the same"
462
+
463
+ if self.config.test_mode:
464
+ return [datasets.SplitGenerator(
465
+ name=datasets.Split("real"),
466
+ gen_kwargs={
467
+ "data_idx": real_idx,
468
+ "current": real_cur,
469
+ "pdn_density": real_pdn,
470
+ "eff_dist": real_dist,
471
+ "ir_drop": real_ir_drop,
472
+ "netlist": real_netlist,
473
+ })]
474
+ else:
475
+ return [datasets.SplitGenerator(
476
+ name=datasets.Split("fake"),
477
+ gen_kwargs={
478
+ "data_idx": fake_idx,
479
+ "current": fake_cur,
480
+ "pdn_density": fake_pdn,
481
+ "eff_dist": fake_dist,
482
+ "ir_drop": fake_ir_drop,
483
+ "netlist": fake_netlist,
484
+ })
485
+ ] + [datasets.SplitGenerator(
486
+ name=datasets.Split("real"),
487
+ gen_kwargs={
488
+ "data_idx": real_idx,
489
+ "current": real_cur,
490
+ "pdn_density": real_pdn,
491
+ "eff_dist": real_dist,
492
+ "ir_drop": real_ir_drop,
493
+ "netlist": real_netlist,
494
+ })
495
+ ] + ([datasets.SplitGenerator(
496
+ name=datasets.Split("BeGAN_01"),
497
+ gen_kwargs={
498
+ "data_idx": BeGAN_01_idx,
499
+ "current": BeGAN_01_cur,
500
+ "pdn_density": BeGAN_01_pdn,
501
+ "eff_dist": BeGAN_01_dist,
502
+ "ir_drop": BeGAN_01_ir_drop,
503
+ "netlist": BeGAN_01_netlist,
504
+ }),
505
+ datasets.SplitGenerator(
506
+ name=datasets.Split("BeGAN_02"),
507
+ gen_kwargs={
508
+ "data_idx": BeGAN_02_idx,
509
+ "current": BeGAN_02_cur,
510
+ "pdn_density": BeGAN_02_pdn,
511
+ "eff_dist": BeGAN_02_dist,
512
+ "ir_drop": BeGAN_02_ir_drop,
513
+ "netlist": BeGAN_02_netlist,
514
+ })
515
+ ] if self.config.use_BeGAN else [])
516
 
517
 
518
  def _generate_examples(self, data_idx, current, pdn_density, eff_dist, ir_drop, netlist):
 
778
  else:
779
  raise AssertionError(_data_idx, "R map layer not found", row[0], row[3])
780
 
 
 
 
 
781
  # clean wrong via
782
  R_map_14[(R_map_11 == 0) | (R_map_44 == 0)] = 0
783
  R_map_47[(R_map_44 == 0) | (R_map_77 == 0)] = 0
784
  R_map_78[(R_map_77 == 0) | (R_map_88 == 0)] = 0
785
  R_map_89[(R_map_88 == 0) | (R_map_99 == 0)] = 0
786
+
787
+ # clean not connect
788
+ if self.config.clean_connection:
789
+ R_map_99[:, ~V_map.any(axis=0)] = 0
790
+ R_map_89[:, ~V_map.any(axis=0)] = 0
791
 
792
  # I
793
  df_I = df[df["type"].str.contains("I")]
 
847
 
848
 
849
  # resize
850
+ R_map_11 = resize(R_map_11) * (H * W) / (_H * _W)
851
+ R_map_14 = resize(R_map_14) * (H * W) / (_H * _W)
852
+ R_map_44 = resize(R_map_44) * (H * W) / (_H * _W)
853
+ R_map_47 = resize(R_map_47) * (H * W) / (_H * _W)
854
+ R_map_77 = resize(R_map_77) * (H * W) / (_H * _W)
855
+ R_map_78 = resize(R_map_78) * (H * W) / (_H * _W)
856
+ R_map_88 = resize(R_map_88) * (H * W) / (_H * _W)
857
+ R_map_89 = resize(R_map_89) * (H * W) / (_H * _W)
858
+ R_map_99 = resize(R_map_99) * (H * W) / (_H * _W)
859
+ I_map = resize(I_map) * (H * W) / (_H * _W)
860
+ V_map = resize(V_map) * (H * W) / (_H * _W)
861
  if self.config.use_multi_dist:
862
+ V_multi_dist = resize(1 / V_multi_dist) * math.sqrt(H * W) / math.sqrt(_H * _W)
863
+ R_multi_dist_14 = resize(1 / R_multi_dist_14) * math.sqrt(H * W) / math.sqrt(_H * _W)
864
+ R_multi_dist_47 = resize(1 / R_multi_dist_47) * math.sqrt(H * W) / math.sqrt(_H * _W)
865
+ R_multi_dist_78 = resize(1 / R_multi_dist_78) * math.sqrt(H * W) / math.sqrt(_H * _W)
866
+ R_multi_dist_89 = resize(1 / R_multi_dist_89) * math.sqrt(H * W) / math.sqrt(_H * _W)
867
  if self.config.use_ir_map:
868
+ IR_map_14 = resize(IR_map_14, max_pool=False)
869
+ IR_map_47 = resize(IR_map_47, max_pool=False)
870
+ IR_map_78 = resize(IR_map_78, max_pool=False)
871
+ IR_map_89 = resize(IR_map_89, max_pool=False)
872
 
873
+ img_size = self.config.img_size
874
 
875
  yield _idx, get_image(self, {
876
  **{