ymin98 committed on
Commit
8f5f7e0
·
verified ·
1 Parent(s): e7efe53

Upload 10 files

Browse files
tools/__pycache__/config.cpython-310.pyc ADDED
Binary file (4.57 kB). View file
 
tools/__pycache__/ir_dataset.cpython-310.pyc ADDED
Binary file (15.7 kB). View file
 
tools/config.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ from dataclasses import dataclass, field
3
+ import os
4
+ import cv2
5
+ from datetime import datetime
6
+
7
+ # JSON ํŒŒ์ผ์— ์ €์žฅ๋œ ํ†ต๊ณ„์ •๋ณด์˜ ๊ตฌ์กฐ์™€ ๋™์ผํ•œ ํ•„๋“œ๋ฅผ ๊ฐ–๋Š” dataclass๋“ค
8
@dataclass(frozen=True)
class StatConfig:
    """Immutable per-feature summary statistics mirroring one stats-JSON entry."""
    mean: float  # arithmetic mean of the feature
    std: float   # standard deviation
    min: float   # minimum value (name shadows the builtin to match the JSON key)
    max: float   # maximum value (name shadows the builtin to match the JSON key)
14
+
15
@dataclass(frozen=True)
class ResistanceStats:
    """Resistance statistics: one StatConfig per metal/via layer plus an overall total."""
    # Per-layer stats, e.g. {"m8": StatConfig, "via56": StatConfig, ...}
    layers: dict = field(default_factory=dict)
    # Aggregate stats over all layers; None when absent from the JSON.
    total: StatConfig = None
20
+
21
@dataclass(frozen=True)
class Config:
    """Normalization/config bundle built from a stats JSON (see map_json_to_config)."""
    resolution: tuple = (256, 256)       # raster resolution read from the JSON "meta" section
    interpolation: int = cv2.INTER_AREA  # OpenCV interpolation flag
    ir_drop: StatConfig = None           # target (IR-drop) statistics; None when missing
    current: StatConfig = None           # current-map statistics
    pad: StatConfig = None               # pad-distance statistics
    pdn_density: StatConfig = None       # PDN-density statistics
    resistances: ResistanceStats = None  # per-layer + total resistance statistics
    conductance: dict = field(default_factory=dict)            # {name: StatConfig}
    comprehensive_feature: dict = field(default_factory=dict)  # {channel-index (str): StatConfig}
    meta: dict = field(default_factory=dict)                   # raw "meta" section of the JSON
33
+
34
def map_json_to_config(json_data):
    """Translate a parsed stats-JSON dict into an immutable Config.

    Entries that are missing or lack a usable "mean" map to None, so callers
    can truth-test each field before using it.
    """
    stats = json_data.get("stats", {})
    meta = json_data.get("meta", {})

    def _to_stat(raw):
        # Treat an absent entry, or one without a mean, as "no statistics".
        if not raw or raw.get("mean") is None:
            return None
        return StatConfig(
            mean=raw["mean"],
            std=raw["std"],
            min=raw["min"],
            max=raw["max"],
        )

    # Resistance: per-layer stats plus the aggregate "total" entry.
    resistance_raw = stats.get("resistance", {})
    resistance_cfg = ResistanceStats(
        layers={name: _to_stat(entry)
                for name, entry in resistance_raw.get("layers", {}).items()},
        total=_to_stat(resistance_raw.get("total")),
    )

    return Config(
        resolution=tuple(meta.get('resolution', (256, 256))),
        interpolation=cv2.INTER_AREA,
        ir_drop=_to_stat(stats.get("ir_drop")),
        current=_to_stat(stats.get("current")),
        pad=_to_stat(stats.get("pad")),
        pdn_density=_to_stat(stats.get("pdn_density")),
        resistances=resistance_cfg,
        conductance={name: _to_stat(entry)
                     for name, entry in stats.get("conductance", {}).items()},
        # Channel keys may arrive as ints or names; always store them as str.
        comprehensive_feature={str(ch): _to_stat(entry)
                               for ch, entry in stats.get("comprehensive_feature", {}).items()},
        meta=meta,
    )
81
+
82
def get_config(unit=None, configs_path="/IR_Drop_prior_study/XICCAD/configs", dataset_name='began_iccad_fake'):
    """Load a stats JSON and return the corresponding Config.

    `configs_path` may point directly at a .json file; otherwise
    <configs_path>/<dataset_name>/stats_<unit>.json is read.
    """
    is_direct_file = configs_path.lower().endswith('.json')
    path = configs_path if is_direct_file else os.path.join(
        configs_path, dataset_name, f'stats_{unit}.json')
    with open(path, "r") as fp:
        return map_json_to_config(json.load(fp))
90
+
91
# Usage example: load a stats JSON and print every statistic it contains.
if __name__ == "__main__":
    # configs_path points straight at a .json file, so `unit`/`dataset_name`
    # are effectively ignored by get_config here.
    config = get_config('1um',configs_path='/workspace/IR_Drop_prior_study/XICCAD/configs/test/began_iccad_all/stats_384x384.json',dataset_name='began_iccad_all')

    print("IR Drop ํ†ต๊ณ„:")
    if config.ir_drop:
        print(f" ํ‰๊ท : {config.ir_drop.mean:.8f}")
        print(f" ํ‘œ์ค€ํŽธ์ฐจ: {config.ir_drop.std:.8f}")
        print(f" ์ตœ์†Œ๊ฐ’: {config.ir_drop.min:.8f}")
        print(f" ์ตœ๋Œ€๊ฐ’: {config.ir_drop.max:.8f}")
    else:
        print(" ๋ฐ์ดํ„ฐ ์—†์Œ.")

    print("\nCurrent ํ†ต๊ณ„:")
    if config.current:
        print(f" ํ‰๊ท : {config.current.mean:.8f}")
        print(f" ํ‘œ์ค€ํŽธ์ฐจ: {config.current.std:.8f}")
        print(f" ์ตœ์†Œ๊ฐ’: {config.current.min:.8f}")
        print(f" ์ตœ๋Œ€๊ฐ’: {config.current.max:.8f}")
    else:
        print(" ๋ฐ์ดํ„ฐ ์—†์Œ.")

    print("\nPad ํ†ต๊ณ„:")
    if config.pad:
        print(f" ํ‰๊ท : {config.pad.mean:.8f}")
        print(f" ํ‘œ์ค€ํŽธ์ฐจ: {config.pad.std:.8f}")
        print(f" ์ตœ์†Œ๊ฐ’: {config.pad.min:.8f}")
        print(f" ์ตœ๋Œ€๊ฐ’: {config.pad.max:.8f}")
    else:
        print(" ๋ฐ์ดํ„ฐ ์—†์Œ.")

    print("\nResistance ์ „์ฒด ํ†ต๊ณ„:")
    if config.resistances and config.resistances.total:
        print(f" ํ‰๊ท : {config.resistances.total.mean:.8f}")
        print(f" ํ‘œ์ค€ํŽธ์ฐจ: {config.resistances.total.std:.8f}")
        print(f" ์ตœ์†Œ๊ฐ’: {config.resistances.total.min:.8f}")
        print(f" ์ตœ๋Œ€๊ฐ’: {config.resistances.total.max:.8f}")
    else:
        print(" ๋ฐ์ดํ„ฐ ์—†์Œ.")

    print("\nResistance Layer๋ณ„ ํ†ต๊ณ„:")
    if config.resistances and config.resistances.layers:
        for layer, stat in config.resistances.layers.items():
            print(f"Layer {layer}:")
            print(f" ํ‰๊ท : {stat.mean:.8f}")
            print(f" ํ‘œ์ค€ํŽธ์ฐจ: {stat.std:.8f}")
            print(f" ์ตœ์†Œ๊ฐ’: {stat.min:.8f}")
            print(f" ์ตœ๋Œ€๊ฐ’: {stat.max:.8f}")
    else:
        print(" ๋ฐ์ดํ„ฐ ์—†์Œ.")

    print("\nConductance ํ†ต๊ณ„:")
    if config.conductance:
        for key, stat in config.conductance.items():
            print(f"{key}:")
            if stat:
                print(f" ํ‰๊ท : {stat.mean:.8f}")
                print(f" ํ‘œ์ค€ํŽธ์ฐจ: {stat.std:.8f}")
                print(f" ์ตœ์†Œ๊ฐ’: {stat.min:.8f}")
                print(f" ์ตœ๋Œ€๊ฐ’: {stat.max:.8f}")
            else:
                print(" ๋ฐ์ดํ„ฐ ์—†์Œ.")
        # NOTE(review): comprehensive-feature channels are only printed when
        # conductance stats exist — confirm intended coupling.
        for ch, sc in config.comprehensive_feature.items():
            if sc:
                print(f' Channel {ch}: mean={sc.mean:.6f}, std={sc.std:.6f}, min={sc.min:.6f}, max={sc.max:.6f}')
            else:
                print(f' Channel {ch}: None')

    else:
        print(" ๋ฐ์ดํ„ฐ ์—†์Œ.")
tools/ir_dataset.py ADDED
@@ -0,0 +1,627 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import cv2
3
+ import pandas as pd
4
+ import numpy as np
5
+ from torch.utils.data import Subset
6
+ from sklearn.model_selection import train_test_split
7
+ import torch
8
+ from torch.utils.data import Dataset
9
+ import glob
10
+ import torch.nn.functional as F
11
+ import albumentations as A
12
+ from albumentations.pytorch import ToTensorV2
13
+ import json
14
+
15
def get_config_key(layer):
    """Translate a layer name into its stats-config key.

    Single-digit metal layers keep their own name ('m1' -> 'm1'); names with
    two or more digits are treated as vias ('m14' -> 'via14').
    """
    digits = layer[1:]
    if len(digits) >= 2:
        return f"via{digits}"
    return layer
21
+
22
class IRDropDataset(Dataset):
    """Dataset that stacks per-design power-grid feature maps (current, pad
    distance, PDN density, per-layer resistance, moran/geary/gstar maps, and
    a 23-channel comprehensive feature) into one (C, H, W) input tensor and
    pairs it with the IR-drop map (or voltage map when asap7=True).
    """

    # String name -> OpenCV interpolation flag used for all cv2.resize calls.
    interpolation_map = {
        'nearest': cv2.INTER_NEAREST,
        'bilinear': cv2.INTER_LINEAR,
        'bicubic': cv2.INTER_CUBIC,
        'area': cv2.INTER_AREA,
        'lanczos': cv2.INTER_LANCZOS4
    }

    def __init__(self,
                 root_path, selected_folders, target_layers,
                 img_size=384, post_fix_path='', train=True,
                 in_ch=None, use_raw=False, return_case=False, seed=1234,
                 input_norm_type='z_score', target_norm_type='raw', norm_config=None,
                 # --- flags deciding which feature channels are used ---
                 use_current=False,                # 1 channel
                 use_pad_distance=False,           # 1
                 use_pdn_density=False,            # 1
                 use_resistance=False,             # one channel per target layer
                 use_comprehensive_feature=False,  # 23
                 use_moran=False,                  # 1
                 use_geary=False,                  # 1
                 use_gstar=False,                  # 1
                 # ------------------------------------
                 interpolation='bicubic',
                 asap7=False):
        """Build the file index for the selected design folders.

        in_ch is optional; when given it is cross-checked against the channel
        count implied by the use_* flags and a ValueError is raised on mismatch.
        norm_config is expected to be a Config-like object (see tools/config.py).
        """
        # Basic attributes
        self.root_path = root_path
        self.selected_folders = selected_folders
        # Resistance layers are only relevant when resistance channels are on.
        self.target_layers = target_layers if use_resistance else []
        self.img_size = img_size
        self.post_fix = post_fix_path
        self.train = train
        self.use_raw = use_raw
        self.return_case = return_case
        self.input_norm_type = input_norm_type
        self.target_norm_type = target_norm_type
        self.conf = norm_config
        self.asap7 = asap7
        self.interpolation = self.interpolation_map[interpolation]

        # --- 1. Centralized feature configuration (glob pattern, stats key,
        #        channel count, enabled flag) ---
        self.feature_config = {
            'current': {'use': use_current, 'pattern': '*current*.npy', 'norm_key': 'current', 'channels': 1},
            'eff_dist': {'use': use_pad_distance, 'pattern': '*eff_dist*.npy', 'norm_key': 'pad', 'channels': 1},
            'pdn_density': {'use': use_pdn_density, 'pattern': '*pdn_density*.npy', 'norm_key': 'pdn_density', 'channels': 1},
            'resistance': {'use': use_resistance, 'pattern': 'layer_data/*{}_resistance*.npy', 'channels': len(self.target_layers)},
            # ===== (added) spatial-autocorrelation feature configs =====
            'moran': {'use': use_moran, 'pattern': f'layer_data/*moran_{self.img_size}_{self.img_size}.npy', 'norm_key': 'moran', 'channels': 1},
            'geary': {'use': use_geary, 'pattern': f'layer_data/*geary_{self.img_size}_{self.img_size}.npy', 'norm_key': 'geary', 'channels': 1},
            'gstar': {'use': use_gstar, 'pattern': f'layer_data/*gstar_{self.img_size}_{self.img_size}.npy', 'norm_key': 'gstar', 'channels': 1},
            # ------------------------------------
            'comp_feature': {'use': use_comprehensive_feature, 'pattern': f'layer_data/*comprehensive_feature_{self.img_size}*.npy', 'channels': 23},
        }

        # ASAP7 designs do not provide eff_dist / pdn_density maps.
        if self.asap7:
            self.feature_config['eff_dist']['use'] = False
            self.feature_config['pdn_density']['use'] = False

        # --- 2. Derive the input channel count and validate the caller's in_ch ---
        calculated_in_ch = sum(config['channels'] for config in self.feature_config.values() if config['use'])

        if in_ch is not None and in_ch != calculated_in_ch:
            raise ValueError(
                f"Input channel mismatch! User provided in_ch={in_ch}, but calculated channels from "
                f"'use_{{feature}}' flags is {calculated_in_ch}."
            )
        self.in_ch = calculated_in_ch

        # ASAP7 targets are voltage maps; everything else uses IR-drop maps.
        self.target_pattern = '*voltage*.npy' if self.asap7 else '*ir_drop*.npy'

        # Train-time augmentation: one random dihedral transform; eval: identity.
        self.transform = A.Compose([
            A.OneOf([
                A.HorizontalFlip(p=1), A.VerticalFlip(p=1),
                A.Rotate(limit=(90, 90), p=1), A.Rotate(limit=(180, 180), p=1),
                A.Rotate(limit=(270, 270), p=1), A.NoOp(p=1)
            ], p=1) if self.train else A.NoOp(p=1),
            ToTensorV2()
        ], is_check_shapes=False,seed=seed)

        self.data_files = self._find_files()

    def _find_files(self):
        """Glob the target and every enabled feature per folder, then zip the
        sorted lists into one path-dict per sample.

        Raises ValueError if any feature's file count differs from the number
        of target files; returns [] (with a warning) when no targets exist.
        """
        # One list per enabled feature; resistance gets a dict of per-layer lists.
        file_lists = {key: [] for key, conf in self.feature_config.items() if conf['use']}
        file_lists['target'] = []
        if self.feature_config['resistance']['use']:
            file_lists['resistance'] = {ly: [] for ly in self.target_layers}

        for fld in self.selected_folders:
            base = os.path.join(self.root_path, fld, self.post_fix)
            if not os.path.isdir(base): continue

            file_lists['target'] += glob.glob(os.path.join(base, self.target_pattern))

            for key, config in self.feature_config.items():
                if not config['use']: continue

                pattern = config['pattern']
                if key == 'resistance':
                    for ly in self.target_layers:
                        ly_pattern = pattern.format(ly)
                        file_lists['resistance'][ly] += glob.glob(os.path.join(base, ly_pattern))
                else:
                    file_lists[key] += glob.glob(os.path.join(base, pattern))

        # Sort every list so features align index-by-index across lists.
        for key, item in file_lists.items():
            if isinstance(item, list): item.sort()
            elif isinstance(item, dict):
                for sub_item in item.values(): sub_item.sort()

        n = len(file_lists['target'])
        if n == 0:
            print(f"Warning: No target files found in provided folders: {self.selected_folders}")
            return []

        # Every enabled feature must contribute exactly one file per target.
        for key, item in file_lists.items():
            if not self.feature_config.get(key, {}).get('use', True): continue

            if key == 'resistance':
                for ly, files in item.items():
                    if len(files) != n: raise ValueError(f"Mismatch in file counts for resistance layer '{ly}'. Expected {n}, got {len(files)}!")
            elif len(item) != n:
                raise ValueError(f"Mismatch in file counts for feature '{key}'. Expected {n}, got {len(item)}!")

        # Zip the parallel lists into one dict of paths per sample.
        data_entries = []
        for i in range(n):
            entry = {'target': file_lists['target'][i]}
            for key, config in self.feature_config.items():
                if not config['use']: continue
                if key == 'resistance':
                    entry[key] = {ly: file_lists[key][ly][i] for ly in self.target_layers}
                else:
                    entry[key] = file_lists[key][i]
            data_entries.append(entry)

        return data_entries

    def _load_data_from_disk(self, idx):
        """Load, resize and normalize every enabled channel for sample *idx*.

        Returns (input array of shape (H, W, C), raw target array).
        """
        f_paths = self.data_files[idx]
        channels_to_stack = []

        # Load physical/basic features (resistance expands to one channel per layer).
        for key in ['current', 'eff_dist', 'pdn_density', 'resistance']:
            if self.feature_config[key]['use']:
                if key == 'resistance':
                    for ly in self.target_layers:
                        channels_to_stack.append((np.load(f_paths[key][ly]), get_config_key(ly)))
                else:
                    channels_to_stack.append((np.load(f_paths[key]), self.feature_config[key]['norm_key']))

        # Autocorrelation maps are stored as (1, H, W); take channel 0.
        for key in ['moran', 'geary', 'gstar']:
            if self.feature_config[key]['use']:
                channels_to_stack.append((np.load(f_paths[key])[0], self.feature_config[key]['norm_key']))
        # ------------------------------------------

        # Comprehensive feature: one channel per last-axis slice, keyed 'comp_<i>'.
        if self.feature_config['comp_feature']['use']:
            comp_data = np.load(f_paths['comp_feature'])
            for c in range(comp_data.shape[-1]):
                channels_to_stack.append((comp_data[..., c], f'comp_{c}'))

        processed_channels = []
        for data, norm_key in channels_to_stack:
            resized_data = cv2.resize(data, (self.img_size, self.img_size), interpolation=self.interpolation)
            normalized_data = self.normalize_input(resized_data, norm_key)
            processed_channels.append(normalized_data)

        # Channel-last stack; ToTensorV2 later converts to (C, H, W).
        input_data = np.stack(processed_channels, axis=-1)
        target_data = np.load(f_paths['target'])

        return input_data, target_data

    def __getitem__(self, idx):
        """Return (x, y) — or (x, y, case_name) when return_case is set —
        with x shaped (C, H, W) and y shaped (1, H, W) float tensors."""
        inp, tgt = self._load_data_from_disk(idx)

        if not self.use_raw:
            # Resize + normalize the target and augment input and mask together.
            ir_resized = cv2.resize(tgt, (self.img_size, self.img_size), interpolation=self.interpolation)
            target = self.normalize_target(ir_resized)
            transformed = self.transform(image=inp, mask=target)
            y = transformed['mask'].unsqueeze(0).float()
        else:
            # Raw mode: target keeps its native resolution and values and
            # receives no augmentation (only the input goes through transform).
            target = tgt
            transformed = self.transform(image=inp)
            y = torch.as_tensor(target).unsqueeze(0).float()

        x = transformed['image'].float()

        if self.return_case:
            # Prefer the path component containing 'case'; fall back to the file name.
            case_path = self.data_files[idx]['target']
            tc_name = next((p for p in case_path.split('/') if 'case' in p), os.path.basename(case_path))
            return x, y, tc_name

        return x, y

    def __len__(self):
        # One entry per target file discovered by _find_files.
        return len(self.data_files)

    def normalize_input(self, data, key):
        """Normalize one input channel according to self.input_norm_type,
        looking up global statistics in self.conf by *key*."""
        t = self.input_norm_type
        out = data.astype(np.float32)
        conf = self.conf
        # Without a stats config only raw values can be returned.
        if conf is None: return out

        mean, std, max_val = None, None, None

        if key == 'current':
            mean, std, max_val = conf.current.mean, conf.current.std, conf.current.max
        elif key == 'pad':
            mean, std, max_val = conf.pad.mean, conf.pad.std, conf.pad.max
        elif key == 'pdn_density':
            mean, std, max_val = conf.pdn_density.mean, conf.pdn_density.std, conf.pdn_density.max
        elif key.startswith('via') or key.startswith('m'):
            # NOTE(review): 'moran' also starts with 'm', so it is captured by
            # this branch and never reaches the dedicated moran/geary/gstar
            # branch below; as it is not a resistance layer, its stats stay
            # None and z_score becomes a no-op for it — confirm intended.
            if hasattr(conf, 'resistances') and conf.resistances and key in conf.resistances.layers:
                layer_conf = conf.resistances.layers[key]
                if layer_conf: mean, std, max_val = layer_conf.mean, layer_conf.std, layer_conf.max
        # ===== (added) normalization stats for the new features =====
        elif key in ['moran', 'geary', 'gstar']:
            if hasattr(conf, key):
                feature_conf_dict = getattr(conf, key)
                # The stats file stores these as {'0': {'mean': ...}}, so index with '0'.
                if '0' in feature_conf_dict and feature_conf_dict['0']:
                    stat_conf = feature_conf_dict['0']
                    mean, std, max_val = stat_conf.mean, stat_conf.std, stat_conf.max
        # ------------------------------------------
        elif key.startswith('comp_'):
            idx = key.split('_')[1]
            if hasattr(conf, 'comprehensive_feature') and idx in conf.comprehensive_feature:
                comp_conf = conf.comprehensive_feature[idx]
                if comp_conf: mean, std, max_val = comp_conf.mean, comp_conf.std, comp_conf.max

        if t in ['global_max', 'g_max']:
            if max_val and max_val > 0: out /= max_val
        elif t in ['max', 'sample_max']:
            m = out.max()
            if m != 0: out /= m
        elif t == 'min_max':
            mi, ma = out.min(), out.max()
            if ma != mi: out = (out - mi) / (ma - mi)
            else: out = out - mi
        elif t == 'z_score':
            if mean is not None and std is not None and std > 0:
                out = (out - mean) / std
        return out

    def normalize_target(self, tgt):
        """Normalize the target map according to self.target_norm_type.

        In 'raw'/'1000' mode the target is scaled by 1e3; inverse_torch undoes
        this with 1e-3. NOTE(review): when ir_drop stats are missing, the raw
        values are returned and even the 1e3 scaling is skipped — confirm intended.
        """
        t = self.target_norm_type
        out = tgt.astype(np.float32)
        if self.conf is None or not hasattr(self.conf, 'ir_drop') or self.conf.ir_drop is None: return out

        if t in ['global_max','g_max']:
            if self.conf.ir_drop.max > 0: out /= self.conf.ir_drop.max
        elif t in ['max','sample_max']:
            m = out.max()
            if m != 0: out /= m
        elif t=='min_max':
            mi, ma = out.min(), out.max()
            if ma != mi: out = (out-mi)/(ma-mi)
            else: out = out-mi
        elif t=='z_score':
            mean, std = self.conf.ir_drop.mean, self.conf.ir_drop.std
            if std and std>0: out = (out - mean) / std
        elif t in ['raw','1000']:
            out *= 1e3
        return out

    def get_stat_from_conf(self, feature, stat_type='max', layer_key=None):
        """Fetch one statistic ('mean'/'std'/'min'/'max') for *feature* from
        self.conf; layer_key selects the layer for feature='resistance'.
        Returns None for unknown features or missing stats."""
        conf = self.conf
        if feature=='ir_drop': return getattr(conf.ir_drop, stat_type, None)
        if feature=='current': return getattr(conf.current, stat_type, None)
        if feature in ['pad','pad_distance']: return getattr(conf.pad, stat_type, None)
        if feature=='pdn_density': return getattr(conf.pdn_density, stat_type, None)
        if feature=='resistance' and layer_key:
            return getattr(conf.resistances.layers.get(layer_key), stat_type, None)
        return None

    def inverse_torch(self, x, norm_type, t_min=None, t_max=None):
        """Invert the target normalization on a torch tensor so predictions can
        be compared in original units. t_min/t_max are needed only for min_max.
        Falls through (returns x unchanged) when the required stats are missing.
        """
        if norm_type in ['global_max', 'g_max']:
            max_val = self.get_stat_from_conf('ir_drop', 'max')
            return x * max_val if max_val is not None else x
        elif norm_type in ['global_max_log', 'g_max_log']:
            # NOTE(review): the 1.3 offset must match the forward log transform,
            # which is not defined in this file — confirm against training code.
            max_val = self.get_stat_from_conf('ir_drop', 'max')
            return (torch.exp(x) - 1.3) * max_val if max_val is not None else x
        elif norm_type in ['global_max_log_div', 'g_max_log_div']:
            max_val = self.get_stat_from_conf('ir_drop', 'max')
            eps = 1e-6
            return torch.exp(torch.log(torch.tensor(max_val + eps,device=x.device)) / (x + eps)) - eps if max_val is not None else x
        elif norm_type in ['z_score']:
            mean_val = self.get_stat_from_conf('ir_drop', 'mean')
            std_val = self.get_stat_from_conf('ir_drop', 'std')
            return x * std_val + mean_val if std_val is not None else x
        elif norm_type in ['min_max', 'sample_min_max']:
            return x * (t_max - t_min) + t_min if t_min is not None and t_max is not None else x
        elif norm_type in ['raw', '1000']:
            # Undo the 1e3 scaling applied by normalize_target in 'raw' mode.
            return x *1e-3
        else:
            print('Inverse normalization not implemented for norm type:', norm_type)
            return x
320
+
321
+ #################################################################################################################
322
def tensor_to_numpy(t: torch.Tensor) -> np.ndarray:
    """Detach *t* from the autograd graph and return it as a CPU numpy array."""
    detached = t.detach()
    return detached.cpu().numpy()
324
+
325
def numpy_to_tensor(n: np.ndarray) -> torch.Tensor:
    """Convert a numpy array to a float32 torch tensor."""
    return torch.from_numpy(n).to(dtype=torch.float32)
327
+
328
def resize_tensor_cv2(tensor: torch.Tensor, target_size: tuple) -> torch.Tensor:
    """Resize a (B, C, H, W) tensor channel-by-channel with cv2.INTER_LANCZOS4.

    target_size is (H_out, W_out); the result is a float32 tensor returned on
    the same device as the input.
    """
    h_out, w_out = target_size
    batch = []
    for img in tensor_to_numpy(tensor):  # img: (C, H, W)
        # cv2.resize expects dsize as (width, height).
        channels = [
            cv2.resize(ch, (w_out, h_out), interpolation=cv2.INTER_LANCZOS4)
            for ch in img
        ]
        batch.append(np.stack(channels, axis=0))
    resized_np = np.stack(batch, axis=0)  # (B, C, H_out, W_out)
    return numpy_to_tensor(resized_np).to(tensor.device)
343
+ ################################################################################################################
344
+
345
+
346
def split_train_val(td,vd, train_ratio=0.8, random_state=42):
    """Split the index space of *td* into train/val and wrap each side in a Subset.

    The train indices subset *td* while the validation indices subset *vd*, so
    two dataset instances with different transforms can share disjoint samples.
    """
    n = len(td)
    n_train = int(train_ratio * n)
    train_idx, valid_idx = train_test_split(
        list(range(n)), train_size=n_train, random_state=random_state)
    return Subset(td, train_idx), Subset(vd, valid_idx)
358
+
359
+ #################################################################################################################
360
+
361
def build_dataset_began_iccad_fake(root_path='', img_size=384, train=True,
                                   in_ch=25, use_raw=False,return_case=False,
                                   target_layers= ['m1', 'm4', 'm7', 'm8', 'm9', 'm14', 'm47', 'm78', 'm89'],
                                   input_norm_type='z_score', target_norm_type='raw', norm_config=None,
                                   use_pdn_density=False,use_comprehensive_feature=False,
                                   selected_folders=[
                                       '/data/BeGAN-circuit-benchmarks/nangate45/set1_numpy/data',
                                       '/data/BeGAN-circuit-benchmarks/nangate45/set2_numpy/data',
                                       '/data/ICCAD_2023/fake-circuit-data_20230623/fake-circuit-data-npy'
                                   ],**kwargs):
    """Build (train, val) splits over BeGAN set1/set2 + ICCAD fake circuits.

    Two IRDropDataset instances over the same folders — train with
    augmentation, val without — are index-split by split_train_val so each
    side sees disjoint samples.
    NOTE(review): `train` and `return_case` are accepted but unused, and the
    train dataset hard-codes use_raw=False (only val honors `use_raw`) —
    confirm intended.
    """
    train_dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=selected_folders,
        img_size=img_size,
        train=True,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=False,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        use_pdn_density=use_pdn_density,
        use_comprehensive_feature=use_comprehensive_feature,
        **kwargs
    )

    val_dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=selected_folders,
        img_size=img_size,
        train=False,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        use_pdn_density=use_pdn_density,
        use_comprehensive_feature=use_comprehensive_feature,
        **kwargs
    )
    print("Dataset length:", len(train_dataset))
    return split_train_val(train_dataset,val_dataset)
404
+
405
def build_dataset_began(root_path='', img_size=384, train=True,
                        in_ch=12, use_raw=False,return_case=False,
                        target_layers= ['m1', 'm4', 'm7', 'm8', 'm9', 'm14', 'm47', 'm78', 'm89'],
                        input_norm_type='z_score', target_norm_type='raw', norm_config=None,
                        use_pdn_density=False,use_comprehensive_feature=False,
                        selected_folders=[
                            '/data/BeGAN-circuit-benchmarks/nangate45/set1_numpy/data',
                            '/data/BeGAN-circuit-benchmarks/nangate45/set2_numpy/data',
                        ],**kwargs):
    """Build one non-augmented IRDropDataset over the BeGAN nangate45 sets.

    NOTE(review): `train`, `return_case` and **kwargs are accepted but never
    forwarded to IRDropDataset (train is hard-coded to False) — confirm intended.
    """
    val_dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=selected_folders,
        img_size=img_size,
        train=False,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        use_pdn_density=use_pdn_density,
        use_comprehensive_feature=use_comprehensive_feature
    )
    print("Dataset length:", len(val_dataset))
    return val_dataset
430
+
431
def build_dataset_iccad_real(root_path='/data/ICCAD_2023/real-circuit-data_20230615', img_size=384, train=True,
                             in_ch=25, use_raw=False, return_case = False,interpolation='bicubic',
                             target_layers= ['m1', 'm4', 'm7', 'm8', 'm9', 'm14', 'm47', 'm78', 'm89'],
                             input_norm_type='z_score', target_norm_type='raw', norm_config=None,
                             use_pdn_density=False,use_comprehensive_feature=False,**kwargs):
    """Build one IRDropDataset spanning every design folder under the
    ICCAD-2023 real-circuit root.

    Only `use_pad_distance` is read from **kwargs; other extra kwargs are ignored.
    """
    # Each subdirectory of root_path is one design/case folder.
    selected_folders = os.listdir(root_path)
    dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=selected_folders,
        img_size=img_size,
        train=train,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        return_case = return_case,
        use_pdn_density=use_pdn_density,
        use_pad_distance=kwargs.get('use_pad_distance',False),
        use_comprehensive_feature=use_comprehensive_feature,
        interpolation=interpolation
    )
    print("Dataset length of iccad-real :", len(dataset))
    return dataset
456
+
457
def build_dataset_iccad_hidden(root_path='/data/ICCAD_2023/hidden-real-circuit-data', img_size=384, train=False,
                               in_ch=12, use_raw=False, return_case=False,
                               target_layers= ['m1', 'm4', 'm7', 'm8', 'm9', 'm14', 'm47', 'm78', 'm89'],
                               input_norm_type='z_score', target_norm_type='raw', norm_config=None,
                               interpolation='area',
                               use_pdn_density=False,use_comprehensive_feature=False,**kwargs):
    """Build one evaluation IRDropDataset over the ICCAD-2023 hidden set.

    Only `use_pad_distance` is read from **kwargs.
    NOTE(review): `train` is accepted but the dataset is always built with
    train=False (no augmentation) — confirm intended.
    """
    # Each subdirectory of root_path is one design/case folder.
    selected_folders = os.listdir(root_path)
    dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=selected_folders,
        img_size=img_size,
        train=False,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        return_case = return_case,
        use_pdn_density=use_pdn_density,
        use_pad_distance=kwargs.get('use_pad_distance',False),
        use_comprehensive_feature=use_comprehensive_feature,
        interpolation=interpolation
    )
    print("Dataset length of iccad-hidden:", len(dataset))
    return dataset
483
+
484
def build_dataset_iccad_fake(root_path='', img_size=384, train=True,
                             in_ch=25, use_raw=False,return_case=False,
                             target_layers=['m1', 'm4', 'm7', 'm8', 'm9', 'm14', 'm47', 'm78', 'm89'],
                             input_norm_type='z_score', target_norm_type='raw', norm_config=None,
                             use_pdn_density=False, use_comprehensive_feature=False,**kwargs):
    """Build (train, val) splits over the ICCAD-2023 fake-circuit set only.

    NOTE(review): `train`, `return_case` and **kwargs are accepted but unused,
    and the train dataset hard-codes use_raw=False — confirm intended.
    """
    train_dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=[
            '/data/ICCAD_2023/fake-circuit-data_20230623/fake-circuit-data-npy'
        ],
        img_size=img_size,
        train=True,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=False,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        use_pdn_density=use_pdn_density,
        use_comprehensive_feature=use_comprehensive_feature
    )

    val_dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=[
            '/data/ICCAD_2023/fake-circuit-data_20230623/fake-circuit-data-npy'
        ],
        img_size=img_size,
        train=False,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config ,
        use_pdn_density=use_pdn_density,
        use_comprehensive_feature=use_comprehensive_feature
    )
    print("Dataset length:", len(train_dataset))
    return split_train_val(train_dataset,val_dataset)
524
+
525
+
526
def build_dataset_began_iccad_fake_real(root_path='', img_size=384, train=True,
                                        in_ch=25, use_raw=False, use_raw_target=False,return_case=False,
                                        target_layers= ['m1', 'm4', 'm7', 'm8', 'm9', 'm14', 'm47', 'm78', 'm89'],
                                        input_norm_type='z_score', target_norm_type='raw', norm_config=None,
                                        interpolation='bicubic',
                                        use_pdn_density=False,use_comprehensive_feature=False,**kwargs):
    """Train on BeGAN set1/set2 + ICCAD fake circuits; validate on ICCAD real circuits.

    `use_raw_target` controls the training dataset's raw mode; `use_raw` and
    `return_case` apply only to the real-circuit validation dataset.
    NOTE(review): **kwargs is accepted but never forwarded — confirm intended.
    """
    train_dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=[
            '/data/BeGAN-circuit-benchmarks/nangate45/set1_numpy/data',
            '/data/BeGAN-circuit-benchmarks/nangate45/set2_numpy/data',
            '/data/ICCAD_2023/fake-circuit-data_20230623/fake-circuit-data-npy'
        ],
        img_size=img_size,
        train=train,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw_target,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        use_pdn_density=use_pdn_density,
        use_comprehensive_feature=use_comprehensive_feature,
        interpolation=interpolation
    )

    # Validation always comes from the real-circuit root — the caller's
    # root_path is replaced here on purpose.
    root_path='/data/ICCAD_2023/real-circuit-data_20230615'
    selected_folders = os.listdir(root_path)
    val_dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=selected_folders,
        img_size=img_size,
        train=False,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        return_case = return_case,
        use_pdn_density=use_pdn_density,
        use_comprehensive_feature=use_comprehensive_feature,
        interpolation=interpolation
    )
    print("Dataset length of iccad-real :", len(val_dataset))
    print("Dataset length of began-iccad-fake :", len(train_dataset))
    return train_dataset, val_dataset
573
+
574
def build_dataset_began_asap7(root_path='/data/BeGAN-circuit-benchmarks/asap7' , img_size=512, train=True,
                              in_ch=12, use_raw=False, use_raw_target=False,return_case=False,
                              target_layers= ['m1', 'm4', 'm7', 'm8', 'm9', 'm14', 'm47', 'm78', 'm89'],
                              input_norm_type='global_max', target_norm_type='global_max', norm_config=None,
                              use_pdn_density=False,use_comprehensive_feature=False):
    """Build the BeGAN ASAP7 dataset (asap7=True: voltage targets; eff_dist
    and pdn_density channels are disabled inside IRDropDataset).

    NOTE(review): the `target_layers` argument is immediately overwritten with
    the ASAP7 layer list below, and `train`, `use_raw`, `return_case`,
    `use_pdn_density` are never forwarded (train is hard-coded to True) —
    confirm intended.
    """
    target_layers = ['m2', 'm5', 'm6', 'm7', 'm8', 'm25', 'm56', 'm67', 'm78']
    dataset = IRDropDataset(
        root_path=root_path,
        selected_folders=['BeGAN'],
        img_size=img_size,
        train=True,
        target_layers=target_layers,
        in_ch=in_ch,
        use_raw=use_raw_target,
        input_norm_type=input_norm_type,
        target_norm_type=target_norm_type,
        norm_config=norm_config,
        use_comprehensive_feature=use_comprehensive_feature,
        asap7=True,
    )
    return dataset
595
+
596
if __name__ == '__main__':
    # Smoke test: build the 29-channel BeGAN+ICCAD-fake training split and
    # iterate a handful of samples, printing tensor shapes.
    import sys
    sys.path.append('/IR_Drop_prior_study/XICCAD')
    from config import *
    config_path = "/IR_Drop_prior_study/XICCAD/configs/cfirst/began_iccad_fake/stats_1um.json"
    with open(config_path, "r") as f:
        json_data = json.load(f)
    norm_cfg = map_json_to_config(json_data)
    def test_began_iccad():
        # 29 channels = current(1) + pad(1) + pdn(1) + comprehensive(23) +
        # moran/geary/gstar(3); resistance disabled, so target_layers=[].
        ds,_ = build_dataset_began_iccad_fake(
            img_size=512, train=True,
            in_ch=29, use_raw=False,
            input_norm_type='g_max',
            target_norm_type='g_max',
            norm_config=norm_cfg,
            target_layers=[],
            use_current=True,                 # 1
            use_pad_distance=True,            # 1
            use_pdn_density=True,             # 1
            use_resistance=False,             # 12
            use_comprehensive_feature=True,   # 23
            use_moran=True,                   # 1
            use_geary=True,                   # 1
            use_gstar=True,                   # 1
        )
        # ds = build_dataset_began_iccad_fake()
        for i in range(ds.__len__()):
            sample_input, sample_target = ds[i]
            print("Input shape:", sample_input.shape)
            print("Target shape:", sample_target.shape)
            if i == 10: break
    test_began_iccad()
tools/make_samples.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # tools/make_samples.py
2
+ """
3
+ ์˜ˆ์ œ:
4
+ cd waca_unet_space
5
+ python tools/make_samples.py --root /data/ICCAD_2023/hidden-real-circuit-data \
6
+ --configs_path configs/began_iccad_fake \
7
+ --dataset iccad_hidden \
8
+ --indices 0 1 2 3 4
9
+
10
+ - build_dataset_iccad_hidden / build_dataset_iccad_real ์„ ์ด์šฉํ•ด์„œ
11
+ (input, target, casename)์„ ๊ฐ€์ ธ์˜จ ๋’ค, input๋งŒ samples/ ํด๋”์— ์ €์žฅ.
12
+ - input์€ IRDropDataset์—์„œ ๋ฐ˜ํ™˜ํ•˜๋Š” (C,H,W) ํ…์„œ(์ •๊ทœํ™” ์™„๋ฃŒ)๋ฅผ ๊ทธ๋Œ€๋กœ npy๋กœ ์ €์žฅ.
13
+ """
14
+ import os
15
+ import argparse
16
+ import numpy as np
17
+ import torch
18
+
19
+ from config import get_config
20
+ from ir_dataset import (
21
+ build_dataset_iccad_hidden,
22
+ build_dataset_iccad_real,
23
+ )
24
+
25
+
26
def parse_args():
    """Build and parse command-line options for sample preparation.

    Returns:
        argparse.Namespace with root, dataset, img_size, in_ch, configs_path,
        unit, indices and out_dir.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--root", type=str,
        default='/data/ICCAD_2023/hidden-real-circuit-data',
        help="ICCAD hidden/real root path. ์˜ˆ: /data/ICCAD_2023/hidden-real-circuit-data or /data/ICCAD_2023/real-circuit-data",
    )
    parser.add_argument(
        "--dataset", type=str,
        choices=["iccad_hidden", "iccad_real"],
        default="iccad_real",
    )
    parser.add_argument("--img_size", type=int, default=384)
    parser.add_argument("--in_ch", type=int, default=25)
    parser.add_argument(
        "--configs_path", type=str,
        default="/workspace/IR_Drop_prior_study/XICCAD/configs/cfirst/began_iccad_fake/stats_1um.json",
        help="stats_1um.json ์ด ๋“ค์–ด์žˆ๋Š” ํด๋”",
    )
    parser.add_argument("--unit", type=str, default="1um")
    parser.add_argument(
        "--indices", type=int, nargs="+",
        default=[0, 1, 2, 3, 4],
        help="์˜ˆ์ œ ์ƒ˜ํ”Œ๋กœ ๋ฝ‘์„ dataset ์ธ๋ฑ์Šค๋“ค",
    )
    parser.add_argument(
        "--out_dir", type=str,
        default="samples",
        help="npy๋ฅผ ์ €์žฅํ•  ๋””๋ ‰ํ† ๋ฆฌ",
    )
    return parser.parse_args()
75
+
76
+
77
def main():
    """Dump normalized input tensors of a few dataset samples as .npy files."""
    args = parse_args()
    os.makedirs(args.out_dir, exist_ok=True)

    # Same normalization statistics that were used during training
    # (signature of get_config follows the project's convention).
    norm_config = get_config(
        args.unit,
        configs_path=args.configs_path,
        dataset_name="began_iccad_fake",
    )

    shared_kwargs = {
        "img_size": args.img_size,
        "in_ch": args.in_ch,
        "train": False,
        "use_raw": False,  # z-score-normalized inputs, exactly as in training
        "input_norm_type": "z_score",
        "target_norm_type": "raw",
        "target_layers": [],  # combined IR-drop target (already merged in the npy)
        "use_pdn_density": True,
        "use_pad_distance": True,
        "use_comprehensive_feature": True,
        "norm_config": norm_config,
        "return_case": True,  # dataset yields (x, y, casename)
        "interpolation": "lanczos",
    }

    # Dispatch to the requested builder; both share the same kwargs.
    builder = build_dataset_iccad_hidden if args.dataset == "iccad_hidden" else build_dataset_iccad_real
    dataset = builder(root_path=args.root, **shared_kwargs)

    # Some build_dataset_* helpers return a (train, val) pair — use the
    # val split for sample extraction in that case.
    if isinstance(dataset, (tuple, list)) and len(dataset) == 2:
        dataset = dataset[1]

    print(f"Dataset length: {len(dataset)}")

    for idx in args.indices:
        if idx >= len(dataset):
            print(f"[WARN] index {idx} is out of range, skip.")
            continue

        item = dataset[idx]
        if len(item) == 3:
            x, _, casename = item
        else:
            x, _ = item
            casename = f"idx{idx}"

        # Save the (C, H, W) input exactly as the dataset returns it.
        arr = x.detach().cpu().numpy() if isinstance(x, torch.Tensor) else np.asarray(x)

        out_path = os.path.join(args.out_dir, f"{casename}_input.npy")
        np.save(out_path, arr)
        print(f"Saved: {out_path} (shape={arr.shape})")

    print("Done.")


if __name__ == "__main__":
    main()
tools/samples/testcase10_input.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e5bf6e1ada47874a649d57011f58d72f6553e96315f329cc17235d3cefd933e4
3
+ size 14745728
tools/samples/testcase13_input.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:04064bde7d4bafbcaffdeb8e040eedca5a542d087a83e57cddf3d41fcd6c1cbe
3
+ size 14745728
tools/samples/testcase14_input.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5bad9477a3ab46e1980159f777627ac9dd8617af2e89e843969fa6011476b613
3
+ size 14745728
tools/samples/testcase15_input.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:019f54b8d4ffd73a39445192869822a365e84b48ecdb1b55cc6c44f655a22d70
3
+ size 14745728
tools/samples/testcase16_input.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:84ffeaf88986476046f0bb77dd6daaea77729164eb18d75f2a66345066c2b4b1
3
+ size 14745728