|
|
|
|
|
"""Summarize sh001_29b345d42a results and generate paper-ready plots.""" |
|
|
from __future__ import annotations |
|
|
|
|
|
import csv |
|
|
import math |
|
|
import re |
|
|
from collections import Counter |
|
|
from pathlib import Path |
|
|
|
|
|
import matplotlib |
|
|
matplotlib.use("Agg") |
|
|
import matplotlib.pyplot as plt |
|
|
|
|
|
# All paths are hard-coded for the sh001_29b345d42a run of the nangate45/gcd flow.
_AGENT_ROOT = Path("/Users/thalia/Desktop/EdgePPAgent/edgeeda-agent")
_FLOW_DIR = _AGENT_ROOT / "OpenROAD-flow-scripts/flow"
_RUN = "nangate45/gcd/sh001_29b345d42a"

# Flow output directories for this run.
RESULT_DIR = _FLOW_DIR / "results" / _RUN
REPORT_DIR = _FLOW_DIR / "reports" / _RUN
LOG_DIR = _FLOW_DIR / "logs" / _RUN

# Individual run artifacts consumed by the parsers below.
DEF_PATH = RESULT_DIR / "6_final.def"
V_PATH = RESULT_DIR / "6_final.v"
MEM_PATH = RESULT_DIR / "mem.json"
FINISH_RPT_PATH = REPORT_DIR / "6_finish.rpt"
REPORT_LOG_PATH = LOG_DIR / "6_report.log"

# Output destinations: paper figures, CSV summary, and the LaTeX results table.
OUT_FIG_DIR = _AGENT_ROOT / "IEEE_EdgeEDA_Agent_ISVLSI/figures"
OUT_CSV = _AGENT_ROOT / "runs/sh001_29b345d42a_summary.csv"
OUT_TEX = _AGENT_ROOT / "IEEE_EdgeEDA_Agent_ISVLSI/gcd_sh001_results_table.tex"
|
|
|
|
|
|
|
|
def parse_def_metrics(def_text: str) -> dict:
    """Extract header-level metrics from a DEF file's text.

    Returns a dict with whichever of these keys could be parsed:
    ``units_per_micron``, ``die_x0/die_y0/die_x1/die_y1``, ``components``,
    ``pins``, ``nets``, ``rows``, plus the derived ``die_width_um``,
    ``die_height_um`` and ``die_area_um2`` when both the scale factor and
    the die box are present.
    """
    out: dict = {}

    def grab(pattern: str):
        # All DEF header statements are anchored at line starts.
        return re.search(pattern, def_text, re.M)

    units = grab(r"^UNITS\s+DISTANCE\s+MICRONS\s+(\d+)\s*;")
    if units is not None:
        out["units_per_micron"] = int(units.group(1))

    die = grab(r"^DIEAREA\s*\(\s*(\d+)\s+(\d+)\s*\)\s*\(\s*(\d+)\s+(\d+)\s*\)\s*;")
    if die is not None:
        out["die_x0"], out["die_y0"], out["die_x1"], out["die_y1"] = map(int, die.groups())

    for key, pattern in (
        ("components", r"^COMPONENTS\s+(\d+)\s*;"),
        ("pins", r"^PINS\s+(\d+)\s*;"),
        ("nets", r"^NETS\s+(\d+)\s*;"),
    ):
        found = grab(pattern)
        if found is not None:
            out[key] = int(found.group(1))

    out["rows"] = len(re.findall(r"^ROW\s+", def_text, re.M))

    # Convert the DEF integer coordinates to microns once both pieces are known.
    if "units_per_micron" in out and "die_x1" in out:
        scale = out["units_per_micron"]
        width = (out["die_x1"] - out["die_x0"]) / scale
        height = (out["die_y1"] - out["die_y0"]) / scale
        out["die_width_um"] = width
        out["die_height_um"] = height
        out["die_area_um2"] = width * height

    return out
|
|
|
|
|
|
|
|
def parse_def_cell_counts(def_text: str) -> Counter:
    """Count instances per master cell inside the DEF COMPONENTS section.

    Returns an empty Counter when no COMPONENTS section is found.
    """
    body = re.search(r"^COMPONENTS\s+\d+\s*;\n(.*?)\nEND COMPONENTS", def_text, re.S | re.M)
    if body is None:
        return Counter()
    # Each component entry looks like "- <instance> <master> ..." on its own line;
    # the master name is the third whitespace-separated field.
    masters = (
        fields[2]
        for fields in (raw.strip().split() for raw in body.group(1).splitlines())
        if fields and fields[0].startswith("-") and len(fields) >= 3
    )
    return Counter(masters)
|
|
|
|
|
|
|
|
def parse_netlist_cell_counts(v_text: str) -> Counter:
    """Count instances per cell type in a gate-level Verilog netlist.

    Matches lines of the form "<cell> <instance> (" and skips Verilog
    structural keywords that would otherwise look like instantiations.
    """
    keywords = {"module", "endmodule", "input", "output", "wire", "reg", "assign", "always"}
    inst_re = re.compile(r"^\s*([A-Za-z_][\w$]*)\s+([A-Za-z_][\w$]*)\s*\(", re.M)
    return Counter(cell for cell, _inst in inst_re.findall(v_text) if cell not in keywords)
|
|
|
|
|
|
|
|
def parse_finish_rpt(text: str) -> dict:
    """Extract timing summary numbers from the 6_finish report text.

    Keys produced when present: ``tns_ns``, ``wns_ns``, ``worst_slack_ns``,
    ``clock_period_ns`` and ``clock_fmax_mhz``. Empty input yields {}.
    """
    out: dict = {}
    if not text:
        return out

    for key, pattern in (
        ("tns_ns", r"tns max\s+([+-]?\d+(?:\.\d+)?)"),
        ("wns_ns", r"wns max\s+([+-]?\d+(?:\.\d+)?)"),
        ("worst_slack_ns", r"worst slack max\s+([+-]?\d+(?:\.\d+)?)"),
    ):
        found = re.search(pattern, text)
        if found is not None:
            out[key] = float(found.group(1))

    # Period and fmax are reported together on a single line.
    clock = re.search(r"period_min\s*=\s*([0-9.]+)\s+fmax\s*=\s*([0-9.]+)", text)
    if clock is not None:
        out["clock_period_ns"] = float(clock.group(1))
        out["clock_fmax_mhz"] = float(clock.group(2))

    return out
|
|
|
|
|
|
|
|
def parse_report_log(text: str) -> dict:
    """Extract area, power, IR-drop and cell-total figures from the 6_report log.

    Keys produced when present: ``design_area_um2``, ``design_utilization_pct``,
    ``total_power_w``, ``ir_drop_avg_v``, ``ir_drop_worst_v``, ``ir_drop_pct``,
    ``cell_total_count`` and ``cell_total_area_um2``. Empty input yields {}.
    """
    out: dict = {}
    if not text:
        return out

    # Area and utilization share one line.
    area = re.search(r"Design area\s+([0-9.]+)\s+um\^2\s+([0-9.]+)% utilization", text)
    if area is not None:
        out["design_area_um2"] = float(area.group(1))
        out["design_utilization_pct"] = float(area.group(2))

    for key, pattern in (
        ("total_power_w", r"Total power\s*:\s*([0-9.eE+-]+)\s*W"),
        ("ir_drop_avg_v", r"Average IR drop\s*:\s*([0-9.eE+-]+)\s*V"),
        ("ir_drop_worst_v", r"Worstcase IR drop:\s*([0-9.eE+-]+)\s*V"),
        ("ir_drop_pct", r"Percentage drop\s*:\s*([0-9.eE+-]+)\s*%"),
    ):
        found = re.search(pattern, text)
        if found is not None:
            out[key] = float(found.group(1))

    # Closing row of the cell-usage table: "Total <count> <area>" on its own line.
    totals = re.search(r"^\s*Total\s+(\d+)\s+([0-9.]+)\s*$", text, re.M)
    if totals is not None:
        out["cell_total_count"] = int(totals.group(1))
        out["cell_total_area_um2"] = float(totals.group(2))

    return out
|
|
|
|
|
|
|
|
def classify_cells(counts: Counter) -> dict:
    """Bucket per-master instance counts into broad categories.

    Categories are filler, tap, sequential, combinational and other,
    decided by substring/pattern checks on the upper-cased master name.
    """
    comb_keywords = ("NAND", "NOR", "AOI", "OAI", "INV", "BUF", "XOR", "XNOR")
    buckets = dict.fromkeys(("filler", "tap", "sequential", "combinational", "other"), 0)
    for name, n in counts.items():
        upper = name.upper()
        if "FILL" in upper:
            bucket = "filler"
        elif "TAP" in upper:
            bucket = "tap"
        elif "DFF" in upper or "LATCH" in upper:
            bucket = "sequential"
        elif re.match(r"[A-Z]+\d+_X\d+", upper) or any(k in upper for k in comb_keywords):
            # Nangate-style names like AND2_X1, or a recognised gate keyword.
            bucket = "combinational"
        else:
            bucket = "other"
        buckets[bucket] += n
    return buckets
|
|
|
|
|
|
|
|
def write_summary_csv(metrics: dict, def_counts: Counter, v_counts: Counter, categories: dict) -> None:
    """Write the run summary as a two-column metric/value CSV at OUT_CSV.

    Emits known metrics in a fixed presentation order (skipping absent ones),
    then instance totals from the DEF and netlist, then per-category counts.
    """
    ordered_keys = (
        "components",
        "pins",
        "nets",
        "rows",
        "units_per_micron",
        "die_width_um",
        "die_height_um",
        "die_area_um2",
        "tns_ns",
        "wns_ns",
        "worst_slack_ns",
        "clock_period_ns",
        "clock_fmax_mhz",
        "design_area_um2",
        "design_utilization_pct",
        "total_power_w",
        "ir_drop_avg_v",
        "ir_drop_worst_v",
        "ir_drop_pct",
        "cell_total_count",
        "cell_total_area_um2",
    )
    OUT_CSV.parent.mkdir(parents=True, exist_ok=True)
    with OUT_CSV.open("w", newline="") as handle:
        writer = csv.writer(handle)
        writer.writerow(["metric", "value"])
        writer.writerows([key, metrics[key]] for key in ordered_keys if key in metrics)
        writer.writerow(["def_instance_total", sum(def_counts.values())])
        writer.writerow(["netlist_instance_total", sum(v_counts.values())])
        writer.writerows([f"category_{name}", value] for name, value in categories.items())
|
|
|
|
|
|
|
|
def write_latex_table(metrics: dict, def_counts: Counter, categories: dict) -> None:
    r"""Render the post-route summary as a booktabs LaTeX table at OUT_TEX.

    Bug fix: environment and rule macros were previously emitted as raw
    strings with a doubled backslash (``\\begin{table}``, ``\\toprule``, ...),
    which LaTeX interprets as a line break followed by literal text, producing
    a broken table. They now use a single backslash. Row terminators (``\\``)
    are intentionally kept as double backslashes.

    Missing metrics render as "n/a"; category percentages are shares of the
    total DEF instance count.
    """
    OUT_TEX.parent.mkdir(parents=True, exist_ok=True)
    total = sum(def_counts.values())

    def pct(x):
        # Share of all DEF instances; guard against an empty COMPONENTS section.
        return 0.0 if total == 0 else (100.0 * x / total)

    def fmt_num(value, fmt: str) -> str:
        # Format a possibly-missing metric; "n/a" when absent or non-numeric.
        try:
            return format(float(value), fmt)
        except (TypeError, ValueError):
            return "n/a"

    die_width = fmt_num(metrics.get("die_width_um"), ".3f")
    die_height = fmt_num(metrics.get("die_height_um"), ".3f")
    die_area = fmt_num(metrics.get("die_area_um2"), ".2f")
    wns = fmt_num(metrics.get("wns_ns"), ".3f")
    tns = fmt_num(metrics.get("tns_ns"), ".3f")
    worst_slack = fmt_num(metrics.get("worst_slack_ns"), ".3f")
    period = fmt_num(metrics.get("clock_period_ns"), ".3f")
    fmax = fmt_num(metrics.get("clock_fmax_mhz"), ".2f")
    design_area = fmt_num(metrics.get("design_area_um2"), ".2f")
    util = fmt_num(metrics.get("design_utilization_pct"), ".1f")
    power_w = metrics.get("total_power_w")
    # Report power in mW for readability; source value is in W.
    power_mw = fmt_num(power_w * 1e3 if power_w is not None else None, ".3f")
    ir_avg = fmt_num(metrics.get("ir_drop_avg_v"), ".4f")
    ir_worst = fmt_num(metrics.get("ir_drop_worst_v"), ".4f")
    ir_pct = fmt_num(metrics.get("ir_drop_pct"), ".2f")

    lines = [
        r"\begin{table}[t]",
        r"\caption{Post-route summary for \texttt{nangate45/gcd/sh001\_29b345d42a}.}",
        r"\label{tab:postroute_sh001}",
        r"\centering",
        r"\small",
        r"\begin{tabular}{@{}ll@{}}",
        r"\toprule",
        r"Metric & Value \\",
        r"\midrule",
        f"Components & {metrics.get('components', 'n/a')} \\\\",
        f"Pins / nets & {metrics.get('pins', 'n/a')} / {metrics.get('nets', 'n/a')} \\\\",
        f"Rows & {metrics.get('rows', 'n/a')} \\\\",
        rf"Die size ($\mu m$) & {die_width} $\times$ {die_height} \\",
        rf"Die area ($\mu m^2$) & {die_area} \\",
        f"WNS / TNS / worst (ns) & {wns} / {tns} / {worst_slack} \\\\",
        f"Clock period / fmax & {period} ns / {fmax} MHz \\\\",
        rf"Design area / util & {design_area} $\mu m^2$ / {util}\% \\",
        f"Total power & {power_mw} mW \\\\",
        rf"IR drop avg / worst / \% & {ir_avg} / {ir_worst} / {ir_pct} \\",
        f"Filler / tap cells & {categories['filler']} ({pct(categories['filler']):.1f}\\%) / {categories['tap']} ({pct(categories['tap']):.1f}\\%) \\\\",
        f"Sequential / combinational & {categories['sequential']} ({pct(categories['sequential']):.1f}\\%) / {categories['combinational']} ({pct(categories['combinational']):.1f}\\%) \\\\",
        r"\bottomrule",
        r"\end{tabular}",
        r"\end{table}",
        "",
    ]
    OUT_TEX.write_text("\n".join(lines))
|
|
|
|
|
|
|
|
def plot_top_cell_types(def_counts: Counter) -> None:
    """Bar chart of the ten most common DEF cell masters, saved as PNG and PDF."""
    OUT_FIG_DIR.mkdir(parents=True, exist_ok=True)
    ranked = def_counts.most_common(10)
    names = [cell for cell, _count in ranked]
    heights = [count for _cell, count in ranked]

    fig, ax = plt.subplots(figsize=(7.2, 4.2))
    rects = ax.bar(names, heights, color="#4B8BBE")
    ax.set_ylabel("Instance count")
    ax.set_title("Top cell types (post-route DEF)")
    ax.tick_params(axis="x", rotation=45, labelsize=8)
    # Annotate each bar with its exact instance count.
    for rect, height in zip(rects, heights):
        ax.text(
            rect.get_x() + rect.get_width() / 2,
            rect.get_height(),
            str(height),
            ha="center",
            va="bottom",
            fontsize=7,
        )
    fig.tight_layout()
    for ext in ("png", "pdf"):
        fig.savefig(OUT_FIG_DIR / f"gcd_sh001_celltype_top10.{ext}", dpi=300)
    plt.close(fig)
|
|
|
|
|
|
|
|
def plot_category_pie(categories: dict) -> None:
    """Pie chart of the cell-category mix, saved as PNG and PDF.

    Zero-valued slices get no percentage label to avoid clutter.
    """
    OUT_FIG_DIR.mkdir(parents=True, exist_ok=True)
    # Fixed display order; keys match the dict produced by classify_cells().
    order = ("combinational", "sequential", "filler", "tap", "other")
    labels = [name.capitalize() for name in order]
    values = [categories[name] for name in order]

    fig, ax = plt.subplots(figsize=(5.0, 4.2))
    ax.pie(values, labels=labels, autopct=lambda share: f"{share:.1f}%" if share > 0 else "")
    ax.set_title("Cell category mix (post-route DEF)")
    fig.tight_layout()
    for ext in ("png", "pdf"):
        fig.savefig(OUT_FIG_DIR / f"gcd_sh001_celltype_categories.{ext}", dpi=300)
    plt.close(fig)
|
|
|
|
|
|
|
|
def main() -> None:
    """Parse the run artifacts and emit the CSV summary, LaTeX table, and plots."""
    def_text = DEF_PATH.read_text()
    v_text = V_PATH.read_text()

    # Merge DEF geometry with timing (finish report) and area/power (report log);
    # missing report files contribute nothing.
    metrics = parse_def_metrics(def_text)
    metrics.update(parse_finish_rpt(FINISH_RPT_PATH.read_text() if FINISH_RPT_PATH.exists() else ""))
    metrics.update(parse_report_log(REPORT_LOG_PATH.read_text() if REPORT_LOG_PATH.exists() else ""))

    def_counts = parse_def_cell_counts(def_text)
    v_counts = parse_netlist_cell_counts(v_text)
    categories = classify_cells(def_counts)

    write_summary_csv(metrics, def_counts, v_counts, categories)
    write_latex_table(metrics, def_counts, categories)
    plot_top_cell_types(def_counts)
    plot_category_pie(categories)
|
|
|
|
|
|
|
|
# Run the full summarize-and-plot pipeline when executed as a script.
if __name__ == "__main__":
    main()
|
|
|