vxkyyy committed on
Commit
1d4d3e9
·
1 Parent(s): 3e6106e

feat: add Supabase auth + 5 new pipeline stages

Browse files
benchmark/README.md DELETED
@@ -1,66 +0,0 @@
1
- # AgentIC Benchmark Suite
2
-
3
- Drop this entire `benchmark/` folder into your AgentIC root directory and run.
4
-
5
- ## Quick Start
6
-
7
- ```bash
8
- # From AgentIC root directory:
9
-
10
- # Run all 10 designs (full pipeline including OpenLane)
11
- python3 benchmark/run_benchmark.py
12
-
13
- # Run RTL-only (faster, skips physical flow — good for first pass)
14
- python3 benchmark/run_benchmark.py --skip-openlane
15
-
16
- # Run a single design to test
17
- python3 benchmark/run_benchmark.py --design uart_tx
18
-
19
- # Use GF180 instead of Sky130
20
- python3 benchmark/run_benchmark.py --pdk gf180
21
-
22
- # Run each design 3 times (for statistical reliability)
23
- python3 benchmark/run_benchmark.py --attempts 3
24
- ```
25
-
26
- ## What It Produces
27
-
28
- After running you get two files in `benchmark/results/`:
29
-
30
- - `benchmark_YYYY-MM-DD.md` — human readable report with tables, analysis, bottleneck identification
31
- - `benchmark_YYYY-MM-DD.json` — raw data for programmatic use
32
-
33
- Results are also saved after every single build as `interim_YYYY-MM-DD.json`
34
- so you never lose data if a build crashes halfway through.
35
-
36
- ## The 10 Test Designs
37
-
38
- | # | Design | Complexity | Tests |
39
- |---|--------|-----------|-------|
40
- | 1 | counter_8bit | Simple | Basic sequential logic |
41
- | 2 | uart_tx | Simple | Serial communication |
42
- | 3 | pwm_generator | Simple | Configurable output + registers |
43
- | 4 | spi_master | Simple | SPI protocol state machine |
44
- | 5 | fifo_sync | Simple | Memory + pointer logic |
45
- | 6 | alu_8bit | Medium | Combinational logic, flags |
46
- | 7 | i2c_master | Medium | Complex protocol, open-drain |
47
- | 8 | apb_timer | Medium | APB bus interface + interrupt |
48
- | 9 | vga_controller | Medium | Timing generation, counters |
49
- | 10 | wishbone_uart | Complex | Full bus interface + FIFOs |
50
-
51
- ## Which Steps Matter in Industry
52
-
53
- | Stage | Matters For | Skip OK? |
54
- |-------|------------|----------|
55
- | RTL_GEN + RTL_FIX | Everything — this is the chip | Never |
56
- | VERIFICATION | Proving functionality | Never |
57
- | HARDENING | Physical layout | Never |
58
- | SIGNOFF | Fab acceptance | Never |
59
- | FORMAL_VERIFY | Safety-critical designs | Simple designs: yes |
60
- | COVERAGE_CHECK | Test completeness | If sim passes: yes |
61
- | REGRESSION | Robustness | Yes |
62
- | ECO_PATCH | Post-signoff fixes | First attempt: yes |
63
- | CONVERGENCE | Timing closure | Simple designs: yes |
64
-
65
- **The 4 things a fab actually cares about:**
66
- RTL correctness → Functional simulation → Place & Route → DRC/LVS/STA Signoff
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
benchmark/results/benchmark_2026-03-06.json DELETED
@@ -1,889 +0,0 @@
1
- {
2
- "meta": {
3
- "date": "2026-03-06",
4
- "pdk": "sky130",
5
- "pass_rate_pct": 20.0
6
- },
7
- "results": [
8
- {
9
- "design_id": "counter8",
10
- "complexity": "Simple",
11
- "attempt": 1,
12
- "passed": true,
13
- "timed_out": false,
14
- "failed_stage": null,
15
- "failed_stage_name": null,
16
- "failed_stage_critical": null,
17
- "failed_reason": null,
18
- "completed_stages": [
19
- "INIT",
20
- "SPEC",
21
- "RTL_GEN",
22
- "RTL_FIX",
23
- "VERIFICATION",
24
- "FORMAL_VERIFY",
25
- "REGRESSION"
26
- ],
27
- "completed_stages_count": 7,
28
- "artifacts": {
29
- "LOG": [
30
- {
31
- "file": "counter8.log",
32
- "path": "/home/vickynishad/OpenLane/designs/counter8/counter8.log",
33
- "size_bytes": 32765,
34
- "size_human": "32.0 KB"
35
- }
36
- ],
37
- "CONFIG": [
38
- {
39
- "file": "counter8_tb_compile_gate.json",
40
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_compile_gate.json",
41
- "size_bytes": 2128,
42
- "size_human": "2.1 KB"
43
- },
44
- {
45
- "file": "counter8_tb_static_gate_attempt1.json",
46
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_static_gate_attempt1.json",
47
- "size_bytes": 258,
48
- "size_human": "258 B"
49
- },
50
- {
51
- "file": "counter8_formal_preflight.json",
52
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_formal_preflight.json",
53
- "size_bytes": 89,
54
- "size_human": "89 B"
55
- },
56
- {
57
- "file": "counter8_tb_repair_action_attempt1.json",
58
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_repair_action_attempt1.json",
59
- "size_bytes": 114,
60
- "size_human": "114 B"
61
- },
62
- {
63
- "file": "counter8_tb_static_gate_attempt2.json",
64
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_static_gate_attempt2.json",
65
- "size_bytes": 258,
66
- "size_human": "258 B"
67
- },
68
- {
69
- "file": "counter8_tb_compile_gate_attempt2.json",
70
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_compile_gate_attempt2.json",
71
- "size_bytes": 2128,
72
- "size_human": "2.1 KB"
73
- },
74
- {
75
- "file": "counter8_tb_repair_action.json",
76
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_repair_action.json",
77
- "size_bytes": 114,
78
- "size_human": "114 B"
79
- },
80
- {
81
- "file": "counter8_tb_compile_gate_attempt1.json",
82
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_compile_gate_attempt1.json",
83
- "size_bytes": 2466,
84
- "size_human": "2.4 KB"
85
- },
86
- {
87
- "file": "counter8_tb_static_gate.json",
88
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_static_gate.json",
89
- "size_bytes": 258,
90
- "size_human": "258 B"
91
- }
92
- ],
93
- "RTL": [
94
- {
95
- "file": "counter8.v",
96
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8.v",
97
- "size_bytes": 1433,
98
- "size_human": "1.4 KB"
99
- },
100
- {
101
- "file": "counter8_sby_check.sv",
102
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_sby_check.sv",
103
- "size_bytes": 1160,
104
- "size_human": "1.1 KB"
105
- },
106
- {
107
- "file": "counter8_sva.sv",
108
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_sva.sv",
109
- "size_bytes": 1046,
110
- "size_human": "1.0 KB"
111
- }
112
- ],
113
- "TESTBENCH": [
114
- {
115
- "file": "counter8_tb.v",
116
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb.v",
117
- "size_bytes": 2478,
118
- "size_human": "2.4 KB"
119
- }
120
- ]
121
- },
122
- "artifact_types": [
123
- "LOG",
124
- "CONFIG",
125
- "RTL",
126
- "TESTBENCH"
127
- ],
128
- "rtl_generated": true,
129
- "testbench_generated": true,
130
- "gds_generated": false,
131
- "duration_minutes": 5.0,
132
- "timestamp": "2026-03-06T01:26:38.860725",
133
- "pdk": "sky130"
134
- },
135
- {
136
- "design_id": "uart_tx",
137
- "complexity": "Simple",
138
- "attempt": 1,
139
- "passed": false,
140
- "timed_out": false,
141
- "failed_stage": "RTL_FIX",
142
- "failed_stage_name": "RTL Lint & Syntax Fix",
143
- "failed_stage_critical": true,
144
- "failed_reason": "[RTL_FIX] Semantic rigor gate failed. Attempting mechanical width auto-fix.",
145
- "completed_stages": [
146
- "INIT",
147
- "SPEC",
148
- "RTL_GEN",
149
- "RTL_FIX"
150
- ],
151
- "completed_stages_count": 4,
152
- "artifacts": {
153
- "LOG": [
154
- {
155
- "file": "uart_tx.log",
156
- "path": "/home/vickynishad/OpenLane/designs/uart_tx/uart_tx.log",
157
- "size_bytes": 72014,
158
- "size_human": "70.3 KB"
159
- }
160
- ],
161
- "RTL": [
162
- {
163
- "file": "uart_tx.v",
164
- "path": "/home/vickynishad/OpenLane/designs/uart_tx/src/uart_tx.v",
165
- "size_bytes": 2947,
166
- "size_human": "2.9 KB"
167
- }
168
- ]
169
- },
170
- "artifact_types": [
171
- "LOG",
172
- "RTL"
173
- ],
174
- "rtl_generated": true,
175
- "testbench_generated": false,
176
- "gds_generated": false,
177
- "duration_minutes": 7.9,
178
- "timestamp": "2026-03-06T01:31:40.190788",
179
- "pdk": "sky130"
180
- },
181
- {
182
- "design_id": "pwm_gen",
183
- "complexity": "Simple",
184
- "attempt": 1,
185
- "passed": false,
186
- "timed_out": false,
187
- "failed_stage": "VERIFICATION",
188
- "failed_stage_name": "Functional Simulation",
189
- "failed_stage_critical": true,
190
- "failed_reason": "[VERIFICATION] TB gate failed; applied deterministic auto-repair.",
191
- "completed_stages": [
192
- "INIT",
193
- "SPEC",
194
- "RTL_GEN",
195
- "RTL_FIX"
196
- ],
197
- "completed_stages_count": 4,
198
- "artifacts": {
199
- "LOG": [
200
- {
201
- "file": "pwm_gen.log",
202
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/pwm_gen.log",
203
- "size_bytes": 20937,
204
- "size_human": "20.4 KB"
205
- }
206
- ],
207
- "CONFIG": [
208
- {
209
- "file": "pwm_gen_tb_static_gate_attempt2.json",
210
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate_attempt2.json",
211
- "size_bytes": 258,
212
- "size_human": "258 B"
213
- },
214
- {
215
- "file": "pwm_gen_tb_compile_gate_attempt2.json",
216
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate_attempt2.json",
217
- "size_bytes": 3491,
218
- "size_human": "3.4 KB"
219
- },
220
- {
221
- "file": "pwm_gen_tb_static_gate.json",
222
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate.json",
223
- "size_bytes": 258,
224
- "size_human": "258 B"
225
- },
226
- {
227
- "file": "pwm_gen_tb_repair_action.json",
228
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_repair_action.json",
229
- "size_bytes": 114,
230
- "size_human": "114 B"
231
- },
232
- {
233
- "file": "pwm_gen_tb_compile_gate_attempt1.json",
234
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate_attempt1.json",
235
- "size_bytes": 3011,
236
- "size_human": "2.9 KB"
237
- },
238
- {
239
- "file": "pwm_gen_tb_compile_gate_attempt3.json",
240
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate_attempt3.json",
241
- "size_bytes": 3011,
242
- "size_human": "2.9 KB"
243
- },
244
- {
245
- "file": "pwm_gen_tb_static_gate_attempt3.json",
246
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate_attempt3.json",
247
- "size_bytes": 258,
248
- "size_human": "258 B"
249
- },
250
- {
251
- "file": "pwm_gen_tb_compile_gate.json",
252
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate.json",
253
- "size_bytes": 3011,
254
- "size_human": "2.9 KB"
255
- },
256
- {
257
- "file": "pwm_gen_tb_static_gate_attempt1.json",
258
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate_attempt1.json",
259
- "size_bytes": 258,
260
- "size_human": "258 B"
261
- },
262
- {
263
- "file": "pwm_gen_tb_repair_action_attempt1.json",
264
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_repair_action_attempt1.json",
265
- "size_bytes": 114,
266
- "size_human": "114 B"
267
- }
268
- ],
269
- "TESTBENCH": [
270
- {
271
- "file": "pwm_gen_tb.v",
272
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb.v",
273
- "size_bytes": 2208,
274
- "size_human": "2.2 KB"
275
- }
276
- ],
277
- "RTL": [
278
- {
279
- "file": "pwm_gen.v",
280
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen.v",
281
- "size_bytes": 915,
282
- "size_human": "915 B"
283
- }
284
- ]
285
- },
286
- "artifact_types": [
287
- "LOG",
288
- "CONFIG",
289
- "TESTBENCH",
290
- "RTL"
291
- ],
292
- "rtl_generated": true,
293
- "testbench_generated": true,
294
- "gds_generated": false,
295
- "duration_minutes": 0.5,
296
- "timestamp": "2026-03-06T01:39:32.846798",
297
- "pdk": "sky130"
298
- },
299
- {
300
- "design_id": "spi_master",
301
- "complexity": "Simple",
302
- "attempt": 1,
303
- "passed": false,
304
- "timed_out": false,
305
- "failed_stage": "FORMAL_VERIFY",
306
- "failed_stage_name": "Formal Verification",
307
- "failed_stage_critical": false,
308
- "failed_reason": "[FORMAL_VERIFY] Formal preflight failed: 1 issue(s).",
309
- "completed_stages": [
310
- "INIT",
311
- "SPEC",
312
- "RTL_GEN",
313
- "RTL_FIX",
314
- "VERIFICATION"
315
- ],
316
- "completed_stages_count": 5,
317
- "artifacts": {
318
- "LOG": [
319
- {
320
- "file": "spi_master.log",
321
- "path": "/home/vickynishad/OpenLane/designs/spi_master/spi_master.log",
322
- "size_bytes": 14056,
323
- "size_human": "13.7 KB"
324
- }
325
- ],
326
- "TESTBENCH": [
327
- {
328
- "file": "spi_master_tb.v",
329
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb.v",
330
- "size_bytes": 1700,
331
- "size_human": "1.7 KB"
332
- }
333
- ],
334
- "CONFIG": [
335
- {
336
- "file": "spi_master_tb_compile_gate.json",
337
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_compile_gate.json",
338
- "size_bytes": 2180,
339
- "size_human": "2.1 KB"
340
- },
341
- {
342
- "file": "spi_master_tb_static_gate_attempt1.json",
343
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_static_gate_attempt1.json",
344
- "size_bytes": 258,
345
- "size_human": "258 B"
346
- },
347
- {
348
- "file": "spi_master_tb_compile_gate_attempt1.json",
349
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_compile_gate_attempt1.json",
350
- "size_bytes": 2180,
351
- "size_human": "2.1 KB"
352
- },
353
- {
354
- "file": "spi_master_formal_preflight.json",
355
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_formal_preflight.json",
356
- "size_bytes": 292,
357
- "size_human": "292 B"
358
- },
359
- {
360
- "file": "spi_master_tb_static_gate.json",
361
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_static_gate.json",
362
- "size_bytes": 258,
363
- "size_human": "258 B"
364
- }
365
- ],
366
- "RTL": [
367
- {
368
- "file": "spi_master.v",
369
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master.v",
370
- "size_bytes": 4256,
371
- "size_human": "4.2 KB"
372
- },
373
- {
374
- "file": "spi_master_sva.sv",
375
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_sva.sv",
376
- "size_bytes": 1188,
377
- "size_human": "1.2 KB"
378
- }
379
- ]
380
- },
381
- "artifact_types": [
382
- "LOG",
383
- "TESTBENCH",
384
- "CONFIG",
385
- "RTL"
386
- ],
387
- "rtl_generated": true,
388
- "testbench_generated": true,
389
- "gds_generated": false,
390
- "duration_minutes": 1.3,
391
- "timestamp": "2026-03-06T01:40:00.129996",
392
- "pdk": "sky130"
393
- },
394
- {
395
- "design_id": "sync_fifo",
396
- "complexity": "Simple",
397
- "attempt": 1,
398
- "passed": false,
399
- "timed_out": false,
400
- "failed_stage": "RTL_FIX",
401
- "failed_stage_name": "RTL Lint & Syntax Fix",
402
- "failed_stage_critical": true,
403
- "failed_reason": "[RTL_FIX] Semantic rigor gate failed. Attempting mechanical width auto-fix.",
404
- "completed_stages": [
405
- "INIT",
406
- "SPEC",
407
- "RTL_GEN",
408
- "RTL_FIX"
409
- ],
410
- "completed_stages_count": 4,
411
- "artifacts": {
412
- "LOG": [
413
- {
414
- "file": "sync_fifo.log",
415
- "path": "/home/vickynishad/OpenLane/designs/sync_fifo/sync_fifo.log",
416
- "size_bytes": 53377,
417
- "size_human": "52.1 KB"
418
- }
419
- ],
420
- "RTL": [
421
- {
422
- "file": "sync_fifo.v",
423
- "path": "/home/vickynishad/OpenLane/designs/sync_fifo/src/sync_fifo.v",
424
- "size_bytes": 1299,
425
- "size_human": "1.3 KB"
426
- }
427
- ]
428
- },
429
- "artifact_types": [
430
- "LOG",
431
- "RTL"
432
- ],
433
- "rtl_generated": true,
434
- "testbench_generated": false,
435
- "gds_generated": false,
436
- "duration_minutes": 2.4,
437
- "timestamp": "2026-03-06T01:41:16.885242",
438
- "pdk": "sky130"
439
- },
440
- {
441
- "design_id": "alu8",
442
- "complexity": "Medium",
443
- "attempt": 1,
444
- "passed": false,
445
- "timed_out": false,
446
- "failed_stage": "FORMAL_VERIFY",
447
- "failed_stage_name": "Formal Verification",
448
- "failed_stage_critical": false,
449
- "failed_reason": "[FORMAL_VERIFY] Yosys SVA preflight failed. Regenerating SVA with error context.",
450
- "completed_stages": [
451
- "INIT",
452
- "SPEC",
453
- "RTL_GEN",
454
- "RTL_FIX",
455
- "VERIFICATION",
456
- "FORMAL_VERIFY"
457
- ],
458
- "completed_stages_count": 6,
459
- "artifacts": {
460
- "LOG": [
461
- {
462
- "file": "alu8.log",
463
- "path": "/home/vickynishad/OpenLane/designs/alu8/alu8.log",
464
- "size_bytes": 47547,
465
- "size_human": "46.4 KB"
466
- }
467
- ],
468
- "TESTBENCH": [
469
- {
470
- "file": "alu8_tb.v",
471
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb.v",
472
- "size_bytes": 2674,
473
- "size_human": "2.6 KB"
474
- }
475
- ],
476
- "CONFIG": [
477
- {
478
- "file": "alu8_tb_static_gate_attempt1.json",
479
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_static_gate_attempt1.json",
480
- "size_bytes": 258,
481
- "size_human": "258 B"
482
- },
483
- {
484
- "file": "alu8_tb_compile_gate.json",
485
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_compile_gate.json",
486
- "size_bytes": 13185,
487
- "size_human": "12.9 KB"
488
- },
489
- {
490
- "file": "alu8_coverage_attempt1.json",
491
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt1.json",
492
- "size_bytes": 850,
493
- "size_human": "850 B"
494
- },
495
- {
496
- "file": "alu8_coverage_latest.json",
497
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_latest.json",
498
- "size_bytes": 851,
499
- "size_human": "851 B"
500
- },
501
- {
502
- "file": "alu8_coverage_attempt2.json",
503
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt2.json",
504
- "size_bytes": 850,
505
- "size_human": "850 B"
506
- },
507
- {
508
- "file": "alu8_tb_compile_gate_attempt1.json",
509
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_compile_gate_attempt1.json",
510
- "size_bytes": 13185,
511
- "size_human": "12.9 KB"
512
- },
513
- {
514
- "file": "alu8_coverage_attempt3.json",
515
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt3.json",
516
- "size_bytes": 851,
517
- "size_human": "851 B"
518
- },
519
- {
520
- "file": "alu8_coverage_attempt4.json",
521
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt4.json",
522
- "size_bytes": 851,
523
- "size_human": "851 B"
524
- },
525
- {
526
- "file": "alu8_formal_preflight.json",
527
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_formal_preflight.json",
528
- "size_bytes": 89,
529
- "size_human": "89 B"
530
- },
531
- {
532
- "file": "alu8_tb_static_gate.json",
533
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_static_gate.json",
534
- "size_bytes": 258,
535
- "size_human": "258 B"
536
- }
537
- ],
538
- "RTL": [
539
- {
540
- "file": "alu8_sby_check.sv",
541
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_sby_check.sv",
542
- "size_bytes": 3908,
543
- "size_human": "3.8 KB"
544
- },
545
- {
546
- "file": "alu8_sva.sv",
547
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_sva.sv",
548
- "size_bytes": 1810,
549
- "size_human": "1.8 KB"
550
- },
551
- {
552
- "file": "alu8.v",
553
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8.v",
554
- "size_bytes": 1517,
555
- "size_human": "1.5 KB"
556
- }
557
- ]
558
- },
559
- "artifact_types": [
560
- "LOG",
561
- "TESTBENCH",
562
- "CONFIG",
563
- "RTL"
564
- ],
565
- "rtl_generated": true,
566
- "testbench_generated": true,
567
- "gds_generated": false,
568
- "duration_minutes": 5.2,
569
- "timestamp": "2026-03-06T01:43:39.134144",
570
- "pdk": "sky130"
571
- },
572
- {
573
- "design_id": "i2c_master",
574
- "complexity": "Medium",
575
- "attempt": 1,
576
- "passed": false,
577
- "timed_out": false,
578
- "failed_stage": null,
579
- "failed_stage_name": null,
580
- "failed_stage_critical": null,
581
- "failed_reason": "\u2502 address, data, read/write, start trigger. Busy, done, error status. 50 MHz system clock. \u2502",
582
- "completed_stages": [
583
- "INIT",
584
- "SPEC",
585
- "RTL_GEN",
586
- "RTL_FIX"
587
- ],
588
- "completed_stages_count": 4,
589
- "artifacts": {
590
- "LOG": [
591
- {
592
- "file": "i2c_master.log",
593
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/i2c_master.log",
594
- "size_bytes": 24228,
595
- "size_human": "23.7 KB"
596
- }
597
- ],
598
- "CONFIG": [
599
- {
600
- "file": "i2c_master_tb_static_gate.json",
601
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_static_gate.json",
602
- "size_bytes": 258,
603
- "size_human": "258 B"
604
- },
605
- {
606
- "file": "i2c_master_tb_compile_gate.json",
607
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_compile_gate.json",
608
- "size_bytes": 5988,
609
- "size_human": "5.8 KB"
610
- },
611
- {
612
- "file": "i2c_master_tb_compile_gate_attempt1.json",
613
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_compile_gate_attempt1.json",
614
- "size_bytes": 5988,
615
- "size_human": "5.8 KB"
616
- },
617
- {
618
- "file": "i2c_master_tb_static_gate_attempt1.json",
619
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_static_gate_attempt1.json",
620
- "size_bytes": 258,
621
- "size_human": "258 B"
622
- }
623
- ],
624
- "RTL": [
625
- {
626
- "file": "i2c_master_controller.v",
627
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_controller.v",
628
- "size_bytes": 1205,
629
- "size_human": "1.2 KB"
630
- },
631
- {
632
- "file": "i2c_master.v",
633
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master.v",
634
- "size_bytes": 619,
635
- "size_human": "619 B"
636
- }
637
- ],
638
- "TESTBENCH": [
639
- {
640
- "file": "i2c_master_tb.v",
641
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb.v",
642
- "size_bytes": 1543,
643
- "size_human": "1.5 KB"
644
- }
645
- ]
646
- },
647
- "artifact_types": [
648
- "LOG",
649
- "CONFIG",
650
- "RTL",
651
- "TESTBENCH"
652
- ],
653
- "rtl_generated": true,
654
- "testbench_generated": true,
655
- "gds_generated": false,
656
- "duration_minutes": 9.8,
657
- "timestamp": "2026-03-06T01:48:48.644076",
658
- "pdk": "sky130"
659
- },
660
- {
661
- "design_id": "apb_timer",
662
- "complexity": "Medium",
663
- "attempt": 1,
664
- "passed": true,
665
- "timed_out": false,
666
- "failed_stage": null,
667
- "failed_stage_name": null,
668
- "failed_stage_critical": null,
669
- "failed_reason": null,
670
- "completed_stages": [
671
- "INIT",
672
- "SPEC",
673
- "RTL_GEN",
674
- "RTL_FIX",
675
- "VERIFICATION",
676
- "FORMAL_VERIFY",
677
- "REGRESSION"
678
- ],
679
- "completed_stages_count": 7,
680
- "artifacts": {
681
- "LOG": [
682
- {
683
- "file": "apb_timer.log",
684
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/apb_timer.log",
685
- "size_bytes": 26458,
686
- "size_human": "25.8 KB"
687
- }
688
- ],
689
- "RTL": [
690
- {
691
- "file": "apb_timer.v",
692
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer.v",
693
- "size_bytes": 2418,
694
- "size_human": "2.4 KB"
695
- },
696
- {
697
- "file": "apb_timer_sva.sv",
698
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_sva.sv",
699
- "size_bytes": 1738,
700
- "size_human": "1.7 KB"
701
- },
702
- {
703
- "file": "apb_timer_sby_check.sv",
704
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_sby_check.sv",
705
- "size_bytes": 1743,
706
- "size_human": "1.7 KB"
707
- }
708
- ],
709
- "TESTBENCH": [
710
- {
711
- "file": "apb_timer_tb.v",
712
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb.v",
713
- "size_bytes": 1914,
714
- "size_human": "1.9 KB"
715
- }
716
- ],
717
- "CONFIG": [
718
- {
719
- "file": "apb_timer_tb_static_gate.json",
720
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_static_gate.json",
721
- "size_bytes": 258,
722
- "size_human": "258 B"
723
- },
724
- {
725
- "file": "apb_timer_tb_static_gate_attempt1.json",
726
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_static_gate_attempt1.json",
727
- "size_bytes": 258,
728
- "size_human": "258 B"
729
- },
730
- {
731
- "file": "apb_timer_tb_compile_gate.json",
732
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_compile_gate.json",
733
- "size_bytes": 6322,
734
- "size_human": "6.2 KB"
735
- },
736
- {
737
- "file": "apb_timer_tb_compile_gate_attempt1.json",
738
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_compile_gate_attempt1.json",
739
- "size_bytes": 6322,
740
- "size_human": "6.2 KB"
741
- },
742
- {
743
- "file": "apb_timer_formal_preflight.json",
744
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_formal_preflight.json",
745
- "size_bytes": 89,
746
- "size_human": "89 B"
747
- }
748
- ]
749
- },
750
- "artifact_types": [
751
- "LOG",
752
- "RTL",
753
- "TESTBENCH",
754
- "CONFIG"
755
- ],
756
- "rtl_generated": true,
757
- "testbench_generated": true,
758
- "gds_generated": false,
759
- "duration_minutes": 4.2,
760
- "timestamp": "2026-03-06T01:58:37.404877",
761
- "pdk": "sky130"
762
- },
763
- {
764
- "design_id": "vga_ctrl",
765
- "complexity": "Medium",
766
- "attempt": 1,
767
- "passed": false,
768
- "timed_out": false,
769
- "failed_stage": "RTL_FIX",
770
- "failed_stage_name": "RTL Lint & Syntax Fix",
771
- "failed_stage_critical": true,
772
- "failed_reason": "[RTL_FIX] Semantic rigor gate failed. Attempting mechanical width auto-fix.",
773
- "completed_stages": [
774
- "INIT",
775
- "SPEC",
776
- "RTL_GEN",
777
- "RTL_FIX"
778
- ],
779
- "completed_stages_count": 4,
780
- "artifacts": {
781
- "LOG": [
782
- {
783
- "file": "vga_ctrl.log",
784
- "path": "/home/vickynishad/OpenLane/designs/vga_ctrl/vga_ctrl.log",
785
- "size_bytes": 803376,
786
- "size_human": "784.5 KB"
787
- }
788
- ],
789
- "TESTBENCH": [
790
- {
791
- "file": "vga_ctrl_tb.v",
792
- "path": "/home/vickynishad/OpenLane/designs/vga_ctrl/src/vga_ctrl_tb.v",
793
- "size_bytes": 510,
794
- "size_human": "510 B"
795
- }
796
- ],
797
- "RTL": [
798
- {
799
- "file": "vga_ctrl.v",
800
- "path": "/home/vickynishad/OpenLane/designs/vga_ctrl/src/vga_ctrl.v",
801
- "size_bytes": 2957,
802
- "size_human": "2.9 KB"
803
- }
804
- ]
805
- },
806
- "artifact_types": [
807
- "LOG",
808
- "TESTBENCH",
809
- "RTL"
810
- ],
811
- "rtl_generated": true,
812
- "testbench_generated": true,
813
- "gds_generated": false,
814
- "duration_minutes": 6.7,
815
- "timestamp": "2026-03-06T02:02:46.652227",
816
- "pdk": "sky130"
817
- },
818
- {
819
- "design_id": "wb_uart",
820
- "complexity": "Complex",
821
- "attempt": 1,
822
- "passed": false,
823
- "timed_out": false,
824
- "failed_stage": "RTL_FIX",
825
- "failed_stage_name": "RTL Lint & Syntax Fix",
826
- "failed_stage_critical": true,
827
- "failed_reason": "[RTL_FIX] Already on fallback strategy. Build Failed.",
828
- "completed_stages": [
829
- "INIT",
830
- "SPEC",
831
- "RTL_GEN",
832
- "RTL_FIX"
833
- ],
834
- "completed_stages_count": 4,
835
- "artifacts": {
836
- "LOG": [
837
- {
838
- "file": "wb_uart.log",
839
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/wb_uart.log",
840
- "size_bytes": 28250,
841
- "size_human": "27.6 KB"
842
- }
843
- ],
844
- "RTL": [
845
- {
846
- "file": "wb_uart_tx_fifo.v",
847
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_tx_fifo.v",
848
- "size_bytes": 1282,
849
- "size_human": "1.3 KB"
850
- },
851
- {
852
- "file": "wb_uart_wishbone_interface.v",
853
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_wishbone_interface.v",
854
- "size_bytes": 1041,
855
- "size_human": "1.0 KB"
856
- },
857
- {
858
- "file": "wb_uart_controller.v",
859
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_controller.v",
860
- "size_bytes": 1658,
861
- "size_human": "1.6 KB"
862
- },
863
- {
864
- "file": "wb_uart.v",
865
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart.v",
866
- "size_bytes": 1638,
867
- "size_human": "1.6 KB"
868
- },
869
- {
870
- "file": "wb_uart_rx_fifo.v",
871
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_rx_fifo.v",
872
- "size_bytes": 1282,
873
- "size_human": "1.3 KB"
874
- }
875
- ]
876
- },
877
- "artifact_types": [
878
- "LOG",
879
- "RTL"
880
- ],
881
- "rtl_generated": true,
882
- "testbench_generated": false,
883
- "gds_generated": false,
884
- "duration_minutes": 7.0,
885
- "timestamp": "2026-03-06T02:09:26.491231",
886
- "pdk": "sky130"
887
- }
888
- ]
889
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
benchmark/results/benchmark_2026-03-06.md DELETED
@@ -1,53 +0,0 @@
1
- # AgentIC Benchmark Report
2
- **Date:** March 06, 2026
3
- **PDK:** sky130
4
- **Model:** NVIDIA NIM — Llama 3.3 70B
5
- **Mode:** RTL only
6
-
7
- ## Summary
8
- | Metric | Value |
9
- |--------|-------|
10
- | Total Designs | 10 |
11
- | **First-Attempt Pass Rate** | **20% (2/10)** |
12
- | Average Build Time | 5.0 min |
13
- | RTL Generated (incl. failures) | 10/10 |
14
- | GDS Generated | 0/10 |
15
-
16
- ## Results
17
- | Design | Complexity | Pass? | Failed At | Time | RTL | GDS |
18
- |--------|-----------|-------|-----------|------|-----|-----|
19
- | counter8 | Simple | ✓ | — | 5.0 min | ✓ | ✗ |
20
- | uart_tx | Simple | ✗ | RTL Lint & Syntax Fix | 7.9 min | ✓ | ✗ |
21
- | pwm_gen | Simple | ✗ | Functional Simulation | 0.5 min | ✓ | ✗ |
22
- | spi_master | Simple | ✗ | Formal Verification | 1.3 min | ✓ | ✗ |
23
- | sync_fifo | Simple | ✗ | RTL Lint & Syntax Fix | 2.4 min | ✓ | ✗ |
24
- | alu8 | Medium | ✗ | Formal Verification | 5.2 min | ✓ | ✗ |
25
- | i2c_master | Medium | ✗ | — | 9.8 min | ✓ | ✗ |
26
- | apb_timer | Medium | ✓ | — | 4.2 min | ✓ | ✗ |
27
- | vga_ctrl | Medium | ✗ | RTL Lint & Syntax Fix | 6.7 min | ✓ | ✗ |
28
- | wb_uart | Complex | ✗ | RTL Lint & Syntax Fix | 7.0 min | ✓ | ✗ |
29
-
30
- ## Stage Failure Analysis
31
- | Stage | Industry Name | Failures | Critical? |
32
- |-------|--------------|----------|-----------|
33
- | RTL_FIX | RTL Lint & Syntax Fix | 4 | 🔴 Yes |
34
- | FORMAL_VERIFY | Formal Verification | 2 | 🟡 Optional |
35
- | VERIFICATION | Functional Simulation | 1 | 🔴 Yes |
36
-
37
- **Fix `RTL_FIX` first.**
38
-
39
- ## Which Stages Matter in Industry
40
- | Stage | Skip OK? | Why |
41
- |-------|----------|-----|
42
- | RTL_GEN + RTL_FIX | ❌ Never | This is the chip |
43
- | VERIFICATION | ❌ Never | Proves it works |
44
- | HARDENING | ❌ Never | Physical layout |
45
- | SIGNOFF | ❌ Never | Fab requirement |
46
- | FORMAL_VERIFY | ✅ Simple designs | Optional for non-safety-critical |
47
- | COVERAGE_CHECK | ✅ If sim passes | Nice to have |
48
- | REGRESSION | ✅ Yes | Corner cases only |
49
- | ECO_PATCH | ✅ First attempt | Only if signoff fails |
50
- | CONVERGENCE | ✅ Simple designs | Embedded in hardening |
51
-
52
- ---
53
- *Generated by AgentIC Benchmark Runner — March 06, 2026*
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
benchmark/results/interim_2026-03-06.json DELETED
@@ -1,884 +0,0 @@
1
- {
2
- "results": [
3
- {
4
- "design_id": "counter8",
5
- "complexity": "Simple",
6
- "attempt": 1,
7
- "passed": true,
8
- "timed_out": false,
9
- "failed_stage": null,
10
- "failed_stage_name": null,
11
- "failed_stage_critical": null,
12
- "failed_reason": null,
13
- "completed_stages": [
14
- "INIT",
15
- "SPEC",
16
- "RTL_GEN",
17
- "RTL_FIX",
18
- "VERIFICATION",
19
- "FORMAL_VERIFY",
20
- "REGRESSION"
21
- ],
22
- "completed_stages_count": 7,
23
- "artifacts": {
24
- "LOG": [
25
- {
26
- "file": "counter8.log",
27
- "path": "/home/vickynishad/OpenLane/designs/counter8/counter8.log",
28
- "size_bytes": 32765,
29
- "size_human": "32.0 KB"
30
- }
31
- ],
32
- "CONFIG": [
33
- {
34
- "file": "counter8_tb_compile_gate.json",
35
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_compile_gate.json",
36
- "size_bytes": 2128,
37
- "size_human": "2.1 KB"
38
- },
39
- {
40
- "file": "counter8_tb_static_gate_attempt1.json",
41
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_static_gate_attempt1.json",
42
- "size_bytes": 258,
43
- "size_human": "258 B"
44
- },
45
- {
46
- "file": "counter8_formal_preflight.json",
47
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_formal_preflight.json",
48
- "size_bytes": 89,
49
- "size_human": "89 B"
50
- },
51
- {
52
- "file": "counter8_tb_repair_action_attempt1.json",
53
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_repair_action_attempt1.json",
54
- "size_bytes": 114,
55
- "size_human": "114 B"
56
- },
57
- {
58
- "file": "counter8_tb_static_gate_attempt2.json",
59
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_static_gate_attempt2.json",
60
- "size_bytes": 258,
61
- "size_human": "258 B"
62
- },
63
- {
64
- "file": "counter8_tb_compile_gate_attempt2.json",
65
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_compile_gate_attempt2.json",
66
- "size_bytes": 2128,
67
- "size_human": "2.1 KB"
68
- },
69
- {
70
- "file": "counter8_tb_repair_action.json",
71
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_repair_action.json",
72
- "size_bytes": 114,
73
- "size_human": "114 B"
74
- },
75
- {
76
- "file": "counter8_tb_compile_gate_attempt1.json",
77
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_compile_gate_attempt1.json",
78
- "size_bytes": 2466,
79
- "size_human": "2.4 KB"
80
- },
81
- {
82
- "file": "counter8_tb_static_gate.json",
83
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb_static_gate.json",
84
- "size_bytes": 258,
85
- "size_human": "258 B"
86
- }
87
- ],
88
- "RTL": [
89
- {
90
- "file": "counter8.v",
91
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8.v",
92
- "size_bytes": 1433,
93
- "size_human": "1.4 KB"
94
- },
95
- {
96
- "file": "counter8_sby_check.sv",
97
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_sby_check.sv",
98
- "size_bytes": 1160,
99
- "size_human": "1.1 KB"
100
- },
101
- {
102
- "file": "counter8_sva.sv",
103
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_sva.sv",
104
- "size_bytes": 1046,
105
- "size_human": "1.0 KB"
106
- }
107
- ],
108
- "TESTBENCH": [
109
- {
110
- "file": "counter8_tb.v",
111
- "path": "/home/vickynishad/OpenLane/designs/counter8/src/counter8_tb.v",
112
- "size_bytes": 2478,
113
- "size_human": "2.4 KB"
114
- }
115
- ]
116
- },
117
- "artifact_types": [
118
- "LOG",
119
- "CONFIG",
120
- "RTL",
121
- "TESTBENCH"
122
- ],
123
- "rtl_generated": true,
124
- "testbench_generated": true,
125
- "gds_generated": false,
126
- "duration_minutes": 5.0,
127
- "timestamp": "2026-03-06T01:26:38.860725",
128
- "pdk": "sky130"
129
- },
130
- {
131
- "design_id": "uart_tx",
132
- "complexity": "Simple",
133
- "attempt": 1,
134
- "passed": false,
135
- "timed_out": false,
136
- "failed_stage": "RTL_FIX",
137
- "failed_stage_name": "RTL Lint & Syntax Fix",
138
- "failed_stage_critical": true,
139
- "failed_reason": "[RTL_FIX] Semantic rigor gate failed. Attempting mechanical width auto-fix.",
140
- "completed_stages": [
141
- "INIT",
142
- "SPEC",
143
- "RTL_GEN",
144
- "RTL_FIX"
145
- ],
146
- "completed_stages_count": 4,
147
- "artifacts": {
148
- "LOG": [
149
- {
150
- "file": "uart_tx.log",
151
- "path": "/home/vickynishad/OpenLane/designs/uart_tx/uart_tx.log",
152
- "size_bytes": 72014,
153
- "size_human": "70.3 KB"
154
- }
155
- ],
156
- "RTL": [
157
- {
158
- "file": "uart_tx.v",
159
- "path": "/home/vickynishad/OpenLane/designs/uart_tx/src/uart_tx.v",
160
- "size_bytes": 2947,
161
- "size_human": "2.9 KB"
162
- }
163
- ]
164
- },
165
- "artifact_types": [
166
- "LOG",
167
- "RTL"
168
- ],
169
- "rtl_generated": true,
170
- "testbench_generated": false,
171
- "gds_generated": false,
172
- "duration_minutes": 7.9,
173
- "timestamp": "2026-03-06T01:31:40.190788",
174
- "pdk": "sky130"
175
- },
176
- {
177
- "design_id": "pwm_gen",
178
- "complexity": "Simple",
179
- "attempt": 1,
180
- "passed": false,
181
- "timed_out": false,
182
- "failed_stage": "VERIFICATION",
183
- "failed_stage_name": "Functional Simulation",
184
- "failed_stage_critical": true,
185
- "failed_reason": "[VERIFICATION] TB gate failed; applied deterministic auto-repair.",
186
- "completed_stages": [
187
- "INIT",
188
- "SPEC",
189
- "RTL_GEN",
190
- "RTL_FIX"
191
- ],
192
- "completed_stages_count": 4,
193
- "artifacts": {
194
- "LOG": [
195
- {
196
- "file": "pwm_gen.log",
197
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/pwm_gen.log",
198
- "size_bytes": 20937,
199
- "size_human": "20.4 KB"
200
- }
201
- ],
202
- "CONFIG": [
203
- {
204
- "file": "pwm_gen_tb_static_gate_attempt2.json",
205
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate_attempt2.json",
206
- "size_bytes": 258,
207
- "size_human": "258 B"
208
- },
209
- {
210
- "file": "pwm_gen_tb_compile_gate_attempt2.json",
211
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate_attempt2.json",
212
- "size_bytes": 3491,
213
- "size_human": "3.4 KB"
214
- },
215
- {
216
- "file": "pwm_gen_tb_static_gate.json",
217
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate.json",
218
- "size_bytes": 258,
219
- "size_human": "258 B"
220
- },
221
- {
222
- "file": "pwm_gen_tb_repair_action.json",
223
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_repair_action.json",
224
- "size_bytes": 114,
225
- "size_human": "114 B"
226
- },
227
- {
228
- "file": "pwm_gen_tb_compile_gate_attempt1.json",
229
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate_attempt1.json",
230
- "size_bytes": 3011,
231
- "size_human": "2.9 KB"
232
- },
233
- {
234
- "file": "pwm_gen_tb_compile_gate_attempt3.json",
235
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate_attempt3.json",
236
- "size_bytes": 3011,
237
- "size_human": "2.9 KB"
238
- },
239
- {
240
- "file": "pwm_gen_tb_static_gate_attempt3.json",
241
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate_attempt3.json",
242
- "size_bytes": 258,
243
- "size_human": "258 B"
244
- },
245
- {
246
- "file": "pwm_gen_tb_compile_gate.json",
247
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_compile_gate.json",
248
- "size_bytes": 3011,
249
- "size_human": "2.9 KB"
250
- },
251
- {
252
- "file": "pwm_gen_tb_static_gate_attempt1.json",
253
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_static_gate_attempt1.json",
254
- "size_bytes": 258,
255
- "size_human": "258 B"
256
- },
257
- {
258
- "file": "pwm_gen_tb_repair_action_attempt1.json",
259
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb_repair_action_attempt1.json",
260
- "size_bytes": 114,
261
- "size_human": "114 B"
262
- }
263
- ],
264
- "TESTBENCH": [
265
- {
266
- "file": "pwm_gen_tb.v",
267
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen_tb.v",
268
- "size_bytes": 2208,
269
- "size_human": "2.2 KB"
270
- }
271
- ],
272
- "RTL": [
273
- {
274
- "file": "pwm_gen.v",
275
- "path": "/home/vickynishad/OpenLane/designs/pwm_gen/src/pwm_gen.v",
276
- "size_bytes": 915,
277
- "size_human": "915 B"
278
- }
279
- ]
280
- },
281
- "artifact_types": [
282
- "LOG",
283
- "CONFIG",
284
- "TESTBENCH",
285
- "RTL"
286
- ],
287
- "rtl_generated": true,
288
- "testbench_generated": true,
289
- "gds_generated": false,
290
- "duration_minutes": 0.5,
291
- "timestamp": "2026-03-06T01:39:32.846798",
292
- "pdk": "sky130"
293
- },
294
- {
295
- "design_id": "spi_master",
296
- "complexity": "Simple",
297
- "attempt": 1,
298
- "passed": false,
299
- "timed_out": false,
300
- "failed_stage": "FORMAL_VERIFY",
301
- "failed_stage_name": "Formal Verification",
302
- "failed_stage_critical": false,
303
- "failed_reason": "[FORMAL_VERIFY] Formal preflight failed: 1 issue(s).",
304
- "completed_stages": [
305
- "INIT",
306
- "SPEC",
307
- "RTL_GEN",
308
- "RTL_FIX",
309
- "VERIFICATION"
310
- ],
311
- "completed_stages_count": 5,
312
- "artifacts": {
313
- "LOG": [
314
- {
315
- "file": "spi_master.log",
316
- "path": "/home/vickynishad/OpenLane/designs/spi_master/spi_master.log",
317
- "size_bytes": 14056,
318
- "size_human": "13.7 KB"
319
- }
320
- ],
321
- "TESTBENCH": [
322
- {
323
- "file": "spi_master_tb.v",
324
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb.v",
325
- "size_bytes": 1700,
326
- "size_human": "1.7 KB"
327
- }
328
- ],
329
- "CONFIG": [
330
- {
331
- "file": "spi_master_tb_compile_gate.json",
332
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_compile_gate.json",
333
- "size_bytes": 2180,
334
- "size_human": "2.1 KB"
335
- },
336
- {
337
- "file": "spi_master_tb_static_gate_attempt1.json",
338
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_static_gate_attempt1.json",
339
- "size_bytes": 258,
340
- "size_human": "258 B"
341
- },
342
- {
343
- "file": "spi_master_tb_compile_gate_attempt1.json",
344
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_compile_gate_attempt1.json",
345
- "size_bytes": 2180,
346
- "size_human": "2.1 KB"
347
- },
348
- {
349
- "file": "spi_master_formal_preflight.json",
350
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_formal_preflight.json",
351
- "size_bytes": 292,
352
- "size_human": "292 B"
353
- },
354
- {
355
- "file": "spi_master_tb_static_gate.json",
356
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_tb_static_gate.json",
357
- "size_bytes": 258,
358
- "size_human": "258 B"
359
- }
360
- ],
361
- "RTL": [
362
- {
363
- "file": "spi_master.v",
364
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master.v",
365
- "size_bytes": 4256,
366
- "size_human": "4.2 KB"
367
- },
368
- {
369
- "file": "spi_master_sva.sv",
370
- "path": "/home/vickynishad/OpenLane/designs/spi_master/src/spi_master_sva.sv",
371
- "size_bytes": 1188,
372
- "size_human": "1.2 KB"
373
- }
374
- ]
375
- },
376
- "artifact_types": [
377
- "LOG",
378
- "TESTBENCH",
379
- "CONFIG",
380
- "RTL"
381
- ],
382
- "rtl_generated": true,
383
- "testbench_generated": true,
384
- "gds_generated": false,
385
- "duration_minutes": 1.3,
386
- "timestamp": "2026-03-06T01:40:00.129996",
387
- "pdk": "sky130"
388
- },
389
- {
390
- "design_id": "sync_fifo",
391
- "complexity": "Simple",
392
- "attempt": 1,
393
- "passed": false,
394
- "timed_out": false,
395
- "failed_stage": "RTL_FIX",
396
- "failed_stage_name": "RTL Lint & Syntax Fix",
397
- "failed_stage_critical": true,
398
- "failed_reason": "[RTL_FIX] Semantic rigor gate failed. Attempting mechanical width auto-fix.",
399
- "completed_stages": [
400
- "INIT",
401
- "SPEC",
402
- "RTL_GEN",
403
- "RTL_FIX"
404
- ],
405
- "completed_stages_count": 4,
406
- "artifacts": {
407
- "LOG": [
408
- {
409
- "file": "sync_fifo.log",
410
- "path": "/home/vickynishad/OpenLane/designs/sync_fifo/sync_fifo.log",
411
- "size_bytes": 53377,
412
- "size_human": "52.1 KB"
413
- }
414
- ],
415
- "RTL": [
416
- {
417
- "file": "sync_fifo.v",
418
- "path": "/home/vickynishad/OpenLane/designs/sync_fifo/src/sync_fifo.v",
419
- "size_bytes": 1299,
420
- "size_human": "1.3 KB"
421
- }
422
- ]
423
- },
424
- "artifact_types": [
425
- "LOG",
426
- "RTL"
427
- ],
428
- "rtl_generated": true,
429
- "testbench_generated": false,
430
- "gds_generated": false,
431
- "duration_minutes": 2.4,
432
- "timestamp": "2026-03-06T01:41:16.885242",
433
- "pdk": "sky130"
434
- },
435
- {
436
- "design_id": "alu8",
437
- "complexity": "Medium",
438
- "attempt": 1,
439
- "passed": false,
440
- "timed_out": false,
441
- "failed_stage": "FORMAL_VERIFY",
442
- "failed_stage_name": "Formal Verification",
443
- "failed_stage_critical": false,
444
- "failed_reason": "[FORMAL_VERIFY] Yosys SVA preflight failed. Regenerating SVA with error context.",
445
- "completed_stages": [
446
- "INIT",
447
- "SPEC",
448
- "RTL_GEN",
449
- "RTL_FIX",
450
- "VERIFICATION",
451
- "FORMAL_VERIFY"
452
- ],
453
- "completed_stages_count": 6,
454
- "artifacts": {
455
- "LOG": [
456
- {
457
- "file": "alu8.log",
458
- "path": "/home/vickynishad/OpenLane/designs/alu8/alu8.log",
459
- "size_bytes": 47547,
460
- "size_human": "46.4 KB"
461
- }
462
- ],
463
- "TESTBENCH": [
464
- {
465
- "file": "alu8_tb.v",
466
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb.v",
467
- "size_bytes": 2674,
468
- "size_human": "2.6 KB"
469
- }
470
- ],
471
- "CONFIG": [
472
- {
473
- "file": "alu8_tb_static_gate_attempt1.json",
474
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_static_gate_attempt1.json",
475
- "size_bytes": 258,
476
- "size_human": "258 B"
477
- },
478
- {
479
- "file": "alu8_tb_compile_gate.json",
480
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_compile_gate.json",
481
- "size_bytes": 13185,
482
- "size_human": "12.9 KB"
483
- },
484
- {
485
- "file": "alu8_coverage_attempt1.json",
486
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt1.json",
487
- "size_bytes": 850,
488
- "size_human": "850 B"
489
- },
490
- {
491
- "file": "alu8_coverage_latest.json",
492
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_latest.json",
493
- "size_bytes": 851,
494
- "size_human": "851 B"
495
- },
496
- {
497
- "file": "alu8_coverage_attempt2.json",
498
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt2.json",
499
- "size_bytes": 850,
500
- "size_human": "850 B"
501
- },
502
- {
503
- "file": "alu8_tb_compile_gate_attempt1.json",
504
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_compile_gate_attempt1.json",
505
- "size_bytes": 13185,
506
- "size_human": "12.9 KB"
507
- },
508
- {
509
- "file": "alu8_coverage_attempt3.json",
510
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt3.json",
511
- "size_bytes": 851,
512
- "size_human": "851 B"
513
- },
514
- {
515
- "file": "alu8_coverage_attempt4.json",
516
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_coverage_attempt4.json",
517
- "size_bytes": 851,
518
- "size_human": "851 B"
519
- },
520
- {
521
- "file": "alu8_formal_preflight.json",
522
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_formal_preflight.json",
523
- "size_bytes": 89,
524
- "size_human": "89 B"
525
- },
526
- {
527
- "file": "alu8_tb_static_gate.json",
528
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_tb_static_gate.json",
529
- "size_bytes": 258,
530
- "size_human": "258 B"
531
- }
532
- ],
533
- "RTL": [
534
- {
535
- "file": "alu8_sby_check.sv",
536
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_sby_check.sv",
537
- "size_bytes": 3908,
538
- "size_human": "3.8 KB"
539
- },
540
- {
541
- "file": "alu8_sva.sv",
542
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8_sva.sv",
543
- "size_bytes": 1810,
544
- "size_human": "1.8 KB"
545
- },
546
- {
547
- "file": "alu8.v",
548
- "path": "/home/vickynishad/OpenLane/designs/alu8/src/alu8.v",
549
- "size_bytes": 1517,
550
- "size_human": "1.5 KB"
551
- }
552
- ]
553
- },
554
- "artifact_types": [
555
- "LOG",
556
- "TESTBENCH",
557
- "CONFIG",
558
- "RTL"
559
- ],
560
- "rtl_generated": true,
561
- "testbench_generated": true,
562
- "gds_generated": false,
563
- "duration_minutes": 5.2,
564
- "timestamp": "2026-03-06T01:43:39.134144",
565
- "pdk": "sky130"
566
- },
567
- {
568
- "design_id": "i2c_master",
569
- "complexity": "Medium",
570
- "attempt": 1,
571
- "passed": false,
572
- "timed_out": false,
573
- "failed_stage": null,
574
- "failed_stage_name": null,
575
- "failed_stage_critical": null,
576
- "failed_reason": "\u2502 address, data, read/write, start trigger. Busy, done, error status. 50 MHz system clock. \u2502",
577
- "completed_stages": [
578
- "INIT",
579
- "SPEC",
580
- "RTL_GEN",
581
- "RTL_FIX"
582
- ],
583
- "completed_stages_count": 4,
584
- "artifacts": {
585
- "LOG": [
586
- {
587
- "file": "i2c_master.log",
588
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/i2c_master.log",
589
- "size_bytes": 24228,
590
- "size_human": "23.7 KB"
591
- }
592
- ],
593
- "CONFIG": [
594
- {
595
- "file": "i2c_master_tb_static_gate.json",
596
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_static_gate.json",
597
- "size_bytes": 258,
598
- "size_human": "258 B"
599
- },
600
- {
601
- "file": "i2c_master_tb_compile_gate.json",
602
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_compile_gate.json",
603
- "size_bytes": 5988,
604
- "size_human": "5.8 KB"
605
- },
606
- {
607
- "file": "i2c_master_tb_compile_gate_attempt1.json",
608
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_compile_gate_attempt1.json",
609
- "size_bytes": 5988,
610
- "size_human": "5.8 KB"
611
- },
612
- {
613
- "file": "i2c_master_tb_static_gate_attempt1.json",
614
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb_static_gate_attempt1.json",
615
- "size_bytes": 258,
616
- "size_human": "258 B"
617
- }
618
- ],
619
- "RTL": [
620
- {
621
- "file": "i2c_master_controller.v",
622
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_controller.v",
623
- "size_bytes": 1205,
624
- "size_human": "1.2 KB"
625
- },
626
- {
627
- "file": "i2c_master.v",
628
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master.v",
629
- "size_bytes": 619,
630
- "size_human": "619 B"
631
- }
632
- ],
633
- "TESTBENCH": [
634
- {
635
- "file": "i2c_master_tb.v",
636
- "path": "/home/vickynishad/OpenLane/designs/i2c_master/src/i2c_master_tb.v",
637
- "size_bytes": 1543,
638
- "size_human": "1.5 KB"
639
- }
640
- ]
641
- },
642
- "artifact_types": [
643
- "LOG",
644
- "CONFIG",
645
- "RTL",
646
- "TESTBENCH"
647
- ],
648
- "rtl_generated": true,
649
- "testbench_generated": true,
650
- "gds_generated": false,
651
- "duration_minutes": 9.8,
652
- "timestamp": "2026-03-06T01:48:48.644076",
653
- "pdk": "sky130"
654
- },
655
- {
656
- "design_id": "apb_timer",
657
- "complexity": "Medium",
658
- "attempt": 1,
659
- "passed": true,
660
- "timed_out": false,
661
- "failed_stage": null,
662
- "failed_stage_name": null,
663
- "failed_stage_critical": null,
664
- "failed_reason": null,
665
- "completed_stages": [
666
- "INIT",
667
- "SPEC",
668
- "RTL_GEN",
669
- "RTL_FIX",
670
- "VERIFICATION",
671
- "FORMAL_VERIFY",
672
- "REGRESSION"
673
- ],
674
- "completed_stages_count": 7,
675
- "artifacts": {
676
- "LOG": [
677
- {
678
- "file": "apb_timer.log",
679
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/apb_timer.log",
680
- "size_bytes": 26458,
681
- "size_human": "25.8 KB"
682
- }
683
- ],
684
- "RTL": [
685
- {
686
- "file": "apb_timer.v",
687
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer.v",
688
- "size_bytes": 2418,
689
- "size_human": "2.4 KB"
690
- },
691
- {
692
- "file": "apb_timer_sva.sv",
693
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_sva.sv",
694
- "size_bytes": 1738,
695
- "size_human": "1.7 KB"
696
- },
697
- {
698
- "file": "apb_timer_sby_check.sv",
699
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_sby_check.sv",
700
- "size_bytes": 1743,
701
- "size_human": "1.7 KB"
702
- }
703
- ],
704
- "TESTBENCH": [
705
- {
706
- "file": "apb_timer_tb.v",
707
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb.v",
708
- "size_bytes": 1914,
709
- "size_human": "1.9 KB"
710
- }
711
- ],
712
- "CONFIG": [
713
- {
714
- "file": "apb_timer_tb_static_gate.json",
715
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_static_gate.json",
716
- "size_bytes": 258,
717
- "size_human": "258 B"
718
- },
719
- {
720
- "file": "apb_timer_tb_static_gate_attempt1.json",
721
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_static_gate_attempt1.json",
722
- "size_bytes": 258,
723
- "size_human": "258 B"
724
- },
725
- {
726
- "file": "apb_timer_tb_compile_gate.json",
727
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_compile_gate.json",
728
- "size_bytes": 6322,
729
- "size_human": "6.2 KB"
730
- },
731
- {
732
- "file": "apb_timer_tb_compile_gate_attempt1.json",
733
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_tb_compile_gate_attempt1.json",
734
- "size_bytes": 6322,
735
- "size_human": "6.2 KB"
736
- },
737
- {
738
- "file": "apb_timer_formal_preflight.json",
739
- "path": "/home/vickynishad/OpenLane/designs/apb_timer/src/apb_timer_formal_preflight.json",
740
- "size_bytes": 89,
741
- "size_human": "89 B"
742
- }
743
- ]
744
- },
745
- "artifact_types": [
746
- "LOG",
747
- "RTL",
748
- "TESTBENCH",
749
- "CONFIG"
750
- ],
751
- "rtl_generated": true,
752
- "testbench_generated": true,
753
- "gds_generated": false,
754
- "duration_minutes": 4.2,
755
- "timestamp": "2026-03-06T01:58:37.404877",
756
- "pdk": "sky130"
757
- },
758
- {
759
- "design_id": "vga_ctrl",
760
- "complexity": "Medium",
761
- "attempt": 1,
762
- "passed": false,
763
- "timed_out": false,
764
- "failed_stage": "RTL_FIX",
765
- "failed_stage_name": "RTL Lint & Syntax Fix",
766
- "failed_stage_critical": true,
767
- "failed_reason": "[RTL_FIX] Semantic rigor gate failed. Attempting mechanical width auto-fix.",
768
- "completed_stages": [
769
- "INIT",
770
- "SPEC",
771
- "RTL_GEN",
772
- "RTL_FIX"
773
- ],
774
- "completed_stages_count": 4,
775
- "artifacts": {
776
- "LOG": [
777
- {
778
- "file": "vga_ctrl.log",
779
- "path": "/home/vickynishad/OpenLane/designs/vga_ctrl/vga_ctrl.log",
780
- "size_bytes": 803376,
781
- "size_human": "784.5 KB"
782
- }
783
- ],
784
- "TESTBENCH": [
785
- {
786
- "file": "vga_ctrl_tb.v",
787
- "path": "/home/vickynishad/OpenLane/designs/vga_ctrl/src/vga_ctrl_tb.v",
788
- "size_bytes": 510,
789
- "size_human": "510 B"
790
- }
791
- ],
792
- "RTL": [
793
- {
794
- "file": "vga_ctrl.v",
795
- "path": "/home/vickynishad/OpenLane/designs/vga_ctrl/src/vga_ctrl.v",
796
- "size_bytes": 2957,
797
- "size_human": "2.9 KB"
798
- }
799
- ]
800
- },
801
- "artifact_types": [
802
- "LOG",
803
- "TESTBENCH",
804
- "RTL"
805
- ],
806
- "rtl_generated": true,
807
- "testbench_generated": true,
808
- "gds_generated": false,
809
- "duration_minutes": 6.7,
810
- "timestamp": "2026-03-06T02:02:46.652227",
811
- "pdk": "sky130"
812
- },
813
- {
814
- "design_id": "wb_uart",
815
- "complexity": "Complex",
816
- "attempt": 1,
817
- "passed": false,
818
- "timed_out": false,
819
- "failed_stage": "RTL_FIX",
820
- "failed_stage_name": "RTL Lint & Syntax Fix",
821
- "failed_stage_critical": true,
822
- "failed_reason": "[RTL_FIX] Already on fallback strategy. Build Failed.",
823
- "completed_stages": [
824
- "INIT",
825
- "SPEC",
826
- "RTL_GEN",
827
- "RTL_FIX"
828
- ],
829
- "completed_stages_count": 4,
830
- "artifacts": {
831
- "LOG": [
832
- {
833
- "file": "wb_uart.log",
834
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/wb_uart.log",
835
- "size_bytes": 28250,
836
- "size_human": "27.6 KB"
837
- }
838
- ],
839
- "RTL": [
840
- {
841
- "file": "wb_uart_tx_fifo.v",
842
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_tx_fifo.v",
843
- "size_bytes": 1282,
844
- "size_human": "1.3 KB"
845
- },
846
- {
847
- "file": "wb_uart_wishbone_interface.v",
848
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_wishbone_interface.v",
849
- "size_bytes": 1041,
850
- "size_human": "1.0 KB"
851
- },
852
- {
853
- "file": "wb_uart_controller.v",
854
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_controller.v",
855
- "size_bytes": 1658,
856
- "size_human": "1.6 KB"
857
- },
858
- {
859
- "file": "wb_uart.v",
860
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart.v",
861
- "size_bytes": 1638,
862
- "size_human": "1.6 KB"
863
- },
864
- {
865
- "file": "wb_uart_rx_fifo.v",
866
- "path": "/home/vickynishad/OpenLane/designs/wb_uart/src/wb_uart_rx_fifo.v",
867
- "size_bytes": 1282,
868
- "size_human": "1.3 KB"
869
- }
870
- ]
871
- },
872
- "artifact_types": [
873
- "LOG",
874
- "RTL"
875
- ],
876
- "rtl_generated": true,
877
- "testbench_generated": false,
878
- "gds_generated": false,
879
- "duration_minutes": 7.0,
880
- "timestamp": "2026-03-06T02:09:26.491231",
881
- "pdk": "sky130"
882
- }
883
- ]
884
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
benchmark/run_benchmark.py DELETED
@@ -1,549 +0,0 @@
1
- #!/usr/bin/env python3
2
- #!/usr/bin/env python3
3
- """
4
- AgentIC Benchmark Runner v2
5
- ============================
6
- Runs 10 chip designs through the AgentIC pipeline and produces
7
- a detailed report of real pass/fail rates, stage failures, timing,
8
- and artifact recovery.
9
-
10
- Usage (from AgentIC root directory):
11
- python3 benchmark/run_benchmark.py
12
- python3 benchmark/run_benchmark.py --skip-openlane
13
- python3 benchmark/run_benchmark.py --full-signoff
14
- python3 benchmark/run_benchmark.py --design uart_tx
15
- python3 benchmark/run_benchmark.py --pdk gf180
16
- """
17
-
18
- import os
19
- import re
20
- import sys
21
- import json
22
- import time
23
- import argparse
24
- import datetime
25
- import subprocess
26
- from pathlib import Path
27
-
28
- # ─────────────────────────────────────────────────────────────
29
- # 10 TEST DESIGNS — simple to complex
30
- # ─────────────────────────────────────────────────────────────
31
- TEST_DESIGNS = [
32
- {
33
- "id": "counter8",
34
- "complexity": "Simple",
35
- "desc": (
36
- "8-bit synchronous up-counter with active-high synchronous reset and "
37
- "active-high enable. On every rising clock edge, if reset is high the "
38
- "counter clears to zero. If enable is high and reset is low, the counter "
39
- "increments by one. When it reaches 255 it wraps to zero. Output the 8-bit count."
40
- ),
41
- },
42
- {
43
- "id": "uart_tx",
44
- "complexity": "Simple",
45
- "desc": (
46
- "UART transmitter at 115200 baud, 8N1 format. Accepts parallel 8-bit data "
47
- "and a start signal. Outputs a serial TX line. 50 MHz system clock. "
48
- "Signals transmission complete via a done flag. Idle state is logic high."
49
- ),
50
- },
51
- {
52
- "id": "pwm_gen",
53
- "complexity": "Simple",
54
- "desc": (
55
- "PWM generator with a 16-bit period register and 16-bit duty cycle register, "
56
- "both writable via a simple register interface with address and write-enable. "
57
- "Outputs a single PWM signal. 50 MHz clock. Edge-aligned mode."
58
- ),
59
- },
60
- {
61
- "id": "spi_master",
62
- "complexity": "Simple",
63
- "desc": (
64
- "SPI master controller, mode 0 only (CPOL=0 CPHA=0). 8-bit transfers. "
65
- "Generates SCLK, MOSI, CS. Accepts MISO. Clock divider to set SPI speed "
66
- "from 50 MHz system clock. Busy and done status signals."
67
- ),
68
- },
69
- {
70
- "id": "sync_fifo",
71
- "complexity": "Simple",
72
- "desc": (
73
- "Synchronous FIFO, 8-bit data width, 16-entry depth, single clock domain. "
74
- "Push and pop with full and empty flags. Almost-full flag when 2 or fewer "
75
- "slots remain. Almost-empty flag when 2 or fewer entries stored."
76
- ),
77
- },
78
- {
79
- "id": "alu8",
80
- "complexity": "Medium",
81
- "desc": (
82
- "8-bit ALU with 4-bit opcode selecting: ADD, SUB, AND, OR, XOR, NOT, "
83
- "left shift by 1, right shift by 1, increment, decrement. Outputs 8-bit "
84
- "result and 4-bit flags: zero, carry, overflow, negative. Fully combinational."
85
- ),
86
- },
87
- {
88
- "id": "i2c_master",
89
- "complexity": "Medium",
90
- "desc": (
91
- "I2C master controller, standard mode 100 kHz. Generates SCL and SDA with "
92
- "open-drain outputs. 7-bit addressing. Handles START, STOP conditions. "
93
- "ACK/NACK detection. Register interface for address, data, read/write, "
94
- "start trigger. Busy, done, error status. 50 MHz system clock."
95
- ),
96
- },
97
- {
98
- "id": "apb_timer",
99
- "complexity": "Medium",
100
- "desc": (
101
- "32-bit APB timer peripheral with interrupt. APB3 slave interface with "
102
- "PCLK, PRESETn, PSEL, PENABLE, PWRITE, PADDR, PWDATA, PRDATA, PREADY. "
103
- "Registers: control, prescaler, reload value, current count, interrupt status. "
104
- "Supports one-shot and continuous modes. Interrupt when counter reaches zero. "
105
- "Prescaler divides clock 1 to 65536."
106
- ),
107
- },
108
- {
109
- "id": "vga_ctrl",
110
- "complexity": "Medium",
111
- "desc": (
112
- "VGA timing controller for 640x480 at 60 Hz. Generates HSYNC and VSYNC "
113
- "with correct timing. Outputs current pixel X and Y coordinates and "
114
- "active video enable signal. Pixel clock input at 25 MHz."
115
- ),
116
- },
117
- {
118
- "id": "wb_uart",
119
- "complexity": "Complex",
120
- "desc": (
121
- "UART transceiver with Wishbone B4 slave interface. 8N1 format. "
122
- "Configurable baud rate via baud divisor register. 16-byte TX FIFO and "
123
- "16-byte RX FIFO. Wishbone registers: TX data, RX data, status "
124
- "(TX full, TX empty, RX full, RX empty, overrun), control (baud divisor, "
125
- "loopback enable), interrupt enable. Interrupt on RX available and TX empty. "
126
- "50 MHz clock. Wishbone signals: CLK_I RST_I ADR_I DAT_I DAT_O WE_I STB_I ACK_O CYC_I."
127
- ),
128
- },
129
- ]
130
-
131
- # ─────────────────────────────────────────────────────────────
132
- # STAGE METADATA
133
- # ─────────────────────────────────────────────────────────────
134
- STAGE_INFO = {
135
- "INIT": {"name": "Environment Setup", "critical": True},
136
- "SPEC": {"name": "Architectural Planning", "critical": True},
137
- "RTL_GEN": {"name": "RTL Generation", "critical": True},
138
- "RTL_FIX": {"name": "RTL Lint & Syntax Fix", "critical": True},
139
- "VERIFICATION": {"name": "Functional Simulation", "critical": True},
140
- "FORMAL_VERIFY": {"name": "Formal Verification", "critical": False},
141
- "COVERAGE_CHECK": {"name": "Coverage Closure", "critical": False},
142
- "REGRESSION": {"name": "Regression Testing", "critical": False},
143
- "SDC_GEN": {"name": "Timing Constraints", "critical": True},
144
- "FLOORPLAN": {"name": "Physical Floorplanning", "critical": True},
145
- "HARDENING": {"name": "Place & Route", "critical": True},
146
- "CONVERGENCE": {"name": "Timing Convergence", "critical": True},
147
- "ECO_PATCH": {"name": "Engineering Change Order", "critical": False},
148
- "SIGNOFF": {"name": "DRC/LVS/STA Signoff", "critical": True},
149
- }
150
-
151
- SUCCESS_MARKERS = [
152
- "PIPELINE COMPLETE", "BUILD COMPLETE", "ALL STAGES PASSED",
153
- "SIGNOFF PASSED", "BUILD SUCCEEDED", "SUCCESS",
154
- ]
155
- FAILURE_MARKERS = [
156
- "PIPELINE FAILED", "BUILD FAILED", "FATAL ERROR",
157
- "STAGE FAILED", "ABORTING", "FAIL-CLOSED",
158
- ]
159
-
160
- STAGE_PATTERN = re.compile(
161
- r"\b(INIT|SPEC|RTL_GEN|RTL_FIX|VERIFICATION|FORMAL_VERIFY|"
162
- r"COVERAGE_CHECK|REGRESSION|SDC_GEN|FLOORPLAN|HARDENING|"
163
- r"CONVERGENCE|ECO_PATCH|SIGNOFF)\b",
164
- re.IGNORECASE,
165
- )
166
-
167
-
168
- def parse_args():
169
- p = argparse.ArgumentParser(description="AgentIC Benchmark Runner v2")
170
- p.add_argument("--pdk", default="sky130", choices=["sky130", "gf180"])
171
- p.add_argument("--skip-openlane", action="store_true")
172
- p.add_argument("--skip-coverage", action="store_true")
173
- p.add_argument("--full-signoff", action="store_true")
174
- p.add_argument("--design", default=None)
175
- p.add_argument("--max-retries", default=3, type=int)
176
- p.add_argument("--output-dir", default="benchmark/results")
177
- p.add_argument("--attempts", default=1, type=int)
178
- p.add_argument("--timeout", default=3600, type=int)
179
- return p.parse_args()
180
-
181
-
182
- def build_command(design, args):
183
- cmd = [
184
- "python3", "main.py", "build",
185
- "--name", design["id"],
186
- "--desc", design["desc"],
187
- "--pdk-profile", args.pdk,
188
- "--max-retries", str(args.max_retries),
189
- "--strict-gates",
190
- ]
191
- if args.skip_openlane:
192
- cmd.append("--skip-openlane")
193
- if args.skip_coverage:
194
- cmd.append("--skip-coverage")
195
- if args.full_signoff:
196
- cmd.append("--full-signoff")
197
- return cmd
198
-
199
-
200
- def detect_pass_fail(stdout, stderr, returncode):
201
- combined = (stdout + stderr).upper()
202
- for marker in FAILURE_MARKERS:
203
- if marker in combined:
204
- return False
205
- for marker in SUCCESS_MARKERS:
206
- if marker in combined:
207
- return True
208
- return returncode == 0
209
-
210
-
211
- def extract_failed_stage(stdout, stderr):
212
- combined = stdout + stderr
213
- last_stage = None
214
- for line in combined.split("\n"):
215
- m = STAGE_PATTERN.search(line)
216
- if m:
217
- last_stage = m.group(1).upper()
218
- if any(kw in line.upper() for kw in ["FAILED", "ERROR", "FATAL", "ABORT", "FAIL-CLOSED"]):
219
- return last_stage, line.strip()[:250]
220
- return None, None
221
-
222
-
223
- def extract_completed_stages(stdout):
224
- completed = []
225
- for line in stdout.split("\n"):
226
- m = STAGE_PATTERN.search(line)
227
- if m:
228
- stage = m.group(1).upper()
229
- if any(kw in line.upper() for kw in [
230
- "COMPLETE", "PASSED", "SUCCESS", "DONE", "TRANSITION", "FINISHED"
231
- ]):
232
- if stage not in completed:
233
- completed.append(stage)
234
- return completed
235
-
236
-
237
- def find_openlane_root():
238
- env = os.environ.get("OPENLANE_ROOT")
239
- if env and Path(env).exists():
240
- return env
241
- for c in [Path.home() / "OpenLane", Path("/opt/OpenLane")]:
242
- if c.exists():
243
- return str(c)
244
- return str(Path.home() / "OpenLane")
245
-
246
-
247
- def find_artifacts(design_id, openlane_root):
248
- """Only find files actually belonging to THIS design — no false positives."""
249
- found = {}
250
- type_map = {
251
- ".v": "RTL", ".sv": "RTL", ".sva": "FORMAL", ".sby": "FORMAL",
252
- ".sdc": "TIMING", ".tcl": "PHYSICAL", ".lef": "PHYSICAL",
253
- ".def": "PHYSICAL", ".gds": "PHYSICAL", ".json": "CONFIG",
254
- ".log": "LOG", ".rpt": "SIGNOFF",
255
- }
256
- scan_dirs = [
257
- Path(f"outputs/{design_id}"),
258
- Path(f"results/{design_id}"),
259
- Path(f"designs/{design_id}"),
260
- Path(openlane_root) / "designs" / design_id,
261
- ]
262
- for d in scan_dirs:
263
- if not d.exists():
264
- continue
265
- for f in d.rglob("*"):
266
- if not f.is_file():
267
- continue
268
- # STRICT: only files where design_id is in filename OR direct parent folder
269
- in_name = design_id.lower() in f.name.lower()
270
- in_parent = design_id.lower() in f.parent.name.lower()
271
- if not (in_name or in_parent):
272
- continue
273
- atype = type_map.get(f.suffix.lower())
274
- if not atype:
275
- continue
276
- if atype == "RTL" and "_tb" in f.name.lower():
277
- atype = "TESTBENCH"
278
- if atype not in found:
279
- found[atype] = []
280
- sz = f.stat().st_size
281
- found[atype].append({
282
- "file": f.name,
283
- "path": str(f),
284
- "size_bytes": sz,
285
- "size_human": fmt_size(sz),
286
- })
287
- return found
288
-
289
-
290
- def fmt_size(b):
291
- if b < 1024: return f"{b} B"
292
- elif b < 1048576: return f"{b/1024:.1f} KB"
293
- else: return f"{b/1048576:.1f} MB"
294
-
295
-
296
- def run_build(design, args, attempt):
297
- openlane_root = find_openlane_root()
298
- cmd = build_command(design, args)
299
- timestamp = datetime.datetime.now().isoformat()
300
-
301
- print(f"\n{'─'*60}")
302
- print(f" Design : {design['id']} ({design['complexity']})")
303
- print(f" Attempt : {attempt}")
304
- print(f" PDK : {args.pdk}")
305
- print(f" Command : {' '.join(cmd[:6])} ...")
306
- print(f"{'─'*60}")
307
-
308
- start = time.time()
309
- try:
310
- proc = subprocess.run(cmd, capture_output=True, text=True, timeout=args.timeout)
311
- stdout, stderr, retcode = proc.stdout, proc.stderr, proc.returncode
312
- except subprocess.TimeoutExpired:
313
- dur = round((time.time() - start) / 60, 1)
314
- print(f" Result : ✗ TIMEOUT ({dur} min)")
315
- return make_result(design, attempt, args, False, "TIMEOUT",
316
- f"Exceeded {args.timeout}s timeout", [], {}, dur, timestamp, True)
317
- except FileNotFoundError:
318
- print("ERROR: main.py not found. Run from AgentIC root.")
319
- sys.exit(1)
320
-
321
- dur = round((time.time() - start) / 60, 1)
322
- passed = detect_pass_fail(stdout, stderr, retcode)
323
-
324
- # Sanity check — real builds never finish in under 2 minutes
325
- # BUT only apply this if no real stages completed (otherwise it was a real fast failure)
326
- completed = extract_completed_stages(stdout)
327
- if dur < 2.0 and len(completed) <= 1:
328
- print(f" ⚠ WARNING: Finished in {dur} min with no meaningful progress.")
329
- print(f" ⚠ Check that your CLI args match and the orchestrator actually ran.")
330
- passed = False
331
- failed_stage = "INIT"
332
- failed_reason = f"Build exited in {dur} min with ≤1 stage — CLI args may be wrong."
333
- elif dur < 2.0:
334
- # Build ran real stages but failed fast — use real failure data
335
- print(f" ⚠ NOTE: Build completed in {dur} min (fast failure after {len(completed)} stages).")
336
- failed_stage, failed_reason = (None, None) if passed else extract_failed_stage(stdout, stderr)
337
- else:
338
- failed_stage, failed_reason = (None, None) if passed else extract_failed_stage(stdout, stderr)
339
-
340
-
341
- artifacts = find_artifacts(design["id"], openlane_root)
342
-
343
- status = "✓ PASS" if passed else "✗ FAIL"
344
- fail_info = ""
345
- if failed_stage:
346
- name = STAGE_INFO.get(failed_stage, {}).get("name", failed_stage)
347
- fail_info = f" — failed at {failed_stage} ({name})"
348
-
349
- print(f" Result : {status}{fail_info}")
350
- print(f" Time : {dur} min")
351
- print(f" Stages : {len(completed)} completed")
352
- print(f" Artifacts: {', '.join(artifacts.keys()) if artifacts else 'none found for this design'}")
353
-
354
- return make_result(design, attempt, args, passed, failed_stage,
355
- failed_reason, completed, artifacts, dur, timestamp)
356
-
357
-
358
- def make_result(design, attempt, args, passed, failed_stage, failed_reason,
359
- completed, artifacts, duration, timestamp, timed_out=False):
360
- info = STAGE_INFO.get(failed_stage, {}) if failed_stage else {}
361
- return {
362
- "design_id": design["id"],
363
- "complexity": design["complexity"],
364
- "attempt": attempt,
365
- "passed": passed,
366
- "timed_out": timed_out,
367
- "failed_stage": failed_stage,
368
- "failed_stage_name": info.get("name"),
369
- "failed_stage_critical": info.get("critical"),
370
- "failed_reason": failed_reason,
371
- "completed_stages": completed,
372
- "completed_stages_count": len(completed),
373
- "artifacts": artifacts,
374
- "artifact_types": list(artifacts.keys()),
375
- "rtl_generated": "RTL" in artifacts,
376
- "testbench_generated": "TESTBENCH" in artifacts,
377
- "gds_generated": "PHYSICAL" in artifacts,
378
- "duration_minutes": duration,
379
- "timestamp": timestamp,
380
- "pdk": args.pdk,
381
- }
382
-
383
-
384
- def print_summary(results):
385
- passed = [r for r in results if r["passed"]]
386
- failed = [r for r in results if not r["passed"]]
387
- rate = len(passed) / len(results) * 100 if results else 0
388
- avg = sum(r["duration_minutes"] for r in results) / len(results)
389
- fails = {}
390
- for r in failed:
391
- s = r.get("failed_stage")
392
- if s: fails[s] = fails.get(s, 0) + 1
393
-
394
- print(f"\n{'═'*60}")
395
- print(f" BENCHMARK COMPLETE")
396
- print(f"{'═'*60}")
397
- print(f" Pass Rate : {rate:.0f}% ({len(passed)}/{len(results)})")
398
- print(f" Avg Time : {avg:.1f} min")
399
- print(f"{'─'*60}")
400
- for r in passed:
401
- print(f" ✓ {r['design_id']:<22} {r['duration_minutes']} min")
402
- for r in failed:
403
- at = r.get("failed_stage_name") or r.get("failed_stage") or "unknown"
404
- print(f" ✗ {r['design_id']:<22} failed at {at}")
405
- if fails:
406
- worst = max(fails, key=fails.get)
407
- name = STAGE_INFO.get(worst, {}).get("name", worst)
408
- print(f"\n ⚠ Bottleneck: {worst} ({name}) — fix this first")
409
- print(f"{'═'*60}\n")
410
-
411
-
412
- def generate_markdown(results, args):
413
- today = datetime.date.today().strftime("%B %d, %Y")
414
- passed = [r for r in results if r["passed"]]
415
- failed = [r for r in results if not r["passed"]]
416
- rate = len(passed) / len(results) * 100 if results else 0
417
- avg = sum(r["duration_minutes"] for r in results) / len(results)
418
- fails = {}
419
- for r in failed:
420
- s = r.get("failed_stage")
421
- if s: fails[s] = fails.get(s, 0) + 1
422
-
423
- L = [
424
- f"# AgentIC Benchmark Report",
425
- f"**Date:** {today} ",
426
- f"**PDK:** {args.pdk} ",
427
- f"**Model:** NVIDIA NIM — Llama 3.3 70B ",
428
- f"**Mode:** {'RTL only' if args.skip_openlane else 'Full pipeline'}",
429
- "",
430
- "## Summary",
431
- "| Metric | Value |",
432
- "|--------|-------|",
433
- f"| Total Designs | {len(results)} |",
434
- f"| **First-Attempt Pass Rate** | **{rate:.0f}% ({len(passed)}/{len(results)})** |",
435
- f"| Average Build Time | {avg:.1f} min |",
436
- f"| RTL Generated (incl. failures) | {sum(1 for r in results if r.get('rtl_generated'))}/{len(results)} |",
437
- f"| GDS Generated | {sum(1 for r in results if r.get('gds_generated'))}/{len(results)} |",
438
- "",
439
- "## Results",
440
- "| Design | Complexity | Pass? | Failed At | Time | RTL | GDS |",
441
- "|--------|-----------|-------|-----------|------|-----|-----|",
442
- ]
443
- for r in results:
444
- s = "✓" if r["passed"] else "✗"
445
- f = r.get("failed_stage_name") or r.get("failed_stage") or "—"
446
- L.append(f"| {r['design_id']} | {r['complexity']} | {s} | {f} | {r['duration_minutes']} min | {'✓' if r.get('rtl_generated') else '✗'} | {'✓' if r.get('gds_generated') else '✗'} |")
447
-
448
- if fails:
449
- L += ["", "## Stage Failure Analysis",
450
- "| Stage | Industry Name | Failures | Critical? |",
451
- "|-------|--------------|----------|-----------|"]
452
- for stage, count in sorted(fails.items(), key=lambda x: -x[1]):
453
- info = STAGE_INFO.get(stage, {})
454
- L.append(f"| {stage} | {info.get('name', stage)} | {count} | {'🔴 Yes' if info.get('critical') else '🟡 Optional'} |")
455
- worst = max(fails, key=fails.get)
456
- L += ["", f"**Fix `{worst}` first.**"]
457
-
458
- L += [
459
- "", "## Which Stages Matter in Industry",
460
- "| Stage | Skip OK? | Why |",
461
- "|-------|----------|-----|",
462
- "| RTL_GEN + RTL_FIX | ❌ Never | This is the chip |",
463
- "| VERIFICATION | ❌ Never | Proves it works |",
464
- "| HARDENING | ❌ Never | Physical layout |",
465
- "| SIGNOFF | ❌ Never | Fab requirement |",
466
- "| FORMAL_VERIFY | ✅ Simple designs | Optional for non-safety-critical |",
467
- "| COVERAGE_CHECK | ✅ If sim passes | Nice to have |",
468
- "| REGRESSION | ✅ Yes | Corner cases only |",
469
- "| ECO_PATCH | ✅ First attempt | Only if signoff fails |",
470
- "| CONVERGENCE | ✅ Simple designs | Embedded in hardening |",
471
- "", "---",
472
- f"*Generated by AgentIC Benchmark Runner — {today}*",
473
- ]
474
- return "\n".join(L)
475
-
476
-
477
- def main():
478
- args = parse_args()
479
-
480
- if not Path("main.py").exists():
481
- print("ERROR: Run from AgentIC root directory (where main.py is).")
482
- sys.exit(1)
483
-
484
- output_dir = Path(args.output_dir)
485
- output_dir.mkdir(parents=True, exist_ok=True)
486
-
487
- designs = TEST_DESIGNS
488
- if args.design:
489
- designs = [d for d in TEST_DESIGNS if d["id"] == args.design]
490
- if not designs:
491
- print(f"ERROR: '{args.design}' not found.")
492
- print(f"Available IDs: {[d['id'] for d in TEST_DESIGNS]}")
493
- sys.exit(1)
494
-
495
- total = len(designs) * args.attempts
496
- est_mins = total * 25
497
-
498
- print(f"\n{'═'*60}")
499
- print(f" AgentIC Benchmark Runner v2")
500
- print(f"{'═'*60}")
501
- print(f" Designs : {len(designs)}")
502
- print(f" Total runs: {total}")
503
- print(f" PDK : {args.pdk}")
504
- mode_parts = []
505
- if args.skip_openlane:
506
- mode_parts.append("RTL only (--skip-openlane)")
507
- else:
508
- mode_parts.append("Full pipeline")
509
- if args.skip_coverage:
510
- mode_parts.append("skip coverage (--skip-coverage)")
511
- print(f" Mode : {', '.join(mode_parts)}")
512
- print(f" Est. time : ~{est_mins} min")
513
- print(f" Output : {output_dir}/")
514
- print(f"{'═'*60}\n")
515
-
516
- all_results = []
517
- date_str = datetime.date.today().strftime("%Y-%m-%d")
518
-
519
- for design in designs:
520
- for attempt in range(1, args.attempts + 1):
521
- result = run_build(design, args, attempt)
522
- all_results.append(result)
523
- # Save after every build
524
- with open(output_dir / f"interim_{date_str}.json", "w") as f:
525
- json.dump({"results": all_results}, f, indent=2)
526
-
527
- # Final saves
528
- with open(output_dir / f"benchmark_{date_str}.json", "w") as f:
529
- json.dump({
530
- "meta": {
531
- "date": date_str, "pdk": args.pdk,
532
- "pass_rate_pct": round(
533
- len([r for r in all_results if r["passed"]]) / len(all_results) * 100, 1
534
- )
535
- },
536
- "results": all_results
537
- }, f, indent=2)
538
-
539
- md_path = output_dir / f"benchmark_{date_str}.md"
540
- with open(md_path, "w") as f:
541
- f.write(generate_markdown(all_results, args))
542
-
543
- print_summary(all_results)
544
- print(f" Saved: {md_path}")
545
- print(f" Saved: {output_dir}/benchmark_{date_str}.json\n")
546
-
547
-
548
- if __name__ == "__main__":
549
- main()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
parser.out ADDED
The diff for this file is too large to render. See raw diff
 
parsetab.py ADDED
The diff for this file is too large to render. See raw diff
 
server/api.py CHANGED
@@ -70,7 +70,7 @@ JOB_STORE: Dict[str, Dict[str, Any]] = {}
70
  TRAINING_JSONL = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "training", "agentic_sft_data.jsonl"))
71
 
72
  BUILD_STATES_ORDER = [
73
- "INIT", "SPEC", "RTL_GEN", "RTL_FIX", "VERIFICATION",
74
  "FORMAL_VERIFY", "COVERAGE_CHECK", "REGRESSION",
75
  "SDC_GEN",
76
  "FLOORPLAN", "HARDENING", "CONVERGENCE_REVIEW",
@@ -81,6 +81,11 @@ TOTAL_STEPS = len(BUILD_STATES_ORDER)
81
  STAGE_META: Dict[str, Dict[str, str]] = {
82
  "INIT": {"label": "Initializing Workspace", "icon": "🔧"},
83
  "SPEC": {"label": "Architectural Planning", "icon": "📐"},
 
 
 
 
 
84
  "RTL_GEN": {"label": "RTL Generation", "icon": "💻"},
85
  "RTL_FIX": {"label": "RTL Syntax Fixing", "icon": "🔨"},
86
  "VERIFICATION": {"label": "Verification & Testbench", "icon": "🧪"},
@@ -492,6 +497,11 @@ def _infer_agent_name(state: str, message: str) -> str:
492
  state_agents = {
493
  "INIT": "Orchestrator",
494
  "SPEC": "ArchitectModule",
 
 
 
 
 
495
  "RTL_GEN": "RTL Designer",
496
  "RTL_FIX": "Error Analyst",
497
  "VERIFICATION": "Testbench Designer",
@@ -529,6 +539,11 @@ def _get_thinking_message(state_name: str, design_name: str) -> str:
529
  messages = {
530
  "INIT": f"Setting up workspace for {design_name}...",
531
  "SPEC": f"Decomposing architecture for {design_name}...",
 
 
 
 
 
532
  "RTL_GEN": f"Generating Verilog RTL for {design_name}...",
533
  "RTL_FIX": f"Running syntax checks and applying fixes...",
534
  "VERIFICATION": f"Generating testbench and running simulation...",
@@ -604,7 +619,25 @@ def _run_with_approval_gates(job_id: str, orchestrator, req, llm):
604
  prev_state = orchestrator.state
605
  _execute_stage(orchestrator, current_state_name)
606
  new_state = orchestrator.state
607
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
608
  # If the stage transitioned to a new state, the stage completed successfully
609
  # Generate approval card and wait
610
  if new_state != prev_state or new_state in (BuildState.SUCCESS, BuildState.FAIL):
@@ -667,6 +700,11 @@ def _execute_stage(orchestrator, state_name: str):
667
  stage_handlers = {
668
  "INIT": orchestrator.do_init,
669
  "SPEC": orchestrator.do_spec,
 
 
 
 
 
670
  "RTL_GEN": orchestrator.do_rtl_gen,
671
  "RTL_FIX": orchestrator.do_rtl_fix,
672
  "VERIFICATION": orchestrator.do_verification,
@@ -1067,6 +1105,9 @@ async def stream_build_events(job_id: str):
1067
 
1068
  async def event_generator():
1069
  sent_index = 0
 
 
 
1070
  # Send a ping immediately so the browser knows the connection is alive
1071
  yield "data: {\"type\": \"ping\", \"message\": \"connected\"}\n\n"
1072
 
@@ -1080,12 +1121,36 @@ async def stream_build_events(job_id: str):
1080
  event = events[sent_index]
1081
  yield f"data: {json.dumps(event)}\n\n"
1082
  sent_index += 1
 
 
1083
 
1084
  # Stop streaming when done, failed, or cancelled
1085
  if job["status"] in ("done", "failed", "cancelled") and sent_index >= len(events):
1086
  yield f"data: {json.dumps({'type': 'stream_end', 'status': job['status']})}\n\n"
1087
  break
1088
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1089
  await asyncio.sleep(0.4)
1090
 
1091
  return StreamingResponse(
 
70
  TRAINING_JSONL = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "training", "agentic_sft_data.jsonl"))
71
 
72
  BUILD_STATES_ORDER = [
73
+ "INIT", "SPEC", "SPEC_VALIDATE", "HIERARCHY_EXPAND", "FEASIBILITY_CHECK", "CDC_ANALYZE", "VERIFICATION_PLAN", "RTL_GEN", "RTL_FIX", "VERIFICATION",
74
  "FORMAL_VERIFY", "COVERAGE_CHECK", "REGRESSION",
75
  "SDC_GEN",
76
  "FLOORPLAN", "HARDENING", "CONVERGENCE_REVIEW",
 
81
  STAGE_META: Dict[str, Dict[str, str]] = {
82
  "INIT": {"label": "Initializing Workspace", "icon": "🔧"},
83
  "SPEC": {"label": "Architectural Planning", "icon": "📐"},
84
+ "SPEC_VALIDATE": {"label": "Specification Validation", "icon": "🔍"},
85
+ "HIERARCHY_EXPAND": {"label": "Hierarchy Expansion", "icon": "🌲"},
86
+ "FEASIBILITY_CHECK": {"label": "Feasibility Check", "icon": "⚖️"},
87
+ "CDC_ANALYZE": {"label": "CDC Analysis", "icon": "🔀"},
88
+ "VERIFICATION_PLAN": {"label": "Verification Planning", "icon": "📋"},
89
  "RTL_GEN": {"label": "RTL Generation", "icon": "💻"},
90
  "RTL_FIX": {"label": "RTL Syntax Fixing", "icon": "🔨"},
91
  "VERIFICATION": {"label": "Verification & Testbench", "icon": "🧪"},
 
497
  state_agents = {
498
  "INIT": "Orchestrator",
499
  "SPEC": "ArchitectModule",
500
+ "SPEC_VALIDATE": "Spec Validator",
501
+ "HIERARCHY_EXPAND": "Hierarchy Expander",
502
+ "FEASIBILITY_CHECK": "Feasibility Checker",
503
+ "CDC_ANALYZE": "CDC Analyzer",
504
+ "VERIFICATION_PLAN": "Verification Planner",
505
  "RTL_GEN": "RTL Designer",
506
  "RTL_FIX": "Error Analyst",
507
  "VERIFICATION": "Testbench Designer",
 
539
  messages = {
540
  "INIT": f"Setting up workspace for {design_name}...",
541
  "SPEC": f"Decomposing architecture for {design_name}...",
542
+ "SPEC_VALIDATE": f"Validating hardware spec for {design_name}...",
543
+ "HIERARCHY_EXPAND": f"Expanding submodule hierarchy for {design_name}...",
544
+ "FEASIBILITY_CHECK": f"Checking Sky130 feasibility for {design_name}...",
545
+ "CDC_ANALYZE": f"Analyzing clock domain crossings for {design_name}...",
546
+ "VERIFICATION_PLAN": f"Generating verification plan for {design_name}...",
547
  "RTL_GEN": f"Generating Verilog RTL for {design_name}...",
548
  "RTL_FIX": f"Running syntax checks and applying fixes...",
549
  "VERIFICATION": f"Generating testbench and running simulation...",
 
619
  prev_state = orchestrator.state
620
  _execute_stage(orchestrator, current_state_name)
621
  new_state = orchestrator.state
622
+
623
+ # ── Spec elaboration options event ──
624
+ # If spec_generator produced 3 design options (short description), emit them
625
+ # so the web UI can surface an interactive option picker card.
626
+ if orchestrator.artifacts.get("spec_elaboration_needed"):
627
+ options = orchestrator.artifacts.get("spec_elaboration_options", [])
628
+ elaboration_payload = {
629
+ "job_id": job_id,
630
+ "event": "design_options",
631
+ "stage": "SPEC_VALIDATE",
632
+ "design_name": design_name,
633
+ "message": "Your description was brief — here are 3 expert design interpretations:",
634
+ "options": options,
635
+ "auto_selected": orchestrator.artifacts.get("elaborated_desc", ""),
636
+ }
637
+ _emit_event(job_id, elaboration_payload)
638
+ # Clear the flag so we don't re-emit on the retry
639
+ orchestrator.artifacts.pop("spec_elaboration_needed", None)
640
+
641
  # If the stage transitioned to a new state, the stage completed successfully
642
  # Generate approval card and wait
643
  if new_state != prev_state or new_state in (BuildState.SUCCESS, BuildState.FAIL):
 
700
  stage_handlers = {
701
  "INIT": orchestrator.do_init,
702
  "SPEC": orchestrator.do_spec,
703
+ "SPEC_VALIDATE": orchestrator.do_spec_validate,
704
+ "HIERARCHY_EXPAND": orchestrator.do_hierarchy_expand,
705
+ "FEASIBILITY_CHECK": orchestrator.do_feasibility_check,
706
+ "CDC_ANALYZE": orchestrator.do_cdc_analyze,
707
+ "VERIFICATION_PLAN": orchestrator.do_verification_plan,
708
  "RTL_GEN": orchestrator.do_rtl_gen,
709
  "RTL_FIX": orchestrator.do_rtl_fix,
710
  "VERIFICATION": orchestrator.do_verification,
 
1105
 
1106
  async def event_generator():
1107
  sent_index = 0
1108
+ last_event_sent_at = time.time()
1109
+ stall_warned = False
1110
+ STALL_TIMEOUT = 300 # 5 minutes of silence → stall warning
1111
  # Send a ping immediately so the browser knows the connection is alive
1112
  yield "data: {\"type\": \"ping\", \"message\": \"connected\"}\n\n"
1113
 
 
1121
  event = events[sent_index]
1122
  yield f"data: {json.dumps(event)}\n\n"
1123
  sent_index += 1
1124
+ last_event_sent_at = time.time()
1125
+ stall_warned = False # new event arrived — reset warning
1126
 
1127
  # Stop streaming when done, failed, or cancelled
1128
  if job["status"] in ("done", "failed", "cancelled") and sent_index >= len(events):
1129
  yield f"data: {json.dumps({'type': 'stream_end', 'status': job['status']})}\n\n"
1130
  break
1131
 
1132
+ # Emit a stall warning if no events have arrived for STALL_TIMEOUT seconds
1133
+ if (
1134
+ not stall_warned
1135
+ and job["status"] == "running"
1136
+ and (time.time() - last_event_sent_at) >= STALL_TIMEOUT
1137
+ ):
1138
+ stage = job.get("current_state", "UNKNOWN")
1139
+ stall_event = {
1140
+ "type": "stall_warning",
1141
+ "state": stage,
1142
+ "message": (
1143
+ f"⚠️ No activity for 5 minutes at stage {stage} — "
1144
+ "the LLM may be stuck or unresponsive. "
1145
+ "You can cancel and retry."
1146
+ ),
1147
+ "step": 0,
1148
+ "total_steps": TOTAL_STEPS,
1149
+ "timestamp": int(time.time()),
1150
+ }
1151
+ yield f"data: {json.dumps(stall_event)}\n\n"
1152
+ stall_warned = True
1153
+
1154
  await asyncio.sleep(0.4)
1155
 
1156
  return StreamingResponse(
server/auth.py CHANGED
@@ -88,7 +88,7 @@ def _supabase_query(table: str, select: str = "*", filters: str = "") -> list:
88
  url,
89
  headers={
90
  "apikey": SUPABASE_SERVICE_KEY,
91
- "Authorization": f"Bearer {SUPABASE_SERVICE_KEY},",
92
  },
93
  timeout=10,
94
  )
 
88
  url,
89
  headers={
90
  "apikey": SUPABASE_SERVICE_KEY,
91
+ "Authorization": f"Bearer {SUPABASE_SERVICE_KEY}",
92
  },
93
  timeout=10,
94
  )
server/stage_summary.py CHANGED
@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
11
 
12
  # Next stage mapping
13
  STAGE_FLOW = [
14
- "INIT", "SPEC", "RTL_GEN", "RTL_FIX", "VERIFICATION",
15
  "FORMAL_VERIFY", "COVERAGE_CHECK", "REGRESSION",
16
  "SDC_GEN", "FLOORPLAN", "HARDENING", "CONVERGENCE_REVIEW",
17
  "ECO_PATCH", "SIGNOFF", "SUCCESS",
@@ -20,6 +20,10 @@ STAGE_FLOW = [
20
  STAGE_DESCRIPTIONS = {
21
  "INIT": "Initialize workspace, check tool availability, and prepare build directories",
22
  "SPEC": "Decompose natural language description into a structured architecture specification (SID JSON)",
 
 
 
 
23
  "RTL_GEN": "Generate Verilog/SystemVerilog RTL code from the architecture specification",
24
  "RTL_FIX": "Run syntax checks and fix any Verilog syntax errors in the generated RTL",
25
  "VERIFICATION": "Generate a testbench and run functional simulation to verify RTL correctness",
@@ -62,6 +66,26 @@ def collect_stage_artifacts(orchestrator, stage_name: str) -> List[Dict[str, str
62
  ("sid", "Structured Interface Document (SID JSON)"),
63
  ("spec", "Detailed RTL generation prompt from SID"),
64
  ],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
  "RTL_GEN": [
66
  ("rtl_path", "Generated Verilog RTL file"),
67
  ("rtl_code", "RTL source code content"),
@@ -275,6 +299,11 @@ def build_stage_complete_payload(orchestrator, stage_name: str, design_name: str
275
  STAGE_HUMAN_NAMES = {
276
  "INIT": "Initialization",
277
  "SPEC": "Architecture Specification",
 
 
 
 
 
278
  "RTL_GEN": "RTL Generation",
279
  "RTL_FIX": "RTL Syntax Fixing",
280
  "VERIFICATION": "Verification",
 
11
 
12
  # Next stage mapping
13
  STAGE_FLOW = [
14
+ "INIT", "SPEC", "SPEC_VALIDATE", "HIERARCHY_EXPAND", "FEASIBILITY_CHECK", "CDC_ANALYZE", "VERIFICATION_PLAN", "RTL_GEN", "RTL_FIX", "VERIFICATION",
15
  "FORMAL_VERIFY", "COVERAGE_CHECK", "REGRESSION",
16
  "SDC_GEN", "FLOORPLAN", "HARDENING", "CONVERGENCE_REVIEW",
17
  "ECO_PATCH", "SIGNOFF", "SUCCESS",
 
20
  STAGE_DESCRIPTIONS = {
21
  "INIT": "Initialize workspace, check tool availability, and prepare build directories",
22
  "SPEC": "Decompose natural language description into a structured architecture specification (SID JSON)",
23
+ "SPEC_VALIDATE": "Run 6-stage hardware spec validation: classify design, check completeness, decompose modules, define interfaces, generate behavioral contract",
24
+ "HIERARCHY_EXPAND": "Evaluate submodule complexity, recursively expand complex submodules into nested specs, and verify interface consistency across the full hierarchy",
25
+ "FEASIBILITY_CHECK": "Evaluate Sky130/OpenLane physical design feasibility: frequency limits, memory sizing, arithmetic complexity, area budget, and PDK-specific rules",
26
+ "CDC_ANALYZE": "Identify clock domain crossings, assign synchronization strategies (2-flop sync, pulse sync, async FIFO, handshake, reset sync), and generate CDC submodule specifications",
27
  "RTL_GEN": "Generate Verilog/SystemVerilog RTL code from the architecture specification",
28
  "RTL_FIX": "Run syntax checks and fix any Verilog syntax errors in the generated RTL",
29
  "VERIFICATION": "Generate a testbench and run functional simulation to verify RTL correctness",
 
66
  ("sid", "Structured Interface Document (SID JSON)"),
67
  ("spec", "Detailed RTL generation prompt from SID"),
68
  ],
69
+ "SPEC_VALIDATE": [
70
+ ("hardware_spec", "Validated hardware specification (JSON)"),
71
+ ("spec_enrichment", "Behavioral contract and verification hints from spec validation"),
72
+ ],
73
+ "HIERARCHY_EXPAND": [
74
+ ("hierarchy_result", "Expanded hierarchy specification (JSON)"),
75
+ ("hierarchy_enrichment", "Hierarchy depth, expansion count, and consistency fixes"),
76
+ ],
77
+ "FEASIBILITY_CHECK": [
78
+ ("feasibility_result", "Physical design feasibility analysis (JSON)"),
79
+ ("feasibility_enrichment", "Feasibility verdict, GE estimate, floorplan recommendation, warnings"),
80
+ ],
81
+ "CDC_ANALYZE": [
82
+ ("cdc_result", "Clock domain crossing analysis (JSON)"),
83
+ ("cdc_enrichment", "CDC status, domain count, crossing signals, synchronization submodules"),
84
+ ],
85
+ "VERIFICATION_PLAN": [
86
+ ("verification_plan", "Structured verification plan (JSON)"),
87
+ ("verification_enrichment", "Test counts, SVA count, coverage points, warnings"),
88
+ ],
89
  "RTL_GEN": [
90
  ("rtl_path", "Generated Verilog RTL file"),
91
  ("rtl_code", "RTL source code content"),
 
299
  STAGE_HUMAN_NAMES = {
300
  "INIT": "Initialization",
301
  "SPEC": "Architecture Specification",
302
+ "SPEC_VALIDATE": "Specification Validation",
303
+ "HIERARCHY_EXPAND": "Hierarchy Expansion",
304
+ "FEASIBILITY_CHECK": "Feasibility Check",
305
+ "CDC_ANALYZE": "CDC Analysis",
306
+ "VERIFICATION_PLAN": "Verification Planning",
307
  "RTL_GEN": "RTL Generation",
308
  "RTL_FIX": "RTL Syntax Fixing",
309
  "VERIFICATION": "Verification",
src/agentic/cli.py CHANGED
@@ -6,7 +6,7 @@ Uses CrewAI + LLM (DeepSeek/Llama/Groq) to generate chips from natural language.
6
 
7
 
8
  Usage:
9
- python main.py build --name counter --desc "8-bit counter with enable and reset"
10
  """
11
 
12
  import os
 
6
 
7
 
8
  Usage:
9
+ python3 main.py build --name counter --desc "8-bit counter with enable and reset"
10
  """
11
 
12
  import os
src/agentic/core/__init__.py CHANGED
@@ -12,6 +12,11 @@ Modules:
12
  """
13
 
14
  from .architect import ArchitectModule, StructuredSpecDict
 
 
 
 
 
15
  from .waveform_expert import WaveformExpertModule
16
  from .deep_debugger import DeepDebuggerModule
17
  from .react_agent import ReActAgent, ReActStep
@@ -20,6 +25,16 @@ from .self_reflect import SelfReflectPipeline
20
  __all__ = [
21
  "ArchitectModule",
22
  "StructuredSpecDict",
 
 
 
 
 
 
 
 
 
 
23
  "WaveformExpertModule",
24
  "DeepDebuggerModule",
25
  "ReActAgent",
 
12
  """
13
 
14
  from .architect import ArchitectModule, StructuredSpecDict
15
+ from .spec_generator import HardwareSpecGenerator, HardwareSpec
16
+ from .hierarchy_expander import HierarchyExpander, HierarchyResult
17
+ from .feasibility_checker import FeasibilityChecker, FeasibilityResult
18
+ from .cdc_analyzer import CDCAnalyzer, CDCResult
19
+ from .verification_planner import VerificationPlanner, VerificationPlan
20
  from .waveform_expert import WaveformExpertModule
21
  from .deep_debugger import DeepDebuggerModule
22
  from .react_agent import ReActAgent, ReActStep
 
25
  __all__ = [
26
  "ArchitectModule",
27
  "StructuredSpecDict",
28
+ "HardwareSpecGenerator",
29
+ "HardwareSpec",
30
+ "HierarchyExpander",
31
+ "HierarchyResult",
32
+ "FeasibilityChecker",
33
+ "FeasibilityResult",
34
+ "CDCAnalyzer",
35
+ "CDCResult",
36
+ "VerificationPlanner",
37
+ "VerificationPlan",
38
  "WaveformExpertModule",
39
  "DeepDebuggerModule",
40
  "ReActAgent",
src/agentic/core/cdc_analyzer.py ADDED
@@ -0,0 +1,1100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ CDC Analyzer — Phase 4 of the Spec Pipeline
3
+ =============================================
4
+
5
+ Receives a feasibility-checked hardware specification and identifies every
6
+ signal that crosses a clock domain boundary. For each crossing it assigns
7
+ an exact synchronization strategy and generates CDC sub-module specifications
8
+ that the RTL generator must instantiate.
9
+
10
+ A missed CDC is the hardest class of silicon bug to debug — it may pass
11
+ simulation and fail only on real silicon under specific timing conditions.
12
+
13
+ Pipeline Steps:
14
+ 1. IDENTIFY CLOCK DOMAINS — Extract all distinct clock domains
15
+ 2. IDENTIFY CROSSING SIGNALS — Enumerate every cross-domain signal
16
+ 3. ASSIGN SYNCHRONIZATION STRATEGY — One strategy per crossing
17
+ 4. GENERATE CDC SUBMODULES — Submodule specs for the RTL generator
18
+ 5. OUTPUT — Enriched spec with CDC annotations
19
+ """
20
+
21
+ import json
22
+ import logging
23
+ import re
24
+ from dataclasses import asdict, dataclass, field
25
+ from typing import Any, Dict, List, Optional, Set, Tuple
26
+
27
+ logger = logging.getLogger(__name__)
28
+
29
+
30
# ─── Synchronization Strategy Constants ──────────────────────────────
# Strategy labels assigned to each clock-domain crossing. The module
# docstring lists the menu: 2-flop sync, pulse sync, async FIFO,
# handshake, reset sync.

SYNC_SINGLE_BIT = "SINGLE_BIT_SYNC"   # multi-flop synchronizer for one control bit
SYNC_PULSE = "PULSE_SYNC"             # pulse/toggle transfer between domains
SYNC_ASYNC_FIFO = "ASYNC_FIFO"        # gray-pointer dual-clock FIFO for data buses
SYNC_HANDSHAKE = "HANDSHAKE"          # req/ack handshake crossing
SYNC_RESET = "RESET_SYNC"             # async-assert / sync-deassert reset bridge
SYNC_QUASI_STATIC = "QUASI_STATIC"    # presumably for rarely-changing config values — confirm at use site
SYNC_UNRESOLVED = "CDC_UNRESOLVED"    # marker when no safe strategy could be chosen

# Minimum FIFO depth for async FIFOs
MIN_FIFO_DEPTH = 4      # NOTE(review): not referenced in the visible code — confirm it is used
DEFAULT_FIFO_DEPTH = 8  # baseline depth; raised to 16 for large frequency ratios

# Synchronizer depth thresholds
DEFAULT_SYNC_DEPTH = 2  # NOTE(review): depth is computed inline (3 if ratio high else 2) in
                        # _assign_sync_strategy; confirm this constant is wired in anywhere
HIGH_FREQ_RATIO_THRESHOLD = 4  # use 3-flop sync when ratio > 4:1
47
+
48
+
49
# ─── Signal Type Detection Patterns ─────────────────────────────────
# NOTE(review): in these alternations the \b anchors bind only to the FIRST
# and LAST branches — e.g. r"\brst|reset|..." parses as (\brst)|(reset)|...,
# so the middle branches match as bare substrings ("reset" matches inside
# "preset_value"). That looseness may be intentional for underscore-joined
# signal names (where \b would not fire), but confirm before tightening
# with a (?:...) group.

# Reset / asynchronous-reset signal names.
_RESET_PATTERNS = re.compile(
    r"\brst|reset|rstn|rst_n|arst|areset\b", re.IGNORECASE
)
# Handshake-protocol signal names (req/ack, valid/ready, grant).
_HANDSHAKE_PATTERNS = re.compile(
    r"\breq|ack|valid|ready|grant|request|acknowledge\b", re.IGNORECASE
)
# Verilog-style bus range, e.g. "[7:0]" — groups are (msb, lsb).
_BUS_WIDTH_PATTERN = re.compile(
    r"\[(\d+)\s*:\s*(\d+)\]"
)
# Data-bus signal names (distinguishes multi_bit_data from generic bus).
_DATA_BUS_PATTERNS = re.compile(
    r"\bdata|wdata|rdata|din|dout|payload|fifo_data|wr_data|rd_data\b",
    re.IGNORECASE,
)
# Clock signal names; matched ports are treated as clocks, never as data.
_CLOCK_PATTERNS = re.compile(
    r"\bclk|clock|pclk|hclk|fclk|aclk|sclk|mclk|bclk|clk_\w+\b",
    re.IGNORECASE,
)
# Single-bit enable/flag/status signal names.
_ENABLE_FLAG_PATTERNS = re.compile(
    r"\ben|enable|flag|strobe|pulse|irq|interrupt|trigger|start|done|busy|"
    r"empty|full|overflow|underflow|valid|error\b",
    re.IGNORECASE,
)
# Quasi-static configuration signal names.
_CONFIG_PATTERNS = re.compile(
    r"\bcfg|config|mode|ctrl|control|param|setting|threshold\b",
    re.IGNORECASE,
)

# Clock source keywords for domain identification
# Divided-clock hints in port names or submodule descriptions.
_CLK_DIVIDER_PATTERNS = re.compile(
    r"divid|prescal|div_by|divided|half_clk|clk_div", re.IGNORECASE
)
# Gated-clock hints in port names or submodule descriptions.
_CLK_GATE_PATTERNS = re.compile(
    r"gate|gated|clock_gate|clk_gate|cg_|icg", re.IGNORECASE
)
85
+
86
+
87
+ # ─── Output Dataclasses ─────────────────────────────────────────────
88
+
89
@dataclass
class ClockDomain:
    """A distinct clock domain in the design.

    ``submodules`` holds the names of submodules clocked by this domain;
    it is populated by ``CDCAnalyzer._assign_submodules_to_domains``.
    Field order matters: callers may construct this positionally.
    """
    domain_name: str
    source_clock_signal: str
    nominal_frequency_mhz: float = 0.0
    is_derived: bool = False  # True if divided/gated from another
    parent_domain: str = ""  # Name of parent domain if derived
    derivation_type: str = ""  # "divided" | "gated" | ""
    submodules: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        # Dataclass fields map 1:1 onto the serialized dict.
        return asdict(self)
102
+
103
+
104
@dataclass
class CrossingSignal:
    """A signal that crosses between two clock domains.

    NOTE(review): ``source_domain``/``destination_domain`` come from a
    lexicographic sort of the consuming domains in
    ``_identify_crossing_signals``, so the reported direction is a
    heuristic, not derived from port direction — confirm downstream
    consumers treat it that way.
    """
    signal_name: str
    source_domain: str
    destination_domain: str
    signal_type: str  # "single_bit_control" | "multi_bit_data" | "bus" |
                      # "handshake" | "reset"
    bit_width: int = 1
    direction: str = "unidirectional"  # "unidirectional" | "bidirectional"
    sync_strategy: str = ""  # one of the SYNC_* constants once assigned
    sync_details: Dict[str, Any] = field(default_factory=dict)  # strategy parameters (depth, FIFO sizing, …)
    unresolved_reason: str = ""  # presumably set when strategy is CDC_UNRESOLVED — confirm at assignment site

    def to_dict(self) -> Dict[str, Any]:
        # Dataclass fields map 1:1 onto the serialized dict.
        return asdict(self)
120
+
121
+
122
@dataclass
class CDCSubmodule:
    """A synchronization submodule that must be instantiated in the RTL."""
    module_name: str
    strategy: str  # SYNC_* constant this submodule implements
    ports: List[Dict[str, str]] = field(default_factory=list)  # port descriptors for the RTL generator
    parameters: Dict[str, Any] = field(default_factory=dict)  # e.g. sync depth, FIFO depth
    behavior: str = ""  # prose description of required behavior
    source_domain: str = ""
    destination_domain: str = ""

    def to_dict(self) -> Dict[str, Any]:
        # Dataclass fields map 1:1 onto the serialized dict.
        return asdict(self)
135
+
136
+
137
@dataclass
class CDCResult:
    """Output of the CDCAnalyzer.

    ``cdc_status`` values:
      - "SINGLE_DOMAIN": one clock domain, no CDC work required.
      - "MULTI_DOMAIN": multiple domains, all crossings resolved.
      - "UNRESOLVED": at least one crossing has no safe strategy.
    """
    cdc_status: str  # "SINGLE_DOMAIN" | "MULTI_DOMAIN" | "UNRESOLVED"
    clock_domains: List[ClockDomain] = field(default_factory=list)
    crossing_signals: List[CrossingSignal] = field(default_factory=list)
    cdc_submodules_added: List[CDCSubmodule] = field(default_factory=list)
    cdc_warnings: List[str] = field(default_factory=list)
    cdc_unresolved: List[str] = field(default_factory=list)
    domain_count: int = 0

    def to_dict(self) -> Dict[str, Any]:
        # Hand-built (rather than asdict) so nested dataclasses serialize
        # via their own to_dict and the list fields are fresh copies.
        return {
            "cdc_status": self.cdc_status,
            "clock_domains": [d.to_dict() for d in self.clock_domains],
            "crossing_signals": [c.to_dict() for c in self.crossing_signals],
            "cdc_submodules_added": [s.to_dict() for s in self.cdc_submodules_added],
            "cdc_warnings": list(self.cdc_warnings),
            "cdc_unresolved": list(self.cdc_unresolved),
            "domain_count": self.domain_count,
        }

    def to_json(self) -> str:
        # Pretty-printed JSON form of to_dict().
        return json.dumps(self.to_dict(), indent=2)
161
+
162
+
163
+ # ─── Main Class ──────────────────────────────────────────────────────
164
+
165
+ class CDCAnalyzer:
166
+ """
167
+ Identifies every clock domain crossing in a hardware specification and
168
+ assigns synchronization strategies before RTL generation.
169
+
170
+ Input: HardwareSpec dict (+ optional hierarchy/feasibility data)
171
+ Output: CDCResult with domains, crossings, sync submodules, and warnings
172
+ """
173
+
174
+ def analyze(
175
+ self,
176
+ hw_spec_dict: Dict[str, Any],
177
+ hierarchy_result_dict: Optional[Dict[str, Any]] = None,
178
+ ) -> CDCResult:
179
+ """
180
+ Run full CDC analysis on the spec.
181
+
182
+ Args:
183
+ hw_spec_dict: HardwareSpec.to_dict() output.
184
+ hierarchy_result_dict: Optional HierarchyResult.to_dict() for
185
+ expanded submodule clock information.
186
+
187
+ Returns:
188
+ CDCResult with full CDC analysis.
189
+ """
190
+ # Step 1: Identify all clock domains
191
+ domains = self._identify_clock_domains(hw_spec_dict, hierarchy_result_dict)
192
+
193
+ if len(domains) <= 1:
194
+ return CDCResult(
195
+ cdc_status="SINGLE_DOMAIN",
196
+ clock_domains=domains,
197
+ domain_count=len(domains),
198
+ cdc_warnings=["No CDC analysis required — single clock domain."]
199
+ if domains else [],
200
+ )
201
+
202
+ # Build domain relationship map
203
+ domain_map = {d.domain_name: d for d in domains}
204
+ async_pairs = self._find_async_domain_pairs(domains)
205
+
206
+ # Step 2: Identify all crossing signals
207
+ crossings = self._identify_crossing_signals(
208
+ hw_spec_dict, hierarchy_result_dict, domains, async_pairs
209
+ )
210
+
211
+ if not crossings:
212
+ return CDCResult(
213
+ cdc_status="MULTI_DOMAIN",
214
+ clock_domains=domains,
215
+ domain_count=len(domains),
216
+ cdc_warnings=[
217
+ f"Design has {len(domains)} clock domains but no "
218
+ f"cross-domain signals detected. Verify domain isolation."
219
+ ],
220
+ )
221
+
222
+ # Step 3: Assign synchronization strategy to each crossing
223
+ warnings: List[str] = []
224
+ unresolved: List[str] = []
225
+ for crossing in crossings:
226
+ self._assign_sync_strategy(crossing, domain_map, warnings, unresolved)
227
+
228
+ # Step 4: Generate CDC submodules
229
+ submodules = self._generate_cdc_submodules(crossings)
230
+
231
+ # Determine overall status
232
+ if unresolved:
233
+ status = "UNRESOLVED"
234
+ warnings.append(
235
+ f"CDC analysis has {len(unresolved)} unresolved crossing(s). "
236
+ f"RTL generation should not proceed until these are resolved."
237
+ )
238
+ else:
239
+ status = "MULTI_DOMAIN"
240
+
241
+ return CDCResult(
242
+ cdc_status=status,
243
+ clock_domains=domains,
244
+ crossing_signals=crossings,
245
+ cdc_submodules_added=submodules,
246
+ cdc_warnings=warnings,
247
+ cdc_unresolved=unresolved,
248
+ domain_count=len(domains),
249
+ )
250
+
251
+ # ── Step 1: Identify Clock Domains ───────────────────────────────
252
+
253
    def _identify_clock_domains(
        self,
        hw_spec_dict: Dict[str, Any],
        hierarchy_result_dict: Optional[Dict[str, Any]],
    ) -> List[ClockDomain]:
        """Extract every distinct clock domain from the spec.

        Sources scanned, in order: (1a) top-level ports, (1b) submodule
        ports with divided/gated-clock inference, (1c) an explicit
        ``clock_domains`` spec field, (1d) free-text hints in
        ``mandatory_fields_status``. Falls back to a single default ``clk``
        domain when nothing at all is found. Deduplication is by lowercase
        clock signal name.
        """
        domains: List[ClockDomain] = []
        seen_clocks: Set[str] = set()  # lowercase clock names already claimed

        # Spec frequency; `or 50` also catches an explicit 0/None value.
        target_freq = hw_spec_dict.get("target_frequency_mhz", 50) or 50

        # 1a. Scan top-level ports for clock signals
        top_ports = hw_spec_dict.get("ports", [])
        for port in top_ports:
            pname = port.get("name", "")
            if _CLOCK_PATTERNS.search(pname) and pname.lower() not in seen_clocks:
                seen_clocks.add(pname.lower())
                domains.append(ClockDomain(
                    domain_name=self._clock_to_domain_name(pname),
                    source_clock_signal=pname,
                    nominal_frequency_mhz=target_freq,
                ))

        # 1b. Scan submodule ports for additional clock signals
        all_submodules = self._collect_all_submodules(
            hw_spec_dict, hierarchy_result_dict
        )
        for sm in all_submodules:
            sm_ports = sm.get("ports", [])
            sm_name = sm.get("name", "")
            # Name + description, lowered, used for divider/gate keyword hints.
            sm_desc = f"{sm_name} {sm.get('description', '')}".lower()
            for port in sm_ports:
                pname = port.get("name", "")
                if _CLOCK_PATTERNS.search(pname) and pname.lower() not in seen_clocks:
                    seen_clocks.add(pname.lower())
                    # Try to determine if it's a derived clock
                    is_derived = False
                    parent = ""
                    derivation = ""
                    freq = target_freq

                    if _CLK_DIVIDER_PATTERNS.search(pname) or \
                       _CLK_DIVIDER_PATTERNS.search(sm_desc):
                        is_derived = True
                        derivation = "divided"
                        # Try to extract division factor (e.g. "div_4", "divide_by8")
                        div_match = re.search(r"div(?:ide)?(?:_by)?_?(\d+)",
                                              pname + " " + sm_desc, re.IGNORECASE)
                        if div_match:
                            divisor = int(div_match.group(1))
                            freq = target_freq / max(divisor, 1)
                        else:
                            freq = target_freq / 2  # assume /2 if not specified
                        # NOTE(review): the first discovered domain is assumed
                        # to be the parent of any derived clock — confirm.
                        parent = domains[0].domain_name if domains else ""

                    elif _CLK_GATE_PATTERNS.search(pname) or \
                         _CLK_GATE_PATTERNS.search(sm_desc):
                        is_derived = True
                        derivation = "gated"
                        parent = domains[0].domain_name if domains else ""

                    domains.append(ClockDomain(
                        domain_name=self._clock_to_domain_name(pname),
                        source_clock_signal=pname,
                        nominal_frequency_mhz=freq,
                        is_derived=is_derived,
                        parent_domain=parent,
                        derivation_type=derivation,
                    ))

        # 1c. Check spec-level clock_domains field (if the spec generator
        # explicitly listed them)
        spec_clock_domains = hw_spec_dict.get("clock_domains", [])
        if isinstance(spec_clock_domains, list):
            for cd in spec_clock_domains:
                # Entries may be dicts (several accepted key spellings) or
                # bare strings naming both the domain and its clock.
                if isinstance(cd, dict):
                    cd_name = cd.get("name", cd.get("domain", ""))
                    cd_clk = cd.get("clock", cd.get("signal", cd_name))
                    cd_freq = cd.get("frequency_mhz", cd.get("freq", target_freq))
                elif isinstance(cd, str):
                    cd_name = cd
                    cd_clk = cd
                    cd_freq = target_freq
                else:
                    continue
                if cd_clk.lower() not in seen_clocks:
                    seen_clocks.add(cd_clk.lower())
                    domains.append(ClockDomain(
                        domain_name=self._clock_to_domain_name(cd_name),
                        source_clock_signal=cd_clk,
                        nominal_frequency_mhz=cd_freq,
                    ))

        # 1d. Check mandatory_fields_status for clock_domains info
        mfs = hw_spec_dict.get("mandatory_fields_status", {})
        cd_info = mfs.get("clock_domains", {})
        if isinstance(cd_info, dict):
            val = cd_info.get("value", cd_info.get("inferred_value", ""))
            if isinstance(val, str) and val:
                # Parse numeric counts like "2 clocks" / "3 domains".
                # NOTE(review): wordy forms ("dual clock") contain no digit
                # and will NOT match this regex — confirm that is acceptable.
                multi_match = re.search(r"(\d+)\s*(?:clock|domain)", val, re.IGNORECASE)
                if multi_match and int(multi_match.group(1)) > 1 and len(domains) < 2:
                    # The spec says multiple clocks but we only found one in ports
                    # Add a placeholder second domain
                    domains.append(ClockDomain(
                        domain_name="domain_secondary",
                        source_clock_signal="clk_secondary",
                        nominal_frequency_mhz=target_freq,
                    ))

        # 1e. Assign submodules to domains
        self._assign_submodules_to_domains(domains, all_submodules)

        # If no clock was found at all, assume single domain
        # NOTE(review): this fallback runs AFTER submodule assignment, so
        # submodules are never attached to the fallback domain — confirm
        # whether that matters downstream.
        if not domains:
            domains.append(ClockDomain(
                domain_name="domain_clk",
                source_clock_signal="clk",
                nominal_frequency_mhz=target_freq,
            ))

        return domains
375
+
376
+ def _clock_to_domain_name(self, clock_signal: str) -> str:
377
+ """Convert a clock signal name to a domain name."""
378
+ name = clock_signal.lower().strip()
379
+ name = re.sub(r"[^a-z0-9_]", "_", name)
380
+ if not name.startswith("domain_"):
381
+ name = f"domain_{name}"
382
+ return name
383
+
384
+ def _assign_submodules_to_domains(
385
+ self,
386
+ domains: List[ClockDomain],
387
+ submodules: List[Dict[str, Any]],
388
+ ) -> None:
389
+ """Assign each submodule to its clock domain based on port connections."""
390
+ if not domains:
391
+ return
392
+
393
+ primary_domain = domains[0]
394
+ domain_clk_map: Dict[str, ClockDomain] = {}
395
+ for d in domains:
396
+ domain_clk_map[d.source_clock_signal.lower()] = d
397
+
398
+ for sm in submodules:
399
+ sm_name = sm.get("name", "")
400
+ sm_ports = sm.get("ports", [])
401
+ assigned = False
402
+ for port in sm_ports:
403
+ pname = port.get("name", "").lower()
404
+ if pname in domain_clk_map:
405
+ domain_clk_map[pname].submodules.append(sm_name)
406
+ assigned = True
407
+ break
408
+ if not assigned:
409
+ # Default: assign to primary domain
410
+ primary_domain.submodules.append(sm_name)
411
+
412
    def _find_async_domain_pairs(
        self, domains: List[ClockDomain]
    ) -> Set[Tuple[str, str]]:
        """Find all pairs of clock domains that are asynchronous to each other.

        Returns a symmetric set: if (a, b) is present, (b, a) is too.
        Pairs sharing the same source clock, or where one domain is divided
        or gated directly from the other, are treated as synchronous.

        NOTE(review): two domains each DERIVED from a common parent (e.g.
        clk/2 and clk/4 siblings) fall through all the checks below and are
        marked asynchronous. That is conservative (extra synchronizers, not
        missed ones) — confirm it is intended.
        """
        async_pairs: Set[Tuple[str, str]] = set()

        for i, d1 in enumerate(domains):
            for d2 in domains[i + 1:]:
                # Same source = synchronous
                if d1.source_clock_signal == d2.source_clock_signal:
                    continue

                # One domain divided directly from the other = synchronous
                if (d1.parent_domain and d1.parent_domain == d2.domain_name and
                        d1.derivation_type == "divided"):
                    continue
                if (d2.parent_domain and d2.parent_domain == d1.domain_name and
                        d2.derivation_type == "divided"):
                    continue

                # Gated clock from same source: potentially same domain but
                # may have enable timing issues — flag it
                if (d1.parent_domain == d2.domain_name and
                        d1.derivation_type == "gated"):
                    # Not truly async, but needs care
                    continue
                if (d2.parent_domain == d1.domain_name and
                        d2.derivation_type == "gated"):
                    continue

                # All other pairs are asynchronous — record both orderings.
                async_pairs.add((d1.domain_name, d2.domain_name))
                async_pairs.add((d2.domain_name, d1.domain_name))

        return async_pairs
447
+
448
+ # ── Step 2: Identify Crossing Signals ────────────────────────────
449
+
450
    def _identify_crossing_signals(
        self,
        hw_spec_dict: Dict[str, Any],
        hierarchy_result_dict: Optional[Dict[str, Any]],
        domains: List[ClockDomain],
        async_pairs: Set[Tuple[str, str]],
    ) -> List[CrossingSignal]:
        """Find all signals that cross between asynchronous clock domains.

        Heuristic: a signal "crosses" when the same port name appears on
        submodules that belong to different async domains. Reset and
        handshake crossings are then added by dedicated helpers.

        NOTE(review): source/destination of each crossing come from a
        lexicographic sort of the consuming domains, not from port
        direction, so the reported direction is heuristic.
        """
        crossings: List[CrossingSignal] = []
        seen: Set[str] = set()  # "signal:src->dst" keys already recorded

        # No async pairs means no crossing can exist.
        if not async_pairs:
            return crossings

        # Build submodule → domain map
        sm_domain_map: Dict[str, str] = {}
        for domain in domains:
            for sm_name in domain.submodules:
                sm_domain_map[sm_name] = domain.domain_name

        all_submodules = self._collect_all_submodules(
            hw_spec_dict, hierarchy_result_dict
        )

        # For each submodule, check if any of its ports connect to a
        # submodule in a different (async) domain
        sm_by_name: Dict[str, Dict[str, Any]] = {}
        for sm in all_submodules:
            sm_by_name[sm.get("name", "")] = sm

        # Strategy: Look for signals that appear in ports of submodules
        # belonging to different async domains. Also check top-level
        # ports that fan out to multiple domains.
        port_consumers: Dict[str, List[Tuple[str, str]]] = {}
        # port_consumers[signal_name] = [(submodule, domain), ...]

        for sm in all_submodules:
            sm_name = sm.get("name", "")
            # Submodules without an assigned domain default to the primary.
            sm_domain = sm_domain_map.get(sm_name, domains[0].domain_name)
            for port in sm.get("ports", []):
                pname = port.get("name", "")
                if _CLOCK_PATTERNS.search(pname):
                    continue  # Skip clock signals themselves
                if pname not in port_consumers:
                    port_consumers[pname] = []
                port_consumers[pname].append((sm_name, sm_domain))

        # Find signals consumed by submodules in different async domains
        for sig_name, consumers in port_consumers.items():
            consumer_domains = {dom for _, dom in consumers}
            if len(consumer_domains) < 2:
                continue

            # Check each pair of consuming domains (sorted for determinism)
            domain_list = sorted(consumer_domains)
            for i, d1 in enumerate(domain_list):
                for d2 in domain_list[i + 1:]:
                    if (d1, d2) in async_pairs:
                        key = f"{sig_name}:{d1}->{d2}"
                        if key in seen:
                            continue
                        # Record both directions so the reverse is not
                        # reported as a second crossing.
                        seen.add(key)
                        seen.add(f"{sig_name}:{d2}->{d1}")

                        # Determine signal type and width
                        sig_type, bit_width = self._classify_signal(
                            sig_name, port_consumers[sig_name], sm_by_name
                        )

                        crossings.append(CrossingSignal(
                            signal_name=sig_name,
                            source_domain=d1,
                            destination_domain=d2,
                            signal_type=sig_type,
                            bit_width=bit_width,
                            direction="unidirectional",
                        ))

        # Also check for reset signals crossing domains
        self._check_reset_crossings(
            domains, async_pairs, hw_spec_dict, crossings, seen
        )

        # Check for inter-domain handshake pairs
        self._check_handshake_crossings(
            domains, async_pairs, all_submodules, sm_domain_map, crossings, seen
        )

        return crossings
539
+
540
+ def _classify_signal(
541
+ self,
542
+ signal_name: str,
543
+ consumers: List[Tuple[str, str]],
544
+ sm_by_name: Dict[str, Dict[str, Any]],
545
+ ) -> Tuple[str, int]:
546
+ """Classify a signal as single_bit_control, multi_bit_data, bus, handshake, or reset."""
547
+ name_lower = signal_name.lower()
548
+
549
+ # Reset?
550
+ if _RESET_PATTERNS.search(name_lower):
551
+ return "reset", 1
552
+
553
+ # Check port data type for width
554
+ bit_width = 1
555
+ for sm_name, _ in consumers:
556
+ sm = sm_by_name.get(sm_name, {})
557
+ for port in sm.get("ports", []):
558
+ if port.get("name", "") == signal_name:
559
+ dtype = port.get("data_type", "")
560
+ width_match = _BUS_WIDTH_PATTERN.search(dtype)
561
+ if width_match:
562
+ hi = int(width_match.group(1))
563
+ lo = int(width_match.group(2))
564
+ bit_width = abs(hi - lo) + 1
565
+ break
566
+ if bit_width > 1:
567
+ break
568
+
569
+ # Handshake?
570
+ if _HANDSHAKE_PATTERNS.search(name_lower):
571
+ if bit_width == 1:
572
+ return "handshake", 1
573
+ return "handshake", bit_width
574
+
575
+ # Multi-bit data or bus?
576
+ if bit_width > 1:
577
+ if _DATA_BUS_PATTERNS.search(name_lower):
578
+ return "multi_bit_data", bit_width
579
+ return "bus", bit_width
580
+
581
+ # Single-bit control/flag/enable
582
+ if _ENABLE_FLAG_PATTERNS.search(name_lower):
583
+ return "single_bit_control", 1
584
+
585
+ # Default: single-bit control
586
+ return "single_bit_control", 1
587
+
588
+ def _check_reset_crossings(
589
+ self,
590
+ domains: List[ClockDomain],
591
+ async_pairs: Set[Tuple[str, str]],
592
+ hw_spec_dict: Dict[str, Any],
593
+ crossings: List[CrossingSignal],
594
+ seen: Set[str],
595
+ ) -> None:
596
+ """Ensure every reset signal crossing a domain boundary is captured."""
597
+ top_ports = hw_spec_dict.get("ports", [])
598
+ reset_signals = [
599
+ p.get("name", "") for p in top_ports
600
+ if _RESET_PATTERNS.search(p.get("name", ""))
601
+ ]
602
+
603
+ if not reset_signals:
604
+ return
605
+
606
+ # Every reset that enters the chip must be synchronized to each
607
+ # async domain it serves
608
+ primary_domain = domains[0].domain_name if domains else ""
609
+ for rst_sig in reset_signals:
610
+ for domain in domains:
611
+ if domain.domain_name == primary_domain:
612
+ continue
613
+ if (primary_domain, domain.domain_name) not in async_pairs:
614
+ continue
615
+ key = f"{rst_sig}:{primary_domain}->{domain.domain_name}"
616
+ if key in seen:
617
+ continue
618
+ seen.add(key)
619
+ crossings.append(CrossingSignal(
620
+ signal_name=rst_sig,
621
+ source_domain=primary_domain,
622
+ destination_domain=domain.domain_name,
623
+ signal_type="reset",
624
+ bit_width=1,
625
+ direction="unidirectional",
626
+ ))
627
+
628
+ def _check_handshake_crossings(
629
+ self,
630
+ domains: List[ClockDomain],
631
+ async_pairs: Set[Tuple[str, str]],
632
+ submodules: List[Dict[str, Any]],
633
+ sm_domain_map: Dict[str, str],
634
+ crossings: List[CrossingSignal],
635
+ seen: Set[str],
636
+ ) -> None:
637
+ """Detect req/ack handshake pairs that span domains."""
638
+ # Look for paired req/ack or valid/ready signals
639
+ req_ack_pairs = []
640
+ for sm in submodules:
641
+ sm_name = sm.get("name", "")
642
+ ports = sm.get("ports", [])
643
+ port_names = [p.get("name", "") for p in ports]
644
+ for pn in port_names:
645
+ pn_lower = pn.lower()
646
+ # Find req→ack pairs
647
+ if "req" in pn_lower:
648
+ ack_name = pn_lower.replace("req", "ack")
649
+ for pn2 in port_names:
650
+ if pn2.lower() == ack_name:
651
+ req_ack_pairs.append((pn, pn2, sm_name))
652
+ # Find valid→ready pairs
653
+ if "valid" in pn_lower:
654
+ ready_name = pn_lower.replace("valid", "ready")
655
+ for pn2 in port_names:
656
+ if pn2.lower() == ready_name:
657
+ req_ack_pairs.append((pn, pn2, sm_name))
658
+
659
+ # Mark bidirectional if req/ack span domains
660
+ for req_sig, ack_sig, sm_name in req_ack_pairs:
661
+ sm_dom = sm_domain_map.get(sm_name, "")
662
+ for domain in domains:
663
+ if domain.domain_name == sm_dom:
664
+ continue
665
+ if (sm_dom, domain.domain_name) in async_pairs:
666
+ key = f"{req_sig}:{sm_dom}->{domain.domain_name}"
667
+ if key not in seen:
668
+ seen.add(key)
669
+ crossings.append(CrossingSignal(
670
+ signal_name=f"{req_sig}/{ack_sig}",
671
+ source_domain=sm_dom,
672
+ destination_domain=domain.domain_name,
673
+ signal_type="handshake",
674
+ bit_width=1,
675
+ direction="bidirectional",
676
+ ))
677
+
678
+ # ── Step 3: Assign Synchronization Strategy ──────────────────────
679
+
680
def _assign_sync_strategy(
    self,
    crossing: CrossingSignal,
    domain_map: Dict[str, ClockDomain],
    warnings: List[str],
    unresolved: List[str],
) -> None:
    """Attach the appropriate synchronization strategy to one crossing.

    Mutates `crossing` in place (sync_strategy / sync_details /
    unresolved_reason) and may append notes to the shared `warnings` /
    `unresolved` lists. Branch order matters: reset, then multi-bit
    data, then handshake, then single-bit control, then config-like
    names, then the unresolved fallback.
    """
    src = domain_map.get(crossing.source_domain)
    dst = domain_map.get(crossing.destination_domain)

    # Frequency ratio decides synchronizer depth: 3 flops when the two
    # clocks are far apart, 2 otherwise. Unknown domains assume 50 MHz.
    src_mhz = src.nominal_frequency_mhz if src else 50
    dst_mhz = dst.nominal_frequency_mhz if dst else 50
    ratio = max(src_mhz, dst_mhz) / max(min(src_mhz, dst_mhz), 1)
    depth = 3 if ratio > HIGH_FREQ_RATIO_THRESHOLD else 2

    kind = crossing.signal_type
    width = crossing.bit_width
    name = crossing.signal_name

    def two_flop_details() -> Dict[str, Any]:
        # Shared detail dict for the plain N-stage synchronizer cases.
        return {
            "sync_depth": depth,
            "dont_touch": True,
            "behavior": (
                f"{depth}-stage synchronizer, both flops have "
                "dont_touch attribute for synthesis"
            ),
        }

    # Resets always get async-assert / sync-deassert treatment.
    if kind == "reset":
        crossing.sync_strategy = SYNC_RESET
        crossing.sync_details = {
            "origin_domain": crossing.source_domain,
            "target_domain": crossing.destination_domain,
            "behavior": "Asynchronous assert, synchronous deassert",
            "sync_depth": depth,
        }
        return

    # Multi-bit data must never go through a plain 2-flop synchronizer.
    if width > 1 and kind in ("multi_bit_data", "bus"):
        crossing.sync_strategy = SYNC_ASYNC_FIFO
        # Deeper FIFO when the frequency ratio is large.
        fifo_depth = 16 if ratio > 4 else DEFAULT_FIFO_DEPTH
        ptr_width = self._gray_pointer_width(fifo_depth)
        crossing.sync_details = {
            "data_width": width,
            "fifo_depth": fifo_depth,
            "gray_pointer_width": ptr_width,
            "behavior": (
                "Gray-coded read/write pointers, pointer sync via "
                f"{depth}-flop synchronizers, no combinational "
                "paths between clock domains"
            ),
        }
        warnings.append(
            f"CDC: Signal '{name}' ({width}-bit) crosses from "
            f"{crossing.source_domain} to {crossing.destination_domain}. "
            f"Assigned ASYNC_FIFO (depth={fifo_depth})."
        )
        return

    # Handshake signals: protocol for multi-bit, 2-flop for single-bit.
    if kind == "handshake":
        if width > 1:
            crossing.sync_strategy = SYNC_HANDSHAKE
            crossing.sync_details = {
                "req_signal": f"{name}_req",
                "ack_signal": f"{name}_ack",
                "data_width": width,
                "behavior": (
                    "4-phase handshake: req asserted with stable data, "
                    "ack confirms receipt, req deasserted, ack deasserted"
                ),
            }
        else:
            # Level control (req/ack/valid/ready) is safe through a
            # plain multi-stage synchronizer.
            crossing.sync_strategy = SYNC_SINGLE_BIT
            crossing.sync_details = two_flop_details()
        return

    # Single-bit control/enable/flag.
    if width == 1 and kind == "single_bit_control":
        # A pulse from a faster domain may be shorter than one
        # destination clock period and would be lost by a plain sync.
        pulse_like = any(
            kw in name.lower()
            for kw in ["pulse", "strobe", "trigger", "irq", "interrupt"]
        )
        if pulse_like and src_mhz > dst_mhz:
            crossing.sync_strategy = SYNC_PULSE
            crossing.sync_details = {
                "behavior": (
                    "Toggle flip-flop in source domain, "
                    f"{depth}-flop synchronizer in destination domain, "
                    "edge detector to regenerate pulse"
                ),
                "sync_depth": depth,
            }
            warnings.append(
                f"CDC: Signal '{name}' is a likely pulse crossing from "
                f"fast ({src_mhz} MHz) to slow ({dst_mhz} MHz) domain. "
                f"Using PULSE_SYNC to avoid missing it."
            )
        else:
            crossing.sync_strategy = SYNC_SINGLE_BIT
            crossing.sync_details = two_flop_details()
        return

    # Config / quasi-static names (matched by name pattern).
    if _CONFIG_PATTERNS.search(name):
        if width == 1:
            crossing.sync_strategy = SYNC_SINGLE_BIT
            crossing.sync_details = {
                "sync_depth": depth,
                "dont_touch": True,
                "behavior": (
                    f"{depth}-stage synchronizer with dont_touch"
                ),
            }
            warnings.append(
                f"CDC: Config signal '{name}' crosses domains. "
                f"If it only changes during configuration/reset, consider "
                f"QUASI_STATIC with documented guarantee."
            )
        else:
            # Multi-bit config is low bandwidth: a handshake suffices.
            crossing.sync_strategy = SYNC_HANDSHAKE
            crossing.sync_details = {
                "req_signal": f"{name}_cfg_req",
                "ack_signal": f"{name}_cfg_ack",
                "data_width": width,
                "behavior": (
                    "4-phase handshake for configuration register update"
                ),
            }
            warnings.append(
                f"CDC: Multi-bit config signal '{name}' ({width}-bit) "
                f"crosses domains. Using HANDSHAKE. If only set during reset, "
                f"consider QUASI_STATIC."
            )
        return

    # Nothing matched: surface the crossing for manual review.
    crossing.sync_strategy = SYNC_UNRESOLVED
    crossing.unresolved_reason = (
        f"Cannot confidently classify signal '{name}' "
        f"(type={kind}, width={width}) for automatic sync assignment."
    )
    unresolved.append(
        f"CDC_UNRESOLVED: {name} crossing from "
        f"{crossing.source_domain} to {crossing.destination_domain}. "
        f"Reason: {crossing.unresolved_reason}"
    )
848
+
849
+ def _gray_pointer_width(self, fifo_depth: int) -> int:
850
+ """Calculate Gray code pointer width for a given FIFO depth."""
851
+ import math
852
+ return max(2, int(math.ceil(math.log2(max(fifo_depth, 2)))) + 1)
853
+
854
+ # ── Step 4: Generate CDC Submodules ──────────────────────────────
855
+
856
def _generate_cdc_submodules(
    self, crossings: List[CrossingSignal]
) -> List[CDCSubmodule]:
    """Generate synchronization submodule specs for the RTL generator.

    One submodule per distinct (strategy, sanitized signal) pair;
    unresolved crossings are skipped and duplicates are suppressed.
    """

    def port(name: str, direction: str, data_type: str,
             description: str) -> Dict[str, str]:
        # Shorthand for the repetitive port-dict literals below.
        return {
            "name": name,
            "direction": direction,
            "data_type": data_type,
            "description": description,
        }

    specs: List[CDCSubmodule] = []
    emitted: Set[str] = set()

    for crossing in crossings:
        if crossing.sync_strategy == SYNC_UNRESOLVED:
            continue

        strategy = crossing.sync_strategy
        base = self._sanitize_name(crossing.signal_name)

        if strategy == SYNC_SINGLE_BIT:
            mod_name = f"cdc_sync_{base}"
            if mod_name in emitted:
                continue
            emitted.add(mod_name)
            depth = crossing.sync_details.get("sync_depth", 2)
            specs.append(CDCSubmodule(
                module_name=mod_name,
                strategy=strategy,
                ports=[
                    port("clk_dst", "input", "logic", "Destination clock"),
                    port("rst_n_dst", "input", "logic",
                         "Destination reset (active-low)"),
                    port("sig_src", "input", "logic", "Source domain signal"),
                    port("sig_dst_synced", "output", "logic",
                         "Synchronized signal in destination domain"),
                ],
                parameters={"SYNC_DEPTH": depth},
                behavior=(
                    f"{depth}-stage synchronizer chain. All flops have "
                    "(* dont_touch = \"true\" *) attribute to prevent "
                    "synthesis optimization. Reset clears all stages."
                ),
                source_domain=crossing.source_domain,
                destination_domain=crossing.destination_domain,
            ))

        elif strategy == SYNC_PULSE:
            mod_name = f"cdc_pulse_sync_{base}"
            if mod_name in emitted:
                continue
            emitted.add(mod_name)
            depth = crossing.sync_details.get("sync_depth", 2)
            specs.append(CDCSubmodule(
                module_name=mod_name,
                strategy=strategy,
                ports=[
                    port("clk_src", "input", "logic", "Source clock"),
                    port("rst_n_src", "input", "logic",
                         "Source reset (active-low)"),
                    port("clk_dst", "input", "logic", "Destination clock"),
                    port("rst_n_dst", "input", "logic",
                         "Destination reset (active-low)"),
                    port("pulse_src", "input", "logic",
                         "Single-cycle pulse in source domain"),
                    port("pulse_dst", "output", "logic",
                         "Regenerated pulse in destination domain"),
                ],
                parameters={"SYNC_DEPTH": depth},
                behavior=(
                    "Toggle flip-flop captures pulse in source domain. "
                    f"{depth}-flop synchronizer transfers toggle to "
                    "destination domain. XOR edge detector regenerates "
                    "single-cycle pulse in destination domain."
                ),
                source_domain=crossing.source_domain,
                destination_domain=crossing.destination_domain,
            ))

        elif strategy == SYNC_ASYNC_FIFO:
            data_width = crossing.sync_details.get("data_width", 8)
            fifo_depth = crossing.sync_details.get("fifo_depth", DEFAULT_FIFO_DEPTH)
            ptr_width = crossing.sync_details.get("gray_pointer_width", 4)
            mod_name = f"cdc_fifo_{base}"
            if mod_name in emitted:
                continue
            emitted.add(mod_name)
            bus_type = f"logic [{data_width - 1}:0]"
            specs.append(CDCSubmodule(
                module_name=mod_name,
                strategy=strategy,
                ports=[
                    port("wr_clk", "input", "logic", "Write clock"),
                    port("wr_rst_n", "input", "logic",
                         "Write reset (active-low)"),
                    port("wr_en", "input", "logic", "Write enable"),
                    port("wr_data", "input", bus_type, "Write data"),
                    port("wr_full", "output", "logic", "FIFO full flag"),
                    port("rd_clk", "input", "logic", "Read clock"),
                    port("rd_rst_n", "input", "logic",
                         "Read reset (active-low)"),
                    port("rd_en", "input", "logic", "Read enable"),
                    port("rd_data", "output", bus_type, "Read data"),
                    port("rd_empty", "output", "logic", "FIFO empty flag"),
                ],
                parameters={
                    "DATA_WIDTH": data_width,
                    "FIFO_DEPTH": fifo_depth,
                    "PTR_WIDTH": ptr_width,
                },
                behavior=(
                    f"Asynchronous FIFO with {fifo_depth}-deep buffer. "
                    f"Gray-coded {ptr_width}-bit read and write pointers. "
                    "Pointer synchronization via 2-flop synchronizers. "
                    "No combinational paths between write and read clock "
                    "domains. Full/empty generation from synchronized "
                    "Gray pointers."
                ),
                source_domain=crossing.source_domain,
                destination_domain=crossing.destination_domain,
            ))

        elif strategy == SYNC_HANDSHAKE:
            data_width = crossing.sync_details.get("data_width", 1)
            req_sig = crossing.sync_details.get("req_signal", f"{base}_req")
            ack_sig = crossing.sync_details.get("ack_signal", f"{base}_ack")
            mod_name = f"cdc_handshake_{base}"
            if mod_name in emitted:
                continue
            emitted.add(mod_name)
            hs_ports = [
                port("clk_src", "input", "logic", "Source clock"),
                port("rst_n_src", "input", "logic", "Source reset"),
                port("clk_dst", "input", "logic", "Destination clock"),
                port("rst_n_dst", "input", "logic", "Destination reset"),
                port(req_sig, "output", "logic", "Request signal"),
                port(ack_sig, "input", "logic", "Acknowledge signal"),
            ]
            if data_width > 1:
                # Data path is only present for multi-bit transfers.
                bus_type = f"logic [{data_width - 1}:0]"
                hs_ports.append(port("data_src", "input", bus_type,
                                     "Source data"))
                hs_ports.append(port("data_dst", "output", bus_type,
                                     "Destination data (valid when ack)"))
            specs.append(CDCSubmodule(
                module_name=mod_name,
                strategy=strategy,
                ports=hs_ports,
                parameters={"DATA_WIDTH": data_width},
                behavior=(
                    "4-phase handshake protocol: (1) source asserts req "
                    "with stable data, (2) destination synchronizes req "
                    "and asserts ack, (3) source deasserts req, "
                    "(4) destination deasserts ack. Data must remain "
                    "stable from req assert to ack assert."
                ),
                source_domain=crossing.source_domain,
                destination_domain=crossing.destination_domain,
            ))

        elif strategy == SYNC_RESET:
            mod_name = f"cdc_reset_sync_{base}"
            if mod_name in emitted:
                continue
            emitted.add(mod_name)
            depth = crossing.sync_details.get("sync_depth", 2)
            specs.append(CDCSubmodule(
                module_name=mod_name,
                strategy=strategy,
                ports=[
                    port("clk_dst", "input", "logic", "Destination clock"),
                    port("rst_async_n", "input", "logic",
                         "Asynchronous reset input (active-low)"),
                    port("rst_sync_n", "output", "logic",
                         "Synchronized reset output (active-low)"),
                ],
                parameters={"SYNC_DEPTH": depth},
                behavior=(
                    "Asynchronous assert, synchronous deassert reset "
                    f"synchronizer. {depth}-flop chain clocked by "
                    "destination clock. Reset assertion is immediate "
                    "(async), deassertion is synchronized to destination "
                    "clock to prevent metastability."
                ),
                source_domain=crossing.source_domain,
                destination_domain=crossing.destination_domain,
            ))

    return specs
1064
+
1065
+ # ── Utility Methods ──────────────────────────────────────────────
1066
+
1067
+ def _collect_all_submodules(
1068
+ self,
1069
+ hw_spec_dict: Dict[str, Any],
1070
+ hierarchy_result_dict: Optional[Dict[str, Any]],
1071
+ ) -> List[Dict[str, Any]]:
1072
+ """Flatten all submodules from both spec and hierarchy result."""
1073
+ all_subs: List[Dict[str, Any]] = []
1074
+
1075
+ # From spec
1076
+ for sm in hw_spec_dict.get("submodules", []):
1077
+ if isinstance(sm, dict):
1078
+ all_subs.append(sm)
1079
+
1080
+ # From hierarchy result (may have nested specs)
1081
+ if hierarchy_result_dict:
1082
+ for sm in hierarchy_result_dict.get("submodules", []):
1083
+ if isinstance(sm, dict):
1084
+ all_subs.append(sm)
1085
+ # Recurse into nested_spec
1086
+ nested = sm.get("nested_spec")
1087
+ if isinstance(nested, dict):
1088
+ for nsm in nested.get("submodules", []):
1089
+ if isinstance(nsm, dict):
1090
+ all_subs.append(nsm)
1091
+
1092
+ return all_subs
1093
+
1094
+ def _sanitize_name(self, name: str) -> str:
1095
+ """Convert a signal name to a valid Verilog identifier fragment."""
1096
+ # Remove slash-separated compound names
1097
+ name = name.replace("/", "_")
1098
+ name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
1099
+ name = re.sub(r"_+", "_", name).strip("_")
1100
+ return name.lower()
src/agentic/core/feasibility_checker.py ADDED
@@ -0,0 +1,826 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Feasibility Checker — Phase 3 of the Spec Pipeline
3
+ ====================================================
4
+
5
+ Receives a fully expanded hierarchical hardware specification and evaluates
6
+ whether the design is physically realizable on Sky130 within the OpenLane
7
+ RTL-to-GDS flow — before a single line of RTL is written.
8
+
9
+ Pipeline Steps:
10
+ 1. FREQUENCY — Clock target vs. Sky130 achievable limits
11
+ 2. MEMORY — Storage structures vs. register / OpenRAM thresholds
12
+ 3. ARITHMETIC — Multiplier / divider / FPU gate-cost on Sky130
13
+ 4. AREA — Total gate-equivalent budget and floorplan sizing
14
+ 5. SKY130 — PDK-specific incompatibility scan
15
+ 6. OUTPUT — Annotated spec with feasibility verdict
16
+ """
17
+
18
+ import json
19
+ import logging
20
+ import math
21
+ import re
22
+ from dataclasses import asdict, dataclass, field
23
+ from typing import Any, Dict, List, Optional, Tuple
24
+
25
+ logger = logging.getLogger(__name__)
26
+
27
+
28
# ─── Area Estimation Constants (Gate Equivalents) ────────────────────
# 1 GE = one 2-input NAND gate on Sky130

# Rough GE costs for common building blocks, consumed by the area
# estimator. These are order-of-magnitude heuristics, not synthesis
# results.
GE_ESTIMATES: Dict[str, int] = {
    # Registers (≈6 GE per flip-flop)
    "1_bit_ff": 6,
    "8_bit_register": 48,
    "16_bit_register": 96,
    "32_bit_register": 192,
    # Register files
    "32x32_regfile": 6144,
    "32x64_regfile": 12288,
    "16x32_regfile": 3072,
    # Arithmetic (multipliers grow roughly quadratically with width)
    "8_bit_adder": 40,
    "16_bit_adder": 80,
    "32_bit_adder": 160,
    "8x8_multiplier": 200,
    "16x16_multiplier": 1000,
    "32x32_multiplier": 4000,
    # Logic
    "4_state_fsm": 100,
    "8_state_fsm": 250,
    "16_state_fsm": 600,
    # Interfaces
    "uart_115200": 500,
    "spi_master": 800,
    "i2c_master": 1200,
    "apb_slave": 600,
    # Processors
    "riscv_5stage_no_cache": 20000,
}

# Floorplan size mapping (GE → recommended area). The first tier whose
# GE budget covers the design is chosen.
FLOORPLAN_TIERS = [
    (5_000, "TinyTapeout tile (130×160 μm)", "130x160"),
    (50_000, "Chipignite medium (500×500 μm)", "500x500"),
    (200_000, "Chipignite large (1000×1000 μm)", "1000x1000"),
    (500_000, "Multi-tile (2000×2000 μm)", "2000x2000"),
]
68
+
69
+
70
+ # ─── Submodule-type GE heuristic keywords ───────────────────────────
71
+
72
+ _GE_KEYWORD_MAP: List[Tuple[List[str], int, str]] = [
73
+ # (keywords, base_ge, description)
74
+ (["riscv", "risc-v", "rv32", "rv64", "processor", "cpu"], 20000, "RISC-V / CPU core"),
75
+ (["uart"], 500, "UART controller"),
76
+ (["spi"], 800, "SPI controller"),
77
+ (["i2c"], 1200, "I2C controller"),
78
+ (["apb", "axi", "wishbone"], 600, "Bus interface"),
79
+ (["alu"], 500, "ALU"),
80
+ (["multiplier", "multiply"], 1000, "Multiplier"),
81
+ (["divider", "divide"], 1500, "Divider"),
82
+ (["fpu", "floating point"], 5000, "Floating-point unit"),
83
+ (["register_file", "regfile", "register file"], 6144, "Register file"),
84
+ (["fifo"], 400, "FIFO buffer"),
85
+ (["cache"], 8000, "Cache"),
86
+ (["dma"], 3000, "DMA controller"),
87
+ (["arbiter", "arbitration"], 300, "Arbiter"),
88
+ (["interrupt", "irq"], 400, "Interrupt controller"),
89
+ (["program_counter", "pc"], 200, "Program counter"),
90
+ (["instruction_fetch", "fetch"], 800, "Instruction fetch"),
91
+ (["instruction_decode", "decode"], 1000, "Instruction decode"),
92
+ (["writeback"], 400, "Writeback stage"),
93
+ (["hazard"], 500, "Hazard unit"),
94
+ (["branch_predict"], 1500, "Branch predictor"),
95
+ (["pipeline_register", "pipe_reg"], 200, "Pipeline register"),
96
+ (["control_unit", "control_logic"], 300, "Control unit"),
97
+ (["state_machine", "fsm"], 100, "State machine"),
98
+ (["shift_register", "barrel_shifter"], 200, "Shift register / barrel shifter"),
99
+ (["counter"], 100, "Counter"),
100
+ (["comparator"], 50, "Comparator"),
101
+ (["mux", "multiplexer"], 30, "Multiplexer"),
102
+ (["adder"], 160, "Adder"),
103
+ (["memory_array", "sram", "ram", "rom"], 2000, "Memory array"),
104
+ (["address_decoder", "decoder"], 100, "Address decoder"),
105
+ (["output_register"], 48, "Output register"),
106
+ (["data_buffer", "buffer"], 200, "Data buffer"),
107
+ (["status_register"], 48, "Status register"),
108
+ (["clock_divider"], 80, "Clock divider"),
109
+ ]
110
+
111
+
112
+ # ─── Output Dataclasses ─────────────────────────────────────────────
113
+
114
@dataclass
class MacroRequirement:
    """OpenRAM macro specification for large memories.

    Fix: `size_bits` previously defaulted to 0 and stayed wrong unless
    the caller remembered to pass it explicitly. It is now derived as
    width_bits × depth_words when left at the default; an explicit
    non-zero value is kept untouched, so existing callers are unaffected.
    """
    submodule_name: str
    width_bits: int           # word width in bits
    depth_words: int          # number of words
    read_ports: int = 1
    write_ports: int = 1
    size_bits: int = 0        # total capacity; 0 means "derive for me"

    def __post_init__(self) -> None:
        # Derive total capacity when the caller did not supply it.
        if self.size_bits == 0:
            self.size_bits = self.width_bits * self.depth_words

    def to_dict(self) -> Dict[str, Any]:
        return asdict(self)
126
+
127
+
128
@dataclass
class FeasibilityResult:
    """Verdict and supporting detail produced by FeasibilityChecker."""
    feasibility_status: str  # "PASS" | "WARN" | "REJECT"
    estimated_gate_equivalents: int = 0
    recommended_floorplan_size_um: str = ""
    target_frequency_mhz: int = 50
    memory_macros_required: List[MacroRequirement] = field(default_factory=list)
    feasibility_warnings: List[str] = field(default_factory=list)
    feasibility_rejections: List[str] = field(default_factory=list)
    # Per-submodule GE contributions from the area estimator.
    area_breakdown: Dict[str, int] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to plain types; lists/dicts are shallow copies so
        callers cannot mutate this result through the returned dict."""
        return {
            "feasibility_status": self.feasibility_status,
            "estimated_gate_equivalents": self.estimated_gate_equivalents,
            "recommended_floorplan_size_um": self.recommended_floorplan_size_um,
            "target_frequency_mhz": self.target_frequency_mhz,
            "memory_macros_required": [m.to_dict() for m in self.memory_macros_required],
            "feasibility_warnings": list(self.feasibility_warnings),
            "feasibility_rejections": list(self.feasibility_rejections),
            "area_breakdown": dict(self.area_breakdown),
        }

    def to_json(self) -> str:
        """Pretty-printed JSON form of to_dict()."""
        return json.dumps(self.to_dict(), indent=2)
155
+
156
+
157
+ # ─── Main Class ──────────────────────────────────────────────────────
158
+
159
+ class FeasibilityChecker:
160
+ """
161
+ Evaluates whether a hardware specification is physically realizable
162
+ on Sky130 within the OpenLane automated flow.
163
+
164
+ Checks: frequency, memory sizing, arithmetic complexity, total area,
165
+ and Sky130-specific incompatibilities. Produces a PASS / WARN / REJECT
166
+ verdict with detailed justification.
167
+ """
168
+
169
+ def __init__(self, pdk: str = "sky130"):
170
+ self.pdk = pdk
171
+
172
+ # ── Public API ───────────────────────────────────────────────────
173
+
174
def check(
    self,
    hw_spec_dict: Dict[str, Any],
    hierarchy_result_dict: Optional[Dict[str, Any]] = None,
) -> FeasibilityResult:
    """Run every feasibility check against the spec.

    Args:
        hw_spec_dict: HardwareSpec.to_dict() output.
        hierarchy_result_dict: Optional HierarchyResult.to_dict() for
            expanded submodule analysis.

    Returns:
        FeasibilityResult carrying the PASS/WARN/REJECT verdict plus
        warnings, rejections, required macros and the area breakdown.
    """
    warnings: List[str] = []
    rejections: List[str] = []

    # Fall back to 50 MHz when the spec gives no usable clock target.
    target_freq = hw_spec_dict.get("target_frequency_mhz", 0)
    if not target_freq or target_freq <= 0:
        target_freq = 50
        warnings.append(
            "INFERRED: target_frequency_mhz was 0 or unspecified — "
            "defaulting to 50 MHz."
        )

    # Gather everything the individual checks need up front.
    submods = self._collect_all_submodules(hw_spec_dict, hierarchy_result_dict)
    ports = hw_spec_dict.get("ports", [])
    contracts = hw_spec_dict.get("behavioral_contract", [])
    category = hw_spec_dict.get("design_category", "CONTROL")
    description = hw_spec_dict.get("design_description", "")

    # Step 1: clock target vs. Sky130 achievable limits.
    step_warns, step_rejects = self._check_frequency(target_freq, submods, category)
    warnings.extend(step_warns)
    rejections.extend(step_rejects)

    # Step 2: storage structures vs. register / OpenRAM thresholds.
    step_warns, step_rejects, macros = self._check_memory(submods)
    warnings.extend(step_warns)
    rejections.extend(step_rejects)

    # Step 3: arithmetic gate-cost scan (produces warnings only).
    warnings.extend(self._check_arithmetic(submods, contracts, description))

    # Step 4: total area budget and floorplan sizing.
    total_ge, area_breakdown = self._estimate_area(submods)
    warnings.extend(self._check_area_budget(total_ge))

    # Step 5: PDK-specific incompatibility scan.
    step_warns, step_rejects = self._check_sky130_rules(
        ports, submods, contracts, description, hw_spec_dict
    )
    warnings.extend(step_warns)
    rejections.extend(step_rejects)

    # Any rejection wins over warnings; warnings beat a clean pass.
    if rejections:
        status = "REJECT"
    elif warnings:
        status = "WARN"
    else:
        status = "PASS"

    return FeasibilityResult(
        feasibility_status=status,
        estimated_gate_equivalents=total_ge,
        recommended_floorplan_size_um=self._recommend_floorplan(total_ge),
        target_frequency_mhz=target_freq,
        memory_macros_required=macros,
        feasibility_warnings=warnings,
        feasibility_rejections=rejections,
        area_breakdown=area_breakdown,
    )
263
+
264
+ # ── Step 1: Frequency Feasibility ────────────────────────────────
265
+
266
+ def _check_frequency(
267
+ self,
268
+ target_mhz: int,
269
+ submodules: List[Dict[str, Any]],
270
+ design_category: str,
271
+ ) -> Tuple[List[str], List[str]]:
272
+ warnings: List[str] = []
273
+ rejections: List[str] = []
274
+
275
+ if target_mhz > 200:
276
+ rejections.append(
277
+ f"FEASIBILITY_REJECTED: Sky130 cannot reliably achieve "
278
+ f"{target_mhz} MHz for synthesized digital logic in OpenLane. "
279
+ f"Recommend redesigning with target_frequency_mhz <= 100."
280
+ )
281
+ return warnings, rejections
282
+
283
+ if target_mhz > 150:
284
+ warnings.append(
285
+ f"HIGH_RISK: Target frequency {target_mhz} MHz is at the upper "
286
+ f"limit of Sky130. Only feasible for highly pipelined datapaths "
287
+ f"with no combinational paths longer than 3 logic levels."
288
+ )
289
+ # Flag submodules with likely deep logic
290
+ for sm in submodules:
291
+ combined = f"{sm.get('name', '')} {sm.get('description', '')}".lower()
292
+ if any(kw in combined for kw in [
293
+ "alu", "multiplier", "divider", "decode", "arbiter",
294
+ "cache", "branch_predict",
295
+ ]):
296
+ warnings.append(
297
+ f"HIGH_RISK: Submodule '{sm.get('name')}' likely has deep "
298
+ f"combinational paths incompatible with {target_mhz} MHz."
299
+ )
300
+
301
+ elif target_mhz > 100:
302
+ warnings.append(
303
+ f"MARGINAL: Target frequency {target_mhz} MHz requires careful "
304
+ f"constraint tuning in OpenLane. Critical path budget: "
305
+ f"{1000.0 / target_mhz:.1f} ns."
306
+ )
307
+ # Flag submodules whose critical path likely > 6ns
308
+ for sm in submodules:
309
+ combined = f"{sm.get('name', '')} {sm.get('description', '')}".lower()
310
+ deep_logic_keywords = [
311
+ "multiplier", "multiply", "divider", "divide",
312
+ "alu", "decode", "cache", "arbiter", "out-of-order",
313
+ "branch_predict",
314
+ ]
315
+ if any(kw in combined for kw in deep_logic_keywords):
316
+ warnings.append(
317
+ f"MARGINAL: Submodule '{sm.get('name')}' critical path "
318
+ f"likely exceeds 6 ns ({1000.0 / target_mhz:.1f} ns budget)."
319
+ )
320
+
321
+ elif target_mhz > 50:
322
+ # Check for designs with known timing pressure at 51-100 MHz
323
+ for sm in submodules:
324
+ combined = f"{sm.get('name', '')} {sm.get('description', '')}".lower()
325
+ has_wide_mult = (
326
+ ("multiplier" in combined or "multiply" in combined)
327
+ and self._extract_bit_width(combined) > 8
328
+ )
329
+ has_deep_pipeline = (
330
+ "pipeline" in combined
331
+ and self._count_pipeline_stages(combined) > 4
332
+ )
333
+ has_deep_logic = any(
334
+ kw in combined
335
+ for kw in ["deep logic", "long combinational", "barrel"]
336
+ )
337
+ if has_wide_mult:
338
+ warnings.append(
339
+ f"TIMING_WARN: Submodule '{sm.get('name')}' contains a "
340
+ f"multiplier wider than 8 bits at {target_mhz} MHz."
341
+ )
342
+ if has_deep_pipeline:
343
+ warnings.append(
344
+ f"TIMING_WARN: Submodule '{sm.get('name')}' has more than "
345
+ f"4 pipeline stages at {target_mhz} MHz."
346
+ )
347
+ if has_deep_logic:
348
+ warnings.append(
349
+ f"TIMING_WARN: Submodule '{sm.get('name')}' has deep logic "
350
+ f"cones at {target_mhz} MHz."
351
+ )
352
+
353
+ # 0-50 MHz: FEASIBLE for any complexity — no warnings needed
354
+
355
+ return warnings, rejections
356
+
357
+ # ── Step 2: Memory Feasibility ───────────────────────────────────
358
+
359
def _check_memory(
    self, submodules: List[Dict[str, Any]]
) -> Tuple[List[str], List[str], List[MacroRequirement]]:
    """Classify each memory-like submodule by storage size.

    > 16384 bits (2 KB): rejected as synthesized registers — an
        OpenRAM macro requirement is recorded instead.
    2049–16384 bits: warning — registers work but are expensive.
    <= 2048 bits: feasible as flops; no action.

    Fix: the rejection message was previously prefixed "MEMORY_WARNING"
    even though it goes on the rejection list; it now reads
    "MEMORY_REJECTED" for consistency with FEASIBILITY_REJECTED.

    Returns:
        (warnings, rejections, macro_requirements)
    """
    warnings: List[str] = []
    rejections: List[str] = []
    macros: List[MacroRequirement] = []

    mem_keywords = (
        "memory", "ram", "sram", "rom", "fifo", "cache",
        "register_file", "regfile", "register file", "buffer",
        "stack", "queue",
    )

    for sm in submodules:
        name = sm.get("name", "unknown")
        combined = f"{name} {sm.get('description', '')}".lower()

        # Only memory-like submodules are of interest here.
        if not any(kw in combined for kw in mem_keywords):
            continue

        # Prefer explicit dimensions in the text; fall back to port widths.
        width, depth = self._extract_memory_dimensions(combined)
        if width == 0 or depth == 0:
            width, depth = self._infer_memory_from_ports(sm.get("ports", []))
        if width == 0 or depth == 0:
            continue

        size_bits = width * depth

        if size_bits > 16384:  # > 2KB: must be a hard macro
            rejections.append(
                f"MEMORY_REJECTED: '{name}' requires {size_bits} bits "
                f"({size_bits // 8} bytes) of storage. This must be "
                f"implemented as an OpenRAM macro, not synthesized registers. "
                f"(width={width}, depth={depth})"
            )
            # Crude port-count inference from the description text.
            read_ports = 2 if ("dual" in combined or "2-port" in combined) else 1
            write_ports = 2 if ("dual write" in combined or "2 write" in combined) else 1
            macros.append(MacroRequirement(
                submodule_name=name,
                width_bits=width,
                depth_words=depth,
                read_ports=read_ports,
                write_ports=write_ports,
                size_bits=size_bits,
            ))

        elif size_bits > 2048:  # 256B–2KB: registers, but expensive
            ge_estimate = width * depth * 6  # each stored bit ≈ 6 GE
            warnings.append(
                f"MEMORY_WARN: '{name}' requires {size_bits} bits "
                f"({size_bits // 8} bytes). Will synthesize as registers but "
                f"consumes ~{ge_estimate} gate equivalents. "
                f"(width={width}, depth={depth})"
            )
        # <= 2048 bits: feasible as-is.

    return warnings, rejections, macros
425
+
426
+ # ── Step 3: Arithmetic Feasibility ───────────────────────────────
427
+
428
+ def _check_arithmetic(
429
+ self,
430
+ submodules: List[Dict[str, Any]],
431
+ contracts: List[Dict[str, Any]],
432
+ design_desc: str,
433
+ ) -> List[str]:
434
+ warnings: List[str] = []
435
+ combined_text = design_desc.lower()
436
+
437
+ # Collect all text: submodule descriptions + behavioral contracts
438
+ for sm in submodules:
439
+ combined_text += f" {sm.get('name', '')} {sm.get('description', '')}".lower()
440
+ for c in contracts:
441
+ combined_text += f" {c.get('given', '')} {c.get('when', '')} {c.get('then', '')}".lower()
442
+
443
+ # Check for multiplication
444
+ mult_patterns = [
445
+ (r"(\d+)\s*[x×]\s*(\d+)\s*(?:bit|-)?\s*mult", "explicit multiplier"),
446
+ (r"(\d+)\s*-?\s*bit\s+mult", "bit-width multiplier"),
447
+ (r"mult\w*\s+(\d+)\s*(?:bit|-bit)", "multiplier width"),
448
+ ]
449
+ found_mult = False
450
+ for pat, desc in mult_patterns:
451
+ m = re.search(pat, combined_text, re.IGNORECASE)
452
+ if m:
453
+ found_mult = True
454
+ groups = m.groups()
455
+ try:
456
+ if len(groups) == 2:
457
+ w1, w2 = int(groups[0]), int(groups[1])
458
+ else:
459
+ w1 = w2 = int(groups[0])
460
+ except (ValueError, TypeError):
461
+ w1, w2 = 0, 0
462
+
463
+ if w1 > 16 or w2 > 16:
464
+ warnings.append(
465
+ f"ARITHMETIC_WARN: {w1}×{w2}-bit multiplier is expensive on "
466
+ f"Sky130 (~{w1 * w2 * 4} GE, no DSP blocks). Consider "
467
+ f"pipelining or shift-and-add over multiple cycles."
468
+ )
469
+ elif w1 > 8 or w2 > 8:
470
+ warnings.append(
471
+ f"ARITHMETIC_WARN: {w1}×{w2}-bit multiplier will consume "
472
+ f"~1000 GE on Sky130 and may impact timing."
473
+ )
474
+ # ≤ 8×8: feasible (~200 GE)
475
+
476
+ # Check for multiplier keywords even without explicit dimensions
477
+ if not found_mult:
478
+ for sm in submodules:
479
+ combined = f"{sm.get('name', '')} {sm.get('description', '')}".lower()
480
+ if "multiplier" in combined or "multiply" in combined or "mac" in combined:
481
+ width = self._extract_bit_width(combined)
482
+ if width > 16:
483
+ warnings.append(
484
+ f"ARITHMETIC_WARN: Submodule '{sm.get('name')}' contains "
485
+ f"multiplication ({width}-bit). Very expensive on Sky130. "
486
+ f"Consider pipelining."
487
+ )
488
+ elif width > 8:
489
+ warnings.append(
490
+ f"ARITHMETIC_WARN: Submodule '{sm.get('name')}' contains "
491
+ f"multiplication ({width}-bit). ~1000 GE, may impact timing."
492
+ )
493
+
494
+ # Check for division
495
+ if "divider" in combined_text or "divide" in combined_text or "division" in combined_text:
496
+ warnings.append(
497
+ "ARITHMETIC_WARN: Division is extremely expensive on Sky130 "
498
+ "(no hardware divider). Flag for manual review. Consider "
499
+ "iterative shift-subtract implementation."
500
+ )
501
+
502
+ # Check for floating point
503
+ if "float" in combined_text or "fpu" in combined_text or "ieee 754" in combined_text:
504
+ warnings.append(
505
+ "ARITHMETIC_WARN: Floating-point operations are extremely expensive "
506
+ "on Sky130. A minimal FPU can consume >5000 GE. Flag for manual review."
507
+ )
508
+
509
+ return warnings
510
+
511
+ # ── Step 4: Area Estimation ──────────────────────────────────────
512
+
513
+ def _estimate_area(
514
+ self, submodules: List[Dict[str, Any]]
515
+ ) -> Tuple[int, Dict[str, int]]:
516
+ total_ge = 0
517
+ breakdown: Dict[str, int] = {}
518
+
519
+ for sm in submodules:
520
+ name = sm.get("name", "unknown")
521
+ combined = f"{name} {sm.get('description', '')}".lower()
522
+
523
+ ge = self._estimate_submodule_ge(combined, sm)
524
+ breakdown[name] = ge
525
+ total_ge += ge
526
+
527
+ # Add overhead for top-level IO pads, clock tree, etc. (~5%)
528
+ overhead = max(100, int(total_ge * 0.05))
529
+ breakdown["_interconnect_overhead"] = overhead
530
+ total_ge += overhead
531
+
532
+ return total_ge, breakdown
533
+
534
+ def _estimate_submodule_ge(
535
+ self, combined_text: str, sm: Dict[str, Any]
536
+ ) -> int:
537
+ """Estimate gate equivalents for a single submodule."""
538
+ best_ge = 0
539
+ matched = False
540
+
541
+ for keywords, base_ge, _desc in _GE_KEYWORD_MAP:
542
+ for kw in keywords:
543
+ if kw in combined_text:
544
+ # Scale by apparent data width if detectable
545
+ width = self._extract_bit_width(combined_text)
546
+ if width > 0 and kw in (
547
+ "adder", "counter", "comparator", "shift_register",
548
+ "barrel_shifter", "register",
549
+ ):
550
+ scaled = int(base_ge * (width / 32.0)) if width != 32 else base_ge
551
+ best_ge = max(best_ge, max(scaled, base_ge // 4))
552
+ else:
553
+ best_ge = max(best_ge, base_ge)
554
+ matched = True
555
+
556
+ if not matched:
557
+ # Fallback: estimate from port count
558
+ port_count = len(sm.get("ports", []))
559
+ best_ge = max(50, port_count * 20)
560
+
561
+ return best_ge
562
+
563
+ def _check_area_budget(self, total_ge: int) -> List[str]:
564
+ warnings: List[str] = []
565
+
566
+ if total_ge > 200_000:
567
+ warnings.append(
568
+ f"AREA_WARN: Estimated {total_ge} GE exceeds the comfortable "
569
+ f"OpenLane limit. OpenLane may time out or fail placement. "
570
+ f"Consider splitting into multiple tiles or simplifying."
571
+ )
572
+ elif total_ge > 50_000:
573
+ warnings.append(
574
+ f"AREA_INFO: Large design ({total_ge} GE). OpenLane run will "
575
+ f"take 30–60 minutes. Ensure adequate compute resources."
576
+ )
577
+
578
+ return warnings
579
+
580
+ def _recommend_floorplan(self, total_ge: int) -> str:
581
+ for threshold, description, _size in FLOORPLAN_TIERS:
582
+ if total_ge <= threshold:
583
+ return description
584
+ return f"Very large design ({total_ge} GE) — manual floorplan required"
585
+
586
+ # ── Step 5: Sky130-Specific Rules ────────────────────────────────
587
+
588
+ def _check_sky130_rules(
589
+ self,
590
+ top_ports: List[Dict[str, Any]],
591
+ submodules: List[Dict[str, Any]],
592
+ contracts: List[Dict[str, Any]],
593
+ design_desc: str,
594
+ spec: Dict[str, Any],
595
+ ) -> Tuple[List[str], List[str]]:
596
+ warnings: List[str] = []
597
+ rejections: List[str] = []
598
+
599
+ combined_text = design_desc.lower()
600
+ for sm in submodules:
601
+ combined_text += f" {sm.get('name', '')} {sm.get('description', '')}".lower()
602
+ for c in contracts:
603
+ combined_text += f" {c.get('given', '')} {c.get('when', '')} {c.get('then', '')}".lower()
604
+
605
+ # Rule 1: Internal tri-state buses
606
+ top_level_port_names = {p.get("name", "") for p in top_ports}
607
+ for sm in submodules:
608
+ for p in sm.get("ports", []):
609
+ if p.get("direction", "") == "inout":
610
+ pname = p.get("name", "")
611
+ if pname not in top_level_port_names:
612
+ rejections.append(
613
+ f"FEASIBILITY_REJECTED: Internal tri-state port "
614
+ f"'{pname}' in submodule '{sm.get('name')}'. Sky130 "
615
+ f"synthesized logic cannot use internal tri-states. "
616
+ f"Replace with mux/demux logic."
617
+ )
618
+
619
+ # Rule 2: Async reset with > 2 clock domains
620
+ clock_domain_keywords = [
621
+ "clock domain", "clk_domain", "cdc", "multi-clock",
622
+ "clock crossing", "dual clock",
623
+ ]
624
+ has_multi_clock = any(kw in combined_text for kw in clock_domain_keywords)
625
+
626
+ async_reset_keywords = ["async", "asynchronous reset", "async_reset"]
627
+ has_async_reset = any(kw in combined_text for kw in async_reset_keywords)
628
+
629
+ # Count distinct clock-like ports
630
+ clock_ports: set = set()
631
+ for p in top_ports:
632
+ pname = p.get("name", "").lower()
633
+ if "clk" in pname or "clock" in pname:
634
+ clock_ports.add(pname)
635
+ for sm in submodules:
636
+ for p in sm.get("ports", []):
637
+ pname = p.get("name", "").lower()
638
+ if "clk" in pname or "clock" in pname:
639
+ clock_ports.add(pname)
640
+
641
+ if has_async_reset and (has_multi_clock or len(clock_ports) > 2):
642
+ warnings.append(
643
+ "SKY130_WARN: Asynchronous reset with more than 2 clock domains "
644
+ "detected. Cross-domain async reset de-assertion needs "
645
+ "synchronizers. Add reset synchronizer module per domain."
646
+ )
647
+
648
+ # Rule 3: PLL or analog blocks
649
+ analog_keywords = [
650
+ "pll", "phase-locked loop", "dac", "adc", "analog",
651
+ "voltage reference", "bandgap", "ldo", "oscillator",
652
+ ]
653
+ for kw in analog_keywords:
654
+ if kw in combined_text:
655
+ rejections.append(
656
+ f"FEASIBILITY_REJECTED: '{kw.upper()}' is analog and cannot "
657
+ f"be automated through OpenLane. Requires manual custom layout."
658
+ )
659
+
660
+ # Rule 4: Negative-edge triggered flip-flops
661
+ negedge_keywords = [
662
+ "negedge", "negative edge", "falling edge triggered",
663
+ "neg-edge", "negative-edge",
664
+ ]
665
+ for kw in negedge_keywords:
666
+ if kw in combined_text:
667
+ warnings.append(
668
+ "SKY130_WARN: Negative-edge triggered flip-flops detected. "
669
+ "Sky130 standard cell library has limited negedge cells. "
670
+ "Prefer posedge-triggered always_ff."
671
+ )
672
+ break
673
+
674
+ # Rule 5: Latches
675
+ latch_keywords = ["latch", "level-sensitive", "transparent latch"]
676
+ for kw in latch_keywords:
677
+ if kw in combined_text:
678
+ warnings.append(
679
+ "SKY130_WARN: Latch-based storage detected. OpenLane synthesis "
680
+ "may not handle latch inference correctly. Prefer always_ff "
681
+ "with flip-flops."
682
+ )
683
+ break
684
+
685
+ return warnings, rejections
686
+
687
+ # ── Utility: Collect All Submodules ──────────────────────────────
688
+
689
+ def _collect_all_submodules(
690
+ self,
691
+ hw_spec_dict: Dict[str, Any],
692
+ hierarchy_result_dict: Optional[Dict[str, Any]] = None,
693
+ ) -> List[Dict[str, Any]]:
694
+ """Flatten all submodules including nested specs from hierarchy."""
695
+ all_subs: List[Dict[str, Any]] = []
696
+
697
+ # Top-level submodules from hw_spec
698
+ for sm in hw_spec_dict.get("submodules", []):
699
+ all_subs.append(sm)
700
+
701
+ # If hierarchy result exists, also scan expanded nested specs
702
+ if hierarchy_result_dict:
703
+ for sm in hierarchy_result_dict.get("submodules", []):
704
+ # Don't re-add duplicates already in hw_spec
705
+ nested = sm.get("nested_spec")
706
+ if nested and isinstance(nested, dict):
707
+ self._collect_nested_subs(nested, all_subs)
708
+
709
+ return all_subs
710
+
711
+ def _collect_nested_subs(
712
+ self, spec_dict: Dict[str, Any], out: List[Dict[str, Any]]
713
+ ) -> None:
714
+ """Recursively collect submodules from nested specs."""
715
+ for sm in spec_dict.get("submodules", []):
716
+ out.append(sm)
717
+ nested = sm.get("nested_spec")
718
+ if nested and isinstance(nested, dict):
719
+ self._collect_nested_subs(nested, out)
720
+
721
+ # ── Utility: Extract Dimensions ──────────────────────────────────
722
+
723
+ def _extract_bit_width(self, text: str) -> int:
724
+ """Extract the most likely data width (in bits) from text."""
725
+ patterns = [
726
+ r"(\d+)\s*-?\s*bit",
727
+ r"data_width\s*[=:]\s*(\d+)",
728
+ r"width\s*[=:]\s*(\d+)",
729
+ r"\[(\d+):0\]",
730
+ ]
731
+ best = 0
732
+ for pat in patterns:
733
+ for m in re.finditer(pat, text, re.IGNORECASE):
734
+ try:
735
+ val = int(m.group(1))
736
+ if pat == r"\[(\d+):0\]":
737
+ val += 1 # [N:0] means N+1 bits
738
+ best = max(best, val)
739
+ except (ValueError, IndexError):
740
+ pass
741
+ return best
742
+
743
+ def _extract_memory_dimensions(
744
+ self, text: str
745
+ ) -> Tuple[int, int]:
746
+ """Extract width × depth from memory description text."""
747
+ # Patterns: "32x1024", "32-bit × 256-deep", "width 32 depth 256"
748
+ patterns = [
749
+ r"(\d+)\s*[x×]\s*(\d+)",
750
+ r"width\s*[=:]\s*(\d+).*?depth\s*[=:]\s*(\d+)",
751
+ r"(\d+)\s*-?\s*bit\s*.*?(\d+)\s*-?\s*(?:deep|entries|words|locations)",
752
+ ]
753
+ for pat in patterns:
754
+ m = re.search(pat, text, re.IGNORECASE)
755
+ if m:
756
+ try:
757
+ a, b = int(m.group(1)), int(m.group(2))
758
+ # Convention: smaller number is width, larger is depth
759
+ width = min(a, b)
760
+ depth = max(a, b)
761
+ # But if first number looks like a width (8,16,32,64,128)
762
+ if a in (8, 16, 32, 64, 128, 256):
763
+ width, depth = a, b
764
+ return width, depth
765
+ except (ValueError, IndexError):
766
+ pass
767
+
768
+ # Try single dimension: "1024-bit memory" → assume 8-bit wide × 128 deep
769
+ m = re.search(r"(\d+)\s*-?\s*bit\s+(?:memory|ram|sram|rom)", text, re.IGNORECASE)
770
+ if m:
771
+ total = int(m.group(1))
772
+ if total > 256:
773
+ # Assume 8-bit width
774
+ return 8, total // 8
775
+
776
+ return 0, 0
777
+
778
+ def _infer_memory_from_ports(
779
+ self, ports: List[Dict[str, Any]]
780
+ ) -> Tuple[int, int]:
781
+ """Infer memory width/depth from port data types."""
782
+ data_width = 0
783
+ addr_width = 0
784
+
785
+ for p in ports:
786
+ pname = p.get("name", "").lower()
787
+ dtype = p.get("data_type", "")
788
+
789
+ # Extract bus width from data_type like "logic [31:0]"
790
+ m = re.search(r"\[(\d+):0\]", dtype)
791
+ bus_width = (int(m.group(1)) + 1) if m else 1
792
+
793
+ if any(kw in pname for kw in ["data", "din", "dout", "q", "rdata", "wdata"]):
794
+ data_width = max(data_width, bus_width)
795
+ if any(kw in pname for kw in ["addr", "address"]):
796
+ addr_width = max(addr_width, bus_width)
797
+
798
+ if data_width > 0 and addr_width > 0:
799
+ depth = 2 ** addr_width
800
+ return data_width, depth
801
+
802
+ return 0, 0
803
+
804
+ def _count_pipeline_stages(self, text: str) -> int:
805
+ """Try to extract pipeline stage count from text."""
806
+ m = re.search(r"(\d+)\s*-?\s*stage", text, re.IGNORECASE)
807
+ if m:
808
+ return int(m.group(1))
809
+ return 0
810
+
811
+ # ── Enrichment for Downstream Stages ─────────────────────────────
812
+
813
+ def to_feasibility_enrichment(
814
+ self, result: FeasibilityResult
815
+ ) -> Dict[str, Any]:
816
+ """Convert FeasibilityResult to enrichment dict for the spec artifact."""
817
+ return {
818
+ "feasibility_status": result.feasibility_status,
819
+ "estimated_gate_equivalents": result.estimated_gate_equivalents,
820
+ "recommended_floorplan": result.recommended_floorplan_size_um,
821
+ "target_frequency_mhz": result.target_frequency_mhz,
822
+ "memory_macros": [m.to_dict() for m in result.memory_macros_required],
823
+ "warnings_count": len(result.feasibility_warnings),
824
+ "rejections_count": len(result.feasibility_rejections),
825
+ "area_breakdown": result.area_breakdown,
826
+ }
src/agentic/core/hierarchy_expander.py ADDED
@@ -0,0 +1,812 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Hierarchy Expander — Phase 2 of the Spec Pipeline
3
+ ==================================================
4
+
5
+ Receives a structured hardware specification JSON from the SpecElaborator
6
+ (SPEC_VALIDATE) stage and evaluates whether any sub-module is too complex
7
+ to be implemented directly. Complex sub-modules are recursively expanded
8
+ with their own full specification before RTL generation begins.
9
+
10
+ Pipeline Steps:
11
+ 1. EVALUATE — Score every submodule against complexity triggers
12
+ 2. EXPAND — Generate nested specs for complex submodules (max depth 3)
13
+ 3. CONSISTENCY — Verify interface connectivity across the full hierarchy
14
+ 4. OUTPUT — Emit expanded JSON with hierarchy metadata
15
+ """
16
+
17
+ import json
18
+ import logging
19
+ import re
20
+ from copy import deepcopy
21
+ from dataclasses import asdict, dataclass, field
22
+ from typing import Any, Dict, List, Optional, Set, Tuple
23
+
24
+ from crewai import Agent, Crew, LLM, Task
25
+
26
+ from .spec_generator import (
27
+ BehavioralStatement,
28
+ HardwareSpec,
29
+ PortSpec,
30
+ SubModuleSpec,
31
+ )
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
+ # ─── Complexity Trigger Keywords ─────────────────────────────────────
37
+
38
# Keywords whose presence marks a submodule as too complex for direct
# implementation; any hit in _needs_expansion forces recursive expansion.
# NOTE: order is preserved — _needs_expansion returns on the first match,
# which determines the debug log and the small-FSM exemption path.
COMPLEXITY_KEYWORDS: List[str] = [
    "pipeline", "arbitration", "arbiter",
    "protocol handling", "protocol handler", "state machine",
    "cache", "prefetch", "out-of-order", "branch prediction",
    "dma", "interrupt handling", "interrupt controller", "bus fabric",
]
54
+
55
+ # Patterns that signal > 4 state FSM
56
+ _FSM_MANY_STATES = re.compile(
57
+ r"state\s*machine\s+(?:with\s+)?(?:more\s+than\s+)?(\d+)\s+states?",
58
+ re.IGNORECASE,
59
+ )
60
+
61
+ # ─── Simple-submodule patterns (no expansion needed) ─────────────────
62
+
63
# Descriptions matching any of these are considered trivially simple and
# skip expansion (unless a COMPLEXITY_KEYWORDS hit overrides the match).
SIMPLE_PATTERNS: List[str] = [
    "register", "flip-flop", "flip flop", "flipflop", "ff bank",
    "latch", "mux", "multiplexer", "adder", "comparator",
    "pipeline register", "pipe register", "combinational",
]
78
+
79
+
80
+ # ─── Category Map (for cross-category detection) ────────────────────
81
+
82
# Keyword buckets used by _infer_category to score a submodule's design
# category. NOTE: insertion order is the tie-break (max() keeps the first
# category with the highest score) — do not reorder.
CATEGORY_KEYWORDS: Dict[str, List[str]] = {
    "PROCESSOR": [
        "cpu", "processor", "risc", "riscv", "rv32", "rv64",
        "microcontroller", "instruction", "isa", "fetch",
        "decode", "execute", "pipeline",
    ],
    "MEMORY": [
        "fifo", "sram", "ram", "rom", "cache", "register file",
        "memory", "stack", "queue", "buffer",
    ],
    "INTERFACE": [
        "uart", "spi", "i2c", "apb", "axi", "wishbone", "usb",
        "serial", "baud", "mosi", "miso", "sclk",
    ],
    "ARITHMETIC": [
        "alu", "multiplier", "divider", "adder", "mac", "fpu",
        "floating point", "multiply", "accumulate",
    ],
    "CONTROL": [
        "state machine", "fsm", "arbiter", "scheduler",
        "interrupt", "controller", "priority",
    ],
    "DATAPATH": [
        "shift register", "barrel shifter", "pipeline stage",
        "datapath", "mux", "demux",
    ],
}
97
+
98
+
99
+ # ─── Expanded Submodule Dataclass ────────────────────────────────────
100
+
101
@dataclass
class ExpandedSubModule:
    """A submodule that may contain a full nested specification."""
    name: str
    description: str = ""
    ports: List[PortSpec] = field(default_factory=list)
    # Set True when a complexity trigger fired during evaluation.
    requires_expansion: bool = False
    # Full nested spec dict produced by recursive expansion; None if flat.
    nested_spec: Optional[Dict[str, Any]] = None

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, expanding each port via its to_dict()."""
        return {
            "name": self.name,
            "description": self.description,
            "ports": [p.to_dict() for p in self.ports],
            "requires_expansion": self.requires_expansion,
            "nested_spec": self.nested_spec,
        }
119
+
120
+
121
@dataclass
class HierarchyResult:
    """Output of the HierarchyExpander."""
    design_category: str
    top_module_name: str
    ports: List[PortSpec] = field(default_factory=list)
    submodules: List[ExpandedSubModule] = field(default_factory=list)
    behavioral_contract: List[BehavioralStatement] = field(default_factory=list)
    # Deepest nesting level reached (1 == flat design).
    hierarchy_depth: int = 1
    # Total number of submodules that were recursively expanded.
    expansion_count: int = 0
    warnings: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the full hierarchy to plain dicts/lists."""
        return {
            "design_category": self.design_category,
            "top_module_name": self.top_module_name,
            "ports": [p.to_dict() for p in self.ports],
            "submodules": [s.to_dict() for s in self.submodules],
            "behavioral_contract": [b.to_dict() for b in self.behavioral_contract],
            "hierarchy_depth": self.hierarchy_depth,
            "expansion_count": self.expansion_count,
            "warnings": list(self.warnings),
        }

    def to_json(self) -> str:
        """Pretty-printed JSON form of to_dict()."""
        return json.dumps(self.to_dict(), indent=2)
147
+
148
+
149
# ─── LLM Prompt for Nested Spec Generation ──────────────────────────
150
+
151
# Prompt template for generating a nested spec for one complex submodule.
# Placeholders ({parent_name}, {sub_name}, ...) are presumably filled via
# str.format() — the doubled braces {{ }} escape the literal JSON braces.
# TODO(review): confirm the formatting call site in _generate_nested_spec.
EXPAND_SUBMODULE_PROMPT = """\
You are a senior VLSI architect. A parent module named '{parent_name}' (category: {parent_category}) \
contains a submodule '{sub_name}' that is too complex for direct implementation.

Submodule description: {sub_description}
Parent-facing ports of this submodule:
{parent_ports_json}

Generate a COMPLETE nested specification for this submodule. It must include:

1. Its own ports — these MUST be consistent with the parent ports listed above. \
Do NOT add, remove, or rename any port that the parent connects to. \
You MAY add internal-only ports (e.g. sub-sub-module interfaces).

2. Its own submodules — decompose it into simpler blocks. Apply the same rules:
   - Max 8 submodules
   - Each must have: name (snake_case), one-sentence description, port list
   - Do NOT expand further here — we handle recursion externally

3. A behavioral contract — minimum 3 GIVEN/WHEN/THEN/WITHIN statements:
   - 1 reset behavior
   - 1 main functional operation
   - 1 edge case

4. Warnings — list every assumption

Return ONLY this JSON (no markdown, no commentary):
{{
  "design_category": "{sub_category}",
  "top_module_name": "{sub_name}",
  "ports": [
    {{"name": "<name>", "direction": "input|output|inout", "data_type": "logic|logic [N:0]", "description": "<purpose>"}}
  ],
  "submodules": [
    {{
      "name": "<snake_case>",
      "description": "<one sentence>",
      "ports": [
        {{"name": "<name>", "direction": "input|output", "data_type": "logic|logic [N:0]", "description": "<purpose>"}}
      ]
    }}
  ],
  "behavioral_contract": [
    {{"given": "<precondition>", "when": "<trigger>", "then": "<expected>", "within": "<latency>"}}
  ],
  "warnings": ["<assumption>"]
}}
"""
199
+
200
+
201
+ # ─── Main Class ──────────────────────────────────────────────────────
202
+
203
+ class HierarchyExpander:
204
+ """
205
+ Evaluates sub-modules from a HardwareSpec for complexity, recursively
206
+ expands complex ones into nested specifications, then verifies interface
207
+ consistency across the full hierarchy.
208
+
209
+ Maximum recursion depth: 3 levels.
210
+ """
211
+
212
+ MAX_DEPTH = 3
213
+ MAX_PORTS_SIMPLE = 8
214
+
215
+ def __init__(self, llm: LLM, verbose: bool = False, max_retries: int = 2):
216
+ self.llm = llm
217
+ self.verbose = verbose
218
+ self.max_retries = max_retries
219
+
220
+ # ── Public API ───────────────────────────────────────────────────
221
+
222
+ def expand(self, hw_spec: HardwareSpec) -> HierarchyResult:
223
+ """
224
+ Main entry point.
225
+
226
+ Args:
227
+ hw_spec: Validated HardwareSpec from SpecElaborator / SPEC_VALIDATE.
228
+
229
+ Returns:
230
+ HierarchyResult with expanded submodules, depth, and warnings.
231
+ """
232
+ warnings: List[str] = list(hw_spec.warnings)
233
+ parent_category = hw_spec.design_category
234
+ parent_name = hw_spec.top_module_name
235
+
236
+ # Step 1+2: Evaluate and expand each submodule
237
+ expanded_subs: List[ExpandedSubModule] = []
238
+ total_expansions = 0
239
+ max_depth = 1
240
+
241
+ for sm in hw_spec.submodules:
242
+ needs = self._needs_expansion(sm, parent_category)
243
+ esm = ExpandedSubModule(
244
+ name=sm.name,
245
+ description=sm.description,
246
+ ports=[PortSpec(**asdict(p)) for p in sm.ports],
247
+ requires_expansion=needs,
248
+ nested_spec=None,
249
+ )
250
+
251
+ if needs:
252
+ nested, depth, sub_warnings, sub_expansions = self._expand_recursive(
253
+ sub_name=sm.name,
254
+ sub_description=sm.description,
255
+ sub_ports=sm.ports,
256
+ parent_name=parent_name,
257
+ parent_category=parent_category,
258
+ current_depth=1,
259
+ )
260
+ esm.nested_spec = nested
261
+ total_expansions += 1 + sub_expansions
262
+ max_depth = max(max_depth, depth + 1)
263
+ warnings.extend(sub_warnings)
264
+
265
+ expanded_subs.append(esm)
266
+
267
+ # Depth limit warning
268
+ if max_depth > self.MAX_DEPTH:
269
+ warnings.insert(
270
+ 0,
271
+ "HIERARCHY_WARNING: Design complexity exceeds safe automation "
272
+ "depth. Manual architecture review recommended before RTL generation.",
273
+ )
274
+
275
+ # Step 3: Consistency check
276
+ consistency_fixes = self._consistency_check(
277
+ top_ports=hw_spec.ports,
278
+ submodules=expanded_subs,
279
+ )
280
+ warnings.extend(consistency_fixes)
281
+
282
+ return HierarchyResult(
283
+ design_category=parent_category,
284
+ top_module_name=parent_name,
285
+ ports=[PortSpec(**asdict(p)) for p in hw_spec.ports],
286
+ submodules=expanded_subs,
287
+ behavioral_contract=[
288
+ BehavioralStatement(**asdict(b)) for b in hw_spec.behavioral_contract
289
+ ],
290
+ hierarchy_depth=max_depth,
291
+ expansion_count=total_expansions,
292
+ warnings=warnings,
293
+ )
294
+
295
+ # ── Step 1: Complexity Evaluation ────────────────────────────────
296
+
297
+ def _needs_expansion(self, sub: SubModuleSpec, parent_category: str) -> bool:
298
+ """Evaluate whether a submodule requires recursive expansion."""
299
+ desc = (sub.description or "").lower()
300
+ name_lower = (sub.name or "").lower()
301
+ combined = f"{name_lower} {desc}"
302
+
303
+ # ── Check simplicity first (quick exit) ──
304
+ if self._is_simple(combined):
305
+ return False
306
+
307
+ # ── Trigger A: Complexity keywords ──
308
+ for kw in COMPLEXITY_KEYWORDS:
309
+ if kw in combined:
310
+ # Special case: "state machine with >4 states" check
311
+ if kw == "state machine":
312
+ m = _FSM_MANY_STATES.search(combined)
313
+ if m and int(m.group(1)) <= 4:
314
+ continue # Small FSM — no expansion
315
+ logger.debug(f"[HierarchyExpander] '{sub.name}' triggers on keyword: {kw}")
316
+ return True
317
+
318
+ # ── Trigger B: Port count > 8 ──
319
+ if len(sub.ports) > self.MAX_PORTS_SIMPLE:
320
+ logger.debug(
321
+ f"[HierarchyExpander] '{sub.name}' triggers on port count: "
322
+ f"{len(sub.ports)} > {self.MAX_PORTS_SIMPLE}"
323
+ )
324
+ return True
325
+
326
+ # ── Trigger C: Cross-category submodule ──
327
+ sub_cat = self._infer_category(combined)
328
+ if sub_cat and sub_cat != parent_category and sub_cat != "MIXED":
329
+ logger.debug(
330
+ f"[HierarchyExpander] '{sub.name}' triggers on cross-category: "
331
+ f"{sub_cat} inside {parent_category}"
332
+ )
333
+ return True
334
+
335
+ # ── Trigger D: Large memory (> 256 bits, not a simple register) ──
336
+ if self._has_large_memory(combined):
337
+ logger.debug(f"[HierarchyExpander] '{sub.name}' triggers on large memory")
338
+ return True
339
+
340
+ # ── Trigger E: Would take > 30 min to implement from description ──
341
+ # Heuristic: very short description + non-trivial name ⇒ ambiguous
342
+ if len(desc.split()) < 6 and not self._is_simple(combined):
343
+ # Only trigger if name suggests something non-trivial
344
+ non_trivial_names = [
345
+ "controller", "engine", "handler", "manager", "unit",
346
+ "core", "processor", "interface", "bridge", "fabric",
347
+ ]
348
+ if any(nt in name_lower for nt in non_trivial_names):
349
+ logger.debug(
350
+ f"[HierarchyExpander] '{sub.name}' triggers on ambiguous short description"
351
+ )
352
+ return True
353
+
354
+ return False
355
+
356
+ def _is_simple(self, combined_text: str) -> bool:
357
+ """Return True if the submodule is clearly simple (no expansion)."""
358
+ for pat in SIMPLE_PATTERNS:
359
+ if pat in combined_text:
360
+ # Make sure it isn't disqualified by a complexity keyword
361
+ has_complex = any(kw in combined_text for kw in COMPLEXITY_KEYWORDS)
362
+ if not has_complex:
363
+ return True
364
+
365
+ # "3 lines of Verilog" heuristic: very short description + trivial name
366
+ if len(combined_text.split()) <= 5:
367
+ return True
368
+
369
+ return False
370
+
371
+ def _infer_category(self, text: str) -> Optional[str]:
372
+ """Infer the design category of a submodule from its description."""
373
+ scores: Dict[str, int] = {cat: 0 for cat in CATEGORY_KEYWORDS}
374
+ for cat, keywords in CATEGORY_KEYWORDS.items():
375
+ for kw in keywords:
376
+ if kw in text:
377
+ scores[cat] += 1
378
+
379
+ best = max(scores, key=scores.get) # type: ignore[arg-type]
380
+ if scores[best] == 0:
381
+ return None
382
+ return best
383
+
384
+ def _has_large_memory(self, text: str) -> bool:
385
+ """Detect mentions of memory > 256 bits that isn't a simple register."""
386
+ # Look for patterns like "1024-bit", "512 bits", "1K memory", "4KB"
387
+ mem_patterns = [
388
+ r"(\d+)\s*-?\s*bits?\b",
389
+ r"(\d+)\s*x\s*(\d+)\s*(?:bit|memory|ram|sram)",
390
+ r"(\d+)\s*[kK][bB]?\b",
391
+ ]
392
+ for pat in mem_patterns:
393
+ m = re.search(pat, text, re.IGNORECASE)
394
+ if m:
395
+ groups = m.groups()
396
+ try:
397
+ if len(groups) == 2:
398
+ # AxB pattern
399
+ total = int(groups[0]) * int(groups[1])
400
+ elif "k" in (m.group(0) or "").lower():
401
+ total = int(groups[0]) * 1024
402
+ else:
403
+ total = int(groups[0])
404
+
405
+ if total > 256:
406
+ return True
407
+ except (ValueError, TypeError):
408
+ pass
409
+ return False
410
+
411
+ # ── Step 2: Recursive Expansion ──────────────────────────────────
412
+
413
+ def _expand_recursive(
414
+ self,
415
+ sub_name: str,
416
+ sub_description: str,
417
+ sub_ports: List[PortSpec],
418
+ parent_name: str,
419
+ parent_category: str,
420
+ current_depth: int,
421
+ ) -> Tuple[Optional[Dict[str, Any]], int, List[str], int]:
422
+ """
423
+ Recursively expand a complex submodule.
424
+
425
+ Returns:
426
+ (nested_spec_dict, depth_reached, warnings, expansion_count)
427
+ """
428
+ warnings: List[str] = []
429
+
430
+ if current_depth > self.MAX_DEPTH:
431
+ warnings.append(
432
+ "HIERARCHY_WARNING: Design complexity exceeds safe automation "
433
+ "depth. Manual architecture review recommended before RTL generation."
434
+ )
435
+ return None, current_depth, warnings, 0
436
+
437
+ # Infer sub-category
438
+ combined = f"{sub_name.lower()} {(sub_description or '').lower()}"
439
+ sub_category = self._infer_category(combined) or parent_category
440
+
441
+ # Generate nested spec via LLM
442
+ nested_spec = self._generate_nested_spec(
443
+ sub_name=sub_name,
444
+ sub_description=sub_description,
445
+ sub_ports=sub_ports,
446
+ sub_category=sub_category,
447
+ parent_name=parent_name,
448
+ parent_category=parent_category,
449
+ )
450
+
451
+ if nested_spec is None:
452
+ warnings.append(
453
+ f"Failed to generate nested spec for '{sub_name}' — "
454
+ "will be implemented as a flat module."
455
+ )
456
+ return None, current_depth, warnings, 0
457
+
458
+ # Recursively check nested submodules
459
+ max_depth = current_depth
460
+ sub_expansions = 0
461
+ nested_subs = nested_spec.get("submodules", [])
462
+ for i, nsub in enumerate(nested_subs):
463
+ nsub_name = nsub.get("name", f"sub_{i}")
464
+ nsub_desc = nsub.get("description", "")
465
+ nsub_ports_raw = nsub.get("ports", [])
466
+ nsub_ports = [
467
+ PortSpec(
468
+ name=p.get("name", ""),
469
+ direction=p.get("direction", "input"),
470
+ data_type=p.get("data_type", "logic"),
471
+ description=p.get("description", ""),
472
+ )
473
+ for p in nsub_ports_raw
474
+ ]
475
+
476
+ # Build a temporary SubModuleSpec for evaluation
477
+ temp_sub = SubModuleSpec(
478
+ name=nsub_name,
479
+ description=nsub_desc,
480
+ ports=nsub_ports,
481
+ )
482
+
483
+ if self._needs_expansion(temp_sub, sub_category):
484
+ child_spec, child_depth, child_warnings, child_exp = self._expand_recursive(
485
+ sub_name=nsub_name,
486
+ sub_description=nsub_desc,
487
+ sub_ports=nsub_ports,
488
+ parent_name=sub_name,
489
+ parent_category=sub_category,
490
+ current_depth=current_depth + 1,
491
+ )
492
+ nsub["requires_expansion"] = True
493
+ nsub["nested_spec"] = child_spec
494
+ sub_expansions += 1 + child_exp
495
+ max_depth = max(max_depth, child_depth + 1)
496
+ warnings.extend(child_warnings)
497
+ else:
498
+ nsub["requires_expansion"] = False
499
+ nsub["nested_spec"] = None
500
+
501
+ nested_spec["submodules"] = nested_subs
502
+ return nested_spec, max_depth, warnings, sub_expansions
503
+
504
+ def _generate_nested_spec(
505
+ self,
506
+ sub_name: str,
507
+ sub_description: str,
508
+ sub_ports: List[PortSpec],
509
+ sub_category: str,
510
+ parent_name: str,
511
+ parent_category: str,
512
+ ) -> Optional[Dict[str, Any]]:
513
+ """Use the LLM to generate a nested specification for a complex submodule."""
514
+ ports_json = json.dumps(
515
+ [p.to_dict() for p in sub_ports],
516
+ indent=2,
517
+ )
518
+
519
+ prompt = EXPAND_SUBMODULE_PROMPT.format(
520
+ parent_name=parent_name,
521
+ parent_category=parent_category,
522
+ sub_name=sub_name,
523
+ sub_description=sub_description or "(no description provided)",
524
+ parent_ports_json=ports_json,
525
+ sub_category=sub_category,
526
+ )
527
+
528
+ last_error = ""
529
+ for attempt in range(1, self.max_retries + 1):
530
+ logger.info(
531
+ f"[HierarchyExpander] Expanding '{sub_name}' attempt "
532
+ f"{attempt}/{self.max_retries}"
533
+ )
534
+
535
+ retry_ctx = ""
536
+ if last_error:
537
+ retry_ctx = (
538
+ f"\n\nPREVIOUS ATTEMPT FAILED:\n{last_error}\n"
539
+ "Fix the issues and return corrected JSON."
540
+ )
541
+
542
+ agent = Agent(
543
+ role="Hierarchical RTL Architect",
544
+ goal=f"Generate a nested spec for submodule '{sub_name}'",
545
+ backstory=(
546
+ "You are a principal VLSI architect specializing in hierarchical "
547
+ "design decomposition. You produce clean, consistent nested "
548
+ "specifications that integrate perfectly with their parent module."
549
+ ),
550
+ llm=self.llm,
551
+ verbose=self.verbose,
552
+ )
553
+ task = Task(
554
+ description=prompt + retry_ctx,
555
+ expected_output="Complete nested specification JSON for the submodule",
556
+ agent=agent,
557
+ )
558
+
559
+ try:
560
+ raw = str(Crew(agents=[agent], tasks=[task]).kickoff())
561
+ data = self._extract_json(raw)
562
+
563
+ if data is None:
564
+ last_error = "Response was not valid JSON"
565
+ continue
566
+
567
+ # Validate minimum structure
568
+ if "ports" not in data or not isinstance(data.get("ports"), list):
569
+ last_error = "Missing or invalid 'ports' array"
570
+ continue
571
+
572
+ if "behavioral_contract" not in data or len(data.get("behavioral_contract", [])) < 3:
573
+ last_error = (
574
+ "Behavioral contract must have at least 3 statements "
575
+ f"(got {len(data.get('behavioral_contract', []))})"
576
+ )
577
+ continue
578
+
579
+ # Ensure top_module_name matches
580
+ data["top_module_name"] = sub_name
581
+
582
+ return data
583
+
584
+ except Exception as e:
585
+ last_error = str(e)
586
+ logger.warning(
587
+ f"[HierarchyExpander] Expansion attempt {attempt} for "
588
+ f"'{sub_name}' failed: {e}"
589
+ )
590
+
591
+ logger.error(
592
+ f"[HierarchyExpander] All {self.max_retries} attempts failed for '{sub_name}'"
593
+ )
594
+ return None
595
+
596
+ # ── Step 3: Consistency Check ────────────────────────────────────
597
+
598
+ def _consistency_check(
599
+ self,
600
+ top_ports: List[PortSpec],
601
+ submodules: List[ExpandedSubModule],
602
+ ) -> List[str]:
603
+ """
604
+ Verify interface consistency across the hierarchy.
605
+
606
+ Checks:
607
+ - Every driven port has exactly one driver
608
+ - No port is left unconnected
609
+ - No two submodules drive the same signal
610
+ - Clock and reset reach every sequential submodule
611
+ """
612
+ fixes: List[str] = []
613
+
614
+ # Collect all output signals (drivers) per submodule
615
+ drivers: Dict[str, List[str]] = {} # signal_name → list of driver modules
616
+ receivers: Dict[str, List[str]] = {} # signal_name → list of receiver modules
617
+ all_sub_ports: Dict[str, Set[str]] = {} # module_name → set of port names
618
+
619
+ # Top-level ports
620
+ top_inputs: Set[str] = set()
621
+ top_outputs: Set[str] = set()
622
+ for p in top_ports:
623
+ if p.direction == "input":
624
+ top_inputs.add(p.name)
625
+ elif p.direction == "output":
626
+ top_outputs.add(p.name)
627
+
628
+ # Scan submodules
629
+ for sm in submodules:
630
+ port_names: Set[str] = set()
631
+ for p in sm.ports:
632
+ port_names.add(p.name)
633
+ if p.direction == "output":
634
+ drivers.setdefault(p.name, []).append(sm.name)
635
+ elif p.direction == "input":
636
+ receivers.setdefault(p.name, []).append(sm.name)
637
+ all_sub_ports[sm.name] = port_names
638
+
639
+ # Check: No two submodules drive the same signal
640
+ for sig, drv_list in drivers.items():
641
+ if len(drv_list) > 1:
642
+ fixes.append(
643
+ f"CONSISTENCY_FIX: Signal '{sig}' driven by multiple submodules: "
644
+ f"{drv_list}. Only the first driver is retained."
645
+ )
646
+
647
+ # Check: Clock and reset reach sequential submodules
648
+ sequential_keywords = [
649
+ "register", "flip", "ff", "latch", "memory", "fifo",
650
+ "counter", "state", "fsm", "pipeline", "buffer", "cache",
651
+ ]
652
+ for sm in submodules:
653
+ desc_lower = (sm.description or "").lower()
654
+ name_lower = sm.name.lower()
655
+ is_sequential = any(
656
+ kw in desc_lower or kw in name_lower for kw in sequential_keywords
657
+ )
658
+ if is_sequential:
659
+ port_name_set = all_sub_ports.get(sm.name, set())
660
+ has_clk = any("clk" in pn or "clock" in pn for pn in port_name_set)
661
+ has_rst = any("rst" in pn or "reset" in pn for pn in port_name_set)
662
+ if not has_clk:
663
+ fixes.append(
664
+ f"CONSISTENCY_FIX: Sequential submodule '{sm.name}' "
665
+ "missing clock port — added 'clk' input."
666
+ )
667
+ sm.ports.append(
668
+ PortSpec(
669
+ name="clk",
670
+ direction="input",
671
+ data_type="logic",
672
+ description="Clock signal (auto-added by consistency check)",
673
+ )
674
+ )
675
+ if not has_rst:
676
+ fixes.append(
677
+ f"CONSISTENCY_FIX: Sequential submodule '{sm.name}' "
678
+ "missing reset port — added 'rst_n' input."
679
+ )
680
+ sm.ports.append(
681
+ PortSpec(
682
+ name="rst_n",
683
+ direction="input",
684
+ data_type="logic",
685
+ description="Active-low reset (auto-added by consistency check)",
686
+ )
687
+ )
688
+
689
+ # Recurse into nested spec if present
690
+ if sm.nested_spec and isinstance(sm.nested_spec, dict):
691
+ nested_fixes = self._consistency_check_nested(sm.nested_spec)
692
+ fixes.extend(nested_fixes)
693
+
694
+ return fixes
695
+
696
+ def _consistency_check_nested(self, spec_dict: Dict[str, Any]) -> List[str]:
697
+ """Run consistency check on a nested spec dictionary."""
698
+ fixes: List[str] = []
699
+ module_name = spec_dict.get("top_module_name", "unknown")
700
+
701
+ nested_subs = spec_dict.get("submodules", [])
702
+ sequential_keywords = [
703
+ "register", "flip", "ff", "latch", "memory", "fifo",
704
+ "counter", "state", "fsm", "pipeline", "buffer", "cache",
705
+ ]
706
+
707
+ for nsub in nested_subs:
708
+ nsub_name = nsub.get("name", "")
709
+ nsub_desc = (nsub.get("description", "") or "").lower()
710
+ nsub_name_lower = nsub_name.lower()
711
+
712
+ is_seq = any(
713
+ kw in nsub_desc or kw in nsub_name_lower
714
+ for kw in sequential_keywords
715
+ )
716
+ if is_seq:
717
+ port_names = {p.get("name", "") for p in nsub.get("ports", [])}
718
+ has_clk = any("clk" in pn or "clock" in pn for pn in port_names)
719
+ has_rst = any("rst" in pn or "reset" in pn for pn in port_names)
720
+ if not has_clk:
721
+ fixes.append(
722
+ f"CONSISTENCY_FIX: Nested sequential submodule "
723
+ f"'{module_name}/{nsub_name}' missing clock — added 'clk'."
724
+ )
725
+ nsub.setdefault("ports", []).append({
726
+ "name": "clk",
727
+ "direction": "input",
728
+ "data_type": "logic",
729
+ "description": "Clock (auto-added)",
730
+ })
731
+ if not has_rst:
732
+ fixes.append(
733
+ f"CONSISTENCY_FIX: Nested sequential submodule "
734
+ f"'{module_name}/{nsub_name}' missing reset — added 'rst_n'."
735
+ )
736
+ nsub.setdefault("ports", []).append({
737
+ "name": "rst_n",
738
+ "direction": "input",
739
+ "data_type": "logic",
740
+ "description": "Reset (auto-added)",
741
+ })
742
+
743
+ # Recurse deeper if nested_spec exists
744
+ child_spec = nsub.get("nested_spec")
745
+ if child_spec and isinstance(child_spec, dict):
746
+ fixes.extend(self._consistency_check_nested(child_spec))
747
+
748
+ return fixes
749
+
750
+ # ── Utility ──────────────────────────────────────────────────────
751
+
752
+ @staticmethod
753
+ def _extract_json(raw: str) -> Optional[Dict[str, Any]]:
754
+ """Extract the first JSON object from LLM output."""
755
+ # Strip think tags and markdown fences
756
+ cleaned = re.sub(r"<think>.*?</think>", "", raw, flags=re.DOTALL)
757
+ cleaned = re.sub(r"```(?:json)?\s*", "", cleaned)
758
+ cleaned = re.sub(r"```", "", cleaned)
759
+ cleaned = cleaned.strip()
760
+
761
+ # Try direct parse
762
+ try:
763
+ return json.loads(cleaned)
764
+ except json.JSONDecodeError:
765
+ pass
766
+
767
+ # Find first { ... } block
768
+ depth = 0
769
+ start = -1
770
+ for i, ch in enumerate(cleaned):
771
+ if ch == "{":
772
+ if depth == 0:
773
+ start = i
774
+ depth += 1
775
+ elif ch == "}":
776
+ depth -= 1
777
+ if depth == 0 and start >= 0:
778
+ try:
779
+ return json.loads(cleaned[start : i + 1])
780
+ except json.JSONDecodeError:
781
+ start = -1
782
+
783
+ return None
784
+
785
+ # ── Enrichment for downstream stages ─────────────────────────────
786
+
787
+ def to_hierarchy_enrichment(self, result: HierarchyResult) -> Dict[str, Any]:
788
+ """
789
+ Convert a HierarchyResult into an enrichment dict that can be
790
+ appended to the orchestrator's spec artifact for downstream stages.
791
+ """
792
+ expansion_summary: List[str] = []
793
+ for sm in result.submodules:
794
+ if sm.requires_expansion and sm.nested_spec:
795
+ nested_subs = sm.nested_spec.get("submodules", [])
796
+ nested_contracts = sm.nested_spec.get("behavioral_contract", [])
797
+ expansion_summary.append(
798
+ f" {sm.name}: expanded into {len(nested_subs)} sub-blocks, "
799
+ f"{len(nested_contracts)} assertions"
800
+ )
801
+
802
+ return {
803
+ "hierarchy_depth": result.hierarchy_depth,
804
+ "expansion_count": result.expansion_count,
805
+ "expanded_modules": expansion_summary,
806
+ "hierarchy_warnings": [
807
+ w for w in result.warnings if w.startswith("HIERARCHY_WARNING")
808
+ ],
809
+ "consistency_fixes": [
810
+ w for w in result.warnings if w.startswith("CONSISTENCY_FIX")
811
+ ],
812
+ }
src/agentic/core/spec_generator.py ADDED
@@ -0,0 +1,988 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Hardware Specification Generator — Rigorous 6-Stage Spec Pipeline
3
+ =================================================================
4
+
5
+ Takes a user's plain English hardware description and produces a complete,
6
+ unambiguous, implementation-ready hardware specification (JSON contract).
7
+
8
+ This is the first and most critical stage in the autonomous chip design pipeline.
9
+ Every mistake here gets amplified by every stage after.
10
+
11
+ Stages:
12
+ 1. CLASSIFY — Categorise the design (PROCESSOR / MEMORY / INTERFACE / etc.)
13
+ 2. COMPLETE — Completeness check against mandatory fields per category
14
+ 3. DECOMPOSE — Module decomposition with domain validation
15
+ 4. INTERFACE — Top-level interface specification
16
+ 5. CONTRACT — Behavioral contract (GIVEN/WHEN/THEN assertions)
17
+ 6. OUTPUT — Structured JSON output with warnings
18
+ """
19
+
20
+ import json
21
+ import logging
22
+ import re
23
+ from dataclasses import asdict, dataclass, field
24
+ from typing import Any, Dict, List, Optional, Tuple
25
+
26
+ from crewai import Agent, Task, Crew, LLM
27
+
28
+ logger = logging.getLogger(__name__)
29
+
30
+
31
# ─── Design Categories ───────────────────────────────────────────────

# Closed vocabulary for Stage-1 classification; every spec's
# ``design_category`` field is exactly one of these strings.
DESIGN_CATEGORIES = [
    "PROCESSOR",   # CPU, microcontroller, DSP core, RISC-V, ARM-like
    "MEMORY",      # FIFO, SRAM, ROM, cache, register file
    "INTERFACE",   # UART, SPI, I2C, APB, AXI, Wishbone, USB
    "ARITHMETIC",  # ALU, multiplier, divider, FPU, MAC
    "CONTROL",     # State machine, arbiter, scheduler, interrupt controller
    "DATAPATH",    # Pipeline stage, shift register, barrel shifter
    "MIXED",       # Contains two or more of the above
]

# ─── Mandatory Fields Per Category ───────────────────────────────────

# Fields the Stage-2 completeness check requires per category; each is
# reported as present / inferred / missing in mandatory_fields_status.
# NOTE(review): there is no entry for "MIXED" — presumably handled
# separately by callers; confirm before indexing MANDATORY_FIELDS[cat].
MANDATORY_FIELDS = {
    "PROCESSOR": [
        "isa_subset",
        "pipeline_depth",
        "register_file",
        "memory_interface",
        "hazard_handling",
        "reset_type",
        "clock_domains",
        "target_frequency_mhz",
    ],
    "MEMORY": [
        "mem_type",
        "width_depth",
        "rw_port_count",
        "collision_behavior",
        "reset_behavior",
    ],
    "INTERFACE": [
        "protocol_version_mode",
        "data_width",
        "fifo_depth",
        "flow_control",
    ],
    "ARITHMETIC": [
        "input_output_widths",
        "signed_unsigned",
        "pipeline_stages",
        "overflow_behavior",
        "latency_cycles",
    ],
    "CONTROL": [
        "state_encoding",
        "state_count",
        "reset_type",
        "clock_domains",
    ],
    "DATAPATH": [
        "data_width",
        "pipeline_stages",
        "reset_type",
    ],
}

# ─── Domain-Valid Submodule Names ─────────────────────────────────────

# Allow-list of standard hardware component names per category; the
# decomposition prompt instructs the LLM to stay within this vocabulary.
DOMAIN_SUBMODULES = {
    "PROCESSOR": [
        "program_counter", "instruction_memory_interface",
        "instruction_fetch", "instruction_decode", "register_file",
        "alu", "data_memory_interface", "writeback", "hazard_unit",
        "branch_predictor", "pipeline_register", "control_unit",
    ],
    "MEMORY": [
        "memory_array", "read_port_logic", "write_port_logic",
        "address_decoder", "collision_logic", "output_register",
    ],
    "INTERFACE": [
        "clock_divider", "shift_register", "state_machine",
        "data_buffer", "control_logic", "status_register", "fifo",
    ],
    "ARITHMETIC": [
        "input_register", "computation_unit", "pipeline_stage_register",
        "output_register", "overflow_detector",
    ],
    "CONTROL": [
        "state_register", "next_state_logic", "output_logic",
        "priority_encoder", "arbiter_logic", "interrupt_register",
    ],
    "DATAPATH": [
        "shift_register", "pipeline_register", "mux_network",
        "barrel_shifter", "data_register",
    ],
}


# ─── Safe Defaults (Convention-Based Inference) ──────────────────────

# Values the completeness check may fill in without user input; each
# carries the reasoning string surfaced in InferredField records.
SAFE_DEFAULTS = {
    "reset_type": {
        "value": "synchronous active-low",
        "reasoning": "Active-low synchronous reset is standard for Sky130 PDK and ASIC flows",
    },
    "clock_domains": {
        "value": "single",
        "reasoning": "Single clock domain is the default unless explicitly specified",
    },
    "reset_behavior": {
        "value": "all_zeros",
        "reasoning": "Resetting all registers to zero is standard practice for deterministic startup",
    },
    "state_encoding": {
        "value": "binary",
        "reasoning": "Binary encoding is the default for small FSMs; one-hot selected automatically by synthesis tools for larger FSMs",
    },
    "flow_control": {
        "value": "none",
        "reasoning": "No flow control by default unless buffering or handshaking is specified",
    },
    "collision_behavior": {
        "value": "write_first",
        "reasoning": "Write-first is the most common RAM collision policy in FPGA/ASIC memory compilers",
    },
    "signed_unsigned": {
        "value": "unsigned",
        "reasoning": "Default to unsigned arithmetic unless explicitly stated otherwise",
    },
}
153
+
154
+
155
+ # ─── Output Dataclass ────────────────────────────────────────────────
156
+
157
@dataclass
class PortSpec:
    """One port on a module: name, direction, and SystemVerilog data type."""
    name: str
    direction: str  # "input" | "output" | "inout"
    data_type: str  # "logic" | "logic [N:0]"
    description: str = ""

    def to_dict(self) -> Dict[str, str]:
        """Plain-dict form for JSON serialization (same shape as asdict)."""
        return {
            "name": self.name,
            "direction": self.direction,
            "data_type": self.data_type,
            "description": self.description,
        }
166
+
167
+
168
@dataclass
class SubModuleSpec:
    """A named sub-block of the top module, with its own port list."""
    name: str
    description: str = ""
    ports: List[PortSpec] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize recursively; nested PortSpec entries become dicts."""
        return asdict(self)
177
+
178
+
179
@dataclass
class BehavioralStatement:
    """One testable GIVEN/WHEN/THEN/WITHIN assertion about the design."""
    given: str
    when: str
    then: str
    within: str  # latency bound, e.g. "1 cycle"

    def to_dict(self) -> Dict[str, str]:
        """Plain-dict form for JSON serialization (same shape as asdict)."""
        return {
            "given": self.given,
            "when": self.when,
            "then": self.then,
            "within": self.within,
        }

    def __str__(self) -> str:
        """Render as a single human-readable assertion sentence."""
        parts = (
            "GIVEN", self.given,
            "WHEN", self.when,
            "THEN", self.then,
            "WITHIN", self.within,
        )
        return " ".join(parts)
191
+
192
+
193
@dataclass
class InferredField:
    """Record of a spec field filled in by convention instead of the user."""
    field_name: str
    inferred_value: str
    reasoning: str

    def to_dict(self) -> Dict[str, str]:
        """Plain-dict form for JSON serialization (same shape as asdict)."""
        return {
            "field_name": self.field_name,
            "inferred_value": self.inferred_value,
            "reasoning": self.reasoning,
        }
201
+
202
+
203
@dataclass
class HardwareSpec:
    """Complete hardware specification — output of the 6-stage pipeline."""
    design_category: str
    top_module_name: str
    target_pdk: str = "sky130"
    target_frequency_mhz: int = 50
    ports: List[PortSpec] = field(default_factory=list)
    submodules: List[SubModuleSpec] = field(default_factory=list)
    behavioral_contract: List[BehavioralStatement] = field(default_factory=list)
    inferred_fields: List[InferredField] = field(default_factory=list)
    warnings: List[str] = field(default_factory=list)
    # Extra metadata for downstream pipeline
    design_description: str = ""
    mandatory_fields_status: Dict[str, str] = field(default_factory=dict)

    def to_json(self) -> str:
        """Pretty-printed JSON form of the full spec."""
        return json.dumps(self.to_dict(), indent=2)

    def to_dict(self) -> Dict[str, Any]:
        """Plain-dict form with every nested dataclass serialized."""
        out: Dict[str, Any] = {
            "design_category": self.design_category,
            "top_module_name": self.top_module_name,
            "target_pdk": self.target_pdk,
            "target_frequency_mhz": self.target_frequency_mhz,
        }
        out["ports"] = [p.to_dict() for p in self.ports]
        out["submodules"] = [s.to_dict() for s in self.submodules]
        out["behavioral_contract"] = [b.to_dict() for b in self.behavioral_contract]
        out["inferred_fields"] = [f.to_dict() for f in self.inferred_fields]
        out["warnings"] = self.warnings
        out["design_description"] = self.design_description
        out["mandatory_fields_status"] = self.mandatory_fields_status
        return out

    @classmethod
    def from_json(cls, json_str: str) -> "HardwareSpec":
        """Inverse of to_json(): rebuild nested dataclasses from dicts."""
        payload = json.loads(json_str)
        port_list = [PortSpec(**raw) for raw in payload.pop("ports", [])]
        sub_list = []
        for raw in payload.pop("submodules", []):
            sub_list.append(
                SubModuleSpec(
                    name=raw["name"],
                    description=raw.get("description", ""),
                    ports=[PortSpec(**rp) for rp in raw.get("ports", [])],
                )
            )
        contract_list = [
            BehavioralStatement(**raw)
            for raw in payload.pop("behavioral_contract", [])
        ]
        inferred_list = [
            InferredField(**raw) for raw in payload.pop("inferred_fields", [])
        ]
        # Remaining scalar keys feed straight into the constructor.
        return cls(
            ports=port_list,
            submodules=sub_list,
            behavioral_contract=contract_list,
            inferred_fields=inferred_list,
            **payload,
        )
253
+
254
+
255
# ─── Classification Prompt ───────────────────────────────────────────

# Stage-1 prompt: forces the LLM to pick exactly one DESIGN_CATEGORIES
# entry and return a small JSON verdict (consumed by the classifier).
CLASSIFY_PROMPT = """\
You are a senior VLSI architect. Classify the following hardware design description
into EXACTLY ONE category. If the design spans multiple categories, use MIXED and
list which categories it combines.

Categories:
- PROCESSOR: CPU, microcontroller, DSP core, RISC-V, ARM-like
- MEMORY: FIFO, SRAM, ROM, cache, register file
- INTERFACE: UART, SPI, I2C, APB, AXI, Wishbone, USB
- ARITHMETIC: ALU, multiplier, divider, FPU, MAC
- CONTROL: state machine, arbiter, scheduler, interrupt controller
- DATAPATH: pipeline stage, shift register, barrel shifter
- MIXED: contains two or more of the above

Design description:
{description}

Respond with ONLY a JSON object:
{{"category": "<CATEGORY>", "sub_categories": ["<if MIXED>"], "confidence": <0.0-1.0>, "reasoning": "<one sentence>"}}
"""
277
+
278
+
279
# ─── Completeness + Decomposition + Contract Prompt ──────────────────

# Stages 2-5 in one shot: completeness check, decomposition, interface,
# behavioral contract, and warnings — returned as a single JSON object.
# Double braces are literal braces surviving str.format().
SPEC_GENERATION_PROMPT = """\
You are a senior VLSI architect generating a complete hardware specification.
The design has been classified as: {category}

Design description: {description}
Design name: {design_name}

Perform ALL of the following steps and return a single JSON object:

STEP 1 — COMPLETENESS CHECK
For this {category} design, check these mandatory fields:
{mandatory_fields}

For each field:
- If present in description → set status to "present" with the value
- If safely inferable from standard practice → set status to "inferred" with value and reasoning
- If missing and no safe default → set status to "missing"

Safe defaults you may use:
- Reset: synchronous active-low (standard for Sky130)
- Clock: single domain unless explicitly specified
- Memory reset: all zeros
- FSM encoding: binary for small FSMs
- Arithmetic: unsigned unless stated otherwise

STEP 2 — MODULE DECOMPOSITION
Decompose into sub-modules. Rules:
- Maximum 8 sub-modules
- Each must have: name (snake_case), one-sentence description, complete port list
- Valid sub-module names for {category}: {valid_submodules}
- Every sub-module must correspond to a standard hardware component
- No overlapping responsibilities between sub-modules

STEP 3 — TOP-LEVEL INTERFACE
Define all top-level ports:
- Always include clk (input) and rst_n (input)
- Every port: name, direction (input/output/inout), data type (logic/logic[N:0])
- No floating ports — every port must have a defined purpose
- Justify every bus width

STEP 4 — BEHAVIORAL CONTRACT
Write precise English statements a testbench engineer can use for assertions.
Format: GIVEN/WHEN/THEN/WITHIN
Minimum requirements:
- 1 reset behavior statement
- 1 statement per major operation type
- 1 statement per edge case (overflow, empty, hazard, timeout)

STEP 5 — WARNINGS
List every assumption that could affect correctness. If you have zero warnings,
you are being overconfident — look again.

Return ONLY this JSON (no markdown fences, no commentary):
{{
  "design_category": "{category}",
  "top_module_name": "<snake_case>",
  "target_pdk": "sky130",
  "target_frequency_mhz": <integer>,
  "mandatory_fields_status": {{
    "<field_name>": {{"status": "present|inferred|missing", "value": "<value>", "reasoning": "<if inferred>"}}
  }},
  "ports": [
    {{"name": "<name>", "direction": "input|output|inout", "data_type": "logic|logic [N:0]", "description": "<purpose>"}}
  ],
  "submodules": [
    {{
      "name": "<snake_case>",
      "description": "<one sentence>",
      "ports": [
        {{"name": "<name>", "direction": "input|output", "data_type": "logic|logic [N:0]", "description": "<purpose>"}}
      ]
    }}
  ],
  "behavioral_contract": [
    {{"given": "<precondition>", "when": "<trigger>", "then": "<expected>", "within": "<latency>"}}
  ],
  "warnings": ["<assumption that could affect correctness>"]
}}
"""
360
+
361
+
362
+ # ─── The Spec Generator ─────────────────────────────────────────────
363
+
364
+ class HardwareSpecGenerator:
365
+ """
366
+ 6-stage hardware specification generator.
367
+
368
+ Takes a plain English hardware description and produces a complete,
369
+ unambiguous HardwareSpec that can be consumed by the Architect SID
370
+ decomposer for RTL generation.
371
+ """
372
+
373
+ def __init__(self, llm: LLM, verbose: bool = False, max_retries: int = 3):
374
+ self.llm = llm
375
+ self.verbose = verbose
376
+ self.max_retries = max_retries
377
+
378
+ def generate(
379
+ self,
380
+ design_name: str,
381
+ description: str,
382
+ target_pdk: str = "sky130",
383
+ ) -> Tuple[HardwareSpec, List[str]]:
384
+ """
385
+ Main entry point: generate a complete hardware specification.
386
+
387
+ Args:
388
+ design_name: Verilog-safe design name
389
+ description: Natural language hardware description
390
+ target_pdk: Target PDK (sky130, gf180)
391
+
392
+ Returns:
393
+ (HardwareSpec, issues) — spec and any issues/missing fields
394
+ """
395
+ issues: List[str] = []
396
+
397
+ # ── Gate: short descriptions get LLM elaboration, not rejection ──
398
+ word_count = len(description.strip().split())
399
+ if word_count < 10:
400
+ logger.info(f"[SpecGen] Description is short ({word_count} words) — elaborating via LLM")
401
+ options = self._elaborate_description(design_name, description)
402
+ # Return a special spec that signals the orchestrator to present options
403
+ spec = HardwareSpec(
404
+ design_category="ELABORATION_NEEDED",
405
+ top_module_name=design_name,
406
+ design_description=description,
407
+ warnings=[f"ELABORATION_NEEDED: Description is short ({word_count} words). "
408
+ "Please select one of the options below."] + options,
409
+ )
410
+ return spec, [f"Short description ({word_count} words) — 3 design options generated"]
411
+
412
+ # ── Stage 1: Classify ──
413
+ logger.info(f"[SpecGen] Stage 1: Classifying '{design_name}'")
414
+ category, classify_issues = self._classify(description)
415
+ issues.extend(classify_issues)
416
+
417
+ if category is None:
418
+ return self._rejected_spec(
419
+ design_name,
420
+ "Could not classify the design. Description is too ambiguous."
421
+ ), issues
422
+
423
+ logger.info(f"[SpecGen] Classified as: {category}")
424
+
425
+ # ── Stages 2-5: Generate full spec via LLM ──
426
+ logger.info(f"[SpecGen] Stages 2-5: Generating full spec for '{design_name}' ({category})")
427
+ spec, gen_issues = self._generate_full_spec(
428
+ design_name, description, category, target_pdk
429
+ )
430
+ issues.extend(gen_issues)
431
+
432
+ return spec, issues
433
+
434
+ def _elaborate_description(
435
+ self, design_name: str, description: str
436
+ ) -> List[str]:
437
+ """
438
+ When the user's description is short or vague, use LLM VLSI knowledge to
439
+ generate 3 concrete, expert-level design options and return them as a list
440
+ of strings (one per option) suitable for the orchestrator to present.
441
+ """
442
+ prompt = f"""\
443
+ You are a senior VLSI architect. A user wants to build a chip called '{design_name}' and described it as:
444
+
445
+ "{description}"
446
+
447
+ This is very brief. Using your expertise, generate EXACTLY 3 distinct, detailed design interpretations
448
+ for this chip. Each option should specify the architectural variant, key features, I/O ports, and
449
+ typical use cases. Make each option meaningfully different from the others.
450
+
451
+ Return ONLY this JSON (no markdown, no commentary):
452
+ {{
453
+ "options": [
454
+ {{
455
+ "id": 1,
456
+ "title": "<short title, max 8 words>",
457
+ "description": "<2-3 sentence detailed technical description including: bit-widths, port count, reset style, key functionality, and typical target clock frequency on Sky130>",
458
+ "category": "<PROCESSOR|MEMORY|INTERFACE|ARITHMETIC|CONTROL|DATAPATH>",
459
+ "key_ports": ["clk", "rst_n", "<port1>", "<port2>"],
460
+ "target_frequency_mhz": <number>
461
+ }},
462
+ {{
463
+ "id": 2,
464
+ "title": "<short title>",
465
+ "description": "<detailed description>",
466
+ "category": "<category>",
467
+ "key_ports": ["clk", "rst_n", "<port1>"],
468
+ "target_frequency_mhz": <number>
469
+ }},
470
+ {{
471
+ "id": 3,
472
+ "title": "<short title>",
473
+ "description": "<detailed description>",
474
+ "category": "<category>",
475
+ "key_ports": ["clk", "rst_n", "<port1>"],
476
+ "target_frequency_mhz": <number>
477
+ }}
478
+ ]
479
+ }}
480
+ """
481
+ try:
482
+ agent = Agent(
483
+ role="VLSI Design Advisor",
484
+ goal=f"Generate 3 detailed design options for '{design_name}'",
485
+ backstory=(
486
+ "You are a principal VLSI architect with 25 years of experience designing "
487
+ "chips for Sky130 and GF180. You excel at interpreting vague hardware requirements "
488
+ "and proposing concrete, implementable architectures with precise specifications."
489
+ ),
490
+ llm=self.llm,
491
+ verbose=self.verbose,
492
+ )
493
+ task = Task(
494
+ description=prompt,
495
+ expected_output="JSON with 3 design options",
496
+ agent=agent,
497
+ )
498
+ raw = str(Crew(agents=[agent], tasks=[task]).kickoff())
499
+ data = self._extract_json(raw)
500
+
501
+ if data and isinstance(data.get("options"), list):
502
+ result = []
503
+ for opt in data["options"][:3]:
504
+ opt_id = opt.get("id", "?")
505
+ title = opt.get("title", "Option")
506
+ desc = opt.get("description", "")
507
+ category = opt.get("category", "")
508
+ ports = ", ".join(opt.get("key_ports", [])[:6])
509
+ freq = opt.get("target_frequency_mhz", 50)
510
+ result.append(
511
+ f"OPTION_{opt_id}: {title} | "
512
+ f"Category: {category} | "
513
+ f"Freq: {freq} MHz | "
514
+ f"Ports: {ports} | "
515
+ f"Details: {desc}"
516
+ )
517
+ return result
518
+
519
+ except Exception as e:
520
+ logger.warning(f"[SpecGen] Elaboration LLM failed: {e}")
521
+
522
+ # Fallback: rule-based options based on common design patterns
523
+ name_lower = design_name.lower()
524
+ if any(kw in name_lower for kw in ["counter", "cnt"]):
525
+ return [
526
+ f"OPTION_1: Simple Up-Counter | Category: CONTROL | Freq: 50 MHz | "
527
+ f"Ports: clk, rst_n, enable, count[7:0] | "
528
+ f"Details: 8-bit synchronous up-counter with active-low reset and clock enable. "
529
+ f"Counts 0-255, wraps around. Single clock domain. Target 50 MHz on Sky130.",
530
+ f"OPTION_2: Up-Down Counter with Load | Category: CONTROL | Freq: 50 MHz | "
531
+ f"Ports: clk, rst_n, enable, dir, load, d[7:0], count[7:0] | "
532
+ f"Details: 8-bit bidirectional counter with parallel load and direction control. "
533
+ f"Supports up/down counting and preload of arbitrary values.",
534
+ f"OPTION_3: Programmable Counter with Terminal Count | Category: CONTROL | Freq: 100 MHz | "
535
+ f"Ports: clk, rst_n, enable, load, d[7:0], count[7:0], tc | "
536
+ f"Details: 8-bit counter with programmable terminal count compare and TC flag output. "
537
+ f"Auto-reloads on terminal count. Suitable for PWM and timer applications.",
538
+ ]
539
+ else:
540
+ return [
541
+ f"OPTION_1: Basic {design_name} (minimal) | Category: CONTROL | Freq: 50 MHz | "
542
+ f"Ports: clk, rst_n, data_in[7:0], data_out[7:0], valid | "
543
+ f"Details: Minimal synchronous implementation with 8-bit data path, active-low reset, "
544
+ f"and valid handshake. Single clock domain, 50 MHz target.",
545
+ f"OPTION_2: Pipelined {design_name} | Category: DATAPATH | Freq: 100 MHz | "
546
+ f"Ports: clk, rst_n, data_in[15:0], data_out[15:0], valid_in, valid_out | "
547
+ f"Details: 2-stage pipelined 16-bit datapath implementation. Back-to-back throughput "
548
+ f"of 1 result/cycle after 2-cycle latency. 100 MHz target on Sky130.",
549
+ f"OPTION_3: {design_name} with AXI-Lite interface | Category: INTERFACE | Freq: 50 MHz | "
550
+ f"Ports: clk, rst_n, awaddr, awvalid, awready, wdata, wvalid, wready, bresp, bvalid, bready | "
551
+ f"Details: Full AXI4-Lite slave wrapper around the core logic for register-mapped "
552
+ f"configuration from a host processor. 32-bit address/data.",
553
+ ]
554
+
555
def _classify(self, description: str) -> Tuple[Optional[str], List[str]]:
    """Stage 1: assign the design description to exactly one category.

    Returns (category, issues). Any LLM failure, malformed JSON, or unknown
    category is recorded in `issues` and resolved through the deterministic
    keyword-based fallback.
    """
    problems: List[str] = []

    classifier = Agent(
        role="VLSI Design Classifier",
        goal="Classify a hardware design into exactly one category",
        backstory="Senior VLSI architect who classifies designs for the spec pipeline.",
        llm=self.llm,
        verbose=self.verbose,
    )
    classify_task = Task(
        description=CLASSIFY_PROMPT.format(description=description[:4000]),
        expected_output="JSON object with category, confidence, and reasoning",
        agent=classifier,
    )

    try:
        response = str(Crew(agents=[classifier], tasks=[classify_task]).kickoff())
        parsed = self._extract_json(response)

        if parsed is None:
            problems.append("Classification LLM output was not valid JSON")
            # Attempt keyword-based fallback
            return self._keyword_classify(description), problems

        label = parsed.get("category", "").upper()
        score = float(parsed.get("confidence", 0.0))

        if label not in DESIGN_CATEGORIES:
            problems.append(f"LLM returned unknown category '{label}', using keyword fallback")
            return self._keyword_classify(description), problems

        # Accept low-confidence answers, but leave a trace for the caller.
        if score < 0.5:
            problems.append(
                f"Low classification confidence ({score:.2f}) for category {label}"
            )
        return label, problems

    except Exception as e:
        problems.append(f"Classification failed: {e}")
        return self._keyword_classify(description), problems
600
+
601
+ def _keyword_classify(self, description: str) -> Optional[str]:
602
+ """Deterministic keyword-based classification fallback."""
603
+ desc_lower = description.lower()
604
+
605
+ keyword_map = {
606
+ "PROCESSOR": ["cpu", "processor", "risc", "riscv", "rv32", "rv64", "microcontroller",
607
+ "instruction", "isa", "pipeline", "fetch", "decode", "execute"],
608
+ "MEMORY": ["fifo", "sram", "ram", "rom", "cache", "register file", "memory",
609
+ "stack", "queue", "buffer", "depth"],
610
+ "INTERFACE": ["uart", "spi", "i2c", "apb", "axi", "wishbone", "usb", "serial",
611
+ "baud", "mosi", "miso", "sclk", "scl", "sda"],
612
+ "ARITHMETIC": ["alu", "multiplier", "divider", "adder", "mac", "fpu",
613
+ "floating point", "multiply", "accumulate"],
614
+ "CONTROL": ["state machine", "fsm", "arbiter", "scheduler", "interrupt",
615
+ "controller", "priority"],
616
+ "DATAPATH": ["shift register", "barrel shifter", "pipeline stage",
617
+ "datapath", "mux", "demux"],
618
+ }
619
+
620
+ scores: Dict[str, int] = {cat: 0 for cat in keyword_map}
621
+ for cat, keywords in keyword_map.items():
622
+ for kw in keywords:
623
+ if kw in desc_lower:
624
+ scores[cat] += 1
625
+
626
+ best_cat = max(scores, key=scores.get)
627
+ if scores[best_cat] == 0:
628
+ return "CONTROL" # Safe default: treat as generic state machine/controller
629
+
630
+ # Check for MIXED
631
+ active = [cat for cat, score in scores.items() if score > 0]
632
+ if len(active) >= 2 and scores[active[1]] >= 2:
633
+ return "MIXED"
634
+
635
+ return best_cat
636
+
637
def _generate_full_spec(
    self,
    design_name: str,
    description: str,
    category: str,
    target_pdk: str,
) -> Tuple[HardwareSpec, List[str]]:
    """Stages 2-5: Completeness, decomposition, interface, and contract.

    Runs the spec-generation LLM up to self.max_retries times, feeding each
    attempt's validation failures back into the next prompt. On the final
    attempt a spec with outstanding validation issues is accepted (the issues
    are copied into spec.warnings); if every attempt fails outright, a
    minimal fallback spec is returned instead.

    Returns (spec, issues) where `issues` accumulates every problem seen
    across all attempts.
    """
    issues: List[str] = []

    # Resolve mandatory fields for category. MIXED designs must satisfy
    # the union of every category's mandatory fields and may use any
    # category's standard submodules.
    if category == "MIXED":
        mandatory = []
        for cat in MANDATORY_FIELDS:
            mandatory.extend(MANDATORY_FIELDS[cat])
        mandatory = list(set(mandatory))
        valid_subs = []
        for cat in DOMAIN_SUBMODULES:
            valid_subs.extend(DOMAIN_SUBMODULES[cat])
        valid_subs = list(set(valid_subs))
    else:
        mandatory = MANDATORY_FIELDS.get(category, [])
        valid_subs = DOMAIN_SUBMODULES.get(category, [])

    prompt = SPEC_GENERATION_PROMPT.format(
        category=category,
        description=description[:6000],  # keep the prompt within context limits
        design_name=design_name,
        mandatory_fields=json.dumps(mandatory, indent=2),
        valid_submodules=json.dumps(valid_subs),
    )

    last_error = ""
    for attempt in range(1, self.max_retries + 1):
        logger.info(f"[SpecGen] Full spec attempt {attempt}/{self.max_retries}")

        # On retries, tell the LLM exactly why the previous output was rejected.
        retry_context = ""
        if last_error:
            retry_context = (
                f"\n\nPREVIOUS ATTEMPT FAILED:\n{last_error}\n"
                "Fix the issues and return a corrected JSON."
            )

        agent = Agent(
            role="Hardware Specification Architect",
            goal=f"Generate a complete, unambiguous hardware specification for {design_name}",
            backstory=(
                "You are a principal VLSI architect with expertise in RTL specification. "
                "You produce implementation-ready specs that leave no room for ambiguity. "
                "Every field you fill in must be justified. Every assumption is a warning."
            ),
            llm=self.llm,
            verbose=self.verbose,
        )
        task = Task(
            description=prompt + retry_context,
            expected_output="Complete hardware specification JSON",
            agent=agent,
        )

        try:
            raw = str(Crew(agents=[agent], tasks=[task]).kickoff())
            data = self._extract_json(raw)

            if data is None:
                last_error = "Response was not valid JSON"
                continue

            spec = self._parse_spec(data, design_name, category, target_pdk, description)
            validation_issues = self._validate_spec(spec, mandatory, valid_subs)

            if validation_issues:
                last_error = "Validation issues:\n" + "\n".join(f" - {i}" for i in validation_issues)
                issues.extend(validation_issues)
                # Accept with warnings on last attempt
                if attempt == self.max_retries:
                    spec.warnings.extend(validation_issues)
                    logger.warning(f"[SpecGen] Accepting spec with {len(validation_issues)} warnings")
                    return spec, issues
                continue

            logger.info(f"[SpecGen] Spec generated successfully: {len(spec.submodules)} submodules, "
                        f"{len(spec.behavioral_contract)} contract statements")
            return spec, issues

        except Exception as e:
            last_error = f"Error: {e}"
            logger.warning(f"[SpecGen] Attempt {attempt} failed: {e}")
            continue

    # Fallback: generate minimal spec
    logger.warning("[SpecGen] All attempts failed — generating minimal fallback spec")
    spec = self._fallback_spec(design_name, description, category, target_pdk)
    issues.append("Spec generation fell back to minimal template — manual review required")
    return spec, issues
732
+
733
def _parse_spec(
    self,
    data: Dict[str, Any],
    design_name: str,
    category: str,
    target_pdk: str,
    description: str,
) -> HardwareSpec:
    """Parse LLM JSON output into a HardwareSpec.

    Missing fields are defaulted rather than rejected here — structural
    problems are caught afterwards by _validate_spec. clk/rst_n ports are
    injected when the LLM omitted them.
    """
    ports = []
    for p in data.get("ports", []):
        ports.append(PortSpec(
            name=p.get("name", ""),
            direction=p.get("direction", "input"),
            data_type=p.get("data_type", "logic"),
            description=p.get("description", ""),
        ))

    # Ensure clk and rst_n are present
    port_names = {p.name for p in ports}
    if "clk" not in port_names:
        ports.insert(0, PortSpec("clk", "input", "logic", "System clock"))
    if "rst_n" not in port_names:
        ports.insert(1, PortSpec("rst_n", "input", "logic", "Active-low synchronous reset"))

    submodules = []
    for s in data.get("submodules", []):
        sub_ports = [
            PortSpec(
                name=sp.get("name", ""),
                direction=sp.get("direction", "input"),
                data_type=sp.get("data_type", "logic"),
                description=sp.get("description", ""),
            )
            for sp in s.get("ports", [])
        ]
        submodules.append(SubModuleSpec(
            name=s.get("name", ""),
            description=s.get("description", ""),
            ports=sub_ports,
        ))

    contracts = []
    for b in data.get("behavioral_contract", []):
        contracts.append(BehavioralStatement(
            given=b.get("given", ""),
            when=b.get("when", ""),
            then=b.get("then", ""),
            within=b.get("within", "1 cycle"),
        ))

    # Parse inferred fields from mandatory_fields_status: only entries the
    # LLM explicitly marked status="inferred" become InferredField records.
    inferred_fields = []
    mfs = data.get("mandatory_fields_status", {})
    for fname, fdata in mfs.items():
        if isinstance(fdata, dict) and fdata.get("status") == "inferred":
            inferred_fields.append(InferredField(
                field_name=fname,
                inferred_value=str(fdata.get("value", "")),
                reasoning=fdata.get("reasoning", ""),
            ))

    warnings = data.get("warnings", [])
    if not warnings:
        # An empty warnings list is itself suspicious — flag for human review.
        warnings = ["No warnings were generated — spec should be reviewed for implicit assumptions"]

    return HardwareSpec(
        design_category=category,
        top_module_name=data.get("top_module_name", design_name),
        target_pdk=target_pdk,
        target_frequency_mhz=int(data.get("target_frequency_mhz", 50)),
        ports=ports,
        submodules=submodules,
        behavioral_contract=contracts,
        inferred_fields=inferred_fields,
        warnings=warnings,
        design_description=description,
        # Normalize bare values into {"status": "present", "value": ...} dicts
        # so downstream code can rely on a uniform shape.
        mandatory_fields_status={
            k: v if isinstance(v, dict) else {"status": "present", "value": str(v)}
            for k, v in mfs.items()
        },
    )
815
+
816
def _validate_spec(
    self,
    spec: HardwareSpec,
    mandatory_fields: List[str],
    valid_submodules: List[str],
) -> List[str]:
    """Check a generated spec for completeness and correctness.

    Returns a list of human-readable problem descriptions; an empty list
    means every check passed.
    """
    findings: List[str] = []

    # The top-level name must be a legal Verilog identifier.
    if not spec.top_module_name:
        findings.append("top_module_name is empty")
    elif re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', spec.top_module_name) is None:
        findings.append(f"top_module_name '{spec.top_module_name}' is not a valid Verilog identifier")

    # Port checks: clk and rst_n must exist.
    if len(spec.ports) < 2:
        findings.append("Fewer than 2 ports defined (need at minimum clk and rst_n)")
    declared = {port.name for port in spec.ports}
    for required in ("clk", "rst_n"):
        if required not in declared:
            findings.append(f"Missing {required} port")

    # Every port needs a description.
    for port in spec.ports:
        if not port.description:
            findings.append(f"Port '{port.name}' has no description — may be floating")

    # Submodule count: at least one, at most eight.
    if not spec.submodules:
        findings.append("No submodules defined")
    elif len(spec.submodules) > 8:
        findings.append(f"Too many submodules ({len(spec.submodules)}) — maximum is 8")

    # Domain check: each submodule name should fuzzily match (substring in
    # either direction) one of the standard components for this category.
    if valid_submodules and spec.submodules:
        for sub in spec.submodules:
            candidate = sub.name.lower().replace("-", "_")
            hit = any(
                known.lower() in candidate or candidate in known.lower()
                for known in valid_submodules
            )
            if not hit:
                findings.append(
                    f"Submodule '{sub.name}' does not match any standard component "
                    f"for {spec.design_category}: {valid_submodules}"
                )

    # Behavioral contract must exist and must cover reset behavior.
    if not spec.behavioral_contract:
        findings.append("No behavioral contract statements defined")
    else:
        covers_reset = any(
            "reset" in stmt.given.lower() or "rst" in stmt.given.lower()
            for stmt in spec.behavioral_contract
        )
        if not covers_reset:
            findings.append("Behavioral contract missing a reset behavior statement")

    # Mandatory per-category fields must not be reported as "missing".
    unresolved = []
    for mf in mandatory_fields:
        status = spec.mandatory_fields_status.get(mf, {})
        if isinstance(status, dict) and status.get("status") == "missing":
            unresolved.append(mf)
    if unresolved:
        findings.append(f"Missing mandatory fields: {', '.join(unresolved)}")

    return findings
885
+
886
def _fallback_spec(
    self,
    design_name: str,
    description: str,
    category: str,
    target_pdk: str,
) -> HardwareSpec:
    """Build a minimal, reviewable spec when LLM generation fails.

    Only clk/rst_n ports and a single reset contract are emitted; the
    warnings make explicit that manual review is required before RTL.
    """
    def _base_ports():
        # Fresh instances per call so the top level and the submodule
        # never share PortSpec objects.
        return [
            PortSpec("clk", "input", "logic", "System clock"),
            PortSpec("rst_n", "input", "logic", "Active-low synchronous reset"),
        ]

    reset_contract = BehavioralStatement(
        given="rst_n is asserted low",
        when="the next rising clock edge occurs",
        then="all outputs must be driven to their reset values",
        within="1 cycle",
    )

    placeholder_submodule = SubModuleSpec(
        name=design_name,
        description=description[:500],
        ports=_base_ports(),
    )

    return HardwareSpec(
        design_category=category,
        top_module_name=design_name,
        target_pdk=target_pdk,
        target_frequency_mhz=50,
        ports=_base_ports(),
        submodules=[placeholder_submodule],
        behavioral_contract=[reset_contract],
        inferred_fields=[],
        warnings=[
            "Fallback spec generated — LLM decomposition failed",
            "Manual review required before RTL generation",
            "Only minimal ports (clk, rst_n) are defined",
        ],
        design_description=description,
    )
929
+
930
def _rejected_spec(self, design_name: str, reason: str) -> HardwareSpec:
    """Build a sentinel spec whose REJECTED category marks the request as refused."""
    rejection_note = f"SPEC_REJECTED: {reason}"
    return HardwareSpec(
        design_category="REJECTED",
        top_module_name=design_name,
        warnings=[rejection_note],
        design_description=reason,
    )
938
+
939
+ def _extract_json(self, raw: str) -> Optional[Dict[str, Any]]:
940
+ """Extract a JSON object from LLM response text."""
941
+ text = raw.strip()
942
+
943
+ # Strip markdown fences
944
+ json_match = re.search(r'```(?:json)?\s*([\s\S]*?)```', text)
945
+ if json_match:
946
+ text = json_match.group(1).strip()
947
+
948
+ # Find outermost JSON object
949
+ brace_start = text.find('{')
950
+ brace_end = text.rfind('}')
951
+ if brace_start >= 0 and brace_end > brace_start:
952
+ try:
953
+ return json.loads(text[brace_start:brace_end + 1])
954
+ except json.JSONDecodeError:
955
+ pass
956
+
957
+ # Try parsing the whole thing
958
+ try:
959
+ return json.loads(text)
960
+ except json.JSONDecodeError:
961
+ return None
962
+
963
def to_sid_enrichment(self, spec: HardwareSpec) -> Dict[str, Any]:
    """
    Convert the HardwareSpec into enrichment data that augments the
    ArchitectModule's StructuredSpecDict (SID).

    Bridges the spec generator output into the existing SID pipeline. The
    behavioral contract is additionally rendered as one assertion-style hint
    string per statement for downstream verification stages.
    """
    assertion_hints = [
        f"Assert: GIVEN {stmt.given} WHEN {stmt.when} THEN {stmt.then} WITHIN {stmt.within}"
        for stmt in spec.behavioral_contract
    ]

    return {
        "design_category": spec.design_category,
        "target_frequency_mhz": spec.target_frequency_mhz,
        "behavioral_contract": [stmt.to_dict() for stmt in spec.behavioral_contract],
        "inferred_fields": [inf.to_dict() for inf in spec.inferred_fields],
        "spec_warnings": spec.warnings,
        "mandatory_fields_status": spec.mandatory_fields_status,
        "spec_validated": spec.design_category != "REJECTED",
        "verification_hints_from_spec": assertion_hints,
    }
src/agentic/core/verification_planner.py ADDED
@@ -0,0 +1,925 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ VerificationPlanner — Deterministic Verification Plan Generator
3
+ ================================================================
4
+ Produces a complete, structured verification plan from the hardware spec
5
+ BEFORE any testbench code is written. Ensures every P0 test is
6
+ implemented by downstream testbench generation.
7
+
8
+ Pipeline stage: VERIFICATION_PLAN (between CDC_ANALYZE and RTL_GEN)
9
+
10
+ Five steps:
11
+ 1. Extract testable behaviors from behavioral_contract
12
+ 2. Add mandatory tests by design category
13
+ 3. Generate SVA properties for all P0 tests
14
+ 4. Create coverage plan (port bins, FSM, FIFO boundaries)
15
+ 5. Output structured VerificationPlan dataclass
16
+ """
17
+
18
+ from __future__ import annotations
19
+
20
+ import json
21
+ import re
22
+ from dataclasses import dataclass, field, asdict
23
+ from typing import Any, Dict, List, Optional
24
+
25
+
26
+ # ─── Constants ────────────────────────────────────────────────────────
27
+
28
class Priority:
    # Test priority levels. P0 is contractual: downstream testbench
    # generation must implement every P0 test and derive SVA from it.
    P0 = "P0"  # Must pass — testbench generator must implement
    P1 = "P1"  # Should pass
    P2 = "P2"  # Nice to have
32
+
33
+
34
class TestCategory:
    # Buckets used to group test cases within a verification plan.
    RESET = "RESET"
    FUNCTIONAL = "FUNCTIONAL"
    EDGE_CASE = "EDGE_CASE"
    PROTOCOL = "PROTOCOL"
    TIMING = "TIMING"
    STRESS = "STRESS"
41
+
42
+
43
class Complexity:
    # Rough testbench implementation effort per test, for planning.
    TRIVIAL = "TRIVIAL"  # ~5 lines
    MODERATE = "MODERATE"  # ~20 lines
    COMPLEX = "COMPLEX"  # 50+ lines
47
+
48
+
49
+ # ─── Output Dataclasses ──────────────────────────────────────────────
50
+
51
@dataclass
class SVAProperty:
    """A SystemVerilog Assertion derived from a P0 test case."""
    property_name: str
    description: str
    sva_code: str
    related_test_id: str

    def to_dict(self) -> Dict[str, str]:
        # Explicit field-by-field snapshot; key order matches declaration
        # order, exactly as dataclasses.asdict() would produce.
        return {
            "property_name": self.property_name,
            "description": self.description,
            "sva_code": self.sva_code,
            "related_test_id": self.related_test_id,
        }
61
+
62
+
63
@dataclass
class CoveragePoint:
    """A single coverage collection point."""
    name: str
    cover_type: str  # "port_bins" | "fsm_state" | "fsm_transition" | "fifo_boundary" | "toggle"
    target_signal: str
    bins: List[str] = field(default_factory=list)
    description: str = ""

    def to_dict(self) -> Dict[str, Any]:
        # Mirrors dataclasses.asdict(): every field, with `bins` copied so
        # callers can't mutate this instance through the returned dict.
        return {
            "name": self.name,
            "cover_type": self.cover_type,
            "target_signal": self.target_signal,
            "bins": list(self.bins),
            "description": self.description,
        }
74
+
75
+
76
@dataclass
class TestCase:
    """A single verification test case."""
    test_id: str
    title: str
    category: str  # TestCategory value
    priority: str  # Priority value
    complexity: str  # Complexity value
    description: str
    stimulus: str  # What to drive
    expected: str  # What to check
    sva_property: Optional[SVAProperty] = None
    source: str = "behavioral_contract"  # "behavioral_contract" | "mandatory"

    def to_dict(self) -> Dict[str, Any]:
        # asdict() recurses into the nested SVAProperty (None stays None).
        return asdict(self)
93
+
94
+
95
@dataclass
class VerificationPlan:
    """Complete verification plan output."""
    top_module_name: str
    design_category: str
    total_tests: int = 0
    p0_count: int = 0
    p1_count: int = 0
    p2_count: int = 0
    test_cases: List[TestCase] = field(default_factory=list)
    sva_properties: List[SVAProperty] = field(default_factory=list)
    coverage_points: List[CoveragePoint] = field(default_factory=list)
    warnings: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        # Scalars first, then nested dataclasses serialized via their own
        # to_dict(); key order matches field declaration order.
        summary: Dict[str, Any] = {
            "top_module_name": self.top_module_name,
            "design_category": self.design_category,
            "total_tests": self.total_tests,
            "p0_count": self.p0_count,
            "p1_count": self.p1_count,
            "p2_count": self.p2_count,
        }
        summary["test_cases"] = [tc.to_dict() for tc in self.test_cases]
        summary["sva_properties"] = [prop.to_dict() for prop in self.sva_properties]
        summary["coverage_points"] = [cp.to_dict() for cp in self.coverage_points]
        summary["warnings"] = self.warnings
        return summary

    def to_json(self, indent: int = 2) -> str:
        return json.dumps(self.to_dict(), indent=indent)
125
+
126
+
127
+ # ─── Mandatory Test Templates ────────────────────────────────────────
128
+
129
+ _RESET_TESTS = [
130
+ {
131
+ "test_id": "TEST_RST_001",
132
+ "title": "Assert reset clears all outputs",
133
+ "category": TestCategory.RESET,
134
+ "priority": Priority.P0,
135
+ "complexity": Complexity.TRIVIAL,
136
+ "description": "Apply active-low reset and verify all outputs go to default values",
137
+ "stimulus": "Assert rst_n=0 for 2 cycles, then release",
138
+ "expected": "All output registers go to 0 / default",
139
+ },
140
+ {
141
+ "test_id": "TEST_RST_002",
142
+ "title": "Reset during active operation",
143
+ "category": TestCategory.RESET,
144
+ "priority": Priority.P0,
145
+ "complexity": Complexity.MODERATE,
146
+ "description": "Apply reset while module is in a non-idle state",
147
+ "stimulus": "Drive module to active state, assert rst_n=0 mid-operation",
148
+ "expected": "Module returns to idle/default state within 2 cycles of reset",
149
+ },
150
+ {
151
+ "test_id": "TEST_RST_003",
152
+ "title": "Multiple reset assertion cycles",
153
+ "category": TestCategory.STRESS,
154
+ "priority": Priority.P1,
155
+ "complexity": Complexity.MODERATE,
156
+ "description": "Rapidly toggle reset to verify no metastability or latch-up",
157
+ "stimulus": "Toggle rst_n every 3 cycles for 20 cycles",
158
+ "expected": "No X/Z propagation, outputs remain deterministic",
159
+ },
160
+ ]
161
+
162
+ _PROCESSOR_TESTS = [
163
+ {
164
+ "test_id": "TEST_PROC_001",
165
+ "title": "Instruction fetch and decode",
166
+ "category": TestCategory.FUNCTIONAL,
167
+ "priority": Priority.P0,
168
+ "complexity": Complexity.COMPLEX,
169
+ "description": "Verify basic instruction fetch-decode pipeline stage",
170
+ "stimulus": "Load instruction memory with NOP/ADD/SUB, drive clk",
171
+ "expected": "Correct opcode decoded within pipeline latency",
172
+ },
173
+ {
174
+ "test_id": "TEST_PROC_002",
175
+ "title": "Register file read/write",
176
+ "category": TestCategory.FUNCTIONAL,
177
+ "priority": Priority.P0,
178
+ "complexity": Complexity.MODERATE,
179
+ "description": "Write to register file and read back",
180
+ "stimulus": "Write known values to registers, then read each",
181
+ "expected": "Read data matches written data for all addressed registers",
182
+ },
183
+ {
184
+ "test_id": "TEST_PROC_003",
185
+ "title": "Pipeline stall/flush",
186
+ "category": TestCategory.EDGE_CASE,
187
+ "priority": Priority.P0,
188
+ "complexity": Complexity.COMPLEX,
189
+ "description": "Verify pipeline handles data hazards correctly",
190
+ "stimulus": "Issue back-to-back dependent instructions",
191
+ "expected": "Stall inserts or forwarding produces correct result",
192
+ },
193
+ {
194
+ "test_id": "TEST_PROC_004",
195
+ "title": "Branch taken / not-taken",
196
+ "category": TestCategory.FUNCTIONAL,
197
+ "priority": Priority.P1,
198
+ "complexity": Complexity.COMPLEX,
199
+ "description": "Verify branch resolution and PC update",
200
+ "stimulus": "Issue conditional branch with condition true then false",
201
+ "expected": "PC updates correctly for both taken and not-taken paths",
202
+ },
203
+ {
204
+ "test_id": "TEST_PROC_005",
205
+ "title": "Interrupt handling",
206
+ "category": TestCategory.PROTOCOL,
207
+ "priority": Priority.P1,
208
+ "complexity": Complexity.COMPLEX,
209
+ "description": "Assert interrupt during active execution",
210
+ "stimulus": "Drive interrupt line during instruction execution",
211
+ "expected": "PC jumps to ISR, context is saved",
212
+ },
213
+ {
214
+ "test_id": "TEST_PROC_006",
215
+ "title": "Memory read/write latency",
216
+ "category": TestCategory.TIMING,
217
+ "priority": Priority.P1,
218
+ "complexity": Complexity.MODERATE,
219
+ "description": "Verify data memory access completes within spec'd latency",
220
+ "stimulus": "Issue load/store to data memory",
221
+ "expected": "Data available within declared latency cycles",
222
+ },
223
+ ]
224
+
225
+ _MEMORY_TESTS = [
226
+ {
227
+ "test_id": "TEST_MEM_001",
228
+ "title": "Write then read all addresses",
229
+ "category": TestCategory.FUNCTIONAL,
230
+ "priority": Priority.P0,
231
+ "complexity": Complexity.MODERATE,
232
+ "description": "Exhaustive write-read of all memory addresses",
233
+ "stimulus": "Write unique pattern to each address, read back",
234
+ "expected": "Read data matches written data for every address",
235
+ },
236
+ {
237
+ "test_id": "TEST_MEM_002",
238
+ "title": "Simultaneous read-write (dual port)",
239
+ "category": TestCategory.EDGE_CASE,
240
+ "priority": Priority.P0,
241
+ "complexity": Complexity.MODERATE,
242
+ "description": "If dual-port, read and write same address simultaneously",
243
+ "stimulus": "Write addr=N while reading addr=N in same cycle",
244
+ "expected": "Defined behavior per spec (read-before-write or write-first)",
245
+ },
246
+ {
247
+ "test_id": "TEST_MEM_003",
248
+ "title": "Boundary address access",
249
+ "category": TestCategory.EDGE_CASE,
250
+ "priority": Priority.P0,
251
+ "complexity": Complexity.TRIVIAL,
252
+ "description": "Access first and last valid addresses",
253
+ "stimulus": "Write/read address 0 and address MAX",
254
+ "expected": "No out-of-bounds errors, correct data returned",
255
+ },
256
+ ]
257
+
258
+ _INTERFACE_TESTS = [
259
+ {
260
+ "test_id": "TEST_IF_001",
261
+ "title": "Protocol handshake sequence",
262
+ "category": TestCategory.PROTOCOL,
263
+ "priority": Priority.P0,
264
+ "complexity": Complexity.MODERATE,
265
+ "description": "Verify valid/ready handshake completes correctly",
266
+ "stimulus": "Assert valid, wait for ready, transfer data",
267
+ "expected": "Data transferred on (valid && ready) cycle",
268
+ },
269
+ {
270
+ "test_id": "TEST_IF_002",
271
+ "title": "Back-pressure handling",
272
+ "category": TestCategory.PROTOCOL,
273
+ "priority": Priority.P0,
274
+ "complexity": Complexity.MODERATE,
275
+ "description": "Hold ready low to create back-pressure",
276
+ "stimulus": "Assert valid, keep ready=0 for N cycles, then assert ready",
277
+ "expected": "Data held stable until handshake completes, no data loss",
278
+ },
279
+ {
280
+ "test_id": "TEST_IF_003",
281
+ "title": "Burst transfer",
282
+ "category": TestCategory.FUNCTIONAL,
283
+ "priority": Priority.P1,
284
+ "complexity": Complexity.COMPLEX,
285
+ "description": "Send burst of consecutive transfers",
286
+ "stimulus": "Drive valid continuously with incrementing data",
287
+ "expected": "All data items received in order without loss",
288
+ },
289
+ {
290
+ "test_id": "TEST_IF_004",
291
+ "title": "Idle / inactive state",
292
+ "category": TestCategory.EDGE_CASE,
293
+ "priority": Priority.P1,
294
+ "complexity": Complexity.TRIVIAL,
295
+ "description": "Verify interface outputs in idle when no transaction",
296
+ "stimulus": "Deassert valid and all enables for 10 cycles",
297
+ "expected": "No spurious ready/valid assertions, outputs stable",
298
+ },
299
+ ]
300
+
301
# Mandatory tests appended for ARITHMETIC-category designs (ALUs,
# multipliers, datapath blocks). Each dict is splatted directly into
# TestCase(**tmpl, source="mandatory") by _add_mandatory_tests, so keys
# must exactly match TestCase field names.
_ARITHMETIC_TESTS = [
    {
        "test_id": "TEST_ARITH_001",
        "title": "Zero operand",
        "category": TestCategory.EDGE_CASE,
        "priority": Priority.P0,
        "complexity": Complexity.TRIVIAL,
        "description": "Verify operation with one or both operands zero",
        "stimulus": "Drive a=0, b=non-zero; then a=non-zero, b=0; then both=0",
        "expected": "Correct arithmetic result for all zero combinations",
    },
    {
        "test_id": "TEST_ARITH_002",
        "title": "Maximum value (overflow boundary)",
        "category": TestCategory.EDGE_CASE,
        "priority": Priority.P0,
        "complexity": Complexity.MODERATE,
        "description": "Drive maximum representable values to check overflow",
        "stimulus": "Drive a=MAX, b=MAX for addition/multiply",
        "expected": "Overflow flag set or result wraps correctly per spec",
    },
    {
        "test_id": "TEST_ARITH_003",
        "title": "Signed vs unsigned interpretation",
        "category": TestCategory.FUNCTIONAL,
        "priority": Priority.P1,
        "complexity": Complexity.MODERATE,
        "description": "Verify signed arithmetic produces correct sign extension",
        "stimulus": "Drive negative values (MSB=1) through signed operations",
        "expected": "Result matches expected signed arithmetic",
    },
    {
        "test_id": "TEST_ARITH_004",
        "title": "Pipeline throughput",
        "category": TestCategory.TIMING,
        "priority": Priority.P1,
        "complexity": Complexity.MODERATE,
        "description": "Feed consecutive operations to measure pipeline throughput",
        "stimulus": "Drive new operands every cycle for N cycles",
        "expected": "Results appear after pipeline latency, one per cycle",
    },
]
343
+
344
# Dispatch table: design_category string -> mandatory test templates.
# Categories with no entry here (e.g. "MIXED") simply get no extra
# category-specific tests.
_CATEGORY_TESTS = dict(
    PROCESSOR=_PROCESSOR_TESTS,
    MEMORY=_MEMORY_TESTS,
    INTERFACE=_INTERFACE_TESTS,
    ARITHMETIC=_ARITHMETIC_TESTS,
)
350
+
351
+
352
# ─── VerificationPlanner ─────────────────────────────────────────────

class VerificationPlanner:
    """
    Deterministic verification plan generator.

    Builds a VerificationPlan from the hardware spec alone: behavioral
    contracts become test cases, mandatory per-category tests are
    appended, every P0 test gets an SVA skeleton, and coverage points
    are derived from ports, FSM keyword hints, and FIFO usage.

    Usage::

        planner = VerificationPlanner()
        plan = planner.plan(hardware_spec)
    """

    def plan(
        self,
        hardware_spec: Any,
        cdc_result: Any = None,
        hierarchy_result: Any = None,
    ) -> VerificationPlan:
        """
        Generate a complete verification plan from the hardware spec.

        Parameters
        ----------
        hardware_spec : HardwareSpec
            The validated hardware specification.
        cdc_result : CDCResult, optional
            CDC analysis result (for multi-clock coverage).
        hierarchy_result : HierarchyResult, optional
            Hierarchy expansion result (for submodule coverage).
            NOTE(review): accepted but not referenced anywhere in this
            method's pipeline below — confirm whether it should feed
            submodule coverage or be removed from the signature.

        Returns
        -------
        VerificationPlan
        """
        top_name = getattr(hardware_spec, "top_module_name", "unknown")
        category = getattr(hardware_spec, "design_category", "MIXED")

        plan = VerificationPlan(
            top_module_name=top_name,
            design_category=category,
        )

        # Step 1 — extract testable behaviors from behavioral_contract
        self._extract_testable_behaviors(hardware_spec, plan)

        # Step 2 — add mandatory tests by design category
        self._add_mandatory_tests(category, plan)

        # Step 3 — generate SVA properties for P0 tests
        self._generate_sva_properties(plan, hardware_spec)

        # Step 4 — create coverage plan
        self._generate_coverage_plan(plan, hardware_spec, cdc_result)

        # Step 5 — finalize counts
        self._finalize(plan)

        return plan

    # ── Step 1: Extract Testable Behaviors ────────────────────────────

    def _extract_testable_behaviors(
        self,
        spec: Any,
        plan: VerificationPlan,
    ) -> None:
        """Parse behavioral_contract statements into test cases.

        Accepts either structured statements (with given/when/then/within
        attributes) or plain strings; a plain string is treated as the
        'given' clause with empty when/then and a default 1-cycle bound.
        """
        contracts = getattr(spec, "behavioral_contract", []) or []
        if not contracts:
            plan.warnings.append("No behavioral_contract found — only mandatory tests will be generated")
            return

        for idx, stmt in enumerate(contracts, start=1):
            given = getattr(stmt, "given", str(stmt)) if not isinstance(stmt, str) else stmt
            when = getattr(stmt, "when", "") if not isinstance(stmt, str) else ""
            then = getattr(stmt, "then", "") if not isinstance(stmt, str) else ""
            within = getattr(stmt, "within", "1 cycle") if not isinstance(stmt, str) else "1 cycle"

            test_id = f"TEST_BEH_{idx:03d}"

            # Determine category from statement content
            category = self._categorize_behavior(given, when, then)

            # Determine priority from timing constraint
            # (any concrete WITHIN bound other than "N/A" promotes to P0)
            priority = Priority.P0 if within and within != "N/A" else Priority.P1

            # Determine complexity from description length
            total_len = len(given) + len(when) + len(then)
            if total_len < 60:
                complexity = Complexity.TRIVIAL
            elif total_len < 150:
                complexity = Complexity.MODERATE
            else:
                complexity = Complexity.COMPLEX

            title = self._shorten(f"GIVEN {given} WHEN {when} THEN {then}", 80)

            tc = TestCase(
                test_id=test_id,
                title=title,
                category=category,
                priority=priority,
                complexity=complexity,
                description=f"GIVEN {given} WHEN {when} THEN {then} WITHIN {within}",
                stimulus=f"Set up: {given}. Drive: {when}",
                expected=f"Check: {then} within {within}",
                source="behavioral_contract",
            )
            plan.test_cases.append(tc)

    def _categorize_behavior(self, given: str, when: str, then: str) -> str:
        """Infer test category from behavioral statement text.

        Keyword groups are checked in priority order (reset first,
        functional as the fallback); the first matching group wins.
        """
        combined = f"{given} {when} {then}".lower()
        if any(kw in combined for kw in ("reset", "rst", "rst_n", "power-on")):
            return TestCategory.RESET
        if any(kw in combined for kw in ("edge", "boundary", "overflow", "underflow", "max", "min", "full", "empty")):
            return TestCategory.EDGE_CASE
        if any(kw in combined for kw in ("valid", "ready", "handshake", "ack", "req", "protocol")):
            return TestCategory.PROTOCOL
        if any(kw in combined for kw in ("latency", "cycle", "timing", "clock", "delay", "frequency")):
            return TestCategory.TIMING
        if any(kw in combined for kw in ("stress", "continuous", "rapid", "burst", "saturation")):
            return TestCategory.STRESS
        return TestCategory.FUNCTIONAL

    @staticmethod
    def _shorten(text: str, max_len: int) -> str:
        # Truncate with a "..." suffix so the result never exceeds max_len.
        if len(text) <= max_len:
            return text
        return text[: max_len - 3] + "..."

    # ── Step 2: Add Mandatory Tests ───────────────────────────────────

    def _add_mandatory_tests(self, category: str, plan: VerificationPlan) -> None:
        """Add reset tests (for all designs) + category-specific mandatory tests."""
        # Reset tests are mandatory for all designs
        for tmpl in _RESET_TESTS:
            plan.test_cases.append(TestCase(**tmpl, source="mandatory"))

        # Category-specific tests (unknown categories get none)
        cat_tests = _CATEGORY_TESTS.get(category, [])
        for tmpl in cat_tests:
            plan.test_cases.append(TestCase(**tmpl, source="mandatory"))

    # ── Step 3: Generate SVA Properties ───────────────────────────────

    def _generate_sva_properties(
        self,
        plan: VerificationPlan,
        spec: Any,
    ) -> None:
        """Generate SystemVerilog Assertions for all P0 tests.

        Each generated property is attached to its test case AND
        collected on the plan for emission into the testbench.
        """
        p0_tests = [t for t in plan.test_cases if t.priority == Priority.P0]

        for tc in p0_tests:
            sva = self._test_to_sva(tc, spec)
            if sva:
                tc.sva_property = sva
                plan.sva_properties.append(sva)

    def _test_to_sva(self, tc: TestCase, spec: Any) -> Optional[SVAProperty]:
        """Convert a P0 test case into an SVA property.

        Dispatch order matters: RESET category wins over source, and a
        behavioral-contract source wins over the remaining categories.
        """
        # Derive a clean property name
        prop_name = re.sub(r"[^a-zA-Z0-9_]", "_", tc.test_id).lower()

        if tc.category == TestCategory.RESET:
            return self._sva_reset(tc, prop_name, spec)
        elif tc.source == "behavioral_contract":
            return self._sva_from_contract(tc, prop_name, spec)
        elif tc.category == TestCategory.FUNCTIONAL:
            return self._sva_functional(tc, prop_name, spec)
        elif tc.category == TestCategory.EDGE_CASE:
            return self._sva_edge_case(tc, prop_name, spec)
        elif tc.category == TestCategory.PROTOCOL:
            return self._sva_protocol(tc, prop_name, spec)
        elif tc.category == TestCategory.TIMING:
            return self._sva_timing(tc, prop_name, spec)
        else:
            return self._sva_generic(tc, prop_name)

    def _get_output_ports(self, spec: Any) -> List[str]:
        """Extract output port names from spec."""
        ports = getattr(spec, "ports", []) or []
        return [
            getattr(p, "name", str(p))
            for p in ports
            if getattr(p, "direction", "").lower() == "output"
        ]

    def _get_clk_rst(self, spec: Any) -> tuple:
        """Identify clock and reset signal names from spec ports.

        Returns (clk_name, rst_name), defaulting to ("clk", "rst_n").
        NOTE(review): if multiple ports match, the LAST match wins; and
        active-high names ("rst", "reset") are returned under the same
        slot as active-low ones — downstream SVA uses "!rst" which
        assumes active-low. Confirm polarity handling.
        """
        ports = getattr(spec, "ports", []) or []
        clk_name = "clk"
        rst_name = "rst_n"
        for p in ports:
            name = getattr(p, "name", "").lower()
            if name in ("clk", "clock", "i_clk", "sys_clk"):
                clk_name = getattr(p, "name", "clk")
            if name in ("rst_n", "reset_n", "rstn", "i_rst_n", "rst", "reset"):
                rst_name = getattr(p, "name", "rst_n")
        return clk_name, rst_name

    def _sva_reset(self, tc: TestCase, prop_name: str, spec: Any) -> SVAProperty:
        """Generate reset assertion: all outputs go to 0 after reset.

        Emits a commented-out placeholder when the spec lists no output
        ports, so the generated file still compiles.
        """
        clk, rst = self._get_clk_rst(spec)
        outputs = self._get_output_ports(spec)

        if outputs:
            # '!{rst}' assumes active-low reset — see _get_clk_rst note.
            checks = " && ".join(f"({o} == '0)" for o in outputs[:8]) # Cap at 8
            sva_code = (
                f"property {prop_name};\n"
                f" @(posedge {clk}) !{rst} |-> ##2 ({checks});\n"
                f"endproperty\n"
                f"assert property ({prop_name}) else $error(\"{tc.test_id}: reset check failed\");"
            )
        else:
            sva_code = (
                f"// {prop_name}: No output ports found — manual SVA required\n"
                f"// property {prop_name};\n"
                f"// @(posedge clk) !rst_n |-> ##2 (outputs == '0);\n"
                f"// endproperty"
            )

        return SVAProperty(
            property_name=prop_name,
            description=tc.description,
            sva_code=sva_code,
            related_test_id=tc.test_id,
        )

    def _sva_from_contract(self, tc: TestCase, prop_name: str, spec: Any) -> SVAProperty:
        """Generate SVA from a behavioral contract statement.

        Uses a bounded delay "##[1:N]" when a WITHIN cycle count can be
        parsed, otherwise falls back to non-overlapping implication |=>.
        The antecedent/consequent are sanitized text placeholders and
        typically need manual refinement.
        """
        clk, rst = self._get_clk_rst(spec)

        # Parse WITHIN for cycle count
        cycles = self._parse_within_cycles(tc.description)

        # Build antecedent from stimulus, consequent from expected
        antecedent = self._to_sva_expr(tc.stimulus)
        consequent = self._to_sva_expr(tc.expected)

        if cycles and cycles > 0:
            delay = f"##[1:{cycles}]" if cycles > 1 else "##1"
            sva_code = (
                f"property {prop_name};\n"
                f" @(posedge {clk}) disable iff (!{rst})\n"
                f" ({antecedent}) |-> {delay} ({consequent});\n"
                f"endproperty\n"
                f"assert property ({prop_name}) else $error(\"{tc.test_id}: behavioral contract violated\");"
            )
        else:
            sva_code = (
                f"property {prop_name};\n"
                f" @(posedge {clk}) disable iff (!{rst})\n"
                f" ({antecedent}) |=> ({consequent});\n"
                f"endproperty\n"
                f"assert property ({prop_name}) else $error(\"{tc.test_id}: behavioral contract violated\");"
            )

        return SVAProperty(
            property_name=prop_name,
            description=tc.description,
            sva_code=sva_code,
            related_test_id=tc.test_id,
        )

    def _sva_functional(self, tc: TestCase, prop_name: str, spec: Any) -> SVAProperty:
        """Generic functional SVA with overlapping implication.

        The property body is a trivially-true placeholder (1 |-> ##1 1)
        carrying a TODO for a design-specific check.
        """
        clk, rst = self._get_clk_rst(spec)
        sva_code = (
            f"property {prop_name};\n"
            f" @(posedge {clk}) disable iff (!{rst})\n"
            f" /* Functional: {self._shorten(tc.title, 60)} */\n"
            f" 1 |-> ##1 1; // TODO: replace with design-specific check\n"
            f"endproperty\n"
            f"assert property ({prop_name}) else $error(\"{tc.test_id}: {tc.title}\");"
        )
        return SVAProperty(
            property_name=prop_name,
            description=tc.description,
            sva_code=sva_code,
            related_test_id=tc.test_id,
        )

    def _sva_edge_case(self, tc: TestCase, prop_name: str, spec: Any) -> SVAProperty:
        """Edge case SVA — check stability after edge condition.

        Placeholder body using $stable on a constant; carries a TODO
        for the real edge-case signal.
        """
        clk, rst = self._get_clk_rst(spec)
        sva_code = (
            f"property {prop_name};\n"
            f" @(posedge {clk}) disable iff (!{rst})\n"
            f" /* Edge case: {self._shorten(tc.title, 60)} */\n"
            f" 1 |-> ##1 $stable(1'b1); // TODO: replace with edge-case signal\n"
            f"endproperty\n"
            f"assert property ({prop_name}) else $error(\"{tc.test_id}: {tc.title}\");"
        )
        return SVAProperty(
            property_name=prop_name,
            description=tc.description,
            sva_code=sva_code,
            related_test_id=tc.test_id,
        )

    def _sva_protocol(self, tc: TestCase, prop_name: str, spec: Any) -> SVAProperty:
        """Protocol SVA using $rose/$fell for handshake signals.

        NOTE(review): hard-codes signal names 'valid'/'ready' — the DUT
        may not declare these; confirm or parameterize.
        """
        clk, rst = self._get_clk_rst(spec)
        sva_code = (
            f"property {prop_name};\n"
            f" @(posedge {clk}) disable iff (!{rst})\n"
            f" /* Protocol: {self._shorten(tc.title, 60)} */\n"
            f" $rose(valid) |-> ##[1:4] $rose(ready);\n"
            f"endproperty\n"
            f"assert property ({prop_name}) else $error(\"{tc.test_id}: {tc.title}\");"
        )
        return SVAProperty(
            property_name=prop_name,
            description=tc.description,
            sva_code=sva_code,
            related_test_id=tc.test_id,
        )

    def _sva_timing(self, tc: TestCase, prop_name: str, spec: Any) -> SVAProperty:
        """Timing SVA — latency bound check.

        Bound comes from the test's WITHIN clause, defaulting to 4
        cycles. NOTE(review): hard-codes 'start'/'done' signal names —
        confirm against the DUT's ports.
        """
        clk, rst = self._get_clk_rst(spec)
        cycles = self._parse_within_cycles(tc.description) or 4
        sva_code = (
            f"property {prop_name};\n"
            f" @(posedge {clk}) disable iff (!{rst})\n"
            f" /* Timing: {self._shorten(tc.title, 60)} */\n"
            f" $rose(start) |-> ##[1:{cycles}] $rose(done);\n"
            f"endproperty\n"
            f"assert property ({prop_name}) else $error(\"{tc.test_id}: timing violated\");"
        )
        return SVAProperty(
            property_name=prop_name,
            description=tc.description,
            sva_code=sva_code,
            related_test_id=tc.test_id,
        )

    def _sva_generic(self, tc: TestCase, prop_name: str) -> SVAProperty:
        """Fallback generic SVA placeholder (comment-only, no assertion)."""
        sva_code = (
            f"// {prop_name}: {tc.title}\n"
            f"// TODO: Implement SVA for test {tc.test_id}\n"
            f"// Category: {tc.category}, Priority: {tc.priority}"
        )
        return SVAProperty(
            property_name=prop_name,
            description=tc.description,
            sva_code=sva_code,
            related_test_id=tc.test_id,
        )

    @staticmethod
    def _parse_within_cycles(text: str) -> Optional[int]:
        """Extract cycle count from 'WITHIN N cycle(s)' patterns.

        Returns None when no '<digits> cycle' pattern is present.
        """
        m = re.search(r"(\d+)\s*cycle", text, re.IGNORECASE)
        if m:
            return int(m.group(1))
        return None

    @staticmethod
    def _to_sva_expr(text: str) -> str:
        """Convert stimulus/expected text to an SVA-like expression.
        Returns a sanitized placeholder for manual refinement.
        """
        if not text:
            return "1'b1"
        # Strip common prefixes
        cleaned = re.sub(r"^(Set up:|Drive:|Check:)\s*", "", text).strip()
        # Sanitize for SVA: keep alphanumeric, underscores, spaces, ops
        cleaned = re.sub(r"[^\w\s=!<>&|().,\[\]:]", "", cleaned)
        if not cleaned:
            return "1'b1"
        # Truncate for readability
        if len(cleaned) > 80:
            cleaned = cleaned[:77] + "..."
        return cleaned

    # ── Step 4: Generate Coverage Plan ────────────────────────────────

    def _generate_coverage_plan(
        self,
        plan: VerificationPlan,
        spec: Any,
        cdc_result: Any = None,
    ) -> None:
        """Generate coverage points: port bins, FSM, FIFO boundaries."""
        self._port_coverage(plan, spec)
        self._fsm_coverage(plan, spec)
        self._fifo_coverage(plan, spec, cdc_result)

    def _port_coverage(self, plan: VerificationPlan, spec: Any) -> None:
        """Generate port-width-based bin coverage for input ports.

        Clock and reset ports are excluded by name.
        """
        ports = getattr(spec, "ports", []) or []

        for p in ports:
            direction = getattr(p, "direction", "").lower()
            if direction != "input":
                continue
            name = getattr(p, "name", "")
            dtype = getattr(p, "data_type", "logic")

            # Skip clock/reset
            if name.lower() in ("clk", "clock", "rst_n", "reset_n", "rstn", "rst", "i_clk", "i_rst_n"):
                continue

            width = self._extract_width(dtype)
            bins = self._generate_bins(width, name)

            plan.coverage_points.append(CoveragePoint(
                name=f"cov_{name}",
                cover_type="port_bins",
                target_signal=name,
                bins=bins,
                description=f"Input port '{name}' coverage ({width}-bit)",
            ))

    def _fsm_coverage(self, plan: VerificationPlan, spec: Any) -> None:
        """Generate FSM state and transition coverage from spec hints.

        Scans behavioral contracts for state-like keywords; when none
        are found, falls back to default state sets per design category,
        or emits nothing for categories without a default FSM.
        Assumes the FSM state register is named 'state' — confirm.
        """
        contracts = getattr(spec, "behavioral_contract", []) or []

        # Look for state-like keywords in behavioral contracts
        state_names = set()
        for stmt in contracts:
            text = str(stmt).lower()
            for keyword in ("idle", "active", "ready", "busy", "wait", "done",
                            "fetch", "decode", "execute", "writeback",
                            "read", "write", "init", "error"):
                if keyword in text:
                    state_names.add(keyword.upper())

        if not state_names:
            # Check design category for default FSM states
            category = plan.design_category
            if category == "PROCESSOR":
                state_names = {"FETCH", "DECODE", "EXECUTE", "WRITEBACK", "IDLE"}
            elif category == "INTERFACE":
                state_names = {"IDLE", "ACTIVE", "WAIT", "DONE"}
            elif category == "MEMORY":
                state_names = {"IDLE", "READ", "WRITE", "DONE"}
            else:
                return # No FSM detected

        # Sorted for deterministic bin ordering
        states_list = sorted(state_names)

        # State coverage
        plan.coverage_points.append(CoveragePoint(
            name="cov_fsm_state",
            cover_type="fsm_state",
            target_signal="state",
            bins=states_list,
            description=f"FSM state coverage — {len(states_list)} states",
        ))

        # Transition coverage (all pairs from detected states)
        transitions = []
        for s1 in states_list:
            for s2 in states_list:
                if s1 != s2:
                    transitions.append(f"{s1} => {s2}")

        if transitions:
            plan.coverage_points.append(CoveragePoint(
                name="cov_fsm_transition",
                cover_type="fsm_transition",
                target_signal="state",
                bins=transitions[:32], # Cap at 32 transitions
                description=f"FSM transition coverage — {min(len(transitions), 32)} transitions",
            ))

    def _fifo_coverage(
        self,
        plan: VerificationPlan,
        spec: Any,
        cdc_result: Any = None,
    ) -> None:
        """Generate FIFO boundary condition coverage.

        Emits empty/full/fill-level coverage points only when a FIFO is
        detected — via 'fifo' in a submodule name/description, or via a
        CDC-added async FIFO submodule. Target signal names
        (fifo_empty/fifo_full/fifo_count) are assumed — confirm against
        the generated RTL.
        """
        has_fifo = False

        # Check submodules for FIFO references
        submodules = getattr(spec, "submodules", []) or []
        for sm in submodules:
            sm_name = getattr(sm, "name", "").lower()
            sm_desc = getattr(sm, "description", "").lower()
            if "fifo" in sm_name or "fifo" in sm_desc:
                has_fifo = True
                break

        # Check CDC result for async FIFO submodules
        # (entries may be dicts or objects — handle both)
        if not has_fifo and cdc_result:
            cdc_subs = getattr(cdc_result, "cdc_submodules_added", []) or []
            for sub in cdc_subs:
                if isinstance(sub, dict):
                    if "fifo" in sub.get("name", "").lower():
                        has_fifo = True
                        break
                else:
                    if "fifo" in getattr(sub, "name", "").lower():
                        has_fifo = True
                        break

        if not has_fifo:
            return

        plan.coverage_points.append(CoveragePoint(
            name="cov_fifo_empty",
            cover_type="fifo_boundary",
            target_signal="fifo_empty",
            bins=["empty_asserted", "empty_deasserted"],
            description="FIFO empty flag boundary condition",
        ))
        plan.coverage_points.append(CoveragePoint(
            name="cov_fifo_full",
            cover_type="fifo_boundary",
            target_signal="fifo_full",
            bins=["full_asserted", "full_deasserted"],
            description="FIFO full flag boundary condition",
        ))
        plan.coverage_points.append(CoveragePoint(
            name="cov_fifo_level",
            cover_type="fifo_boundary",
            target_signal="fifo_count",
            bins=["level_0", "level_1", "level_half", "level_max_minus_1", "level_max"],
            description="FIFO fill-level boundary conditions",
        ))

    @staticmethod
    def _extract_width(data_type: str) -> int:
        """Extract bit width from data type string like 'logic [7:0]'.

        Defaults to 1 when no [hi:lo] range is present.
        """
        m = re.search(r"\[(\d+):(\d+)\]", data_type)
        if m:
            return abs(int(m.group(1)) - int(m.group(2))) + 1
        return 1

    @staticmethod
    def _generate_bins(width: int, signal_name: str) -> List[str]:
        """Generate appropriate bins for a given signal width.

        1-bit: both values. Up to 4 bits: exhaustive values. Wider:
        boundary bins (zero/one/max-1/max) plus a middle range bin.
        """
        if width == 1:
            return ["0", "1"]
        max_val = (1 << width) - 1
        if width <= 4:
            return [str(v) for v in range(max_val + 1)]
        # For wider signals, use boundary + range bins
        bins = [
            f"{signal_name}_zero = 0",
            f"{signal_name}_one = 1",
            f"{signal_name}_mid = [{max_val // 4}:{3 * max_val // 4}]",
            f"{signal_name}_max_m1 = {max_val - 1}",
            f"{signal_name}_max = {max_val}",
        ]
        return bins

    # ── Step 5: Finalize ──────────────────────────────────────────────

    def _finalize(self, plan: VerificationPlan) -> None:
        """Compute summary counts and validate the plan.

        Adds warnings (does not raise) for P0 tests lacking SVA and for
        an entirely empty plan.
        """
        plan.total_tests = len(plan.test_cases)
        plan.p0_count = sum(1 for t in plan.test_cases if t.priority == Priority.P0)
        plan.p1_count = sum(1 for t in plan.test_cases if t.priority == Priority.P1)
        plan.p2_count = sum(1 for t in plan.test_cases if t.priority == Priority.P2)

        # Validate: every P0 test must have an SVA property
        p0_no_sva = [
            t.test_id for t in plan.test_cases
            if t.priority == Priority.P0 and t.sva_property is None
        ]
        if p0_no_sva:
            plan.warnings.append(
                f"P0 tests without SVA: {', '.join(p0_no_sva)}"
            )

        if plan.total_tests == 0:
            plan.warnings.append("Empty verification plan — no test cases generated")
src/agentic/orchestrator.py CHANGED
@@ -34,6 +34,7 @@ from .agents.verifier import get_verification_agent, get_error_analyst_agent, ge
34
  from .agents.doc_agent import get_doc_agent
35
  from .agents.sdc_agent import get_sdc_agent
36
  from .core import ArchitectModule, SelfReflectPipeline, ReActAgent, WaveformExpertModule, DeepDebuggerModule
 
37
  from .contracts import (
38
  AgentResult,
39
  ArtifactRef,
@@ -128,6 +129,11 @@ class BuildStrategy(enum.Enum):
128
  class BuildState(enum.Enum):
129
  INIT = "Initializing"
130
  SPEC = "Architectural Planning"
 
 
 
 
 
131
  RTL_GEN = "RTL Generation"
132
  RTL_FIX = "RTL Syntax Fixing"
133
  VERIFICATION = "Verification & Testbench"
@@ -808,6 +814,16 @@ class BuildOrchestrator:
808
  self.do_init()
809
  elif self.state == BuildState.SPEC:
810
  self.do_spec()
 
 
 
 
 
 
 
 
 
 
811
  elif self.state == BuildState.RTL_GEN:
812
  self.do_rtl_gen()
813
  elif self.state == BuildState.RTL_FIX:
@@ -909,7 +925,7 @@ class BuildOrchestrator:
909
  return
910
 
911
  self.log("Architecture Plan Generated (SID validated)", refined=True)
912
- self.transition(BuildState.RTL_GEN)
913
 
914
  def _do_spec_fallback(self):
915
  """Fallback spec generation using a single CrewAI agent."""
@@ -961,7 +977,389 @@ SPECIFICATION SECTIONS (Markdown):
961
 
962
  self.artifacts['spec'] = str(result)
963
  self.log("Architecture Plan Generated (fallback)", refined=True)
964
- self.transition(BuildState.RTL_GEN)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
965
 
966
  def _get_strategy_prompt(self) -> str:
967
  if self.strategy == BuildStrategy.SV_MODULAR:
@@ -1291,9 +1689,12 @@ SPECIFICATION SECTIONS (Markdown):
1291
  return count >= 2
1292
 
1293
  def _clear_tb_fingerprints(self) -> None:
1294
- """Reset all TB failure fingerprints.
1295
- """
1296
- pass # Bug 2: Do not clear fingerprints or retry counters to prevent infinite loops
 
 
 
1297
 
1298
  def generate_uvm_lite_tb_from_rtl_ports(self, design_name: str, rtl_code: str) -> str:
1299
  """Deterministic Verilator-safe testbench generated from RTL ports.
@@ -2772,7 +3173,7 @@ Before returning any testbench code, mentally compile it with strict SystemVeril
2772
  self.log("Skipping Hardening (User Cancelled).", refined=True)
2773
  self.transition(BuildState.FORMAL_VERIFY)
2774
  else:
2775
- output_for_llm = self._condense_failure_log(output, kind="timing")
2776
  if self._record_failure_fingerprint(output_for_llm):
2777
  self.log("Detected repeated simulation failure fingerprint. Failing closed.", refined=True)
2778
  self.state = BuildState.FAIL
@@ -2983,11 +3384,11 @@ Reply with JSON only, no prose, using this exact schema:
2983
  # Add post-reset stabilization: insert wait cycles after reset de-assertion
2984
  if "reset_phase" in fixed_tb and "// post-reset-stab" not in fixed_tb:
2985
  # After rst_n = 1'b1 (or 1'b0 for active-high), add stabilization
2986
- for deassert in ["vif.rst_n = 1'b1;", "vif.reset = 1'b0;"]:
2987
  if deassert in fixed_tb:
2988
  fixed_tb = fixed_tb.replace(
2989
  deassert,
2990
- f"{deassert}\n repeat (3) @(posedge vif.clk); // post-reset-stab"
2991
  )
2992
  break
2993
  if fixed_tb != tb_code:
@@ -3647,7 +4048,7 @@ Generate SVA assertions that are compatible with the Yosys formal verification e
3647
  }
3648
  coverage_pass = all(coverage_checks.values())
3649
 
3650
- if self.retry_count == 0:
3651
  self.best_coverage = -1.0
3652
  self.best_tb_backup = None
3653
 
@@ -3828,8 +4229,7 @@ Generate SVA assertions that are compatible with the Yosys formal verification e
3828
  self.logger.info(f"REGRESSION TESTS:\n{result}")
3829
 
3830
  # Parse out individual tests from the LLM output
3831
- import re as regex
3832
- test_blocks = regex.findall(r'```(?:verilog|v)?\s*\n(.*?)```', result, regex.DOTALL)
3833
 
3834
  if not test_blocks:
3835
  self.log("No regression tests extracted. Skipping regression.", refined=True)
@@ -3867,8 +4267,7 @@ Generate SVA assertions that are compatible with the Yosys formal verification e
3867
  try:
3868
  with open(test_path, 'r') as f:
3869
  tb_content = f.read()
3870
- import re as _re
3871
- m = _re.search(r'module\s+(\w+)', tb_content)
3872
  if m:
3873
  tb_module = m.group(1)
3874
  except Exception:
 
34
  from .agents.doc_agent import get_doc_agent
35
  from .agents.sdc_agent import get_sdc_agent
36
  from .core import ArchitectModule, SelfReflectPipeline, ReActAgent, WaveformExpertModule, DeepDebuggerModule
37
+ from .core import HardwareSpecGenerator, HardwareSpec, HierarchyExpander, FeasibilityChecker, CDCAnalyzer, VerificationPlanner
38
  from .contracts import (
39
  AgentResult,
40
  ArtifactRef,
 
129
  class BuildState(enum.Enum):
130
  INIT = "Initializing"
131
  SPEC = "Architectural Planning"
132
+ SPEC_VALIDATE = "Specification Validation"
133
+ HIERARCHY_EXPAND = "Hierarchy Expansion"
134
+ FEASIBILITY_CHECK = "Feasibility Check"
135
+ CDC_ANALYZE = "CDC Analysis"
136
+ VERIFICATION_PLAN = "Verification Planning"
137
  RTL_GEN = "RTL Generation"
138
  RTL_FIX = "RTL Syntax Fixing"
139
  VERIFICATION = "Verification & Testbench"
 
814
  self.do_init()
815
  elif self.state == BuildState.SPEC:
816
  self.do_spec()
817
+ elif self.state == BuildState.SPEC_VALIDATE:
818
+ self.do_spec_validate()
819
+ elif self.state == BuildState.HIERARCHY_EXPAND:
820
+ self.do_hierarchy_expand()
821
+ elif self.state == BuildState.FEASIBILITY_CHECK:
822
+ self.do_feasibility_check()
823
+ elif self.state == BuildState.CDC_ANALYZE:
824
+ self.do_cdc_analyze()
825
+ elif self.state == BuildState.VERIFICATION_PLAN:
826
+ self.do_verification_plan()
827
  elif self.state == BuildState.RTL_GEN:
828
  self.do_rtl_gen()
829
  elif self.state == BuildState.RTL_FIX:
 
925
  return
926
 
927
  self.log("Architecture Plan Generated (SID validated)", refined=True)
928
+ self.transition(BuildState.SPEC_VALIDATE)
929
 
930
  def _do_spec_fallback(self):
931
  """Fallback spec generation using a single CrewAI agent."""
 
977
 
978
  self.artifacts['spec'] = str(result)
979
  self.log("Architecture Plan Generated (fallback)", refined=True)
980
+ self.transition(BuildState.SPEC_VALIDATE)
981
+
982
+ # ─── NEW PIPELINE STAGES ─────────────────────────────────────────
983
+
984
+ def do_spec_validate(self):
985
+ """Stage: Validate and enrich the spec via HardwareSpecGenerator (6-stage pipeline)."""
986
+ self.log("Running HardwareSpecGenerator (classify → complete → decompose → interface → contract → output)...", refined=True)
987
+
988
+ try:
989
+ spec_gen = HardwareSpecGenerator(llm=self.llm, verbose=self.verbose)
990
+ hw_spec, issues = spec_gen.generate(
991
+ design_name=self.name,
992
+ description=self.desc,
993
+ target_pdk=self.pdk_profile.get("profile", "sky130"),
994
+ )
995
+
996
+ # Store the spec as structured JSON
997
+ self.artifacts['hw_spec'] = hw_spec.to_dict()
998
+ self.artifacts['hw_spec_json'] = hw_spec.to_json()
999
+ self.artifacts['hw_spec_object'] = hw_spec
1000
+
1001
+ # Log classification and stats
1002
+ self.log(f"Design classified as: {hw_spec.design_category}", refined=True)
1003
+ self.log(f"Ports: {len(hw_spec.ports)} | Submodules: {len(hw_spec.submodules)} | "
1004
+ f"Contract statements: {len(hw_spec.behavioral_contract)}", refined=True)
1005
+
1006
+ if issues:
1007
+ for issue in issues[:5]:
1008
+ self.log(f" ⚠ {issue}", refined=True)
1009
+
1010
+ # Handle ELABORATION_NEEDED — present 3 options to user
1011
+ if hw_spec.design_category == "ELABORATION_NEEDED":
1012
+ options = [w for w in hw_spec.warnings if w.startswith("OPTION_")]
1013
+ self.log(f"📋 Description is brief — generated {len(options)} design options.", refined=True)
1014
+
1015
+ # Store options in artifact bus for the web API to surface
1016
+ parsed_options = []
1017
+ for opt_str in options:
1018
+ # Format: OPTION_1: title | Category: X | Freq: Y MHz | Ports: ... | Details: ...
1019
+ parts = {p.split(":")[0].strip(): ":".join(p.split(":")[1:]).strip()
1020
+ for p in opt_str.split(" | ") if ":" in p}
1021
+ parsed_options.append(parts)
1022
+ self.artifacts['spec_elaboration_options'] = parsed_options
1023
+ self.artifacts['spec_elaboration_needed'] = True
1024
+
1025
+ # ── CLI: Rich interactive display ──
1026
+ try:
1027
+ import typer
1028
+ from rich.table import Table
1029
+
1030
+ table = Table(title=f"💡 AgentIC VLSI Design Advisor — 3 Options for '{self.name}'",
1031
+ show_lines=True)
1032
+ table.add_column("#", style="bold cyan", width=3)
1033
+ table.add_column("Design Variant", style="bold white", width=30)
1034
+ table.add_column("Category", style="yellow", width=12)
1035
+ table.add_column("Freq", style="green", width=8)
1036
+ table.add_column("Details", style="dim")
1037
+
1038
+ for i, opt in enumerate(parsed_options, 1):
1039
+ opt_id = str(opt.get("OPTION_1".replace("1", str(i)), i))
1040
+ title = (opt.get("OPTION_1", opt.get(f"OPTION_{i}", f"Option {i}")))
1041
+ # Get key from dynamic OPTION_N key
1042
+ option_key = [k for k in opt if k.startswith("OPTION_")]
1043
+ title = opt[option_key[0]] if option_key else f"Option {i}"
1044
+ category = opt.get("Category", "")
1045
+ freq = opt.get("Freq", "")
1046
+ details = opt.get("Details", "")[:80] + "…" if len(opt.get("Details", "")) > 80 else opt.get("Details", "")
1047
+ table.add_row(str(i), title, category, freq, details)
1048
+
1049
+ console.print(table)
1050
+ console.print()
1051
+
1052
+ # Prompt user to choose
1053
+ choice_str = typer.prompt(
1054
+ f"Choose an option (1-{len(parsed_options)}) or type a custom description",
1055
+ default="1"
1056
+ )
1057
+
1058
+ chosen_desc = None
1059
+ if choice_str.strip().isdigit():
1060
+ idx = int(choice_str.strip()) - 1
1061
+ if 0 <= idx < len(parsed_options):
1062
+ opt = parsed_options[idx]
1063
+ option_key = [k for k in opt if k.startswith("OPTION_")]
1064
+ title = opt[option_key[0]] if option_key else f"Option {idx+1}"
1065
+ details = opt.get("Details", "")
1066
+ ports = opt.get("Ports", "")
1067
+ category = opt.get("Category", "")
1068
+ freq = opt.get("Freq", "50 MHz")
1069
+ chosen_desc = (
1070
+ f"{self.name}: {title}. {details} "
1071
+ f"Category: {category}. Target frequency: {freq}. "
1072
+ f"Key ports: {ports}. Module name: {self.name}."
1073
+ )
1074
+ self.log(f"✅ Selected option {idx+1}: {title}", refined=True)
1075
+ else:
1076
+ # Custom description entered directly
1077
+ chosen_desc = choice_str.strip()
1078
+ self.log(f"✅ Using custom description: {chosen_desc[:80]}…", refined=True)
1079
+
1080
+ if chosen_desc:
1081
+ # Store the enriched description and retry spec validation
1082
+ self.desc = chosen_desc
1083
+ self.artifacts['original_desc'] = hw_spec.design_description
1084
+ self.artifacts['elaborated_desc'] = chosen_desc
1085
+ self.log("🔄 Re-running spec generation with elaborated description…", refined=True)
1086
+ # Re-enter this stage to regenerate with the full description
1087
+ self.state = BuildState.SPEC_VALIDATE
1088
+ return
1089
+
1090
+ except Exception as prompt_err:
1091
+ # If running non-interactively (e.g. web API), use the first option automatically
1092
+ self.log(f"Non-interactive mode — auto-selecting option 1 ({prompt_err})", refined=True)
1093
+ if parsed_options:
1094
+ opt = parsed_options[0]
1095
+ option_key = [k for k in opt if k.startswith("OPTION_")]
1096
+ title = opt[option_key[0]] if option_key else "Option 1"
1097
+ details = opt.get("Details", "")
1098
+ ports = opt.get("Ports", "")
1099
+ freq = opt.get("Freq", "50 MHz")
1100
+ self.desc = (
1101
+ f"{self.name}: {title}. {details} "
1102
+ f"Target frequency: {freq}. Key ports: {ports}. Module name: {self.name}."
1103
+ )
1104
+ self.artifacts['elaborated_desc'] = self.desc
1105
+ self.state = BuildState.SPEC_VALIDATE
1106
+ return
1107
+
1108
+ # If we got here without a valid choice, fail gracefully
1109
+ self.log("❌ No valid design option selected.", refined=True)
1110
+ self.state = BuildState.FAIL
1111
+ return
1112
+
1113
+ # Check for hard rejection
1114
+ if hw_spec.design_category == "REJECTED":
1115
+ rejection_reason = hw_spec.warnings[0] if hw_spec.warnings else "Specification rejected"
1116
+ self.log(f"❌ SPEC REJECTED: {rejection_reason}", refined=True)
1117
+ self.errors.append(f"Specification rejected: {rejection_reason}")
1118
+ self.artifacts['spec_rejection_reason'] = rejection_reason
1119
+ self.state = BuildState.FAIL
1120
+ return
1121
+
1122
+ # Enrich the existing spec artifact with structured data
1123
+ enrichment = spec_gen.to_sid_enrichment(hw_spec)
1124
+ self.artifacts['spec_enrichment'] = enrichment
1125
+
1126
+ # Append behavioral verification hints to spec for downstream RTL gen
1127
+ if enrichment.get('verification_hints_from_spec'):
1128
+ existing_spec = self.artifacts.get('spec', '')
1129
+ hints = "\n".join(enrichment['verification_hints_from_spec'])
1130
+ self.artifacts['spec'] = (
1131
+ existing_spec +
1132
+ f"\n\n## Behavioral Contract (Auto-Generated Assertions)\n{hints}\n"
1133
+ )
1134
+
1135
+ self.log(f"Spec validation complete: {hw_spec.design_category} "
1136
+ f"({len(hw_spec.inferred_fields)} fields inferred, "
1137
+ f"{len(hw_spec.warnings)} warnings)", refined=True)
1138
+ self.transition(BuildState.HIERARCHY_EXPAND)
1139
+
1140
+ except Exception as e:
1141
+ self.log(f"HardwareSpecGenerator failed ({e}); skipping to HIERARCHY_EXPAND with basic spec.", refined=True)
1142
+ self.logger.warning(f"HardwareSpecGenerator error: {e}")
1143
+ # Create a minimal hw_spec so downstream stages can still run
1144
+ self.artifacts['hw_spec'] = {
1145
+ 'design_category': 'CONTROL',
1146
+ 'top_module_name': self.name,
1147
+ 'target_pdk': self.pdk_profile.get("profile", "sky130"),
1148
+ 'target_frequency_mhz': 50,
1149
+ 'ports': [], 'submodules': [],
1150
+ 'behavioral_contract': [], 'warnings': ['Spec validation was skipped due to error'],
1151
+ }
1152
+ self.transition(BuildState.HIERARCHY_EXPAND)
1153
+
1154
+ def do_hierarchy_expand(self):
1155
+ """Stage: Evaluate submodule complexity and recursively expand complex ones."""
1156
+ self.log("Running HierarchyExpander (evaluate → expand → consistency check)...", refined=True)
1157
+
1158
+ hw_spec_dict = self.artifacts.get('hw_spec', {})
1159
+
1160
+ # If we have a full HardwareSpec object, use it directly
1161
+ hw_spec_obj = self.artifacts.get('hw_spec_object')
1162
+ if hw_spec_obj is None:
1163
+ # Reconstruct from dict
1164
+ try:
1165
+ hw_spec_obj = HardwareSpec.from_json(json.dumps(hw_spec_dict))
1166
+ except Exception:
1167
+ self.log("No valid HardwareSpec for hierarchy expansion; skipping.", refined=True)
1168
+ self.artifacts['hierarchy_result'] = {}
1169
+ self.transition(BuildState.FEASIBILITY_CHECK)
1170
+ return
1171
+
1172
+ try:
1173
+ expander = HierarchyExpander(llm=self.llm, verbose=self.verbose)
1174
+ result = expander.expand(hw_spec_obj)
1175
+
1176
+ self.artifacts['hierarchy_result'] = result.to_dict()
1177
+ self.artifacts['hierarchy_result_json'] = result.to_json()
1178
+
1179
+ self.log(f"Hierarchy: depth={result.hierarchy_depth}, "
1180
+ f"expansions={result.expansion_count}, "
1181
+ f"submodules={len(result.submodules)}", refined=True)
1182
+
1183
+ # Log any consistency fixes
1184
+ consistency_fixes = [w for w in result.warnings if w.startswith("CONSISTENCY_FIX")]
1185
+ for fix in consistency_fixes[:3]:
1186
+ self.log(f" 🔧 {fix}", refined=True)
1187
+
1188
+ # Store enrichment for downstream
1189
+ enrichment = expander.to_hierarchy_enrichment(result)
1190
+ self.artifacts['hierarchy_enrichment'] = enrichment
1191
+
1192
+ self.transition(BuildState.FEASIBILITY_CHECK)
1193
+
1194
+ except Exception as e:
1195
+ self.log(f"HierarchyExpander failed ({e}); skipping to FEASIBILITY_CHECK.", refined=True)
1196
+ self.logger.warning(f"HierarchyExpander error: {e}")
1197
+ self.artifacts['hierarchy_result'] = {}
1198
+ self.transition(BuildState.FEASIBILITY_CHECK)
1199
+
1200
+ def do_feasibility_check(self):
1201
+ """Stage: Check physical realizability on Sky130 before RTL generation."""
1202
+ self.log("Running FeasibilityChecker (frequency → memory → arithmetic → area → Sky130 rules)...", refined=True)
1203
+
1204
+ hw_spec_dict = self.artifacts.get('hw_spec', {})
1205
+ hierarchy_result_dict = self.artifacts.get('hierarchy_result', None)
1206
+
1207
+ try:
1208
+ checker = FeasibilityChecker(pdk=self.pdk_profile.get("profile", "sky130"))
1209
+ result = checker.check(hw_spec_dict, hierarchy_result_dict)
1210
+
1211
+ self.artifacts['feasibility_result'] = result.to_dict()
1212
+ self.artifacts['feasibility_result_json'] = result.to_json()
1213
+
1214
+ self.log(f"Feasibility: {result.feasibility_status} | "
1215
+ f"~{result.estimated_gate_equivalents} GE | "
1216
+ f"Floorplan: {result.recommended_floorplan_size_um}", refined=True)
1217
+
1218
+ if result.feasibility_warnings:
1219
+ for w in result.feasibility_warnings[:5]:
1220
+ self.log(f" ⚠ {w[:120]}", refined=True)
1221
+
1222
+ if result.feasibility_status == "REJECT":
1223
+ for r in result.feasibility_rejections:
1224
+ self.log(f" ❌ {r}", refined=True)
1225
+ self.log("❌ FEASIBILITY REJECTED — pipeline halted before RTL generation.", refined=True)
1226
+ self.errors.append(f"Feasibility rejected: {'; '.join(result.feasibility_rejections[:3])}")
1227
+ self.artifacts['feasibility_rejection_reasons'] = result.feasibility_rejections
1228
+ self.state = BuildState.FAIL
1229
+ return
1230
+
1231
+ if result.memory_macros_required:
1232
+ for macro in result.memory_macros_required:
1233
+ self.log(f" 📦 OpenRAM macro needed: {macro.submodule_name} "
1234
+ f"({macro.width_bits}×{macro.depth_words})", refined=True)
1235
+
1236
+ # Store enrichment
1237
+ enrichment = checker.to_feasibility_enrichment(result)
1238
+ self.artifacts['feasibility_enrichment'] = enrichment
1239
+
1240
+ self.transition(BuildState.CDC_ANALYZE)
1241
+
1242
+ except Exception as e:
1243
+ self.log(f"FeasibilityChecker failed ({e}); skipping to CDC_ANALYZE.", refined=True)
1244
+ self.logger.warning(f"FeasibilityChecker error: {e}")
1245
+ self.artifacts['feasibility_result'] = {'feasibility_status': 'WARN', 'feasibility_warnings': [f'Check skipped: {e}']}
1246
+ self.transition(BuildState.CDC_ANALYZE)
1247
+
1248
+ def do_cdc_analyze(self):
1249
+ """Stage: Identify clock domain crossings and assign synchronization strategies."""
1250
+ self.log("Running CDCAnalyzer (identify domains → crossings → sync strategies → submodules)...", refined=True)
1251
+
1252
+ hw_spec_dict = self.artifacts.get('hw_spec', {})
1253
+ hierarchy_result_dict = self.artifacts.get('hierarchy_result', None)
1254
+
1255
+ try:
1256
+ analyzer = CDCAnalyzer()
1257
+ result = analyzer.analyze(hw_spec_dict, hierarchy_result_dict)
1258
+
1259
+ self.artifacts['cdc_result'] = result.to_dict()
1260
+ self.artifacts['cdc_result_json'] = result.to_json()
1261
+
1262
+ if result.cdc_status == "SINGLE_DOMAIN":
1263
+ self.log("CDC: Single clock domain detected — no CDC analysis required.", refined=True)
1264
+ else:
1265
+ self.log(f"CDC: {result.domain_count} clock domains, "
1266
+ f"{len(result.crossing_signals)} crossing signals, "
1267
+ f"{len(result.cdc_submodules_added)} sync submodules generated.", refined=True)
1268
+
1269
+ for crossing in result.crossing_signals[:5]:
1270
+ self.log(f" 🔀 {crossing.signal_name}: {crossing.source_domain} → "
1271
+ f"{crossing.destination_domain} [{crossing.sync_strategy}]", refined=True)
1272
+
1273
+ if result.cdc_unresolved:
1274
+ for u in result.cdc_unresolved:
1275
+ self.log(f" ⚠ {u}", refined=True)
1276
+
1277
+ # Inject CDC submodule specs into the spec artifact for RTL gen
1278
+ if result.cdc_submodules_added:
1279
+ cdc_section = "\n\n## CDC Synchronization Submodules (Auto-Generated)\n"
1280
+ for sub in result.cdc_submodules_added:
1281
+ cdc_section += (
1282
+ f"\n### {sub.module_name} ({sub.strategy})\n"
1283
+ f"- Source: {sub.source_domain} → Destination: {sub.destination_domain}\n"
1284
+ f"- Behavior: {sub.behavior}\n"
1285
+ f"- Ports: {json.dumps(sub.ports, indent=2)}\n"
1286
+ )
1287
+ existing_spec = self.artifacts.get('spec', '')
1288
+ self.artifacts['spec'] = existing_spec + cdc_section
1289
+
1290
+ self.transition(BuildState.VERIFICATION_PLAN)
1291
+
1292
+ except Exception as e:
1293
+ self.log(f"CDCAnalyzer failed ({e}); skipping to VERIFICATION_PLAN.", refined=True)
1294
+ self.logger.warning(f"CDCAnalyzer error: {e}")
1295
+ self.artifacts['cdc_result'] = {'cdc_status': 'SINGLE_DOMAIN', 'cdc_warnings': [f'Analysis skipped: {e}']}
1296
+ self.transition(BuildState.VERIFICATION_PLAN)
1297
+
1298
+ def do_verification_plan(self):
1299
+ """Stage: Generate structured verification plan with test cases, SVA, and coverage."""
1300
+ self.log("Running VerificationPlanner (behaviors → mandatory tests → SVA → coverage → finalize)...", refined=True)
1301
+
1302
+ hw_spec_obj = self.artifacts.get('hw_spec_object')
1303
+ if hw_spec_obj is None:
1304
+ # Try to reconstruct
1305
+ hw_spec_dict = self.artifacts.get('hw_spec', {})
1306
+ try:
1307
+ hw_spec_obj = HardwareSpec.from_json(json.dumps(hw_spec_dict))
1308
+ except Exception:
1309
+ self.log("No valid HardwareSpec for verification planning; skipping.", refined=True)
1310
+ self.transition(BuildState.RTL_GEN)
1311
+ return
1312
+
1313
+ # Get optional CDC and hierarchy results for coverage planning
1314
+ cdc_result_dict = self.artifacts.get('cdc_result', {})
1315
+ hierarchy_result_dict = self.artifacts.get('hierarchy_result', {})
1316
+
1317
+ try:
1318
+ planner = VerificationPlanner()
1319
+ plan = planner.plan(
1320
+ hardware_spec=hw_spec_obj,
1321
+ cdc_result=cdc_result_dict if cdc_result_dict else None,
1322
+ hierarchy_result=hierarchy_result_dict if hierarchy_result_dict else None,
1323
+ )
1324
+
1325
+ self.artifacts['verification_plan'] = plan.to_dict()
1326
+ self.artifacts['verification_plan_json'] = plan.to_json()
1327
+
1328
+ self.log(f"Verification Plan: {plan.total_tests} tests "
1329
+ f"(P0={plan.p0_count}, P1={plan.p1_count}, P2={plan.p2_count})", refined=True)
1330
+ self.log(f"SVA properties: {len(plan.sva_properties)} | "
1331
+ f"Coverage points: {len(plan.coverage_points)}", refined=True)
1332
+
1333
+ if plan.warnings:
1334
+ for w in plan.warnings[:3]:
1335
+ self.log(f" ⚠ {w}", refined=True)
1336
+
1337
+ # Inject verification plan context into spec for testbench generation
1338
+ vplan_section = "\n\n## Verification Plan (Auto-Generated)\n"
1339
+ for tc in plan.test_cases:
1340
+ vplan_section += f"- [{tc.priority}] {tc.test_id}: {tc.title}\n"
1341
+ vplan_section += f" Stimulus: {tc.stimulus}\n"
1342
+ vplan_section += f" Expected: {tc.expected}\n"
1343
+
1344
+ # Inject SVA assertions into spec for formal verification stage
1345
+ if plan.sva_properties:
1346
+ vplan_section += "\n### SVA Assertions\n```systemverilog\n"
1347
+ for sva in plan.sva_properties:
1348
+ vplan_section += f"// {sva.description}\n{sva.sva_code}\n\n"
1349
+ vplan_section += "```\n"
1350
+
1351
+ existing_spec = self.artifacts.get('spec', '')
1352
+ self.artifacts['spec'] = existing_spec + vplan_section
1353
+
1354
+ self.transition(BuildState.RTL_GEN)
1355
+
1356
+ except Exception as e:
1357
+ self.log(f"VerificationPlanner failed ({e}); skipping to RTL_GEN.", refined=True)
1358
+ self.logger.warning(f"VerificationPlanner error: {e}")
1359
+ self.artifacts['verification_plan'] = {}
1360
+ self.transition(BuildState.RTL_GEN)
1361
+
1362
+ # ─── END NEW PIPELINE STAGES ─────────────────────────────────────
1363
 
1364
  def _get_strategy_prompt(self) -> str:
1365
  if self.strategy == BuildStrategy.SV_MODULAR:
 
1689
  return count >= 2
1690
 
1691
  def _clear_tb_fingerprints(self) -> None:
1692
+ """Reset all TB failure fingerprints so gate-level retries start fresh."""
1693
+ self.tb_failure_fingerprint_history.clear()
1694
+ self.tb_failed_code_by_fingerprint.clear()
1695
+ self.tb_static_fail_count = 0
1696
+ self.tb_compile_fail_count = 0
1697
+ self.tb_repair_fail_count = 0
1698
 
1699
  def generate_uvm_lite_tb_from_rtl_ports(self, design_name: str, rtl_code: str) -> str:
1700
  """Deterministic Verilator-safe testbench generated from RTL ports.
 
3173
  self.log("Skipping Hardening (User Cancelled).", refined=True)
3174
  self.transition(BuildState.FORMAL_VERIFY)
3175
  else:
3176
+ output_for_llm = self._condense_failure_log(output, kind="simulation")
3177
  if self._record_failure_fingerprint(output_for_llm):
3178
  self.log("Detected repeated simulation failure fingerprint. Failing closed.", refined=True)
3179
  self.state = BuildState.FAIL
 
3384
  # Add post-reset stabilization: insert wait cycles after reset de-assertion
3385
  if "reset_phase" in fixed_tb and "// post-reset-stab" not in fixed_tb:
3386
  # After rst_n = 1'b1 (or 1'b0 for active-high), add stabilization
3387
+ for deassert in ["rst_n = 1'b1;", "reset = 1'b0;"]:
3388
  if deassert in fixed_tb:
3389
  fixed_tb = fixed_tb.replace(
3390
  deassert,
3391
+ f"{deassert}\n repeat (3) @(posedge clk); // post-reset-stab"
3392
  )
3393
  break
3394
  if fixed_tb != tb_code:
 
4048
  }
4049
  coverage_pass = all(coverage_checks.values())
4050
 
4051
+ if not hasattr(self, 'best_coverage') or self.best_coverage is None:
4052
  self.best_coverage = -1.0
4053
  self.best_tb_backup = None
4054
 
 
4229
  self.logger.info(f"REGRESSION TESTS:\n{result}")
4230
 
4231
  # Parse out individual tests from the LLM output
4232
+ test_blocks = re.findall(r'```(?:verilog|v)?\s*\n(.*?)```', result, re.DOTALL)
 
4233
 
4234
  if not test_blocks:
4235
  self.log("No regression tests extracted. Skipping regression.", refined=True)
 
4267
  try:
4268
  with open(test_path, 'r') as f:
4269
  tb_content = f.read()
4270
+ m = re.search(r'module\s+(\w+)', tb_content)
 
4271
  if m:
4272
  tb_module = m.group(1)
4273
  except Exception:
web/.env.production CHANGED
@@ -1,2 +1,6 @@
1
  # Production — HuggingFace Space backend
2
  VITE_API_BASE_URL=https://vxkyyy-AgentIC.hf.space
 
 
 
 
 
1
  # Production — HuggingFace Space backend
2
  VITE_API_BASE_URL=https://vxkyyy-AgentIC.hf.space
3
+
4
+ # Supabase — get these from https://supabase.com/dashboard → Settings → API
5
+ VITE_SUPABASE_URL=
6
+ VITE_SUPABASE_ANON_KEY=
web/package-lock.json CHANGED
@@ -11,6 +11,7 @@
11
  "@microsoft/fetch-event-source": "^2.0.1",
12
  "@react-three/drei": "^10.7.7",
13
  "@react-three/fiber": "^9.5.0",
 
14
  "axios": "^1.13.5",
15
  "framer-motion": "^12.34.3",
16
  "lucide-react": "^0.575.0",
@@ -1502,6 +1503,86 @@
1502
  "win32"
1503
  ]
1504
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1505
  "node_modules/@tweenjs/tween.js": {
1506
  "version": "23.1.3",
1507
  "resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-23.1.3.tgz",
@@ -1618,7 +1699,6 @@
1618
  "version": "24.10.13",
1619
  "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.13.tgz",
1620
  "integrity": "sha512-oH72nZRfDv9lADUBSo104Aq7gPHpQZc4BTx38r9xf9pg5LfP6EzSyH2n7qFmmxRQXh7YlUXODcYsg6PuTDSxGg==",
1621
- "dev": true,
1622
  "license": "MIT",
1623
  "dependencies": {
1624
  "undici-types": "~7.16.0"
@@ -1630,6 +1710,12 @@
1630
  "integrity": "sha512-ieXiYmgSRXUDeOntE1InxjWyvEelZGP63M+cGuquuRLuIKKT1osnkXjxev9B7d1nXSug5vpunx+gNlbVxMlC9A==",
1631
  "license": "MIT"
1632
  },
 
 
 
 
 
 
1633
  "node_modules/@types/react": {
1634
  "version": "19.2.14",
1635
  "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz",
@@ -1691,6 +1777,15 @@
1691
  "integrity": "sha512-h8fgEd/DpoS9CBrjEQXR+dIDraopAEfu4wYVNY2tEPwk60stPWhvZMf4Foo5FakuQ7HFZoa8WceaWFervK2Ovg==",
1692
  "license": "MIT"
1693
  },
 
 
 
 
 
 
 
 
 
1694
  "node_modules/@typescript-eslint/eslint-plugin": {
1695
  "version": "8.56.0",
1696
  "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz",
@@ -3260,6 +3355,15 @@
3260
  "url": "https://opencollective.com/unified"
3261
  }
3262
  },
 
 
 
 
 
 
 
 
 
3263
  "node_modules/ieee754": {
3264
  "version": "1.2.1",
3265
  "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
@@ -5338,7 +5442,6 @@
5338
  "version": "7.16.0",
5339
  "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
5340
  "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
5341
- "dev": true,
5342
  "license": "MIT"
5343
  },
5344
  "node_modules/unified": {
@@ -5626,6 +5729,27 @@
5626
  "node": ">=0.10.0"
5627
  }
5628
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5629
  "node_modules/yallist": {
5630
  "version": "3.1.1",
5631
  "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
 
11
  "@microsoft/fetch-event-source": "^2.0.1",
12
  "@react-three/drei": "^10.7.7",
13
  "@react-three/fiber": "^9.5.0",
14
+ "@supabase/supabase-js": "^2.99.0",
15
  "axios": "^1.13.5",
16
  "framer-motion": "^12.34.3",
17
  "lucide-react": "^0.575.0",
 
1503
  "win32"
1504
  ]
1505
  },
1506
+ "node_modules/@supabase/auth-js": {
1507
+ "version": "2.99.0",
1508
+ "resolved": "https://registry.npmjs.org/@supabase/auth-js/-/auth-js-2.99.0.tgz",
1509
+ "integrity": "sha512-tHiIST/OEoLmWBE+3X69xRY5srJM/lL86KltmMlIfDo9ePJLo14vQQV9T4NF+P+MoGhCwQL1GTmk51zuAFMXKw==",
1510
+ "license": "MIT",
1511
+ "dependencies": {
1512
+ "tslib": "2.8.1"
1513
+ },
1514
+ "engines": {
1515
+ "node": ">=20.0.0"
1516
+ }
1517
+ },
1518
+ "node_modules/@supabase/functions-js": {
1519
+ "version": "2.99.0",
1520
+ "resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.99.0.tgz",
1521
+ "integrity": "sha512-zA9oad6EqGwMLLu2LfP1bXbqKcJGiotAdbdTfZG7YS7619YZQAEgejj9mp+E5vglKE1yMWbKK+S1J3PbuUtgLg==",
1522
+ "license": "MIT",
1523
+ "dependencies": {
1524
+ "tslib": "2.8.1"
1525
+ },
1526
+ "engines": {
1527
+ "node": ">=20.0.0"
1528
+ }
1529
+ },
1530
+ "node_modules/@supabase/postgrest-js": {
1531
+ "version": "2.99.0",
1532
+ "resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-2.99.0.tgz",
1533
+ "integrity": "sha512-8qfOMi2pu9y0IQhUAeFqjrvR49G4ELGevXCWV9qAHXFQ/h2FFh0I8PYjFQj4rHcHSq6hrpozDnS1vbQU8NAQ/A==",
1534
+ "license": "MIT",
1535
+ "dependencies": {
1536
+ "tslib": "2.8.1"
1537
+ },
1538
+ "engines": {
1539
+ "node": ">=20.0.0"
1540
+ }
1541
+ },
1542
+ "node_modules/@supabase/realtime-js": {
1543
+ "version": "2.99.0",
1544
+ "resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.99.0.tgz",
1545
+ "integrity": "sha512-7nFTZhNeANR7FvEY6PfWLCfE8dHqcaJd9SuR7IPEZvBPG9K4uEHMivpjZx4NWRSU7Eji7ZbKy2LG+cJ48DhwHg==",
1546
+ "license": "MIT",
1547
+ "dependencies": {
1548
+ "@types/phoenix": "^1.6.6",
1549
+ "@types/ws": "^8.18.1",
1550
+ "tslib": "2.8.1",
1551
+ "ws": "^8.18.2"
1552
+ },
1553
+ "engines": {
1554
+ "node": ">=20.0.0"
1555
+ }
1556
+ },
1557
+ "node_modules/@supabase/storage-js": {
1558
+ "version": "2.99.0",
1559
+ "resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.99.0.tgz",
1560
+ "integrity": "sha512-mAEEbfsght5EEALejYrwAP9k8sFBGjfMZT8n4SyMXk2iYuWVeRMs1kA/uKg0uDMctWdZ0bL+L4jZzksUJpCjMA==",
1561
+ "license": "MIT",
1562
+ "dependencies": {
1563
+ "iceberg-js": "^0.8.1",
1564
+ "tslib": "2.8.1"
1565
+ },
1566
+ "engines": {
1567
+ "node": ">=20.0.0"
1568
+ }
1569
+ },
1570
+ "node_modules/@supabase/supabase-js": {
1571
+ "version": "2.99.0",
1572
+ "resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.99.0.tgz",
1573
+ "integrity": "sha512-SP9Sn9tsHDB7N4u2gT13rdeZJewE4xibAxasG7vOz+fYi92+XkMMbWNx0uGK53zKTnAnvTs16isRooyBy4sn5w==",
1574
+ "license": "MIT",
1575
+ "dependencies": {
1576
+ "@supabase/auth-js": "2.99.0",
1577
+ "@supabase/functions-js": "2.99.0",
1578
+ "@supabase/postgrest-js": "2.99.0",
1579
+ "@supabase/realtime-js": "2.99.0",
1580
+ "@supabase/storage-js": "2.99.0"
1581
+ },
1582
+ "engines": {
1583
+ "node": ">=20.0.0"
1584
+ }
1585
+ },
1586
  "node_modules/@tweenjs/tween.js": {
1587
  "version": "23.1.3",
1588
  "resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-23.1.3.tgz",
 
1699
  "version": "24.10.13",
1700
  "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.13.tgz",
1701
  "integrity": "sha512-oH72nZRfDv9lADUBSo104Aq7gPHpQZc4BTx38r9xf9pg5LfP6EzSyH2n7qFmmxRQXh7YlUXODcYsg6PuTDSxGg==",
 
1702
  "license": "MIT",
1703
  "dependencies": {
1704
  "undici-types": "~7.16.0"
 
1710
  "integrity": "sha512-ieXiYmgSRXUDeOntE1InxjWyvEelZGP63M+cGuquuRLuIKKT1osnkXjxev9B7d1nXSug5vpunx+gNlbVxMlC9A==",
1711
  "license": "MIT"
1712
  },
1713
+ "node_modules/@types/phoenix": {
1714
+ "version": "1.6.7",
1715
+ "resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.6.7.tgz",
1716
+ "integrity": "sha512-oN9ive//QSBkf19rfDv45M7eZPi0eEXylht2OLEXicu5b4KoQ1OzXIw+xDSGWxSxe1JmepRR/ZH283vsu518/Q==",
1717
+ "license": "MIT"
1718
+ },
1719
  "node_modules/@types/react": {
1720
  "version": "19.2.14",
1721
  "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz",
 
1777
  "integrity": "sha512-h8fgEd/DpoS9CBrjEQXR+dIDraopAEfu4wYVNY2tEPwk60stPWhvZMf4Foo5FakuQ7HFZoa8WceaWFervK2Ovg==",
1778
  "license": "MIT"
1779
  },
1780
+ "node_modules/@types/ws": {
1781
+ "version": "8.18.1",
1782
+ "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
1783
+ "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
1784
+ "license": "MIT",
1785
+ "dependencies": {
1786
+ "@types/node": "*"
1787
+ }
1788
+ },
1789
  "node_modules/@typescript-eslint/eslint-plugin": {
1790
  "version": "8.56.0",
1791
  "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz",
 
3355
  "url": "https://opencollective.com/unified"
3356
  }
3357
  },
3358
+ "node_modules/iceberg-js": {
3359
+ "version": "0.8.1",
3360
+ "resolved": "https://registry.npmjs.org/iceberg-js/-/iceberg-js-0.8.1.tgz",
3361
+ "integrity": "sha512-1dhVQZXhcHje7798IVM+xoo/1ZdVfzOMIc8/rgVSijRK38EDqOJoGula9N/8ZI5RD8QTxNQtK/Gozpr+qUqRRA==",
3362
+ "license": "MIT",
3363
+ "engines": {
3364
+ "node": ">=20.0.0"
3365
+ }
3366
+ },
3367
  "node_modules/ieee754": {
3368
  "version": "1.2.1",
3369
  "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
 
5442
  "version": "7.16.0",
5443
  "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
5444
  "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
 
5445
  "license": "MIT"
5446
  },
5447
  "node_modules/unified": {
 
5729
  "node": ">=0.10.0"
5730
  }
5731
  },
5732
+ "node_modules/ws": {
5733
+ "version": "8.19.0",
5734
+ "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
5735
+ "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
5736
+ "license": "MIT",
5737
+ "engines": {
5738
+ "node": ">=10.0.0"
5739
+ },
5740
+ "peerDependencies": {
5741
+ "bufferutil": "^4.0.1",
5742
+ "utf-8-validate": ">=5.0.2"
5743
+ },
5744
+ "peerDependenciesMeta": {
5745
+ "bufferutil": {
5746
+ "optional": true
5747
+ },
5748
+ "utf-8-validate": {
5749
+ "optional": true
5750
+ }
5751
+ }
5752
+ },
5753
  "node_modules/yallist": {
5754
  "version": "3.1.1",
5755
  "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
web/package.json CHANGED
@@ -13,6 +13,7 @@
13
  "@microsoft/fetch-event-source": "^2.0.1",
14
  "@react-three/drei": "^10.7.7",
15
  "@react-three/fiber": "^9.5.0",
 
16
  "axios": "^1.13.5",
17
  "framer-motion": "^12.34.3",
18
  "lucide-react": "^0.575.0",
 
13
  "@microsoft/fetch-event-source": "^2.0.1",
14
  "@react-three/drei": "^10.7.7",
15
  "@react-three/fiber": "^9.5.0",
16
+ "@supabase/supabase-js": "^2.99.0",
17
  "axios": "^1.13.5",
18
  "framer-motion": "^12.34.3",
19
  "lucide-react": "^0.575.0",
web/src/App.tsx CHANGED
@@ -1,4 +1,7 @@
1
  import { useEffect, useMemo, useState } from 'react';
 
 
 
2
  import { Dashboard } from './pages/Dashboard';
3
  import { DesignStudio } from './pages/DesignStudio';
4
  import { HumanInLoopBuild } from './pages/HumanInLoopBuild';
@@ -9,6 +12,8 @@ import { api } from './api';
9
  import './index.css';
10
 
11
  const App = () => {
 
 
12
  const [selectedPage, setSelectedPage] = useState('Design Studio');
13
  const [designs, setDesigns] = useState<{ name: string, has_gds: boolean }[]>([]);
14
  const [selectedDesign, setSelectedDesign] = useState<string>('');
@@ -17,14 +22,26 @@ const App = () => {
17
  return saved === 'dark' ? 'dark' : 'light';
18
  });
19
 
20
-
 
 
 
 
 
 
 
 
 
 
21
 
22
  useEffect(() => {
23
  document.documentElement.setAttribute('data-theme', theme);
24
  localStorage.setItem('agentic-theme', theme);
25
  }, [theme]);
26
 
 
27
  useEffect(() => {
 
28
  api.get('/designs')
29
  .then(res => {
30
  const data = res.data?.designs || [];
@@ -35,7 +52,13 @@ const App = () => {
35
  }
36
  })
37
  .catch(err => console.error("Failed to fetch designs", err));
38
- }, []);
 
 
 
 
 
 
39
 
40
  const navItems = useMemo(
41
  () => [
@@ -50,6 +73,21 @@ const App = () => {
50
  []
51
  );
52
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  return (
54
  <div className="app-shell">
55
  <aside className="app-sidebar">
@@ -90,12 +128,24 @@ const App = () => {
90
  </nav>
91
 
92
  <div className="app-sidebar-footer">
 
 
 
 
 
 
 
 
 
93
  <button
94
  className="theme-toggle"
95
  onClick={() => setTheme((t) => (t === 'light' ? 'dark' : 'light'))}
96
  >
97
  {theme === 'light' ? '🌙 Dark' : '☀️ Light'}
98
  </button>
 
 
 
99
  <div className="app-version">AgentIC · 2026</div>
100
  </div>
101
  </aside>
@@ -121,7 +171,7 @@ const App = () => {
121
 
122
  <div className="home-card-grid">
123
  <div className="home-kpi">{designs.length}<span>Designs</span></div>
124
- <div className="home-kpi">15<span>Pipeline Stages</span></div>
125
  <div className="home-kpi">5<span>Core Modules</span></div>
126
  <div className="home-kpi">AI<span>Agents</span></div>
127
  </div>
@@ -166,21 +216,26 @@ const App = () => {
166
  <h3 className="home-section-title">Pipeline Flow</h3>
167
  <div className="pipeline-flow">
168
  {[
169
- { icon: '📐', label: 'SPEC', sub: 'Spec Analysis' },
170
- { icon: '💻', label: 'RTL_GEN', sub: 'RTL Generation' },
171
- { icon: '🔨', label: 'RTL_FIX', sub: 'Code Quality' },
172
- { icon: '🧪', label: 'VERIFY', sub: 'Functional Verify' },
173
- { icon: '📊', label: 'FORMAL', sub: 'Formal Verify' },
174
- { icon: '📈', label: 'COVERAGE', sub: 'Coverage' },
 
 
 
 
 
175
  { icon: '🗺️', label: 'FLOOR', sub: 'Floorplan' },
176
- { icon: '🏗️', label: 'HARDEN', sub: 'Optimization' },
177
- { icon: '✅', label: 'SIGNOFF', sub: 'Signoff' },
178
  ].map((s, i) => (
179
  <div className="pipeline-stage" key={s.label}>
180
  <div className="pipeline-stage-icon">{s.icon}</div>
181
  <div className="pipeline-stage-label">{s.label}</div>
182
  <div className="pipeline-stage-sub">{s.sub}</div>
183
- {i < 8 && <div className="pipeline-arrow">→</div>}
184
  </div>
185
  ))}
186
  </div>
 
1
  import { useEffect, useMemo, useState } from 'react';
2
+ import type { Session } from '@supabase/supabase-js';
3
+ import { supabase } from './supabaseClient';
4
+ import { AuthPage } from './components/AuthPage';
5
  import { Dashboard } from './pages/Dashboard';
6
  import { DesignStudio } from './pages/DesignStudio';
7
  import { HumanInLoopBuild } from './pages/HumanInLoopBuild';
 
12
  import './index.css';
13
 
14
  const App = () => {
15
+ const [session, setSession] = useState<Session | null>(null);
16
+ const [authLoading, setAuthLoading] = useState(true);
17
  const [selectedPage, setSelectedPage] = useState('Design Studio');
18
  const [designs, setDesigns] = useState<{ name: string, has_gds: boolean }[]>([]);
19
  const [selectedDesign, setSelectedDesign] = useState<string>('');
 
22
  return saved === 'dark' ? 'dark' : 'light';
23
  });
24
 
25
+ // ── Auth state ──
26
+ useEffect(() => {
27
+ supabase.auth.getSession().then(({ data: { session: s } }) => {
28
+ setSession(s);
29
+ setAuthLoading(false);
30
+ });
31
+ const { data: { subscription } } = supabase.auth.onAuthStateChange((_event, s) => {
32
+ setSession(s);
33
+ });
34
+ return () => subscription.unsubscribe();
35
+ }, []);
36
 
37
  useEffect(() => {
38
  document.documentElement.setAttribute('data-theme', theme);
39
  localStorage.setItem('agentic-theme', theme);
40
  }, [theme]);
41
 
42
+ // Fetch designs only when authenticated
43
  useEffect(() => {
44
+ if (!session) return;
45
  api.get('/designs')
46
  .then(res => {
47
  const data = res.data?.designs || [];
 
52
  }
53
  })
54
  .catch(err => console.error("Failed to fetch designs", err));
55
+ }, [session]);
56
+
57
+ const handleLogout = async () => {
58
+ await supabase.auth.signOut();
59
+ setSession(null);
60
+ setSelectedPage('Design Studio');
61
+ };
62
 
63
  const navItems = useMemo(
64
  () => [
 
73
  []
74
  );
75
 
76
+ // ── Auth loading spinner ──
77
+ if (authLoading) {
78
+ return (
79
+ <div className="auth-loading">
80
+ <div className="auth-loading-spinner" />
81
+ <span>Loading AgentIC…</span>
82
+ </div>
83
+ );
84
+ }
85
+
86
+ // ── Auth gate ──
87
+ if (!session) {
88
+ return <AuthPage onAuth={() => supabase.auth.getSession().then(({ data: { session: s } }) => setSession(s))} />;
89
+ }
90
+
91
  return (
92
  <div className="app-shell">
93
  <aside className="app-sidebar">
 
128
  </nav>
129
 
130
  <div className="app-sidebar-footer">
131
+ {/* User info */}
132
+ <div className="app-user-info">
133
+ <div className="app-user-avatar">
134
+ {session.user.email?.[0]?.toUpperCase() || '?'}
135
+ </div>
136
+ <div className="app-user-details">
137
+ <div className="app-user-email">{session.user.email}</div>
138
+ </div>
139
+ </div>
140
  <button
141
  className="theme-toggle"
142
  onClick={() => setTheme((t) => (t === 'light' ? 'dark' : 'light'))}
143
  >
144
  {theme === 'light' ? '🌙 Dark' : '☀️ Light'}
145
  </button>
146
+ <button className="logout-btn" onClick={handleLogout}>
147
+ ↩ Sign Out
148
+ </button>
149
  <div className="app-version">AgentIC · 2026</div>
150
  </div>
151
  </aside>
 
171
 
172
  <div className="home-card-grid">
173
  <div className="home-kpi">{designs.length}<span>Designs</span></div>
174
+ <div className="home-kpi">19<span>Pipeline Stages</span></div>
175
  <div className="home-kpi">5<span>Core Modules</span></div>
176
  <div className="home-kpi">AI<span>Agents</span></div>
177
  </div>
 
216
  <h3 className="home-section-title">Pipeline Flow</h3>
217
  <div className="pipeline-flow">
218
  {[
219
+ { icon: '📐', label: 'SPEC', sub: 'Specification' },
220
+ { icon: '🔍', label: 'VALIDATE', sub: 'Spec Validation' },
221
+ { icon: '🌲', label: 'EXPAND', sub: 'Hierarchy' },
222
+ { icon: '⚖️', label: 'FEASIBLE', sub: 'Feasibility' },
223
+ { icon: '🔀', label: 'CDC', sub: 'Clock Domains' },
224
+ { icon: '📋', label: 'V-PLAN', sub: 'Verify Plan' },
225
+ { icon: '💻', label: 'RTL', sub: 'Generation' },
226
+ { icon: '🔨', label: 'FIX', sub: 'Code Quality' },
227
+ { icon: '🧪', label: 'VERIFY', sub: 'Simulation' },
228
+ { icon: '📊', label: 'FORMAL', sub: 'Formal' },
229
+ { icon: '📈', label: 'COV', sub: 'Coverage' },
230
  { icon: '🗺️', label: 'FLOOR', sub: 'Floorplan' },
231
+ { icon: '🏗️', label: 'HARDEN', sub: 'Place+Route' },
232
+ { icon: '✅', label: 'SIGNOFF', sub: 'Tape-out' },
233
  ].map((s, i) => (
234
  <div className="pipeline-stage" key={s.label}>
235
  <div className="pipeline-stage-icon">{s.icon}</div>
236
  <div className="pipeline-stage-label">{s.label}</div>
237
  <div className="pipeline-stage-sub">{s.sub}</div>
238
+ {i < 13 && <div className="pipeline-arrow">→</div>}
239
  </div>
240
  ))}
241
  </div>
web/src/api.ts CHANGED
@@ -1,9 +1,23 @@
1
  import axios from 'axios';
 
2
 
3
  export const API_BASE = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
4
 
5
- // Pre-configured axios instance with ngrok header
6
  export const api = axios.create({
7
  baseURL: API_BASE,
8
  headers: { 'ngrok-skip-browser-warning': 'true' },
9
  });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import axios from 'axios';
2
+ import { supabase } from './supabaseClient';
3
 
4
  export const API_BASE = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
5
 
6
+ // Pre-configured axios instance with auth + ngrok header
7
  export const api = axios.create({
8
  baseURL: API_BASE,
9
  headers: { 'ngrok-skip-browser-warning': 'true' },
10
  });
11
+
12
+ // Attach Supabase JWT to every request
13
+ api.interceptors.request.use(async (config) => {
14
+ try {
15
+ const { data: { session } } = await supabase.auth.getSession();
16
+ if (session?.access_token) {
17
+ config.headers.Authorization = `Bearer ${session.access_token}`;
18
+ }
19
+ } catch {
20
+ // No session — request goes without auth (backend will 401 if needed)
21
+ }
22
+ return config;
23
+ });
web/src/components/ApprovalCard.tsx CHANGED
@@ -21,7 +21,7 @@ interface Props {
21
  }
22
 
23
  const STAGE_ICONS: Record<string, string> = {
24
- INIT: '⚙', SPEC: '◈', RTL_GEN: '⌨', RTL_FIX: '◪',
25
  VERIFICATION: '◉', FORMAL_VERIFY: '◈', COVERAGE_CHECK: '◎',
26
  REGRESSION: '↺', SDC_GEN: '⧗', FLOORPLAN: '▣',
27
  HARDENING: '⬡', CONVERGENCE_REVIEW: '◎', ECO_PATCH: '⟴',
 
21
  }
22
 
23
  const STAGE_ICONS: Record<string, string> = {
24
+ INIT: '⚙', SPEC: '◈', SPEC_VALIDATE: '⊘', HIERARCHY_EXPAND: '⊞', FEASIBILITY_CHECK: '⚖', CDC_ANALYZE: '↔', VERIFICATION_PLAN: '☑', RTL_GEN: '⌨', RTL_FIX: '◪',
25
  VERIFICATION: '◉', FORMAL_VERIFY: '◈', COVERAGE_CHECK: '◎',
26
  REGRESSION: '↺', SDC_GEN: '⧗', FLOORPLAN: '▣',
27
  HARDENING: '⬡', CONVERGENCE_REVIEW: '◎', ECO_PATCH: '⟴',
web/src/components/AuthPage.tsx ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { useState } from 'react';
2
+ import { supabase } from '../supabaseClient';
3
+
4
+ type AuthMode = 'login' | 'signup';
5
+
6
+ export const AuthPage = ({ onAuth }: { onAuth: () => void }) => {
7
+ const [mode, setMode] = useState<AuthMode>('login');
8
+ const [email, setEmail] = useState('');
9
+ const [password, setPassword] = useState('');
10
+ const [loading, setLoading] = useState(false);
11
+ const [error, setError] = useState('');
12
+ const [successMsg, setSuccessMsg] = useState('');
13
+
14
+ const handleSubmit = async (e: React.FormEvent) => {
15
+ e.preventDefault();
16
+ setError('');
17
+ setSuccessMsg('');
18
+ setLoading(true);
19
+
20
+ try {
21
+ if (mode === 'login') {
22
+ const { error: err } = await supabase.auth.signInWithPassword({ email, password });
23
+ if (err) throw err;
24
+ onAuth();
25
+ } else {
26
+ const { error: err } = await supabase.auth.signUp({ email, password });
27
+ if (err) throw err;
28
+ setSuccessMsg('Check your email for a confirmation link.');
29
+ }
30
+ } catch (err: any) {
31
+ setError(err.message || 'Authentication failed');
32
+ }
33
+ setLoading(false);
34
+ };
35
+
36
+ return (
37
+ <div className="auth-root">
38
+ <div className="auth-glow auth-glow-1" />
39
+ <div className="auth-glow auth-glow-2" />
40
+
41
+ <div className="auth-card">
42
+ <div className="auth-brand">
43
+ <div className="auth-brand-logo">A</div>
44
+ <div>
45
+ <div className="auth-brand-title">AgentIC</div>
46
+ <div className="auth-brand-sub">Autonomous Silicon Studio</div>
47
+ </div>
48
+ </div>
49
+
50
+ <h2 className="auth-heading">
51
+ {mode === 'login' ? 'Welcome back' : 'Create your account'}
52
+ </h2>
53
+ <p className="auth-subheading">
54
+ {mode === 'login'
55
+ ? 'Sign in to continue designing chips'
56
+ : 'Start building silicon from plain English'}
57
+ </p>
58
+
59
+ <div className="auth-tabs">
60
+ <button
61
+ className={`auth-tab ${mode === 'login' ? 'auth-tab--active' : ''}`}
62
+ onClick={() => { setMode('login'); setError(''); setSuccessMsg(''); }}
63
+ >
64
+ Sign In
65
+ </button>
66
+ <button
67
+ className={`auth-tab ${mode === 'signup' ? 'auth-tab--active' : ''}`}
68
+ onClick={() => { setMode('signup'); setError(''); setSuccessMsg(''); }}
69
+ >
70
+ Sign Up
71
+ </button>
72
+ </div>
73
+
74
+ <form className="auth-form" onSubmit={handleSubmit}>
75
+ <label className="auth-field">
76
+ <span className="auth-label">Email</span>
77
+ <input
78
+ type="email"
79
+ className="auth-input"
80
+ placeholder="you@company.com"
81
+ value={email}
82
+ onChange={e => setEmail(e.target.value)}
83
+ required
84
+ autoFocus
85
+ />
86
+ </label>
87
+
88
+ <label className="auth-field">
89
+ <span className="auth-label">Password</span>
90
+ <input
91
+ type="password"
92
+ className="auth-input"
93
+ placeholder="••••••••"
94
+ value={password}
95
+ onChange={e => setPassword(e.target.value)}
96
+ required
97
+ minLength={6}
98
+ />
99
+ </label>
100
+
101
+ {error && <div className="auth-error">{error}</div>}
102
+ {successMsg && <div className="auth-success">{successMsg}</div>}
103
+
104
+ <button
105
+ type="submit"
106
+ className="auth-submit"
107
+ disabled={loading || !email || !password}
108
+ >
109
+ {loading
110
+ ? (mode === 'login' ? 'Signing in…' : 'Creating account…')
111
+ : (mode === 'login' ? 'Sign In' : 'Create Account')}
112
+ </button>
113
+ </form>
114
+
115
+ <p className="auth-footer-text">
116
+ {mode === 'login' ? "Don't have an account? " : 'Already have an account? '}
117
+ <button
118
+ className="auth-switch-btn"
119
+ onClick={() => { setMode(mode === 'login' ? 'signup' : 'login'); setError(''); setSuccessMsg(''); }}
120
+ >
121
+ {mode === 'login' ? 'Sign up' : 'Sign in'}
122
+ </button>
123
+ </p>
124
+ </div>
125
+ </div>
126
+ );
127
+ };
web/src/components/BuildMonitor.tsx CHANGED
@@ -5,6 +5,11 @@ import { api } from '../api';
5
  const STATES_DISPLAY: Record<string, { label: string; icon: string }> = {
6
  INIT: { label: 'Initializing Workspace', icon: '🔧' },
7
  SPEC: { label: 'Architectural Planning', icon: '📐' },
 
 
 
 
 
8
  RTL_GEN: { label: 'RTL Generation', icon: '💻' },
9
  RTL_FIX: { label: 'RTL Syntax Fixing', icon: '🔨' },
10
  VERIFICATION: { label: 'Verification & Testbench', icon: '🧪' },
 
5
  const STATES_DISPLAY: Record<string, { label: string; icon: string }> = {
6
  INIT: { label: 'Initializing Workspace', icon: '🔧' },
7
  SPEC: { label: 'Architectural Planning', icon: '📐' },
8
+ SPEC_VALIDATE: { label: 'Specification Validation', icon: '🔍' },
9
+ HIERARCHY_EXPAND: { label: 'Hierarchy Expansion', icon: '🌲' },
10
+ FEASIBILITY_CHECK: { label: 'Feasibility Check', icon: '⚖️' },
11
+ CDC_ANALYZE: { label: 'CDC Analysis', icon: '🔀' },
12
+ VERIFICATION_PLAN: { label: 'Verification Planning', icon: '📋' },
13
  RTL_GEN: { label: 'RTL Generation', icon: '💻' },
14
  RTL_FIX: { label: 'RTL Syntax Fixing', icon: '🔨' },
15
  VERIFICATION: { label: 'Verification & Testbench', icon: '🧪' },
web/src/components/StageProgressBar.tsx CHANGED
@@ -3,6 +3,11 @@ import React from 'react';
3
  const STAGES = [
4
  { key: 'INIT', label: 'Initialization' },
5
  { key: 'SPEC', label: 'Specification' },
 
 
 
 
 
6
  { key: 'RTL_GEN', label: 'RTL Generation' },
7
  { key: 'RTL_FIX', label: 'RTL Fix' },
8
  { key: 'VERIFICATION', label: 'Verification' },
@@ -21,6 +26,11 @@ const STAGES = [
21
  const STAGE_DESCRIPTIONS: Record<string, string> = {
22
  INIT: 'Setting up build context',
23
  SPEC: 'Translating your idea into chip spec',
 
 
 
 
 
24
  RTL_GEN: 'Writing synthesizable Verilog',
25
  RTL_FIX: 'Resolving any RTL issues',
26
  VERIFICATION: 'Running simulation testbench',
 
3
  const STAGES = [
4
  { key: 'INIT', label: 'Initialization' },
5
  { key: 'SPEC', label: 'Specification' },
6
+ { key: 'SPEC_VALIDATE', label: 'Spec Validation' },
7
+ { key: 'HIERARCHY_EXPAND', label: 'Hierarchy Expansion' },
8
+ { key: 'FEASIBILITY_CHECK', label: 'Feasibility Check' },
9
+ { key: 'CDC_ANALYZE', label: 'CDC Analysis' },
10
+ { key: 'VERIFICATION_PLAN', label: 'Verification Plan' },
11
  { key: 'RTL_GEN', label: 'RTL Generation' },
12
  { key: 'RTL_FIX', label: 'RTL Fix' },
13
  { key: 'VERIFICATION', label: 'Verification' },
 
26
  const STAGE_DESCRIPTIONS: Record<string, string> = {
27
  INIT: 'Setting up build context',
28
  SPEC: 'Translating your idea into chip spec',
29
+ SPEC_VALIDATE: 'Validating spec completeness & generating assertions',
30
+ HIERARCHY_EXPAND: 'Expanding complex submodules into nested specs',
31
+ FEASIBILITY_CHECK: 'Evaluating Sky130 physical design feasibility',
32
+ CDC_ANALYZE: 'Identifying clock domain crossings',
33
+ VERIFICATION_PLAN: 'Generating verification plan & SVA properties',
34
  RTL_GEN: 'Writing synthesizable Verilog',
35
  RTL_FIX: 'Resolving any RTL issues',
36
  VERIFICATION: 'Running simulation testbench',
web/src/hitl.css CHANGED
@@ -542,6 +542,74 @@
542
  line-height: 1;
543
  }
544
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
545
  .hitl-stage--failed {
546
  border-left-color: var(--red);
547
  }
 
542
  line-height: 1;
543
  }
544
 
545
+ /* Stall warning banner ────────────────────────────────────────────── */
546
+ .hitl-stall-banner {
547
+ display: flex;
548
+ align-items: center;
549
+ justify-content: space-between;
550
+ gap: 0.75rem;
551
+ background: rgba(251, 191, 36, 0.08);
552
+ border: 1px solid rgba(251, 191, 36, 0.40);
553
+ border-left: 3px solid #fbbf24;
554
+ border-radius: 8px;
555
+ padding: 0.75rem 1rem;
556
+ margin-bottom: 0.75rem;
557
+ flex-wrap: wrap;
558
+ animation: hitl-toast-in 280ms cubic-bezier(0.22,1,0.36,1) both;
559
+ }
560
+
561
+ .hitl-stall-body {
562
+ display: flex;
563
+ align-items: flex-start;
564
+ gap: 0.5rem;
565
+ flex: 1;
566
+ min-width: 0;
567
+ }
568
+
569
+ .hitl-stall-icon {
570
+ font-size: 1rem;
571
+ flex-shrink: 0;
572
+ line-height: 1.4;
573
+ }
574
+
575
+ .hitl-stall-msg {
576
+ font-size: 0.8125rem;
577
+ color: #fef3c7;
578
+ line-height: 1.5;
579
+ }
580
+
581
+ .hitl-stall-actions {
582
+ display: flex;
583
+ gap: 0.5rem;
584
+ flex-shrink: 0;
585
+ }
586
+
587
+ .hitl-stall-cancel-btn {
588
+ padding: 0.3rem 0.75rem;
589
+ border-radius: 5px;
590
+ font-size: 0.75rem;
591
+ font-weight: 600;
592
+ cursor: pointer;
593
+ background: #dc2626;
594
+ color: #fff;
595
+ border: none;
596
+ transition: background 0.15s;
597
+ }
598
+ .hitl-stall-cancel-btn:hover { background: #b91c1c; }
599
+
600
+ .hitl-stall-dismiss-btn {
601
+ padding: 0.3rem 0.75rem;
602
+ border-radius: 5px;
603
+ font-size: 0.75rem;
604
+ font-weight: 500;
605
+ cursor: pointer;
606
+ background: rgba(255,255,255,0.08);
607
+ color: var(--text-secondary, #9a9590);
608
+ border: 1px solid rgba(255,255,255,0.12);
609
+ transition: background 0.15s;
610
+ }
611
+ .hitl-stall-dismiss-btn:hover { background: rgba(255,255,255,0.14); }
612
+
613
  .hitl-stage--failed {
614
  border-left-color: var(--red);
615
  }
web/src/index.css CHANGED
@@ -3019,3 +3019,325 @@ body {
3019
  .cs-stat-divider { width: auto; height: 1px; margin: 0; }
3020
  }
3021
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3019
  .cs-stat-divider { width: auto; height: 1px; margin: 0; }
3020
  }
3021
 
3022
+
3023
+ /* ═══════════════════════════════════════════════════════════════════
3024
+ AUTH PAGE — Premium Login/Signup
3025
+ ═══════════════════════════════════════════════════════════════════ */
3026
+ .auth-root {
3027
+ min-height: 100vh;
3028
+ display: flex;
3029
+ align-items: center;
3030
+ justify-content: center;
3031
+ background: var(--bg);
3032
+ position: relative;
3033
+ overflow: hidden;
3034
+ padding: 2rem;
3035
+ }
3036
+
3037
+ .auth-glow {
3038
+ position: absolute;
3039
+ border-radius: 50%;
3040
+ filter: blur(120px);
3041
+ pointer-events: none;
3042
+ opacity: 0.5;
3043
+ }
3044
+
3045
+ .auth-glow-1 {
3046
+ width: 500px;
3047
+ height: 500px;
3048
+ background: var(--accent-glow);
3049
+ top: -10%;
3050
+ right: -5%;
3051
+ animation: auth-float 8s ease-in-out infinite alternate;
3052
+ }
3053
+
3054
+ .auth-glow-2 {
3055
+ width: 400px;
3056
+ height: 400px;
3057
+ background: rgba(58, 120, 86, 0.12);
3058
+ bottom: -10%;
3059
+ left: -5%;
3060
+ animation: auth-float 10s ease-in-out infinite alternate-reverse;
3061
+ }
3062
+
3063
+ @keyframes auth-float {
3064
+ from { transform: translate(0, 0) scale(1); }
3065
+ to { transform: translate(30px, -20px) scale(1.08); }
3066
+ }
3067
+
3068
+ .auth-card {
3069
+ position: relative;
3070
+ z-index: 1;
3071
+ width: 100%;
3072
+ max-width: 420px;
3073
+ background: var(--bg-card);
3074
+ border: 1px solid var(--border);
3075
+ border-radius: var(--radius-lg);
3076
+ padding: 2.5rem 2rem 2rem;
3077
+ box-shadow: var(--shadow-lg);
3078
+ }
3079
+
3080
+ .auth-brand {
3081
+ display: flex;
3082
+ align-items: center;
3083
+ gap: 0.65rem;
3084
+ margin-bottom: 1.75rem;
3085
+ }
3086
+
3087
+ .auth-brand-logo {
3088
+ width: 36px;
3089
+ height: 36px;
3090
+ border-radius: 10px;
3091
+ background: var(--accent);
3092
+ color: var(--text-inverse);
3093
+ display: grid;
3094
+ place-items: center;
3095
+ font-weight: 800;
3096
+ font-size: 1.1rem;
3097
+ }
3098
+
3099
+ .auth-brand-title {
3100
+ font-size: 1rem;
3101
+ font-weight: 700;
3102
+ line-height: 1.2;
3103
+ }
3104
+
3105
+ .auth-brand-sub {
3106
+ font-size: 0.72rem;
3107
+ color: var(--text-dim);
3108
+ }
3109
+
3110
+ .auth-heading {
3111
+ font-size: 1.25rem;
3112
+ font-weight: 700;
3113
+ margin-bottom: 0.3rem;
3114
+ }
3115
+
3116
+ .auth-subheading {
3117
+ color: var(--text-mid);
3118
+ font-size: 0.85rem;
3119
+ margin-bottom: 1.25rem;
3120
+ }
3121
+
3122
+ .auth-tabs {
3123
+ display: flex;
3124
+ gap: 0;
3125
+ margin-bottom: 1.5rem;
3126
+ border: 1px solid var(--border);
3127
+ border-radius: var(--radius);
3128
+ overflow: hidden;
3129
+ }
3130
+
3131
+ .auth-tab {
3132
+ flex: 1;
3133
+ padding: 0.55rem;
3134
+ border: none;
3135
+ background: transparent;
3136
+ color: var(--text-mid);
3137
+ font-family: inherit;
3138
+ font-size: 0.84rem;
3139
+ font-weight: 500;
3140
+ cursor: pointer;
3141
+ transition: all var(--fast);
3142
+ }
3143
+
3144
+ .auth-tab--active {
3145
+ background: var(--accent);
3146
+ color: var(--text-inverse);
3147
+ font-weight: 600;
3148
+ }
3149
+
3150
+ .auth-tab:not(.auth-tab--active):hover {
3151
+ background: var(--bg-hover);
3152
+ color: var(--text);
3153
+ }
3154
+
3155
+ .auth-form {
3156
+ display: flex;
3157
+ flex-direction: column;
3158
+ gap: 1rem;
3159
+ }
3160
+
3161
+ .auth-field {
3162
+ display: flex;
3163
+ flex-direction: column;
3164
+ gap: 0.3rem;
3165
+ }
3166
+
3167
+ .auth-label {
3168
+ font-size: 0.78rem;
3169
+ font-weight: 600;
3170
+ color: var(--text-mid);
3171
+ letter-spacing: 0.02em;
3172
+ }
3173
+
3174
+ .auth-input {
3175
+ padding: 0.65rem 0.85rem;
3176
+ background: var(--bg);
3177
+ border: 1px solid var(--border);
3178
+ border-radius: var(--radius);
3179
+ color: var(--text);
3180
+ font-family: inherit;
3181
+ font-size: 0.9rem;
3182
+ outline: none;
3183
+ transition: border-color var(--fast), box-shadow var(--fast);
3184
+ }
3185
+
3186
+ .auth-input:focus {
3187
+ border-color: var(--accent);
3188
+ box-shadow: 0 0 0 3px var(--accent-soft);
3189
+ }
3190
+
3191
+ .auth-input::placeholder {
3192
+ color: var(--text-dim);
3193
+ }
3194
+
3195
+ .auth-error {
3196
+ background: var(--fail-bg);
3197
+ border: 1px solid var(--fail-bdr);
3198
+ color: var(--fail);
3199
+ border-radius: var(--radius);
3200
+ padding: 0.55rem 0.75rem;
3201
+ font-size: 0.82rem;
3202
+ line-height: 1.45;
3203
+ }
3204
+
3205
+ .auth-success {
3206
+ background: var(--success-bg);
3207
+ border: 1px solid var(--success-bdr);
3208
+ color: var(--success);
3209
+ border-radius: var(--radius);
3210
+ padding: 0.55rem 0.75rem;
3211
+ font-size: 0.82rem;
3212
+ line-height: 1.45;
3213
+ }
3214
+
3215
+ .auth-submit {
3216
+ padding: 0.7rem;
3217
+ background: var(--accent);
3218
+ color: var(--text-inverse);
3219
+ border: none;
3220
+ border-radius: var(--radius);
3221
+ font-family: inherit;
3222
+ font-size: 0.9rem;
3223
+ font-weight: 600;
3224
+ cursor: pointer;
3225
+ transition: opacity var(--fast), transform var(--fast), box-shadow var(--fast);
3226
+ margin-top: 0.25rem;
3227
+ }
3228
+
3229
+ .auth-submit:hover:not(:disabled) {
3230
+ opacity: 0.92;
3231
+ transform: translateY(-1px);
3232
+ box-shadow: 0 4px 12px var(--accent-glow);
3233
+ }
3234
+
3235
+ .auth-submit:disabled {
3236
+ opacity: 0.5;
3237
+ cursor: not-allowed;
3238
+ }
3239
+
3240
+ .auth-footer-text {
3241
+ text-align: center;
3242
+ font-size: 0.82rem;
3243
+ color: var(--text-mid);
3244
+ margin-top: 1.25rem;
3245
+ }
3246
+
3247
+ .auth-switch-btn {
3248
+ background: none;
3249
+ border: none;
3250
+ color: var(--accent);
3251
+ cursor: pointer;
3252
+ font-family: inherit;
3253
+ font-size: 0.82rem;
3254
+ font-weight: 600;
3255
+ text-decoration: underline;
3256
+ text-underline-offset: 2px;
3257
+ }
3258
+
3259
+ .auth-switch-btn:hover {
3260
+ color: var(--accent-light);
3261
+ }
3262
+
3263
+ /* Auth loading spinner */
3264
+ .auth-loading {
3265
+ min-height: 100vh;
3266
+ display: flex;
3267
+ flex-direction: column;
3268
+ align-items: center;
3269
+ justify-content: center;
3270
+ gap: 1rem;
3271
+ background: var(--bg);
3272
+ color: var(--text-mid);
3273
+ font-size: 0.9rem;
3274
+ }
3275
+
3276
+ .auth-loading-spinner {
3277
+ width: 28px;
3278
+ height: 28px;
3279
+ border: 3px solid var(--border);
3280
+ border-top-color: var(--accent);
3281
+ border-radius: 50%;
3282
+ animation: auth-spin 0.7s linear infinite;
3283
+ }
3284
+
3285
+ @keyframes auth-spin {
3286
+ to { transform: rotate(360deg); }
3287
+ }
3288
+
3289
+ /* ── Sidebar user info + logout ──────────────────── */
3290
+ .app-user-info {
3291
+ display: flex;
3292
+ align-items: center;
3293
+ gap: 0.55rem;
3294
+ padding: 0.5rem 0.35rem;
3295
+ border: 1px solid var(--border);
3296
+ border-radius: var(--radius);
3297
+ background: var(--bg-card);
3298
+ }
3299
+
3300
+ .app-user-avatar {
3301
+ width: 28px;
3302
+ height: 28px;
3303
+ border-radius: 50%;
3304
+ background: var(--accent-soft);
3305
+ color: var(--accent);
3306
+ display: grid;
3307
+ place-items: center;
3308
+ font-weight: 700;
3309
+ font-size: 0.78rem;
3310
+ flex-shrink: 0;
3311
+ border: 1px solid var(--border);
3312
+ }
3313
+
3314
+ .app-user-details {
3315
+ min-width: 0;
3316
+ }
3317
+
3318
+ .app-user-email {
3319
+ font-size: 0.75rem;
3320
+ color: var(--text-mid);
3321
+ white-space: nowrap;
3322
+ overflow: hidden;
3323
+ text-overflow: ellipsis;
3324
+ }
3325
+
3326
+ .logout-btn {
3327
+ border: 1px solid var(--fail-bdr);
3328
+ border-radius: var(--radius);
3329
+ background: var(--fail-bg);
3330
+ color: var(--fail);
3331
+ padding: 0.4rem 0.55rem;
3332
+ font-size: 0.78rem;
3333
+ font-family: inherit;
3334
+ font-weight: 500;
3335
+ cursor: pointer;
3336
+ transition: all var(--fast);
3337
+ }
3338
+
3339
+ .logout-btn:hover {
3340
+ background: var(--fail);
3341
+ color: var(--text-inverse);
3342
+ border-color: var(--fail);
3343
+ }
web/src/pages/Documentation.tsx CHANGED
@@ -440,6 +440,11 @@ export const Documentation = () => {
440
  const stageDescriptions: Record<string, string> = {
441
  INIT: 'Create workspace directory structure, validate dependencies, and initialize build artifacts dictionary.',
442
  SPEC: 'Generate a detailed architecture specification from the natural-language prompt, including module interfaces, FSM descriptions, and clock/reset requirements.',
 
 
 
 
 
443
  RTL_GEN: 'Generate synthesizable RTL (SystemVerilog or Verilog-2005) from the architecture spec using multi-agent collaboration. Falls back to template library when available.',
444
  RTL_FIX: 'Run static analysis, pre-synthesis semantic checks, and iterative auto-repair. Supports strategy pivot (SV → Verilog-2005) when fixes stall.',
445
  VERIFICATION: 'Generate self-checking testbenches, compile, run simulation, and check for passing results. Includes static contract checking and fingerprint deduplication.',
 
440
  const stageDescriptions: Record<string, string> = {
441
  INIT: 'Create workspace directory structure, validate dependencies, and initialize build artifacts dictionary.',
442
  SPEC: 'Generate a detailed architecture specification from the natural-language prompt, including module interfaces, FSM descriptions, and clock/reset requirements.',
443
+ SPEC_VALIDATE: 'Run 6-stage hardware spec validation: classify design category (PROCESSOR/MEMORY/INTERFACE/etc.), check mandatory field completeness, decompose into domain-valid submodules, define top-level interface, generate GIVEN/WHEN/THEN behavioral contract assertions, and produce warnings for implicit assumptions.',
444
+ HIERARCHY_EXPAND: 'Evaluate each submodule against complexity triggers (pipeline, arbitration, cache, cross-category, large memory, >8 ports). Recursively expand complex submodules into nested specs with their own ports, sub-blocks, and behavioral contracts (max depth 3). Verify interface consistency — clock/reset propagation, no multi-driver conflicts, no unconnected ports.',
445
+ FEASIBILITY_CHECK: 'Evaluate Sky130/OpenLane physical design feasibility before RTL generation. Checks frequency limits (≤200 MHz), memory sizing (register vs. OpenRAM thresholds), arithmetic complexity (multiplier/divider gate cost), total area budget in gate equivalents, and Sky130-specific rules (no internal tri-states, no analog blocks). REJECT halts the pipeline; WARN passes with caveats.',
446
+ CDC_ANALYZE: 'Identify every clock domain in the design and enumerate all signals crossing domain boundaries. Assign synchronization strategies: 2-flop synchronizer for single-bit controls, pulse synchronizer for fast-to-slow pulses, async FIFO with Gray-coded pointers for multi-bit data, 4-phase handshake for low-bandwidth transfers, and reset synchronizer (async assert / sync deassert) for all cross-domain resets. Generate CDC submodule specifications that the RTL generator must instantiate. UNRESOLVED crossings halt the pipeline.',
447
+ VERIFICATION_PLAN: 'Generate a structured verification plan from the hardware spec before RTL generation. Extract testable behaviors from behavioral contracts (GIVEN/WHEN/THEN), add mandatory tests by design category (PROCESSOR/MEMORY/INTERFACE/ARITHMETIC), generate SystemVerilog Assertions for all P0 tests, and create a coverage plan with port bins, FSM state/transition coverage, and FIFO boundary conditions. Outputs structured JSON consumed by downstream testbench and formal verification stages.',
448
  RTL_GEN: 'Generate synthesizable RTL (SystemVerilog or Verilog-2005) from the architecture spec using multi-agent collaboration. Falls back to template library when available.',
449
  RTL_FIX: 'Run static analysis, pre-synthesis semantic checks, and iterative auto-repair. Supports strategy pivot (SV → Verilog-2005) when fixes stall.',
450
  VERIFICATION: 'Generate self-checking testbenches, compile, run simulation, and check for passing results. Includes static contract checking and fingerprint deduplication.',
web/src/pages/HumanInLoopBuild.tsx CHANGED
@@ -7,7 +7,7 @@ import { api, API_BASE } from '../api';
7
  import '../hitl.css';
8
 
9
  const PIPELINE_STAGES = [
10
- 'INIT', 'SPEC', 'RTL_GEN', 'RTL_FIX', 'VERIFICATION', 'FORMAL_VERIFY',
11
  'COVERAGE_CHECK', 'REGRESSION', 'SDC_GEN', 'FLOORPLAN', 'HARDENING',
12
  'CONVERGENCE_REVIEW', 'ECO_PATCH', 'SIGNOFF',
13
  ];
@@ -17,6 +17,11 @@ const TOTAL = PIPELINE_STAGES.length;
17
  const STAGE_ENCOURAGEMENTS: Record<string, string> = {
18
  INIT: 'Setting up your build environment…',
19
  SPEC: 'Translating your description into a chip specification…',
 
 
 
 
 
20
  RTL_GEN: 'Writing Verilog — your chip is taking shape…',
21
  RTL_FIX: 'Fixing any RTL issues automatically…',
22
  VERIFICATION: 'Running simulation — making sure your logic is correct…',
@@ -41,7 +46,7 @@ const MILESTONE_TOASTS: Record<string, { title: string; msg: string }> = {
41
 
42
  // Human-readable stage names
43
  const STAGE_LABELS: Record<string, string> = {
44
- INIT: 'Initialization', SPEC: 'Specification', RTL_GEN: 'RTL Generation',
45
  RTL_FIX: 'RTL Fix', VERIFICATION: 'Verification', FORMAL_VERIFY: 'Formal Verification',
46
  COVERAGE_CHECK: 'Coverage Check', REGRESSION: 'Regression', SDC_GEN: 'SDC Generation',
47
  FLOORPLAN: 'Floorplan', HARDENING: 'Hardening', CONVERGENCE_REVIEW: 'Convergence',
@@ -50,7 +55,7 @@ const STAGE_LABELS: Record<string, string> = {
50
 
51
  // Mandatory stages (cannot be skipped)
52
  const MANDATORY_STAGES = new Set([
53
- 'INIT', 'SPEC', 'RTL_GEN', 'RTL_FIX', 'VERIFICATION', 'HARDENING', 'SIGNOFF',
54
  ]);
55
 
56
  // Build mode presets
@@ -147,6 +152,9 @@ export const HumanInLoopBuild = () => {
147
  // Thinking indicator (Improvement 1)
148
  const [thinkingData, setThinkingData] = useState<{ agent_name: string; message: string } | null>(null);
149
 
 
 
 
150
  // Milestone toast: shown briefly when a key stage completes
151
  const [milestoneToast, setMilestoneToast] = useState<{ title: string; msg: string } | null>(null);
152
  const milestoneTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
@@ -215,6 +223,17 @@ export const HumanInLoopBuild = () => {
215
  return;
216
  }
217
 
 
 
 
 
 
 
 
 
 
 
 
218
  // Handle agent_thinking: show pulsing indicator
219
  if (data.type === 'agent_thinking') {
220
  setThinkingData({ agent_name: data.agent_name || '', message: data.message || '' });
@@ -394,6 +413,7 @@ export const HumanInLoopBuild = () => {
394
  setWaitingForApproval(false);
395
  setApprovalData(null);
396
  setThinkingData(null);
 
397
  setBuildMode('verified');
398
  setSkipStages(new Set(BUILD_MODE_SKIPS.verified));
399
  setSkipCoverage(false);
@@ -630,6 +650,18 @@ export const HumanInLoopBuild = () => {
630
  skippedStages={skipStages}
631
  />
632
  <div className="hitl-main">
 
 
 
 
 
 
 
 
 
 
 
 
633
  <ActivityFeed events={events} thinkingData={thinkingData} />
634
  {approvalData && (
635
  <ApprovalCard
 
7
  import '../hitl.css';
8
 
9
  const PIPELINE_STAGES = [
10
+ 'INIT', 'SPEC', 'SPEC_VALIDATE', 'HIERARCHY_EXPAND', 'FEASIBILITY_CHECK', 'CDC_ANALYZE', 'VERIFICATION_PLAN', 'RTL_GEN', 'RTL_FIX', 'VERIFICATION', 'FORMAL_VERIFY',
11
  'COVERAGE_CHECK', 'REGRESSION', 'SDC_GEN', 'FLOORPLAN', 'HARDENING',
12
  'CONVERGENCE_REVIEW', 'ECO_PATCH', 'SIGNOFF',
13
  ];
 
17
  const STAGE_ENCOURAGEMENTS: Record<string, string> = {
18
  INIT: 'Setting up your build environment…',
19
  SPEC: 'Translating your description into a chip specification…',
20
+ SPEC_VALIDATE: 'Validating spec — classifying design, checking completeness, generating assertions…',
21
+ HIERARCHY_EXPAND: 'Expanding complex submodules into nested specifications…',
22
+ FEASIBILITY_CHECK: 'Evaluating Sky130 physical design feasibility…',
23
+ CDC_ANALYZE: 'Analyzing clock domain crossings…',
24
+ VERIFICATION_PLAN: 'Building verification plan & SVA properties…',
25
  RTL_GEN: 'Writing Verilog — your chip is taking shape…',
26
  RTL_FIX: 'Fixing any RTL issues automatically…',
27
  VERIFICATION: 'Running simulation — making sure your logic is correct…',
 
46
 
47
  // Human-readable stage names
48
  const STAGE_LABELS: Record<string, string> = {
49
+ INIT: 'Initialization', SPEC: 'Specification', SPEC_VALIDATE: 'Spec Validation', HIERARCHY_EXPAND: 'Hierarchy Expansion', FEASIBILITY_CHECK: 'Feasibility Check', CDC_ANALYZE: 'CDC Analysis', VERIFICATION_PLAN: 'Verification Plan', RTL_GEN: 'RTL Generation',
50
  RTL_FIX: 'RTL Fix', VERIFICATION: 'Verification', FORMAL_VERIFY: 'Formal Verification',
51
  COVERAGE_CHECK: 'Coverage Check', REGRESSION: 'Regression', SDC_GEN: 'SDC Generation',
52
  FLOORPLAN: 'Floorplan', HARDENING: 'Hardening', CONVERGENCE_REVIEW: 'Convergence',
 
55
 
56
  // Mandatory stages (cannot be skipped)
57
  const MANDATORY_STAGES = new Set([
58
+ 'INIT', 'SPEC', 'SPEC_VALIDATE', 'HIERARCHY_EXPAND', 'FEASIBILITY_CHECK', 'CDC_ANALYZE', 'VERIFICATION_PLAN', 'RTL_GEN', 'RTL_FIX', 'VERIFICATION', 'HARDENING', 'SIGNOFF',
59
  ]);
60
 
61
  // Build mode presets
 
152
  // Thinking indicator (Improvement 1)
153
  const [thinkingData, setThinkingData] = useState<{ agent_name: string; message: string } | null>(null);
154
 
155
+ // Stall detection — shown when LLM is silent for 5+ minutes
156
+ const [stallWarning, setStallWarning] = useState<string | null>(null);
157
+
158
  // Milestone toast: shown briefly when a key stage completes
159
  const [milestoneToast, setMilestoneToast] = useState<{ title: string; msg: string } | null>(null);
160
  const milestoneTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
 
223
  return;
224
  }
225
 
226
+ // Handle stall warning: LLM silent for 5+ minutes
227
+ if (data.type === 'stall_warning') {
228
+ setStallWarning(data.message || '⚠️ No activity for 5 minutes — the LLM may be stuck. You can cancel and retry.');
229
+ return;
230
+ }
231
+
232
+ // Any real event clears the stall warning and thinking indicator
233
+ if (data.type === 'log' || data.type === 'checkpoint' || data.type === 'transition') {
234
+ setStallWarning(null);
235
+ }
236
+
237
  // Handle agent_thinking: show pulsing indicator
238
  if (data.type === 'agent_thinking') {
239
  setThinkingData({ agent_name: data.agent_name || '', message: data.message || '' });
 
413
  setWaitingForApproval(false);
414
  setApprovalData(null);
415
  setThinkingData(null);
416
+ setStallWarning(null);
417
  setBuildMode('verified');
418
  setSkipStages(new Set(BUILD_MODE_SKIPS.verified));
419
  setSkipCoverage(false);
 
650
  skippedStages={skipStages}
651
  />
652
  <div className="hitl-main">
653
+ {stallWarning && (
654
+ <div className="hitl-stall-banner">
655
+ <div className="hitl-stall-body">
656
+ <span className="hitl-stall-icon">⚠️</span>
657
+ <span className="hitl-stall-msg">{stallWarning}</span>
658
+ </div>
659
+ <div className="hitl-stall-actions">
660
+ <button className="hitl-stall-cancel-btn" onClick={handleCancel}>Cancel Build</button>
661
+ <button className="hitl-stall-dismiss-btn" onClick={() => setStallWarning(null)}>Dismiss</button>
662
+ </div>
663
+ </div>
664
+ )}
665
  <ActivityFeed events={events} thinkingData={thinkingData} />
666
  {approvalData && (
667
  <ApprovalCard
web/src/supabaseClient.ts ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ import { createClient } from '@supabase/supabase-js';
2
+
3
+ const supabaseUrl = import.meta.env.VITE_SUPABASE_URL || '';
4
+ const supabaseAnonKey = import.meta.env.VITE_SUPABASE_ANON_KEY || '';
5
+
6
+ export const supabase = createClient(supabaseUrl, supabaseAnonKey);