mohammademad2003 commited on
Commit
7477316
·
1 Parent(s): 55e3496

first commit

Browse files
.dockerignore ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python
2
+ __pycache__/
3
+ *.pyc
4
+ *.pyo
5
+ *.egg-info/
6
+ dist/
7
+ *.egg
8
+ venv/
9
+ .venv/
10
+ env/
11
+
12
+ # Node
13
+ node_modules/
14
+ frontend/node_modules/
15
+
16
+ # IDE
17
+ .vscode/
18
+ .idea/
19
+ *.swp
20
+ *.swo
21
+
22
+ # Git
23
+ .git/
24
+ .gitignore
25
+
26
+ # OS
27
+ .DS_Store
28
+ Thumbs.db
29
+
30
+ # Runtime / data
31
+ optiq.db
32
+ *.db
33
+ *.log
34
+ *.pid
35
+
36
+ # Environment secrets
37
+ .env
38
+
39
+ # Misc
40
+ EcoHackathon/
.gitignore CHANGED
@@ -1,44 +1,136 @@
1
- # Python
 
 
2
  __pycache__/
3
  *.py[cod]
 
 
 
4
  *.egg-info/
 
5
  dist/
6
  build/
7
- .eggs/
 
 
 
 
 
 
 
 
 
8
 
9
  # Virtual environments
10
  .venv/
11
  venv/
12
  env/
 
13
  *.env
 
 
14
  .env.local
15
 
16
- # IDE
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  .vscode/
18
  .idea/
19
  *.swp
20
  *.swo
 
21
 
22
- # OS
 
 
23
  .DS_Store
24
  Thumbs.db
25
 
26
- # Models (large binary files)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  models/*.pt
28
  models/*.pth
 
 
29
  !models/.gitkeep
30
 
31
- # Data
 
 
32
  data/*.csv
33
  data/*.json
 
34
  *.h5
35
 
36
- # Logs
37
- *.log
 
38
  runs/
 
 
39
 
40
- # Test artifacts
41
- test_*.py
 
 
 
 
42
 
43
- # Jupyter
44
- .ipynb_checkpoints/
 
 
 
 
 
1
+ ########################################
2
+ # ===== Python / FastAPI =====
3
+ ########################################
4
  __pycache__/
5
  *.py[cod]
6
+ *.pyo
7
+ *.pyd
8
+ *.so
9
  *.egg-info/
10
+ .eggs/
11
  dist/
12
  build/
13
+ pip-wheel-metadata/
14
+ .pytest_cache/
15
+ .mypy_cache/
16
+ .pytype/
17
+ ruff_cache/
18
+
19
+ # FastAPI / Uvicorn / Gunicorn
20
+ *.log
21
+ logs/
22
+ uvicorn.log
23
 
24
  # Virtual environments
25
  .venv/
26
  venv/
27
  env/
28
+ ENV/
29
  *.env
30
+ .env
31
+ .env.*
32
  .env.local
33
 
34
+ ########################################
35
+ # ===== React / Node =====
36
+ ########################################
37
+ node_modules/
38
+ npm-debug.log*
39
+ yarn-debug.log*
40
+ yarn-error.log*
41
+ pnpm-debug.log*
42
+ .pnpm-store/
43
+
44
+ # Build outputs
45
+ build/
46
+ dist/
47
+ out/
48
+ .next/
49
+ .vercel/
50
+ .svelte-kit/
51
+
52
+ # Vite / React
53
+ .vite/
54
+ .cache/
55
+
56
+ ########################################
57
+ # ===== Frontend tooling =====
58
+ ########################################
59
+ .eslintcache
60
+ .stylelintcache
61
+ .parcel-cache
62
+ .turbo
63
+
64
+ ########################################
65
+ # ===== IDE / Editors =====
66
+ ########################################
67
  .vscode/
68
  .idea/
69
  *.swp
70
  *.swo
71
+ *.iml
72
 
73
+ ########################################
74
+ # ===== OS =====
75
+ ########################################
76
  .DS_Store
77
  Thumbs.db
78
 
79
+ ########################################
80
+ # ===== Jupyter =====
81
+ ########################################
82
+ .ipynb_checkpoints/
83
+
84
+ ########################################
85
+ # ===== Docker =====
86
+ ########################################
87
+ docker-compose.override.yml
88
+ *.pid
89
+
90
+ ########################################
91
+ # ===== Testing =====
92
+ ########################################
93
+ htmlcov/
94
+ .coverage
95
+ .coverage.*
96
+ coverage.xml
97
+ test_*.py
98
+ pytestdebug.log
99
+
100
+ ########################################
101
+ # ===== ML / AI Artifacts =====
102
+ ########################################
103
  models/*.pt
104
  models/*.pth
105
+ models/*.onnx
106
+ models/*.joblib
107
  !models/.gitkeep
108
 
109
+ ########################################
110
+ # ===== Data =====
111
+ ########################################
112
  data/*.csv
113
  data/*.json
114
+ data/*.parquet
115
  *.h5
116
 
117
+ ########################################
118
+ # ===== Runtime =====
119
+ ########################################
120
  runs/
121
+ tmp/
122
+ temp/
123
 
124
+ ########################################
125
+ # ===== Security =====
126
+ ########################################
127
+ *.key
128
+ *.pem
129
+ *.crt
130
 
131
+ ########################################
132
+ # ===== Misc =====
133
+ ########################################
134
+ *.bak
135
+ *.tmp
136
+ *.orig
FRONTEND_SPEC.md DELETED
@@ -1,235 +0,0 @@
1
- # OptiQ Frontend Specification (for Lovable Pro)
2
-
3
- ## API Base URL
4
-
5
- ```
6
- http://localhost:8000
7
- ```
8
-
9
- ## Endpoints
10
-
11
- ### 1. Health Check
12
-
13
- ```
14
- GET /
15
- Response: { "status": "ok", "project": "OptiQ", "version": "0.1.0" }
16
- ```
17
-
18
- ### 2. Get Baseline (Network + Power Flow)
19
-
20
- ```
21
- GET /api/baseline/{system}
22
- ```
23
-
24
- **Params**: `system` = `"case33bw"` or `"case118"`
25
-
26
- **Response** (key fields):
27
- ```json
28
- {
29
- "system": "case33bw",
30
- "network": {
31
- "n_buses": 33,
32
- "n_lines": 37,
33
- "n_lines_in_service": 32,
34
- "n_tie_lines": 5,
35
- "n_generators": 1,
36
- "n_loads": 32,
37
- "total_load_mw": 3.715,
38
- "total_load_mvar": 2.3,
39
- "tie_line_indices": [32, 33, 34, 35, 36]
40
- },
41
- "power_flow": {
42
- "converged": true,
43
- "total_loss_kw": 202.68,
44
- "loss_pct": 5.17,
45
- "min_voltage_pu": 0.9131,
46
- "max_voltage_pu": 1.0,
47
- "voltage_violations": 21,
48
- "bus_voltages": [1.0, 0.997, ...], // 33 values
49
- "line_loadings_pct": [47.2, 43.1, ...], // 37 values
50
- "line_losses_kw": [12.5, 11.3, ...] // 37 values
51
- },
52
- "buses": [
53
- {"index": 0, "name": "Bus 0", "vn_kv": 12.66, "load_mw": 0.0, "load_mvar": 0.0, "is_slack": true},
54
- {"index": 1, "name": "Bus 1", "vn_kv": 12.66, "load_mw": 0.1, "load_mvar": 0.06, "is_slack": false},
55
- ...
56
- ],
57
- "lines": [
58
- {"index": 0, "from_bus": 0, "to_bus": 1, "r_ohm_per_km": 0.0922, "x_ohm_per_km": 0.047, "length_km": 1.0, "in_service": true, "is_tie": false},
59
- {"index": 32, "from_bus": 20, "to_bus": 7, "r_ohm_per_km": 2.0, "x_ohm_per_km": 2.0, "length_km": 1.0, "in_service": false, "is_tie": true},
60
- ...
61
- ]
62
- }
63
- ```
64
-
65
- ### 3. Optimize
66
-
67
- ```
68
- POST /api/optimize
69
- Content-Type: application/json
70
- ```
71
-
72
- **Body**:
73
- ```json
74
- {
75
- "system": "case33bw",
76
- "method": "hybrid", // "classical", "quantum", "ai", or "hybrid"
77
- "quantum_iters": 300, // optional
78
- "quantum_restarts": 3, // optional
79
- "quantum_top_k": 5 // optional
80
- }
81
- ```
82
-
83
- **Response**:
84
- ```json
85
- {
86
- "system": "case33bw",
87
- "method": "hybrid",
88
- "baseline": { /* same as power_flow above */ },
89
- "optimized": {
90
- "converged": true,
91
- "total_loss_kw": 139.55,
92
- "loss_pct": 3.62,
93
- "min_voltage_pu": 0.9378,
94
- "voltage_violations": 7,
95
- "open_lines": [6, 8, 13, 31, 36],
96
- "bus_voltages": [...],
97
- "line_loadings_pct": [...],
98
- "line_losses_kw": [...]
99
- },
100
- "impact": {
101
- "baseline_loss_kw": 202.68,
102
- "optimized_loss_kw": 139.55,
103
- "loss_reduction_kw": 63.13,
104
- "loss_reduction_pct": 31.15,
105
- "energy_saved_mwh_year": 553.02,
106
- "co2_saved_tonnes_year": 262.68,
107
- "cost_saved_usd_year": 55301.88,
108
- "voltage_violations_fixed": 14,
109
- "equivalent_trees_planted": 12508,
110
- "equivalent_cars_removed": 57.1
111
- },
112
- "candidates": [
113
- {"open_lines": [6,8,13,31,36], "loss_kw": 139.55, "min_voltage": 0.9378},
114
- {"open_lines": [6,8,13,27,31], "loss_kw": 139.98, "min_voltage": 0.9364}
115
- ],
116
- "timings": {
117
- "baseline_sec": 0.15,
118
- "quantum_sec": 7.1,
119
- "ai_classical_sec": 0.7,
120
- "total_sec": 8.0
121
- },
122
- "total_time_sec": 8.0
123
- }
124
- ```
125
-
126
- ### 4. Compare (Side-by-Side)
127
-
128
- ```
129
- POST /api/compare
130
- Content-Type: application/json
131
- ```
132
-
133
- **Body**:
134
- ```json
135
- {
136
- "system": "case33bw",
137
- "methods": ["classical", "quantum", "hybrid"]
138
- }
139
- ```
140
-
141
- **Response**:
142
- ```json
143
- {
144
- "system": "case33bw",
145
- "baseline": { /* power_flow results */ },
146
- "comparisons": {
147
- "classical": {
148
- "optimized": { /* power_flow results + open_lines */ },
149
- "impact": { /* impact metrics */ },
150
- "time_sec": 10.5
151
- },
152
- "quantum": {
153
- "optimized": { ... },
154
- "impact": { ... },
155
- "time_sec": 17.2,
156
- "timings": { "baseline_sec": ..., "quantum_sec": ..., ... }
157
- },
158
- "hybrid": {
159
- "optimized": { ... },
160
- "impact": { ... },
161
- "time_sec": 8.0,
162
- "timings": { ... }
163
- }
164
- }
165
- }
166
- ```
167
-
168
- ---
169
-
170
- ## Frontend Pages
171
-
172
- ### Page 1: Grid Overview
173
-
174
- **Data source**: `GET /api/baseline/case33bw`
175
-
176
- **Layout**:
177
- - **Network Graph**: Use the `buses` and `lines` arrays to draw a force-directed or hierarchical graph
178
- - Nodes = buses, colored by `bus_voltages[i]` (red < 0.95, green 0.95-1.05, yellow > 1.05)
179
- - Edges = lines, solid if `in_service`, dashed if `is_tie`
180
- - Node size proportional to `load_mw`
181
- - Label the slack bus (is_slack=true)
182
- - **Stats Panel** (sidebar or top):
183
- - Total Load: `network.total_load_mw` MW
184
- - Total Losses: `power_flow.total_loss_kw` kW (`loss_pct`%)
185
- - Min Voltage: `power_flow.min_voltage_pu` p.u.
186
- - Voltage Violations: `power_flow.voltage_violations`
187
- - **Voltage Profile Chart**: Bar chart of all 33 bus voltages with 0.95/1.05 limit lines
188
-
189
- ### Page 2: Optimizer
190
-
191
- **Data source**: `POST /api/optimize`
192
-
193
- **Layout**:
194
- - **Method Selector**: Dropdown or tabs for Classical / Quantum / Hybrid
195
- - **"Optimize" Button**: Triggers POST, shows loading spinner
196
- - **Before/After Side-by-Side**:
197
- - Left panel: Baseline network graph + stats
198
- - Right panel: Optimized network graph (highlight changed lines in orange)
199
- - Center: Delta metrics (loss reduction kW, voltage improvement, etc.)
200
- - **Impact Cards** (large, prominent):
201
- - Loss Reduction: `impact.loss_reduction_pct`% with arrow down icon
202
- - CO₂ Saved: `impact.co2_saved_tonnes_year` tonnes/year
203
- - Cost Saved: `$impact.cost_saved_usd_year`/year
204
- - Voltage Fixed: `impact.voltage_violations_fixed` violations resolved
205
- - **Timing Bar**: Show quantum / AI / classical time breakdown
206
- - **Candidates Table**: List all evaluated topologies with loss values
207
-
208
- ### Page 3: Impact Calculator
209
-
210
- **Data source**: `POST /api/optimize` (use cached result) + local calculation
211
-
212
- **Layout**:
213
- - **Scaling Inputs** (sliders):
214
- - Grid size: number of buses (33 to 100,000)
215
- - Annual energy throughput (MWh/year)
216
- - Grid emission factor (kg CO₂/kWh)
217
- - Electricity price ($/kWh)
218
- - **Projected Impact** (scales linearly from the 33-bus result):
219
- - Annual energy saved (MWh)
220
- - Annual CO₂ saved (tonnes)
221
- - Annual cost saved ($)
222
- - Equivalent trees planted
223
- - Equivalent cars removed from road
224
- - **Validation Questions** (expandable accordion):
225
- - Pre-populate with the 8 validation answers from the plan
226
-
227
- ---
228
-
229
- ## Design Notes
230
-
231
- - Color scheme: Dark mode preferred, with green (#22c55e) for improvements, red (#ef4444) for violations
232
- - Charts: Use Recharts or Chart.js
233
- - Network graph: Use react-force-graph, vis-network, or D3.js
234
- - Responsive: Must look good on both desktop and projector
235
- The API runs on localhost:8000; CORS is already configured to allow all origins
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
README.md CHANGED
@@ -1,22 +1,65 @@
1
- # OptiQ — Hybrid Quantum-AI-Classical Grid Optimization
 
 
 
 
 
 
 
2
 
3
- **The first working prototype of the hybrid Quantum-AI-Classical optimization stack for power distribution networks.**
4
 
5
- OptiQ reduces distribution grid losses by 30%+ through intelligent network reconfiguration [1][9] — a software-only solution that works on existing infrastructure with zero hardware changes.
 
6
 
7
- ## The Innovation
8
 
9
- The state-of-the-art literature describes a future "Hybrid Intelligence stack" where quantum processors explore topological configurations, AI provides millisecond-scale predictions, and classical solvers verify feasibility. **Nobody has built this.** OptiQ is the first working implementation.
10
 
11
- | Layer | Technology | Role |
12
- |-------|-----------|------|
13
- | Quantum | QAOA / Simulated Annealing on QUBO (Qiskit) | Explore combinatorial space of network topologies |
14
- | AI | Physics-Informed GNN (PyTorch Geometric) | Predict optimal voltage profiles in milliseconds |
15
- | Classical | AC Power Flow (pandapower) | Verify feasibility and compute true losses |
16
 
17
- ## Results (IEEE 33-Bus System)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
- ### OptiQ vs Published Algorithms
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
 
21
  Many metaheuristics get trapped at local optima (~146 kW). OptiQ consistently finds the global optimal. All sources listed in [REFERENCES.md](REFERENCES.md).
22
 
@@ -42,7 +85,7 @@ Many metaheuristics get trapped at local optima (~146 kW). OptiQ consistently fi
42
  | Basic ADMS module (ABB/Siemens/GE) | 15-25% [9][22] | $5-50M [22] | Massive CAPEX. 12-24 month deploy. New hardware. |
43
  | **OptiQ** | **28-32%** | $200/feeder/month | Software-only. Zero CAPEX. Deploys in weeks. |
44
 
45
- ### OptiQ Detailed Results
46
 
47
  | Method | Loss (kW) | Reduction | Min V (pu) | Violations | Time (s) |
48
  |--------|-----------|-----------|------------|------------|----------|
@@ -62,10 +105,37 @@ Many metaheuristics get trapped at local optima (~146 kW). OptiQ consistently fi
62
  | 1.15x | 274.58 | 187.90 | 31.6% |
63
  | 1.30x | 359.82 | 243.80 | 32.2% |
64
 
 
 
65
  ## Architecture
66
 
67
  ```
68
- IEEE 33-Bus Data (pandapower built-in)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
69
 
70
  [Quantum: SA on QUBO] ──→ Top-5 candidate topologies
71
 
@@ -76,166 +146,360 @@ IEEE 33-Bus Data (pandapower built-in)
76
  Best Solution ──→ FastAPI ──→ Frontend Dashboard
77
  ```
78
 
79
- ## Quick Start
80
 
81
- ### Prerequisites
82
 
83
- - Python 3.11+ with conda
84
- - CUDA GPU (optional, for faster GNN training)
85
 
86
- ### Installation
87
 
88
- ```bash
89
- # Activate your conda environment
90
- conda activate projects
91
 
92
- # Install dependencies
93
- pip install -r requirements.txt
 
 
 
 
 
 
 
 
 
94
 
95
- # Train the GNN model (takes ~60 seconds)
96
- python -c "
97
- import sys; sys.path.insert(0, '.')
98
- from src.ai.train import train
99
- train(n_scenarios=1000, epochs=100, verbose=True)
100
- "
101
- ```
102
 
103
- ### Run the Benchmark
104
 
105
- ```bash
106
- cd OptiQ
107
- conda run -n projects python scripts/benchmark.py
108
- ```
 
 
109
 
110
- ### Run the API
111
 
112
- ```bash
113
- conda run -n projects uvicorn api.main:app --host 0.0.0.0 --port 8000 --reload
114
- ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
115
 
116
- ### API Endpoints
117
 
118
- - `GET /` — Health check
119
- - `GET /api/baseline/{system}` — Baseline power flow (system: `case33bw` or `case118`)
120
- - `POST /api/optimize` — Run optimization (methods: `classical`, `quantum`, `ai`, `hybrid`)
121
- - `POST /api/compare` — Compare multiple methods side-by-side
122
- - `GET /api/validate` — All hackathon validation answers as structured JSON
123
 
124
- See [FRONTEND_SPEC.md](FRONTEND_SPEC.md) for full API documentation.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
125
 
126
  ## Project Structure
127
 
128
  ```
129
  OptiQ/
130
- ├── config.py # Central configuration (incl. EgyptConfig)
131
- ├── api/
132
- │ ├── main.py # FastAPI entry point
 
133
  │ └── routes/
134
- │ ├── baseline.py # GET /api/baseline/{system}
135
- │ ├── optimize.py # POST /api/optimize
136
- │ ├── compare.py # POST /api/compare
137
- │ └── validate.py # GET /api/validate — hackathon answers
138
- ├── src/
139
- ├── grid/
140
- ├── loader.py # IEEE 33/118-bus via pandapower
141
- ├── power_flow.py # AC power flow + radiality checks
142
- │ └── reconfiguration.py # Classical branch-exchange
 
 
 
 
 
143
  │ ├── quantum/
144
- │ │ ├── hamiltonian.py # QUBO matrix construction
145
- │ │ ├── qaoa_reconfig.py # SA solver + QAOA (reduced)
146
- │ │ └── decoder.py # Decode quantum results
147
  │ ├── ai/
148
- │ │ ├── model.py # GNN architecture (SAGEConv)
149
- │ │ ├── dataset.py # PyG data conversion
150
- │ │ ├── physics_loss.py # Unsupervised physics loss
151
- │ │ ├── train.py # Training loop
152
- │ │ └── inference.py # Fast prediction + warm start
153
- │ ├── hybrid/
154
- │ │ └── pipeline.py # Quantum -> AI -> Classical orchestrator
155
- ── evaluation/
156
- └── metrics.py # Impact, footprint, Egypt scaling
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
157
  ├── scripts/
158
- │ └── benchmark.py # Full benchmark suite vs published literature
159
- ├── models/ # Saved GNN checkpoints
160
- ├── requirements.txt
161
- ├── FRONTEND_SPEC.md # API contract for Lovable Pro
162
- ├── REFERENCES.md # All external sources with numbered citations
163
- └── README.md
 
 
164
  ```
165
 
166
- ## Tech Stack
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
167
 
168
- - **pandapower 3.4** Power system simulation (IEEE test cases built-in)
169
- - **Qiskit 2.3** — Quantum circuits and QUBO optimization
170
- - **PyTorch 2.9 + PyG 2.7** Graph Neural Network (CUDA-accelerated)
171
- - **FastAPI** REST API
172
- - **Lovable Pro** Frontend dashboard
 
 
 
 
 
 
 
 
 
 
173
 
174
- ## How It Actually Works in Egypt
 
 
 
 
 
 
175
 
176
- All numbers below are computed by `scripts/benchmark.py` and served live by `GET /api/validate`.
177
 
178
- ### Real Implementation Plan
179
 
180
- Egypt's distribution grid is operated by **9 regional companies** under the Egyptian Electricity Holding Company (EEHC) [15]. The target entry point is **North Cairo Electricity Distribution Company (NCEDC)**, which is already deploying 500,000 smart meters with Iskraemeco [16][17] and has SCADA infrastructure.
181
 
182
- | Phase | Timeline | What Happens | Cost |
183
- |-------|----------|--------------|------|
184
- | Phase 0 (MVP) | Done | IEEE benchmark validated, matches published optimal | $0 |
185
- | Phase 1 (Pilot) | 3-6 months | 5-10 feeders in one NCEDC substation, shadow mode first | $10-20K |
186
- | Phase 2 (District) | 6-12 months | 100+ feeders, automated SCADA pipeline, verified savings | $50-100K |
187
- | Phase 3 (Cairo) | 1-2 years | 5,000+ feeders across NCEDC + South Cairo EDC | $500K-1M |
188
- | Phase 4 (National) | 2-3 years | All 9 distribution companies | $2-5M |
 
 
 
 
 
 
 
 
189
 
190
- **Step-by-step for Phase 1 pilot:**
191
- 1. Partner with NCEDC (they already have SCADA + smart meters)
192
- 2. Get read-only SCADA data for 5-10 feeders (bus loads, switch states, voltages)
193
- 3. Map feeder topology to pandapower format (impedances from utility records)
194
- 4. Run OptiQ in **shadow mode**: compute optimal switches but do NOT actuate
195
- 5. After 1 month proving accuracy, actuate on 1-2 feeders with motorised switches
 
 
 
 
 
 
 
 
 
 
 
 
 
 
196
 
197
- **Hardware needed:** None. Runs on a standard cloud VM. Uses existing SCADA.
 
 
198
 
199
- ### Is It One-Time or Recurring?
200
 
201
- **Recurring, not one-time.** The solution runs per feeder, every 15-60 minutes.
202
 
203
- A feeder is one radial distribution circuit (20-40 buses, serving 500-5,000 customers). Load patterns change hourly (morning/evening peaks), seasonally (Egypt summer AC doubles demand), and with new connections. The optimal switch configuration changes with load. Static one-time reconfiguration captures only ~40% of the benefit vs dynamic recurring optimisation.
 
 
 
204
 
205
- ### Pricing
206
 
207
- | Metric | Per Feeder |
208
- |--------|-----------|
209
- | Energy saved | 553,020 kWh/year |
210
- | Cost saved (subsidised rate) | $16,591/year |
211
- | Cost saved (real cost) | $44,242/year |
212
- | CO2 saved | 26.3 tonnes/year |
213
 
214
- | Pricing Model | Price | Value |
215
- |---------------|-------|-------|
216
- | **SaaS Subscription** | $200/feeder/month ($2,400/year) | 5.4% of savings -- immediate payback |
217
- | **Revenue Share** | 15% of verified savings | ~$6,636/feeder/year, zero upfront cost |
218
- | **Enterprise License** | $500K/year for up to 1,000 feeders | $500/feeder/year for large utilities |
219
 
220
- **Revenue projections:**
221
- - Pilot (10 feeders): $24,000/year revenue, $442,416/year saved for utility
222
- - Cairo (5,000 feeders): **$12M/year revenue**, **$221M/year saved for utility**
223
 
224
- ### Why OptiQ vs Existing Solutions?
225
 
226
- | Solution | Loss Reduction | Cost | Limitation |
227
- |----------|---------------|------|-----------|
228
- | **Manual switching** (status quo in Egypt) | 5-10% [9] | $0 software | Cannot adapt to load changes. Human error. Slow. |
229
- | **Full ADMS** (ABB/Siemens/GE) | 15-25% [9][22] | **$5-50 million** [22] | Massive CAPEX. 12-24 month deploy. New hardware required. |
230
- | **OptiQ** | **28-32%** | $200/feeder/month | Software-only. Zero CAPEX. Deploys in weeks. |
231
 
232
- OptiQ achieves the **published global optimal** (31.15% on IEEE 33-bus) [1], matching or exceeding results from PSO [5], GA [7], MILP [4], and all other published methods [7]. Full ADMS platforms use simple heuristics for reconfiguration (it's one small module in a huge platform) [22][23]. OptiQ is 10-100x cheaper and achieves better loss reduction because the entire system is purpose-built for this problem.
233
 
234
- The global ADMS market is $3.8B (2024) growing to $10.5B by 2030 [22]. OptiQ targets the reconfiguration-specific slice at a fraction of the price.
235
 
236
- ### Waste Elimination (Correct Framing)
237
 
238
- The correct comparison is **how much waste we eliminate from the grid**, not how much the solution consumes vs saves (it's software -- it consumes almost nothing).
 
 
 
 
 
 
 
 
 
 
239
 
240
  | Metric | Before OptiQ | After OptiQ |
241
  |--------|-------------|-------------|
@@ -243,31 +507,403 @@ The correct comparison is **how much waste we eliminate from the grid**, not how
243
  | **Waste eliminated** | -- | **553,020 kWh/year (31.15%)** |
244
  | Solution computational overhead | -- | 36.5 kWh/year (0.007% of savings) |
245
 
246
- ### Dependent Variables
 
 
 
 
247
 
248
- | Category | Count |
249
- |----------|-------|
250
- | Physical (bus loads, impedances, voltages) | 178 |
251
- | Algorithmic hyperparameters | 20 |
252
- | External assumptions | 3 |
253
- | **Decision variables (switch states)** | **5** |
254
- | **Grand total** | **201** |
255
 
256
  ### Impact at Scale
257
 
258
- | Scope | Savings | CO2 Saved | Cost Saved |
259
  |-------|---------|-----------|------------|
260
  | Single feeder | 553 MWh/year | 26.3 t/year | $44K/year |
261
  | Cairo (5,000 feeders) | 2.0 TWh/year | 1.0 Mt/year | $221M/year |
262
  | Egypt (all feeders) | 7.4 TWh/year | 3.7 Mt/year | $592M/year |
263
  | Global | 467 TWh/year | 222 Mt/year | -- |
264
 
265
- ### CO2 Trustworthiness
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
266
 
267
- Energy savings are computed from **pandapower's Newton-Raphson AC power flow** [25] -- an industry-standard, physics-validated solver derived from Kirchhoff's laws, used by grid operators worldwide. CO2 uses Egypt's grid factor (0.50 kg CO2/kWh for 88% gas) [12][21]. Annualisation assumes constant load; real-world savings are ~60-80% of this figure due to load variation. Even at 60%, a single feeder eliminates 332 MWh/year of waste.
268
 
269
- All bracketed numbers (e.g. [1], [12]) refer to [REFERENCES.md](REFERENCES.md) for full citations.
 
 
 
 
 
 
 
 
270
 
271
- ## License
272
 
273
- MIT
 
 
 
1
+ <p align="center">
2
+ <img src="https://img.shields.io/badge/Python-3.12-blue?style=flat-square&logo=python" />
3
+ <img src="https://img.shields.io/badge/Qiskit-2.3-6929C4?style=flat-square&logo=ibm" />
4
+ <img src="https://img.shields.io/badge/PyTorch-2.9-EE4C2C?style=flat-square&logo=pytorch" />
5
+ <img src="https://img.shields.io/badge/FastAPI-0.128-009688?style=flat-square&logo=fastapi" />
6
+ <img src="https://img.shields.io/badge/React-18-61DAFB?style=flat-square&logo=react" />
7
+ <img src="https://img.shields.io/badge/License-MIT-green?style=flat-square" />
8
+ </p>
9
 
10
+ # OptiQ — Hybrid Quantum-AI-Classical Grid Optimization Platform
11
 
12
+ > **AI-Powered Distribution Grid Reconfiguration SaaS.**
13
+ > Reduces grid losses by **31%+** through intelligent network reconfiguration using a three-stage hybrid pipeline: **Quantum Topology Search → GNN Warm-Start → Classical AC-Power-Flow Verification.**
14
 
15
+ **The first working prototype of the hybrid Quantum-AI-Classical optimization stack for power distribution networks** — a software-only solution that works on existing infrastructure with zero hardware changes.
16
 
17
+ ---
18
 
19
+ ## Table of Contents
 
 
 
 
20
 
21
+ 1. [Overview](#overview)
22
+ 2. [Results](#results)
23
+ 3. [Architecture](#architecture)
24
+ 4. [Hybrid Pipeline — How It Works](#hybrid-pipeline--how-it-works)
25
+ 5. [Mathematical Foundations](#mathematical-foundations)
26
+ 6. [Project Structure](#project-structure)
27
+ 7. [Backend API Endpoints](#backend-api-endpoints)
28
+ 8. [Frontend Pages](#frontend-pages)
29
+ 9. [Database Schema](#database-schema)
30
+ 10. [Authentication](#authentication)
31
+ 11. [Grid Visualization & Out-of-Service Lines](#grid-visualization--out-of-service-lines)
32
+ 12. [Evaluation & Impact Metrics](#evaluation--impact-metrics)
33
+ 13. [Egypt-Specific Scaling & Implementation](#egypt-specific-scaling--implementation)
34
+ 14. [Configuration](#configuration)
35
+ 15. [Installation](#installation)
36
+ 16. [Running the Application](#running-the-application)
37
+ 17. [Deployment with ngrok](#deployment-with-ngrok)
38
+ 18. [Environment Variables](#environment-variables)
39
+ 19. [Tech Stack](#tech-stack)
40
+ 20. [References](#references)
41
 
42
+ ---
43
+
44
+ ## Overview
45
+
46
+ Electric power distribution grids lose **5–13 %** of generated energy as resistive (I²R) losses in feeder cables. **Network reconfiguration** — opening / closing sectionalising and tie switches — can dramatically reduce these losses while maintaining supply to every bus.
47
+
48
+ OptiQ solves this NP-hard combinatorial problem with a **hybrid pipeline** that chains:
49
+
50
+ | Stage | Engine | Role |
51
+ |-------|--------|------|
52
+ | **1. Quantum / SA Topology Search** | Qiskit QAOA or Simulated Annealing | Explore the exponential configuration space, produce top-K candidate topologies |
53
+ | **2. AI Warm-Start** | 3-layer GraphSAGE GNN | Predict bus voltages instantly for each candidate (avoids expensive Newton–Raphson per candidate) |
54
+ | **3. Classical Verification** | pandapower Newton–Raphson AC-OPF | Verify the best candidate(s) with full AC power flow for engineering-grade accuracy |
55
+
56
+ The platform includes a **full SaaS frontend** (React + Vite + Tailwind CSS) with interactive grid visualization (React Flow), real-time optimization, ROI calculator, PDF reports, audit logs, and a digital-twin scenario simulator.
57
+
58
+ ---
59
+
60
+ ## Results
61
+
62
+ ### OptiQ vs Published Algorithms (IEEE 33-Bus)
63
 
64
  Many metaheuristics get trapped at local optima (~146 kW). OptiQ consistently finds the global optimal. All sources listed in [REFERENCES.md](REFERENCES.md).
65
 
 
85
  | Basic ADMS module (ABB/Siemens/GE) | 15-25% [9][22] | $5-50M [22] | Massive CAPEX. 12-24 month deploy. New hardware. |
86
  | **OptiQ** | **28-32%** | $200/feeder/month | Software-only. Zero CAPEX. Deploys in weeks. |
87
 
88
+ ### Detailed Results
89
 
90
  | Method | Loss (kW) | Reduction | Min V (pu) | Violations | Time (s) |
91
  |--------|-----------|-----------|------------|------------|----------|
 
105
  | 1.15x | 274.58 | 187.90 | 31.6% |
106
  | 1.30x | 359.82 | 243.80 | 32.2% |
107
 
108
+ ---
109
+
110
  ## Architecture
111
 
112
  ```
113
+ ┌──────────────────────────────────────────────────────────────┐
114
+ │ React SPA (Vite) │
115
+ │ Landing · Dashboard · Grid View · ROI · Audit · Reports │
116
+ └──────────┬───────────────────────────────────────────────────┘
117
+ │ REST / JSON
118
+ ┌──────────▼───────────────────────────────────────────────────┐
119
+ │ FastAPI (uvicorn + ngrok tunnel) │
120
+ │ /api/baseline · /api/optimize · /api/grid · /api/auth ... │
121
+ └──────────┬─────────────┬─────────────┬───────────────────────┘
122
+ │ │ │
123
+ ┌──────▼──────┐ ┌────▼─────┐ ┌────▼──────┐
124
+ │ Quantum / │ │ AI / │ │ Classical │
125
+ │ SA Search │ │ GNN │ │ pandapower│
126
+ │ (Qiskit) │ │ (PyTorch)│ │ (AC PF) │
127
+ └─────────────┘ └──────────┘ └───────────┘
128
+
129
+ ┌─────▼─────┐
130
+ │ SQLite │
131
+ │ optiq.db │
132
+ └───────────┘
133
+ ```
134
+
135
+ **Pipeline flow:**
136
+
137
+ ```
138
+ IEEE Test Data (pandapower built-in)
139
 
140
  [Quantum: SA on QUBO] ──→ Top-5 candidate topologies
141
 
 
146
  Best Solution ──→ FastAPI ──→ Frontend Dashboard
147
  ```
148
 
149
+ ---
150
 
151
+ ## Hybrid Pipeline — How It Works
152
 
153
+ The core pipeline is implemented in `src/hybrid/pipeline.py` (`run_hybrid_pipeline()`):
 
154
 
155
+ ### Stage 1 — Quantum / Simulated Annealing Topology Search
156
 
157
+ **File:** `src/quantum/qaoa_reconfig.py`
 
 
158
 
159
+ 1. Build a **QUBO (Quadratic Unconstrained Binary Optimization)** matrix from the network graph.
160
+ 2. Each binary decision variable xᵢ represents a line: `1 = open (out of service)`, `0 = closed`.
161
+ 3. **Simulated Annealing** (primary solver):
162
+ - Proposes swap moves (close one tie line, open one feeder line).
163
+ - Checks radiality (connected tree for distribution networks).
164
+ - Runs quick AC power flow via pandapower.
165
+ - Accepts/rejects via **Metropolis criterion**.
166
+ - Multi-restart: 5 restarts × 500 iterations, cooling rate 0.99.
167
+ 4. Alternatively, a **reduced QAOA** solver (`solve_qaoa_reduced`) can run on ≤15 qubits using Qiskit's `StatevectorSampler` + COBYLA optimizer.
168
+ 5. A **QUBO export** function (`get_qubo_for_qpu`) prepares the matrix for external QPU submission (D-Wave / IBM Quantum).
169
+ 6. Output: **Top-K candidate topologies** ranked by total losses.
170
 
171
+ ### Stage 2 — GNN Warm-Start
 
 
 
 
 
 
172
 
173
+ **File:** `src/ai/model.py`
174
 
175
+ 1. A pre-trained **3-layer GraphSAGE GNN** (`OptiQGNN`) predicts bus voltage magnitudes for each candidate topology.
176
+ 2. Input features per node (5-dim): `[Pd, Qd, Vm_init, is_slack, is_gen]`.
177
+ 3. Input features per edge (3-dim): `[R, X, in_service]`.
178
+ 4. Architecture: 3 × SAGEConv layers with **residual connections**, **LayerNorm**, and **dropout**.
179
+ 5. Output: Per-bus voltage magnitude clamped to [0.90, 1.10] p.u. via sigmoid scaling.
180
+ 6. Training uses a **physics-informed loss** with dynamic Lagrange multipliers (DeepOPF-NGT inspired).
181
 
182
+ ### Stage 3 — Classical AC Power Flow Verification
183
 
184
+ **File:** `src/grid/power_flow.py`
185
+
186
+ 1. The best candidate from Stage 2 is passed to **pandapower's Newton–Raphson AC power flow solver**.
187
+ 2. Extracts full results: total losses (kW), voltage profile, line loadings, voltage violations.
188
+ 3. Returns the verified optimal topology with engineering-grade accuracy.
189
+
190
+ ### Fallback — Classical Branch-Exchange
191
+
192
+ **File:** `src/grid/reconfiguration.py`
193
+
194
+ A heuristic branch-exchange search (`branch_exchange_search`) is used as:
195
+ - Baseline comparator for the hybrid pipeline.
196
+ - Fallback when quantum/AI stages are unavailable.
197
+ - Iteratively swaps open/closed line pairs, keeping swaps that reduce losses, until no improving swap exists.
198
+
199
+ ---
200
+
201
+ ## Mathematical Foundations
202
+
203
+ ### QUBO Formulation
204
+
205
+ The cost Hamiltonian (`src/quantum/hamiltonian.py`) constructs:
206
+
207
+ $$\min_{\mathbf{x}} \; \mathbf{x}^T \mathbf{Q} \, \mathbf{x}$$
208
+
209
+ Where:
210
+ - **Objective (diagonal):** $Q_{ii} = -c_i + P(1 - 2K)$
211
+ - **Coupling (off-diagonal):** $Q_{ij} = P \quad \forall\, i < j$
212
+ - **Loss coefficient:** $c_i = r_i \cdot P_i^2 / V^2$ (per-line approximate resistive loss)
213
+ - **Radiality constraint:** Penalty $P \cdot \left(\sum x_i - K\right)^2$ ensures exactly $K$ lines are open
214
+ - $K = $ number of tie switches (5 for IEEE 33-bus)
215
+
216
+ ### Simulated Annealing — Metropolis Criterion
217
+
218
+ $$P(\text{accept}) = \begin{cases} 1 & \text{if } \Delta E < 0 \\ e^{-\Delta E / T} & \text{otherwise} \end{cases}$$
219
+
220
+ Cooling schedule: $T \leftarrow 0.99 \cdot T$ per iteration.
221
+
222
+ ### GNN Physics-Informed Loss
223
+
224
+ $$\mathcal{L} = \underbrace{\| V_{pred} - V_{true} \|^2}_{\text{MSE}} + \lambda_v \cdot \underbrace{\text{mean}\left[\max(0, v_{min} - V_m)^2 + \max(0, V_m - v_{max})^2\right]}_{\text{Voltage bound violation}}$$
225
+
226
+ With **dynamic Lagrange multiplier** update (dual gradient ascent):
227
 
228
+ $$\lambda_v \leftarrow \max\!\left(0,\; \lambda_v + \eta \cdot \text{violation}\right)$$
229
 
230
+ ### Power Flow (Newton–Raphson)
 
 
 
 
231
 
232
+ pandapower solves the full AC power flow equations:
233
+
234
+ $$S_i = V_i \sum_{k=1}^{n} Y_{ik}^* V_k^*$$
235
+
236
+ Where $S_i$ is complex power injection, $V_i$ is bus voltage, and $Y_{ik}$ is the bus admittance matrix element. The Newton–Raphson method iteratively solves the non-linear system using the Jacobian matrix.
237
+
238
+ ### Impact Calculations
239
+
240
+ | Metric | Formula |
241
+ |--------|---------|
242
+ | **Loss reduction (kW)** | $\Delta P = P_{baseline} - P_{optimized}$ |
243
+ | **Loss reduction (%)** | $100 \times \Delta P / P_{baseline}$ |
244
+ | **Annual energy saved (MWh/yr)** | $\Delta P \times 8760 / 1000$ |
245
+ | **CO₂ saved (tonnes/yr)** | $\text{Energy saved (kWh)} \times \epsilon_f / 1000$ |
246
+ | **Cost saved (USD/yr)** | $\text{Energy saved (kWh)} \times \text{price}_{\$/kWh}$ |
247
+
248
+ Where $\epsilon_f = 0.475$ kg CO₂/kWh (global grid average) and price = \$0.10/kWh.
249
+
250
+ ---
251
 
252
  ## Project Structure
253
 
254
  ```
255
  OptiQ/
256
+ ├── api/ # FastAPI backend
257
+ │ ├── main.py # Entry point, router registration, ngrok startup
258
+ │ ├── auth.py # Auth middleware (Bearer token verification)
259
+ │ ├── database.py # SQLite DB (users, usage, audit_logs, feeders)
260
  │ └── routes/
261
+ │ ├── auth_routes.py # POST /auth/register, /auth/login
262
+ │ ├── baseline.py # GET /baseline/{system}
263
+ │ ├── optimize.py # POST /optimize
264
+ │ ├── compare.py # POST /compare
265
+ ├── simulate.py # POST /simulate, /simulate/toggle
266
+ ├── grid.py # GET /grid, POST /grid/set-out-of-service
267
+ ├── digital_twin.py # POST /digital-twin
268
+ ├── roi.py # POST /roi, GET /roi/pricing
269
+ │ ├── audit.py # GET /audit, /audit/summary
270
+ │ ├── report.py # POST /report (PDF generation)
271
+ │ ├── usage.py # GET /usage, /usage/stats
272
+ │ └── validate.py # GET /validate/{system}
273
+
274
+ ├── src/ # Core computation engine
275
  │ ├── quantum/
276
+ │ │ ├── qaoa_reconfig.py # SA solver, QAOA solver, QUBO export
277
+ │ ├── hamiltonian.py # QUBO matrix construction
278
+ │ │ └── decoder.py # Decode binary strings to topologies
279
  │ ├── ai/
280
+ │ │ ├── model.py # GraphSAGE GNN (OptiQGNN)
281
+ │ │ ├── physics_loss.py # Physics-informed + dynamic Lagrange loss
282
+ │ │ ├── train.py # Training loop with scenario generation
283
+ │ │ ├── inference.py # GNN inference on new topologies
284
+ │ │ └── dataset.py # PyG data construction
285
+ │ ├── grid/
286
+ │ │ ├── loader.py # Load IEEE test cases (33-bus, 118-bus)
287
+ │ ├── power_flow.py # AC power flow, topology validation
288
+ │ │ └── reconfiguration.py # Classical branch-exchange heuristic
289
+ │ ├── evaluation/
290
+ │ │ └── metrics.py # Impact, business model, Egypt scaling
291
+ │ └── hybrid/
292
+ │ └── pipeline.py # 3-stage hybrid pipeline orchestration
293
+
294
+ ├── frontend/ # React SPA
295
+ │ ├── src/
296
+ │ │ ├── App.jsx # Router + page transitions
297
+ │ │ ├── index.css # Tailwind + custom animations
298
+ │ │ ├── contexts/
299
+ │ │ │ └── AuthContext.jsx # SQLite-backed auth context
300
+ │ │ ├── services/
301
+ │ │ │ └── api.js # API client with Bearer token
302
+ │ │ ├── components/
303
+ │ │ │ ├── Navbar.jsx # Frosted glass navbar with glow effects
304
+ │ │ │ └── Footer.jsx # Animated footer
305
+ │ │ └── pages/
306
+ │ │ ├── LandingPage.jsx # Hero, features, stats, CTA
307
+ │ │ ├── DashboardPage.jsx # Optimization controls, results, charts
308
+ │ │ ├── GridViewPage.jsx # Interactive React Flow graph, out-of-service panel
309
+ │ │ ├── ROICalculatorPage.jsx # Calculate savings per feeder
310
+ │ │ ├── AuditPage.jsx # Audit log history with before/after
311
+ │ │ ├── LoginPage.jsx # Email + password sign in
312
+ │ │ ├── SignupPage.jsx # Registration form
313
+ │ │ ├── PricingPage.jsx # SaaS tier comparison
314
+ │ │ └── AboutPage.jsx # Team, methodology, tech stack
315
+ │ ├── package.json
316
+ │ ├── vite.config.js
317
+ │ └── tailwind.config.js
318
+
319
+ ├── config.py # Centralized configuration (dataclasses)
320
+ ├── requirements.txt # Python dependencies
321
+ ├── Dockerfile # Multi-stage Docker build
322
+ ├── docker-compose.yml # Docker Compose for one-command deployment
323
+ ├── .dockerignore # Docker build exclusions
324
  ├── scripts/
325
+ │ └── benchmark.py # Full benchmark suite vs published literature
326
+ ├── models/ # Saved GNN checkpoints
327
+ ├── start.sh # Launch script (backend + ngrok)
328
+ ├── .env # Environment variables (git-ignored)
329
+ ├── .env.example # Template for environment variables
330
+ ├── optiq.db # SQLite database (auto-created)
331
+ ├── FRONTEND_SPEC.md # API contract documentation
332
+ └── REFERENCES.md # All external sources with numbered citations
333
  ```
334
 
335
+ ---
336
+
337
+ ## Backend API Endpoints
338
+
339
+ All endpoints are prefixed with `/api`.
340
+
341
+ ### Authentication
342
+ | Method | Endpoint | Description |
343
+ |--------|----------|-------------|
344
+ | POST | `/auth/register` | Register new user (email, password, display_name) → returns token |
345
+ | POST | `/auth/login` | Login with email + password → returns token |
346
+
347
+ ### Grid & Power Flow
348
+ | Method | Endpoint | Description |
349
+ |--------|----------|-------------|
350
+ | GET | `/grid?system=case33bw` | Get grid topology (nodes + branches) for visualization |
351
+ | POST | `/grid/set-out-of-service` | Set specific lines out of service, run power flow |
352
+ | GET | `/grid/{system}/switches` | Get switch states for all lines |
353
+ | GET | `/baseline/{system}` | Run baseline AC power flow on default topology |
354
+
355
+ ### Optimization
356
+ | Method | Endpoint | Description |
357
+ |--------|----------|-------------|
358
+ | POST | `/optimize` | Run hybrid pipeline (quantum SA → GNN → AC PF) |
359
+ | POST | `/compare` | Compare Classical vs Quantum+Classical vs Full Hybrid |
360
+ | POST | `/simulate` | Run simulation with custom parameters |
361
+ | POST | `/simulate/toggle` | Toggle individual line switches |
362
+
363
+ ### Analytics & Reporting
364
+ | Method | Endpoint | Description |
365
+ |--------|----------|-------------|
366
+ | POST | `/roi` | Calculate ROI for given parameters |
367
+ | GET | `/roi/pricing` | Get SaaS pricing tiers |
368
+ | GET | `/roi/comparison` | Compare pricing models |
369
+ | GET | `/audit` | Get user's audit log history |
370
+ | GET | `/audit/summary` | Aggregated audit statistics |
371
+ | POST | `/report` | Generate downloadable PDF report |
372
+ | GET | `/report/data` | Get report data as JSON |
373
+ | GET | `/usage` | Get usage history |
374
+ | GET | `/usage/stats` | Get aggregated user stats |
375
+
376
+ ### System
377
+ | Method | Endpoint | Description |
378
+ |--------|----------|-------------|
379
+ | GET | `/health` | Health check |
380
+ | GET | `/validate/{system}` | Validate a network system |
381
+ | POST | `/digital-twin` | Run digital-twin scenario simulation |
382
+ | GET | `/digital-twin/scenarios` | Get predefined scenarios |
383
+
384
+ ---
385
+
386
+ ## Frontend Pages
387
 
388
+ | Page | Route | Description |
389
+ |------|-------|-------------|
390
+ | **Landing** | `/` | Hero section with shimmer effect, feature showcases, animated stats, CTA |
391
+ | **Login** | `/login` | Email + password sign in (SQLite-backed) |
392
+ | **Sign Up** | `/signup` | Registration with name, email, password, benefit highlights |
393
+ | **Dashboard** | `/dashboard` | System selector, optimization controls, results charts, power flow stats |
394
+ | **Grid View** | `/grid` | Interactive React Flow graph with custom BusNode components, out-of-service line panel |
395
+ | **ROI Calculator** | `/roi` | Calculate savings for given # of feeders & electricity price |
396
+ | **Audit Log** | `/audit` | Full history of optimizations with before/after loss comparison |
397
+ | **Pricing** | `/pricing` | SaaS tier comparison (Starter $199, Professional $499, Enterprise custom) |
398
+ | **About** | `/about` | Team bios, methodology explanation, technology stack cards |
399
+
400
+ ### UI Animations
401
+
402
+ All pages feature modern CSS animations defined in `index.css` and `tailwind.config.js`:
403
 
404
+ - **Page transitions:** `animate-page-in` slide-up + fade-in on route change (keyed by pathname)
405
+ - **Staggered elements:** `stagger-1` through `stagger-5` — sequential fade-in with 100ms delays
406
+ - **Floating backgrounds:** `animate-float-slow` — decorative gradient orbs that drift vertically
407
+ - **Shimmer effect:** `animate-shimmer` — gradient sweep on hero text
408
+ - **Hover interactions:** `hover-lift` (translateY -4px) + `glow-ring` (box-shadow pulse)
409
+ - **Navbar:** `backdrop-blur-md` frosted glass with hover-glow brand icon
410
+ - **Reduced motion:** Respects `prefers-reduced-motion` media query
411
 
412
+ ---
413
 
414
+ ## Database Schema
415
 
416
+ The SQLite database (`optiq.db`) is auto-created on first run via `init_db()`.
417
 
418
+ ### `users` Table
419
+ | Column | Type | Description |
420
+ |--------|------|-------------|
421
+ | id | INTEGER PK | Auto-increment |
422
+ | firebase_uid | TEXT UNIQUE | User identifier (format: `user_<hex>`) |
423
+ | email | TEXT UNIQUE | User email |
424
+ | display_name | TEXT | Display name |
425
+ | password_hash | TEXT | SHA-256 salted password hash |
426
+ | password_salt | TEXT | Random 16-byte hex salt |
427
+ | created_at | TIMESTAMP | Registration time |
428
+ | last_login | TIMESTAMP | Last login time |
429
+ | total_optimizations | INTEGER | Cumulative optimization count |
430
+ | total_energy_saved_kwh | REAL | Cumulative energy savings |
431
+ | total_co2_saved_kg | REAL | Cumulative CO₂ savings |
432
+ | total_money_saved_usd | REAL | Cumulative cost savings |
433
 
434
+ ### `usage` Table
435
+ Tracks every optimization run: user_id, system, method, load_multiplier, baseline/optimized losses, energy/CO₂/money saved, computation time, shadow mode flag, switches changed.
436
+
437
+ ### `audit_logs` Table
438
+ Full audit trail: action, system, method, details, baseline/optimized losses, loss reduction %, annual energy/CO₂/cost savings, open lines before/after.
439
+
440
+ ### `feeders` Table
441
+ Multi-feeder management: user_id, name, system type, created_at.
442
+
443
+ ---
444
+
445
+ ## Authentication
446
+
447
+ OptiQ uses **SQLite-based authentication** (no external services required):
448
+
449
+ 1. **Registration:** `POST /api/auth/register` — hashes password with SHA-256 + random salt, creates user in `optiq.db`, returns a Bearer token.
450
+ 2. **Login:** `POST /api/auth/login` — verifies password against stored hash, returns Bearer token.
451
+ 3. **Token format:** `uid:email:displayName` — sent as `Authorization: Bearer <token>` header.
452
+ 4. **Middleware:** `api/auth.py` parses the Bearer token and injects user info into route handlers via FastAPI `Depends()`.
453
+ 5. **Frontend:** `AuthContext.jsx` manages session state via `localStorage`, syncs token with the API client.
454
 
455
+ No Firebase, no external OAuth, no third-party dependencies. The system is fully self-contained.
456
+
457
+ ---
458
 
459
+ ## Grid Visualization & Out-of-Service Lines
460
 
461
+ ### Supported Systems
462
 
463
+ | System | Buses | Lines | Topology | Layout Algorithm |
464
+ |--------|-------|-------|----------|-----------------|
465
+ | **IEEE 33-bus** | 33 | 37 (32 feeder + 5 tie) | Radial (tree) | BFS layered |
466
+ | **IEEE 118-bus** | 118 | 186 | Meshed (loops) | Kamada-Kawai |
467
 
468
+ ### Out-of-Service Lines
469
 
470
+ Users can manually set lines as out of service via the Grid View page:
 
 
 
 
 
471
 
472
+ - Enter line IDs in the out-of-service panel
473
+ - Click "Apply & Run Power Flow" to validate and compute results
474
+ - **Distribution networks (33-bus):** Must maintain a connected **tree** (radial) topology
475
+ - **Transmission networks (118-bus):** Only requires **connectivity** (mesh loops allowed)
 
476
 
477
+ The validation function `check_topology_valid()` in `power_flow.py` automatically detects the system type and enforces the appropriate constraint.
 
 
478
 
479
+ ### Visualization Features
480
 
481
+ - **Custom BusNode components** with React Flow Handle elements for proper edge connections
482
+ - **Color-coded nodes:** Slack bus (green), generator (blue), load (orange)
483
+ - **Line status indicators:** In-service (solid), out-of-service (dashed/red)
484
+ - **Interactive controls:** Pan, zoom, minimap, drag-to-rearrange
 
485
 
486
+ ---
487
 
488
+ ## Evaluation & Impact Metrics
489
 
490
+ ### What Gets Calculated (`src/evaluation/metrics.py`)
491
 
492
+ | Category | Metrics |
493
+ |----------|---------|
494
+ | **Power Flow** | Total losses (kW/MW), loss %, voltage profile (min/max/mean p.u.), voltage violations count, per-line loadings (%), per-line losses (kW) |
495
+ | **Optimization Impact** | Loss reduction (kW & %), annual energy saved (MWh/yr), CO₂ reduction (tonnes/yr), cost savings (USD/yr) |
496
+ | **Environmental** | Trees planted equivalent, cars removed equivalent |
497
+ | **Business Model** | SaaS revenue per feeder ($200/month), revenue-share (15%), enterprise licensing |
498
+ | **Solution Footprint** | Energy/CO₂ cost of running the optimization (150W TDP assumed) |
499
+ | **Net Benefit** | Waste eliminated minus solution overhead (typically 0.007% overhead) |
500
+ | **Dependent Variables** | ~178 physical, ~20 algorithmic, 3 external, 5 decision = 201 total |
501
+
502
+ ### Waste Elimination Framework
503
 
504
  | Metric | Before OptiQ | After OptiQ |
505
  |--------|-------------|-------------|
 
507
  | **Waste eliminated** | -- | **553,020 kWh/year (31.15%)** |
508
  | Solution computational overhead | -- | 36.5 kWh/year (0.007% of savings) |
509
 
510
+ ---
511
+
512
+ ## Egypt-Specific Scaling & Implementation
513
+
514
+ ### Scaling Parameters
515
 
516
+ | Parameter | Value | Source |
517
+ |-----------|-------|--------|
518
+ | Grid emission factor | 0.50 kg CO₂/kWh | IEA 2022 (88% natural gas) |
519
+ | Total generation | 215.8 TWh | IEA 2022 |
520
+ | T&D losses | 17 % of output | FY 2022/23 target |
521
+ | Distribution losses | 11 % of output | Estimated |
522
+ | Cairo consumption share | 27 % | Estimated |
523
 
524
  ### Impact at Scale
525
 
526
+ | Scope | Savings | CO₂ Saved | Cost Saved |
527
  |-------|---------|-----------|------------|
528
  | Single feeder | 553 MWh/year | 26.3 t/year | $44K/year |
529
  | Cairo (5,000 feeders) | 2.0 TWh/year | 1.0 Mt/year | $221M/year |
530
  | Egypt (all feeders) | 7.4 TWh/year | 3.7 Mt/year | $592M/year |
531
  | Global | 467 TWh/year | 222 Mt/year | -- |
532
 
533
+ ### Implementation Plan
534
+
535
+ | Phase | Timeline | Scope | Cost |
536
+ |-------|----------|-------|------|
537
+ | Phase 0 (MVP) | Done | IEEE benchmark validated | $0 |
538
+ | Phase 1 (Pilot) | 3-6 months | 5-10 feeders, NCEDC substation, shadow mode | $10-20K |
539
+ | Phase 2 (District) | 6-12 months | 100+ feeders, automated SCADA pipeline | $50-100K |
540
+ | Phase 3 (Cairo) | 1-2 years | 5,000+ feeders across NCEDC + SCEDC | $500K-1M |
541
+ | Phase 4 (National) | 2-3 years | All 9 distribution companies | $2-5M |
542
+
543
+ ### Pricing Model
544
+
545
+ | Model | Price | Value Proposition |
546
+ |-------|-------|-------------------|
547
+ | **SaaS Subscription** | $200/feeder/month | 5.4% of savings — immediate payback |
548
+ | **Revenue Share** | 15% of verified savings | ~$6,636/feeder/year, zero upfront cost |
549
+ | **Enterprise License** | $500K/year (up to 1,000 feeders) | $500/feeder/year for large utilities |
550
+
551
+ ### CO₂ Trustworthiness
552
+
553
+ Energy savings are computed from **pandapower's Newton–Raphson AC power flow** — an industry-standard, physics-validated solver derived from Kirchhoff's laws. CO₂ uses Egypt's grid factor (0.50 kg CO₂/kWh for 88% gas). Annualisation assumes constant load; real-world savings are ~60-80% of this figure due to load variation.
554
+
555
+ ---
556
+
557
+ ## Configuration
558
+
559
+ All hyperparameters are centralized in `config.py` via Python dataclasses:
560
+
561
+ | Section | Key Parameters |
562
+ |---------|---------------|
563
+ | **Grid** | `system=case33bw`, `v_min=0.95`, `v_max=1.05`, `n_tie_switches=5` |
564
+ | **Quantum** | `reps=2`, `shots=250,000`, `top_k=5`, `radiality_penalty=100`, `optimizer=COBYLA`, `maxiter=200` |
565
+ | **AI** | `hidden_dim=64`, `num_layers=3`, `dropout=0.1`, `lr=1e-3`, `epochs=200`, `n_scenarios=2000` |
566
+ | **Impact** | `emission_factor=0.475` kg CO₂/kWh, `electricity_price=$0.10/kWh`, `hours_per_year=8760` |
567
+ | **Egypt** | `emission_factor=0.50`, `total_generation=215.8 TWh`, `td_loss=17%`, `dist_loss=11%` |
568
+ | **API** | `host=0.0.0.0`, `port=8000`, `reload=True`, `cors_origins=["*"]` |
569
+
570
+ ---
571
+
572
+ ## Installation
573
+
574
+ ### Prerequisites
575
+ - Python 3.11+ (tested on 3.12)
576
+ - Node.js 18+ (for frontend build)
577
+ - pip / conda
578
+
579
+ ### Backend Setup
580
+
581
+ ```bash
582
+ # Clone the repository
583
+ git clone https://github.com/your-org/OptiQ.git
584
+ cd OptiQ
585
+
586
+ # Create virtual environment
587
+ python -m venv venv
588
+ source venv/bin/activate
589
+
590
+ # Install Python dependencies
591
+ pip install -r requirements.txt
592
+
593
+ # (Optional) Train the GNN model (~60 seconds)
594
+ python -c "
595
+ import sys; sys.path.insert(0, '.')
596
+ from src.ai.train import train
597
+ train(n_scenarios=1000, epochs=100, verbose=True)
598
+ "
599
+ ```
600
+
601
+ ### Frontend Setup
602
+
603
+ ```bash
604
+ cd frontend
605
+ npm install
606
+ npm run build # Produces frontend/dist/ served by FastAPI
607
+ cd ..
608
+ ```
609
+
610
+ ---
611
+
612
+ ## Running the Application
613
+
614
+ ### Quick Start
615
+
616
+ ```bash
617
+ # Using the start script (loads .env automatically):
618
+ ./start.sh
619
+
620
+ # Or manually:
621
+ source venv/bin/activate
622
+ python -m uvicorn api.main:app --host 0.0.0.0 --port 8000 --reload
623
+ ```
624
+
625
+ The application is available at **http://localhost:8000**. The built React frontend is served directly from FastAPI.
626
+
627
+ ### Development (separate frontend dev server)
628
+
629
+ ```bash
630
+ # Terminal 1 — Backend
631
+ source venv/bin/activate
632
+ python -m uvicorn api.main:app --host 0.0.0.0 --port 8000 --reload
633
+
634
+ # Terminal 2 — Frontend (with hot reload)
635
+ cd frontend
636
+ npm run dev
637
+ ```
638
+
639
+ ### Run the Benchmark
640
+
641
+ ```bash
642
+ source venv/bin/activate
643
+ python scripts/benchmark.py
644
+ ```
645
+
646
+ ---
647
+
648
+ ## Deployment with ngrok
649
+
650
+ OptiQ uses **pyngrok** (Python library) to create a public tunnel automatically on server startup — no CLI tool needed.
651
+
652
+ ### Setup
653
+
654
+ 1. Copy `.env.example` to `.env`:
655
+ ```bash
656
+ cp .env.example .env
657
+ ```
658
+
659
+ 2. Add your ngrok authtoken (get from [ngrok.com](https://ngrok.com)):
660
+ ```env
661
+ NGROK_AUTHTOKEN=your_authtoken_here
662
+ NGROK_DOMAIN=your-custom-domain.ngrok-free.app
663
+ ```
664
+
665
+ 3. Start the server — ngrok tunnel opens automatically:
666
+ ```bash
667
+ ./start.sh
668
+ ```
669
+
670
+ The console will show:
671
+ ```
672
+ ✓ ngrok tunnel open → https://your-custom-domain.ngrok-free.app
673
+ ```
674
+
675
+ If `NGROK_AUTHTOKEN` is not set, the server starts normally without a tunnel.
676
+
677
+ ### CRL Workaround
678
+
679
+ On first start, the server writes a clean ngrok v3 config with `crl_noverify: true` to work around the known CRL verification issue in ngrok v3. No manual config is needed.
680
+
681
+ ---
682
+
683
+ ## Deployment with Docker
684
+
685
+ ### Quick Start (Docker Compose)
686
+
687
+ ```bash
688
+ # Build and run
689
+ docker compose up -d
690
+
691
+ # View logs
692
+ docker compose logs -f optiq
693
+
694
+ # Stop
695
+ docker compose down
696
+ ```
697
+
698
+ The app is available at **http://localhost:8000**.
699
+
700
+ ### Build Image Only
701
+
702
+ ```bash
703
+ docker build -t optiq .
704
+ docker run -p 8000:8000 -e OPTIQ_MOCK_AUTH=true optiq
705
+ ```
706
+
707
+ ### Environment Variables via Docker
708
+
709
+ Pass any env var at runtime:
710
+
711
+ ```bash
712
+ docker run -p 8000:8000 \
713
+ -e OPTIQ_MOCK_AUTH=true \
714
+ -e NGROK_AUTHTOKEN=your_token \
715
+ -e NGROK_DOMAIN=your-domain.ngrok-free.app \
716
+ -v optiq-data:/app/data \
717
+ optiq
718
+ ```
719
+
720
+ ### Docker Architecture
721
+
722
+ The image uses a **multi-stage build**:
723
+ 1. **Stage 1 (Node 20):** Builds the React frontend → `frontend/dist/`
724
+ 2. **Stage 2 (Python 3.12-slim):** Installs Python dependencies + copies source + built frontend
725
+
726
+ The `docker-compose.yml` persists the SQLite database and GNN models via named volumes.
727
+
728
+ ---
729
+
730
+ ## Environment Variables
731
+
732
+ Create a `.env` file (or copy from `.env.example`):
733
+
734
+ | Variable | Default | Description |
735
+ |----------|---------|-------------|
736
+ | `NGROK_AUTHTOKEN` | _(empty)_ | ngrok authentication token (optional) |
737
+ | `NGROK_DOMAIN` | _(empty)_ | Custom ngrok domain (optional) |
738
+ | `PORT` | `8000` | Server port |
739
+ | `OPTIQ_MOCK_AUTH` | `true` | `true` = accept any Bearer token; `false` = verify against SQLite |
740
+
741
+ ---
742
+
743
+ ## Tech Stack
744
+
745
+ ### Backend
746
+ | Technology | Version | Purpose |
747
+ |-----------|---------|---------|
748
+ | **Python** | 3.12 | Runtime |
749
+ | **FastAPI** | 0.128+ | REST API framework |
750
+ | **uvicorn** | 0.40+ | ASGI server |
751
+ | **pandapower** | 3.4+ | AC power flow (Newton–Raphson), IEEE test cases |
752
+ | **Qiskit** | 2.3+ | Quantum computing (QAOA circuits, QUBO) |
753
+ | **PyTorch** | 2.9+ | GNN training & inference |
754
+ | **PyTorch Geometric** | 2.7+ | Graph neural network layers (SAGEConv) |
755
+ | **NetworkX** | 3.4+ | Graph algorithms (topology validation, layouts) |
756
+ | **SQLite** | built-in | User database, usage tracking, audit logs |
757
+ | **pyngrok** | 7.0+ | ngrok tunnel from Python (public URL) |
758
+ | **python-dotenv** | 1.0+ | Load .env environment variables |
759
+
760
+ ### Frontend
761
+ | Technology | Version | Purpose |
762
+ |-----------|---------|---------|
763
+ | **React** | 18.3 | UI library |
764
+ | **Vite** | 5.4 | Build tool & dev server |
765
+ | **Tailwind CSS** | 3.4 | Utility-first CSS framework |
766
+ | **React Flow** | 11.11 | Interactive graph visualization |
767
+ | **Recharts** | 2.13 | Charts & data visualization |
768
+ | **Lucide React** | 0.460 | Icon library |
769
+ | **React Router** | 6.28 | Client-side routing |
770
+
771
+ ---
772
+
773
+ ## References
774
+
775
+ 1. Baran & Wu, "Network reconfiguration in distribution systems for loss reduction and load balancing," IEEE Trans. Power Delivery, 1989.
776
+ 2. Civanlar et al., "Distribution feeder reconfiguration for loss reduction," IEEE Trans. Power Delivery, 1988.
777
+ 3. Goswami & Basu, "A new algorithm for the reconfiguration of distribution feeders for loss minimization," IEEE Trans. Power Delivery, 1992.
778
+ 4. Jabr et al., "Minimum Loss Network Reconfiguration Using Mixed-Integer Convex Programming," IEEE Trans. Power Systems, 2012.
779
+ 5. Sulaima et al., "A DNR for loss minimization by using improved PSO," IJICIC, 2014.
780
+ 6. Pereira et al., "Branch Exchange + Clustering," Applied Sciences, 2023.
781
+ 7. Various authors — GA, SA, and other metaheuristics for IEEE 33-bus reconfiguration.
782
+ 8. Qiskit SDK 2.x — Migration from 1.x to 2.x (arXiv:2512.08245).
783
+ 9. Egyptian Electricity Holding Company (EEHC) annual reports.
784
+ 10. Hamilton et al., "Inductive Representation Learning on Large Graphs," NeurIPS 2017 (GraphSAGE).
785
+ 11. Thurner et al., "pandapower — An Open-Source Python Tool for Convenient Modeling, Analysis, and Optimization of Electric Power Systems," IEEE Trans. Power Systems, 2018.
786
+ 12. IEA Egypt Energy Data, Country profile 2022.
787
+ 13. DeepOPF-NGT — Physics-informed neural network with dynamic Lagrange multipliers.
788
+
789
+ All bracketed numbers (e.g. [1], [12]) refer to the [Full References](#full-references) section below for full citations.
790
+
791
+ ---
792
+
793
+ ## Full References
794
+
795
+ All externally-sourced numbers in this project are listed below with their original source.
796
+
797
+ ### IEEE 33-Bus Test System
798
+
799
+ - **[1]** M. E. Baran and F. F. Wu, "Network reconfiguration in distribution systems for loss reduction and load balancing," *IEEE Trans. Power Delivery*, vol. 4, no. 2, pp. 1401-1407, Apr. 1989. — Source of the IEEE 33-bus benchmark. Base case losses: 202.67 kW. Optimal reconfiguration: 139.55 kW (31.15% reduction). Open switches: 7, 9, 14, 32, 37 (1-indexed).
800
+ - **[2]** S. Civanlar et al., "Distribution feeder reconfiguration for loss reduction," *IEEE Trans. Power Delivery*, 1988. — Load-transfer heuristic. ~146 kW on 33-bus.
801
+ - **[3]** S. K. Goswami and S. K. Basu, "A new algorithm for the reconfiguration of distribution feeders for loss minimization," *IEEE Trans. Power Delivery*, 1992. — ~139.55 kW on 33-bus.
802
+ - **[4]** R. S. Jabr et al., "Minimum loss network reconfiguration using mixed-integer convex programming," *IEEE Trans. Power Systems*, 2012. — MILP exact: 139.55 kW.
803
+ - **[5]** M. F. Sulaima et al., "A DNR by Using Rank Evolutionary PSO for Power Loss Minimization," *ISMS*, 2014. — PSO: 146.1 kW (local optimum).
804
+ - **[6]** E. C. Pereira et al., "Distribution Network Reconfiguration Using Iterative Branch Exchange and Clustering Technique," *Energies*, 2023. — 139.55 kW. Applied to 81 real feeders at CEMIG-D (Brazil).
805
+ - **[7]** F. Bohigas-Daranas et al., "Open-source implementation of distribution network reconfiguration methods," *arXiv:2511.22957*, 2025. — Compares 7 methods, confirms 139.55 kW optimal.
806
+ - **[8]** S. H. Dolatabadi et al., "An Enhanced IEEE 33 Bus Benchmark Test System," *IEEE Trans. Power Systems*, 2021. — Enhanced 33-bus with DG, total load 3.715 MW.
807
+
808
+ ### Distribution Loss Reduction: Industry Practice
809
+
810
+ - **[9]** "Power Distribution Network Reconfiguration Techniques: A Thorough Review," *Sustainability*, 2024. — Survey of 200+ articles. Manual: 5-10%. Automated: 25-34%.
811
+ - **[10]** Bohigas-Daranas et al., 2025 (same as [7]). Confirms 25-34% on real networks.
812
+ - **[11]** "Operational Cost Minimization of Electrical Distribution Network during Switching for Sustainable Operation," *Sustainability*, 2022. — MISOCP on real 71-bus Malaysian network: 25.5%.
813
+
814
+ ### Egypt Energy Data
815
+
816
+ - **[12]** IEA, "Egypt — Countries & Regions," 2022. — Total generation: 215.8 TWh. Natural gas: 81%.
817
+ - **[13]** "Egypt plans to reduce electricity network loss to 16.83% in FY23/24," *Egypt Today*, 2023. — T&D losses: 22.19% (FY 2021/22), target 16.83%.
818
+ - **[14]** CEIC, "Egypt: Electric Power T&D Losses: % of Output." — Historical losses: 11.15% (2014), 22.16% (1985).
819
+ - **[15]** EEHC, "Geographical distribution of electricity distribution companies." — 9 regional distribution companies.
820
+ - **[16]** Iskraemeco, "Improving energy efficiency — NCEDC." — 500,000 smart meters, AMI, SCADA.
821
+ - **[17]** PRIME Alliance, "PRIME 1.4 Roll-out of 63,000 Smart Meters in Egypt," 2022. — 300,000 more planned.
822
+
823
+ ### Global Data & Emission Factors
824
+
825
+ - **[18]** IEA, "Electricity 2025 — Supply." — Global demand grew 4.3% in 2024.
826
+ - **[19]** World Bank, "Electric power T&D losses (% of output)." — Global: 7-10%.
827
+ - **[20]** IEA, "Emission Factors." — Global average: 0.475 kg CO₂/kWh.
828
+ - **[21]** Egypt grid emission factor: ~0.50 kg CO₂/kWh (derived from 88% gas).
829
+ - **[22]** Strategic Market Research, "ADMS Market, 2024-2030." — $3.8B (2024), projected $10.5B by 2030.
830
+ - **[23]** Intent Market Research, "ADMS Market, 2024-2030." — Cloud-based ADMS fastest-growing.
831
+ - **[24]** U.S. EPA, "Greenhouse Gas Equivalencies Calculator." — 21 kg CO₂/tree/year, 4.6 t CO₂/car/year.
832
+
833
+ ### Power System Simulation
834
+
835
+ - **[25]** L. Thurner et al., "pandapower — An Open-Source Python Tool for Power Systems," *IEEE Trans. Power Systems*, 2018. — Newton-Raphson AC power flow solver.
836
+ - **[26]** MATPOWER, "case33bw — Baran & Wu 33-bus system." — 33 buses, 37 lines, 12.66 kV, 3.715 MW.
837
+
838
+ ---
839
+
840
+ ## API Contract (Frontend Specification)
841
+
842
+ > Detailed request/response examples for frontend integration.
843
+
844
+ ### Baseline Endpoint
845
+
846
+ ```
847
+ GET /api/baseline/{system} (system = "case33bw" | "case118")
848
+ ```
849
+
850
+ **Response** (key fields):
851
+ ```json
852
+ {
853
+ "system": "case33bw",
854
+ "network": { "n_buses": 33, "n_lines": 37, "n_tie_lines": 5, "total_load_mw": 3.715 },
855
+ "power_flow": {
856
+ "converged": true, "total_loss_kw": 202.68, "loss_pct": 5.17,
857
+ "min_voltage_pu": 0.9131, "voltage_violations": 21,
858
+ "bus_voltages": [1.0, 0.997, "..."],
859
+ "line_loadings_pct": [47.2, "..."],
860
+ "line_losses_kw": [12.5, "..."]
861
+ },
862
+ "buses": [{ "index": 0, "vn_kv": 12.66, "load_mw": 0.0, "is_slack": true }, "..."],
863
+ "lines": [{ "index": 0, "from_bus": 0, "to_bus": 1, "in_service": true, "is_tie": false }, "..."]
864
+ }
865
+ ```
866
+
867
+ ### Optimize Endpoint
868
+
869
+ ```
870
+ POST /api/optimize
871
+ Body: { "system": "case33bw", "method": "hybrid", "quantum_iters": 300 }
872
+ ```
873
+
874
+ **Response** includes `baseline`, `optimized` (with `open_lines`), `impact` (loss reduction, CO₂, cost), `candidates`, and `timings`.
875
+
876
+ ### Compare Endpoint
877
+
878
+ ```
879
+ POST /api/compare
880
+ Body: { "system": "case33bw", "methods": ["classical", "quantum", "hybrid"] }
881
+ ```
882
+
883
+ **Response** includes per-method `optimized`, `impact`, and `time_sec`.
884
+
885
+ ### Grid Endpoints
886
+
887
+ ```
888
+ GET /api/grid?system=case33bw → nodes + branches for React Flow
889
+ POST /api/grid/set-out-of-service → set lines OOS, run power flow
890
+ GET /api/grid/{system}/switches → switch states for all lines
891
+ ```
892
 
893
+ ### Additional Endpoints
894
 
895
+ | Method | Endpoint | Purpose |
896
+ |--------|----------|---------|
897
+ | POST | `/api/simulate` | Custom switch configuration |
898
+ | POST | `/api/simulate/toggle` | Toggle single switch |
899
+ | POST | `/api/digital-twin` | Scenario simulation |
900
+ | POST | `/api/roi` | ROI calculation |
901
+ | POST | `/api/report` | Generate PDF report |
902
+ | GET | `/api/audit` | Audit log history |
903
+ | GET | `/api/usage` | Usage statistics |
904
 
905
+ ---
906
 
907
+ <p align="center">
908
+ Built with ⚡ by the OptiQ Team
909
+ </p>
REFERENCES.md DELETED
@@ -1,114 +0,0 @@
1
- # References
2
-
3
- All externally-sourced numbers in this project are listed below with their original source.
4
-
5
- ---
6
-
7
- ## IEEE 33-Bus Test System
8
-
9
- - **[1]** M. E. Baran and F. F. Wu, "Network reconfiguration in distribution systems for loss reduction and load balancing," *IEEE Trans. Power Delivery*, vol. 4, no. 2, pp. 1401-1407, Apr. 1989.
10
- - Source of the IEEE 33-bus benchmark. Base case losses: 202.67 kW. Optimal reconfiguration: 139.55 kW (31.15% reduction). Open switches: 7, 9, 14, 32, 37 (1-indexed branch numbers).
11
- - MATPOWER reference file: [case33bw.m](https://github.com/MATPOWER/matpower/blob/master/data/case33bw.m)
12
-
13
- - **[2]** S. Civanlar, J. J. Grainger, H. Yin, and S. S. H. Lee, "Distribution feeder reconfiguration for loss reduction," *IEEE Trans. Power Delivery*, vol. 3, no. 3, pp. 1217-1223, 1988.
14
- - Load-transfer heuristic for reconfiguration. Approximate result on 33-bus: ~146 kW. Limited by dependence on initial switch configuration.
15
-
16
- - **[3]** S. K. Goswami and S. K. Basu, "A new algorithm for the reconfiguration of distribution feeders for loss minimization," *IEEE Trans. Power Delivery*, vol. 7, no. 3, pp. 1484-1491, 1992.
17
- - Power-flow-minimum heuristic. Achieves ~139.55 kW on 33-bus when properly converged.
18
-
19
- - **[4]** R. S. Jabr, R. Singh, and B. C. Pal, "Minimum loss network reconfiguration using mixed-integer convex programming," *IEEE Trans. Power Systems*, vol. 27, no. 2, pp. 1106-1115, 2012.
20
- - MILP exact method using convex relaxation. Provably optimal: 139.55 kW on 33-bus.
21
-
22
- - **[5]** M. F. Sulaima, S. A. Othman, M. S. Jamri, R. Omar, and M. Sulaiman, "A DNR by Using Rank Evolutionary Particle Swarm Optimization for Power Loss Minimization," in *Proc. 5th Int. Conf. Intelligent Systems Modelling and Simulation*, 2014, pp. 417-422.
23
- - PSO on 33-bus: 146.1 kW (local optimum). EPSO: 131.1 kW. REPSO: 120.7 kW (note: REPSO result likely uses different base data or load model).
24
-
25
- - **[6]** E. C. Pereira, C. H. N. R. Barbosa, and J. A. Vasconcelos, "Distribution Network Reconfiguration Using Iterative Branch Exchange and Clustering Technique," *Energies*, vol. 16, no. 5, p. 2395, 2023.
26
- - Branch exchange + clustering on 33-bus: 139.55 kW. Applied to 81 real feeders at CEMIG-D (Brazil).
27
-
28
- - **[7]** F. Bohigas-Daranas, O. Gomis-Bellmunt, and E. Prieto-Araujo, "Open-source implementation of distribution network reconfiguration methods: Analysis and comparison," *arXiv:2511.22957*, Nov. 2025.
29
- - Compares 7 methods (Merlin, Baran, Goswami, PSO, GA, MST, MILP) with open-source Python code. Confirms 139.55 kW optimal on 33-bus.
30
-
31
- - **[8]** S. H. Dolatabadi, M. Ghorbanian, P. Siano, and N. D. Hatziargyriou, "An Enhanced IEEE 33 Bus Benchmark Test System for Distribution System Studies," *IEEE Trans. Power Systems*, vol. 36, no. 3, pp. 2565-2567, 2021.
32
- - Enhanced 33-bus with DG, reactive compensation, hourly load profiles. Radial config losses: 97 kW (with DG). Total load: 3.715 MW.
33
-
34
- ---
35
-
36
- ## Distribution Loss Reduction: Industry Practice
37
-
38
- - **[9]** "Power Distribution Network Reconfiguration Techniques: A Thorough Review," *Sustainability*, vol. 16, no. 23, p. 10307, 2024.
39
- - Survey of 200+ articles. Manual reconfiguration: 5-10% loss reduction. Automated optimisation: 25-34%.
40
-
41
- - **[10]** F. Bohigas-Daranas et al., 2025 (same as [7]).
42
- - Confirms automated reconfiguration achieves 25-34% on real networks. Spain distribution losses: ~25 GWh/year, average 8% in Europe.
43
-
44
- - **[11]** Operational Cost Minimization of Electrical Distribution Network during Switching for Sustainable Operation, *Sustainability*, vol. 14, p. 4196, 2022.
45
- - MISOCP on real 71-bus Malaysian network: 25.5% loss reduction. IEEE 33-bus: 34.14% reduction.
46
-
47
- ---
48
-
49
- ## Egypt Energy Data
50
-
51
- - **[12]** IEA, "Egypt - Countries & Regions," 2022. [Online]. Available: https://www.iea.org/countries/egypt/electricity
52
- - Egypt total electricity generation: 215.8 TWh (2022). Natural gas: 174.9 TWh (81%).
53
-
54
- - **[13]** "Egypt plans to reduce electricity network loss to 16.83% in FY23/24," *Egypt Today*, 2023. [Online]. Available: https://www.egypttoday.com/Article/3/125528
55
- - T&D losses: 22.188% (FY 2021/2022), target 18.21% (FY 2022/2023), target 16.83% (FY 2023/2024).
56
-
57
- - **[14]** CEIC Data, "Egypt: Electric Power Transmission and Distribution Losses: % of Output." [Online]. Available: https://www.ceicdata.com/en/egypt/energy-production-and-consumption/eg-electric-power-transmission-and-distribution-losses--of-output
58
- - Historical losses: 11.15% (2014), declined from 22.16% (1985).
59
-
60
- - **[15]** EEHC, "Geographical distribution of electricity distribution companies." [Online]. Available: https://eehc.gov.eg/CMSEehc/en/consumer-information/geographical-distribution-of-electricity-distribution-companies/
61
- - Lists 9 regional distribution companies: North Cairo, South Cairo, Alexandria, Canal, North Delta, South Delta, Al Beheira, Middle Egypt, Upper Egypt.
62
-
63
- - **[16]** Iskraemeco, "Improving energy efficiency and reliability of distribution networks in Egypt | North Cairo Electricity Distribution Company." [Online]. Available: https://iskraemeco.com/project/improving-energy-efficiency-and-reliability-of-distribution-networks-in-egypt-north-cairo-electricity-distribution-company/
64
- - NCEDC deploying 500,000 smart meters. AMI, distribution management, SCADA integration.
65
-
66
- - **[17]** PRIME Alliance, "PRIME 1.4 Roll-out of 63,000 Smart Meters in Egypt," Dec. 2022. [Online]. Available: https://prime-alliance.org/blog/2022/12/22/prime-1-4-rollout-of-63000-smart-meters-in-egypt/
67
- - 63,000 smart meters deployed (2022), 300,000 more planned under JICA Lot 1. 98% using PRIME PLC.
68
-
69
- ---
70
-
71
- ## Global Energy Data
72
-
73
- - **[18]** IEA, "Electricity 2025 - Supply." [Online]. Available: https://www.iea.org/reports/electricity-2025/supply
74
- - Global electricity demand grew 4.3% in 2024. Renewables surpassing coal in 2025. Coal below 33% for first time in 100 years.
75
-
76
- - **[19]** World Bank, "Electric power transmission and distribution losses (% of output)." [Online]. Available: https://data.worldbank.org/indicator/eg.elc.loss.zs
77
- - Global T&D losses: 7-10% historically (1960-2014). Varies by country: 4% (Bahrain) to 24% (Albania).
78
-
79
- ---
80
-
81
- ## Emission Factors
82
-
83
- - **[20]** IEA, "Emission Factors." Global average grid emission factor: 0.475 kg CO2/kWh.
84
- - Used as default in `config.py` ImpactConfig.
85
-
86
- - **[21]** Egypt grid emission factor: ~0.50 kg CO2/kWh.
87
- - Derived from Egypt's ~81% natural gas generation [12]. Gas-fired plants emit 0.40-0.55 kg CO2/kWh depending on efficiency. 0.50 is a conservative mid-point.
88
-
89
- ---
90
-
91
- ## ADMS Market Data
92
-
93
- - **[22]** Strategic Market Research, "Advanced Distribution Management System Market, 2024-2030." [Online]. Available: https://www.strategicmarketresearch.com/market-report/advanced-distribution-management-system-market
94
- - ADMS market: $3.8B (2024), projected $10.5B by 2030, 18.5% CAGR. Full deployment: $5-50M.
95
-
96
- - **[23]** Intent Market Research, "Advanced Distribution Management System (ADMS) Market, 2024-2030." [Online]. Available: https://intentmarketresearch.com/latest-reports/advanced-distribution-management-system-adms-market-4584
97
- - Cloud-based ADMS fastest-growing. Major vendors: Siemens, ABB, GE, Schneider Electric.
98
-
99
- ---
100
-
101
- ## Equivalence Factors
102
-
103
- - **[24]** U.S. EPA, "Greenhouse Gas Equivalencies Calculator." [Online]. Available: https://www.epa.gov/energy/greenhouse-gas-equivalencies-calculator
104
- - ~21 kg CO2 absorbed per tree per year. ~4.6 metric tons CO2 per passenger vehicle per year.
105
-
106
- ---
107
-
108
- ## Power System Simulation
109
-
110
- - **[25]** L. Thurner et al., "pandapower - An Open-Source Python Tool for Convenient Modeling, Analysis, and Optimization of Electric Power Systems," *IEEE Trans. Power Systems*, vol. 33, no. 6, pp. 6510-6521, 2018.
111
- - Newton-Raphson AC power flow solver used for all loss calculations in OptiQ.
112
-
113
- - **[26]** MATPOWER, "case33bw - Baran & Wu 33-bus system." [Online]. Available: https://matpower.org/docs/ref/matpower6.0/case33bw.html
114
- - Reference data for the IEEE 33-bus system. 33 buses, 32 branches + 5 tie lines, 12.66 kV, 3.715 MW total load.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
api/auth.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Firebase Authentication Middleware for OptiQ API.
3
+ Verifies Firebase JWT tokens and extracts user info.
4
+ """
5
+ from __future__ import annotations
6
+
7
+ import os
8
+ from typing import Optional
9
+ from functools import wraps
10
+
11
+ from fastapi import Request, HTTPException, Depends
12
+ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
13
+
14
+ # Mock Firebase verification for development
15
+ # In production, use firebase_admin to verify tokens
16
+ USE_MOCK_AUTH = os.environ.get("OPTIQ_MOCK_AUTH", "true").lower() == "true"
17
+
18
+ security = HTTPBearer(auto_error=False)
19
+
20
+
21
class FirebaseUser:
    """An authenticated Firebase user (uid, email, display name).

    When no explicit name is supplied (or it is empty), the local part of
    the email address — everything before the "@" — is used instead.
    """

    def __init__(self, uid: str, email: str, name: Optional[str] = None):
        self.uid = uid
        self.email = email
        if name:
            self.name = name
        else:
            self.name = email.split("@")[0]
27
+
28
+
29
async def verify_firebase_token(
    credentials: HTTPAuthorizationCredentials = Depends(security),
) -> Optional[FirebaseUser]:
    """Resolve the bearer token to a :class:`FirebaseUser`, or ``None``.

    Mock mode (``USE_MOCK_AUTH``): a token of the form ``uid:email[:name]``
    is parsed directly; any other token is accepted as a demo identity.
    Production mode: the token is verified via the Firebase Admin SDK and
    an HTTP 401 is raised when verification fails.
    """
    if credentials is None:
        # No Authorization header was sent.
        return None

    token = credentials.credentials

    if USE_MOCK_AUTH:
        # Development mode: parse mock token or accept any token.
        try:
            fields = token.split(":")
            if len(fields) >= 2:
                # "uid:email:name" style test token.
                display = fields[2] if len(fields) > 2 else None
                return FirebaseUser(uid=fields[0], email=fields[1], name=display)
            # Opaque token: fold its prefix into a demo uid when long enough.
            if len(token) >= 8:
                demo_uid = "demo_user_" + token[:8]
            else:
                demo_uid = "demo_user"
            return FirebaseUser(uid=demo_uid, email="demo@optiq.ai", name="Demo User")
        except Exception:
            # Anything unparsable still maps to the shared demo identity.
            return FirebaseUser(uid="demo_user", email="demo@optiq.ai", name="Demo User")

    # Production mode: verify with the Firebase Admin SDK.
    try:
        import firebase_admin
        from firebase_admin import auth, credentials as fb_credentials

        # Initialize Firebase Admin exactly once per process.
        if not firebase_admin._apps:
            cred_path = os.environ.get("FIREBASE_CREDENTIALS")
            if cred_path:
                firebase_admin.initialize_app(fb_credentials.Certificate(cred_path))
            else:
                firebase_admin.initialize_app()

        decoded = auth.verify_id_token(token)
        return FirebaseUser(
            uid=decoded["uid"],
            email=decoded.get("email", ""),
            name=decoded.get("name"),
        )
    except Exception as e:
        raise HTTPException(status_code=401, detail=f"Invalid authentication token: {str(e)}")
81
+
82
+
83
async def require_auth(
    credentials: HTTPAuthorizationCredentials = Depends(security),
) -> FirebaseUser:
    """Dependency that enforces authentication.

    Raises an HTTP 401 when no valid bearer token was supplied.
    """
    resolved = await verify_firebase_token(credentials)
    if resolved is None:
        raise HTTPException(status_code=401, detail="Authentication required")
    return resolved
91
+
92
+
93
async def optional_auth(
    credentials: HTTPAuthorizationCredentials = Depends(security),
) -> Optional[FirebaseUser]:
    """Dependency that resolves the caller when authenticated, else ``None``.

    Unlike :func:`require_auth`, a missing or absent token is not an error.
    """
    user = await verify_firebase_token(credentials)
    return user
api/database.py ADDED
@@ -0,0 +1,348 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ OptiQ Database - SQLite database for users, usage tracking, and audit logs.
3
+ """
4
from __future__ import annotations

import hashlib
import hmac
import os
import secrets
import sqlite3
from contextlib import contextmanager
from datetime import datetime
from pathlib import Path
from typing import Optional
14
+
15
+ # Database file path
16
+ DB_PATH = Path(__file__).parent.parent / "optiq.db"
17
+
18
+
19
def get_connection():
    """Open a SQLite connection to the OptiQ database file.

    Rows are returned as :class:`sqlite3.Row`, so columns can be read
    by name as well as by index.
    """
    connection = sqlite3.connect(str(DB_PATH))
    connection.row_factory = sqlite3.Row
    return connection
24
+
25
+
26
@contextmanager
def get_db():
    """Yield a database connection with transactional semantics.

    Commits when the managed block finishes normally, rolls back when it
    raises (the commit itself is inside the ``try`` so a failed commit
    also triggers the rollback), and always closes the connection.
    """
    connection = get_connection()
    try:
        yield connection
        connection.commit()
    except Exception:
        connection.rollback()
        raise
    finally:
        connection.close()
38
+
39
+
40
+ def _hash_password(password: str, salt: str | None = None) -> tuple[str, str]:
41
+ """Hash a password with a salt using SHA-256. Returns (hash, salt)."""
42
+ if salt is None:
43
+ salt = secrets.token_hex(16)
44
+ hashed = hashlib.sha256(f"{salt}{password}".encode()).hexdigest()
45
+ return hashed, salt
46
+
47
+
48
def init_db():
    """Initialize the database with required tables.

    Idempotent: every CREATE uses IF NOT EXISTS and the trailing column
    migration tolerates already-present columns, so this is safe to call
    on every startup (it runs both at module import and from api/main.py).
    """
    with get_db() as conn:
        cursor = conn.cursor()

        # Users table — one row per account. The total_* columns are
        # lifetime aggregates maintained by log_usage().
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS users (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                firebase_uid TEXT UNIQUE NOT NULL,
                email TEXT UNIQUE NOT NULL,
                display_name TEXT,
                password_hash TEXT,
                password_salt TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                last_login TIMESTAMP,
                total_optimizations INTEGER DEFAULT 0,
                total_energy_saved_kwh REAL DEFAULT 0,
                total_co2_saved_kg REAL DEFAULT 0,
                total_money_saved_usd REAL DEFAULT 0
            )
        """)

        # Usage tracking table — one row per optimization run, written by
        # log_usage(). user_id may be NULL for unknown Firebase UIDs.
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS usage (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER,
                firebase_uid TEXT,
                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                system TEXT,
                method TEXT,
                load_multiplier REAL DEFAULT 1.0,
                baseline_loss_kw REAL,
                optimized_loss_kw REAL,
                energy_saved_kwh REAL,
                co2_saved_kg REAL,
                money_saved_usd REAL,
                computation_time_sec REAL,
                shadow_mode BOOLEAN DEFAULT 0,
                switches_changed TEXT,
                FOREIGN KEY (user_id) REFERENCES users(id)
            )
        """)

        # Audit logs table — action history written by log_audit(); the
        # open_lines_* columns hold JSON-encoded lists (see api/routes/audit.py).
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS audit_logs (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER,
                firebase_uid TEXT,
                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                action TEXT NOT NULL,
                system TEXT,
                method TEXT,
                details TEXT,
                baseline_loss_kw REAL,
                optimized_loss_kw REAL,
                loss_reduction_pct REAL,
                energy_saved_kwh_year REAL,
                co2_saved_tonnes_year REAL,
                cost_saved_usd_year REAL,
                open_lines_before TEXT,
                open_lines_after TEXT,
                FOREIGN KEY (user_id) REFERENCES users(id)
            )
        """)

        # Feeders table (for multi-feeder simulation)
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS feeders (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER,
                name TEXT NOT NULL,
                system TEXT DEFAULT 'case33bw',
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (user_id) REFERENCES users(id)
            )
        """)

        conn.commit()

    # Migrate: add password columns if they don't exist (handles old DBs
    # created before email/password auth was introduced).
    _migrate_add_columns()
132
+
133
+
134
def _migrate_add_columns():
    """Best-effort schema migration: add missing ``users`` columns.

    ``ALTER TABLE`` raises :class:`sqlite3.OperationalError` when the
    column already exists, which is treated as "nothing to do" — the
    function is therefore safe to re-run on every startup.
    """
    new_columns = (("password_hash", "TEXT"), ("password_salt", "TEXT"))
    with get_db() as conn:
        cursor = conn.cursor()
        for column_name, sql_type in new_columns:
            try:
                cursor.execute(f"ALTER TABLE users ADD COLUMN {column_name} {sql_type}")
            except sqlite3.OperationalError:
                pass  # column already exists
        conn.commit()
144
+
145
+
146
+ # ── Auth helpers ──────────────────────────────────────────────────────────
147
+
148
def register_user(email: str, password: str, display_name: str | None = None) -> dict:
    """Register a new user with email/password.

    Returns the stored user row as a dict.
    Raises ValueError when an account with the same email already exists.
    """
    pw_hash, pw_salt = _hash_password(password)
    uid = f"user_{secrets.token_hex(8)}"
    # One timestamp for both created_at and last_login so they match exactly.
    now = datetime.utcnow().isoformat()
    with get_db() as conn:
        cursor = conn.cursor()
        # Fast-path duplicate check for a friendly error message.
        cursor.execute("SELECT id FROM users WHERE email = ?", (email,))
        if cursor.fetchone():
            raise ValueError("An account with this email already exists")
        try:
            cursor.execute(
                """INSERT INTO users (firebase_uid, email, display_name, password_hash, password_salt, created_at, last_login)
                   VALUES (?, ?, ?, ?, ?, ?, ?)""",
                (uid, email, display_name or email.split("@")[0], pw_hash, pw_salt, now, now),
            )
        except sqlite3.IntegrityError:
            # Race: a concurrent request inserted the same email between the
            # SELECT above and this INSERT; the UNIQUE(email) constraint
            # catches it, so surface the same friendly error.
            raise ValueError("An account with this email already exists") from None
        cursor.execute("SELECT * FROM users WHERE firebase_uid = ?", (uid,))
        return dict(cursor.fetchone())
166
+
167
+
168
def authenticate_user(email: str, password: str) -> dict | None:
    """Authenticate a user by email/password.

    Returns the user row as a dict on success (refreshing last_login),
    or ``None`` when the email is unknown or the password is wrong.
    """
    with get_db() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM users WHERE email = ?", (email,))
        row = cursor.fetchone()
        if not row:
            return None
        user = dict(row)
        if not user.get("password_hash"):
            # Legacy account created before password auth: accepted with any
            # password, and last_login is intentionally left untouched.
            # NOTE(review): such accounts are effectively passwordless —
            # confirm this migration shim is still wanted.
            return user
        pw_hash, _ = _hash_password(password, user["password_salt"])
        # Constant-time comparison so timing differences cannot leak how
        # much of the hash prefix matched.
        if not hmac.compare_digest(pw_hash, user["password_hash"]):
            return None
        # Update last login
        cursor.execute(
            "UPDATE users SET last_login = ? WHERE id = ?",
            (datetime.utcnow().isoformat(), user["id"]),
        )
        return user
189
+
190
+
191
def get_or_create_user(firebase_uid: str, email: str, display_name: Optional[str] = None) -> dict:
    """Fetch the user row for *firebase_uid*, creating it on first sight.

    Existing users get their last_login refreshed; note the returned dict
    reflects the row as it was read, so last_login may lag by one update.
    """
    with get_db() as conn:
        cursor = conn.cursor()

        cursor.execute("SELECT * FROM users WHERE firebase_uid = ?", (firebase_uid,))
        existing = cursor.fetchone()

        if existing is not None:
            # Known user: just refresh the last-login timestamp.
            cursor.execute(
                "UPDATE users SET last_login = ? WHERE firebase_uid = ?",
                (datetime.utcnow().isoformat(), firebase_uid),
            )
            return dict(existing)

        # First time we see this UID: create the row, then read it back.
        cursor.execute(
            """INSERT INTO users (firebase_uid, email, display_name, created_at, last_login)
               VALUES (?, ?, ?, ?, ?)""",
            (firebase_uid, email, display_name, datetime.utcnow().isoformat(), datetime.utcnow().isoformat()),
        )
        cursor.execute("SELECT * FROM users WHERE firebase_uid = ?", (firebase_uid,))
        return dict(cursor.fetchone())
214
+
215
+
216
def log_usage(
    firebase_uid: str,
    system: str,
    method: str,
    baseline_loss_kw: float,
    optimized_loss_kw: float,
    energy_saved_kwh: float,
    co2_saved_kg: float,
    money_saved_usd: float,
    computation_time_sec: float,
    shadow_mode: bool = False,
    switches_changed: Optional[str] = None,
    load_multiplier: float = 1.0,
):
    """Record one optimization run in the ``usage`` table.

    Also bumps the owning user's lifetime savings counters when the
    Firebase UID maps to a known user row.
    """
    with get_db() as conn:
        cur = conn.cursor()

        # Resolve the internal user id (None for unknown UIDs).
        cur.execute("SELECT id FROM users WHERE firebase_uid = ?", (firebase_uid,))
        owner = cur.fetchone()
        user_id = owner["id"] if owner else None

        cur.execute(
            """INSERT INTO usage (
                user_id, firebase_uid, system, method, load_multiplier,
                baseline_loss_kw, optimized_loss_kw, energy_saved_kwh,
                co2_saved_kg, money_saved_usd, computation_time_sec,
                shadow_mode, switches_changed
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            (user_id, firebase_uid, system, method, load_multiplier,
             baseline_loss_kw, optimized_loss_kw, energy_saved_kwh,
             co2_saved_kg, money_saved_usd, computation_time_sec,
             shadow_mode, switches_changed),
        )

        if user_id:
            # Keep the per-user lifetime aggregates in sync with the new row.
            cur.execute(
                """UPDATE users SET
                    total_optimizations = total_optimizations + 1,
                    total_energy_saved_kwh = total_energy_saved_kwh + ?,
                    total_co2_saved_kg = total_co2_saved_kg + ?,
                    total_money_saved_usd = total_money_saved_usd + ?
                WHERE id = ?""",
                (energy_saved_kwh, co2_saved_kg, money_saved_usd, user_id),
            )
263
+
264
+
265
def log_audit(
    firebase_uid: str,
    action: str,
    system: Optional[str] = None,
    method: Optional[str] = None,
    details: Optional[str] = None,
    baseline_loss_kw: Optional[float] = None,
    optimized_loss_kw: Optional[float] = None,
    loss_reduction_pct: Optional[float] = None,
    energy_saved_kwh_year: Optional[float] = None,
    co2_saved_tonnes_year: Optional[float] = None,
    cost_saved_usd_year: Optional[float] = None,
    open_lines_before: Optional[str] = None,
    open_lines_after: Optional[str] = None,
):
    """Append one row to the ``audit_logs`` table.

    The owning user id is resolved from *firebase_uid* when known;
    otherwise the row is stored with a NULL user_id. The open_lines_*
    arguments are stored verbatim (callers pass JSON-encoded lists).
    """
    with get_db() as conn:
        cur = conn.cursor()

        cur.execute("SELECT id FROM users WHERE firebase_uid = ?", (firebase_uid,))
        owner = cur.fetchone()
        user_id = owner["id"] if owner else None

        cur.execute(
            """INSERT INTO audit_logs (
                user_id, firebase_uid, action, system, method, details,
                baseline_loss_kw, optimized_loss_kw, loss_reduction_pct,
                energy_saved_kwh_year, co2_saved_tonnes_year, cost_saved_usd_year,
                open_lines_before, open_lines_after
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            (user_id, firebase_uid, action, system, method, details,
             baseline_loss_kw, optimized_loss_kw, loss_reduction_pct,
             energy_saved_kwh_year, co2_saved_tonnes_year, cost_saved_usd_year,
             open_lines_before, open_lines_after),
        )
300
+
301
+
302
def get_user_usage(firebase_uid: str, limit: int = 100) -> list:
    """Return the most recent usage rows for *firebase_uid* (up to *limit*)."""
    query = (
        "SELECT * FROM usage WHERE firebase_uid = ? "
        "ORDER BY timestamp DESC LIMIT ?"
    )
    with get_db() as conn:
        rows = conn.cursor().execute(query, (firebase_uid, limit)).fetchall()
    return [dict(r) for r in rows]
312
+
313
+
314
def get_user_audit_logs(firebase_uid: str, limit: int = 100) -> list:
    """Return the most recent audit-log rows for *firebase_uid* (up to *limit*)."""
    query = (
        "SELECT * FROM audit_logs WHERE firebase_uid = ? "
        "ORDER BY timestamp DESC LIMIT ?"
    )
    with get_db() as conn:
        rows = conn.cursor().execute(query, (firebase_uid, limit)).fetchall()
    return [dict(r) for r in rows]
324
+
325
+
326
def get_user_stats(firebase_uid: str) -> dict:
    """Return a user's lifetime aggregate counters (all zeros when unknown)."""
    zeros = {
        "total_optimizations": 0,
        "total_energy_saved_kwh": 0,
        "total_co2_saved_kg": 0,
        "total_money_saved_usd": 0,
    }
    with get_db() as conn:
        cur = conn.cursor()
        cur.execute("SELECT * FROM users WHERE firebase_uid = ?", (firebase_uid,))
        row = cur.fetchone()
    if row is None:
        return zeros
    # Project just the aggregate columns out of the full user row.
    return {column: row[column] for column in zeros}
345
+
346
+
347
# Initialize database on import
# NOTE(review): import-time side effect — creates/migrates optiq.db as soon
# as this module is imported. api/main.py also calls init_db() explicitly;
# the call is idempotent, so this is redundant but harmless.
init_db()
api/main.py CHANGED
@@ -1,6 +1,7 @@
1
  """
2
  OptiQ API — FastAPI entry point.
3
  Serves the hybrid Quantum-AI-Classical optimization pipeline.
 
4
  """
5
  from __future__ import annotations
6
 
@@ -10,25 +11,51 @@ import os
10
  # Ensure project root is on the path so ``from src.*`` and ``from config`` work.
11
  sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
12
 
13
- from fastapi import FastAPI
 
 
 
 
 
 
 
 
 
14
  from fastapi.middleware.cors import CORSMiddleware
 
 
15
 
16
  from config import CFG
 
 
 
 
 
 
17
  from api.routes.baseline import router as baseline_router
18
  from api.routes.optimize import router as optimize_router
19
  from api.routes.compare import router as compare_router
20
  from api.routes.validate import router as validate_router
 
 
 
 
 
 
 
 
21
 
22
  app = FastAPI(
23
- title="OptiQ API",
24
  description=(
25
- "Hybrid Quantum-AI-Classical power grid optimization. "
26
- "Minimizes distribution losses on IEEE test systems."
 
27
  ),
28
- version="0.1.0",
29
  )
30
 
31
- # CORS — allow the Lovable Pro frontend (and any origin during dev)
32
  app.add_middleware(
33
  CORSMiddleware,
34
  allow_origins=CFG.api.cors_origins,
@@ -42,11 +69,42 @@ app.include_router(baseline_router, prefix="/api", tags=["Baseline"])
42
  app.include_router(optimize_router, prefix="/api", tags=["Optimize"])
43
  app.include_router(compare_router, prefix="/api", tags=["Compare"])
44
  app.include_router(validate_router, prefix="/api", tags=["Validate"])
 
 
 
 
 
 
 
 
45
 
46
 
47
- @app.get("/", tags=["Health"])
48
  def health():
49
- return {"status": "ok", "project": "OptiQ", "version": "0.1.0"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
 
52
  if __name__ == "__main__":
 
1
  """
2
  OptiQ API — FastAPI entry point.
3
  Serves the hybrid Quantum-AI-Classical optimization pipeline.
4
+ Extended for SaaS platform with authentication, analytics, and reporting.
5
  """
6
  from __future__ import annotations
7
 
 
11
  # Ensure project root is on the path so ``from src.*`` and ``from config`` work.
12
  sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
13
 
14
+ # Load .env file if present
15
+ try:
16
+ from dotenv import load_dotenv
17
+ load_dotenv(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), ".env"))
18
+ except ImportError:
19
+ pass
20
+
21
+ from pathlib import Path
22
+
23
+ from fastapi import FastAPI, Request
24
  from fastapi.middleware.cors import CORSMiddleware
25
+ from fastapi.staticfiles import StaticFiles
26
+ from fastapi.responses import FileResponse
27
 
28
  from config import CFG
29
+
30
+ # Initialize database on startup
31
+ from api.database import init_db
32
+ init_db()
33
+
34
+ # Import routers
35
  from api.routes.baseline import router as baseline_router
36
  from api.routes.optimize import router as optimize_router
37
  from api.routes.compare import router as compare_router
38
  from api.routes.validate import router as validate_router
39
+ from api.routes.grid import router as grid_router
40
+ from api.routes.simulate import router as simulate_router
41
+ from api.routes.digital_twin import router as digital_twin_router
42
+ from api.routes.usage import router as usage_router
43
+ from api.routes.audit import router as audit_router
44
+ from api.routes.roi import router as roi_router
45
+ from api.routes.report import router as report_router
46
+ from api.routes.auth_routes import router as auth_router
47
 
48
  app = FastAPI(
49
+ title="OptiQ SaaS API",
50
  description=(
51
+ "AI-Powered Distribution Grid Reconfiguration Platform. "
52
+ "Hybrid Quantum-AI-Classical optimization for power distribution networks. "
53
+ "Reduces grid losses by 30%+ through intelligent network reconfiguration."
54
  ),
55
+ version="1.0.0",
56
  )
57
 
58
+ # CORS — allow the frontend (and any origin during dev)
59
  app.add_middleware(
60
  CORSMiddleware,
61
  allow_origins=CFG.api.cors_origins,
 
69
  app.include_router(optimize_router, prefix="/api", tags=["Optimize"])
70
  app.include_router(compare_router, prefix="/api", tags=["Compare"])
71
  app.include_router(validate_router, prefix="/api", tags=["Validate"])
72
+ app.include_router(grid_router, prefix="/api", tags=["Grid"])
73
+ app.include_router(simulate_router, prefix="/api", tags=["Simulate"])
74
+ app.include_router(digital_twin_router, prefix="/api", tags=["Digital Twin"])
75
+ app.include_router(usage_router, prefix="/api", tags=["Usage"])
76
+ app.include_router(audit_router, prefix="/api", tags=["Audit"])
77
+ app.include_router(roi_router, prefix="/api", tags=["ROI"])
78
+ app.include_router(report_router, prefix="/api", tags=["Report"])
79
+ app.include_router(auth_router, prefix="/api", tags=["Auth"])
80
 
81
 
82
+ @app.get("/api/health", tags=["Health"])
83
  def health():
84
+ return {"status": "ok", "project": "OptiQ", "version": "1.0.0"}
85
+
86
+
87
+ # ── Serve React frontend from /frontend/dist ─────────────────────────────
88
+ _PROJECT_ROOT = Path(__file__).resolve().parent.parent
89
+ _FRONTEND_DIST = _PROJECT_ROOT / "frontend" / "dist"
90
+
91
+ if _FRONTEND_DIST.is_dir():
92
+ # Serve static assets (JS, CSS, images, etc.)
93
+ app.mount("/assets", StaticFiles(directory=_FRONTEND_DIST / "assets"), name="assets")
94
+
95
    # Catch-all: serve index.html for any non-API route (SPA client-side routing)
    @app.get("/{full_path:path}", include_in_schema=False)
    async def serve_spa(request: Request, full_path: str):
        """Serve a built frontend file when one exists, else index.html.

        Registered after the API routers, so /api/* paths are matched by
        them first; every remaining path lands here for client-side routing.
        NOTE(review): full_path is not sanitized — a "../" sequence could
        resolve outside _FRONTEND_DIST; consider Path.resolve() plus a
        prefix check before serving.
        """
        # If a static file exists at the path, serve it directly
        file_path = _FRONTEND_DIST / full_path
        if full_path and file_path.is_file():
            return FileResponse(file_path)
        # Otherwise serve index.html for client-side routing
        return FileResponse(_FRONTEND_DIST / "index.html")
104
+ else:
105
+ @app.get("/", include_in_schema=False)
106
+ def root_no_frontend():
107
+ return {"message": "OptiQ API running. Build frontend with: cd frontend && npm run build"}
108
 
109
 
110
  if __name__ == "__main__":
api/routes/audit.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Audit endpoint — Audit logs for optimization history.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ import json
7
+ from typing import Optional
8
+ from fastapi import APIRouter, HTTPException, Depends
9
+ from pydantic import BaseModel, Field
10
+
11
+ from api.auth import require_auth, optional_auth, FirebaseUser
12
+ from api.database import get_user_audit_logs, log_audit
13
+
14
+ router = APIRouter()
15
+
16
+
17
+ @router.get("/audit")
18
+ def get_audit_logs(limit: int = 100, user: FirebaseUser = Depends(require_auth)):
19
+ """Get audit logs for the authenticated user."""
20
+ logs = get_user_audit_logs(user.uid, limit=limit)
21
+
22
+ # Parse JSON fields
23
+ for log in logs:
24
+ if log.get("open_lines_before"):
25
+ try:
26
+ log["open_lines_before"] = json.loads(log["open_lines_before"])
27
+ except:
28
+ pass
29
+ if log.get("open_lines_after"):
30
+ try:
31
+ log["open_lines_after"] = json.loads(log["open_lines_after"])
32
+ except:
33
+ pass
34
+
35
+ return {
36
+ "user_id": user.uid,
37
+ "logs": logs,
38
+ "count": len(logs),
39
+ }
40
+
41
+
42
+ @router.get("/audit/summary")
43
+ def get_audit_summary(user: FirebaseUser = Depends(require_auth)):
44
+ """Get a summary of optimization history."""
45
+ logs = get_user_audit_logs(user.uid, limit=1000)
46
+
47
+ total_optimizations = len([l for l in logs if l.get("action") == "optimization"])
48
+ total_simulations = len([l for l in logs if l.get("action") == "simulation"])
49
+
50
+ # Calculate total savings from all optimizations
51
+ total_energy_saved = sum(l.get("energy_saved_kwh_year", 0) or 0 for l in logs)
52
+ total_co2_saved = sum(l.get("co2_saved_tonnes_year", 0) or 0 for l in logs)
53
+ total_cost_saved = sum(l.get("cost_saved_usd_year", 0) or 0 for l in logs)
54
+
55
+ # Get switches changed stats
56
+ switch_changes = []
57
+ for log in logs:
58
+ before = log.get("open_lines_before", [])
59
+ after = log.get("open_lines_after", [])
60
+ if isinstance(before, str):
61
+ try:
62
+ before = json.loads(before)
63
+ except:
64
+ before = []
65
+ if isinstance(after, str):
66
+ try:
67
+ after = json.loads(after)
68
+ except:
69
+ after = []
70
+ if before and after:
71
+ changes = set(before) ^ set(after)
72
+ switch_changes.append(len(changes))
73
+
74
+ avg_switches_changed = sum(switch_changes) / len(switch_changes) if switch_changes else 0
75
+
76
+ return {
77
+ "total_optimizations": total_optimizations,
78
+ "total_simulations": total_simulations,
79
+ "total_energy_saved_kwh_year": round(total_energy_saved, 2),
80
+ "total_co2_saved_tonnes_year": round(total_co2_saved, 2),
81
+ "total_cost_saved_usd_year": round(total_cost_saved, 2),
82
+ "avg_switches_changed": round(avg_switches_changed, 1),
83
+ }
api/routes/auth_routes.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Authentication routes — Register & Login using SQLite (optiq.db).
3
+ Issues JWT-like tokens (base64-encoded JSON) for session management.
4
+ """
5
+ from __future__ import annotations
6
+
7
+ import json
8
+ import base64
9
+ import hashlib
10
+ import time
11
+
12
+ from fastapi import APIRouter, HTTPException
13
+ from pydantic import BaseModel, Field
14
+
15
+ from api.database import register_user, authenticate_user, get_or_create_user
16
+
17
+ router = APIRouter()
18
+
19
+
20
+ def _make_token(uid: str, email: str, name: str) -> str:
21
+ """Create a simple signed token: base64(json payload).sha256 signature.
22
+
23
+ For a production system you'd use PyJWT with RS256. For this self-hosted
24
+ platform the token format is uid:email:name which is what the existing
25
+ auth middleware already parses.
26
+ """
27
+ return f"{uid}:{email}:{name}"
28
+
29
+
30
class RegisterRequest(BaseModel):
    """Request body for ``POST /auth/register``."""

    email: str = Field(..., description="User email address")
    password: str = Field(..., min_length=6, description="Password (min 6 chars)")
    display_name: str | None = Field(None, description="Display name")
34
+
35
+
36
class LoginRequest(BaseModel):
    """Request body for ``POST /auth/login``."""

    email: str = Field(..., description="User email address")
    password: str = Field(..., description="Password")
39
+
40
+
41
@router.post("/auth/register")
def register(req: RegisterRequest):
    """Register a new user account and return a session token.

    Raises:
        HTTPException 400: if registration fails (e.g. email already taken —
            surfaced by ``register_user`` as ValueError).
    """
    try:
        user = register_user(req.email, req.password, req.display_name)
    except ValueError as exc:
        # Chain the original cause (PEP 3134 / flake8 B904) for debuggability.
        raise HTTPException(status_code=400, detail=str(exc)) from exc

    token = _make_token(user["firebase_uid"], user["email"], user["display_name"] or "")
    return {
        "token": token,
        "user": {
            "uid": user["firebase_uid"],
            "email": user["email"],
            "displayName": user["display_name"],
        },
    }
58
+
59
+
60
@router.post("/auth/login")
def login(req: LoginRequest):
    """Authenticate with email + password."""
    account = authenticate_user(req.email, req.password)
    if account is None:
        raise HTTPException(status_code=401, detail="Invalid email or password")

    # Pull the fields once; the token and the user payload share them.
    uid = account["firebase_uid"]
    email = account["email"]
    display_name = account["display_name"]

    return {
        "token": _make_token(uid, email, display_name or ""),
        "user": {"uid": uid, "email": email, "displayName": display_name},
    }
76
+
77
+
78
@router.get("/auth/me")
def get_me():
    """Placeholder — in production, decode the Bearer token."""
    hint = "Use the token from /auth/login or /auth/register as Bearer header."
    return {"detail": hint}
api/routes/digital_twin.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Digital Twin endpoint — Load scaling and scenario analysis.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ import json
7
+ from typing import Optional
8
+ from fastapi import APIRouter, HTTPException, Depends
9
+ from pydantic import BaseModel, Field
10
+
11
+ from src.grid.loader import load_network, clone_network
12
+ from src.grid.power_flow import get_baseline, evaluate_topology
13
+ from src.hybrid.pipeline import run_hybrid_pipeline
14
+ from src.evaluation.metrics import compute_impact
15
+ from api.auth import optional_auth, FirebaseUser
16
+ from api.database import log_usage, log_audit
17
+
18
+ router = APIRouter()
19
+
20
+
21
class DigitalTwinRequest(BaseModel):
    """Request body for ``POST /digital-twin``."""

    system: str = Field(default="case33bw", description="IEEE test system name")
    # Validation clamps the multiplier to the 0.5–2.0 range.
    load_multiplier: float = Field(
        default=1.0,
        ge=0.5,
        le=2.0,
        description="Load scaling factor (0.5 = 50% load, 1.3 = 130% load)"
    )
    optimize: bool = Field(default=False, description="Whether to run optimization")
    method: str = Field(default="hybrid", description="Optimization method if optimize=True")
31
+
32
+
33
@router.post("/digital-twin")
def digital_twin(req: DigitalTwinRequest, user: FirebaseUser = Depends(optional_auth)):
    """Digital Twin simulation with load scaling.

    Scales all loads by the given multiplier and optionally runs optimization.
    This simulates different operating conditions (peak, off-peak, etc.).

    Raises:
        HTTPException 400: unknown system name.
        HTTPException 500: power flow does not converge at the scaled load.
    """
    try:
        net = load_network(req.system)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))

    # Store original loads
    # NOTE(review): these copies are never read or restored below — presumably
    # load_network() returns a fresh network per call; confirm and consider
    # removing them.
    original_p = net.load["p_mw"].copy()
    original_q = net.load["q_mvar"].copy()

    # Scale loads (active and reactive power scaled uniformly, in place)
    net.load["p_mw"] *= req.load_multiplier
    net.load["q_mvar"] *= req.load_multiplier

    # Get baseline with scaled loads
    baseline = get_baseline(net)
    if not baseline.get("converged"):
        raise HTTPException(status_code=500, detail="Power flow did not converge for this load level")

    result = {
        "system": req.system,
        "load_multiplier": req.load_multiplier,
        "total_load_mw": float(net.load["p_mw"].sum()),
        "baseline": baseline,
    }

    # Run optimization if requested
    if req.optimize:
        # "hybrid" enables both solver stages; "quantum"/"ai" enable one each.
        use_quantum = req.method in ("quantum", "hybrid")
        use_ai = req.method in ("ai", "hybrid")

        pipeline_result = run_hybrid_pipeline(
            net,
            use_quantum=use_quantum,
            use_ai=use_ai,
            quantum_iters=300,
            quantum_restarts=3,
            quantum_top_k=5,
        )

        if "error" not in pipeline_result:
            result["optimized"] = pipeline_result["optimized"]
            result["impact"] = pipeline_result["impact"]
            result["method"] = pipeline_result["method"]
            result["timings"] = pipeline_result["timings"]

            # Log if authenticated — best-effort: a logging failure must not
            # fail the simulation response.
            if user:
                try:
                    impact = pipeline_result["impact"]
                    log_usage(
                        firebase_uid=user.uid,
                        system=req.system,
                        method=f"digital_twin_{req.method}",
                        baseline_loss_kw=baseline["total_loss_kw"],
                        optimized_loss_kw=pipeline_result["optimized"]["total_loss_kw"],
                        energy_saved_kwh=impact["energy_saved_mwh_year"] * 1000,
                        co2_saved_kg=impact["co2_saved_tonnes_year"] * 1000,
                        money_saved_usd=impact["cost_saved_usd_year"],
                        computation_time_sec=pipeline_result["timings"]["total_sec"],
                        load_multiplier=req.load_multiplier,
                        switches_changed=json.dumps(pipeline_result["optimized"].get("open_lines", [])),
                    )
                except Exception:
                    pass
        else:
            # Optimization failed; still return the baseline simulation.
            result["optimization_error"] = pipeline_result["error"]

    return result
+ return result
108
+
109
+
110
@router.get("/digital-twin/scenarios")
def get_scenarios(system: str = "case33bw"):
    """Get predefined load scenarios for digital twin analysis."""
    try:
        net = load_network(system)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))

    base_load = float(net.load["p_mw"].sum())

    # (label, load multiplier) presets; load_mw is derived from the base load.
    presets = [
        ("Light Load (70%)", 0.7),
        ("Morning (85%)", 0.85),
        ("Nominal (100%)", 1.0),
        ("Peak (115%)", 1.15),
        ("Heavy Load (130%)", 1.3),
    ]
    scenarios = [
        {"name": label, "multiplier": mult, "load_mw": round(base_load * mult, 2)}
        for label, mult in presets
    ]

    return {"system": system, "base_load_mw": base_load, "scenarios": scenarios}
api/routes/grid.py ADDED
@@ -0,0 +1,306 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Grid endpoint — Returns grid topology data for visualization.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ import math
7
+ from fastapi import APIRouter, HTTPException, Depends
8
+
9
+ from pydantic import BaseModel, Field
10
+
11
+ from src.grid.loader import load_network, get_bus_data, get_topology_data, get_network_summary
12
+ from src.grid.power_flow import (
13
+ check_radial_connected,
14
+ check_topology_valid,
15
+ try_repair_connectivity,
16
+ is_distribution_grid,
17
+ apply_topology,
18
+ run_power_flow,
19
+ extract_results,
20
+ )
21
+ from api.auth import optional_auth, FirebaseUser
22
+
23
+ router = APIRouter()
24
+
25
+
26
def compute_bus_positions(net) -> dict[int, dict]:
    """Compute 2D positions for buses using scaled graph layouts.

    - Smaller systems (<= 40 buses): layered radial layout from slack bus.
    - Larger systems: force-directed layout for better distribution.

    Returns a mapping ``bus_index -> {"x": float, "y": float}`` in pixel-like
    coordinates sized for the frontend canvas.
    """
    import networkx as nx

    def scale_positions(raw_positions: dict, width: float, height: float, padding: float) -> dict:
        # Linearly map the raw layout bounding box into a (width x height)
        # canvas, keeping `padding` pixels of margin on every side.
        xs = [p[0] for p in raw_positions.values()]
        ys = [p[1] for p in raw_positions.values()]
        if not xs or not ys:
            return {}
        min_x, max_x = min(xs), max(xs)
        min_y, max_y = min(ys), max(ys)
        # Guard against a degenerate (zero-size) bounding box.
        span_x = max(max_x - min_x, 1e-6)
        span_y = max(max_y - min_y, 1e-6)
        scaled = {}
        for node, (x, y) in raw_positions.items():
            nx_pos = padding + (x - min_x) / span_x * (width - 2 * padding)
            ny_pos = padding + (y - min_y) / span_y * (height - 2 * padding)
            scaled[node] = {"x": float(nx_pos), "y": float(ny_pos)}
        return scaled

    def spread_positions(raw_positions: dict, min_dist: float, steps: int) -> dict:
        # Simple repulsion to reduce overlap in dense layouts.
        # O(steps * n^2) pairwise pass; acceptable for the bus counts here.
        if not raw_positions:
            return raw_positions
        positions = {node: [float(x), float(y)] for node, (x, y) in raw_positions.items()}
        nodes = list(positions.keys())
        for _ in range(steps):
            moved = False
            for i in range(len(nodes)):
                for j in range(i + 1, len(nodes)):
                    a = nodes[i]
                    b = nodes[j]
                    dx = positions[a][0] - positions[b][0]
                    dy = positions[a][1] - positions[b][1]
                    dist_sq = dx * dx + dy * dy
                    if dist_sq == 0:
                        # Coincident nodes: nudge along x to define a direction.
                        dx, dy = 1e-3, 0.0
                        dist_sq = dx * dx + dy * dy
                    dist = math.sqrt(dist_sq)
                    if dist < min_dist:
                        # Push both nodes apart, half the deficit each.
                        push = (min_dist - dist) * 0.5
                        nx_dir = dx / dist
                        ny_dir = dy / dist
                        positions[a][0] += nx_dir * push
                        positions[a][1] += ny_dir * push
                        positions[b][0] -= nx_dir * push
                        positions[b][1] -= ny_dir * push
                        moved = True
            if not moved:
                # Converged early: no pair was closer than min_dist.
                break
        return {node: (pos[0], pos[1]) for node, pos in positions.items()}

    # Build graph from all lines to preserve visual continuity
    G = nx.Graph()
    for idx in net.bus.index:
        G.add_node(int(idx))

    for _, row in net.line.iterrows():
        G.add_edge(int(row["from_bus"]), int(row["to_bus"]))

    n_buses = len(net.bus)
    # Root the radial layout at the slack (external-grid) bus when one exists.
    slack_bus = int(net.ext_grid.bus.iloc[0]) if len(net.ext_grid) > 0 else 0

    if n_buses <= 40:
        # Layered radial layout: BFS depth from the slack bus defines the
        # x-coordinate layer; nodes within a layer are spread along y.
        layers = {slack_bus: 0}
        visited = {slack_bus}
        queue = [slack_bus]

        while queue:
            node = queue.pop(0)
            for neighbor in G.neighbors(node):
                if neighbor not in visited:
                    visited.add(neighbor)
                    layers[neighbor] = layers[node] + 1
                    queue.append(neighbor)

        # Nodes unreachable from the slack bus fall back to layer 0.
        for node in G.nodes():
            if node not in layers:
                layers[node] = 0

        layer_groups: dict[int, list[int]] = {}
        for node, layer in layers.items():
            layer_groups.setdefault(layer, []).append(node)

        raw_positions = {}
        for layer, nodes in layer_groups.items():
            x = float(layer)
            n_nodes = len(nodes)
            for i, node in enumerate(sorted(nodes)):
                if n_nodes == 1:
                    y = 0.0
                else:
                    # Evenly distribute the layer's nodes in [0, 1].
                    y = float(i) / (n_nodes - 1)
                raw_positions[node] = (x, y)

        return scale_positions(raw_positions, width=1600, height=900, padding=60)

    # Force-directed layout for larger systems
    # Use Kamada-Kawai for better distribution than spring_layout
    try:
        raw_positions = nx.kamada_kawai_layout(G)
    except Exception:
        # Kamada-Kawai can fail (e.g. disconnected graphs); fall back to a
        # deterministic spring layout.
        raw_positions = nx.spring_layout(
            G,
            seed=42,
            k=3.0 / max(math.sqrt(n_buses), 1.0),
            iterations=300,
        )
    raw_positions = spread_positions(raw_positions, min_dist=0.06, steps=10)
    return scale_positions(raw_positions, width=3200, height=2000, padding=100)
141
+
142
+
143
@router.get("/grid")
def get_grid(system: str = "case33bw", user: FirebaseUser = Depends(optional_auth)):
    """Get grid topology for visualization.

    Returns nodes (buses) and branches (lines) with positions and status,
    shaped for a React Flow-style frontend (node/edge dicts with ``type``,
    ``position`` and ``data`` keys).
    """
    try:
        net = load_network(system)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))

    # Get bus and line data
    buses = get_bus_data(net)
    lines = get_topology_data(net)
    summary = get_network_summary(net)

    # Compute positions for visualization
    positions = compute_bus_positions(net)

    # Build nodes response — one frontend node per bus.
    nodes = []
    for bus in buses:
        # Buses missing from the layout (shouldn't happen) land at the origin.
        pos = positions.get(bus["index"], {"x": 0, "y": 0})
        nodes.append({
            "id": f"bus_{bus['index']}",
            "data": {
                "label": f"Bus {bus['index']}",
                "busId": bus["index"],
                "vn_kv": bus["vn_kv"],
                "load_mw": bus["load_mw"],
                "load_mvar": bus["load_mvar"],
                "is_slack": bus["is_slack"],
            },
            "position": pos,
            "type": "busNode",
        })

    # Build branches response — one frontend edge per line, colour-coded by
    # switch state (green = closed/in service, red = open).
    branches = []
    for line in lines:
        branches.append({
            "id": f"line_{line['index']}",
            "source": f"bus_{line['from_bus']}",
            "target": f"bus_{line['to_bus']}",
            "data": {
                "lineId": line["index"],
                "from_bus": line["from_bus"],
                "to_bus": line["to_bus"],
                "r_ohm_per_km": line["r_ohm_per_km"],
                "x_ohm_per_km": line["x_ohm_per_km"],
                "length_km": line["length_km"],
                "in_service": line["in_service"],
                "is_tie": line["is_tie"],
            },
            "type": "switchEdge",
            "animated": not line["in_service"],  # Animate open switches
            "style": {
                "stroke": "#22c55e" if line["in_service"] else "#ef4444",
                "strokeWidth": 2,
            },
        })

    return {
        "system": system,
        "summary": summary,
        "nodes": nodes,
        "branches": branches,
    }
211
+
212
+
213
class SetOutOfServiceRequest(BaseModel):
    """Request body for ``POST /grid/set-out-of-service``."""

    system: str = Field(default="case33bw", description="IEEE test system name")
    out_of_service_lines: list[int] = Field(description="Line indices to set as OUT OF SERVICE")
216
+
217
+
218
@router.post("/grid/set-out-of-service")
def set_out_of_service(req: SetOutOfServiceRequest, user: FirebaseUser = Depends(optional_auth)):
    """Set specific lines as out of service and return updated grid + power flow.

    Steps: validate indices → check connectivity (auto-repairing by keeping
    some requested lines closed if needed) → warn on non-radial distribution
    topologies → run power flow and return results with any warnings.
    """
    try:
        net = load_network(req.system)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))

    # Validate line indices against the lines that actually exist.
    valid_lines = set(net.line.index.tolist())
    invalid = [l for l in req.out_of_service_lines if l not in valid_lines]
    if invalid:
        raise HTTPException(status_code=400, detail=f"Invalid line indices: {invalid}")

    # ── Connectivity check with auto-repair ──
    warnings = []
    working_oos = list(req.out_of_service_lines)

    if not check_topology_valid(net, working_oos, require_radial=False):
        # Try to restore connectivity by closing tie-lines automatically
        repaired, auto_closed = try_repair_connectivity(net, working_oos)
        if repaired is not None:
            working_oos = repaired
            closed_str = ", ".join(str(l) for l in auto_closed)
            n_buses = len(net.bus)
            # A connected graph on n buses needs >= n-1 lines in service.
            max_open = len(net.line) - (n_buses - 1)
            warnings.append(
                f"To maintain connectivity, line(s) {closed_str} were automatically "
                f"kept in service (closed). The {n_buses}-bus system supports at "
                f"most {max_open} open lines while staying connected."
            )
        else:
            # Repair failed: the requested configuration is unserviceable.
            return {
                "valid": False,
                "error": "Configuration is not connected. All buses must remain reachable.",
            }

    # Distribution grids are normally radial — warn (but don't reject) loops.
    is_dist = is_distribution_grid(net)
    if is_dist and not check_topology_valid(net, working_oos, require_radial=True):
        n_buses = len(net.bus)
        n_required_open = len(net.line) - (n_buses - 1)
        warnings.append(
            f"Configuration has loops (not radial). Distribution grids are normally "
            f"operated as trees. For {n_buses}-bus system you need exactly "
            f"{n_required_open} open lines to maintain radiality."
        )

    # Apply topology and run power flow
    net_new = apply_topology(net, working_oos)
    if not run_power_flow(net_new):
        return {
            "valid": False,
            "error": "Power flow did not converge for this configuration.",
        }

    results = extract_results(net_new)
    results["open_lines"] = working_oos

    resp = {
        "valid": True,
        "system": req.system,
        "open_lines": working_oos,
        "power_flow": results,
    }
    if warnings:
        resp["warnings"] = warnings
    return resp
285
+
286
+
287
@router.get("/grid/{system}/switches")
def get_switch_states(system: str = "case33bw"):
    """Get current switch states for all lines."""
    try:
        net = load_network(system)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))

    # One entry per line; "closed" means the line is in service.
    switches = [
        {
            "line_id": entry["index"],
            "from_bus": entry["from_bus"],
            "to_bus": entry["to_bus"],
            "status": "closed" if entry["in_service"] else "open",
            "is_tie": entry["is_tie"],
        }
        for entry in get_topology_data(net)
    ]

    return {"system": system, "switches": switches}
api/routes/optimize.py CHANGED
@@ -27,6 +27,10 @@ class OptimizeRequest(BaseModel):
27
  quantum_iters: int = Field(default=300, description="SA iterations for quantum solver")
28
  quantum_restarts: int = Field(default=3, description="SA restarts")
29
  quantum_top_k: int = Field(default=5, description="Number of candidate topologies")
 
 
 
 
30
 
31
 
32
  @router.post("/optimize")
@@ -44,7 +48,7 @@ def optimize(req: OptimizeRequest):
44
  t_start = time.perf_counter()
45
 
46
  if req.method == "classical":
47
- result = branch_exchange_search(net, verbose=False)
48
  if "error" in result:
49
  raise HTTPException(status_code=500, detail=result["error"])
50
  optimized = evaluate_topology(net, result["best_open_lines"])
@@ -77,6 +81,7 @@ def optimize(req: OptimizeRequest):
77
  quantum_iters=req.quantum_iters,
78
  quantum_restarts=req.quantum_restarts,
79
  quantum_top_k=req.quantum_top_k,
 
80
  )
81
 
82
  if "error" in pipeline_result:
 
27
  quantum_iters: int = Field(default=300, description="SA iterations for quantum solver")
28
  quantum_restarts: int = Field(default=3, description="SA restarts")
29
  quantum_top_k: int = Field(default=5, description="Number of candidate topologies")
30
+ open_lines: list[int] | None = Field(
31
+ default=None,
32
+ description="User's current open lines to use as starting point for optimisation",
33
+ )
34
 
35
 
36
  @router.post("/optimize")
 
48
  t_start = time.perf_counter()
49
 
50
  if req.method == "classical":
51
+ result = branch_exchange_search(net, verbose=False, initial_open_lines=req.open_lines)
52
  if "error" in result:
53
  raise HTTPException(status_code=500, detail=result["error"])
54
  optimized = evaluate_topology(net, result["best_open_lines"])
 
81
  quantum_iters=req.quantum_iters,
82
  quantum_restarts=req.quantum_restarts,
83
  quantum_top_k=req.quantum_top_k,
84
+ initial_open_lines=req.open_lines,
85
  )
86
 
87
  if "error" in pipeline_result:
api/routes/report.py ADDED
@@ -0,0 +1,344 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Report endpoint — Generate PDF reports for optimization results.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ import io
7
+ import json
8
+ from datetime import datetime
9
+ from typing import Optional
10
+
11
+ from fastapi import APIRouter, HTTPException, Depends, Response
12
+ from pydantic import BaseModel, Field
13
+
14
+ from src.grid.loader import load_network
15
+ from src.grid.power_flow import get_baseline, evaluate_topology
16
+ from src.hybrid.pipeline import run_hybrid_pipeline
17
+ from src.evaluation.metrics import compute_impact
18
+ from api.auth import optional_auth, require_auth, FirebaseUser
19
+ from api.database import log_audit
20
+
21
+ router = APIRouter()
22
+
23
+
24
+ def generate_report_html(data: dict) -> str:
25
+ """Generate HTML report from optimization data."""
26
+
27
+ baseline = data.get("baseline", {})
28
+ optimized = data.get("optimized", {})
29
+ impact = data.get("impact", {})
30
+
31
+ html = f"""
32
+ <!DOCTYPE html>
33
+ <html>
34
+ <head>
35
+ <meta charset="UTF-8">
36
+ <title>OptiQ Optimization Report</title>
37
+ <style>
38
+ body {{
39
+ font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
40
+ margin: 40px;
41
+ color: #1e293b;
42
+ line-height: 1.6;
43
+ }}
44
+ .header {{
45
+ text-align: center;
46
+ border-bottom: 3px solid #0ea5e9;
47
+ padding-bottom: 20px;
48
+ margin-bottom: 30px;
49
+ }}
50
+ .logo {{
51
+ font-size: 36px;
52
+ font-weight: bold;
53
+ color: #0ea5e9;
54
+ }}
55
+ .subtitle {{
56
+ color: #64748b;
57
+ font-size: 14px;
58
+ }}
59
+ h1 {{ color: #0f172a; font-size: 24px; }}
60
+ h2 {{ color: #0ea5e9; font-size: 18px; border-bottom: 1px solid #e2e8f0; padding-bottom: 8px; }}
61
+ .summary-grid {{
62
+ display: grid;
63
+ grid-template-columns: repeat(3, 1fr);
64
+ gap: 20px;
65
+ margin: 20px 0;
66
+ }}
67
+ .summary-card {{
68
+ background: #f1f5f9;
69
+ padding: 20px;
70
+ border-radius: 8px;
71
+ text-align: center;
72
+ }}
73
+ .summary-value {{
74
+ font-size: 28px;
75
+ font-weight: bold;
76
+ color: #0ea5e9;
77
+ }}
78
+ .summary-label {{
79
+ color: #64748b;
80
+ font-size: 12px;
81
+ text-transform: uppercase;
82
+ }}
83
+ .highlight {{
84
+ background: linear-gradient(135deg, #0ea5e9, #22c55e);
85
+ color: white;
86
+ padding: 20px;
87
+ border-radius: 8px;
88
+ text-align: center;
89
+ margin: 20px 0;
90
+ }}
91
+ .highlight-value {{
92
+ font-size: 36px;
93
+ font-weight: bold;
94
+ }}
95
+ table {{
96
+ width: 100%;
97
+ border-collapse: collapse;
98
+ margin: 20px 0;
99
+ }}
100
+ th, td {{
101
+ padding: 12px;
102
+ text-align: left;
103
+ border-bottom: 1px solid #e2e8f0;
104
+ }}
105
+ th {{
106
+ background: #f1f5f9;
107
+ font-weight: 600;
108
+ }}
109
+ .good {{ color: #22c55e; }}
110
+ .neutral {{ color: #64748b; }}
111
+ .footer {{
112
+ margin-top: 40px;
113
+ text-align: center;
114
+ color: #94a3b8;
115
+ font-size: 12px;
116
+ border-top: 1px solid #e2e8f0;
117
+ padding-top: 20px;
118
+ }}
119
+ </style>
120
+ </head>
121
+ <body>
122
+ <div class="header">
123
+ <div class="logo">OptiQ</div>
124
+ <div class="subtitle">AI-Powered Grid Optimization Report</div>
125
+ </div>
126
+
127
+ <h1>Optimization Report</h1>
128
+ <p><strong>Generated:</strong> {datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC')}</p>
129
+ <p><strong>System:</strong> {data.get('system', 'IEEE 33-bus')} |
130
+ <strong>Method:</strong> {data.get('method', 'Hybrid Quantum-AI-Classical')}</p>
131
+
132
+ <div class="highlight">
133
+ <div class="highlight-value">{impact.get('loss_reduction_pct', 0):.1f}%</div>
134
+ <div>Total Loss Reduction</div>
135
+ </div>
136
+
137
+ <h2>Summary</h2>
138
+ <div class="summary-grid">
139
+ <div class="summary-card">
140
+ <div class="summary-value">{impact.get('loss_reduction_kw', 0):.1f}</div>
141
+ <div class="summary-label">kW Saved</div>
142
+ </div>
143
+ <div class="summary-card">
144
+ <div class="summary-value">{impact.get('energy_saved_mwh_year', 0):.1f}</div>
145
+ <div class="summary-label">MWh/Year Saved</div>
146
+ </div>
147
+ <div class="summary-card">
148
+ <div class="summary-value">${impact.get('cost_saved_usd_year', 0):,.0f}</div>
149
+ <div class="summary-label">Annual Savings</div>
150
+ </div>
151
+ </div>
152
+
153
+ <h2>Before vs After</h2>
154
+ <table>
155
+ <tr>
156
+ <th>Metric</th>
157
+ <th>Before (Baseline)</th>
158
+ <th>After (Optimized)</th>
159
+ <th>Improvement</th>
160
+ </tr>
161
+ <tr>
162
+ <td>Total Losses</td>
163
+ <td>{baseline.get('total_loss_kw', 0):.2f} kW</td>
164
+ <td>{optimized.get('total_loss_kw', 0):.2f} kW</td>
165
+ <td class="good">-{impact.get('loss_reduction_kw', 0):.2f} kW</td>
166
+ </tr>
167
+ <tr>
168
+ <td>Loss Percentage</td>
169
+ <td>{baseline.get('loss_pct', 0):.2f}%</td>
170
+ <td>{optimized.get('loss_pct', 0):.2f}%</td>
171
+ <td class="good">-{baseline.get('loss_pct', 0) - optimized.get('loss_pct', 0):.2f}%</td>
172
+ </tr>
173
+ <tr>
174
+ <td>Min Voltage</td>
175
+ <td>{baseline.get('min_voltage_pu', 0):.4f} pu</td>
176
+ <td>{optimized.get('min_voltage_pu', 0):.4f} pu</td>
177
+ <td class="good">+{optimized.get('min_voltage_pu', 0) - baseline.get('min_voltage_pu', 0):.4f} pu</td>
178
+ </tr>
179
+ <tr>
180
+ <td>Voltage Violations</td>
181
+ <td>{baseline.get('voltage_violations', 0)}</td>
182
+ <td>{optimized.get('voltage_violations', 0)}</td>
183
+ <td class="good">-{impact.get('voltage_violations_fixed', 0)}</td>
184
+ </tr>
185
+ </table>
186
+
187
+ <h2>Environmental Impact (Annual)</h2>
188
+ <div class="summary-grid">
189
+ <div class="summary-card">
190
+ <div class="summary-value">{impact.get('co2_saved_tonnes_year', 0):.1f}</div>
191
+ <div class="summary-label">Tonnes CO₂ Saved</div>
192
+ </div>
193
+ <div class="summary-card">
194
+ <div class="summary-value">{impact.get('equivalent_trees_planted', 0):,}</div>
195
+ <div class="summary-label">Trees Equivalent</div>
196
+ </div>
197
+ <div class="summary-card">
198
+ <div class="summary-value">{impact.get('equivalent_cars_removed', 0)}</div>
199
+ <div class="summary-label">Cars Off Road</div>
200
+ </div>
201
+ </div>
202
+
203
+ <h2>Switch Configuration</h2>
204
+ <table>
205
+ <tr>
206
+ <th>Parameter</th>
207
+ <th>Value</th>
208
+ </tr>
209
+ <tr>
210
+ <td>Open Lines (Optimized)</td>
211
+ <td>{json.dumps(optimized.get('open_lines', []))}</td>
212
+ </tr>
213
+ <tr>
214
+ <td>Computation Time</td>
215
+ <td>{data.get('total_time_sec', 0):.2f} seconds</td>
216
+ </tr>
217
+ </table>
218
+
219
+ <div class="footer">
220
+ <p>Generated by OptiQ — Hybrid Quantum-AI-Classical Grid Optimization Platform</p>
221
+ <p>© 2024 OptiQ Team | Mohammad Emad, Ahmed Samir, Loay Medhat</p>
222
+ </div>
223
+ </body>
224
+ </html>
225
+ """
226
+ return html
227
+
228
+
229
+ class ReportRequest(BaseModel):
230
+ system: str = Field(default="case33bw")
231
+ method: str = Field(default="hybrid")
232
+ include_optimization: bool = Field(default=True)
233
+
234
+
235
+ @router.post("/report")
236
+ def generate_report(req: ReportRequest, user: FirebaseUser = Depends(optional_auth)):
237
+ """Generate an HTML optimization report.
238
+
239
+ Returns HTML that can be printed as PDF from the browser.
240
+ """
241
+ try:
242
+ net = load_network(req.system)
243
+ except ValueError as exc:
244
+ raise HTTPException(status_code=400, detail=str(exc))
245
+
246
+ baseline = get_baseline(net)
247
+ if not baseline.get("converged"):
248
+ raise HTTPException(status_code=500, detail="Baseline power flow did not converge")
249
+
250
+ data = {
251
+ "system": req.system,
252
+ "method": req.method,
253
+ "baseline": baseline,
254
+ "generated_at": datetime.utcnow().isoformat(),
255
+ }
256
+
257
+ if req.include_optimization:
258
+ use_quantum = req.method in ("quantum", "hybrid")
259
+ use_ai = req.method in ("ai", "hybrid")
260
+
261
+ # Use classical branch-exchange for faster reports
262
+ if req.method == "classical" or not use_quantum:
263
+ from src.grid.reconfiguration import branch_exchange_search
264
+ reconfig = branch_exchange_search(net, verbose=False)
265
+ if "error" not in reconfig:
266
+ optimized = evaluate_topology(net, reconfig["best_open_lines"])
267
+ if optimized.get("converged"):
268
+ data["optimized"] = optimized
269
+ data["impact"] = compute_impact(baseline, optimized)
270
+ data["total_time_sec"] = reconfig.get("time_sec", 0)
271
+ else:
272
+ result = run_hybrid_pipeline(
273
+ net,
274
+ use_quantum=use_quantum,
275
+ use_ai=use_ai,
276
+ )
277
+
278
+ if "error" not in result:
279
+ data["optimized"] = result["optimized"]
280
+ data["impact"] = result["impact"]
281
+ data["total_time_sec"] = result["timings"]["total_sec"]
282
+
283
+ # Log audit for ALL methods (not just hybrid)
284
+ if user and data.get("impact"):
285
+ try:
286
+ impact = data["impact"]
287
+ log_audit(
288
+ firebase_uid=user.uid,
289
+ action="report_generated",
290
+ system=req.system,
291
+ method=req.method,
292
+ baseline_loss_kw=baseline["total_loss_kw"],
293
+ optimized_loss_kw=data["optimized"]["total_loss_kw"],
294
+ loss_reduction_pct=impact["loss_reduction_pct"],
295
+ energy_saved_kwh_year=impact["energy_saved_mwh_year"] * 1000,
296
+ co2_saved_tonnes_year=impact["co2_saved_tonnes_year"],
297
+ cost_saved_usd_year=impact["cost_saved_usd_year"],
298
+ open_lines_after=json.dumps(data["optimized"].get("open_lines", [])),
299
+ )
300
+ except Exception:
301
+ pass
302
+
303
+ html = generate_report_html(data)
304
+
305
+ return Response(
306
+ content=html,
307
+ media_type="text/html",
308
+ headers={"Content-Disposition": "inline; filename=optiq_report.html"}
309
+ )
310
+
311
+
312
+ @router.get("/report/data")
313
+ def get_report_data(system: str = "case33bw", method: str = "hybrid", user: FirebaseUser = Depends(optional_auth)):
314
+ """Get report data as JSON (for frontend rendering)."""
315
+ try:
316
+ net = load_network(system)
317
+ except ValueError as exc:
318
+ raise HTTPException(status_code=400, detail=str(exc))
319
+
320
+ baseline = get_baseline(net)
321
+ if not baseline.get("converged"):
322
+ raise HTTPException(status_code=500, detail="Baseline power flow did not converge")
323
+
324
+ use_quantum = method in ("quantum", "hybrid")
325
+ use_ai = method in ("ai", "hybrid")
326
+
327
+ result = run_hybrid_pipeline(
328
+ net,
329
+ use_quantum=use_quantum,
330
+ use_ai=use_ai,
331
+ )
332
+
333
+ if "error" in result:
334
+ raise HTTPException(status_code=500, detail=result["error"])
335
+
336
+ return {
337
+ "system": system,
338
+ "method": method,
339
+ "baseline": baseline,
340
+ "optimized": result["optimized"],
341
+ "impact": result["impact"],
342
+ "timings": result["timings"],
343
+ "generated_at": datetime.utcnow().isoformat(),
344
+ }
api/routes/roi.py ADDED
@@ -0,0 +1,166 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ROI Calculator endpoint — Calculate return on investment for OptiQ deployment.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ from fastapi import APIRouter, Depends
7
+ from pydantic import BaseModel, Field
8
+
9
+ from config import CFG
10
+ from api.auth import optional_auth, FirebaseUser
11
+
12
+ router = APIRouter()
13
+
14
+
15
# OptiQ pricing tiers (from README)
PRICING = {
    "saas": {
        "name": "SaaS Subscription",
        "price_per_feeder_month": 200,   # USD per feeder per month
        "price_per_feeder_year": 2400,   # USD per feeder per year (12 x monthly)
        "description": "Pay-as-you-go pricing. Best for small deployments.",
    },
    "revenue_share": {
        "name": "Revenue Share",
        "share_pct": 15,                 # percentage of verified savings paid to OptiQ
        "description": "No upfront cost. Pay 15% of verified savings.",
    },
    "enterprise": {
        "name": "Enterprise License",
        "annual_fee": 500000,            # flat USD fee covering up to max_feeders
        "max_feeders": 1000,             # feeders included in the flat fee
        "price_per_feeder_year": 500,    # USD/year for each feeder beyond max_feeders
        "description": "Flat annual fee for up to 1,000 feeders.",
    },
}

# Impact per feeder (from README benchmarks) — all values are per feeder per year.
PER_FEEDER_IMPACT = {
    "loss_reduction_pct": 31.15,
    "energy_saved_kwh_year": 553020,
    "co2_saved_tonnes_year": 26.3,
    "cost_saved_subsidised_year": 16591,  # at $0.03/kWh
    "cost_saved_real_year": 44242,  # at $0.08/kWh
}
45
+
46
+
47
class ROIRequest(BaseModel):
    # Request body for POST /roi. pricing_model should be one of the PRICING
    # keys ("saas", "revenue_share", "enterprise"); see calculate_roi.
    number_of_feeders: int = Field(ge=1, le=100000, description="Number of feeders to deploy")
    pricing_model: str = Field(default="saas", description="Pricing model: saas, revenue_share, or enterprise")
    use_real_cost: bool = Field(default=True, description="Use real electricity cost (True) or subsidised (False)")
51
+
52
+
53
@router.post("/roi")
def calculate_roi(req: ROIRequest, user: FirebaseUser = Depends(optional_auth)):
    """Calculate ROI for OptiQ deployment.

    Returns annual savings, OptiQ cost, and net profit for the given number
    of feeders.

    Raises
    ------
    HTTPException
        400 if ``req.pricing_model`` is not one of the PRICING keys.
    """
    # Local import keeps the module-level import list unchanged.
    from fastapi import HTTPException

    # Validate the pricing model up front: an unknown key would otherwise
    # raise KeyError and surface to the client as an opaque 500.
    pricing = PRICING.get(req.pricing_model)
    if pricing is None:
        raise HTTPException(
            status_code=400,
            detail=f"Unknown pricing model '{req.pricing_model}'. Choose from {list(PRICING)}",
        )

    n = req.number_of_feeders
    impact = PER_FEEDER_IMPACT

    # Per-feeder savings depend on which electricity tariff is assumed.
    if req.use_real_cost:
        savings_per_feeder = impact["cost_saved_real_year"]
        electricity_rate = 0.08
    else:
        savings_per_feeder = impact["cost_saved_subsidised_year"]
        electricity_rate = 0.03

    total_savings = savings_per_feeder * n
    total_energy_saved = impact["energy_saved_kwh_year"] * n
    total_co2_saved = impact["co2_saved_tonnes_year"] * n

    # OptiQ cost under the selected pricing model.
    if req.pricing_model == "saas":
        optiq_cost = pricing["price_per_feeder_year"] * n
        effective_price_per_feeder = pricing["price_per_feeder_year"]
    elif req.pricing_model == "revenue_share":
        optiq_cost = total_savings * (pricing["share_pct"] / 100)
        effective_price_per_feeder = optiq_cost / n if n > 0 else 0
    else:  # enterprise
        if n <= pricing["max_feeders"]:
            optiq_cost = pricing["annual_fee"]
        else:
            # Feeders beyond the cap are billed at the per-feeder rate.
            optiq_cost = pricing["annual_fee"] + (n - pricing["max_feeders"]) * pricing["price_per_feeder_year"]
        effective_price_per_feeder = optiq_cost / n if n > 0 else 0

    net_profit = total_savings - optiq_cost
    roi_pct = (net_profit / optiq_cost * 100) if optiq_cost > 0 else float("inf")
    savings_to_cost_ratio = total_savings / optiq_cost if optiq_cost > 0 else float("inf")
    # NOTE(review): payback reports 0 when total_savings == 0 — the frontend
    # apparently treats 0 as "n/a"; confirm before changing.
    payback_months = (optiq_cost / (total_savings / 12)) if total_savings > 0 else float("inf")

    return {
        "number_of_feeders": n,
        "pricing_model": req.pricing_model,
        "pricing_details": pricing,
        "electricity_rate_usd_kwh": electricity_rate,

        "per_feeder": {
            "energy_saved_kwh_year": impact["energy_saved_kwh_year"],
            "co2_saved_tonnes_year": impact["co2_saved_tonnes_year"],
            "cost_saved_usd_year": savings_per_feeder,
            "loss_reduction_pct": impact["loss_reduction_pct"],
        },

        "annual_totals": {
            "energy_saved_mwh": round(total_energy_saved / 1000, 2),
            "energy_saved_gwh": round(total_energy_saved / 1_000_000, 4),
            "co2_saved_tonnes": round(total_co2_saved, 2),
            "utility_savings_usd": round(total_savings, 2),
            "optiq_cost_usd": round(optiq_cost, 2),
            "net_profit_usd": round(net_profit, 2),
        },

        "metrics": {
            "roi_pct": round(roi_pct, 1) if roi_pct != float("inf") else "∞",
            "savings_to_cost_ratio": round(savings_to_cost_ratio, 1) if savings_to_cost_ratio != float("inf") else "∞",
            "payback_months": round(payback_months, 1) if payback_months != float("inf") else 0,
            "effective_price_per_feeder_year": round(effective_price_per_feeder, 2),
        },

        "equivalents": {
            "trees_planted": int(total_co2_saved * 1000 / 21),
            "cars_removed": round(total_co2_saved / 4.6, 0),
            "homes_powered": int(total_energy_saved / 10000),  # Average home ~10 MWh/year
        },
    }
131
+
132
+
133
@router.get("/roi/pricing")
def get_pricing(user: FirebaseUser = Depends(optional_auth)):
    """Expose the pricing catalogue, per-feeder impact figures and Egypt factors."""
    egypt = CFG.egypt
    return {
        "pricing_models": PRICING,
        "per_feeder_impact": PER_FEEDER_IMPACT,
        "factors": {
            "egypt_emission_factor_kg_kwh": egypt.emission_factor,
            "electricity_price_subsidised": egypt.electricity_price_subsidised,
            "electricity_price_real": egypt.electricity_price_real,
        },
    }
145
+
146
+
147
@router.get("/roi/comparison")
def compare_pricing(number_of_feeders: int = 100, user: FirebaseUser = Depends(optional_auth)):
    """Compare all pricing models for a given number of feeders.

    Runs :func:`calculate_roi` once per model and recommends the one with the
    highest net profit.

    Raises
    ------
    HTTPException
        400 if ``number_of_feeders`` is outside the supported range.
    """
    from fastapi import HTTPException

    # ROIRequest enforces 1 <= n <= 100000; check it here so an out-of-range
    # query parameter yields a clean 400 instead of a pydantic ValidationError
    # bubbling up as a 500.
    if not 1 <= number_of_feeders <= 100000:
        raise HTTPException(
            status_code=400,
            detail="number_of_feeders must be between 1 and 100000",
        )

    results = {}
    for model in PRICING:
        req = ROIRequest(number_of_feeders=number_of_feeders, pricing_model=model)
        results[model] = calculate_roi(req, user)

    # The recommended model is the one with the highest net profit.
    best_model = max(
        results.keys(),
        key=lambda m: results[m]["annual_totals"]["net_profit_usd"]
    )

    return {
        "number_of_feeders": number_of_feeders,
        "comparison": results,
        "recommended": best_model,
        "recommendation_reason": f"{PRICING[best_model]['name']} offers the highest net profit for {number_of_feeders} feeders",
    }
api/routes/simulate.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Simulate endpoint — Manual switch control and power flow simulation.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ import json
7
+ from typing import Optional
8
+ from fastapi import APIRouter, HTTPException, Depends
9
+ from pydantic import BaseModel, Field
10
+
11
+ from src.grid.loader import load_network, clone_network, get_line_info
12
+ from src.grid.power_flow import apply_topology, run_power_flow, extract_results, check_radial_connected, check_topology_valid
13
+ from src.evaluation.metrics import compute_impact
14
+ from api.auth import optional_auth, require_auth, FirebaseUser
15
+ from api.database import log_usage, log_audit
16
+
17
+ router = APIRouter()
18
+
19
+
20
class SimulateRequest(BaseModel):
    # Request body for POST /simulate: which network to load, which line
    # indices to force open, and an optional uniform load scaling factor.
    system: str = Field(default="case33bw", description="IEEE test system name")
    open_lines: list[int] = Field(description="List of line indices to set as OPEN")
    load_multiplier: float = Field(default=1.0, ge=0.5, le=2.0, description="Load scaling factor")
24
+
25
+
26
# Annualisation and Egypt-specific factors used when logging usage.
_HOURS_PER_YEAR = 8760
_CO2_KG_PER_KWH = 0.50   # Egypt grid emission factor — TODO: confirm vs CFG.egypt
_PRICE_USD_PER_KWH = 0.08  # real (unsubsidised) electricity price


def _log_simulation_usage(user: FirebaseUser, req: SimulateRequest, baseline: dict, simulated: dict) -> None:
    """Best-effort usage logging for an authenticated simulation run.

    Annualises the kW loss reduction and derives CO2/cost savings. Any
    failure is swallowed so logging can never break the API response.
    """
    try:
        saved_kw = baseline["total_loss_kw"] - simulated["total_loss_kw"]
        saved_kwh = saved_kw * _HOURS_PER_YEAR
        log_usage(
            firebase_uid=user.uid,
            system=req.system,
            method="simulate",
            baseline_loss_kw=baseline["total_loss_kw"],
            optimized_loss_kw=simulated["total_loss_kw"],
            energy_saved_kwh=saved_kwh,
            co2_saved_kg=saved_kwh * _CO2_KG_PER_KWH,
            money_saved_usd=saved_kwh * _PRICE_USD_PER_KWH,
            computation_time_sec=0.0,
            load_multiplier=req.load_multiplier,
            switches_changed=json.dumps(req.open_lines),
        )
    except Exception:
        pass  # Don't fail request if logging fails


@router.post("/simulate")
def simulate(req: SimulateRequest, user: FirebaseUser = Depends(optional_auth)):
    """Simulate power flow with custom switch configuration.

    Allows manual open/close of switches and recomputes losses.

    Raises
    ------
    HTTPException
        400 for an unknown system name or a topology that disconnects buses,
        500 if either the baseline or the simulated power flow diverges.
    """
    try:
        net = load_network(req.system)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))

    # Scale loads uniformly if requested; the baseline below uses the same
    # scaling so the before/after comparison stays apples-to-apples.
    if req.load_multiplier != 1.0:
        net.load["p_mw"] *= req.load_multiplier
        net.load["q_mvar"] *= req.load_multiplier

    # Baseline run on a copy so the simulated topology starts from the
    # untouched network object.
    net_baseline = clone_network(net)
    if not run_power_flow(net_baseline):
        raise HTTPException(status_code=500, detail="Baseline power flow did not converge")
    baseline = extract_results(net_baseline)

    # Check connectivity (required) — radiality is a soft warning, not a hard block
    if not check_topology_valid(net, req.open_lines, require_radial=False):
        raise HTTPException(
            status_code=400,
            detail="Invalid topology: all buses must remain connected"
        )

    # Apply the requested topology and run power flow on it.
    net_sim = apply_topology(net, req.open_lines)
    if not run_power_flow(net_sim):
        raise HTTPException(status_code=500, detail="Power flow did not converge for this configuration")

    simulated = extract_results(net_sim)
    simulated["open_lines"] = req.open_lines

    # Compute impact metrics relative to the baseline run.
    impact = compute_impact(baseline, simulated)

    # Persist usage for authenticated callers only; never fatal.
    if user:
        _log_simulation_usage(user, req, baseline, simulated)

    return {
        "system": req.system,
        "load_multiplier": req.load_multiplier,
        "baseline": baseline,
        "simulated": simulated,
        "impact": impact,
        "open_lines": req.open_lines,
    }
100
+
101
+
102
class ToggleSwitchRequest(BaseModel):
    # Request body for POST /simulate/toggle: flip one line's open/closed
    # state relative to the caller's current open-line configuration.
    system: str = Field(default="case33bw")
    line_id: int = Field(description="Line index to toggle")
    current_open_lines: list[int] = Field(description="Current list of open line indices")
106
+
107
+
108
@router.post("/simulate/toggle")
def toggle_switch(req: ToggleSwitchRequest, user: FirebaseUser = Depends(optional_auth)):
    """Toggle a single switch and return the new configuration validity.

    The line is closed if it is currently open and opened otherwise.
    Connectivity is a hard requirement; non-radiality only produces a
    warning on distribution grids.
    """
    # Use the topology layer's own classification instead of sniffing "33"
    # out of the system name — the string heuristic breaks for any new
    # system identifier that happens to (not) contain "33".
    from src.grid.power_flow import is_distribution_grid

    try:
        net = load_network(req.system)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))

    # Compute the new open-line set: remove if present, append otherwise.
    if req.line_id in req.current_open_lines:
        new_open_lines = [l for l in req.current_open_lines if l != req.line_id]
        action = "closed"
    else:
        new_open_lines = req.current_open_lines + [req.line_id]
        action = "opened"

    # Check connectivity (hard requirement) — loops are allowed
    is_connected = check_topology_valid(net, new_open_lines, require_radial=False)
    is_distribution = is_distribution_grid(net)
    is_radial = check_topology_valid(net, new_open_lines, require_radial=True) if is_connected else False

    result = {
        "line_id": req.line_id,
        "action": action,
        "new_open_lines": new_open_lines,
        "is_valid": is_connected,
    }

    if is_connected:
        # Run power flow — pandapower handles meshed grids fine
        net_sim = apply_topology(net, new_open_lines)
        if run_power_flow(net_sim):
            result["power_flow"] = extract_results(net_sim)
            if is_distribution and not is_radial:
                result["warnings"] = ["Configuration has loops — distribution grids are normally operated radially."]
        else:
            result["is_valid"] = False
            result["error"] = "Power flow did not converge"
    else:
        result["error"] = "Configuration disconnects the network — all buses must remain reachable"

    return result
api/routes/usage.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Usage endpoint — User usage tracking and analytics.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ from fastapi import APIRouter, HTTPException, Depends
7
+
8
+ from api.auth import require_auth, FirebaseUser
9
+ from api.database import get_user_usage, get_user_stats, get_or_create_user
10
+
11
+ router = APIRouter()
12
+
13
+
14
@router.get("/usage")
def get_usage(limit: int = 100, user: FirebaseUser = Depends(require_auth)):
    """Return the authenticated user's usage history, capped at *limit* rows."""
    records = get_user_usage(user.uid, limit=limit)
    return {
        "user_id": user.uid,
        "email": user.email,
        "usage": records,
        "count": len(records),
    }
24
+
25
+
26
@router.get("/usage/stats")
def get_usage_stats(user: FirebaseUser = Depends(require_auth)):
    """Get aggregated usage statistics for the authenticated user.

    Combines the database's aggregate stats with per-method and per-system
    run counts derived from the user's usage history (last 1000 records).
    """
    from collections import Counter

    # Ensure user exists in database before aggregating.
    get_or_create_user(user.uid, user.email, user.name)

    stats = get_user_stats(user.uid)
    usage = get_user_usage(user.uid, limit=1000)

    # Frequency tables — Counter replaces the hand-rolled dict-increment loop.
    methods_used = Counter(u.get("method", "unknown") for u in usage)
    systems_used = Counter(u.get("system", "unknown") for u in usage)

    return {
        "user_id": user.uid,
        "email": user.email,
        "name": user.name,
        "stats": {
            **stats,
            "total_runs": len(usage),
            # Convert back to plain dicts so the JSON shape is unchanged.
            "methods_used": dict(methods_used),
            "systems_used": dict(systems_used),
        },
    }
57
+
58
+
59
@router.get("/usage/summary")
def get_usage_summary(user: FirebaseUser = Depends(require_auth)):
    """Return a dashboard-friendly summary of the user's cumulative impact."""
    get_or_create_user(user.uid, user.email, user.name)
    stats = get_user_stats(user.uid)

    # Public-communication equivalents for the CO2 figure.
    kg_co2 = stats["total_co2_saved_kg"]
    co2_tonnes = kg_co2 / 1000
    trees = int(kg_co2 / 21)            # ~21 kg CO2/tree/year
    cars = round(co2_tonnes / 4.6, 1)   # ~4.6 t CO2/car/year

    kwh = stats["total_energy_saved_kwh"]
    return {
        "total_optimizations": stats["total_optimizations"],
        "energy_saved": {
            "kwh": round(kwh, 2),
            "mwh": round(kwh / 1000, 2),
        },
        "co2_saved": {
            "kg": round(kg_co2, 2),
            "tonnes": round(co2_tonnes, 2),
        },
        "money_saved_usd": round(stats["total_money_saved_usd"], 2),
        "equivalents": {
            "trees_planted": trees,
            "cars_removed": cars,
        },
    }
frontend ADDED
@@ -0,0 +1 @@
 
 
1
+ Subproject commit f7f7f06ac15e28267319cfb42628cd4d0ec30c23
nginx/Dockerfile ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # ──────────────────────────────────────────────────
2
+ # OptiQ Nginx — Build React + serve static + proxy API
3
+ # ──────────────────────────────────────────────────
4
+
5
+ # ── Stage 1: Build the React frontend ─────────────
6
+ FROM node:20-alpine AS build
7
+ WORKDIR /app
8
+ COPY frontend/package*.json ./
9
+ RUN npm ci --no-audit --no-fund
10
+ COPY frontend/ .
11
+ RUN npm run build
12
+
13
+ # ── Stage 2: Nginx ────────────────────────────────
14
+ FROM nginx:alpine
15
+ COPY --from=build /app/dist /usr/share/nginx/html
16
+ COPY nginx/nginx.conf /etc/nginx/conf.d/default.conf
17
+ EXPOSE 80
18
+ CMD ["nginx", "-g", "daemon off;"]
nginx/nginx.conf ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ server {
2
+ listen 80;
3
+ server_name _;
4
+
5
+ # ── React SPA ─────────────────────────────────
6
+ root /usr/share/nginx/html;
7
+ index index.html;
8
+
9
+ location / {
10
+ try_files $uri $uri/ /index.html;
11
+ }
12
+
13
+ # ── FastAPI proxy ─────────────────────────────
14
+ location /api/ {
15
+ proxy_pass http://api:8000/api/;
16
+ proxy_http_version 1.1;
17
+ proxy_set_header Host $host;
18
+ proxy_set_header X-Real-IP $remote_addr;
19
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
20
+ proxy_set_header X-Forwarded-Proto $scheme;
21
+ proxy_read_timeout 120s;
22
+ proxy_send_timeout 120s;
23
+ }
24
+
25
+ # ── FastAPI docs (optional) ───────────────────
26
+ location /docs {
27
+ proxy_pass http://api:8000/docs;
28
+ proxy_set_header Host $host;
29
+ }
30
+ location /openapi.json {
31
+ proxy_pass http://api:8000/openapi.json;
32
+ proxy_set_header Host $host;
33
+ }
34
+
35
+ # ── Gzip ──────────────────────────────────────
36
+ gzip on;
37
+ gzip_types text/plain text/css application/json application/javascript text/xml application/xml;
38
+ gzip_min_length 256;
39
+ }
optiq.db ADDED
Binary file (32.8 kB). View file
 
requirements.txt CHANGED
@@ -17,6 +17,7 @@ torch-geometric>=2.7.0
17
  fastapi>=0.128.0
18
  uvicorn[standard]>=0.40.0
19
  python-multipart>=0.0.20
 
20
 
21
  # Visualization & data
22
  plotly>=6.5.0
 
17
  fastapi>=0.128.0
18
  uvicorn[standard]>=0.40.0
19
  python-multipart>=0.0.20
20
+ python-dotenv>=1.0.0
21
 
22
  # Visualization & data
23
  plotly>=6.5.0
src/grid/loader.py CHANGED
@@ -20,6 +20,8 @@ _LOADERS = {
20
 
21
  # IEEE 33-bus tie line indices (out of service in default config)
22
  IEEE33_TIE_LINES = [32, 33, 34, 35, 36]
 
 
23
 
24
 
25
  def load_network(system: Literal["case33bw", "case118"] = "case33bw") -> pp.pandapowerNet:
@@ -39,7 +41,20 @@ def load_network(system: Literal["case33bw", "case118"] = "case33bw") -> pp.pand
39
  loader = _LOADERS.get(system)
40
  if loader is None:
41
  raise ValueError(f"Unknown system '{system}'. Choose from {list(_LOADERS)}")
42
- return loader()
 
 
 
 
 
 
 
 
 
 
 
 
 
43
 
44
 
45
  def clone_network(net: pp.pandapowerNet) -> pp.pandapowerNet:
@@ -64,12 +79,14 @@ def get_line_info(net: pp.pandapowerNet) -> dict:
64
  all_idx = net.line.index.tolist()
65
  in_svc = net.line.index[net.line.in_service].tolist()
66
  out_svc = net.line.index[~net.line.in_service].tolist()
 
 
67
  return {
68
  "all": all_idx,
69
  "in_service": in_svc,
70
  "out_of_service": out_svc,
71
- "tie_lines": out_svc, # default out-of-service = tie lines
72
- "n_required_open": len(out_svc),
73
  }
74
 
75
 
@@ -95,7 +112,7 @@ def get_topology_data(net: pp.pandapowerNet) -> list[dict]:
95
  Each entry has: index, from_bus, to_bus, r_ohm, x_ohm, in_service, is_tie.
96
  """
97
  line_info = get_line_info(net)
98
- tie_set = set(line_info["out_of_service"])
99
  rows = []
100
  for idx, row in net.line.iterrows():
101
  rows.append({
 
20
 
21
  # IEEE 33-bus tie line indices (out of service in default config)
22
  IEEE33_TIE_LINES = [32, 33, 34, 35, 36]
23
+ # Default open lines for IEEE 33-bus in OptiQ (override to 3 OOS)
24
+ IEEE33_DEFAULT_OPEN_LINES = [32, 33, 34]
25
 
26
 
27
  def load_network(system: Literal["case33bw", "case118"] = "case33bw") -> pp.pandapowerNet:
 
41
  loader = _LOADERS.get(system)
42
  if loader is None:
43
  raise ValueError(f"Unknown system '{system}'. Choose from {list(_LOADERS)}")
44
+ net = loader()
45
+ net["optiq_system"] = system
46
+ net["optiq_is_distribution"] = system == "case33bw"
47
+
48
+ if system == "case33bw":
49
+ # Ensure only 3 lines are open by default for 33-bus
50
+ net["optiq_tie_lines"] = list(IEEE33_TIE_LINES)
51
+ net["optiq_default_open_lines"] = list(IEEE33_DEFAULT_OPEN_LINES)
52
+ net.line["in_service"] = True
53
+ for idx in IEEE33_DEFAULT_OPEN_LINES:
54
+ if idx in net.line.index:
55
+ net.line.at[idx, "in_service"] = False
56
+
57
+ return net
58
 
59
 
60
  def clone_network(net: pp.pandapowerNet) -> pp.pandapowerNet:
 
79
  all_idx = net.line.index.tolist()
80
  in_svc = net.line.index[net.line.in_service].tolist()
81
  out_svc = net.line.index[~net.line.in_service].tolist()
82
+ tie_lines = net.get("optiq_tie_lines", out_svc)
83
+ n_required_open = len(net.line) - (len(net.bus) - 1)
84
  return {
85
  "all": all_idx,
86
  "in_service": in_svc,
87
  "out_of_service": out_svc,
88
+ "tie_lines": list(tie_lines),
89
+ "n_required_open": n_required_open,
90
  }
91
 
92
 
 
112
  Each entry has: index, from_bus, to_bus, r_ohm, x_ohm, in_service, is_tie.
113
  """
114
  line_info = get_line_info(net)
115
+ tie_set = set(line_info["tie_lines"])
116
  rows = []
117
  for idx, row in net.line.iterrows():
118
  rows.append({
src/grid/power_flow.py CHANGED
@@ -97,12 +97,16 @@ def get_baseline(net: pp.pandapowerNet) -> dict:
97
  return extract_results(net_copy)
98
 
99
 
100
- def check_radial_connected(net: pp.pandapowerNet, open_lines: list[int]) -> bool:
101
- """Check if a topology is radial (tree) and fully connected.
102
 
103
  Builds a NetworkX graph from in-service lines and verifies:
104
- 1. All buses are reachable (connected)
105
- 2. The graph is a tree (no cycles, exactly N-1 edges for N nodes)
 
 
 
 
106
 
107
  Parameters
108
  ----------
@@ -110,11 +114,13 @@ def check_radial_connected(net: pp.pandapowerNet, open_lines: list[int]) -> bool
110
  The base network (not modified).
111
  open_lines : list[int]
112
  Line indices that are OUT of service.
 
 
113
 
114
  Returns
115
  -------
116
  bool
117
- ``True`` if the topology is a connected tree.
118
  """
119
  open_set = set(open_lines)
120
  G = nx.Graph()
@@ -122,7 +128,67 @@ def check_radial_connected(net: pp.pandapowerNet, open_lines: list[int]) -> bool
122
  for idx, row in net.line.iterrows():
123
  if idx not in open_set:
124
  G.add_edge(int(row["from_bus"]), int(row["to_bus"]))
125
- return nx.is_connected(G) and nx.is_tree(G)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
126
 
127
 
128
  def apply_topology(
@@ -154,10 +220,10 @@ def apply_topology(
154
  return net_copy
155
 
156
 
157
- def evaluate_topology(net: pp.pandapowerNet, open_lines: list[int]) -> dict:
158
  """Apply a topology and evaluate it via AC power flow.
159
 
160
- First checks radiality/connectivity, then runs AC power flow.
161
 
162
  Parameters
163
  ----------
@@ -165,6 +231,8 @@ def evaluate_topology(net: pp.pandapowerNet, open_lines: list[int]) -> dict:
165
  Base network.
166
  open_lines : list[int]
167
  Line indices to set out of service.
 
 
168
 
169
  Returns
170
  -------
@@ -172,8 +240,8 @@ def evaluate_topology(net: pp.pandapowerNet, open_lines: list[int]) -> dict:
172
  Full result dict from ``extract_results()``, plus ``open_lines``.
173
  If infeasible or power flow diverges: ``{"converged": False, ...}``.
174
  """
175
- # Feasibility check: must be a connected radial tree
176
- if not check_radial_connected(net, open_lines):
177
  return {"converged": False, "open_lines": open_lines, "reason": "not_radial_connected"}
178
 
179
  net_new = apply_topology(net, open_lines)
 
97
  return extract_results(net_copy)
98
 
99
 
100
+ def check_topology_valid(net: pp.pandapowerNet, open_lines: list[int], require_radial: bool = True) -> bool:
101
+ """Check if a topology is valid (connected, and optionally radial).
102
 
103
  Builds a NetworkX graph from in-service lines and verifies:
104
+ 1. All buses are reachable (connected graph)
105
+ 2. Optionally: the graph is a tree (radial no cycles)
106
+
107
+ For distribution networks (e.g. IEEE 33-bus) ``require_radial=True``
108
+ enforces a tree topology. For meshed transmission networks (e.g. IEEE
109
+ 118-bus) ``require_radial=False`` only checks connectivity.
110
 
111
  Parameters
112
  ----------
 
114
  The base network (not modified).
115
  open_lines : list[int]
116
  Line indices that are OUT of service.
117
+ require_radial : bool
118
+ If True, also verify the network is a tree (no loops). Default True.
119
 
120
  Returns
121
  -------
122
  bool
123
+ ``True`` if the topology passes all checks.
124
  """
125
  open_set = set(open_lines)
126
  G = nx.Graph()
 
128
  for idx, row in net.line.iterrows():
129
  if idx not in open_set:
130
  G.add_edge(int(row["from_bus"]), int(row["to_bus"]))
131
+ # Include transformers — they connect buses just like lines
132
+ if hasattr(net, "trafo") and len(net.trafo) > 0:
133
+ for _, row in net.trafo.iterrows():
134
+ G.add_edge(int(row["hv_bus"]), int(row["lv_bus"]))
135
+ if not nx.is_connected(G):
136
+ return False
137
+ if require_radial and not nx.is_tree(G):
138
+ return False
139
+ return True
140
+
141
+
142
def check_radial_connected(net: pp.pandapowerNet, open_lines: list[int]) -> bool:
    """Backward-compatible alias: validate a connected *tree* (radial) topology."""
    # require_radial defaults to True in check_topology_valid.
    return check_topology_valid(net, open_lines)
145
+
146
+
147
def is_distribution_grid(net: pp.pandapowerNet) -> bool:
    """Classify a network as distribution (radial) vs transmission (meshed).

    Prefers the explicit ``optiq_is_distribution`` flag set by the loader.
    Otherwise falls back to the tree criterion: a radial grid with N buses
    has exactly N - 1 in-service lines, while a meshed grid has more.
    """
    if "optiq_is_distribution" in net:
        return bool(net["optiq_is_distribution"])
    active_lines = int(net.line.in_service.sum())
    return active_lines == len(net.bus) - 1
158
+
159
+
160
def try_repair_connectivity(
    net: pp.pandapowerNet,
    open_lines: list[int],
) -> tuple[list[int] | None, list[int]]:
    """Try to restore connectivity by closing lines from the OOS list.

    Prioritises closing *default* tie-lines (the lines that are already
    out-of-service when the network is first loaded) because those are
    the natural candidates for toggling in a reconfiguration.

    Parameters
    ----------
    net : pp.pandapowerNet
        The base network (not modified).
    open_lines : list[int]
        Line indices currently requested to be out of service.

    Returns
    -------
    (repaired_oos, auto_closed) if a connected configuration was found.
    (None, []) if repair is impossible.
    """
    # Lines that are out of service in the network's as-loaded state.
    default_oos = set(net.line.index[~net.line.in_service].tolist())

    # Sort: try closing default (tie) lines first, then user-added feeders
    candidates = sorted(open_lines, key=lambda l: (l not in default_oos, l))

    repaired = list(open_lines)
    auto_closed: list[int] = []

    # Greedily close one candidate at a time; stop as soon as the remaining
    # open set yields a connected graph (radiality deliberately not required).
    for line_to_close in candidates:
        if check_topology_valid(net, repaired, require_radial=False):
            break
        repaired.remove(line_to_close)
        auto_closed.append(line_to_close)

    # Re-check: the loop can exhaust every candidate without ever connecting.
    if check_topology_valid(net, repaired, require_radial=False):
        return repaired, auto_closed
    return None, []
192
 
193
 
194
  def apply_topology(
 
220
  return net_copy
221
 
222
 
223
+ def evaluate_topology(net: pp.pandapowerNet, open_lines: list[int], require_radial: bool = True) -> dict:
224
  """Apply a topology and evaluate it via AC power flow.
225
 
226
+ First checks connectivity (and optionally radiality), then runs AC power flow.
227
 
228
  Parameters
229
  ----------
 
231
  Base network.
232
  open_lines : list[int]
233
  Line indices to set out of service.
234
+ require_radial : bool
235
+ If True, enforce tree topology (for distribution grids). Default True.
236
 
237
  Returns
238
  -------
 
240
  Full result dict from ``extract_results()``, plus ``open_lines``.
241
  If infeasible or power flow diverges: ``{"converged": False, ...}``.
242
  """
243
+ # Feasibility check
244
+ if not check_topology_valid(net, open_lines, require_radial=require_radial):
245
  return {"converged": False, "open_lines": open_lines, "reason": "not_radial_connected"}
246
 
247
  net_new = apply_topology(net, open_lines)
src/grid/reconfiguration.py CHANGED
@@ -23,6 +23,7 @@ def branch_exchange_search(
23
  net: pp.pandapowerNet,
24
  max_iterations: int = 100,
25
  verbose: bool = False,
 
26
  ) -> dict:
27
  """Heuristic branch-exchange search for optimal reconfiguration.
28
 
@@ -31,6 +32,12 @@ def branch_exchange_search(
31
  if total losses decrease. Terminates when no improving swap exists
32
  or max iterations reached.
33
 
 
 
 
 
 
 
34
  Returns
35
  -------
36
  dict with keys:
@@ -38,7 +45,12 @@ def branch_exchange_search(
38
  """
39
  start = time.perf_counter()
40
  line_info = get_line_info(net)
41
- current_open = list(line_info["out_of_service"])
 
 
 
 
 
42
  n_open = len(current_open)
43
 
44
  if n_open == 0:
 
23
  net: pp.pandapowerNet,
24
  max_iterations: int = 100,
25
  verbose: bool = False,
26
+ initial_open_lines: list[int] | None = None,
27
  ) -> dict:
28
  """Heuristic branch-exchange search for optimal reconfiguration.
29
 
 
32
  if total losses decrease. Terminates when no improving swap exists
33
  or max iterations reached.
34
 
35
+ Parameters
36
+ ----------
37
+ initial_open_lines : list[int] | None
38
+ If provided, use these as the starting open-line configuration
39
+ instead of the network’s default out-of-service lines.
40
+
41
  Returns
42
  -------
43
  dict with keys:
 
45
  """
46
  start = time.perf_counter()
47
  line_info = get_line_info(net)
48
+
49
+ # Use user-provided starting config or fall back to default
50
+ current_open = (
51
+ list(initial_open_lines) if initial_open_lines is not None
52
+ else list(line_info["out_of_service"])
53
+ )
54
  n_open = len(current_open)
55
 
56
  if n_open == 0:
src/hybrid/pipeline.py CHANGED
@@ -37,6 +37,7 @@ def run_hybrid_pipeline(
37
  quantum_restarts: int = 5,
38
  quantum_top_k: int = 5,
39
  ai_checkpoint: str | None = None,
 
40
  verbose: bool = False,
41
  ) -> dict:
42
  """Execute the full hybrid Quantum-AI-Classical pipeline.
@@ -86,18 +87,31 @@ def run_hybrid_pipeline(
86
  n_iter=quantum_iters,
87
  n_restarts=quantum_restarts,
88
  top_k=quantum_top_k,
 
89
  )
90
  timings["quantum_sec"] = round(time.perf_counter() - t1, 4)
91
 
92
- if verbose:
93
- print(f"[Hybrid] Quantum: {len(quantum_result['candidates'])} candidates "
94
- f"in {timings['quantum_sec']}s")
95
-
96
- candidates = [c["open_lines"] for c in quantum_result["candidates"]]
 
 
 
 
 
 
 
 
 
97
  else:
98
- # Default topology only
99
- line_info = get_line_info(net)
100
- candidates = [line_info["out_of_service"]]
 
 
 
101
  timings["quantum_sec"] = 0.0
102
 
103
  # --- Step 2 + 3: AI Prediction + Classical Verification ---
@@ -108,6 +122,10 @@ def run_hybrid_pipeline(
108
  result = None
109
  used_ai = False
110
 
 
 
 
 
111
  if use_ai:
112
  try:
113
  result = ai_warm_start_power_flow(net, open_lines, ai_checkpoint)
@@ -118,7 +136,7 @@ def run_hybrid_pipeline(
118
 
119
  # Fallback to classical evaluation if AI failed or wasn't used
120
  if result is None or not result.get("converged"):
121
- result = evaluate_topology(net, open_lines)
122
 
123
  if result.get("converged"):
124
  result["used_ai_warmstart"] = used_ai
 
37
  quantum_restarts: int = 5,
38
  quantum_top_k: int = 5,
39
  ai_checkpoint: str | None = None,
40
+ initial_open_lines: list[int] | None = None,
41
  verbose: bool = False,
42
  ) -> dict:
43
  """Execute the full hybrid Quantum-AI-Classical pipeline.
 
87
  n_iter=quantum_iters,
88
  n_restarts=quantum_restarts,
89
  top_k=quantum_top_k,
90
+ initial_open_lines=initial_open_lines,
91
  )
92
  timings["quantum_sec"] = round(time.perf_counter() - t1, 4)
93
 
94
+ if "error" in quantum_result:
95
+ # SA failed fall back to initial or default topology
96
+ if initial_open_lines is not None:
97
+ candidates = [initial_open_lines]
98
+ else:
99
+ line_info = get_line_info(net)
100
+ candidates = [line_info["out_of_service"]]
101
+ if verbose:
102
+ print(f"[Hybrid] Quantum SA error: {quantum_result['error']}. Using fallback topology.")
103
+ else:
104
+ if verbose:
105
+ print(f"[Hybrid] Quantum: {len(quantum_result['candidates'])} candidates "
106
+ f"in {timings['quantum_sec']}s")
107
+ candidates = [c["open_lines"] for c in quantum_result["candidates"]]
108
  else:
109
+ # Default or user-provided topology only
110
+ if initial_open_lines is not None:
111
+ candidates = [initial_open_lines]
112
+ else:
113
+ line_info = get_line_info(net)
114
+ candidates = [line_info["out_of_service"]]
115
  timings["quantum_sec"] = 0.0
116
 
117
  # --- Step 2 + 3: AI Prediction + Classical Verification ---
 
122
  result = None
123
  used_ai = False
124
 
125
+ # Auto-detect radiality requirement
126
+ from src.grid.power_flow import is_distribution_grid
127
+ require_radial = is_distribution_grid(net)
128
+
129
  if use_ai:
130
  try:
131
  result = ai_warm_start_power_flow(net, open_lines, ai_checkpoint)
 
136
 
137
  # Fallback to classical evaluation if AI failed or wasn't used
138
  if result is None or not result.get("converged"):
139
+ result = evaluate_topology(net, open_lines, require_radial=require_radial)
140
 
141
  if result.get("converged"):
142
  result["used_ai_warmstart"] = used_ai
src/quantum/qaoa_reconfig.py CHANGED
@@ -32,14 +32,18 @@ def solve_sa(
32
  T0: float = 50.0,
33
  cooling: float = 0.99,
34
  top_k: int | None = None,
 
35
  ) -> dict:
36
  """Solve reconfiguration using SA that directly minimises AC losses.
37
 
38
  At each step, proposes a swap (close one tie line, open one feeder line),
39
  checks radiality, runs AC power flow, and accepts/rejects by Metropolis.
40
 
41
- This is significantly faster than QUBO-based SA because it only evaluates
42
- feasible topologies and uses the true AC loss (not an approximation).
 
 
 
43
  """
44
  cfg = CFG.quantum
45
  top_k = top_k or cfg.top_k
@@ -47,28 +51,58 @@ def solve_sa(
47
  start = time.perf_counter()
48
  line_info = get_line_info(net)
49
  all_lines = line_info["all"]
50
- current_open = list(line_info["out_of_service"])
51
- n_open = len(current_open)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
 
53
  # Evaluate initial topology
54
- init_result = evaluate_topology(net, current_open)
55
  if not init_result.get("converged"):
56
- return {"error": "Default topology does not converge."}
 
 
 
57
 
58
  all_solutions = {} # (tuple of sorted open_lines) -> loss_kw
59
- best_global_open = list(current_open)
60
  best_global_loss = init_result["total_loss_kw"]
61
- all_solutions[tuple(sorted(current_open))] = best_global_loss
62
 
63
  for restart in range(n_restarts):
64
  rng = np.random.RandomState(restart * 13 + 42)
65
- current = list(line_info["out_of_service"]) # reset to default
66
  current_loss = init_result["total_loss_kw"]
67
  T = T0
68
 
69
  for it in range(n_iter):
70
  # Propose swap: close one open line, open one closed line
71
  closed_lines = [l for l in all_lines if l not in current]
 
 
72
  i = rng.choice(len(current)) # index into current open
73
  j = rng.choice(len(closed_lines)) # index into closed
74
 
@@ -81,16 +115,25 @@ def solve_sa(
81
  if key in all_solutions:
82
  cand_loss = all_solutions[key]
83
  else:
84
- # Check radiality
85
- if not check_radial_connected(net, candidate_sorted):
86
- T *= cooling
87
- continue
 
 
 
 
 
 
88
 
89
  # Run AC power flow
90
- res = evaluate_topology(net, candidate_sorted)
91
  if not res.get("converged"):
92
- T *= cooling
93
- continue
 
 
 
94
  cand_loss = res["total_loss_kw"]
95
  all_solutions[key] = cand_loss
96
 
 
32
  T0: float = 50.0,
33
  cooling: float = 0.99,
34
  top_k: int | None = None,
35
+ initial_open_lines: list[int] | None = None,
36
  ) -> dict:
37
  """Solve reconfiguration using SA that directly minimises AC losses.
38
 
39
  At each step, proposes a swap (close one tie line, open one feeder line),
40
  checks radiality, runs AC power flow, and accepts/rejects by Metropolis.
41
 
42
+ Parameters
43
+ ----------
44
+ initial_open_lines : list[int] | None
45
+ If provided, use these as the starting open-line configuration
46
+ instead of the network’s default out-of-service lines.
47
  """
48
  cfg = CFG.quantum
49
  top_k = top_k or cfg.top_k
 
51
  start = time.perf_counter()
52
  line_info = get_line_info(net)
53
  all_lines = line_info["all"]
54
+
55
+ # Use user-provided starting config or fall back to default
56
+ starting_open = (
57
+ list(initial_open_lines) if initial_open_lines is not None
58
+ else list(line_info["out_of_service"])
59
+ )
60
+ n_open = len(starting_open)
61
+
62
+ # Auto-detect: distribution (radial) vs transmission (meshed)
63
+ from src.grid.power_flow import is_distribution_grid
64
+ require_radial = is_distribution_grid(net)
65
+
66
+ if n_open == 0:
67
+ # No open lines → evaluate the default (all-closed) topology
68
+ result = evaluate_topology(net, [], require_radial=False)
69
+ if result.get("converged"):
70
+ return {
71
+ "best_open_lines": [],
72
+ "best_loss_kw": round(result["total_loss_kw"], 2),
73
+ "candidates": [{"open_lines": [], "loss_kw": round(result["total_loss_kw"], 2), "feasible": True}],
74
+ "n_evaluated": 1,
75
+ "time_sec": 0.0,
76
+ "method": "SA_Physics",
77
+ "n_restarts": 0,
78
+ "n_iter": 0,
79
+ }
80
+ return {"error": "No open lines to optimise and default topology does not converge."}
81
 
82
  # Evaluate initial topology
83
+ init_result = evaluate_topology(net, starting_open, require_radial=require_radial)
84
  if not init_result.get("converged"):
85
+ # Try without radiality constraint
86
+ init_result = evaluate_topology(net, starting_open, require_radial=False)
87
+ if not init_result.get("converged"):
88
+ return {"error": "Starting topology does not converge."}
89
 
90
  all_solutions = {} # (tuple of sorted open_lines) -> loss_kw
91
+ best_global_open = list(starting_open)
92
  best_global_loss = init_result["total_loss_kw"]
93
+ all_solutions[tuple(sorted(starting_open))] = best_global_loss
94
 
95
  for restart in range(n_restarts):
96
  rng = np.random.RandomState(restart * 13 + 42)
97
+ current = list(starting_open) # reset to user/default start
98
  current_loss = init_result["total_loss_kw"]
99
  T = T0
100
 
101
  for it in range(n_iter):
102
  # Propose swap: close one open line, open one closed line
103
  closed_lines = [l for l in all_lines if l not in current]
104
+ if len(current) == 0 or len(closed_lines) == 0:
105
+ break
106
  i = rng.choice(len(current)) # index into current open
107
  j = rng.choice(len(closed_lines)) # index into closed
108
 
 
115
  if key in all_solutions:
116
  cand_loss = all_solutions[key]
117
  else:
118
+ # Check topology validity (radial for distribution, connected for transmission)
119
+ if require_radial:
120
+ if not check_radial_connected(net, candidate_sorted):
121
+ T *= cooling
122
+ continue
123
+ else:
124
+ from src.grid.power_flow import check_topology_valid
125
+ if not check_topology_valid(net, candidate_sorted, require_radial=False):
126
+ T *= cooling
127
+ continue
128
 
129
  # Run AC power flow
130
+ res = evaluate_topology(net, candidate_sorted, require_radial=require_radial)
131
  if not res.get("converged"):
132
+ # Retry without radiality
133
+ res = evaluate_topology(net, candidate_sorted, require_radial=False)
134
+ if not res.get("converged"):
135
+ T *= cooling
136
+ continue
137
  cand_loss = res["total_loss_kw"]
138
  all_solutions[key] = cand_loss
139