Arpit-Bansal commited on
Commit
ac0363b
·
1 Parent(s): 0f86bb9

NSGA-II update and block production issue resolution

Browse files
greedyOptim/hybrid_optimizers.py CHANGED
@@ -21,6 +21,8 @@ class MultiObjectiveOptimizer:
21
  self.evaluator = evaluator
22
  self.config = config or OptimizationConfig()
23
  self.n_genes = evaluator.num_trainsets
 
 
24
 
25
  def dominates(self, solution1: Dict[str, float], solution2: Dict[str, float]) -> bool:
26
  """Check if solution1 dominates solution2 in multi-objective sense."""
@@ -102,23 +104,59 @@ class MultiObjectiveOptimizer:
102
 
103
  return distances
104
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
105
  def optimize(self) -> OptimizationResult:
106
  """Run NSGA-II multi-objective optimization."""
107
- # Initialize population
108
  population = []
 
109
  for _ in range(self.config.population_size):
110
  solution = np.random.randint(0, 3, self.n_genes)
111
  population.append(solution)
 
 
 
112
 
113
  best_solutions = []
 
114
 
115
  print(f"Starting NSGA-II multi-objective optimization for {self.config.generations} generations")
 
 
116
 
117
  for gen in range(self.config.generations):
118
  try:
119
  # Evaluate objectives for all solutions
120
  objectives = []
121
- for solution in population:
122
  obj = self.evaluator.calculate_objectives(solution)
123
  objectives.append(obj)
124
 
@@ -127,9 +165,12 @@ class MultiObjectiveOptimizer:
127
 
128
  # Selection for next generation
129
  new_population = []
 
130
  for front in fronts:
131
  if len(new_population) + len(front) <= self.config.population_size:
132
  new_population.extend([population[i] for i in front])
 
 
133
  else:
134
  # Use crowding distance to select from this front
135
  distances = self.crowding_distance(front, objectives)
@@ -137,17 +178,30 @@ class MultiObjectiveOptimizer:
137
  key=lambda x: x[1], reverse=True)
138
  remaining = self.config.population_size - len(new_population)
139
  new_population.extend([population[i] for i, _ in sorted_front[:remaining]])
 
 
140
  break
141
 
142
  # Store best solutions from first front
143
  if fronts and len(fronts[0]) > 0:
144
- best_solutions = [(population[i], objectives[i]) for i in fronts[0]]
 
 
145
 
146
  # Generate offspring through crossover and mutation
147
  offspring = []
 
 
 
 
 
 
 
148
  while len(offspring) < self.config.population_size:
149
- parent1 = random.choice(new_population)
150
- parent2 = random.choice(new_population)
 
 
151
 
152
  # Simple crossover
153
  if random.random() < self.config.crossover_rate:
@@ -162,8 +216,31 @@ class MultiObjectiveOptimizer:
162
  child[i] = random.randint(0, 2)
163
 
164
  offspring.append(child)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
165
 
166
  population = offspring
 
 
167
 
168
  if gen % 50 == 0:
169
  print(f"Generation {gen}: {len(fronts)} fronts, best front size: {len(fronts[0]) if fronts else 0}")
@@ -173,18 +250,27 @@ class MultiObjectiveOptimizer:
173
  break
174
 
175
  # Select best solution from Pareto front
 
176
  if best_solutions:
177
  # Choose solution with best overall fitness
178
- best_solution, best_objectives = min(best_solutions,
179
- key=lambda x: self.evaluator.fitness_function(x[0]))
 
 
 
 
 
180
  else:
181
  # Fallback to first solution
182
  best_solution = population[0]
183
  best_objectives = self.evaluator.calculate_objectives(best_solution)
 
 
184
 
185
- return self._build_result(best_solution, best_objectives)
186
 
187
- def _build_result(self, solution: np.ndarray, objectives: Dict[str, float]) -> OptimizationResult:
 
188
  """Build optimization result."""
189
  fitness = self.evaluator.fitness_function(solution)
190
 
@@ -197,16 +283,29 @@ class MultiObjectiveOptimizer:
197
  valid, reason = self.evaluator.check_hard_constraints(ts_id)
198
  explanations[ts_id] = "✓ Fit for service" if valid else f"⚠ {reason}"
199
 
 
 
 
 
 
 
 
 
 
 
 
 
 
200
  return OptimizationResult(
201
  selected_trainsets=service,
202
  standby_trainsets=standby,
203
  maintenance_trainsets=maintenance,
204
  objectives=objectives,
205
  fitness_score=fitness,
206
- explanation=explanations
 
207
  )
208
 
209
-
210
  class AdaptiveOptimizer:
211
  """Adaptive optimizer that switches between algorithms based on performance."""
212
 
 
21
  self.evaluator = evaluator
22
  self.config = config or OptimizationConfig()
23
  self.n_genes = evaluator.num_trainsets
24
+ self.n_blocks = evaluator.num_blocks
25
+ self.optimize_blocks = self.config.optimize_block_assignment
26
 
27
  def dominates(self, solution1: Dict[str, float], solution2: Dict[str, float]) -> bool:
28
  """Check if solution1 dominates solution2 in multi-objective sense."""
 
104
 
105
  return distances
106
 
107
+ def _create_block_assignment(self, trainset_sol: np.ndarray) -> np.ndarray:
108
+ """Create block assignments for a trainset solution."""
109
+ service_indices = np.where(trainset_sol == 0)[0]
110
+
111
+ if len(service_indices) == 0:
112
+ return np.full(self.n_blocks, -1, dtype=int)
113
+
114
+ # Distribute blocks evenly across service trains
115
+ block_sol = np.zeros(self.n_blocks, dtype=int)
116
+ for i in range(self.n_blocks):
117
+ block_sol[i] = service_indices[i % len(service_indices)]
118
+
119
+ return block_sol
120
+
121
+ def _mutate_block_assignment(self, block_sol: np.ndarray, service_indices: np.ndarray) -> np.ndarray:
122
+ """Mutate block assignment."""
123
+ mutated = block_sol.copy()
124
+
125
+ if len(service_indices) == 0:
126
+ return mutated
127
+
128
+ # Randomly reassign some blocks
129
+ num_mutations = max(1, self.n_blocks // 10)
130
+ for _ in range(num_mutations):
131
+ idx = np.random.randint(0, len(mutated))
132
+ mutated[idx] = np.random.choice(service_indices)
133
+
134
+ return mutated
135
+
136
  def optimize(self) -> OptimizationResult:
137
  """Run NSGA-II multi-objective optimization."""
138
+ # Initialize population with trainset solutions and block assignments
139
  population = []
140
+ block_population = []
141
  for _ in range(self.config.population_size):
142
  solution = np.random.randint(0, 3, self.n_genes)
143
  population.append(solution)
144
+ if self.optimize_blocks:
145
+ block_sol = self._create_block_assignment(solution)
146
+ block_population.append(block_sol)
147
 
148
  best_solutions = []
149
+ best_block_solutions = []
150
 
151
  print(f"Starting NSGA-II multi-objective optimization for {self.config.generations} generations")
152
+ if self.optimize_blocks:
153
+ print(f"Optimizing block assignments for {self.n_blocks} service blocks")
154
 
155
  for gen in range(self.config.generations):
156
  try:
157
  # Evaluate objectives for all solutions
158
  objectives = []
159
+ for idx, solution in enumerate(population):
160
  obj = self.evaluator.calculate_objectives(solution)
161
  objectives.append(obj)
162
 
 
165
 
166
  # Selection for next generation
167
  new_population = []
168
+ new_block_population = [] if self.optimize_blocks else None
169
  for front in fronts:
170
  if len(new_population) + len(front) <= self.config.population_size:
171
  new_population.extend([population[i] for i in front])
172
+ if self.optimize_blocks:
173
+ new_block_population.extend([block_population[i] for i in front])
174
  else:
175
  # Use crowding distance to select from this front
176
  distances = self.crowding_distance(front, objectives)
 
178
  key=lambda x: x[1], reverse=True)
179
  remaining = self.config.population_size - len(new_population)
180
  new_population.extend([population[i] for i, _ in sorted_front[:remaining]])
181
+ if self.optimize_blocks:
182
+ new_block_population.extend([block_population[i] for i, _ in sorted_front[:remaining]])
183
  break
184
 
185
  # Store best solutions from first front
186
  if fronts and len(fronts[0]) > 0:
187
+ best_solutions = [(population[i].copy(), objectives[i].copy()) for i in fronts[0]]
188
+ if self.optimize_blocks:
189
+ best_block_solutions = [block_population[i].copy() for i in fronts[0]]
190
 
191
  # Generate offspring through crossover and mutation
192
  offspring = []
193
+ offspring_blocks = [] if self.optimize_blocks else None
194
+
195
+ # Ensure block population is synchronized
196
+ if self.optimize_blocks and len(new_block_population) != len(new_population):
197
+ # Rebuild block population if out of sync
198
+ new_block_population = [self._create_block_assignment(sol) for sol in new_population]
199
+
200
  while len(offspring) < self.config.population_size:
201
+ idx1 = random.randint(0, len(new_population) - 1)
202
+ idx2 = random.randint(0, len(new_population) - 1)
203
+ parent1 = new_population[idx1]
204
+ parent2 = new_population[idx2]
205
 
206
  # Simple crossover
207
  if random.random() < self.config.crossover_rate:
 
216
  child[i] = random.randint(0, 2)
217
 
218
  offspring.append(child)
219
+
220
+ # Handle block crossover and mutation
221
+ if self.optimize_blocks:
222
+ block_parent1 = new_block_population[idx1]
223
+ block_parent2 = new_block_population[idx2]
224
+
225
+ # Block crossover
226
+ if random.random() < self.config.crossover_rate:
227
+ block_point = random.randint(1, self.n_blocks - 1)
228
+ block_child = np.concatenate([block_parent1[:block_point], block_parent2[block_point:]])
229
+ else:
230
+ block_child = block_parent1.copy()
231
+
232
+ # Ensure valid block assignments for new child's service trains
233
+ service_indices = np.where(child == 0)[0]
234
+ if len(service_indices) > 0:
235
+ block_child = self._mutate_block_assignment(block_child, service_indices)
236
+ else:
237
+ block_child = np.full(self.n_blocks, -1, dtype=int)
238
+
239
+ offspring_blocks.append(block_child)
240
 
241
  population = offspring
242
+ if self.optimize_blocks:
243
+ block_population = offspring_blocks
244
 
245
  if gen % 50 == 0:
246
  print(f"Generation {gen}: {len(fronts)} fronts, best front size: {len(fronts[0]) if fronts else 0}")
 
250
  break
251
 
252
  # Select best solution from Pareto front
253
+ best_block_sol = None
254
  if best_solutions:
255
  # Choose solution with best overall fitness
256
+ best_idx = min(range(len(best_solutions)),
257
+ key=lambda i: self.evaluator.fitness_function(best_solutions[i][0]))
258
+ best_solution, best_objectives = best_solutions[best_idx]
259
+ if self.optimize_blocks:
260
+ # Always create fresh block assignment for the best solution
261
+ # to ensure all 106 blocks are properly assigned
262
+ best_block_sol = self._create_block_assignment(best_solution)
263
  else:
264
  # Fallback to first solution
265
  best_solution = population[0]
266
  best_objectives = self.evaluator.calculate_objectives(best_solution)
267
+ if self.optimize_blocks:
268
+ best_block_sol = self._create_block_assignment(best_solution)
269
 
270
+ return self._build_result(best_solution, best_objectives, best_block_sol)
271
 
272
+ def _build_result(self, solution: np.ndarray, objectives: Dict[str, float],
273
+ block_solution: Optional[np.ndarray] = None) -> OptimizationResult:
274
  """Build optimization result."""
275
  fitness = self.evaluator.fitness_function(solution)
276
 
 
283
  valid, reason = self.evaluator.check_hard_constraints(ts_id)
284
  explanations[ts_id] = "✓ Fit for service" if valid else f"⚠ {reason}"
285
 
286
+ # Build block assignments
287
+ block_assignments = {}
288
+ if block_solution is not None and self.optimize_blocks:
289
+ for ts_id in service:
290
+ block_assignments[ts_id] = []
291
+
292
+ for block_idx, train_idx in enumerate(block_solution):
293
+ if 0 <= train_idx < len(self.evaluator.trainsets):
294
+ ts_id = self.evaluator.trainsets[int(train_idx)]
295
+ if ts_id in block_assignments:
296
+ block_id = self.evaluator.all_blocks[block_idx]['block_id']
297
+ block_assignments[ts_id].append(block_id)
298
+
299
  return OptimizationResult(
300
  selected_trainsets=service,
301
  standby_trainsets=standby,
302
  maintenance_trainsets=maintenance,
303
  objectives=objectives,
304
  fitness_score=fitness,
305
+ explanation=explanations,
306
+ service_block_assignments=block_assignments
307
  )
308
 
 
309
  class AdaptiveOptimizer:
310
  """Adaptive optimizer that switches between algorithms based on performance."""
311
 
test_block_optimization.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """Test script to verify block optimization is working properly."""
3
+ import json
4
+ import sys
5
+ sys.path.insert(0, '.')
6
+
7
+ from DataService.enhanced_generator import EnhancedMetroDataGenerator
8
+ from greedyOptim.scheduler import TrainsetSchedulingOptimizer
9
+ from greedyOptim.schedule_generator import ScheduleGenerator
10
+ from greedyOptim.models import OptimizationConfig
11
+
12
def test_block_optimization():
    """Verify that every optimizer produces service-block assignments.

    Runs each optimization method on a freshly generated dataset, checks
    that the result carries block assignments, generates a schedule from
    the result, and prints a per-method pass/fail summary.

    Returns:
        bool: True when every method succeeded and produced block
        assignments, False otherwise.
    """
    # Generate test data
    generator = EnhancedMetroDataGenerator()
    data = generator.generate_complete_enhanced_dataset()

    # Configure optimizer with block optimization enabled
    config = OptimizationConfig(
        required_service_trains=6,
        min_standby=2,
        optimize_block_assignment=True,
        iterations=5  # Fewer iterations for quick test
    )

    print("=" * 60)
    print("TESTING BLOCK OPTIMIZATION")
    print("=" * 60)

    optimizer = TrainsetSchedulingOptimizer(data, config)

    methods_to_test = ['ga', 'cmaes', 'pso', 'sa', 'nsga2']

    results = {}
    for method in methods_to_test:
        print(f"\n{'='*60}")
        print(f"Testing {method.upper()}")
        print("=" * 60)

        try:
            result = optimizer.optimize(method=method)

            # Check for block assignments
            has_blocks = bool(result.service_block_assignments)
            num_assigned = sum(len(blocks) for blocks in result.service_block_assignments.values()) if has_blocks else 0

            print(f"\n{method.upper()} Results:")
            print(f"  - Selected trainsets: {len(result.selected_trainsets)}")
            print(f"  - Has block assignments: {has_blocks}")
            print(f"  - Total blocks assigned: {num_assigned}")
            print(f"  - Fitness score: {result.fitness_score:.2f}")

            if has_blocks:
                print(f"  - Block assignments per trainset:")
                for ts_id, blocks in result.service_block_assignments.items():
                    print(f"    {ts_id}: {len(blocks)} blocks")

            # Generate schedule using the result
            schedule_gen = ScheduleGenerator(data, config)
            schedule = schedule_gen.generate_schedule(result, method=method, runtime_ms=100)

            print(f"\n  Generated Schedule:")
            print(f"  - Schedule ID: {schedule.schedule_id}")
            print(f"  - Trainsets in schedule: {len(schedule.trainsets)}")

            # Check service trainsets have blocks
            for trainset in schedule.trainsets:
                if trainset.status.value == "REVENUE_SERVICE":
                    block_count = len(trainset.service_blocks) if trainset.service_blocks else 0
                    total_km = trainset.daily_km_allocation
                    print(f"  - {trainset.trainset_id}: {block_count} blocks, {total_km} km")

            results[method] = {
                'success': True,
                'has_blocks': has_blocks,
                'num_blocks': num_assigned,
                'fitness': result.fitness_score
            }

        except Exception as e:
            # Best-effort test harness: record the failure and keep
            # testing the remaining methods.
            print(f"ERROR with {method}: {e}")
            import traceback
            traceback.print_exc()
            results[method] = {'success': False, 'error': str(e)}

    # Summary
    print("\n" + "=" * 60)
    print("SUMMARY")
    print("=" * 60)

    all_passed = True
    for method, res in results.items():
        if res['success']:
            status = "✓ PASS" if res['has_blocks'] else "⚠ NO BLOCKS"
            print(f"{method.upper()}: {status} (blocks: {res['num_blocks']}, fitness: {res['fitness']:.2f})")
            if not res['has_blocks']:
                all_passed = False
        else:
            print(f"{method.upper()}: ✗ FAIL ({res['error']})")
            all_passed = False

    if all_passed:
        print("\n✓ All optimizers producing block assignments correctly!")
    else:
        print("\n⚠ Some issues detected - check above for details")

    return all_passed

if __name__ == "__main__":
    # Fix: the script previously always exited with status 0, so CI and
    # shell callers could not detect a failed run.  Propagate the result
    # as the process exit code ('sys' is imported at the top of the file).
    sys.exit(0 if test_block_optimization() else 1)