Commit ·
1b2c6dc
1
Parent(s): 026e398
schedule generation endpoint
Browse files- api/greedyoptim_api.py +222 -2
- greedyOptim/__init__.py +18 -2
- greedyOptim/error_handling.py +3 -1
- greedyOptim/models.py +185 -2
- greedyOptim/schedule_generator.py +376 -0
api/greedyoptim_api.py
CHANGED
|
@@ -19,6 +19,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
| 19 |
from greedyOptim.scheduler import optimize_trainset_schedule, compare_optimization_methods
|
| 20 |
from greedyOptim.models import OptimizationConfig, OptimizationResult
|
| 21 |
from greedyOptim.error_handling import DataValidator
|
|
|
|
| 22 |
|
| 23 |
# Import DataService for synthetic data generation (optional)
|
| 24 |
from DataService.enhanced_generator import EnhancedMetroDataGenerator
|
|
@@ -172,6 +173,74 @@ class ScheduleOptimizationResponse(BaseModel):
|
|
| 172 |
warnings: Optional[List[str]] = None
|
| 173 |
|
| 174 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
# ============================================================================
|
| 176 |
# Helper Functions
|
| 177 |
# ============================================================================
|
|
@@ -253,6 +322,78 @@ def convert_result_to_response(
|
|
| 253 |
)
|
| 254 |
|
| 255 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 256 |
# ============================================================================
|
| 257 |
# API Endpoints
|
| 258 |
# ============================================================================
|
|
@@ -265,7 +406,8 @@ async def root():
|
|
| 265 |
"version": "2.0.0",
|
| 266 |
"description": "Advanced train scheduling optimization",
|
| 267 |
"endpoints": {
|
| 268 |
-
"POST /optimize": "Optimize schedule with custom data",
|
|
|
|
| 269 |
"POST /compare": "Compare multiple optimization methods",
|
| 270 |
"POST /generate-synthetic": "Generate synthetic test data",
|
| 271 |
"POST /validate": "Validate input data structure",
|
|
@@ -398,6 +540,84 @@ async def optimize_schedule(request: ScheduleOptimizationRequest):
|
|
| 398 |
)
|
| 399 |
|
| 400 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 401 |
@app.post("/compare")
|
| 402 |
async def compare_methods(request: CompareMethodsRequest):
|
| 403 |
"""
|
|
@@ -547,7 +767,7 @@ async def validate_data(request: ScheduleOptimizationRequest):
|
|
| 547 |
"message": "Data structure is valid",
|
| 548 |
"num_trainsets": len(request.trainset_status),
|
| 549 |
"num_certificates": len(request.fitness_certificates),
|
| 550 |
-
"num_job_cards": len(request.job_cards),
|
| 551 |
"num_component_health": len(request.component_health)
|
| 552 |
}
|
| 553 |
|
|
|
|
| 19 |
from greedyOptim.scheduler import optimize_trainset_schedule, compare_optimization_methods
|
| 20 |
from greedyOptim.models import OptimizationConfig, OptimizationResult
|
| 21 |
from greedyOptim.error_handling import DataValidator
|
| 22 |
+
from greedyOptim.schedule_generator import generate_schedule_from_result
|
| 23 |
|
| 24 |
# Import DataService for synthetic data generation (optional)
|
| 25 |
from DataService.enhanced_generator import EnhancedMetroDataGenerator
|
|
|
|
| 173 |
warnings: Optional[List[str]] = None
|
| 174 |
|
| 175 |
|
| 176 |
+
# New models for full schedule response
|
| 177 |
+
class ServiceBlockResponse(BaseModel):
|
| 178 |
+
"""Service block with timing details"""
|
| 179 |
+
block_id: str
|
| 180 |
+
departure_time: str
|
| 181 |
+
origin: str
|
| 182 |
+
destination: str
|
| 183 |
+
trip_count: int
|
| 184 |
+
estimated_km: float
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
class TrainsetScheduleResponse(BaseModel):
|
| 188 |
+
"""Complete schedule for a single trainset"""
|
| 189 |
+
trainset_id: str
|
| 190 |
+
status: str
|
| 191 |
+
readiness_score: float
|
| 192 |
+
daily_km_allocation: float
|
| 193 |
+
cumulative_km: float
|
| 194 |
+
assigned_duty: Optional[str] = None
|
| 195 |
+
priority_rank: Optional[int] = None
|
| 196 |
+
service_blocks: Optional[List[ServiceBlockResponse]] = None
|
| 197 |
+
stabling_bay: Optional[str] = None
|
| 198 |
+
standby_reason: Optional[str] = None
|
| 199 |
+
maintenance_type: Optional[str] = None
|
| 200 |
+
ibl_bay: Optional[str] = None
|
| 201 |
+
estimated_completion: Optional[str] = None
|
| 202 |
+
alerts: Optional[List[str]] = None
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
class FleetSummaryResponse(BaseModel):
|
| 206 |
+
"""Fleet summary statistics"""
|
| 207 |
+
total_trainsets: int
|
| 208 |
+
revenue_service: int
|
| 209 |
+
standby: int
|
| 210 |
+
maintenance: int
|
| 211 |
+
availability_percent: float
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
class OptimizationMetricsResponse(BaseModel):
|
| 215 |
+
"""Optimization metrics"""
|
| 216 |
+
fitness_score: float
|
| 217 |
+
method: str
|
| 218 |
+
mileage_variance_coefficient: float
|
| 219 |
+
total_planned_km: float
|
| 220 |
+
optimization_runtime_ms: int
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class AlertResponse(BaseModel):
|
| 224 |
+
"""Schedule alert"""
|
| 225 |
+
trainset_id: str
|
| 226 |
+
severity: str
|
| 227 |
+
alert_type: str
|
| 228 |
+
message: str
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
class FullScheduleResponse(BaseModel):
|
| 232 |
+
"""Complete schedule response with service blocks and timing"""
|
| 233 |
+
schedule_id: str
|
| 234 |
+
generated_at: str
|
| 235 |
+
valid_from: str
|
| 236 |
+
valid_until: str
|
| 237 |
+
depot: str
|
| 238 |
+
trainsets: List[TrainsetScheduleResponse]
|
| 239 |
+
fleet_summary: FleetSummaryResponse
|
| 240 |
+
optimization_metrics: OptimizationMetricsResponse
|
| 241 |
+
alerts: List[AlertResponse]
|
| 242 |
+
|
| 243 |
+
|
| 244 |
# ============================================================================
|
| 245 |
# Helper Functions
|
| 246 |
# ============================================================================
|
|
|
|
| 322 |
)
|
| 323 |
|
| 324 |
|
| 325 |
+
def convert_schedule_result_to_response(schedule_result) -> FullScheduleResponse:
|
| 326 |
+
"""Convert ScheduleResult to API FullScheduleResponse"""
|
| 327 |
+
from greedyOptim.models import ScheduleResult
|
| 328 |
+
|
| 329 |
+
trainsets = []
|
| 330 |
+
for ts in schedule_result.trainsets:
|
| 331 |
+
service_blocks_resp = None
|
| 332 |
+
if ts.service_blocks:
|
| 333 |
+
service_blocks_resp = [
|
| 334 |
+
ServiceBlockResponse(
|
| 335 |
+
block_id=sb.block_id,
|
| 336 |
+
departure_time=sb.departure_time,
|
| 337 |
+
origin=sb.origin,
|
| 338 |
+
destination=sb.destination,
|
| 339 |
+
trip_count=sb.trip_count,
|
| 340 |
+
estimated_km=sb.estimated_km
|
| 341 |
+
)
|
| 342 |
+
for sb in ts.service_blocks
|
| 343 |
+
]
|
| 344 |
+
|
| 345 |
+
trainsets.append(TrainsetScheduleResponse(
|
| 346 |
+
trainset_id=ts.trainset_id,
|
| 347 |
+
status=ts.status.value if hasattr(ts.status, 'value') else ts.status,
|
| 348 |
+
readiness_score=ts.readiness_score,
|
| 349 |
+
daily_km_allocation=ts.daily_km_allocation,
|
| 350 |
+
cumulative_km=ts.cumulative_km,
|
| 351 |
+
assigned_duty=ts.assigned_duty,
|
| 352 |
+
priority_rank=ts.priority_rank,
|
| 353 |
+
service_blocks=service_blocks_resp,
|
| 354 |
+
stabling_bay=ts.stabling_bay,
|
| 355 |
+
standby_reason=ts.standby_reason,
|
| 356 |
+
maintenance_type=ts.maintenance_type.value if ts.maintenance_type and hasattr(ts.maintenance_type, 'value') else ts.maintenance_type,
|
| 357 |
+
ibl_bay=ts.ibl_bay,
|
| 358 |
+
estimated_completion=ts.estimated_completion,
|
| 359 |
+
alerts=ts.alerts
|
| 360 |
+
))
|
| 361 |
+
|
| 362 |
+
alerts = [
|
| 363 |
+
AlertResponse(
|
| 364 |
+
trainset_id=a.trainset_id,
|
| 365 |
+
severity=a.severity.value if hasattr(a.severity, 'value') else a.severity,
|
| 366 |
+
alert_type=a.alert_type,
|
| 367 |
+
message=a.message
|
| 368 |
+
)
|
| 369 |
+
for a in schedule_result.alerts
|
| 370 |
+
]
|
| 371 |
+
|
| 372 |
+
return FullScheduleResponse(
|
| 373 |
+
schedule_id=schedule_result.schedule_id,
|
| 374 |
+
generated_at=schedule_result.generated_at,
|
| 375 |
+
valid_from=schedule_result.valid_from,
|
| 376 |
+
valid_until=schedule_result.valid_until,
|
| 377 |
+
depot=schedule_result.depot,
|
| 378 |
+
trainsets=trainsets,
|
| 379 |
+
fleet_summary=FleetSummaryResponse(
|
| 380 |
+
total_trainsets=schedule_result.fleet_summary.total_trainsets,
|
| 381 |
+
revenue_service=schedule_result.fleet_summary.revenue_service,
|
| 382 |
+
standby=schedule_result.fleet_summary.standby,
|
| 383 |
+
maintenance=schedule_result.fleet_summary.maintenance,
|
| 384 |
+
availability_percent=schedule_result.fleet_summary.availability_percent
|
| 385 |
+
),
|
| 386 |
+
optimization_metrics=OptimizationMetricsResponse(
|
| 387 |
+
fitness_score=schedule_result.optimization_metrics.fitness_score,
|
| 388 |
+
method=schedule_result.optimization_metrics.method,
|
| 389 |
+
mileage_variance_coefficient=schedule_result.optimization_metrics.mileage_variance_coefficient,
|
| 390 |
+
total_planned_km=schedule_result.optimization_metrics.total_planned_km,
|
| 391 |
+
optimization_runtime_ms=schedule_result.optimization_metrics.optimization_runtime_ms
|
| 392 |
+
),
|
| 393 |
+
alerts=alerts
|
| 394 |
+
)
|
| 395 |
+
|
| 396 |
+
|
| 397 |
# ============================================================================
|
| 398 |
# API Endpoints
|
| 399 |
# ============================================================================
|
|
|
|
| 406 |
"version": "2.0.0",
|
| 407 |
"description": "Advanced train scheduling optimization",
|
| 408 |
"endpoints": {
|
| 409 |
+
"POST /optimize": "Optimize schedule with custom data (returns trainset allocations)",
|
| 410 |
+
"POST /schedule": "Generate full schedule with service blocks and timing",
|
| 411 |
"POST /compare": "Compare multiple optimization methods",
|
| 412 |
"POST /generate-synthetic": "Generate synthetic test data",
|
| 413 |
"POST /validate": "Validate input data structure",
|
|
|
|
| 540 |
)
|
| 541 |
|
| 542 |
|
| 543 |
+
@app.post("/schedule", response_model=FullScheduleResponse)
|
| 544 |
+
async def generate_full_schedule(request: ScheduleOptimizationRequest):
|
| 545 |
+
"""
|
| 546 |
+
Generate complete schedule with service blocks and timing.
|
| 547 |
+
|
| 548 |
+
This endpoint returns a full schedule with:
|
| 549 |
+
- Service blocks with departure times and routes
|
| 550 |
+
- Bay allocations
|
| 551 |
+
- Daily km assignments
|
| 552 |
+
- Fleet summary
|
| 553 |
+
- Alerts and warnings
|
| 554 |
+
|
| 555 |
+
Use this endpoint when you need operational timetables, not just trainset allocations.
|
| 556 |
+
"""
|
| 557 |
+
try:
|
| 558 |
+
import time
|
| 559 |
+
start_time = time.time()
|
| 560 |
+
|
| 561 |
+
logger.info(f"Received full schedule request with {len(request.trainset_status)} trainsets, method: {request.method}")
|
| 562 |
+
|
| 563 |
+
# Convert request to dict format
|
| 564 |
+
data = convert_pydantic_to_dict(request)
|
| 565 |
+
|
| 566 |
+
# Validate data
|
| 567 |
+
validation_errors = DataValidator.validate_data(data)
|
| 568 |
+
if validation_errors:
|
| 569 |
+
raise HTTPException(
|
| 570 |
+
status_code=400,
|
| 571 |
+
detail={
|
| 572 |
+
"error": "Data validation failed",
|
| 573 |
+
"validation_errors": validation_errors,
|
| 574 |
+
"message": "Please fix the data structure and try again"
|
| 575 |
+
}
|
| 576 |
+
)
|
| 577 |
+
|
| 578 |
+
# Convert config
|
| 579 |
+
config = convert_config(request.config)
|
| 580 |
+
|
| 581 |
+
# Run optimization
|
| 582 |
+
result = optimize_trainset_schedule(data, request.method, config)
|
| 583 |
+
|
| 584 |
+
execution_time = time.time() - start_time
|
| 585 |
+
runtime_ms = int(execution_time * 1000)
|
| 586 |
+
|
| 587 |
+
logger.info(f"Optimization completed in {execution_time:.3f}s, fitness: {result.fitness_score:.4f}")
|
| 588 |
+
|
| 589 |
+
# Generate full schedule with service blocks
|
| 590 |
+
schedule_result = generate_schedule_from_result(
|
| 591 |
+
data=data,
|
| 592 |
+
optimization_result=result,
|
| 593 |
+
method=request.method,
|
| 594 |
+
runtime_ms=runtime_ms,
|
| 595 |
+
config=config,
|
| 596 |
+
date=request.date,
|
| 597 |
+
depot="Muttom_Depot"
|
| 598 |
+
)
|
| 599 |
+
|
| 600 |
+
# Convert to response
|
| 601 |
+
response = convert_schedule_result_to_response(schedule_result)
|
| 602 |
+
|
| 603 |
+
logger.info(f"Full schedule generated: {schedule_result.schedule_id}")
|
| 604 |
+
|
| 605 |
+
return response
|
| 606 |
+
|
| 607 |
+
except HTTPException:
|
| 608 |
+
raise
|
| 609 |
+
except Exception as e:
|
| 610 |
+
logger.error(f"Schedule generation error: {str(e)}", exc_info=True)
|
| 611 |
+
raise HTTPException(
|
| 612 |
+
status_code=500,
|
| 613 |
+
detail={
|
| 614 |
+
"error": "Schedule generation failed",
|
| 615 |
+
"message": str(e),
|
| 616 |
+
"type": type(e).__name__
|
| 617 |
+
}
|
| 618 |
+
)
|
| 619 |
+
|
| 620 |
+
|
| 621 |
@app.post("/compare")
|
| 622 |
async def compare_methods(request: CompareMethodsRequest):
|
| 623 |
"""
|
|
|
|
| 767 |
"message": "Data structure is valid",
|
| 768 |
"num_trainsets": len(request.trainset_status),
|
| 769 |
"num_certificates": len(request.fitness_certificates),
|
| 770 |
+
"num_job_cards": len(request.job_cards) if request.job_cards else 0,
|
| 771 |
"num_component_health": len(request.component_health)
|
| 772 |
}
|
| 773 |
|
greedyOptim/__init__.py
CHANGED
|
@@ -16,7 +16,11 @@ Usage:
|
|
| 16 |
result = optimize_trainset_schedule(data, method='ga', config=config)
|
| 17 |
"""
|
| 18 |
|
| 19 |
-
from .models import
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
from .evaluator import TrainsetSchedulingEvaluator
|
| 21 |
from .genetic_algorithm import GeneticAlgorithmOptimizer
|
| 22 |
from .advanced_optimizers import CMAESOptimizer, ParticleSwarmOptimizer, SimulatedAnnealingOptimizer
|
|
@@ -33,6 +37,7 @@ from .error_handling import (
|
|
| 33 |
safe_optimize, DataValidator, OptimizationError,
|
| 34 |
DataValidationError, ConstraintViolationError, ConfigurationError
|
| 35 |
)
|
|
|
|
| 36 |
|
| 37 |
# Optional OR-Tools integration
|
| 38 |
try:
|
|
@@ -55,6 +60,15 @@ __all__ = [
|
|
| 55 |
'OptimizationResult',
|
| 56 |
'OptimizationConfig',
|
| 57 |
'TrainsetConstraints',
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
'TrainsetSchedulingEvaluator',
|
| 59 |
'GeneticAlgorithmOptimizer',
|
| 60 |
'CMAESOptimizer',
|
|
@@ -73,7 +87,9 @@ __all__ = [
|
|
| 73 |
'OptimizationError',
|
| 74 |
'DataValidationError',
|
| 75 |
'ConstraintViolationError',
|
| 76 |
-
'ConfigurationError'
|
|
|
|
|
|
|
| 77 |
]
|
| 78 |
|
| 79 |
# Add OR-Tools to exports if available
|
|
|
|
| 16 |
result = optimize_trainset_schedule(data, method='ga', config=config)
|
| 17 |
"""
|
| 18 |
|
| 19 |
+
from .models import (
|
| 20 |
+
OptimizationResult, OptimizationConfig, TrainsetConstraints,
|
| 21 |
+
ScheduleResult, ScheduleTrainset, ServiceBlock, FleetSummary,
|
| 22 |
+
OptimizationMetrics, ScheduleAlert, TrainStatus, MaintenanceType, AlertSeverity
|
| 23 |
+
)
|
| 24 |
from .evaluator import TrainsetSchedulingEvaluator
|
| 25 |
from .genetic_algorithm import GeneticAlgorithmOptimizer
|
| 26 |
from .advanced_optimizers import CMAESOptimizer, ParticleSwarmOptimizer, SimulatedAnnealingOptimizer
|
|
|
|
| 37 |
safe_optimize, DataValidator, OptimizationError,
|
| 38 |
DataValidationError, ConstraintViolationError, ConfigurationError
|
| 39 |
)
|
| 40 |
+
from .schedule_generator import ScheduleGenerator, generate_schedule_from_result
|
| 41 |
|
| 42 |
# Optional OR-Tools integration
|
| 43 |
try:
|
|
|
|
| 60 |
'OptimizationResult',
|
| 61 |
'OptimizationConfig',
|
| 62 |
'TrainsetConstraints',
|
| 63 |
+
'ScheduleResult',
|
| 64 |
+
'ScheduleTrainset',
|
| 65 |
+
'ServiceBlock',
|
| 66 |
+
'FleetSummary',
|
| 67 |
+
'OptimizationMetrics',
|
| 68 |
+
'ScheduleAlert',
|
| 69 |
+
'TrainStatus',
|
| 70 |
+
'MaintenanceType',
|
| 71 |
+
'AlertSeverity',
|
| 72 |
'TrainsetSchedulingEvaluator',
|
| 73 |
'GeneticAlgorithmOptimizer',
|
| 74 |
'CMAESOptimizer',
|
|
|
|
| 87 |
'OptimizationError',
|
| 88 |
'DataValidationError',
|
| 89 |
'ConstraintViolationError',
|
| 90 |
+
'ConfigurationError',
|
| 91 |
+
'ScheduleGenerator',
|
| 92 |
+
'generate_schedule_from_result'
|
| 93 |
]
|
| 94 |
|
| 95 |
# Add OR-Tools to exports if available
|
greedyOptim/error_handling.py
CHANGED
|
@@ -232,8 +232,10 @@ class DataValidator:
|
|
| 232 |
if len(trainset_ids) < 10:
|
| 233 |
errors.append("Insufficient trainsets for optimization (minimum 10 required)")
|
| 234 |
|
|
|
|
|
|
|
| 235 |
available_trainsets = sum(1 for record in data['trainset_status']
|
| 236 |
-
if record.get('operational_status')
|
| 237 |
if available_trainsets < 15:
|
| 238 |
errors.append(f"Insufficient available trainsets for optimization ({available_trainsets} available, need at least 15)")
|
| 239 |
|
|
|
|
| 232 |
if len(trainset_ids) < 10:
|
| 233 |
errors.append("Insufficient trainsets for optimization (minimum 10 required)")
|
| 234 |
|
| 235 |
+
# Count available trainsets (both legacy and new formats)
|
| 236 |
+
available_statuses = {'Available', 'In-Service', 'Standby', 'IN_SERVICE', 'STANDBY'}
|
| 237 |
available_trainsets = sum(1 for record in data['trainset_status']
|
| 238 |
+
if record.get('operational_status') in available_statuses)
|
| 239 |
if available_trainsets < 15:
|
| 240 |
errors.append(f"Insufficient available trainsets for optimization ({available_trainsets} available, need at least 15)")
|
| 241 |
|
greedyOptim/models.py
CHANGED
|
@@ -1,8 +1,191 @@
|
|
| 1 |
"""
|
| 2 |
Data models and dataclasses for the optimization system.
|
| 3 |
"""
|
| 4 |
-
from dataclasses import dataclass
|
| 5 |
-
from typing import Dict, List
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
|
| 7 |
|
| 8 |
@dataclass
|
|
|
|
| 1 |
"""
|
| 2 |
Data models and dataclasses for the optimization system.
|
| 3 |
"""
|
| 4 |
+
from dataclasses import dataclass, field
|
| 5 |
+
from typing import Dict, List, Optional
|
| 6 |
+
from enum import Enum
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TrainStatus(str, Enum):
|
| 10 |
+
"""Train operational status for schedule output."""
|
| 11 |
+
REVENUE_SERVICE = "REVENUE_SERVICE"
|
| 12 |
+
STANDBY = "STANDBY"
|
| 13 |
+
MAINTENANCE = "MAINTENANCE"
|
| 14 |
+
OUT_OF_SERVICE = "OUT_OF_SERVICE"
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class MaintenanceType(str, Enum):
|
| 18 |
+
"""Types of maintenance."""
|
| 19 |
+
SCHEDULED_INSPECTION = "SCHEDULED_INSPECTION"
|
| 20 |
+
PREVENTIVE = "PREVENTIVE"
|
| 21 |
+
CORRECTIVE = "CORRECTIVE"
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class AlertSeverity(str, Enum):
|
| 25 |
+
"""Alert severity levels."""
|
| 26 |
+
LOW = "LOW"
|
| 27 |
+
MEDIUM = "MEDIUM"
|
| 28 |
+
HIGH = "HIGH"
|
| 29 |
+
CRITICAL = "CRITICAL"
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
@dataclass
|
| 33 |
+
class ServiceBlock:
|
| 34 |
+
"""A service block represents a continuous operating period for a train."""
|
| 35 |
+
block_id: str
|
| 36 |
+
departure_time: str # HH:MM format
|
| 37 |
+
origin: str
|
| 38 |
+
destination: str
|
| 39 |
+
trip_count: int
|
| 40 |
+
estimated_km: int
|
| 41 |
+
|
| 42 |
+
def to_dict(self) -> Dict:
|
| 43 |
+
return {
|
| 44 |
+
'block_id': self.block_id,
|
| 45 |
+
'departure_time': self.departure_time,
|
| 46 |
+
'origin': self.origin,
|
| 47 |
+
'destination': self.destination,
|
| 48 |
+
'trip_count': self.trip_count,
|
| 49 |
+
'estimated_km': self.estimated_km
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
@dataclass
|
| 54 |
+
class ScheduleTrainset:
|
| 55 |
+
"""Complete trainset information in the schedule."""
|
| 56 |
+
trainset_id: str
|
| 57 |
+
status: TrainStatus
|
| 58 |
+
readiness_score: float
|
| 59 |
+
daily_km_allocation: int
|
| 60 |
+
cumulative_km: int
|
| 61 |
+
|
| 62 |
+
# For REVENUE_SERVICE
|
| 63 |
+
assigned_duty: Optional[str] = None
|
| 64 |
+
priority_rank: Optional[int] = None
|
| 65 |
+
service_blocks: List[ServiceBlock] = field(default_factory=list)
|
| 66 |
+
stabling_bay: Optional[str] = None
|
| 67 |
+
|
| 68 |
+
# For STANDBY
|
| 69 |
+
standby_reason: Optional[str] = None
|
| 70 |
+
|
| 71 |
+
# For MAINTENANCE
|
| 72 |
+
maintenance_type: Optional[MaintenanceType] = None
|
| 73 |
+
ibl_bay: Optional[str] = None
|
| 74 |
+
estimated_completion: Optional[str] = None
|
| 75 |
+
|
| 76 |
+
# Alerts
|
| 77 |
+
alerts: List[str] = field(default_factory=list)
|
| 78 |
+
|
| 79 |
+
def to_dict(self) -> Dict:
|
| 80 |
+
result = {
|
| 81 |
+
'trainset_id': self.trainset_id,
|
| 82 |
+
'status': self.status.value,
|
| 83 |
+
'readiness_score': self.readiness_score,
|
| 84 |
+
'daily_km_allocation': self.daily_km_allocation,
|
| 85 |
+
'cumulative_km': self.cumulative_km,
|
| 86 |
+
'alerts': self.alerts
|
| 87 |
+
}
|
| 88 |
+
if self.assigned_duty:
|
| 89 |
+
result['assigned_duty'] = self.assigned_duty
|
| 90 |
+
if self.priority_rank is not None:
|
| 91 |
+
result['priority_rank'] = self.priority_rank
|
| 92 |
+
if self.service_blocks:
|
| 93 |
+
result['service_blocks'] = [b.to_dict() for b in self.service_blocks]
|
| 94 |
+
if self.stabling_bay:
|
| 95 |
+
result['stabling_bay'] = self.stabling_bay
|
| 96 |
+
if self.standby_reason:
|
| 97 |
+
result['standby_reason'] = self.standby_reason
|
| 98 |
+
if self.maintenance_type:
|
| 99 |
+
result['maintenance_type'] = self.maintenance_type.value
|
| 100 |
+
if self.ibl_bay:
|
| 101 |
+
result['ibl_bay'] = self.ibl_bay
|
| 102 |
+
if self.estimated_completion:
|
| 103 |
+
result['estimated_completion'] = self.estimated_completion
|
| 104 |
+
return result
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@dataclass
|
| 108 |
+
class FleetSummary:
|
| 109 |
+
"""Summary statistics for the fleet."""
|
| 110 |
+
total_trainsets: int
|
| 111 |
+
revenue_service: int
|
| 112 |
+
standby: int
|
| 113 |
+
maintenance: int
|
| 114 |
+
availability_percent: float
|
| 115 |
+
|
| 116 |
+
def to_dict(self) -> Dict:
|
| 117 |
+
return {
|
| 118 |
+
'total_trainsets': self.total_trainsets,
|
| 119 |
+
'revenue_service': self.revenue_service,
|
| 120 |
+
'standby': self.standby,
|
| 121 |
+
'maintenance': self.maintenance,
|
| 122 |
+
'availability_percent': self.availability_percent
|
| 123 |
+
}
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
@dataclass
|
| 127 |
+
class ScheduleAlert:
|
| 128 |
+
"""Alert or warning in the schedule."""
|
| 129 |
+
trainset_id: str
|
| 130 |
+
severity: AlertSeverity
|
| 131 |
+
alert_type: str
|
| 132 |
+
message: str
|
| 133 |
+
|
| 134 |
+
def to_dict(self) -> Dict:
|
| 135 |
+
return {
|
| 136 |
+
'trainset_id': self.trainset_id,
|
| 137 |
+
'severity': self.severity.value,
|
| 138 |
+
'alert_type': self.alert_type,
|
| 139 |
+
'message': self.message
|
| 140 |
+
}
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@dataclass
|
| 144 |
+
class OptimizationMetrics:
|
| 145 |
+
"""Metrics about the optimization."""
|
| 146 |
+
fitness_score: float
|
| 147 |
+
method: str
|
| 148 |
+
mileage_variance_coefficient: float
|
| 149 |
+
total_planned_km: int
|
| 150 |
+
optimization_runtime_ms: int
|
| 151 |
+
|
| 152 |
+
def to_dict(self) -> Dict:
|
| 153 |
+
return {
|
| 154 |
+
'fitness_score': self.fitness_score,
|
| 155 |
+
'method': self.method,
|
| 156 |
+
'mileage_variance_coefficient': self.mileage_variance_coefficient,
|
| 157 |
+
'total_planned_km': self.total_planned_km,
|
| 158 |
+
'optimization_runtime_ms': self.optimization_runtime_ms
|
| 159 |
+
}
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
@dataclass
|
| 163 |
+
class ScheduleResult:
|
| 164 |
+
"""Complete schedule result with all trainsets and service blocks."""
|
| 165 |
+
schedule_id: str
|
| 166 |
+
generated_at: str
|
| 167 |
+
valid_from: str
|
| 168 |
+
valid_until: str
|
| 169 |
+
depot: str
|
| 170 |
+
|
| 171 |
+
trainsets: List[ScheduleTrainset]
|
| 172 |
+
fleet_summary: FleetSummary
|
| 173 |
+
optimization_metrics: OptimizationMetrics
|
| 174 |
+
alerts: List[ScheduleAlert] = field(default_factory=list)
|
| 175 |
+
|
| 176 |
+
def to_dict(self) -> Dict:
|
| 177 |
+
"""Convert to dictionary for JSON serialization and benchmarks."""
|
| 178 |
+
return {
|
| 179 |
+
'schedule_id': self.schedule_id,
|
| 180 |
+
'generated_at': self.generated_at,
|
| 181 |
+
'valid_from': self.valid_from,
|
| 182 |
+
'valid_until': self.valid_until,
|
| 183 |
+
'depot': self.depot,
|
| 184 |
+
'trainsets': [ts.to_dict() for ts in self.trainsets],
|
| 185 |
+
'fleet_summary': self.fleet_summary.to_dict(),
|
| 186 |
+
'optimization_metrics': self.optimization_metrics.to_dict(),
|
| 187 |
+
'alerts': [a.to_dict() for a in self.alerts]
|
| 188 |
+
}
|
| 189 |
|
| 190 |
|
| 191 |
@dataclass
|
greedyOptim/schedule_generator.py
ADDED
|
@@ -0,0 +1,376 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Schedule Generator Module
|
| 3 |
+
Converts optimization results into complete schedules with service blocks.
|
| 4 |
+
"""
|
| 5 |
+
import random
|
| 6 |
+
from datetime import datetime, timedelta
|
| 7 |
+
from typing import Dict, List, Optional
|
| 8 |
+
|
| 9 |
+
from .models import (
|
| 10 |
+
OptimizationResult, OptimizationConfig,
|
| 11 |
+
ScheduleResult, ScheduleTrainset, ServiceBlock, FleetSummary,
|
| 12 |
+
OptimizationMetrics, ScheduleAlert, TrainStatus, MaintenanceType, AlertSeverity
|
| 13 |
+
)
|
| 14 |
+
from .service_blocks import ServiceBlockGenerator
|
| 15 |
+
from .evaluator import normalize_certificate_status, normalize_component_status
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# Depot configuration
|
| 19 |
+
DEPOT_BAYS = [f"BAY-{str(i).zfill(2)}" for i in range(1, 16)]
|
| 20 |
+
IBL_BAYS = [f"IBL-{str(i).zfill(2)}" for i in range(1, 6)]
|
| 21 |
+
|
| 22 |
+
# Standby reasons
|
| 23 |
+
STANDBY_REASONS = ["MILEAGE_BALANCING", "EMERGENCY_BACKUP", "PEAK_HOUR_RESERVE", "CREW_UNAVAILABLE"]
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class ScheduleGenerator:
    """Turns an optimizer's trainset allocation into a complete daily schedule.

    Wraps the raw ``OptimizationResult`` with per-trainset service blocks,
    bay assignments, readiness scores, fleet-level summary figures and any
    certificate / component alerts derived from the input data.
    """

    def __init__(self, data: Dict, config: Optional[OptimizationConfig] = None):
        """Initialize schedule generator.

        Args:
            data: Input data with trainset_status, fitness_certificates, component_health
            config: Optimization configuration
        """
        self.data = data
        self.config = config or OptimizationConfig()
        self.service_block_generator = ServiceBlockGenerator()

        # Index the raw records by trainset id up front for O(1) access.
        self._build_lookups()

    def _build_lookups(self):
        """Index input records by trainset id for quick access."""
        self.status_map = {
            record['trainset_id']: record
            for record in self.data['trainset_status']
        }

        # Certificates and component-health rows are one-to-many per trainset.
        self.cert_map = {}
        for record in self.data.get('fitness_certificates', []):
            self.cert_map.setdefault(record['trainset_id'], []).append(record)

        self.health_map = {}
        for record in self.data.get('component_health', []):
            self.health_map.setdefault(record['trainset_id'], []).append(record)

    def generate_schedule(
        self,
        optimization_result: OptimizationResult,
        method: str = "ga",
        runtime_ms: int = 0,
        date: Optional[str] = None,
        depot: str = "Muttom_Depot"
    ) -> ScheduleResult:
        """Generate complete schedule from optimization result.

        Args:
            optimization_result: Result from optimizer with trainset allocations
            method: Optimization method used
            runtime_ms: Optimization runtime in milliseconds
            date: Schedule date (default: today)
            depot: Depot name

        Returns:
            Complete ScheduleResult with service blocks
        """
        if date is None:
            date = datetime.now().strftime("%Y-%m-%d")
        now = datetime.now()

        # Random suffix keeps ids unique across reruns on the same day.
        schedule_id = f"SCH-{date.replace('-', '')}-{random.randint(100, 999)}"

        roster = []
        all_alerts = []
        total_km = 0
        per_train_km = []

        # Revenue-service trains get duty assignments and service blocks.
        service_ids = optimization_result.selected_trainsets
        for rank, ts_id in enumerate(service_ids):
            entry, entry_alerts, km = self._generate_service_trainset(
                ts_id, rank, len(service_ids)
            )
            roster.append(entry)
            all_alerts.extend(entry_alerts)
            total_km += km
            per_train_km.append(km)

        # Standby and maintenance trains carry no km allocation.
        for ts_id in optimization_result.standby_trainsets:
            entry, entry_alerts = self._generate_standby_trainset(ts_id)
            roster.append(entry)
            all_alerts.extend(entry_alerts)

        for ts_id in optimization_result.maintenance_trainsets:
            entry, entry_alerts = self._generate_maintenance_trainset(ts_id)
            roster.append(entry)
            all_alerts.extend(entry_alerts)

        in_service = len(service_ids)
        on_standby = len(optimization_result.standby_trainsets)
        fleet_summary = FleetSummary(
            total_trainsets=len(roster),
            revenue_service=in_service,
            standby=on_standby,
            maintenance=len(optimization_result.maintenance_trainsets),
            # Available = running + ready-to-run, as a share of the roster.
            availability_percent=round(
                (in_service + on_standby) / max(1, len(roster)) * 100, 1
            )
        )

        # Coefficient of variation of planned km across service trains.
        if len(per_train_km) > 1:
            import statistics
            mean_km = statistics.mean(per_train_km)
            spread = statistics.stdev(per_train_km)
            variance_coeff = spread / mean_km if mean_km > 0 else 0
        else:
            variance_coeff = 0

        opt_metrics = OptimizationMetrics(
            fitness_score=optimization_result.fitness_score,
            method=method,
            mileage_variance_coefficient=round(variance_coeff, 3),
            total_planned_km=total_km,
            optimization_runtime_ms=runtime_ms
        )

        return ScheduleResult(
            schedule_id=schedule_id,
            generated_at=now.isoformat(),
            valid_from=f"{date}T05:00:00+05:30",
            valid_until=f"{date}T23:00:00+05:30",
            depot=depot,
            trainsets=roster,
            fleet_summary=fleet_summary,
            optimization_metrics=opt_metrics,
            alerts=all_alerts
        )

    def _generate_service_trainset(
        self,
        trainset_id: str,
        index: int,
        num_service: int
    ) -> tuple:
        """Build the schedule entry for one revenue-service trainset.

        Returns:
            Tuple of (ScheduleTrainset, alerts, daily_km)
        """
        record = self.status_map.get(trainset_id, {})

        raw_blocks = self.service_block_generator.generate_service_blocks(index, num_service)
        service_blocks = []
        for raw in raw_blocks:
            service_blocks.append(ServiceBlock(
                block_id=raw['block_id'],
                departure_time=raw['departure_time'],
                origin=raw['origin'],
                destination=raw['destination'],
                trip_count=raw['trip_count'],
                estimated_km=raw['estimated_km']
            ))

        daily_km = sum(block.estimated_km for block in service_blocks)
        readiness = self._calculate_readiness(trainset_id)
        alerts = self._check_alerts(trainset_id)

        # Duty codes advance A1..A10, then B1..B10, ... by service rank.
        duty_code = f"DUTY-{chr(65 + index // 10)}{(index % 10) + 1}"

        trainset = ScheduleTrainset(
            trainset_id=trainset_id,
            status=TrainStatus.REVENUE_SERVICE,
            readiness_score=readiness,
            daily_km_allocation=daily_km,
            cumulative_km=record.get('total_mileage_km', 0),
            assigned_duty=duty_code,
            priority_rank=index + 1,
            service_blocks=service_blocks,
            stabling_bay=random.choice(DEPOT_BAYS),
            alerts=[alert.message for alert in alerts]
        )

        return trainset, alerts, daily_km

    def _generate_standby_trainset(self, trainset_id: str) -> tuple:
        """Build the schedule entry for a standby trainset.

        Returns:
            Tuple of (ScheduleTrainset, alerts)
        """
        record = self.status_map.get(trainset_id, {})
        readiness = self._calculate_readiness(trainset_id)
        alerts = self._check_alerts(trainset_id)

        trainset = ScheduleTrainset(
            trainset_id=trainset_id,
            status=TrainStatus.STANDBY,
            readiness_score=readiness,
            daily_km_allocation=0,
            cumulative_km=record.get('total_mileage_km', 0),
            stabling_bay=random.choice(DEPOT_BAYS),
            standby_reason=random.choice(STANDBY_REASONS),
            alerts=[alert.message for alert in alerts]
        )

        return trainset, alerts

    def _generate_maintenance_trainset(self, trainset_id: str) -> tuple:
        """Build the schedule entry for a trainset held in maintenance.

        Returns:
            Tuple of (ScheduleTrainset, alerts)
        """
        record = self.status_map.get(trainset_id, {})
        readiness = self._calculate_readiness(trainset_id)

        # Estimated turnaround: 4-12 hours from now.
        eta = datetime.now() + timedelta(hours=random.randint(4, 12))

        alerts = self._check_alerts(trainset_id)

        trainset = ScheduleTrainset(
            trainset_id=trainset_id,
            status=TrainStatus.MAINTENANCE,
            readiness_score=readiness,
            daily_km_allocation=0,
            cumulative_km=record.get('total_mileage_km', 0),
            maintenance_type=MaintenanceType.SCHEDULED_INSPECTION,
            ibl_bay=random.choice(IBL_BAYS),
            estimated_completion=eta.isoformat(),
            alerts=[alert.message for alert in alerts]
        )

        return trainset, alerts

    def _calculate_readiness(self, trainset_id: str) -> float:
        """Score a trainset's readiness in [0, 1] from certs and components."""
        # Deductions per normalized status; unlisted statuses cost nothing.
        cert_penalty = {'Expired': 0.3, 'Expiring-Soon': 0.1, 'Suspended': 0.2}
        comp_penalty = {'Critical': 0.15, 'Warning': 0.05}

        score = 1.0

        for cert in self.cert_map.get(trainset_id, []):
            status = normalize_certificate_status(cert.get('status', 'Valid'))
            score -= cert_penalty.get(status, 0.0)

        for comp in self.health_map.get(trainset_id, []):
            status = normalize_component_status(comp.get('status', 'Good'))
            score -= comp_penalty.get(status, 0.0)

        return max(0.0, min(1.0, round(score, 2)))

    def _check_alerts(self, trainset_id: str) -> List[ScheduleAlert]:
        """Collect certificate and component alerts for one trainset."""
        found = []

        # Certificate problems: expired is high severity, expiring is medium.
        for cert in self.cert_map.get(trainset_id, []):
            status = normalize_certificate_status(cert.get('status', 'Valid'))
            dept = cert.get('department', 'Unknown')

            if status == 'Expired':
                found.append(ScheduleAlert(
                    trainset_id=trainset_id,
                    severity=AlertSeverity.HIGH,
                    alert_type="CERTIFICATE_EXPIRED",
                    message=f"{dept} certificate expired"
                ))
            elif status == 'Expiring-Soon':
                found.append(ScheduleAlert(
                    trainset_id=trainset_id,
                    severity=AlertSeverity.MEDIUM,
                    alert_type="CERTIFICATE_EXPIRING",
                    message=f"{dept} certificate expiring soon"
                ))

        # Component problems: critical always alerts; warnings only past 80% wear.
        for comp in self.health_map.get(trainset_id, []):
            status = normalize_component_status(comp.get('status', 'Good'))
            comp_name = comp.get('component', 'Unknown')
            wear = comp.get('wear_level', 0)

            if status == 'Critical':
                found.append(ScheduleAlert(
                    trainset_id=trainset_id,
                    severity=AlertSeverity.HIGH,
                    alert_type="COMPONENT_CRITICAL",
                    message=f"{comp_name} in critical condition (wear: {wear}%)"
                ))
            elif status == 'Warning' and wear > 80:
                found.append(ScheduleAlert(
                    trainset_id=trainset_id,
                    severity=AlertSeverity.MEDIUM,
                    alert_type="COMPONENT_WARNING",
                    message=f"{comp_name} requires attention (wear: {wear}%)"
                ))

        return found
|
| 345 |
+
|
| 346 |
+
def generate_schedule_from_result(
    data: Dict,
    optimization_result: OptimizationResult,
    method: str = "ga",
    runtime_ms: int = 0,
    config: Optional[OptimizationConfig] = None,
    date: Optional[str] = None,
    depot: str = "Muttom_Depot"
) -> ScheduleResult:
    """Convenience wrapper: build a schedule from an optimization result.

    Constructs a throwaway :class:`ScheduleGenerator` and delegates to its
    :meth:`~ScheduleGenerator.generate_schedule`.

    Args:
        data: Input data dictionary
        optimization_result: Result from optimizer
        method: Optimization method used
        runtime_ms: Runtime in milliseconds
        config: Optimization configuration
        date: Schedule date
        depot: Depot name

    Returns:
        Complete ScheduleResult
    """
    return ScheduleGenerator(data, config).generate_schedule(
        optimization_result,
        method=method,
        runtime_ms=runtime_ms,
        date=date,
        depot=depot,
    )