petter2025 committed on
Commit
6c66a14
·
verified ·
1 Parent(s): c07fad9

Delete config

Browse files
config/__init__.py DELETED
@@ -1,12 +0,0 @@
1
- """
2
- Configuration module for ARF Demo
3
- """
4
- from .settings import Settings, get_settings, ARFMode, SafetyMode, InstallationStatus
5
-
6
- __all__ = [
7
- "Settings",
8
- "get_settings",
9
- "ARFMode",
10
- "SafetyMode",
11
- "InstallationStatus"
12
- ]
 
 
 
 
 
 
 
 
 
 
 
 
 
config/scenario_registry.py DELETED
@@ -1,155 +0,0 @@
1
- """
2
- Single source of truth for incident scenarios
3
- """
4
- import json
5
- import yaml
6
- from pathlib import Path
7
- from typing import Dict, Any, List, Optional
8
- import logging
9
- from functools import lru_cache
10
-
11
- logger = logging.getLogger(__name__)
12
-
13
-
14
class ScenarioRegistry:
    """Registry for incident scenarios with caching.

    All public accessors are classmethods operating on class-level state;
    instantiating the class always returns the same singleton object (kept
    for backward compatibility with callers that do ``ScenarioRegistry()``).
    """

    _instance = None   # singleton instance returned by __new__
    _scenarios = None  # lazily-populated {scenario name -> definition} cache

    def __new__(cls):
        # Classic singleton: every instantiation yields the one shared object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    @classmethod
    @lru_cache(maxsize=1)
    def load_scenarios(cls) -> Dict[str, Dict[str, Any]]:
        """
        Load scenarios from config files with caching

        Priority:
        1. scenarios.json in config directory
        2. scenarios.yaml in config directory
        3. scenarios.yml in config directory
        4. Hardcoded scenarios from demo module
        """
        # Imported lazily to avoid a circular import at module load time.
        from config.settings import settings

        config_path = Path(settings.scenario_config_path)

        # Candidate files, checked in priority order; first readable one wins.
        config_files = [
            config_path / "scenarios.json",
            config_path / "scenarios.yaml",
            config_path / "scenarios.yml",
        ]

        for file_path in config_files:
            if file_path.exists():
                try:
                    logger.info(f"Loading scenarios from {file_path}")
                    with open(file_path, 'r', encoding='utf-8') as f:
                        if file_path.suffix == '.json':
                            scenarios = json.load(f)
                        else:
                            scenarios = yaml.safe_load(f)

                    # Drop malformed entries; keep only validated scenarios.
                    validated = cls._validate_scenarios(scenarios)
                    logger.info(f"Loaded {len(validated)} scenarios from {file_path}")
                    return validated
                except Exception as e:
                    # Best-effort: log and fall through to the next candidate.
                    logger.error(f"Failed to load scenarios from {file_path}: {e}")

        # Fallback to hardcoded scenarios
        logger.info("Loading hardcoded scenarios from demo module")
        from demo.scenarios import INCIDENT_SCENARIOS
        return INCIDENT_SCENARIOS

    @classmethod
    def get_scenario(cls, name: str) -> Dict[str, Any]:
        """Get scenario by name.

        Args:
            name: Key of the scenario in the registry.

        Returns:
            A deep copy of the scenario definition.

        Raises:
            ValueError: If no scenario with that name exists.
        """
        if cls._scenarios is None:
            cls._scenarios = cls.load_scenarios()

        scenario = cls._scenarios.get(name)
        if not scenario:
            raise ValueError(f"Scenario '{name}' not found")

        # FIXED: the previous shallow ``scenario.copy()`` still shared the
        # nested "metrics"/"business_impact"/"roi_data" dicts with the cache,
        # so callers could mutate the registry's data through them.  Return a
        # deep copy to actually prevent mutation.
        import copy
        return copy.deepcopy(scenario)

    @classmethod
    def get_all_scenario_names(cls) -> List[str]:
        """Get all scenario names"""
        if cls._scenarios is None:
            cls._scenarios = cls.load_scenarios()

        return list(cls._scenarios.keys())

    @classmethod
    def get_scenario_metrics(cls, scenario_name: str) -> Dict[str, Any]:
        """Get metrics for a specific scenario"""
        scenario = cls.get_scenario(scenario_name)
        return scenario.get("metrics", {})

    @classmethod
    def get_scenario_business_impact(cls, scenario_name: str) -> Dict[str, Any]:
        """Get business impact for a specific scenario"""
        scenario = cls.get_scenario(scenario_name)
        return scenario.get("business_impact", {})

    @classmethod
    def get_scenario_roi_data(cls, scenario_name: str) -> Dict[str, Any]:
        """Get ROI data for a specific scenario"""
        scenario = cls.get_scenario(scenario_name)
        return scenario.get("roi_data", {})

    @classmethod
    def _validate_scenario(cls, scenario: Dict[str, Any]) -> bool:
        """Validate single scenario.

        Returns True when the scenario has all required fields, a known
        severity, a non-empty metrics dict and a business_impact dict.
        """
        required_fields = ["description", "severity", "component", "metrics", "business_impact"]

        # Check required fields
        for field in required_fields:
            if field not in scenario:
                logger.error(f"Missing required field: {field}")
                return False

        # Validate severity
        valid_severities = ["LOW", "MEDIUM", "HIGH", "CRITICAL"]
        if scenario["severity"] not in valid_severities:
            logger.error(f"Invalid severity: {scenario['severity']}")
            return False

        # Validate metrics (at least one metric required)
        if not isinstance(scenario["metrics"], dict) or not scenario["metrics"]:
            logger.error("Metrics must be a non-empty dictionary")
            return False

        # Validate business impact
        if not isinstance(scenario["business_impact"], dict):
            logger.error("Business impact must be a dictionary")
            return False

        return True

    @classmethod
    def _validate_scenarios(cls, scenarios: Dict[str, Dict]) -> Dict[str, Dict]:
        """Validate all scenarios and return valid ones"""
        validated = {}

        for name, scenario in scenarios.items():
            if cls._validate_scenario(scenario):
                validated[name] = scenario
            else:
                logger.warning(f"Skipping invalid scenario: {name}")

        return validated

    @classmethod
    def reload_scenarios(cls) -> None:
        """Clear cache and reload scenarios.

        Clears both the lru_cache on load_scenarios and the class-level
        _scenarios cache; the next accessor call re-reads the config files.
        """
        cls.load_scenarios.cache_clear()
        cls._scenarios = None
        logger.info("Scenario cache cleared")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
config/scenarios.json DELETED
@@ -1,57 +0,0 @@
1
- {
2
- "Cache Miss Storm": {
3
- "description": "Redis cluster experiencing 80% cache miss rate causing database overload",
4
- "severity": "CRITICAL",
5
- "component": "redis_cache",
6
- "metrics": {
7
- "cache_hit_rate": 18.5,
8
- "database_load": 92,
9
- "response_time_ms": 1850,
10
- "affected_users": 45000,
11
- "eviction_rate_per_sec": 125
12
- },
13
- "business_impact": {
14
- "revenue_loss_per_hour": 8500,
15
- "sla_violation": true,
16
- "customer_sat_change": -40,
17
- "affected_services": ["API Gateway", "User Service", "Payment"]
18
- },
19
- "roi_data": {
20
- "hourly_revenue_loss": 8500,
21
- "manual_recovery_hours": 1.0,
22
- "enterprise_recovery_hours": 0.2,
23
- "engineers_required": 4,
24
- "engineer_hourly_rate": 150,
25
- "estimated_monthly_occurrences": 2,
26
- "enterprise_savings_percentage": 0.85
27
- }
28
- },
29
- "Database Connection Pool Exhaustion": {
30
- "description": "PostgreSQL connection pool exhausted causing API timeouts",
31
- "severity": "HIGH",
32
- "component": "postgresql_database",
33
- "metrics": {
34
- "active_connections": 98,
35
- "max_connections": 100,
36
- "api_latency_ms": 2450,
37
- "error_rate": 15.2,
38
- "queue_depth": 1250,
39
- "connection_wait_seconds": 45
40
- },
41
- "business_impact": {
42
- "revenue_loss_per_hour": 4200,
43
- "affected_services": ["API Gateway", "User Service", "Payment Service"],
44
- "sla_violation": true,
45
- "partner_api_impact": 3
46
- },
47
- "roi_data": {
48
- "hourly_revenue_loss": 4200,
49
- "manual_recovery_hours": 0.75,
50
- "enterprise_recovery_hours": 0.13,
51
- "engineers_required": 2,
52
- "engineer_hourly_rate": 150,
53
- "estimated_monthly_occurrences": 3,
54
- "enterprise_savings_percentage": 0.82
55
- }
56
- }
57
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
config/scenarios.yaml DELETED
@@ -1,150 +0,0 @@
1
- Cache Miss Storm:
2
- description: "Redis cluster experiencing 80% cache miss rate causing database overload"
3
- severity: "CRITICAL"
4
- component: "redis_cache"
5
- metrics:
6
- cache_hit_rate: 18.5
7
- database_load: 92
8
- response_time_ms: 1850
9
- affected_users: 45000
10
- eviction_rate_per_sec: 125
11
- business_impact:
12
- revenue_loss_per_hour: 8500
13
- sla_violation: true
14
- customer_sat_change: -40
15
- affected_services:
16
- - "API Gateway"
17
- - "User Service"
18
- - "Payment"
19
- roi_data:
20
- hourly_revenue_loss: 8500
21
- manual_recovery_hours: 1.0
22
- enterprise_recovery_hours: 0.2
23
- engineers_required: 4
24
- engineer_hourly_rate: 150
25
- estimated_monthly_occurrences: 2
26
- enterprise_savings_percentage: 0.85
27
-
28
- Database Connection Pool Exhaustion:
29
- description: "PostgreSQL connection pool exhausted causing API timeouts"
30
- severity: "HIGH"
31
- component: "postgresql_database"
32
- metrics:
33
- active_connections: 98
34
- max_connections: 100
35
- api_latency_ms: 2450
36
- error_rate: 15.2
37
- queue_depth: 1250
38
- connection_wait_seconds: 45
39
- business_impact:
40
- revenue_loss_per_hour: 4200
41
- affected_services:
42
- - "API Gateway"
43
- - "User Service"
44
- - "Payment Service"
45
- sla_violation: true
46
- partner_api_impact: 3
47
- roi_data:
48
- hourly_revenue_loss: 4200
49
- manual_recovery_hours: 0.75
50
- enterprise_recovery_hours: 0.13
51
- engineers_required: 2
52
- engineer_hourly_rate: 150
53
- estimated_monthly_occurrences: 3
54
- enterprise_savings_percentage: 0.82
55
-
56
- Kubernetes Memory Leak:
57
- description: "Java microservice memory leak causing pod restarts"
58
- severity: "HIGH"
59
- component: "java_payment_service"
60
- metrics:
61
- memory_usage: 96
62
- gc_pause_time_ms: 4500
63
- error_rate: 28.5
64
- restart_frequency_per_hour: 12
65
- heap_fragmentation: 42
66
- business_impact:
67
- revenue_loss_per_hour: 5500
68
- session_loss: 8500
69
- payment_failures_percentage: 3.2
70
- support_tickets_increase: 300
71
- roi_data:
72
- hourly_revenue_loss: 5500
73
- manual_recovery_hours: 1.5
74
- enterprise_recovery_hours: 0.25
75
- engineers_required: 3
76
- engineer_hourly_rate: 150
77
- estimated_monthly_occurrences: 1
78
- enterprise_savings_percentage: 0.79
79
-
80
- API Rate Limit Storm:
81
- description: "Third-party API rate limiting causing cascading failures"
82
- severity: "MEDIUM"
83
- component: "external_api_gateway"
84
- metrics:
85
- rate_limit_hits_percentage: 95
86
- error_rate: 42.8
87
- retry_storm: true
88
- cascade_effect_services: 3
89
- queue_backlog: 8500
90
- business_impact:
91
- revenue_loss_per_hour: 3800
92
- partner_sla_breach: true
93
- data_sync_delay_hours: 4
94
- customer_reports_delay_hours: 6
95
- roi_data:
96
- hourly_revenue_loss: 3800
97
- manual_recovery_hours: 1.25
98
- enterprise_recovery_hours: 0.17
99
- engineers_required: 3
100
- engineer_hourly_rate: 150
101
- estimated_monthly_occurrences: 4
102
- enterprise_savings_percentage: 0.85
103
-
104
- Network Partition:
105
- description: "Network partition causing split-brain in distributed database"
106
- severity: "CRITICAL"
107
- component: "distributed_database"
108
- metrics:
109
- partition_detected: true
110
- write_conflicts: 1250
111
- data_inconsistency_percentage: 8.5
112
- replication_lag_seconds: 45
113
- quorum_lost: true
114
- business_impact:
115
- revenue_loss_per_hour: 12000
116
- data_corruption_risk: true
117
- recovery_complexity: "HIGH"
118
- compliance_violation: true
119
- roi_data:
120
- hourly_revenue_loss: 12000
121
- manual_recovery_hours: 2.0
122
- enterprise_recovery_hours: 0.3
123
- engineers_required: 5
124
- engineer_hourly_rate: 150
125
- estimated_monthly_occurrences: 0.5
126
- enterprise_savings_percentage: 0.88
127
-
128
- Storage I/O Saturation:
129
- description: "Storage system I/O saturation causing application timeouts"
130
- severity: "HIGH"
131
- component: "storage_cluster"
132
- metrics:
133
- io_utilization: 98
134
- latency_ms: 450
135
- throughput_mbps: 1250
136
- queue_depth: 850
137
- error_rate: 8.5
138
- business_impact:
139
- revenue_loss_per_hour: 6800
140
- data_processing_delay_hours: 3
141
- analytics_backlog: true
142
- reporting_failure: true
143
- roi_data:
144
- hourly_revenue_loss: 6800
145
- manual_recovery_hours: 1.75
146
- enterprise_recovery_hours: 0.22
147
- engineers_required: 3
148
- engineer_hourly_rate: 150
149
- estimated_monthly_occurrences: 1.5
150
- enterprise_savings_percentage: 0.83
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
config/settings.py DELETED
@@ -1,318 +0,0 @@
1
- """
2
- ARF Settings Configuration with Pydantic 2.x Compatibility
3
- Fixed for both OSS and Enterprise deployments - FIXED VERSION CONSISTENCY
4
- """
5
- import os
6
- import sys
7
- from enum import Enum
8
- from typing import Optional, Dict, Any, List, Union
9
- from datetime import datetime
10
-
11
- # Try to import pydantic with version detection
12
- try:
13
- from pydantic import Field, validator, model_validator
14
- from pydantic_settings import BaseSettings, SettingsConfigDict
15
- PYDANTIC_V2 = True
16
- except ImportError:
17
- # Fallback for pydantic v1
18
- from pydantic import Field, validator
19
- from pydantic_settings import BaseSettings
20
- PYDANTIC_V2 = False
21
-
22
- import logging
23
- logger = logging.getLogger(__name__)
24
-
25
# Enums for ARF modes
class ARFMode(str, Enum):
    """ARF Operational Modes.

    Values are lowercase strings; the Settings validators lower-case
    incoming env values before conversion, so e.g. ``ARF_ARF_MODE=DEMO``
    also resolves to ``ARFMode.DEMO``.
    """
    DEMO = "demo"
    OSS = "oss"
    ENTERPRISE = "enterprise"
    HYBRID = "hybrid"
    SIMULATION = "simulation"
33
-
34
class SafetyMode(str, Enum):
    """Safety and Compliance Modes.

    Lowercase string values; the Settings validators fall back to
    ``STANDARD`` when an unknown string is supplied.
    """
    STANDARD = "standard"
    STRICT = "strict"
    PERMISSIVE = "permissive"
    ENTERPRISE = "enterprise"
    COMPLIANCE = "compliance"
41
-
42
class InstallationStatus(str, Enum):
    """Package Installation Status.

    Derived during Settings validation from import probes of the OSS and
    Enterprise packages; ``DEMO_MODE`` is the fallback when neither is
    importable.
    """
    NOT_INSTALLED = "not_installed"
    OSS_ONLY = "oss_only"
    ENTERPRISE_ONLY = "enterprise_only"
    FULL_INSTALL = "full_install"
    DEMO_MODE = "demo_mode"
49
-
50
- # Pydantic 2.x compatible Settings class
51
- if PYDANTIC_V2:
52
    class Settings(BaseSettings):
        """ARF Settings with Pydantic 2.x compatibility - FIXED: Version consistency"""
        # Read configuration from .env and ARF_-prefixed environment
        # variables, case-insensitively; unknown keys are ignored.
        model_config = SettingsConfigDict(
            env_file=".env",
            env_file_encoding="utf-8",
            env_prefix="ARF_",
            case_sensitive=False,
            extra="ignore"
        )

        # Core Settings
        arf_mode: ARFMode = Field(default=ARFMode.DEMO, description="ARF operational mode")
        safety_mode: SafetyMode = Field(default=SafetyMode.STANDARD, description="Safety compliance mode")
        debug: bool = Field(default=False, description="Enable debug logging")
        log_level: str = Field(default="INFO", description="Logging level")

        # Version Info - FIXED: Updated to v3.3.9 (actual PyPI version)
        arf_version: str = Field(default="3.3.9", description="ARF version")
        demo_version: str = Field(default="3.8.0", description="Demo app version")

        # Installation Status (overwritten by check_installation_status below)
        oss_installed: bool = Field(default=False, description="ARF OSS package installed")
        enterprise_installed: bool = Field(default=False, description="Enterprise package installed")
        installation_status: InstallationStatus = Field(
            default=InstallationStatus.DEMO_MODE,
            description="Overall installation status"
        )

        # Feature Flags
        enable_telemetry: bool = Field(default=True, description="Enable telemetry collection")
        enable_anomaly_detection: bool = Field(default=True, description="Enable anomaly detection")
        enable_business_metrics: bool = Field(default=True, description="Enable business metrics")
        enable_audit_trail: bool = Field(default=True, description="Enable audit trail")

        # Paths
        data_dir: str = Field(default="./data", description="Data directory")
        log_dir: str = Field(default="./logs", description="Log directory")
        cache_dir: str = Field(default="./cache", description="Cache directory")
        scenario_config_path: str = Field(default="./config/scenarios", description="Path to scenario configuration files")

        # API Settings
        api_timeout: int = Field(default=30, description="API timeout in seconds")
        max_retries: int = Field(default=3, description="Maximum API retries")
        rate_limit: int = Field(default=100, description="API rate limit per minute")

        # UI Settings
        ui_theme: str = Field(default="dark", description="UI theme")
        refresh_interval: int = Field(default=5, description="UI refresh interval in seconds")

        # Business Settings
        default_currency: str = Field(default="USD", description="Default currency")
        cost_per_incident: float = Field(default=10000.0, description="Default cost per incident")
        engineer_hourly_rate: float = Field(default=150.0, description="Engineer hourly rate")
        engineer_annual_cost: float = Field(default=200000.0, description="Engineer annual cost")

        @model_validator(mode='before')
        @classmethod
        def validate_enums(cls, data: Any) -> Any:
            """Validate and convert enum fields from strings.

            Runs before field validation: lower-cases incoming string values
            for the three enum fields and silently substitutes each field's
            default member when the string is not a valid enum value.
            """
            if isinstance(data, dict):
                # Convert string values to enums if needed
                if 'arf_mode' in data and isinstance(data['arf_mode'], str):
                    try:
                        data['arf_mode'] = ARFMode(data['arf_mode'].lower())
                    except ValueError:
                        data['arf_mode'] = ARFMode.DEMO

                if 'safety_mode' in data and isinstance(data['safety_mode'], str):
                    try:
                        data['safety_mode'] = SafetyMode(data['safety_mode'].lower())
                    except ValueError:
                        data['safety_mode'] = SafetyMode.STANDARD

                if 'installation_status' in data and isinstance(data['installation_status'], str):
                    try:
                        data['installation_status'] = InstallationStatus(data['installation_status'].lower())
                    except ValueError:
                        data['installation_status'] = InstallationStatus.DEMO_MODE
            return data

        @model_validator(mode='after')
        def check_installation_status(self):
            """Update installation status based on detected packages.

            Probes the OSS and Enterprise packages by import and overwrites
            oss_installed / enterprise_installed / installation_status from
            the probe results — any env-provided values for these three
            fields are therefore superseded.
            """
            # Check if OSS is installed
            try:
                import agentic_reliability_framework
                self.oss_installed = True
            except ImportError:
                self.oss_installed = False

            # Check if Enterprise is installed
            try:
                import agentic_reliability_enterprise
                self.enterprise_installed = True
            except ImportError:
                self.enterprise_installed = False

            # Update installation status
            if self.oss_installed and self.enterprise_installed:
                self.installation_status = InstallationStatus.FULL_INSTALL
            elif self.oss_installed:
                self.installation_status = InstallationStatus.OSS_ONLY
            elif self.enterprise_installed:
                self.installation_status = InstallationStatus.ENTERPRISE_ONLY
            else:
                self.installation_status = InstallationStatus.DEMO_MODE

            return self
160
-
161
- else:
162
    # Pydantic v1 compatible Settings class
    class Settings(BaseSettings):
        """ARF Settings with Pydantic v1 compatibility - FIXED: Version consistency"""
        class Config:
            # Read configuration from .env and ARF_-prefixed environment
            # variables, case-insensitively.
            env_file = ".env"
            env_file_encoding = "utf-8"
            env_prefix = "ARF_"
            case_sensitive = False

        # Core Settings
        arf_mode: ARFMode = Field(default=ARFMode.DEMO, description="ARF operational mode")
        safety_mode: SafetyMode = Field(default=SafetyMode.STANDARD, description="Safety compliance mode")
        debug: bool = Field(default=False, description="Enable debug logging")
        log_level: str = Field(default="INFO", description="Logging level")

        # Version Info - FIXED: Updated to v3.3.9 (actual PyPI version)
        arf_version: str = Field(default="3.3.9", description="ARF version")
        demo_version: str = Field(default="3.8.0", description="Demo app version")

        # Installation Status (recomputed by update_installation_status below)
        oss_installed: bool = Field(default=False, description="ARF OSS package installed")
        enterprise_installed: bool = Field(default=False, description="Enterprise package installed")
        installation_status: InstallationStatus = Field(
            default=InstallationStatus.DEMO_MODE,
            description="Overall installation status"
        )

        # Feature Flags
        enable_telemetry: bool = Field(default=True, description="Enable telemetry collection")
        enable_anomaly_detection: bool = Field(default=True, description="Enable anomaly detection")
        enable_business_metrics: bool = Field(default=True, description="Enable business metrics")
        enable_audit_trail: bool = Field(default=True, description="Enable audit trail")

        # Paths
        data_dir: str = Field(default="./data", description="Data directory")
        log_dir: str = Field(default="./logs", description="Log directory")
        cache_dir: str = Field(default="./cache", description="Cache directory")
        scenario_config_path: str = Field(default="./config/scenarios", description="Path to scenario configuration files")

        # API Settings
        api_timeout: int = Field(default=30, description="API timeout in seconds")
        max_retries: int = Field(default=3, description="Maximum API retries")
        rate_limit: int = Field(default=100, description="API rate limit per minute")

        # UI Settings
        ui_theme: str = Field(default="dark", description="UI theme")
        refresh_interval: int = Field(default=5, description="UI refresh interval in seconds")

        # Business Settings
        default_currency: str = Field(default="USD", description="Default currency")
        cost_per_incident: float = Field(default=10000.0, description="Default cost per incident")
        engineer_hourly_rate: float = Field(default=150.0, description="Engineer hourly rate")
        engineer_annual_cost: float = Field(default=200000.0, description="Engineer annual cost")

        @validator('arf_mode', 'safety_mode', 'installation_status', pre=True)
        def validate_enum_strings(cls, v, field):
            """Convert string values to enums.

            Lower-cases incoming strings and substitutes each field's
            default enum member when the value is not a valid enum value.
            """
            if isinstance(v, str):
                try:
                    if field.name == 'arf_mode':
                        return ARFMode(v.lower())
                    elif field.name == 'safety_mode':
                        return SafetyMode(v.lower())
                    elif field.name == 'installation_status':
                        return InstallationStatus(v.lower())
                except ValueError:
                    # Return default value
                    if field.name == 'arf_mode':
                        return ARFMode.DEMO
                    elif field.name == 'safety_mode':
                        return SafetyMode.STANDARD
                    elif field.name == 'installation_status':
                        return InstallationStatus.DEMO_MODE
            return v

        @validator('installation_status', always=True)
        def update_installation_status(cls, v, values):
            """Update installation status based on detected packages.

            Probes the OSS and Enterprise packages by import, records the
            results in ``values`` for the oss_installed/enterprise_installed
            flags, and returns the derived status (superseding any
            env-provided value ``v``).
            """
            # Check if OSS is installed
            try:
                import agentic_reliability_framework
                values['oss_installed'] = True
            except ImportError:
                values['oss_installed'] = False

            # Check if Enterprise is installed
            try:
                import agentic_reliability_enterprise
                values['enterprise_installed'] = True
            except ImportError:
                values['enterprise_installed'] = False

            # Determine installation status
            if values.get('oss_installed') and values.get('enterprise_installed'):
                return InstallationStatus.FULL_INSTALL
            elif values.get('oss_installed'):
                return InstallationStatus.OSS_ONLY
            elif values.get('enterprise_installed'):
                return InstallationStatus.ENTERPRISE_ONLY
            else:
                return InstallationStatus.DEMO_MODE
263
-
264
# Singleton settings instance
try:
    settings = Settings()
    logger.info("✅ Settings initialized successfully")
    logger.info(f"📊 ARF Mode: {settings.arf_mode}")
    logger.info(f"🛡️ Safety Mode: {settings.safety_mode}")
    logger.info(f"📦 Installation Status: {settings.installation_status}")
    logger.info(f"🔖 ARF Version: {settings.arf_version}")  # Added version logging
except Exception as e:
    logger.error(f"❌ Failed to initialize settings: {e}")
    # Create fallback settings - FIXED: Updated version to 3.3.9
    # Plain attribute bag mirroring every Settings field default, so the rest
    # of the app can keep reading settings.<attr> even when pydantic
    # initialization fails.
    class FallbackSettings:
        arf_mode = ARFMode.DEMO
        safety_mode = SafetyMode.STANDARD
        debug = False
        log_level = "INFO"
        arf_version = "3.3.9"  # FIXED: Updated from 3.3.7 to 3.3.9
        demo_version = "3.8.0"
        oss_installed = False
        enterprise_installed = False
        installation_status = InstallationStatus.DEMO_MODE
        enable_telemetry = True
        enable_anomaly_detection = True
        enable_business_metrics = True
        enable_audit_trail = True
        data_dir = "./data"
        log_dir = "./logs"
        cache_dir = "./cache"
        scenario_config_path = "./config/scenarios"
        api_timeout = 30
        max_retries = 3
        rate_limit = 100
        ui_theme = "dark"
        refresh_interval = 5
        default_currency = "USD"
        cost_per_incident = 10000.0
        engineer_hourly_rate = 150.0
        engineer_annual_cost = 200000.0

    settings = FallbackSettings()
    logger.warning("⚠️ Using fallback settings due to initialization error")
305
-
306
def get_settings() -> Settings:
    """Get the settings singleton instance.

    Returns the module-level ``settings`` object created at import time
    (or the FallbackSettings stand-in if pydantic initialization failed).
    """
    return settings
309
-
310
# Export everything: the Settings class, the enums, the accessor, and the
# module-level singleton itself.
__all__ = [
    "Settings",
    "get_settings",
    "ARFMode",
    "SafetyMode",
    "InstallationStatus",
    "settings"
]