danicor commited on
Commit
9a84bab
·
verified ·
1 Parent(s): e6e5043

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +627 -0
app.py ADDED
@@ -0,0 +1,627 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import time
3
+ import asyncio
4
+ import uuid
5
+ import hashlib
6
+ import threading
7
+ from datetime import datetime
8
+ from typing import Dict, List, Optional
9
+ from collections import deque
10
+ from fastapi import FastAPI, HTTPException, Request
11
+ from fastapi.middleware.cors import CORSMiddleware
12
+ from pydantic import BaseModel
13
+ import requests
14
+
15
# Configuration
ORCHESTRATOR_VERSION = "2.1.0"
WORKER_HEALTH_CHECK_INTERVAL = 30  # seconds between /api/health polls of each worker
WORKER_TIMEOUT = 120  # seconds allowed for a worker to accept a job POST
CACHE_EXPIRY = 3600 # 1 hour
20
+
21
# Cache System
class TranslationCache:
    """Thread-safe in-memory cache for completed translations.

    Entries are keyed by an MD5 digest of ``(text, source_lang, target_lang)``
    and expire after ``expiry_seconds``.  MD5 is acceptable here because the
    digest is only a cache key, not a security boundary.
    """

    def __init__(self, expiry_seconds: Optional[float] = None):
        """Create an empty cache.

        Args:
            expiry_seconds: Lifetime of an entry in seconds.  When None
                (the default) the module-level ``CACHE_EXPIRY`` is used,
                so existing callers keep the original behavior; passing a
                value generalizes the class for other expiry windows.
        """
        # Resolve the module constant lazily (at construction) so the
        # original coupling to CACHE_EXPIRY is preserved for the default.
        self.expiry = CACHE_EXPIRY if expiry_seconds is None else expiry_seconds
        self.cache: Dict[str, Dict] = {}
        self.lock = threading.Lock()

    def _generate_key(self, text: str, source_lang: str, target_lang: str) -> str:
        """Return a stable hex key for one (text, source, target) triple."""
        content = f"{text}:{source_lang}:{target_lang}"
        return hashlib.md5(content.encode()).hexdigest()

    def get(self, text: str, source_lang: str, target_lang: str) -> Optional[Dict]:
        """Return the cached payload for the triple, or None on miss/expiry.

        Expired entries are deleted eagerly on lookup.
        """
        key = self._generate_key(text, source_lang, target_lang)
        with self.lock:
            entry = self.cache.get(key)
            if entry is not None:
                if time.time() - entry['timestamp'] < self.expiry:
                    print(f"✅ Cache HIT for key: {key[:8]}...")
                    return entry['data']
                # Stale: drop it so the dict does not accumulate dead keys.
                del self.cache[key]
                print(f"⏰ Cache EXPIRED for key: {key[:8]}...")
            return None

    def set(self, text: str, source_lang: str, target_lang: str, data: Dict):
        """Store ``data`` for the triple, stamped with the current time."""
        key = self._generate_key(text, source_lang, target_lang)
        with self.lock:
            self.cache[key] = {
                'data': data,
                'timestamp': time.time()
            }
            print(f"💾 Cache SAVED for key: {key[:8]}... (Total: {len(self.cache)})")

    def clear_expired(self):
        """Drop every entry older than the expiry window (periodic GC)."""
        current_time = time.time()
        with self.lock:
            # Collect first, then delete: mutating a dict while iterating
            # over it raises RuntimeError.
            expired_keys = [
                key for key, entry in self.cache.items()
                if current_time - entry['timestamp'] >= self.expiry
            ]
            for key in expired_keys:
                del self.cache[key]
        if expired_keys:
            print(f"🧹 Cleared {len(expired_keys)} expired cache entries")

    def get_stats(self) -> Dict:
        """Return size counters (both keys kept for API compatibility)."""
        with self.lock:
            return {
                'cache_size': len(self.cache),
                'total_entries': len(self.cache)
            }
71
+
72
# Models
class TranslationRequest(BaseModel):
    """Request body for POST /api/translate."""

    request_id: str  # caller-supplied id, later used to poll /api/status/{request_id}
    text: str
    source_lang: str
    target_lang: str
    auto_charge: bool = False  # stored on the queued job; workers always receive False
    notification_url: Optional[str] = None  # stored on the queued job; workers always receive None
    wordpress_user_id: Optional[int] = None  # carried on the job dict; not read elsewhere in this file
81
+
82
class CacheCheckRequest(BaseModel):
    """Request body for POST /api/cache/check — lookup only, never translates."""

    text: str
    source_lang: str
    target_lang: str
86
+
87
class WorkerConfig(BaseModel):
    """Static configuration for one translation worker endpoint."""

    url: str  # base URL; stored without a trailing slash (see load_worker_configs)
    name: str
    priority: int = 1  # higher value is preferred by get_available_worker
    max_concurrent: int = 3  # cap on jobs dispatched to this worker at once
92
+
93
class WorkerStatus:
    """Mutable runtime state tracked for a single worker."""

    def __init__(self, config: WorkerConfig):
        # Static settings for this worker.
        self.config = config
        # Lifetime counters, updated on job completion/failure.
        self.total_completed = 0
        self.total_failed = 0
        self.avg_response_time = 0.0
        # Liveness and load, maintained by the health checker and dispatcher.
        self.available = False
        self.active_jobs = 0
        self.last_health_check = 0
102
+
103
class TranslationOrchestrator:
    """Central dispatcher: queues translation jobs, assigns them to healthy
    workers, monitors progress, retries failures, and caches results.

    Concurrency model: three background daemon threads (health checker,
    queue processor, cache cleaner) plus one thread per dispatched job.
    Shared state is guarded by two locks:
      * worker_lock — protects WorkerStatus mutation,
      * job_lock    — protects the job queue.
    NOTE(review): active_jobs / completed_jobs are read and written from
    several threads without a dedicated lock — this relies on CPython's
    per-operation atomicity; confirm that is acceptable here.
    """

    def __init__(self):
        self.workers: Dict[str, WorkerStatus] = {}
        self.job_queue = deque()  # FIFO of pending job dicts
        self.active_jobs: Dict[str, Dict] = {}  # request_id -> in-flight job info
        self.completed_jobs: Dict[str, Dict] = {}  # request_id -> final result/failure
        self.worker_lock = threading.Lock()
        self.job_lock = threading.Lock()
        self.cache = TranslationCache()

        # Construction has side effects: reads env vars and spawns the
        # three background daemon threads.
        self.load_worker_configs()
        self.start_health_checker()
        self.start_job_processor()
        self.start_cache_cleaner()

    def load_worker_configs(self):
        """Discover workers from WORKER_<n>_URL/_NAME/_PRIORITY/_MAX_CONCURRENT.

        Scans consecutive indices starting at 1 and stops at the first
        missing URL; falls back to three hardcoded spaces when none are set.
        """
        worker_index = 1
        workers_loaded = 0

        print("🔧 Loading worker configurations...")

        while True:
            url_key = f"WORKER_{worker_index}_URL"
            name_key = f"WORKER_{worker_index}_NAME"

            url = os.getenv(url_key)
            if not url:
                break

            name = os.getenv(name_key, f"Worker {worker_index}")
            priority = int(os.getenv(f"WORKER_{worker_index}_PRIORITY", "1"))
            max_concurrent = int(os.getenv(f"WORKER_{worker_index}_MAX_CONCURRENT", "1"))

            worker_id = f"worker_{worker_index}"
            config = WorkerConfig(
                url=url.rstrip('/'),  # normalize so f"{url}/path" joins cleanly
                name=name,
                priority=priority,
                max_concurrent=max_concurrent
            )

            self.workers[worker_id] = WorkerStatus(config)
            workers_loaded += 1
            print(f"✅ Loaded worker: {name} at {url}")
            worker_index += 1

        if workers_loaded == 0:
            print("⚠️ No workers in env, using hardcoded fallback")
            fallback_workers = [
                WorkerConfig(url="https://danicor-w1.hf.space", name="Worker 1", priority=1, max_concurrent=1),
                WorkerConfig(url="https://danicor-w2.hf.space", name="Worker 2", priority=1, max_concurrent=1),
                WorkerConfig(url="https://danicor-w3.hf.space", name="Worker 3", priority=1, max_concurrent=1),
            ]
            for i, cfg in enumerate(fallback_workers, start=1):
                self.workers[f"worker_{i}"] = WorkerStatus(cfg)
                print(f"✅ Hardcoded worker: {cfg.name} at {cfg.url}")

    def start_cache_cleaner(self):
        """Spawn a daemon thread that purges expired cache entries."""
        def cache_clean_loop():
            while True:
                time.sleep(300)  # Every 5 minutes
                self.cache.clear_expired()

        thread = threading.Thread(target=cache_clean_loop, daemon=True)
        thread.start()
        print("🧹 Cache cleaner started")

    def start_health_checker(self):
        """Spawn a daemon thread that polls all workers periodically."""
        def health_check_loop():
            while True:
                self.check_all_workers_health()
                time.sleep(WORKER_HEALTH_CHECK_INTERVAL)

        thread = threading.Thread(target=health_check_loop, daemon=True)
        thread.start()
        print("🏥 Health checker started")

    def check_all_workers_health(self):
        """Poll every worker's /api/health and update its availability flag.

        NOTE(review): worker_lock is held across all the blocking HTTP
        calls (up to 10s each), which stalls get_available_worker — and
        therefore job dispatch — for the whole sweep.  Consider taking a
        snapshot of the workers and polling outside the lock.
        """
        with self.worker_lock:
            for worker_id, worker in self.workers.items():
                try:
                    health_url = f"{worker.config.url}/api/health"
                    response = requests.get(health_url, timeout=10)

                    if response.status_code == 200:
                        data = response.json()
                        was_available = worker.available
                        worker.available = data.get('status') == 'healthy'
                        worker.last_health_check = time.time()

                        # Log only on state transitions to keep output quiet.
                        if worker.available and not was_available:
                            print(f"✅ {worker.config.name} is now available")
                        elif not worker.available and was_available:
                            print(f"❌ {worker.config.name} became unavailable")

                    else:
                        worker.available = False
                        print(f"⚠ {worker.config.name}: HTTP {response.status_code}")

                except Exception:
                    # Network errors simply mark the worker down; no log,
                    # to avoid spamming while a worker is offline.
                    worker.available = False

    def get_available_worker(self) -> Optional[str]:
        """Return the best free worker id, or None if all are busy/down.

        Preference order: highest priority first, then fewest active jobs.
        """
        with self.worker_lock:
            available_workers = [
                (worker_id, worker)
                for worker_id, worker in self.workers.items()
                if worker.available and worker.active_jobs < worker.config.max_concurrent
            ]

            if not available_workers:
                return None

            available_workers.sort(
                key=lambda x: (-x[1].config.priority, x[1].active_jobs)
            )

            return available_workers[0][0]

    def start_job_processor(self):
        """Spawn a daemon thread that drains the queue one job per 2s tick."""
        def process_queue_loop():
            while True:
                self.process_job_queue()
                time.sleep(2)

        thread = threading.Thread(target=process_queue_loop, daemon=True)
        thread.start()
        print("🔄 Job processor started")

    def add_job_to_queue(self, job_data: Dict):
        """Append a job dict to the FIFO queue (thread-safe)."""
        with self.job_lock:
            self.job_queue.append(job_data)
            print(f"📥 Job {job_data['request_id']} queued. Queue size: {len(self.job_queue)}")

    def process_job_queue(self):
        """Dispatch at most one queued job per call, if a worker is free."""
        # Unlocked fast-path check; re-checked under the lock below.
        if not self.job_queue:
            return

        with self.job_lock:
            if not self.job_queue:
                return

            worker_id = self.get_available_worker()
            if not worker_id:
                # No capacity right now; leave the job queued for next tick.
                return

            job_data = self.job_queue.popleft()

            self.assign_job_to_worker(worker_id, job_data)

    def assign_job_to_worker(self, worker_id: str, job_data: Dict):
        """Reserve a slot on the worker and launch a dispatch thread."""
        worker = self.workers[worker_id]
        request_id = job_data['request_id']

        print(f"🚀 Assigning job {request_id} to {worker.config.name}")

        with self.worker_lock:
            worker.active_jobs += 1

        self.active_jobs[request_id] = {
            'worker_id': worker_id,
            'job_data': job_data,
            'start_time': time.time(),
            'status': 'processing'
        }

        # The HTTP round-trip and the subsequent polling run off-thread so
        # the queue processor is never blocked on a slow worker.
        thread = threading.Thread(
            target=self.send_to_worker,
            args=(worker_id, job_data),
            daemon=True
        )
        thread.start()

    def send_to_worker(self, worker_id: str, job_data: Dict):
        """POST the job to the worker, then poll it to completion (same thread)."""
        worker = self.workers[worker_id]
        request_id = job_data['request_id']

        try:
            # auto_charge / notification_url are always disabled in the
            # forwarded payload — presumably handled by the orchestrator's
            # own caller; confirm this is intentional.
            worker_request = {
                'request_id': request_id,
                'text': job_data['text'],
                'source_lang': job_data['source_lang'],
                'target_lang': job_data['target_lang'],
                'auto_charge': False,
                'notification_url': None
            }

            response = requests.post(
                f"{worker.config.url}/api/translate/heavy",
                json=worker_request,
                timeout=WORKER_TIMEOUT
            )

            if response.status_code == 200:
                print(f"✅ Job {request_id} sent to {worker.config.name}")
                self.monitor_worker_job(worker_id, request_id)
            else:
                self.handle_worker_failure(worker_id, request_id, f"HTTP {response.status_code}")

        except Exception as e:
            self.handle_worker_failure(worker_id, request_id, str(e))

    def monitor_worker_job(self, worker_id: str, request_id: str):
        """Poll the worker's status endpoint every 5s, up to 60 checks (~5 min)."""
        worker = self.workers[worker_id]
        max_checks = 60
        check_count = 0

        while check_count < max_checks:
            time.sleep(5)
            check_count += 1

            try:
                response = requests.post(
                    f"{worker.config.url}/api/check-translation-status",
                    json={'request_id': request_id},
                    timeout=15
                )

                if response.status_code == 200:
                    data = response.json()

                    if data.get('success') and data.get('status') == 'completed':
                        self.handle_job_completion(worker_id, request_id, data)
                        return
                    elif data.get('status') == 'failed':
                        self.handle_worker_failure(worker_id, request_id, "Worker reported failure")
                        return

            except Exception as e:
                # Transient polling errors are logged and retried next tick.
                print(f"⚠ Error checking job {request_id}: {str(e)}")

        # Ran out of polls: treat as a failure (may trigger a retry).
        self.handle_worker_failure(worker_id, request_id, "Timeout waiting for completion")

    def handle_job_completion(self, worker_id: str, request_id: str, worker_response: Dict):
        """Record a successful result, update worker stats, cache the translation."""
        worker = self.workers[worker_id]

        print(f"🎉 Job {request_id} completed by {worker.config.name}")

        with self.worker_lock:
            worker.active_jobs -= 1
            worker.total_completed += 1

        job_info = self.active_jobs.get(request_id, {})
        job_data = job_info.get('job_data', {})

        processing_time = time.time() - job_info.get('start_time', time.time())

        completion_data = {
            'request_id': request_id,
            'status': 'completed',
            'translated_text': worker_response.get('translated_text'),
            'processing_time': processing_time,
            'character_count': worker_response.get('character_count', len(job_data.get('text', ''))),
            'translation_length': worker_response.get('translation_length', 0),
            'from_cache': False,  # produced by a worker, not the orchestrator cache
            'worker_name': worker.config.name,
            'completed_at': datetime.now().isoformat()
        }

        self.completed_jobs[request_id] = completion_data

        # Store in the orchestrator cache so an identical future request is
        # answered without a worker round-trip.
        self.cache.set(
            job_data.get('text', ''),
            job_data.get('source_lang', ''),
            job_data.get('target_lang', ''),
            completion_data
        )

        if request_id in self.active_jobs:
            del self.active_jobs[request_id]

    def handle_worker_failure(self, worker_id: str, request_id: str, error_message: str):
        """Record a failure; requeue the job up to 2 retries, else mark it failed."""
        worker = self.workers[worker_id]

        print(f"💥 Job {request_id} failed on {worker.config.name}: {error_message}")

        with self.worker_lock:
            worker.active_jobs -= 1
            worker.total_failed += 1

        job_info = self.active_jobs.get(request_id, {})
        job_data = job_info.get('job_data', {})

        # BUGFIX: retry_count lives on job_data (set at submit time and on
        # each requeue).  The original read it from job_info — the
        # active-jobs entry, which never has that key — so the counter was
        # always 0 and failed jobs were retried forever.
        retry_count = job_data.get('retry_count', 0)
        if retry_count < 2:
            print(f"🔄 Retrying job {request_id} (attempt {retry_count + 1})")

            job_data['retry_count'] = retry_count + 1
            self.add_job_to_queue(job_data)

            if request_id in self.active_jobs:
                del self.active_jobs[request_id]
        else:
            failure_data = {
                'request_id': request_id,
                'status': 'failed',
                'error_message': error_message,
                'failed_at': datetime.now().isoformat()
            }

            self.completed_jobs[request_id] = failure_data

            if request_id in self.active_jobs:
                del self.active_jobs[request_id]
407
+
408
# Initialize FastAPI app
app = FastAPI(
    title="Translation Orchestrator with Cache",
    description="Main orchestrator with integrated caching system",
    version=ORCHESTRATOR_VERSION
)

# NOTE(review): wildcard origins combined with allow_credentials=True is
# rejected by browsers for credentialed requests and is wide open for the
# rest — tighten allow_origins if cookies/auth headers are ever used.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Module-level singleton: constructing it reads env vars and starts the
# background daemon threads (health checker, job processor, cache cleaner)
# at import time.
orchestrator = TranslationOrchestrator()
424
+
425
@app.get("/")
async def root():
    """Service overview: worker counts, queue depths, and cache stats."""
    total_workers = len(orchestrator.workers)
    available_workers = sum(1 for w in orchestrator.workers.values() if w.available)
    cache_stats = orchestrator.cache.get_stats()

    return {
        "service": "Translation Orchestrator with Cache",
        "version": ORCHESTRATOR_VERSION,
        "status": "running",
        "workers": {
            "total": total_workers,
            "available": available_workers,
            "unavailable": total_workers - available_workers,
        },
        "queue": {
            "active_jobs": len(orchestrator.active_jobs),
            "queued_jobs": len(orchestrator.job_queue),
            "completed_jobs": len(orchestrator.completed_jobs),
        },
        "cache": cache_stats,
    }
447
+
448
@app.get("/api/health")
async def health_check():
    """Health probe; reports 'degraded' when no worker is currently up."""
    up_workers = [w for w in orchestrator.workers.values() if w.available]

    return {
        "status": "healthy" if up_workers else "degraded",
        "timestamp": datetime.now().isoformat(),
        "workers": {
            "total": len(orchestrator.workers),
            "available": len(up_workers),
        },
        "queue_stats": {
            "active": len(orchestrator.active_jobs),
            "queued": len(orchestrator.job_queue),
        },
        "cache_stats": orchestrator.cache.get_stats(),
    }
465
+
466
@app.post("/api/cache/check")
async def check_cache(request: CacheCheckRequest):
    """Check whether a translation for this text pair is already cached."""
    hit = orchestrator.cache.get(
        request.text,
        request.source_lang,
        request.target_lang
    )

    if not hit:
        return {
            "success": True,
            "cached": False,
            "message": "Translation not in cache"
        }

    return {
        "success": True,
        "cached": True,
        "data": hit
    }
487
+
488
@app.post("/api/translate")
async def submit_translation(request: TranslationRequest):
    """Serve from cache when possible; otherwise queue the job for a worker."""
    # Check the orchestrator cache first.
    hit = orchestrator.cache.get(
        request.text,
        request.source_lang,
        request.target_lang
    )

    if hit:
        print(f"📦 Returning cached translation for {request.request_id}")
        return {
            "success": True,
            **hit,
            "from_cache": True
        }

    # Not cached: refuse early when no worker could ever pick the job up.
    if not any(w.available for w in orchestrator.workers.values()):
        raise HTTPException(
            status_code=503,
            detail="No translation workers available. Please try again later."
        )

    orchestrator.add_job_to_queue({
        'request_id': request.request_id,
        'text': request.text,
        'source_lang': request.source_lang,
        'target_lang': request.target_lang,
        'auto_charge': request.auto_charge,
        'notification_url': request.notification_url,
        'wordpress_user_id': request.wordpress_user_id,
        'retry_count': 0
    })

    return {
        "success": True,
        "request_id": request.request_id,
        "status": "queued",
        "message": "Translation request queued successfully",
        "queue_position": len(orchestrator.job_queue),
        "from_cache": False
    }
533
+
534
@app.get("/api/status/{request_id}")
async def check_status(request_id: str):
    """Report completed / processing / queued / not_found for a request id."""
    # Finished (successfully or not)?
    done = orchestrator.completed_jobs.get(request_id)
    if done is not None:
        return {
            "success": True,
            **done
        }

    # Currently on a worker?
    active = orchestrator.active_jobs.get(request_id)
    if active is not None:
        return {
            "success": True,
            "request_id": request_id,
            "status": "processing",
            "worker_id": active['worker_id'],
            "elapsed_time": time.time() - active['start_time'],
            "message": "Translation in progress"
        }

    # Still waiting in the queue?
    if any(job['request_id'] == request_id for job in orchestrator.job_queue):
        return {
            "success": True,
            "request_id": request_id,
            "status": "queued",
            "message": "Translation request is queued"
        }

    return {
        "success": False,
        "request_id": request_id,
        "status": "not_found",
        "message": "Translation request not found"
    }
570
+
571
@app.get("/api/cache/stats")
async def get_cache_stats():
    """Return cache statistics."""
    stats = orchestrator.cache.get_stats()
    return {"success": True, "cache_stats": stats}
578
+
579
@app.post("/api/cache/clear")
async def clear_cache():
    """Wipe the whole cache.

    NOTE(review): intended for admins only, but the endpoint has no
    authentication — confirm access control is enforced upstream.
    """
    cache = orchestrator.cache
    with cache.lock:
        cache.cache.clear()

    return {
        "success": True,
        "message": "Cache cleared successfully"
    }
589
+
590
@app.get("/api/workers")
async def list_workers():
    """Expose per-worker configuration, availability, and lifetime counters."""
    snapshot = [
        {
            "id": worker_id,
            "name": worker.config.name,
            "url": worker.config.url,
            "available": worker.available,
            "active_jobs": worker.active_jobs,
            "max_concurrent": worker.config.max_concurrent,
            "priority": worker.config.priority,
            "total_completed": worker.total_completed,
            "total_failed": worker.total_failed,
            "last_health_check": worker.last_health_check,
        }
        for worker_id, worker in orchestrator.workers.items()
    ]

    return {
        "success": True,
        "workers": snapshot
    }
612
+
613
if __name__ == "__main__":
    import uvicorn

    # 7860 is the Hugging Face Spaces default port — presumably this is
    # deployed there (the fallback workers are *.hf.space); confirm.
    port = int(os.getenv("PORT", 7860))

    print(f"🚀 Translation Orchestrator v{ORCHESTRATOR_VERSION}")
    print(f"📡 Starting on port {port}")
    print(f"💾 Cache system enabled")

    uvicorn.run(
        app,
        host="0.0.0.0",
        port=port,
        log_level="info"
    )