Prathamesh Sarjerao Vaidya committed on
Commit
0e9dd68
·
1 Parent(s): 8770a8c

removed redis feature

Browse files
backend/app/api/routes.py CHANGED
@@ -1,5 +1,5 @@
1
  """
2
- API Routes - With Optional Redis/Celery Support
3
  """
4
 
5
  from fastapi import APIRouter, File, UploadFile, HTTPException, WebSocket, WebSocketDisconnect
@@ -23,7 +23,6 @@ from app.services.cleanup_service import cleanup_service
23
 
24
  logger = logging.getLogger(__name__)
25
 
26
- # Create router
27
  router = APIRouter()
28
 
29
  @router.get("/health", response_model=HealthResponse)
@@ -78,7 +77,7 @@ async def upload_video(file: UploadFile = File(...)):
78
 
79
  @router.post("/api/analyze/{session_id}")
80
  async def analyze_video(session_id: str):
81
- """Start video analysis (uses Celery if enabled, otherwise direct processing)"""
82
  try:
83
  session = session_manager.get_session(session_id)
84
  if not session:
@@ -90,43 +89,23 @@ async def analyze_video(session_id: str):
90
  detail=f"Invalid session status: {session['status']}"
91
  )
92
 
93
- # Check if Redis/Celery is enabled
94
- if Config.USE_REDIS:
95
- # Use Celery for async processing
96
- from app.services.processing_service import processing_service
97
-
98
- task_info = processing_service.start_processing(session_id)
99
-
100
- return {
101
- "success": True,
102
- "message": "Analysis queued",
103
- "session_id": session_id,
104
- "task_id": task_info["task_id"],
105
- "poll_url": f"/api/task/{task_info['task_id']}",
106
- "mode": "celery"
107
- }
108
- else:
109
- # Direct processing without Celery
110
- logger.info(f"⚑ Direct processing mode (Redis disabled)")
111
-
112
- # Update status
113
- session_manager.update_session(session_id, {
114
- "status": "processing",
115
- "start_time": datetime.now().isoformat()
116
- })
117
 
118
- # Start async processing without Celery
119
- asyncio.create_task(
120
- process_video_direct(session_id)
121
- )
122
 
123
- return {
124
- "success": True,
125
- "message": "Analysis started",
126
- "session_id": session_id,
127
- "websocket_url": f"/ws/{session_id}",
128
- "mode": "direct"
129
- }
130
 
131
  except HTTPException:
132
  raise
@@ -136,7 +115,7 @@ async def analyze_video(session_id: str):
136
 
137
 
138
  async def process_video_direct(session_id: str):
139
- """Direct video processing without Celery (for Windows)"""
140
  from app.core.video_processor import VideoProcessor
141
  import json
142
 
@@ -169,12 +148,19 @@ async def process_video_direct(session_id: str):
169
 
170
  logger.info(f"Progress {session_id}: {progress*100:.1f}% - {message}")
171
 
172
- # Process video
173
  logger.info(f"Processing video: {input_path}")
174
- results = processor.process_video(
175
- video_path=input_path,
176
- output_path=output_path,
177
- progress_callback=progress_callback
 
 
 
 
 
 
 
178
  )
179
 
180
  # Save JSON
@@ -212,27 +198,6 @@ async def process_video_direct(session_id: str):
212
  })
213
 
214
 
215
- @router.get("/api/task/{task_id}")
216
- async def get_task_status(task_id: str):
217
- """Get Celery task status (only works if Redis is enabled)"""
218
- if not Config.USE_REDIS:
219
- raise HTTPException(
220
- status_code=501,
221
- detail="Task polling not available in direct processing mode"
222
- )
223
-
224
- try:
225
- from app.services.processing_service import processing_service
226
- status = processing_service.get_task_status(task_id)
227
- return {
228
- "success": True,
229
- "task_id": task_id,
230
- **status
231
- }
232
- except Exception as e:
233
- raise HTTPException(status_code=500, detail=str(e))
234
-
235
-
236
  @router.get("/api/results/{session_id}", response_model=ResultsResponse)
237
  async def get_results(session_id: str):
238
  """Get analysis results for a session"""
@@ -249,19 +214,11 @@ async def get_results(session_id: str):
249
  download_url=None
250
  )
251
 
252
- # Convert results to ensure JSON serialization
253
- safe_results = session.get("results", {})
254
-
255
- # Only convert if Redis is enabled and results might have numpy types
256
- if Config.USE_REDIS:
257
- from app.services.processing_service import convert_to_native_bool
258
- safe_results = convert_to_native_bool(safe_results)
259
-
260
  return ResultsResponse(
261
  success=True,
262
  session_id=session_id,
263
  status=session["status"],
264
- results=safe_results,
265
  download_url=f"/api/download/{session_id}"
266
  )
267
 
@@ -378,16 +335,4 @@ async def trigger_cleanup():
378
  "success": True,
379
  "message": "Cleanup completed",
380
  **result
381
- }
382
-
383
-
384
- @router.get("/api/config")
385
- async def get_config_info():
386
- """Get current configuration info"""
387
- return {
388
- "platform": "Windows" if Config.IS_WINDOWS else "Linux/Mac",
389
- "redis_enabled": Config.USE_REDIS,
390
- "docker": Config.IS_DOCKER,
391
- "hf_space": Config.IS_HF_SPACE,
392
- "processing_mode": "celery" if Config.USE_REDIS else "direct"
393
  }
 
1
  """
2
+ API Routes - Direct Processing (No Redis/Celery)
3
  """
4
 
5
  from fastapi import APIRouter, File, UploadFile, HTTPException, WebSocket, WebSocketDisconnect
 
23
 
24
  logger = logging.getLogger(__name__)
25
 
 
26
  router = APIRouter()
27
 
28
  @router.get("/health", response_model=HealthResponse)
 
77
 
78
  @router.post("/api/analyze/{session_id}")
79
  async def analyze_video(session_id: str):
80
+ """Start video analysis with direct processing"""
81
  try:
82
  session = session_manager.get_session(session_id)
83
  if not session:
 
89
  detail=f"Invalid session status: {session['status']}"
90
  )
91
 
92
+ logger.info(f"⚑ Starting direct processing for {session_id}")
93
+
94
+ # Update status
95
+ session_manager.update_session(session_id, {
96
+ "status": "processing",
97
+ "start_time": datetime.now().isoformat()
98
+ })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
99
 
100
+ # Start async processing
101
+ asyncio.create_task(process_video_direct(session_id))
 
 
102
 
103
+ return {
104
+ "success": True,
105
+ "message": "Analysis started",
106
+ "session_id": session_id,
107
+ "websocket_url": f"/ws/{session_id}"
108
+ }
 
109
 
110
  except HTTPException:
111
  raise
 
115
 
116
 
117
  async def process_video_direct(session_id: str):
118
+ """Direct video processing (async background task)"""
119
  from app.core.video_processor import VideoProcessor
120
  import json
121
 
 
148
 
149
  logger.info(f"Progress {session_id}: {progress*100:.1f}% - {message}")
150
 
151
+ # Process video (blocking call in background task)
152
  logger.info(f"Processing video: {input_path}")
153
+
154
+ # Run blocking code in executor to not block event loop
155
+ import asyncio
156
+ loop = asyncio.get_event_loop()
157
+
158
+ results = await loop.run_in_executor(
159
+ None,
160
+ processor.process_video,
161
+ input_path,
162
+ output_path,
163
+ lambda p, m: asyncio.create_task(progress_callback(p, m))
164
  )
165
 
166
  # Save JSON
 
198
  })
199
 
200
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
201
  @router.get("/api/results/{session_id}", response_model=ResultsResponse)
202
  async def get_results(session_id: str):
203
  """Get analysis results for a session"""
 
214
  download_url=None
215
  )
216
 
 
 
 
 
 
 
 
 
217
  return ResultsResponse(
218
  success=True,
219
  session_id=session_id,
220
  status=session["status"],
221
+ results=session.get("results", {}),
222
  download_url=f"/api/download/{session_id}"
223
  )
224
 
 
335
  "success": True,
336
  "message": "Cleanup completed",
337
  **result
 
 
 
 
 
 
 
 
 
 
 
 
338
  }
backend/app/celery_app.py DELETED
@@ -1,34 +0,0 @@
1
- """
2
- Celery application for background task processing
3
- """
4
-
5
- from celery import Celery
6
- import os
7
-
8
- # Redis URL from environment or default
9
- REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
10
-
11
- # Create Celery app
12
- celery_app = Celery(
13
- "dance_analyzer",
14
- broker=REDIS_URL,
15
- backend=REDIS_URL
16
- )
17
-
18
- # Celery configuration
19
- celery_app.conf.update(
20
- task_serializer='json',
21
- accept_content=['json'],
22
- result_serializer='json',
23
- timezone='UTC',
24
- enable_utc=True,
25
- task_track_started=True,
26
- task_time_limit=600, # 10 minutes max per task
27
- task_soft_time_limit=540, # 9 minutes soft limit
28
- worker_prefetch_multiplier=1, # Take one task at a time
29
- worker_max_tasks_per_child=10, # Restart worker after 10 tasks
30
- imports=['app.tasks'] # βœ… IMPORTANT: Tell Celery where to find tasks
31
- )
32
-
33
- # Auto-discover tasks
34
- celery_app.autodiscover_tasks(['app'])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
backend/app/config.py CHANGED
@@ -22,17 +22,6 @@ class Config:
22
  IS_DOCKER: bool = os.path.exists("/.dockerenv")
23
  IS_HF_SPACE: bool = os.getenv("SPACE_ID") is not None
24
 
25
- # ==================== Redis/Celery Configuration ====================
26
- # Auto-disable Redis on Windows unless explicitly enabled
27
- USE_REDIS: bool = field(default_factory=lambda: (
28
- os.getenv("USE_REDIS", "auto").lower() == "true" if os.getenv("USE_REDIS", "auto").lower() != "auto"
29
- else not (sys.platform == "win32") # Disable on Windows by default
30
- ))
31
-
32
- REDIS_URL: str = os.getenv("REDIS_URL", "redis://localhost:6379/0")
33
- CELERY_BROKER_URL: str = os.getenv("CELERY_BROKER_URL", "redis://localhost:6379/0")
34
- CELERY_RESULT_BACKEND: str = os.getenv("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")
35
-
36
  # ==================== API Configuration ====================
37
  API_HOST: str = os.getenv("API_HOST", "0.0.0.0")
38
  API_PORT: int = int(os.getenv("API_PORT", "7860"))
@@ -222,13 +211,15 @@ class Config:
222
  print(f"Output Folder: {cls.OUTPUT_FOLDER}")
223
  print("=" * 70)
224
 
 
 
225
 
226
  # Validate configuration on import
227
- if not Config.validate_config():
228
  raise RuntimeError("Invalid configuration. Please check environment variables.")
229
 
230
  # Initialize folders on import
231
- Config.initialize_folders()
232
 
233
  if __name__ == "__main__":
234
- Config.print_config()
 
22
  IS_DOCKER: bool = os.path.exists("/.dockerenv")
23
  IS_HF_SPACE: bool = os.getenv("SPACE_ID") is not None
24
 
 
 
 
 
 
 
 
 
 
 
 
25
  # ==================== API Configuration ====================
26
  API_HOST: str = os.getenv("API_HOST", "0.0.0.0")
27
  API_PORT: int = int(os.getenv("API_PORT", "7860"))
 
211
  print(f"Output Folder: {cls.OUTPUT_FOLDER}")
212
  print("=" * 70)
213
 
214
+ # ==================== Instantiate Global Config ====================
215
+ config = Config()
216
 
217
  # Validate configuration on import
218
+ if not config.validate_config():
219
  raise RuntimeError("Invalid configuration. Please check environment variables.")
220
 
221
  # Initialize folders on import
222
+ config.initialize_folders()
223
 
224
  if __name__ == "__main__":
225
+ config.print_config()
backend/app/core/video_processor.py CHANGED
@@ -17,6 +17,9 @@ from app.utils.file_utils import format_file_size
17
 
18
  logger = logging.getLogger(__name__)
19
 
 
 
 
20
 
21
  class VideoProcessor:
22
  """
@@ -31,6 +34,13 @@ class VideoProcessor:
31
  self.current_video_path: Optional[Path] = None
32
  self.video_info: Dict[str, Any] = {}
33
  logger.info("VideoProcessor initialized")
 
 
 
 
 
 
 
34
 
35
  def load_video(self, video_path: Path) -> Dict[str, Any]:
36
  """
@@ -193,7 +203,8 @@ class VideoProcessor:
193
  if progress_callback:
194
  progress = (frame_number + 1) / video_info['frame_count']
195
  message = f"Processing frame {frame_number + 1}/{video_info['frame_count']}"
196
- progress_callback(progress, message)
 
197
 
198
  frame_number += 1
199
 
 
17
 
18
  logger = logging.getLogger(__name__)
19
 
20
+ import inspect
21
+ import asyncio
22
+
23
 
24
  class VideoProcessor:
25
  """
 
34
  self.current_video_path: Optional[Path] = None
35
  self.video_info: Dict[str, Any] = {}
36
  logger.info("VideoProcessor initialized")
37
+
38
+ def _safe_callback(self, callback, *args, **kwargs):
39
+ """Safely handle async or sync progress callbacks."""
40
+ if inspect.iscoroutinefunction(callback):
41
+ asyncio.create_task(callback(*args, **kwargs))
42
+ else:
43
+ callback(*args, **kwargs)
44
 
45
  def load_video(self, video_path: Path) -> Dict[str, Any]:
46
  """
 
203
  if progress_callback:
204
  progress = (frame_number + 1) / video_info['frame_count']
205
  message = f"Processing frame {frame_number + 1}/{video_info['frame_count']}"
206
+ # progress_callback(progress, message)
207
+ self._safe_callback(progress_callback, progress, message)
208
 
209
  frame_number += 1
210
 
backend/app/services/__init__.py CHANGED
@@ -4,7 +4,6 @@ Business services
4
 
5
  from .session_manager import session_manager, SessionManager
6
  from .video_service import video_service, VideoService
7
- from .processing_service import processing_service, ProcessingService
8
  from .cleanup_service import cleanup_service, CleanupService
9
 
10
  __all__ = [
 
4
 
5
  from .session_manager import session_manager, SessionManager
6
  from .video_service import video_service, VideoService
 
7
  from .cleanup_service import cleanup_service, CleanupService
8
 
9
  __all__ = [
backend/app/services/processing_service.py DELETED
@@ -1,86 +0,0 @@
1
- """
2
- Video processing service - Updated for Celery
3
- """
4
-
5
- import logging
6
-
7
- logger = logging.getLogger(__name__)
8
-
9
-
10
- class ProcessingService:
11
- """Handles video processing tasks"""
12
-
13
- def start_processing(self, session_id: str) -> dict:
14
- """
15
- Start video processing with Celery
16
-
17
- Args:
18
- session_id: Session ID to process
19
-
20
- Returns:
21
- Dict with task info
22
- """
23
- # βœ… LAZY IMPORT - Import only when function is called
24
- from app.tasks import process_video_task
25
- from app.services.session_manager import session_manager
26
-
27
- try:
28
- # Submit task to Celery
29
- task = process_video_task.delay(session_id)
30
-
31
- # Update session with task ID
32
- session_manager.update_session(session_id, {
33
- "status": "queued",
34
- "celery_task_id": task.id
35
- })
36
-
37
- logger.info(f"βœ… Task queued: {session_id} (task_id: {task.id})")
38
-
39
- return {
40
- "task_id": task.id,
41
- "status": "queued"
42
- }
43
-
44
- except Exception as e:
45
- logger.error(f"Failed to queue task: {e}")
46
- raise
47
-
48
- def get_task_status(self, task_id: str) -> dict:
49
- """Get Celery task status"""
50
- # βœ… LAZY IMPORT
51
- from celery.result import AsyncResult
52
- from app.celery_app import celery_app
53
-
54
- task = AsyncResult(task_id, app=celery_app)
55
-
56
- if task.state == 'PENDING':
57
- response = {
58
- 'state': task.state,
59
- 'progress': 0,
60
- 'message': 'Task pending...'
61
- }
62
- elif task.state == 'PROGRESS':
63
- response = {
64
- 'state': task.state,
65
- 'progress': task.info.get('progress', 0),
66
- 'message': task.info.get('message', ''),
67
- 'session_id': task.info.get('session_id', '')
68
- }
69
- elif task.state == 'SUCCESS':
70
- response = {
71
- 'state': task.state,
72
- 'progress': 1.0,
73
- 'result': task.info
74
- }
75
- else: # FAILURE, RETRY, etc.
76
- response = {
77
- 'state': task.state,
78
- 'progress': 0,
79
- 'error': str(task.info)
80
- }
81
-
82
- return response
83
-
84
-
85
- # Global processing service instance
86
- processing_service = ProcessingService()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
backend/app/tasks.py DELETED
@@ -1,120 +0,0 @@
1
- """
2
- Celery background tasks
3
- """
4
-
5
- from pathlib import Path
6
- from datetime import datetime
7
- import json
8
- import logging
9
- import numpy as np
10
-
11
- from app.celery_app import celery_app
12
- from app.config import Config
13
-
14
- logger = logging.getLogger(__name__)
15
-
16
-
17
- def convert_to_native_bool(obj):
18
- """Convert numpy types to native Python types"""
19
- if isinstance(obj, np.bool_):
20
- return bool(obj)
21
- elif isinstance(obj, (np.integer, np.floating)):
22
- return obj.item()
23
- elif isinstance(obj, dict):
24
- return {k: convert_to_native_bool(v) for k, v in obj.items()}
25
- elif isinstance(obj, (list, tuple)):
26
- return [convert_to_native_bool(v) for v in obj]
27
- else:
28
- return obj
29
-
30
-
31
- @celery_app.task(bind=True, name='process_video_task')
32
- def process_video_task(self, session_id: str):
33
- """
34
- Celery task for video processing
35
-
36
- Args:
37
- self: Celery task instance
38
- session_id: Session ID to process
39
- """
40
- # βœ… LAZY IMPORT - Import only when task runs, not at module load
41
- from app.services.session_manager import session_manager
42
- from app.core.video_processor import VideoProcessor
43
-
44
- try:
45
- logger.info(f"🎬 Starting processing for session: {session_id}")
46
-
47
- # Get session
48
- session = session_manager.get_session(session_id)
49
- if not session:
50
- raise ValueError(f"Session not found: {session_id}")
51
-
52
- input_path = Path(session["upload_path"])
53
- output_path = Config.OUTPUT_FOLDER / f"analyzed_{session_id}.mp4"
54
- results_path = Config.OUTPUT_FOLDER / f"results_{session_id}.json"
55
-
56
- # Update session status
57
- session_manager.update_session(session_id, {
58
- "status": "processing",
59
- "celery_task_id": self.request.id
60
- })
61
-
62
- # Create processor (inside task, not at module level)
63
- processor = VideoProcessor()
64
-
65
- # Progress callback
66
- def progress_callback(progress: float, message: str):
67
- self.update_state(
68
- state='PROGRESS',
69
- meta={
70
- 'progress': progress,
71
- 'message': message,
72
- 'session_id': session_id
73
- }
74
- )
75
- logger.info(f"Progress {session_id}: {progress*100:.1f}% - {message}")
76
-
77
- # Process video
78
- logger.info(f"Processing video: {input_path}")
79
- raw_results = processor.process_video(
80
- video_path=input_path,
81
- output_path=output_path,
82
- progress_callback=progress_callback
83
- )
84
-
85
- # Convert results
86
- results = convert_to_native_bool(raw_results)
87
-
88
- # Save JSON
89
- with open(results_path, 'w') as f:
90
- json.dump(results, f, indent=2, default=str)
91
-
92
- # Update session
93
- session_manager.update_session(session_id, {
94
- "status": "completed",
95
- "output_path": str(output_path),
96
- "results_path": str(results_path),
97
- "end_time": datetime.now().isoformat(),
98
- "results": results
99
- })
100
-
101
- logger.info(f"βœ… Processing completed: {session_id}")
102
-
103
- return {
104
- "status": "completed",
105
- "session_id": session_id,
106
- "results": results,
107
- "output_path": str(output_path)
108
- }
109
-
110
- except Exception as e:
111
- logger.error(f"❌ Processing failed for {session_id}: {str(e)}")
112
-
113
- # Lazy import here too
114
- from app.services.session_manager import session_manager
115
- session_manager.update_session(session_id, {
116
- "status": "failed",
117
- "error": str(e)
118
- })
119
-
120
- raise # Re-raise for Celery to mark as failed
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
backend/requirements.txt CHANGED
@@ -29,9 +29,3 @@ python-jose==3.3.0
29
 
30
  # Templating Support
31
  jinja2==3.1.2
32
-
33
- # Celery and Redis
34
- celery==5.5.3
35
- redis==7.0.1
36
-
37
- flower==2.0.1 # Optional: Celery monitoring UI
 
29
 
30
  # Templating Support
31
  jinja2==3.1.2
 
 
 
 
 
 
docker_compose.yml DELETED
@@ -1,89 +0,0 @@
1
- version: '3.8'
2
-
3
- services:
4
- # Redis for Celery broker
5
- redis:
6
- image: redis:7-alpine
7
- container_name: dance-analyzer-redis
8
- ports:
9
- - "6379:6379"
10
- volumes:
11
- - redis_data:/data
12
- restart: unless-stopped
13
- networks:
14
- - dance-analyzer-network
15
-
16
- # Main web application
17
- dance-analyzer:
18
- build:
19
- context: .
20
- dockerfile: Dockerfile
21
- container_name: dance-movement-analyzer
22
- ports:
23
- - "7860:7860"
24
- volumes:
25
- - ./uploads:/app/uploads
26
- - ./outputs:/app/outputs
27
- - ./logs:/app/logs
28
- environment:
29
- - API_HOST=0.0.0.0
30
- - API_PORT=7860
31
- - DEBUG=false
32
- - REDIS_URL=redis://redis:6379/0
33
- - MAX_FILE_SIZE=104857600
34
- - MAX_VIDEO_DURATION=60
35
- - MEDIAPIPE_MODEL_COMPLEXITY=1
36
- - MAX_WORKERS=2
37
- depends_on:
38
- - redis
39
- restart: unless-stopped
40
- networks:
41
- - dance-analyzer-network
42
-
43
- # Celery worker
44
- celery-worker:
45
- build:
46
- context: .
47
- dockerfile: Dockerfile
48
- container_name: dance-analyzer-worker
49
- command: celery -A app.celery_app worker --loglevel=info --concurrency=2
50
- volumes:
51
- - ./uploads:/app/uploads
52
- - ./outputs:/app/outputs
53
- - ./logs:/app/logs
54
- environment:
55
- - REDIS_URL=redis://redis:6379/0
56
- - MEDIAPIPE_MODEL_COMPLEXITY=1
57
- depends_on:
58
- - redis
59
- restart: unless-stopped
60
- networks:
61
- - dance-analyzer-network
62
-
63
- # Flower (Celery monitoring UI) - Optional
64
- flower:
65
- build:
66
- context: .
67
- dockerfile: Dockerfile
68
- container_name: dance-analyzer-flower
69
- command: celery -A app.celery_app flower --port=5555
70
- ports:
71
- - "5555:5555"
72
- environment:
73
- - REDIS_URL=redis://redis:6379/0
74
- depends_on:
75
- - redis
76
- - celery-worker
77
- restart: unless-stopped
78
- networks:
79
- - dance-analyzer-network
80
-
81
- networks:
82
- dance-analyzer-network:
83
- driver: bridge
84
-
85
- volumes:
86
- redis_data:
87
- uploads:
88
- outputs:
89
- logs:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
startup.sh CHANGED
@@ -1,58 +1,4 @@
1
  #!/bin/bash
2
- set -e
3
-
4
  echo "πŸš€ Starting Dance Movement Analyzer..."
5
- echo "πŸ“¦ MediaPipe models pre-downloaded during build"
6
-
7
- # ===============================
8
- # Start Redis in background
9
- # ===============================
10
- echo "🧠 Starting Redis server..."
11
- redis-server --bind 127.0.0.1 --port 6379 --requirepass "" &
12
- REDIS_PID=$!
13
- sleep 3
14
-
15
- if redis-cli ping > /dev/null 2>&1; then
16
- echo "βœ… Redis started on localhost:6379 (PID: $REDIS_PID)"
17
- else
18
- echo "❌ Redis failed to start"
19
- exit 1
20
- fi
21
-
22
- # ===============================
23
- # Start Celery Worker in Background
24
- # ===============================
25
- echo "βš™οΈ Starting Celery worker..."
26
- celery -A app.celery_app worker \
27
- --loglevel=info \
28
- --concurrency=2 \
29
- --max-tasks-per-child=10 \
30
- --time-limit=600 \
31
- --soft-time-limit=540 \
32
- > /tmp/celery.log 2>&1 &
33
-
34
- CELERY_PID=$!
35
- sleep 3
36
-
37
- if kill -0 $CELERY_PID 2>/dev/null; then
38
- echo "βœ… Celery worker started (PID: $CELERY_PID)"
39
- else
40
- echo "❌ Celery worker failed to start"
41
- cat /tmp/celery.log
42
- exit 1
43
- fi
44
-
45
- # ===============================
46
- # Start Uvicorn (FastAPI)
47
- # ===============================
48
  PORT=${PORT:-7860}
49
-
50
- echo "🎬 Starting Uvicorn server on port $PORT..."
51
- echo "πŸ“ Application will be available at http://0.0.0.0:$PORT"
52
- echo ""
53
-
54
- exec uvicorn app.main:app \
55
- --host 0.0.0.0 \
56
- --port $PORT \
57
- --workers 1 \
58
- --log-level info
 
1
  #!/bin/bash
 
 
2
  echo "πŸš€ Starting Dance Movement Analyzer..."
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  PORT=${PORT:-7860}
4
+ exec uvicorn app.main:app --host 0.0.0.0 --port $PORT --workers 1