Spaces:
Sleeping
Sleeping
github-actions[bot]
committed on
Commit
·
d5f26e7
1
Parent(s):
782f718
Deploy from GitHub - 2026-01-21 06:33:16
Browse files
app.py
CHANGED
|
@@ -23,6 +23,7 @@ import time
|
|
| 23 |
import os
|
| 24 |
from pathlib import Path
|
| 25 |
from typing import Optional, Tuple, Dict, List, Any
|
|
|
|
| 26 |
from datetime import datetime
|
| 27 |
from collections import deque
|
| 28 |
import tempfile
|
|
@@ -159,6 +160,26 @@ BACKENDS = {
|
|
| 159 |
# Performance Tracking with Live Charts
|
| 160 |
# ============================================================================
|
| 161 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 162 |
class PerformanceTracker:
|
| 163 |
"""Track and display Space performance metrics with backend comparison"""
|
| 164 |
|
|
@@ -183,7 +204,7 @@ class PerformanceTracker:
|
|
| 183 |
self.backend_times[backend].append(elapsed_ms)
|
| 184 |
self.total_inferences += 1
|
| 185 |
|
| 186 |
-
def get_stats(self) ->
|
| 187 |
"""Get performance statistics"""
|
| 188 |
if not self.inference_times:
|
| 189 |
return None
|
|
@@ -191,22 +212,31 @@ class PerformanceTracker:
|
|
| 191 |
times = list(self.inference_times)
|
| 192 |
uptime = (datetime.now() - self.start_time).total_seconds()
|
| 193 |
|
| 194 |
-
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
'
|
| 200 |
-
|
| 201 |
-
|
| 202 |
-
|
| 203 |
-
|
| 204 |
-
|
| 205 |
-
|
| 206 |
-
|
| 207 |
-
|
| 208 |
-
|
| 209 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 210 |
|
| 211 |
def get_comparison(self) -> str:
|
| 212 |
"""Get backend comparison string"""
|
|
@@ -229,16 +259,16 @@ class PerformanceTracker:
|
|
| 229 |
### Speedup: {speedup:.2f}x faster with CUDA! 🚀
|
| 230 |
"""
|
| 231 |
|
| 232 |
-
def get_chart_data(self) ->
|
| 233 |
"""Get data for real-time chart"""
|
| 234 |
if not self.timestamps:
|
| 235 |
return None
|
| 236 |
|
| 237 |
-
return
|
| 238 |
-
|
| 239 |
-
|
| 240 |
-
|
| 241 |
-
|
| 242 |
|
| 243 |
# Global tracker
|
| 244 |
perf_tracker = PerformanceTracker()
|
|
@@ -1243,7 +1273,7 @@ def create_performance_chart() -> str:
|
|
| 1243 |
return "### Chart Unavailable\n\nPlotly is not installed. Install with: `pip install plotly`"
|
| 1244 |
|
| 1245 |
data = perf_tracker.get_chart_data()
|
| 1246 |
-
if not data or len(data
|
| 1247 |
return "### Performance Chart\n\nRun some inferences to see the chart populate..."
|
| 1248 |
|
| 1249 |
# Color mapping for backends
|
|
@@ -1256,13 +1286,13 @@ def create_performance_chart() -> str:
|
|
| 1256 |
# Create scatter plot with color-coded backends
|
| 1257 |
fig = go.Figure()
|
| 1258 |
|
| 1259 |
-
for backend in set(data
|
| 1260 |
backend_times = []
|
| 1261 |
backend_timestamps = []
|
| 1262 |
-
for i, b in enumerate(data
|
| 1263 |
if b == backend:
|
| 1264 |
-
backend_times.append(data
|
| 1265 |
-
backend_timestamps.append(data
|
| 1266 |
|
| 1267 |
if backend_times:
|
| 1268 |
fig.add_trace(go.Scatter(
|
|
@@ -1451,8 +1481,8 @@ def stylize_image_impl(
|
|
| 1451 |
| **Style** | {style_display} |
|
| 1452 |
| **Backend** | {backend_display} |
|
| 1453 |
| **Time** | {elapsed_ms:.1f} ms ({fps:.0f} FPS) |
|
| 1454 |
-
| **Avg Time** | {stats
|
| 1455 |
-
| **Total Images** | {stats
|
| 1456 |
| **Size** | {width}x{height} |
|
| 1457 |
| **Device** | {get_device().type.upper()} |
|
| 1458 |
|
|
@@ -1618,11 +1648,11 @@ def get_performance_stats() -> str:
|
|
| 1618 |
|
| 1619 |
| Metric | Value |
|
| 1620 |
|--------|-------|
|
| 1621 |
-
| **Avg Time** | {stats
|
| 1622 |
-
| **Fastest** | {stats
|
| 1623 |
-
| **Slowest** | {stats
|
| 1624 |
-
| **Total Images** | {stats
|
| 1625 |
-
| **Uptime** | {stats
|
| 1626 |
|
| 1627 |
---
|
| 1628 |
{perf_tracker.get_comparison()}
|
|
|
|
| 23 |
import os
|
| 24 |
from pathlib import Path
|
| 25 |
from typing import Optional, Tuple, Dict, List, Any
|
| 26 |
+
from pydantic import BaseModel
|
| 27 |
from datetime import datetime
|
| 28 |
from collections import deque
|
| 29 |
import tempfile
|
|
|
|
| 160 |
# Performance Tracking with Live Charts
|
| 161 |
# ============================================================================
|
| 162 |
|
| 163 |
+
class PerformanceStats(BaseModel):
|
| 164 |
+
"""Pydantic model for performance stats - Gradio 5.x compatible"""
|
| 165 |
+
avg_ms: float
|
| 166 |
+
min_ms: float
|
| 167 |
+
max_ms: float
|
| 168 |
+
total_inferences: int
|
| 169 |
+
uptime_hours: float
|
| 170 |
+
cuda_avg: Optional[float] = None
|
| 171 |
+
cuda_count: Optional[int] = None
|
| 172 |
+
pytorch_avg: Optional[float] = None
|
| 173 |
+
pytorch_count: Optional[int] = None
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
class ChartData(BaseModel):
|
| 177 |
+
"""Pydantic model for chart data - Gradio 5.x compatible"""
|
| 178 |
+
timestamps: List[str]
|
| 179 |
+
times: List[float]
|
| 180 |
+
backends: List[str]
|
| 181 |
+
|
| 182 |
+
|
| 183 |
class PerformanceTracker:
|
| 184 |
"""Track and display Space performance metrics with backend comparison"""
|
| 185 |
|
|
|
|
| 204 |
self.backend_times[backend].append(elapsed_ms)
|
| 205 |
self.total_inferences += 1
|
| 206 |
|
| 207 |
+
def get_stats(self) -> Optional[PerformanceStats]:
    """Summarize recorded inference timings.

    Returns:
        A PerformanceStats snapshot (overall avg/min/max in ms, lifetime
        inference count, uptime in hours, plus per-backend averages and
        counts), or None when no inference has been recorded yet.
    """
    if not self.inference_times:
        return None

    samples = list(self.inference_times)
    uptime_seconds = (datetime.now() - self.start_time).total_seconds()

    # Per-backend (avg_ms, count); left as (None, None) for a backend
    # that has not served any inference yet.
    per_backend = {}
    for name in ('cuda', 'pytorch'):
        recorded = self.backend_times[name]
        if recorded:
            values = list(recorded)
            per_backend[name] = (sum(values) / len(values), len(values))
        else:
            per_backend[name] = (None, None)

    cuda_avg, cuda_count = per_backend['cuda']
    pytorch_avg, pytorch_count = per_backend['pytorch']

    return PerformanceStats(
        avg_ms=sum(samples) / len(samples),
        min_ms=min(samples),
        max_ms=max(samples),
        total_inferences=self.total_inferences,
        uptime_hours=uptime_seconds / 3600,
        cuda_avg=cuda_avg,
        cuda_count=cuda_count,
        pytorch_avg=pytorch_avg,
        pytorch_count=pytorch_count,
    )
|
| 240 |
|
| 241 |
def get_comparison(self) -> str:
|
| 242 |
"""Get backend comparison string"""
|
|
|
|
| 259 |
### Speedup: {speedup:.2f}x faster with CUDA! 🚀
|
| 260 |
"""
|
| 261 |
|
| 262 |
+
def get_chart_data(self) -> Optional[ChartData]:
    """Package the tracked timings for the live chart.

    Returns:
        A ChartData with parallel lists of '%H:%M:%S' labels, per-inference
        durations (ms), and backend names — or None when nothing has been
        recorded yet.
    """
    if not self.timestamps:
        return None

    labels = [stamp.strftime('%H:%M:%S') for stamp in self.timestamps]
    return ChartData(
        timestamps=labels,
        times=list(self.inference_times),
        backends=list(self.backends_used),
    )
|
| 272 |
|
| 273 |
# Global tracker
|
| 274 |
perf_tracker = PerformanceTracker()
|
|
|
|
| 1273 |
return "### Chart Unavailable\n\nPlotly is not installed. Install with: `pip install plotly`"
|
| 1274 |
|
| 1275 |
data = perf_tracker.get_chart_data()
|
| 1276 |
+
if not data or len(data.timestamps) < 2:
|
| 1277 |
return "### Performance Chart\n\nRun some inferences to see the chart populate..."
|
| 1278 |
|
| 1279 |
# Color mapping for backends
|
|
|
|
| 1286 |
# Create scatter plot with color-coded backends
|
| 1287 |
fig = go.Figure()
|
| 1288 |
|
| 1289 |
+
for backend in set(data.backends):
|
| 1290 |
backend_times = []
|
| 1291 |
backend_timestamps = []
|
| 1292 |
+
for i, b in enumerate(data.backends):
|
| 1293 |
if b == backend:
|
| 1294 |
+
backend_times.append(data.times[i])
|
| 1295 |
+
backend_timestamps.append(data.timestamps[i])
|
| 1296 |
|
| 1297 |
if backend_times:
|
| 1298 |
fig.add_trace(go.Scatter(
|
|
|
|
| 1481 |
| **Style** | {style_display} |
|
| 1482 |
| **Backend** | {backend_display} |
|
| 1483 |
| **Time** | {elapsed_ms:.1f} ms ({fps:.0f} FPS) |
|
| 1484 |
+
| **Avg Time** | {(stats.avg_ms if stats else elapsed_ms):.1f} ms |
|
| 1485 |
+
| **Total Images** | {stats.total_inferences if stats else 1} |
|
| 1486 |
| **Size** | {width}x{height} |
|
| 1487 |
| **Device** | {get_device().type.upper()} |
|
| 1488 |
|
|
|
|
| 1648 |
|
| 1649 |
| Metric | Value |
|
| 1650 |
|--------|-------|
|
| 1651 |
+
| **Avg Time** | {stats.avg_ms:.1f} ms |
|
| 1652 |
+
| **Fastest** | {stats.min_ms:.1f} ms |
|
| 1653 |
+
| **Slowest** | {stats.max_ms:.1f} ms |
|
| 1654 |
+
| **Total Images** | {stats.total_inferences} |
|
| 1655 |
+
| **Uptime** | {stats.uptime_hours:.1f} hours |
|
| 1656 |
|
| 1657 |
---
|
| 1658 |
{perf_tracker.get_comparison()}
|