"""GPU Storage Dashboard server.

Serves an HTML dashboard of local model/tensor storage (sizes, free space,
registries) and starts the AI-integration HTTP test loop in a daemon thread
on startup.
"""

import json
import os
import shutil
import threading
import time
from datetime import datetime
from pathlib import Path
from typing import Dict, List

import humanize
import uvicorn
from fastapi import BackgroundTasks, FastAPI, HTTPException, Request
from fastapi.encoders import jsonable_encoder
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates

# Project-local modules (storage backend + background test loop).
from test_ai_integration_http import test_ai_integration_http
from http_storage import LocalStorage

# Single FastAPI application.
# NOTE(review): the original module created a second FastAPI() halfway down,
# which silently discarded every route registered on the first instance.
# There is exactly one app now; the CORS middleware applies to it.
app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],  # Allows all methods
    allow_headers=["*"],
)

# Embedded fallback styles/template constants (kept for backward
# compatibility with any external importer; the dashboard itself uses the
# files written below).
STYLES = """ body { font-family: 'Arial', sans-serif; margin: 0; padding: 20px; background: #f5f5f5; } """
TEMPLATE = """
{content}
"""

# Stylesheet written to static/style.css and served at /static/style.css.
css_content = """
body { font-family: 'Arial', sans-serif; margin: 0; padding: 20px; background: #f5f5f5; }
.container { max-width: 1200px; margin: 0 auto; }
.stats-grid { display: grid; grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); gap: 20px; margin-bottom: 30px; }
.stat-card { background: white; padding: 20px; border-radius: 10px; box-shadow: 0 2px 5px rgba(0,0,0,0.1); }
.stat-title { font-size: 14px; color: #666; margin-bottom: 10px; }
.stat-value { font-size: 24px; color: #2c3e50; font-weight: bold; }
.files-section { background: white; padding: 20px; border-radius: 10px; box-shadow: 0 2px 5px rgba(0,0,0,0.1); }
table { width: 100%; border-collapse: collapse; }
th, td { padding: 12px; text-align: left; border-bottom: 1px solid #eee; }
th { background: #f8f9fa; color: #666; }
.file-icon { width: 20px; margin-right: 10px; }
.header { margin-bottom: 30px; }
.header h1 { color: #2c3e50; margin: 0; }
.refresh-time { color: #666; font-size: 14px; margin-top: 5px; }
"""

# Create the static/ and templates/ directories and materialize the assets.
static_dir = Path("static")
templates_dir = Path("templates")
static_dir.mkdir(exist_ok=True)
templates_dir.mkdir(exist_ok=True)

with open(static_dir / "style.css", "w") as f:
    f.write(css_content)

# Jinja2 dashboard template.
# NOTE(review): the original literal was garbled (HTML tags stripped); this
# reconstruction preserves every Jinja variable/loop the route supplies and
# uses the CSS classes defined in css_content above — confirm layout.
html_content = """<!DOCTYPE html>
<html>
<head>
    <title>GPU Storage Dashboard</title>
    <link rel="stylesheet" href="/static/style.css">
</head>
<body>
    <div class="container">
        <div class="header">
            <h1>GPU Storage Dashboard</h1>
            <div class="refresh-time">Last updated: {{ last_updated }}</div>
        </div>
        <div class="stats-grid">
            <div class="stat-card">
                <div class="stat-title">Total Storage Used</div>
                <div class="stat-value">{{ storage_stats.total_used }}</div>
            </div>
            <div class="stat-card">
                <div class="stat-title">Free Space</div>
                <div class="stat-value">{{ storage_stats.free_space }}</div>
            </div>
            <div class="stat-card">
                <div class="stat-title">Active Models</div>
                <div class="stat-value">{{ storage_stats.active_models }}</div>
            </div>
            <div class="stat-card">
                <div class="stat-title">Active Tensors</div>
                <div class="stat-value">{{ storage_stats.active_tensors }}</div>
            </div>
        </div>
        <div class="files-section">
            <h2>Models</h2>
            <table>
                <tr><th>Name</th><th>Size</th><th>Last Modified</th></tr>
                {% for model in models %}
                <tr>
                    <td>{{ model.name }}</td>
                    <td>{{ model.size }}</td>
                    <td>{{ model.modified }}</td>
                </tr>
                {% endfor %}
            </table>
            <h2>Tensors</h2>
            <table>
                <tr><th>Name</th><th>Size</th><th>Shape</th><th>Last Modified</th></tr>
                {% for tensor in tensors %}
                <tr>
                    <td>{{ tensor.name }}</td>
                    <td>{{ tensor.size }}</td>
                    <td>{{ tensor.shape }}</td>
                    <td>{{ tensor.modified }}</td>
                </tr>
                {% endfor %}
            </table>
        </div>
    </div>
</body>
</html>
"""

with open(templates_dir / "dashboard.html", "w") as f:
    f.write(html_content)

# Template engine for the dashboard route.
# NOTE(review): `templates` was referenced by the route but never created in
# the original — it would have raised NameError on the first request.
templates = Jinja2Templates(directory="templates")

# Serve the generated stylesheet (the template links /static/style.css).
app.mount("/static", StaticFiles(directory="static"), name="static")

# Initialize storage backend.
storage = LocalStorage()


@app.get("/", response_class=HTMLResponse)
async def get_dashboard(request: Request):
    """Serve the storage dashboard.

    Walks the storage directories to compute aggregate sizes, lists model
    directories (those containing a config.json) and tensor .npy files, and
    renders templates/dashboard.html with the results.
    """
    stats = {}

    # Total bytes across every storage area that exists on disk.
    total_size = 0
    for path in [storage.models_path, storage.vram_path,
                 storage.cache_path, storage.state_path]:
        if path.exists():
            total_size += sum(f.stat().st_size for f in path.rglob('*') if f.is_file())

    # Free space on the volume that holds the storage root.
    disk_usage = shutil.disk_usage(str(storage.base_path))
    stats['storage_stats'] = {
        'total_used': humanize.naturalsize(total_size),
        'free_space': humanize.naturalsize(disk_usage.free),
        'active_models': len(storage.model_registry),
        'active_tensors': len(storage.tensor_registry),
    }

    # A model is any subdirectory of models_path that contains config.json.
    models = []
    if storage.models_path.exists():
        for model_dir in storage.models_path.iterdir():
            if model_dir.is_dir():
                config_file = model_dir.joinpath('config.json')
                if config_file.exists():
                    size = sum(f.stat().st_size for f in model_dir.rglob('*') if f.is_file())
                    models.append({
                        'name': model_dir.name,
                        'size': humanize.naturalsize(size),
                        'modified': datetime.fromtimestamp(
                            model_dir.stat().st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
                    })

    # Tensors are .npy files; shape comes from a sibling <stem>_meta.json
    # when present, otherwise "Unknown".
    tensors = []
    if storage.vram_path.exists():
        for tensor_file in storage.vram_path.glob('*.npy'):
            if tensor_file.exists():
                meta_file = tensor_file.with_name(tensor_file.stem + '_meta.json')
                shape = "Unknown"
                if meta_file.exists():
                    with open(meta_file) as f:
                        meta = json.load(f)
                    shape = str(meta.get('shape', 'Unknown'))
                tensors.append({
                    'name': tensor_file.stem,
                    'size': humanize.naturalsize(tensor_file.stat().st_size),
                    'shape': shape,
                    'modified': datetime.fromtimestamp(
                        tensor_file.stat().st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
                })

    return templates.TemplateResponse("dashboard.html", {
        "request": request,
        "storage_stats": stats['storage_stats'],
        "models": models,
        "tensors": tensors,
        "last_updated": datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
    })


# Shared state for log_message.
# NOTE(review): `processing_status` was read and mutated by log_message in
# the original but never defined — NameError on first log call.
processing_status = {"logs": []}

# Background thread running the integration test loop (set in startup_event).
processing_thread = None


def log_message(message):
    """Add a log message with timestamp, keeping only the last 100 entries."""
    timestamp = datetime.now().strftime("%H:%M:%S")
    log_entry = f"[{timestamp}] {message}"
    processing_status["logs"].append(log_entry)
    # Keep only the last 100 logs
    if len(processing_status["logs"]) > 100:
        processing_status["logs"] = processing_status["logs"][-100:]
    print(log_entry)


@app.on_event("startup")
async def startup_event():
    """Start the integration test loop in a daemon thread, once."""
    global processing_thread
    if not (processing_thread and processing_thread.is_alive()):
        processing_thread = threading.Thread(target=test_ai_integration_http)
        processing_thread.daemon = True  # do not block interpreter shutdown
        processing_thread.start()


def get_disk_usage(path: str) -> Dict[str, float]:
    """Get disk usage statistics in GB for the filesystem containing *path*.

    NOTE(review): os.statvfs is POSIX-only; this will raise AttributeError
    on Windows — confirm deployment target.
    """
    statvfs = os.statvfs(path)
    total = statvfs.f_frsize * statvfs.f_blocks / (1024 ** 3)
    free = statvfs.f_frsize * statvfs.f_bavail / (1024 ** 3)
    used = total - free
    return {"total": total, "free": free, "used": used}


class SafeJSONEncoder(json.JSONEncoder):
    """JSON encoder that maps NaN/Infinity and unserializable objects to null."""

    def default(self, obj):
        try:
            if isinstance(obj, float):
                if obj != obj:  # NaN is the only value not equal to itself
                    return None
                if obj == float('inf') or obj == float('-inf'):
                    return None
            return super().default(obj)
        except TypeError:  # narrowed from a bare except: best-effort null
            return None


if __name__ == "__main__":
    # NOTE(review): the original had two __main__ blocks (the first ran
    # before the middleware/routes below it existed) and called
    # get_disk_usage() without its required path argument; both removed.
    uvicorn.run(
        app,
        host="0.0.0.0",
        port=8000,
        log_level="info",
        reload=False,  # Set to False for production
    )