"""
Hardware Specifications and System Requirements
===============================================

Comprehensive hardware specifications for the LiMp Pipeline Integration System.
"""

import json
import platform
from dataclasses import dataclass, asdict
from datetime import datetime
from typing import Any, Dict, Optional

import psutil
import torch


@dataclass
class HardwareSpecs:
    """Hardware specifications for the LiMp pipeline."""

    # CPU
    cpu_model: str
    cpu_cores: int
    cpu_threads: int
    cpu_frequency: float  # GHz
    cpu_architecture: str

    # Memory
    total_ram_gb: float
    available_ram_gb: float
    swap_memory_gb: float

    # GPU
    gpu_available: bool
    gpu_model: Optional[str] = None
    gpu_memory_gb: Optional[float] = None
    gpu_cuda_version: Optional[str] = None
    gpu_compute_capability: Optional[str] = None

    # Storage
    total_storage_gb: float = 0.0
    available_storage_gb: float = 0.0
    storage_type: str = "unknown"

    # Operating system and runtime
    os_name: str = "unknown"
    os_version: str = "unknown"
    python_version: str = "unknown"
    architecture: str = "unknown"


@dataclass
class ModelRequirements:
    """Model-specific hardware requirements."""

    model_name: str
    model_size_gb: float
    minimum_ram_gb: float
    recommended_ram_gb: float
    minimum_vram_gb: Optional[float] = None
    recommended_vram_gb: Optional[float] = None
    cpu_cores_minimum: int = 4
    cpu_cores_recommended: int = 8
    storage_requirements_gb: float = 10.0


class HardwareAnalyzer:
    """Analyze current hardware and determine system capabilities."""

    def __init__(self):
        self.specs = self._analyze_hardware()

    def _analyze_hardware(self) -> HardwareSpecs:
        """Analyze current hardware specifications."""

        # CPU information (cpu_count can return None on some platforms)
        cpu_info = platform.processor() or "Unknown"
        cpu_cores = psutil.cpu_count(logical=False) or 0
        cpu_threads = psutil.cpu_count(logical=True) or 0
        cpu_freq = psutil.cpu_freq()
        cpu_frequency = cpu_freq.max / 1000 if cpu_freq else 0.0  # MHz -> GHz
        cpu_architecture = platform.machine()

        # Memory information
        memory = psutil.virtual_memory()
        total_ram_gb = memory.total / (1024**3)
        available_ram_gb = memory.available / (1024**3)
        swap_memory = psutil.swap_memory()
        swap_memory_gb = swap_memory.total / (1024**3)

        # GPU information (via PyTorch/CUDA)
        gpu_available = torch.cuda.is_available()
        gpu_model = None
        gpu_memory_gb = None
        gpu_cuda_version = None
        gpu_compute_capability = None

        if gpu_available:
            gpu_model = torch.cuda.get_device_name(0)
            gpu_memory_gb = torch.cuda.get_device_properties(0).total_memory / (1024**3)
            gpu_cuda_version = torch.version.cuda
            major, minor = torch.cuda.get_device_capability(0)
            gpu_compute_capability = f"{major}.{minor}"

        # Storage information
        disk_usage = psutil.disk_usage('/')
        total_storage_gb = disk_usage.total / (1024**3)
        available_storage_gb = disk_usage.free / (1024**3)

        # psutil does not expose the drive technology, so the type is reported
        # as unknown rather than guessed.
        storage_type = "unknown"

        # Operating system and runtime information
        os_name = platform.system()
        os_version = platform.release()
        python_version = platform.python_version()
        architecture = platform.architecture()[0]

        return HardwareSpecs(
            cpu_model=cpu_info,
            cpu_cores=cpu_cores,
            cpu_threads=cpu_threads,
            cpu_frequency=cpu_frequency,
            cpu_architecture=cpu_architecture,
            total_ram_gb=total_ram_gb,
            available_ram_gb=available_ram_gb,
            swap_memory_gb=swap_memory_gb,
            gpu_available=gpu_available,
            gpu_model=gpu_model,
            gpu_memory_gb=gpu_memory_gb,
            gpu_cuda_version=gpu_cuda_version,
            gpu_compute_capability=gpu_compute_capability,
            total_storage_gb=total_storage_gb,
            available_storage_gb=available_storage_gb,
            storage_type=storage_type,
            os_name=os_name,
            os_version=os_version,
            python_version=python_version,
            architecture=architecture,
        )

    def get_model_requirements(self) -> Dict[str, ModelRequirements]:
        """Get hardware requirements for each model in the pipeline."""

        return {
            "LFM2-8B-A1B-Dimensional-Entanglement": ModelRequirements(
                model_name="LFM2-8B-A1B-Dimensional-Entanglement",
                model_size_gb=16.0,
                minimum_ram_gb=32.0,
                recommended_ram_gb=64.0,
                minimum_vram_gb=16.0,
                recommended_vram_gb=24.0,
                cpu_cores_minimum=8,
                cpu_cores_recommended=16,
                storage_requirements_gb=20.0,
            ),
            "9xdSq-LIMPS-FemTO-R1C": ModelRequirements(
                model_name="9xdSq-LIMPS-FemTO-R1C",
                model_size_gb=14.0,
                minimum_ram_gb=28.0,
                recommended_ram_gb=56.0,
                minimum_vram_gb=14.0,
                recommended_vram_gb=20.0,
                cpu_cores_minimum=6,
                cpu_cores_recommended=12,
                storage_requirements_gb=18.0,
            ),
            "Enhanced-Advanced-Tokenizer": ModelRequirements(
                model_name="Enhanced-Advanced-Tokenizer",
                model_size_gb=2.0,
                minimum_ram_gb=8.0,
                recommended_ram_gb=16.0,
                minimum_vram_gb=4.0,
                recommended_vram_gb=8.0,
                cpu_cores_minimum=4,
                cpu_cores_recommended=8,
                storage_requirements_gb=5.0,
            ),
            "Integrated-Pipeline": ModelRequirements(
                model_name="Integrated-Pipeline",
                model_size_gb=32.0,
                minimum_ram_gb=64.0,
                recommended_ram_gb=128.0,
                minimum_vram_gb=32.0,
                recommended_vram_gb=48.0,
                cpu_cores_minimum=16,
                cpu_cores_recommended=32,
                storage_requirements_gb=50.0,
            ),
        }

    def check_compatibility(self, model_name: str) -> Dict[str, Any]:
        """Check if current hardware is compatible with a specific model."""

        requirements = self.get_model_requirements().get(model_name)
        if not requirements:
            return {"compatible": False, "error": f"Unknown model: {model_name}"}

        compatibility = {
            "model_name": model_name,
            "compatible": True,
            "warnings": [],
            "requirements_met": {},
            "performance_estimate": "unknown",
        }

        # RAM check
        if self.specs.available_ram_gb < requirements.minimum_ram_gb:
            compatibility["compatible"] = False
            compatibility["warnings"].append(f"Insufficient RAM: {self.specs.available_ram_gb:.1f}GB available, {requirements.minimum_ram_gb:.1f}GB minimum required")
        elif self.specs.available_ram_gb < requirements.recommended_ram_gb:
            compatibility["warnings"].append(f"RAM below recommended: {self.specs.available_ram_gb:.1f}GB available, {requirements.recommended_ram_gb:.1f}GB recommended")

        compatibility["requirements_met"]["ram"] = self.specs.available_ram_gb >= requirements.minimum_ram_gb

        # CPU check
        if self.specs.cpu_cores < requirements.cpu_cores_minimum:
            compatibility["compatible"] = False
            compatibility["warnings"].append(f"Insufficient CPU cores: {self.specs.cpu_cores} available, {requirements.cpu_cores_minimum} minimum required")
        elif self.specs.cpu_cores < requirements.cpu_cores_recommended:
            compatibility["warnings"].append(f"CPU cores below recommended: {self.specs.cpu_cores} available, {requirements.cpu_cores_recommended} recommended")

        compatibility["requirements_met"]["cpu"] = self.specs.cpu_cores >= requirements.cpu_cores_minimum

        # GPU check (only when the model declares a VRAM requirement)
        if requirements.minimum_vram_gb:
            if not self.specs.gpu_available:
                compatibility["warnings"].append("No GPU available - will run on CPU (slower)")
                compatibility["requirements_met"]["gpu"] = False
            elif self.specs.gpu_memory_gb < requirements.minimum_vram_gb:
                compatibility["warnings"].append(f"Insufficient GPU memory: {self.specs.gpu_memory_gb:.1f}GB available, {requirements.minimum_vram_gb:.1f}GB minimum required")
                compatibility["requirements_met"]["gpu"] = False
            else:
                compatibility["requirements_met"]["gpu"] = True

        # Storage check
        if self.specs.available_storage_gb < requirements.storage_requirements_gb:
            compatibility["compatible"] = False
            compatibility["warnings"].append(f"Insufficient storage: {self.specs.available_storage_gb:.1f}GB available, {requirements.storage_requirements_gb:.1f}GB required")

        compatibility["requirements_met"]["storage"] = self.specs.available_storage_gb >= requirements.storage_requirements_gb

        # Overall performance estimate
        if compatibility["compatible"]:
            if self.specs.gpu_available and compatibility["requirements_met"].get("gpu", False):
                if requirements.recommended_vram_gb is not None and self.specs.gpu_memory_gb >= requirements.recommended_vram_gb:
                    compatibility["performance_estimate"] = "excellent"
                else:
                    compatibility["performance_estimate"] = "good"
            elif self.specs.available_ram_gb >= requirements.recommended_ram_gb:
                compatibility["performance_estimate"] = "moderate"
            else:
                compatibility["performance_estimate"] = "limited"
        else:
            compatibility["performance_estimate"] = "incompatible"

        return compatibility

    def generate_hardware_report(self) -> Dict[str, Any]:
        """Generate comprehensive hardware analysis report."""

        report = {
            "timestamp": datetime.now().isoformat(),
            "hardware_specifications": asdict(self.specs),
            "model_compatibility": {},
            "recommendations": [],
            "performance_estimates": {},
        }

        # Per-model compatibility
        model_requirements = self.get_model_requirements()
        for model_name in model_requirements.keys():
            report["model_compatibility"][model_name] = self.check_compatibility(model_name)

        # General upgrade recommendations
        if not self.specs.gpu_available:
            report["recommendations"].append("Consider adding a GPU for better performance with large models")

        if self.specs.available_ram_gb < 64:
            report["recommendations"].append("Consider upgrading RAM to 64GB+ for optimal performance")

        if self.specs.cpu_cores < 16:
            report["recommendations"].append("Consider upgrading to 16+ CPU cores for better parallel processing")

        if self.specs.available_storage_gb < 100:
            report["recommendations"].append("Consider adding more storage for model caching and data processing")

        # Per-model performance estimates
        for model_name, compatibility in report["model_compatibility"].items():
            report["performance_estimates"][model_name] = {
                "estimated_inference_time_per_token": self._estimate_inference_time(model_name, compatibility),
                "estimated_memory_usage": model_requirements[model_name].model_size_gb,
                "recommended_batch_size": self._estimate_batch_size(model_name, compatibility),
            }

        return report

    def _estimate_inference_time(self, model_name: str, compatibility: Dict[str, Any]) -> float:
        """Estimate inference time per token in milliseconds."""

        base_times = {
            "LFM2-8B-A1B-Dimensional-Entanglement": 50.0,
            "9xdSq-LIMPS-FemTO-R1C": 45.0,
            "Enhanced-Advanced-Tokenizer": 5.0,
            "Integrated-Pipeline": 200.0,
        }

        base_time = base_times.get(model_name, 100.0)

        if compatibility["performance_estimate"] == "excellent":
            return base_time * 0.5
        elif compatibility["performance_estimate"] == "good":
            return base_time * 0.7
        elif compatibility["performance_estimate"] == "moderate":
            return base_time * 1.2
        elif compatibility["performance_estimate"] == "limited":
            return base_time * 2.0
        else:
            return base_time * 10.0

    def _estimate_batch_size(self, model_name: str, compatibility: Dict[str, Any]) -> int:
        """Estimate recommended batch size."""

        if not compatibility["compatible"]:
            return 1

        base_batch_sizes = {
            "LFM2-8B-A1B-Dimensional-Entanglement": 4,
            "9xdSq-LIMPS-FemTO-R1C": 6,
            "Enhanced-Advanced-Tokenizer": 32,
            "Integrated-Pipeline": 1,
        }

        base_batch = base_batch_sizes.get(model_name, 2)

        if compatibility["performance_estimate"] == "excellent":
            return base_batch * 2
        elif compatibility["performance_estimate"] == "good":
            return base_batch
        elif compatibility["performance_estimate"] == "moderate":
            return max(1, base_batch // 2)
        else:
            return 1

    def save_report(self, filename: str = "hardware_analysis_report.json"):
        """Save hardware analysis report to file."""

        report = self.generate_hardware_report()

        with open(filename, 'w', encoding='utf-8') as f:
            json.dump(report, f, indent=2, ensure_ascii=False)

        print(f"Hardware analysis report saved to: {filename}")
        return report
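

# Minimal programmatic usage sketch (illustrative only; this helper is an added
# example, not part of the original pipeline, and the model key must be one of
# the names returned by HardwareAnalyzer.get_model_requirements()).
def _example_compatibility_check(model_name: str = "Enhanced-Advanced-Tokenizer") -> None:
    analyzer = HardwareAnalyzer()
    result = analyzer.check_compatibility(model_name)
    if result.get("compatible"):
        print(f"{model_name}: compatible (estimated performance: {result['performance_estimate']})")
    else:
        for warning in result.get("warnings", []):
            print(f"{model_name}: {warning}")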


def main():
    """Main function to run hardware analysis."""

    print("🔧 LiMp Pipeline Hardware Analysis")
    print("=" * 50)

    analyzer = HardwareAnalyzer()

    # Current hardware summary
    print("\n💻 Current Hardware Specifications:")
    print(f"  CPU: {analyzer.specs.cpu_model}")
    print(f"  Cores: {analyzer.specs.cpu_cores} cores, {analyzer.specs.cpu_threads} threads")
    print(f"  RAM: {analyzer.specs.total_ram_gb:.1f}GB total, {analyzer.specs.available_ram_gb:.1f}GB available")

    if analyzer.specs.gpu_available:
        print(f"  GPU: {analyzer.specs.gpu_model}")
        print(f"  GPU Memory: {analyzer.specs.gpu_memory_gb:.1f}GB")
        print(f"  CUDA Version: {analyzer.specs.gpu_cuda_version}")
    else:
        print("  GPU: Not available")

    print(f"  Storage: {analyzer.specs.available_storage_gb:.1f}GB available")

    # Per-model compatibility summary
    print("\nModel Compatibility Analysis:")
    model_requirements = analyzer.get_model_requirements()

    for model_name in model_requirements.keys():
        compatibility = analyzer.check_compatibility(model_name)
        status = "✅ Compatible" if compatibility["compatible"] else "❌ Incompatible"
        performance = compatibility["performance_estimate"].title()

        print(f"  {model_name}:")
        print(f"    Status: {status}")
        print(f"    Performance: {performance}")

        if compatibility["warnings"]:
            for warning in compatibility["warnings"]:
                print(f"    ⚠️ {warning}")

    # Full report
    print("\nGenerating comprehensive report...")
    report = analyzer.save_report()

    if report["recommendations"]:
        print("\nRecommendations:")
        for rec in report["recommendations"]:
            print(f"  • {rec}")

    print("\nHardware analysis complete!")


if __name__ == "__main__":
    main()