Spaces: Sleeping
#!/usr/bin/env python3
"""Environment smoke test for an LLM LoRA fine-tuning setup.

Imports every package the training pipeline depends on (failing fast with
an ImportError if one is missing), prints the installed versions, and
reports whether CUDA is available along with basic GPU/VRAM info and
hardware recommendations. All user-facing output is intentionally German.
"""

# These imports are the test itself: each one verifies that the package is
# installed and importable. Several (sklearn, wandb, tensorboard) are never
# referenced again below — that is deliberate, not dead code.
import torch
import transformers
import peft
import datasets
import evaluate
import sklearn
import wandb
import tensorboard

print('✅ Alle Training-Pakete erfolgreich importiert!')
print(f'🚀 PyTorch Version: {torch.__version__}')
print(f'🤖 Transformers Version: {transformers.__version__}')
print(f'🔧 PEFT Version: {peft.__version__}')
print(f'📊 Datasets Version: {datasets.__version__}')
print(f'🎯 CUDA verfügbar: {torch.cuda.is_available()}')

if torch.cuda.is_available():
    # Only the first CUDA device (index 0) is reported.
    print(f'📱 GPU: {torch.cuda.get_device_name(0)}')
    # total_memory is in bytes; 1024**3 converts to GiB, shown to one decimal.
    print(f'💾 VRAM: {torch.cuda.get_device_properties(0).total_memory / 1024**3:.1f} GB')
else:
    print('💻 CPU-Modus (kein CUDA)')

print('\n🎯 Hardware-Empfehlungen für LoRA-Training:')
print('• Minimum: 8GB RAM + 4GB VRAM')
print('• Empfohlen: 16GB RAM + 8GB VRAM')
print('• Optimal: 32GB RAM + 16GB+ VRAM')