Spaces:
Sleeping
Sleeping
File size: 3,664 Bytes
2ed7323 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 |
#!/usr/bin/env python3
"""
Launch script for Clinical Trial Matching Pipeline
Checks dependencies and provides helpful startup information.
"""
import sys
import subprocess
import importlib.util
def check_package(package_name, display_name=None):
    """Report whether *package_name* is importable.

    Returns a ``(installed, label)`` tuple, where ``label`` is
    *display_name* when given, otherwise the package name itself.
    """
    label = display_name if display_name is not None else package_name
    installed = importlib.util.find_spec(package_name) is not None
    return installed, label
def check_dependencies():
    """Check that every required third-party package is importable.

    Prints one status line per package (required first, then optional)
    and, when anything required is missing, pip install hints.

    Returns:
        bool: True only when all required packages are installed;
        optional packages are reported but never affect the result.
    """
    required_packages = [
        ('gradio', 'gradio'),
        ('pandas', 'pandas'),
        ('numpy', 'numpy'),
        ('torch', 'PyTorch'),
        ('transformers', 'transformers'),
        ('sentence_transformers', 'sentence-transformers'),
    ]
    optional_packages = [
        ('vllm', 'vLLM (for faster LLM inference)'),
    ]
    print("Checking dependencies...\n")
    missing = []
    for package, display in required_packages:
        installed, name = check_package(package, display)
        # BUGFIX: status glyphs were mojibake ("β" for both states);
        # restore the intended check/cross marks.
        status = "✓" if installed else "✗"
        print(f"  {status} {name}")
        if not installed:
            missing.append(package)
    print("\nOptional packages:")
    for package, display in optional_packages:
        installed, name = check_package(package, display)
        status = "✓" if installed else "✗"
        print(f"  {status} {name}")
    if missing:
        # BUGFIX: leading glyph was mojibake ("β"); restore "❌".
        print(f"\n❌ Missing required packages: {', '.join(missing)}")
        print("\nInstall with:")
        print(f"  pip install {' '.join(missing)}")
        print("\nOr install all requirements:")
        print("  pip install -r requirements.txt")
        return False
    # BUGFIX: leading glyph was mojibake ("β"); restore "✅".
    print("\n✅ All required dependencies installed!")
    return True
def check_cuda():
    """Report CUDA availability as printed diagnostics.

    Returns:
        bool: True iff PyTorch is importable and reports an available
        CUDA device; False when CUDA is absent or torch is not installed.
    """
    try:
        import torch
        if torch.cuda.is_available():
            # BUGFIX: glyph was mojibake ("π"); restore "🚀". Also drop
            # the pointless f-string prefix (no placeholders).
            print("\n🚀 CUDA available!")
            print(f"  GPU count: {torch.cuda.device_count()}")
            for i in range(torch.cuda.device_count()):
                print(f"  GPU {i}: {torch.cuda.get_device_name(i)}")
            return True
        else:
            # BUGFIX: glyph was mojibake ("β οΈ"); restore "⚠️".
            print("\n⚠️ CUDA not available - running on CPU")
            print("  For better performance, install PyTorch with CUDA:")
            print("  pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu121")
            return False
    except ImportError:
        # torch is not installed at all: treat as "no CUDA" without noise;
        # check_dependencies() already reports the missing package.
        return False
def print_startup_info():
    """Print the startup banner with the URLs where the UI will serve."""
    rule = "=" * 70
    banner = [
        "\n" + rule,
        "Clinical Trial Matching Pipeline",
        rule,
        "\nStarting Gradio web interface...",
        "\nOnce started, the interface will be available at:",
        "  Local: http://localhost:7860",
        "  Network: http://0.0.0.0:7860",
        "\nPress Ctrl+C to stop the server.",
        "\n" + rule + "\n",
    ]
    for line in banner:
        print(line)
def main():
    """Entry point: verify the environment, then launch the Gradio app.

    Exits with status 1 when required dependencies are missing or the
    application fails to start, and 0 on a clean Ctrl+C shutdown.
    """
    # Abort early if any required package is missing.
    if not check_dependencies():
        sys.exit(1)
    # CUDA is optional; this call only prints diagnostics.
    check_cuda()
    print_startup_info()
    try:
        # Importing the module launches the Gradio server as a side effect.
        import trial_matching_app  # noqa: F401
    except KeyboardInterrupt:
        print("\n\nShutting down gracefully...")
        sys.exit(0)
    except Exception as e:
        # BUGFIX: leading glyph was mojibake ("β"); restore "❌".
        print(f"\n❌ Error launching application: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
|