#!/bin/bash
# Container entrypoint: configures Hugging Face credentials, prepares an
# optional Android project, sanity-checks the ML stack, then hands off to
# the container command via `exec "$@"`.
#
# Optional env vars: HUGGINGFACE_TOKEN (Hugging Face auth token).

# -e: abort on the first failing command.
# -o pipefail: a pipeline fails if ANY stage fails, not just the last one
# (plain `set -e` alone lets mid-pipeline failures go unnoticed).
set -eo pipefail

# Ensure the Hugging Face config directory exists before any token is written.
mkdir -p ~/.huggingface
# Persist the Hugging Face token so huggingface_hub / transformers can
# authenticate non-interactively.
if [ -n "${HUGGINGFACE_TOKEN:-}" ]; then
  echo "Setting up Hugging Face credentials..."
  mkdir -p ~/.huggingface
  # huggingface_hub expects this file to contain the bare token only (see
  # HfFolder.save_token); the previous "default_token=..." prefix would be
  # read back as part of the token and break authentication.
  printf '%s' "$HUGGINGFACE_TOKEN" > ~/.huggingface/token
  # The token is a credential: restrict access to the owner.
  chmod 600 ~/.huggingface/token
fi
# Pre-fetch Gradle dependencies when an Android project is mounted.
# The work runs in a subshell so the `cd` does not leak into the rest of the
# entrypoint — previously the script (and the final `exec "$@"`) kept running
# from /workspace/android-project after this step.
if [ -d "/workspace/android-project" ]; then
  echo "Android project detected, installing dependencies..."
  (
    cd /workspace/android-project
    if [ -f "gradlew" ]; then
      chmod +x gradlew
      # --no-daemon: don't leave a Gradle daemon process behind in the container.
      ./gradlew --no-daemon dependencies
    fi
  )
fi
# Sanity check: PyTorch must import cleanly; report whether a GPU is visible.
python3 -c 'import torch; print("CUDA available:", torch.cuda.is_available())'
# Generate a smoke-test script the user can run to validate the environment.
# The delimiter is quoted ('EOL') so the Python source is written literally:
# no shell parameter/command expansion can corrupt the generated file.
cat > /workspace/test_environment.py << 'EOL'
from transformers import pipeline
import torch

print("PyTorch version:", torch.__version__)
print("CUDA available:", torch.cuda.is_available())
if torch.cuda.is_available():
    print("CUDA device:", torch.cuda.get_device_name(0))

# 测试Hugging Face pipeline
classifier = pipeline("sentiment-analysis")
result = classifier("Hello, world!")
print("Test pipeline result:", result)
EOL
# Report readiness and how to run the generated smoke test.
printf '%s\n' "Environment setup complete!"
printf '%s\n' "You can test the environment by running: python3 /workspace/test_environment.py"

# Replace this shell with the container command so it runs as the main
# process and receives signals directly. With no arguments, exec is a no-op.
exec "$@"