update run scripts

Files changed:
- run_sup_example.sh (+7 -0)
- run_unsup_example.sh (+4 -0)
run_sup_example.sh (changed)

@@ -1,3 +1,10 @@
+#!/bin/bash
+
+export ZONE=us-central2-b
+export XRT_TPU_CONFIG="localservice;0;localhost:51011"
+export TPU_NUM_DEVICES=4
+export ALLOW_MULTIPLE_LIBTPU_LOAD=1
+
 python ../../SimCSE/train.py \
     --model_name_or_path NbAiLab/nb-bert-base \
     --train_file data/mnli_no_for_simcse.csv \
run_unsup_example.sh (changed)

@@ -3,6 +3,10 @@
 # In this example, we show how to train SimCSE on unsupervised Wikipedia data.
 # If you want to train it with multiple GPU cards, see "run_sup_example.sh"
 # about how to use PyTorch's distributed data parallel.
+export ZONE=us-central2-b
+export XRT_TPU_CONFIG="localservice;0;localhost:51011"
+export TPU_NUM_DEVICES=4
+export ALLOW_MULTIPLE_LIBTPU_LOAD=1

 python3 ../../SimCSE/train.py \
     --model_name_or_path NbAiLab/nb-bert-base \