{
"output_path": "/rep/nhamad/ArabicNER/newTraining/Wojoodoutput",
"train_path": "/rep/nhamad/ArabicNER/newTraining/train.txt",
"val_path": "/rep/nhamad/ArabicNER/newTraining/val.txt",
"test_path": "/rep/nhamad/ArabicNER/newTraining/test.txt",
"bert_model": "aubmindlab/bert-base-arabertv2",
"gpus": [
0
],
"log_interval": 10,
"batch_size": 8,
"num_workers": 0,
"data_config": {
"fn": "arabiner.data.datasets.DefaultDataset",
"kwargs": {
"max_seq_len": 512,
"bert_model": "aubmindlab/bert-base-arabertv2"
}
},
"trainer_config": {
"fn": "arabiner.trainers.BertTrainer",
"kwargs": {
"max_epochs": 50
}
},
"network_config": {
"fn": "arabiner.nn.BertSeqTagger",
"kwargs": {
"dropout": 0.1,
"bert_model": "aubmindlab/bert-base-arabertv2",
"num_labels": 43
}
},
"optimizer": {
"fn": "torch.optim.AdamW",
"kwargs": {
"lr": 1e-05
}
},
"lr_scheduler": {
"fn": "torch.optim.lr_scheduler.ExponentialLR",
"kwargs": {
"gamma": 1
}
},
"loss": {
"fn": "torch.nn.CrossEntropyLoss",
"kwargs": {}
},
"overwrite": false,
"seed": 1
}