```json
{
  "_name_or_path": "/local/musaeed/UofkDistill/model/checkpoint.pth",
  "activation": "gelu",
  "attention_dropout": 0.1,
  "dim": 1024,
  "dropout": 0.1,
  "hidden_dim": 4096,
  "initializer_range": 0.02,
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 16,
  "n_layers": 6,
  "output_hidden_states": true,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": true,
  "tie_weights_": true,
  "transformers_version": "4.24.0",
  "vocab_size": 32000
}
```
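
As a minimal sketch of how this configuration could be used: saving the JSON above as `config.json` in a local directory (the name `model_dir` here is hypothetical) lets `transformers` rebuild the architecture from it. Note that this instantiates the model with random weights; the checkpoint named in `_name_or_path` would have to be loaded separately.

```python
from transformers import DistilBertConfig, DistilBertModel

# Assumes the JSON above is saved as model_dir/config.json.
config = DistilBertConfig.from_pretrained("model_dir")

# Sanity-check a few fields parsed from the config above.
assert config.dim == 1024
assert config.n_heads == 16
assert config.n_layers == 6

# Builds the architecture only; weights are randomly initialized
# until a checkpoint is loaded on top.
model = DistilBertModel(config)
print(model.config.model_type)  # "distilbert"
```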