{
  "annotations": true,
  "architectures": [
    "CAMP"
  ],
  "dropout": 0.05,
  "hidden_dim": 640,
  "input_dim": 640,
  "intermediate_dim": 640,
  "out_dim": 640,
  "kernel_size": 7,
  "model_type": "CAMP",
  "nhead": 4,
  "bias": true,
  "nlp_path": "lhallee/annotation_transformer_test",
  "num_hidden_layers": 1,
  "num_layers": 1,
  "plm_path": "facebook/esm2_t33_650M_UR50D",
  "pooling": "avg",
  "loss_type": "diff",
  "token": null,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.41.1"
}