{
  "architectures": [
    "GPT2WithEngram"
  ],
  "bucket_size": 50000,
  "d_model": 768,
  "dtype": "float32",
  "engram_dim": 384,
  "injection_layer": 1,
  "model_type": "engram",
  "ngram_orders": [
    2,
    3
  ],
  "num_heads": 8,
  "transformers_version": "4.57.6",
  "vocab_size": 50257
}