```json
{
  "model_type": "tiny-gpt",
  "vocab_size": 1920,
  "n_layers": 6,
  "n_heads": 6,
  "d_model": 240,
  "d_ff": 960,
  "n_ctx": 64,
  "dropout": 0.0,
  "tie_word_embeddings": true,
  "special_tokens": {
    "pad_token": "<PAD>",
    "bos_token": "<BOS>",
    "eos_token": "<EOS>",
    "sep_token": "<SEP>",
    "unk_token": "<UNK>",
    "ids": {
      "pad": 0,
      "bos": 1,
      "eos": 2,
      "sep": 3,
      "unk": 4
    }
  },
  "quantization": {
    "default": "int8",
    "embedding_bits": 18,
    "layernorm_bits": 24,
    "format": "safetensors+scales"
  }
}
```
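The hyperparameters above encode a few implicit invariants: `d_model` must split evenly across `n_heads`, `d_ff` here follows the common 4x convention (960 = 4 × 240), and the special-token ids must be unique and inside the vocabulary. Below is a minimal Python sketch that loads the file and checks these; it assumes the JSON is saved as `config.json`, and `load_config`/`validate` are hypothetical helpers keyed to this file's field names, not part of any library API.

```python
"""Minimal sketch: load and sanity-check the tiny-gpt config above.

Assumes the JSON is saved as config.json; load_config and validate are
hypothetical helpers written for this file's field names, not a library API.
"""
import json
import warnings


def load_config(path: str) -> dict:
    with open(path) as f:
        return json.load(f)


def validate(cfg: dict) -> None:
    # The model width must split evenly across attention heads.
    assert cfg["d_model"] % cfg["n_heads"] == 0, "d_model not divisible by n_heads"
    # This config follows the common 4x feed-forward convention (960 = 4 * 240).
    assert cfg["d_ff"] == 4 * cfg["d_model"], "d_ff deviates from 4 * d_model"
    # Special-token ids must be unique and fit inside the vocabulary.
    ids = cfg["special_tokens"]["ids"]
    assert len(set(ids.values())) == len(ids), "duplicate special-token ids"
    assert all(0 <= i < cfg["vocab_size"] for i in ids.values()), "id out of range"
    # Flag quantization widths outside the usual 4/8/16/32-bit choices.
    q = cfg["quantization"]
    for key in ("embedding_bits", "layernorm_bits"):
        if q[key] not in (4, 8, 16, 32):
            warnings.warn(f"unusual bit width for {key}: {q[key]}")


if __name__ == "__main__":
    cfg = load_config("config.json")
    validate(cfg)
    # Per-head dimension: 240 / 6 = 40.
    print("head_dim =", cfg["d_model"] // cfg["n_heads"])
```

One design note: with `tie_word_embeddings` enabled, the output projection shares the input embedding matrix, so a single `embedding_bits` setting presumably governs both ends of the model.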