SykoLLM-0.1B / config.json
{
  "n_layer": 6,
  "n_head": 4,
  "n_embd": 256,
  "max_position_embeddings": 512,
  "vocab_size": 194,
  "model_type": "gpt2",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "embd_pdrop": 0.1,
  "resid_pdrop": 0.1,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "n_positions": 512,
  "scale_attn_weights": true,
  "use_cache": true,
  "pad_token_id": 0,
  "bos_token_id": 2,
  "eos_token_id": 3,
  "unk_token_id": 1
}
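
These hyperparameters describe a small GPT-2 variant: 6 transformer layers, 4 attention heads, 256-dimensional embeddings, a 194-token vocabulary, and a 512-token context window. A minimal sketch of instantiating the model from this config with the Hugging Face transformers library, assuming the file sits in a local directory named SykoLLM-0.1B (the directory name is inferred from the page title; a Hub repo id would work the same way):

from transformers import GPT2Config, GPT2LMHeadModel

# Assumed path: a local directory containing the config.json shown above.
config = GPT2Config.from_pretrained("SykoLLM-0.1B")

# Builds a randomly initialized GPT2LMHeadModel with 6 layers,
# 4 heads, 256-dim embeddings, and a 194-token vocabulary.
model = GPT2LMHeadModel(config)

# With tied input/output embeddings this works out to roughly
# 4.9M parameters, despite the "0.1B" in the repo name.
print(sum(p.numel() for p in model.parameters()))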