{
  "architectures": [
    "EMGForCausalLM"
  ],
  "dropout": 0.01,
  "embedding_dim": 650,
  "hidden_dim": 650,
  "model_type": "emg",
  "num_layers": 1,
  "pad_token_id": 60004,
  "torch_dtype": "float32",
  "transformers_version": "4.52.3",
  "use_gradient_checkpointing": false,
  "use_layer_norm": true,
  "vocab_size": 60001,
  "auto_map": {
    "AutoConfig": "modeling_emg.EMGConfig",
    "AutoModel": "modeling_emg.EMGLanguageModel",
    "AutoModelForCausalLM": "modeling_emg.EMGForCausalLM",
    "AutoTokenizer": "modeling_emg.MorPieceTokenizer"
  }
}