{
"_from_model_config": true,
"max_length": 1024,
"output_attentions": false,
"output_hidden_states": false,
"transformers_version": "5.3.0",
"auto_map": {
"AutoConfig": "modeling_loop_lm.LoopLMConfig",
"AutoModelForCausalLM": "modeling_loop_lm.LoopLMForCausalLM"
}
}