{
"model_type": "twentyq",
"num_questions": 156,
"num_targets": 1200,
"vocab_size": 259,
"architectures": [
"TwentyQForCausalLM"
],
"auto_map": {
"AutoConfig": "configuration_twentyq.TwentyQConfig",
"AutoModelForCausalLM": "modeling_twentyq.TwentyQForCausalLM"
},
"torch_dtype": "float32"
}