{
  "model_type": "twentyq",
  "num_questions": 156,
  "num_targets": 1200,
  "vocab_size": 259,
  "architectures": [
    "TwentyQForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_twentyq.TwentyQConfig",
    "AutoModelForCausalLM": "modeling_twentyq.TwentyQForCausalLM"
  },
  "torch_dtype": "float32"
}