{
  "model_type": "tx_model",
  "architectures": [
    "TXModelForHF"
  ],
  "vocab_size": 62720,
  "d_model": 512,
  "n_layers": 12,
  "n_heads": 8,
  "expansion_ratio": 4,
  "norm_scheme": "pre",
  "transformer_activation": "relu",
  "use_glu": false,
  "pad_token_id": 0,
  "pad_value": -2,
  "num_bins": 51,
  "use_chem_token": false,
  "keep_first_n_tokens": 1,
  "cell_emb_style": "cls",
  "return_gene_embeddings": false,
  "standard_scale_outputs": false,
  "attn_config": {
    "attn_impl": "flash",
    "use_attn_mask": false,
    "attn_type": "grouped_query_attention",
    "kv_nheads": 8,
    "attn_pdrop": 0
  },
  "norm_config": {
    "eps": 1e-05,
    "norm_type": "layernorm"
  },
  "gene_encoder_config": {
    "use_norm": true
  },
  "expression_encoder_config": {
    "dropout": 0.1,
    "use_norm": true,
    "max_value": 512,
    "activation": "relu",
    "input_emb_style": "continuous"
  },
  "expression_decoder_config": {
    "n_layers": 1,
    "n_outputs": 1,
    "activation": "leaky_relu"
  },
  "mvc_config": {
    "arch_style": "inner product",
    "query_activation": "sigmoid",
    "scaled_dot_product": true
  },
  "auto_map": {
    "AutoConfig": "modeling.TXConfig",
    "AutoModel": "modeling.TXModelForHF",
    "AutoModelForCausalLM": "modeling.TXModelForHF"
  },
  "transformers_version": "4.35.0"
}
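
Because `auto_map` routes `AutoConfig` and `AutoModel` to custom classes (`TXConfig`, `TXModelForHF`) defined in a `modeling.py` shipped alongside this config, loading requires `trust_remote_code=True`. Below is a minimal loading sketch, assuming a hypothetical repository id `org/tx_model` (a placeholder, not taken from the config) and a standard Hugging Face file layout:

```python
from transformers import AutoConfig, AutoModel

# Placeholder repository id; substitute the actual checkpoint location.
repo = "org/tx_model"

# trust_remote_code=True lets transformers import modeling.TXConfig and
# modeling.TXModelForHF from the repo, as declared in auto_map above.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
model = AutoModel.from_pretrained(repo, trust_remote_code=True)

# Fields from this config are exposed as attributes on the loaded config.
print(config.d_model, config.n_layers, config.n_heads)  # 512 12 8
```

Note that although `attn_config.attn_type` is `grouped_query_attention`, `kv_nheads` equals `n_heads` (8), so the attention is effectively standard multi-head attention with this setting.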