{ "module": "keras_hub.src.models.gpt_oss.gpt_oss_causal_lm", "class_name": "GptOssCausalLM", "config": { "backbone": { "module": "keras_hub.src.models.gpt_oss.gpt_oss_backbone", "class_name": "GptOssBackbone", "config": { "name": "gpt_oss_backbone", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "float32" }, "registered_name": null }, "vocabulary_size": 201088, "num_layers": 24, "num_query_heads": 64, "hidden_dim": 2880, "intermediate_dim": 2880, "num_experts": 32, "top_k": 4, "rope_max_wavelength": 150000, "rope_scaling_factor": 32.0, "num_key_value_heads": 8, "sliding_window": 128, "layer_norm_epsilon": 1e-05, "dropout": 0, "output_router_logits": false, "head_dim": 64 }, "registered_name": "keras_hub>GptOssBackbone" }, "preprocessor": { "module": "keras_hub.src.models.gpt_oss.gpt_oss_causal_lm_preprocessor", "class_name": "GptOssCausalLMPreprocessor", "config": { "name": "gpt_oss_causal_lm_preprocessor_1", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "float32" }, "registered_name": null }, "tokenizer": { "module": "keras_hub.src.models.gpt_oss.gpt_oss_tokenizer", "class_name": "GptOssTokenizer", "config": { "name": "gpt_oss_tokenizer", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "int32" }, "registered_name": null }, "config_file": "tokenizer.json", "sequence_length": null, "add_prefix_space": false, "unsplittable_tokens": [ "<|endoftext|>" ] }, "registered_name": "keras_hub>GptOssTokenizer" }, "config_file": "preprocessor.json", "sequence_length": 1024, "add_start_token": true, "add_end_token": true }, "registered_name": "keras_hub>GptOssCausalLMPreprocessor" }, "name": "gpt_oss_causal_lm" }, "registered_name": "keras_hub>GptOssCausalLM" }