{ "module": "keras_hub.src.models.llama.llama_causal_lm", "class_name": "LlamaCausalLM", "config": { "backbone": { "module": "keras_hub.src.models.llama.llama_backbone", "class_name": "LlamaBackbone", "config": { "name": "llama_backbone_1", "trainable": true, "vocabulary_size": 32000, "num_layers": 32, "num_query_heads": 32, "hidden_dim": 4096, "intermediate_dim": 11008, "rope_max_wavelength": 10000.0, "rope_position_scaling_factor": 1.0, "rope_frequency_adjustment_factor": null, "rope_low_freq_factor": null, "rope_high_freq_factor": null, "rope_pretraining_sequence_length": null, "num_key_value_heads": 32, "layer_norm_epsilon": 1e-05, "dropout": 0 }, "registered_name": "keras_hub>LlamaBackbone" }, "preprocessor": { "module": "keras_hub.src.models.llama.llama_causal_lm_preprocessor", "class_name": "LlamaCausalLMPreprocessor", "config": { "name": "llama_causal_lm_preprocessor", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "bfloat16" }, "registered_name": null }, "tokenizer": { "module": "keras_hub.src.models.llama.llama_tokenizer", "class_name": "LlamaTokenizer", "config": { "name": "llama_tokenizer", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "int32" }, "registered_name": null }, "config_file": "tokenizer.json", "proto": null, "sequence_length": null, "add_bos": false, "add_eos": false }, "registered_name": "keras_hub>LlamaTokenizer" }, "config_file": "preprocessor.json", "sequence_length": 1024, "add_start_token": true, "add_end_token": true }, "registered_name": "keras_hub>LlamaCausalLMPreprocessor" }, "name": "llama_causal_lm" }, "registered_name": "keras_hub>LlamaCausalLM" }