{
    "module": "keras_hub.src.models.llama.llama_backbone",
    "class_name": "LlamaBackbone",
    "config": {
        "name": "llama_backbone_1",
        "trainable": true,
        "vocabulary_size": 32000,
        "num_layers": 32,
        "num_query_heads": 32,
        "hidden_dim": 4096,
        "intermediate_dim": 11008,
        "rope_max_wavelength": 10000.0,
        "rope_position_scaling_factor": 1.0,
        "rope_frequency_adjustment_factor": null,
        "rope_low_freq_factor": null,
        "rope_high_freq_factor": null,
        "rope_pretraining_sequence_length": null,
        "num_key_value_heads": 32,
        "layer_norm_epsilon": 1e-05,
        "dropout": 0
    },
    "registered_name": "keras_hub>LlamaBackbone"
}