{
    "producer": {
        "name": "modelopt",
        "version": "0.37.0"
    },
    "architecture": "Phi3ForCausalLM",
    "dtype": "bfloat16",
    "logits_dtype": "float16",
    "num_hidden_layers": 32,
    "num_attention_heads": 24,
    "num_key_value_heads": 8,
    "hidden_size": 3072,
    "norm_epsilon": 1e-05,
    "vocab_size": 200064,
    "max_position_embeddings": 131072,
    "hidden_act": "swiglu",
    "use_parallel_embedding": true,
    "embedding_sharding_dim": 0,
    "head_size": 128,
    "intermediate_size": 8192,
    "position_embedding_type": "long_rope",
    "share_embedding_table": false,
    "residual_mlp": false,
    "bias": false,
    "rotary_pct": 0.75,
    "rank": 0,
    "decoder": "phi3",
    "rmsnorm": true,
    "lm_head_bias": false,
    "rotary_base": 10000.0,
    "rotary_scaling": null,
    "runtime_defaults": null,
    "mapping": {
        "world_size": 1,
        "gpus_per_node": 8,
        "cp_size": 1,
        "tp_size": 1,
        "pp_size": 1,
        "moe_tp_size": 1,
        "moe_cluster_size": 1,
        "moe_ep_size": 1,
        "attn_tp_size": 1,
        "attn_cp_size": 1,
        "cp_config": {},
        "enable_attention_dp": false,
        "enable_lm_head_tp_in_adp": false
    },
    "quantization": {
        "quant_algo": "FP8",
        "kv_cache_quant_algo": "FP8",
        "group_size": 128,
        "smoothquant_val": 0.5,
        "clamp_val": null,
        "use_meta_recipe": false,
        "has_zero_point": false,
        "pre_quant_scale": false,
        "exclude_modules": [
            "lm_head"
        ],
        "mamba_ssm_cache_dtype": null
    },
    "qk_layernorm": false,
    "rotary_embedding_dim": 96,
    "tie_word_embeddings": true,
    "original_max_position_embeddings": 4096,
    "longrope_scaling_short_factors": [
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0,
        1.0
    ],
    "longrope_scaling_long_factors": [
        1,
        1.118320672,
        1.250641126,
        1.398617824,
        1.564103225,
        1.74916897,
        1.956131817,
        2.187582649,
        2.446418898,
        2.735880826,
        3.059592084,
        3.421605075,
        3.826451687,
        4.279200023,
        4.785517845,
        5.351743533,
        5.984965424,
        6.693110555,
        7.485043894,
        8.370679318,
        9.36110372,
        10.4687158,
        11.70738129,
        13.09260651,
        14.64173252,
        16.37415215,
        18.31155283,
        20.47818807,
        22.90118105,
        25.61086418,
        28.64115884,
        32.03,
        32.1,
        32.13,
        32.23,
        32.6,
        32.61,
        32.64,
        32.66,
        32.7,
        32.71,
        32.93,
        32.97,
        33.28,
        33.49,
        33.5,
        44.16,
        47.77
    ],
    "model_type": "phi3"
}