- state.param_states.decoder.layers_0.pre_self_attention_layer_norm.scale.v
- state.param_states.decoder.layers_1.pre_cross_attention_layer_norm.scale.v
- state.param_states.decoder.layers_12.pre_mlp_layer_norm.scale.v
- state.param_states.decoder.layers_14.pre_mlp_layer_norm.scale.v
- state.param_states.decoder.layers_15.pre_mlp_layer_norm.scale.v
- state.param_states.decoder.layers_16.pre_cross_attention_layer_norm.scale.v
- state.param_states.decoder.layers_17.pre_mlp_layer_norm.scale.v
- state.param_states.decoder.layers_17.pre_self_attention_layer_norm.scale.v
- state.param_states.decoder.layers_19.pre_self_attention_layer_norm.scale.v
- state.param_states.decoder.layers_2.pre_self_attention_layer_norm.scale.v
- state.param_states.decoder.layers_22.pre_cross_attention_layer_norm.scale.v
- state.param_states.decoder.layers_5.pre_mlp_layer_norm.scale.v
- state.param_states.decoder.layers_7.pre_mlp_layer_norm.scale.v
- state.param_states.decoder.layers_8.pre_cross_attention_layer_norm.scale.v
- state.param_states.decoder.relpos_bias.rel_embedding.v
- state.param_states.encoder.layers_1.pre_mlp_layer_norm.scale.v
- state.param_states.encoder.layers_14.pre_attention_layer_norm.scale.v
- state.param_states.encoder.layers_16.pre_mlp_layer_norm.scale.v
- state.param_states.encoder.layers_4.pre_attention_layer_norm.scale.v
- target.decoder.layers_0.encoder_decoder_attention.value.kernel
- target.decoder.layers_0.mlp.wi_1.kernel
- target.decoder.layers_0.self_attention.key.kernel
- target.decoder.layers_10.encoder_decoder_attention.query.kernel
- target.decoder.layers_11.encoder_decoder_attention.value.kernel
- target.decoder.layers_12.encoder_decoder_attention.out.kernel
- target.decoder.layers_12.encoder_decoder_attention.value.kernel
- target.decoder.layers_12.pre_cross_attention_layer_norm.scale
- target.decoder.layers_12.pre_self_attention_layer_norm.scale
- target.decoder.layers_13.encoder_decoder_attention.out.kernel
- target.decoder.layers_13.pre_cross_attention_layer_norm.scale
- target.decoder.layers_14.encoder_decoder_attention.value.kernel
- target.decoder.layers_14.pre_self_attention_layer_norm.scale
- target.decoder.layers_15.mlp.wi_0.kernel
- target.decoder.layers_16.encoder_decoder_attention.key.kernel
- target.decoder.layers_16.mlp.wi_0.kernel
- target.decoder.layers_17.encoder_decoder_attention.query.kernel
- target.decoder.layers_17.encoder_decoder_attention.value.kernel
- target.decoder.layers_18.encoder_decoder_attention.query.kernel
- target.decoder.layers_18.pre_self_attention_layer_norm.scale
- target.decoder.layers_18.self_attention.key.kernel
- target.decoder.layers_18.self_attention.out.kernel
- target.decoder.layers_19.encoder_decoder_attention.query.kernel
- target.decoder.layers_19.pre_mlp_layer_norm.scale
- target.decoder.layers_19.self_attention.key.kernel
- target.decoder.layers_20.encoder_decoder_attention.key.kernel
- target.decoder.layers_20.encoder_decoder_attention.value.kernel
- target.decoder.layers_21.encoder_decoder_attention.out.kernel
- target.decoder.layers_21.mlp.wo.kernel
- target.decoder.layers_21.pre_mlp_layer_norm.scale
- target.decoder.layers_21.self_attention.key.kernel