---
# mergekit configuration: linear merge of two Llama-2-7B fine-tunes.
# The 32 transformer layers are split into eight 4-layer slices; each slice
# blends the two source models with individually tuned weights.
# normalize: 1.0 rescales the per-slice weights to sum to 1 at merge time.
dtype: bfloat16
merge_method: linear
parameters:
  int8_mask: 1.0
  normalize: 1.0
slices:
  - sources:
      - layer_range: [0, 4]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.2314007323608976
      - layer_range: [0, 4]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.6555169289144125
  - sources:
      - layer_range: [4, 8]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.40114629790472045
      - layer_range: [4, 8]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.3638313057798464
  - sources:
      - layer_range: [8, 12]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.6552004806174763
      - layer_range: [8, 12]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.5932631992622696
  - sources:
      - layer_range: [12, 16]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.5796797657003963
      - layer_range: [12, 16]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.5509781091865962
  - sources:
      - layer_range: [16, 20]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.3670138586048981
      - layer_range: [16, 20]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.4819576330601912
  - sources:
      - layer_range: [20, 24]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.8704563123348978
      - layer_range: [20, 24]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.23823012325912632
  - sources:
      - layer_range: [24, 28]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.3527054439651195
      - layer_range: [24, 28]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.7886966464065228
  - sources:
      - layer_range: [28, 32]
        model: Nisk36/finetuned-lmsys_vicuna-7b-v1.5
        parameters:
          weight: 0.615452246302311
      - layer_range: [28, 32]
        model: Nisk36/FT_elyza_ELYZA-japanese-Llama-2-7b-instruct
        parameters:
          weight: 0.10099107707206684