---
# mergekit configuration: DARE-TIES merge of five Mistral-7B fine-tunes
# onto the Instruct-v0.3 base. Per-model `weight` values sum to 1.0
# (0.15 + 0.25 + 0.2 + 0.2 + 0.2); `normalize: true` below keeps the
# result scale-consistent regardless.
merge_method: dare_ties
base_model: mistralai/Mistral-7B-Instruct-v0.3

models:
  # Base model itself, included with a small weight.
  - model: mistralai/Mistral-7B-Instruct-v0.3
    parameters:
      weight: 0.15
      density: 0.5
  # Code-focused fine-tunes.
  - model: uukuguy/speechless-code-mistral-7b-v2.0
    parameters:
      weight: 0.25
      density: 0.7
  - model: Nondzu/Mistral-7B-Instruct-v0.2-code-ft
    parameters:
      weight: 0.2
      density: 0.6
  # General instruction-following fine-tune.
  - model: teknium/OpenHermes-2.5-Mistral-7B
    parameters:
      weight: 0.2
      density: 0.6
  # Math-focused fine-tune.
  - model: meta-math/MetaMath-Mistral-7B
    parameters:
      weight: 0.2
      density: 0.6

# Global merge parameters (apply to the dare_ties method as a whole).
parameters:
  int8_mask: true
  normalize: true

dtype: bfloat16
# Take the tokenizer from the v0.3 base rather than any of the
# merged fine-tunes (some of which are built on v0.2).
tokenizer_source: mistralai/Mistral-7B-Instruct-v0.3