---
# mergekit merge configuration: DARE-TIES merge of the base Llama-3-8B-Instruct
# with a coder fine-tune, biased slightly toward the coder model (0.55 vs 0.45).
merge_method: dare_ties
base_model: meta-llama/Meta-Llama-3-8B-Instruct

models:
  - model: meta-llama/Meta-Llama-3-8B-Instruct
    parameters:
      weight: 0.45    # relative contribution to the merged weights
      density: 0.6    # fraction of delta parameters retained by DARE pruning
  - model: rombodawg/Llama-3-8B-Instruct-Coder
    parameters:
      weight: 0.55
      density: 0.7    # keep more of the coder deltas — presumably to preserve coding skill; verify against merge results

# Global merge parameters (apply to the whole merge, not per-model).
parameters:
  int8_mask: true     # use int8 masks during merging to reduce memory use
  normalize: true     # renormalize weights so contributions sum to 1

dtype: bfloat16
tokenizer_source: meta-llama/Meta-Llama-3-8B-Instruct