---
# mergekit configuration: task-arithmetic merge of two domain-pretrained
# Mistral-7B derivatives (protein and DNA) back onto the base model.
# Each fine-tune contributes its task vector scaled by `weight`.
base_model: mistralai/Mistral-7B-v0.1
models:
  - model: EstherXC/mixtral_7b_protein_pretrain
    parameters:
      weight: 0.3
  - model: wanglab/mixtral_7b_dna_pretrain  # alternative candidate: dnagpt/llama-dna
    parameters:
      weight: 0.3
merge_method: task_arithmetic
dtype: float16
# Reuse the base model's tokenizer rather than one from the merged models.
tokenizer_source: base