merge_method: dare_ties
base_model: meta-llama/Meta-Llama-3-8B-Instruct
models:
  - model: meta-llama/Meta-Llama-3-8B-Instruct
    parameters:
      weight: 0.45
      density: 0.6
  - model: rombodawg/Llama-3-8B-Instruct-Coder
    parameters:
      weight: 0.55
      density: 0.7
parameters:
  int8_mask: true
  normalize: true
dtype: bfloat16
tokenizer_source: meta-llama/Meta-Llama-3-8B-Instruct
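This is a mergekit configuration: it merges meta-llama/Meta-Llama-3-8B-Instruct (the base model) with rombodawg/Llama-3-8B-Instruct-Coder using the DARE-TIES method, where weight scales each model's contribution and density is the fraction of delta parameters retained. Below is a minimal sketch of executing such a config with mergekit's Python entry points (MergeConfiguration, run_merge, MergeOptions), following the pattern in the mergekit README; the config path and output directory are placeholders, and the mergekit-yaml CLI is the more common alternative.

# Sketch: run the merge config above via mergekit's Python API.
# Assumes the YAML is saved as ./config.yml; the output path is a placeholder.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("./config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged-llama-3-8b-instruct-coder",  # placeholder output directory
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is available
        copy_tokenizer=True,             # copy files from tokenizer_source into the output
        lazy_unpickle=True,              # stream tensors instead of loading whole checkpoints
    ),
)

The resulting directory can then be loaded like any Hugging Face checkpoint, e.g. with AutoModelForCausalLM.from_pretrained.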