slices:
  - sources:
      - model: dbands/llama-3-8b-instruct_code_instructions_122k_alpaca_style_16bit
        layer_range: [0, 32]
      - model: artificialguybr/llama3-8b-redmond-code290k
        layer_range: [0, 32]

merge_method: slerp
base_model: dbands/llama-3-8b-instruct_code_instructions_122k_alpaca_style_16bit
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1] # layer-wise gradient for attention tensors (t=0 keeps base weights)
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0] # inverse gradient for MLP tensors
    - value: 0.5 # fallback for rest of tensors
dtype: float16
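
This config describes a mergekit SLERP merge: all 32 layers of the two Llama-3-8B code models are spherically interpolated, with t=0 keeping the base model's weights and t=1 taking the other model's, opposing layer-wise gradients for the self-attention and MLP tensors, and t=0.5 for all remaining tensors. Below is a minimal sketch of running the merge via mergekit's Python API, following the usage shown in the mergekit README; the config and output paths are illustrative assumptions, not part of this repository.

import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "./slerp-config.yml"  # hypothetical path to the YAML above
OUTPUT_PATH = "./merged-model"     # hypothetical output directory

# Parse and validate the YAML into mergekit's config object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the merge and write the result as a Hugging Face model directory.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # do the interpolation on GPU if one is available
        copy_tokenizer=True,             # carry the base model's tokenizer into the output
    ),
)

The same merge can be run from the command line with mergekit-yaml slerp-config.yml ./merged-model. Either way, the resulting directory loads like any other checkpoint via transformers.AutoModelForCausalLM.from_pretrained.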