# Radiant-Shadow-12B — mergekit_config.yml
# Uploaded by Vortex5 via huggingface_hub (commit 011019a, verified)
---
# mergekit passthrough (frankenmerge) configuration:
# stacks contiguous layer ranges from three 12B donor models into one
# 40-layer model. layer_range bounds are [start, end) — half-open —
# so 17 + 14 + 9 = 40 layers total, with no gaps or overlaps.
slices:
  - sources:
      - model: Vortex5/Lunar-Nexus-12B
        layer_range: [0, 17]
  - sources:
      - model: Retreatcost/KansenSakura-Radiance-RP-12b
        layer_range: [17, 31]
  - sources:
      - model: Vortex5/Shadow-Crystal-12B
        layer_range: [31, 40]
# passthrough copies each slice's weights verbatim (no interpolation),
# so exactly one model may be listed per sources entry.
merge_method: passthrough
dtype: bfloat16
tokenizer:
  # union: merged vocabulary covering every donor model's tokenizer
  source: union