# MN-12B-Hydra-RP-RU / mergekit_config.yml
# (Hugging Face page chrome from the scrape, kept as comments:)
# limloop's picture
# init
# a516664
# raw
# history blame contribute delete
# 596 Bytes
---
# mergekit TIES-merge configuration.
# The scraped copy lost all indentation, which flattened every mapping to the
# top level and produced duplicate keys (`parameters`, `weight`, `density`).
# Restored here to mergekit's documented schema: each list item under `models`
# carries its own `parameters` mapping.
models:
  - model: Aleteian/Pathfinder-RP-12B-RU
    parameters:
      weight: 0.6
      # NOTE(review): no `density` given for the base model — it falls back to
      # the method default; confirm this is intentional.
  - model: IlyaGusev/vikhr_nemo_orpo_dostoevsky_12b_slerp
    parameters:
      weight: 0.25
      density: 0.9
  - model: DavidAU/Mistral-Nemo-2407-12B-Thinking-Claude-Gemini-GPT5.2-Uncensored-HERETIC
    parameters:
      weight: 0.3
      density: 0.9
  - model: Naphula/MN-12B-Mag-Mell-R1-Uncensored
    parameters:
      weight: 0.2
      density: 0.9

merge_method: ties
# Global merge parameters (apply to the TIES procedure itself).
parameters:
  epsilon: 0.01
  # Weights sum to 1.35; normalize rescales them to sum to 1.
  normalize: true

base_model: Aleteian/Pathfinder-RP-12B-RU
dtype: bfloat16

tokenizer:
  # Take the tokenizer from `base_model` rather than building a union vocab.
  source: "base"