UNNAMED-MODEL-2D / mergekit_config.yml
Tarek07's picture
Add files using upload-large-folder tool
daf657d verified
raw
history blame contribute delete
874 Bytes
# mergekit configuration — DELLA merge of five TareksLab Llama-70B models.
# Each source model contributes an equal weight of 0.20; density/epsilon/lambda
# are DELLA drop-and-rescale pruning parameters, identical for every model.
models:
  - model: TareksLab/Wordsmith-V7-LLaMa-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.1
      lambda: 1.0
  - model: TareksLab/Anathema-V8-LLaMA-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.1
      lambda: 1.0
  - model: TareksLab/Scrivener-Base-V6-LLaMA-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.1
      lambda: 1.0
  - model: TareksLab/RolePlayer-V6-LLaMa-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.1
      lambda: 1.0
  - model: TareksLab/Cortex-V4-LLaMA-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.1
      lambda: 1.0
merge_method: della
# Scrivener serves as the base model the deltas are computed against.
base_model: TareksLab/Scrivener-Base-V6-LLaMA-70B
parameters:
  normalize: false
dtype: bfloat16
chat_template: llama3
tokenizer:
  # Tokenizer is taken from Cortex rather than the base model.
  source: TareksLab/Cortex-V4-LLaMA-70B