Merge Experiments
Collection
Sorted from newest (top) to oldest (bottom) • 69 items
⚠️ Warning: This model can produce narratives and RP that contain violent and graphic erotic content. Adjust your system prompt accordingly, and use the Llama 3 chat template.
This is a highly creative, uncensored merge of pre-trained language models created using mergekit.
A few refusals remain, but the censorship is so minimal that regenerating the response at a moderate temperature usually works. The model could still be ablated if needed.
This model was merged using the DELLA merge method using aifeifei798/DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored as a base.
The following models were included in the merge:
The following YAML configuration was used to produce this model:
---
# mergekit DELLA merge configuration for 🦙 Llamatron-8B-v1.
# NOTE(review): indentation was lost in the original paste (every key sat at
# column 0, producing duplicate top-level `parameters:` keys); reconstructed
# here to the canonical mergekit layout. Windows paths are single-quoted so
# `\` and `!` are taken literally by any YAML parser.
architecture: LlamaForCausalLM
base_model: 'B:\8B\!models--aifeifei798--DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored'  # alt: B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated
models:
  # Base model is listed without per-model parameters (DELLA treats it as the backbone).
  - model: 'B:\8B\!models--aifeifei798--DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored'
  # 'checkpoint@adapter' syntax applies a LoRA/adapter on top of the checkpoint.
  - model: 'B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated@B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated\!Morpheus_v1_8B_finetuned_adapter\LoRA'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated@B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated\!Morpheus_v2_77_8B_finetuned_adapter\LoRA'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated@B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated\!Morpheus_v3_prototype_checkpoint_526'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated@B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated\!minibella_checkpoint-52'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated@B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated\!Raven_v1_8B_finetuned_adapter\checkpoint-125'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated@B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated\!Cthulhu_v1.4_8B_finetuned_adapter\LoRA'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\SicariusSicariiStuffAssistantPepe8B'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--OpenPipe--Hermes-2-Theta-Llama-3-8B-32k'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--Bacon666--Athlon-8B-0.1'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\juiceb0xc0de__bella-bartender-8b-llama3.1'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Dusk_Rainbow'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--Sao10K--L3-8B-Stheno-v3.2'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--akjindal53244--Llama-3.1-Storm-8B'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Llama-3.1-Nemotron-8B-UltraLong-1M-Instruct_Abliterated'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--aifeifei798--DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored@B:\8B\!models--Azazelle--Llama-3-LongStory-LORA'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--Babsie--ThetaBlackGorgon-8B'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--TroyDoesAI--BlackSheep-X-Dolphin'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--Hastagaras--Jamet-8B-L3-MK.V-Blackroot'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--NeverSleep--Llama-3-Lumimaid-8B-v0.1-OAS'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
  - model: 'B:\8B\!models--SicariusSicariiStuff--Impish_Mind_8B'
    parameters:
      weight: 0.1
      density: 0.9
      epsilon: 0.099
# Seed: 420
merge_method: della
# Global merge parameters; int8_mask belongs here per mergekit convention —
# TODO(review): confirm it was intended as a merge parameter, not a top-level key.
parameters:
  lambda: 1.0
  normalize: false
  int8_mask: false
dtype: float32
out_dtype: bfloat16
tokenizer:
  source: 'B:\8B\SicariusSicariiStuffAssistantPepe8B'
chat_template: auto
name: '🦙 Llamatron-8B-v1'