Text Generation
Transformers
Safetensors
English
mistral
creative
creative writing
fiction writing
plot generation
sub-plot generation
story generation
scene continue
storytelling
fiction story
science fiction
romance
all genres
story
writing
vivid prosing
vivid writing
fiction
roleplaying
float32
swearing
rp
horror
della
Merge
mergekit
conversational
text-generation-inference
metadata
# Hugging Face model-card metadata for Goetia-24B-v1.3.
# base_model lists the 33 source repos (base + 32 weighted donors) used in the merge.
base_model:
  - allura-forge/ms32-final-TEXTONLY
  - ArliAI/Mistral-Small-24B-ArliAI-RPMax-v1.4
  - anthracite-core/Mistral-Small-3.2-24B-Instruct-2506-Text-Only
  - arcee-ai/Arcee-Blitz
  - ConicCat/Mistral-Small-3.2-AntiRep-24B
  - CrucibleLab/M3.2-24B-Loki-V2
  - DarkArtsForge/Morax-24B-v1
  - Darkhn/M3.2-24B-Animus-V5.1-Pro
  - Darkhn/M3.2-24B-Animus-V7.1
  - Delta-Vector/Rei-24B-KTO
  - dphn/Dolphin-Mistral-24B-Venice-Edition
  - Doctor-Shotgun/MS3.2-24B-Magnum-Diamond
  - FlareRebellion/WeirdCompound-v1.7-24b
  - Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
  - OddTheGreat/Circuitry_24B_V.3
  - PocketDoc/Dans-DangerousWinds-V1.1.1-24b
  - ReadyArt/4.2.0-Broken-Tutu-24b
  - ReadyArt/Broken-Tutu-24B-Transgression-v2.0
  - ReadyArt/Dark-Nexus-24B-v2.0
  - ReadyArt/MS3.2-The-Omega-Directive-24B-Unslop-v2.1
  - spacewars123/Space-Wars-24B-v1.00a
  - TheDrummer/Cydonia-24B-v2
  - TheDrummer/Cydonia-24B-v4.2.0
  - TheDrummer/Cydonia-24B-v4.3
  - TheDrummer/Magidonia-24B-v4.2.0
  - TheDrummer/Magidonia-24B-v4.3
  - TheDrummer/Precog-24B-v1
  - trashpanda-org/MS-24B-Instruct-Mullein-v0
  - trashpanda-org/MS3.2-24B-Mullein-v2
  - TroyDoesAI/BlackSheep-24B
  - Undi95/MistralThinker-v1.1
  - zerofata/MS3.2-PaintedFantasy-v2-24B
  - zerofata/MS3.2-PaintedFantasy-v3-24B
datasets:
  - OccultAI/illuminati_imatrix_v1
language:
  - en
library_name: transformers
license: apache-2.0
# Duplicate "fiction writing" entry removed (it appeared twice in the list).
tags:
  - creative
  - creative writing
  - fiction writing
  - plot generation
  - sub-plot generation
  - story generation
  - scene continue
  - storytelling
  - fiction story
  - science fiction
  - romance
  - all genres
  - story
  - writing
  - vivid prosing
  - vivid writing
  - fiction
  - roleplaying
  - float32
  - swearing
  - rp
  - horror
  - della
  - mistral
  - merge
  - mergekit
widget:
  - text: Goetia-24B-v1.3
    output:
      url: >-
        https://cdn-uploads.huggingface.co/production/uploads/68e840caa318194c44ec2a04/DHbuh4efzjCGpxDUciZ_-.jpeg
๐ Goetia 24B v1.3
The "Della Edition" meant to test bridging 2501 and 2503 models. See this post and this other post for more info.
This is a merge of pre-trained language models created using mergekit.
Merge Details
Merge Method
This model was merged using the following merge method:
# mergekit della merge: 32 weighted donors blended onto a Mistral-Small-3.2
# text-only base. Windows paths are single-quoted so backslashes and '!' are
# always read as literal characters by any YAML loader.
architecture: MistralForCausalLM
models:
  # Base model, restated here per mergekit convention; carries no donor parameters.
  - model: 'B:\24B\!models--anthracite-core--Mistral-Small-3.2-24B-Instruct-2506-Text-Only'
  - model: 'B:\24B\!models--TheDrummer--Cydonia-24B-v4.3'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  - model: 'B:\24B\!models--ReadyArt--4.2.0-Broken-Tutu-24b'
    parameters:
      density: 0.8
      weight: 0.05
      epsilon: 0.1
  - model: 'B:\24B\!models--zerofata--MS3.2-PaintedFantasy-v2-24B'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  - model: 'B:\24B\!models--TheDrummer--Magidonia-24B-v4.3'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  - model: 'B:\24B\!models--TheDrummer--Precog-24B-v1'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  - model: 'B:\24B\!models--zerofata--MS3.2-PaintedFantasy-v3-24B'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  - model: 'B:\24B\!BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  - model: 'B:\24B\!models--ReadyArt--Broken-Tutu-24B-Transgression-v2.0'
    parameters:
      density: 0.8
      weight: 0.05
      epsilon: 0.1
  - model: 'B:\24B\!models--trashpanda-org--MS3.2-24B-Mullein-v2'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  # - model: 'B:\24B\!models--LatitudeGames--Hearthfire-24B'
  #   parameters:
  #     density: 0.8
  #     weight: 0.1
  #     epsilon: 0.1
  - model: 'B:\24B\!models--TheDrummer--Cydonia-24B-v4.2.0'
    parameters:
      density: 0.8
      weight: 0.1
      epsilon: 0.1
  - model: 'B:\24B\!models--TheDrummer--Magidonia-24B-v4.2.0'
    parameters:
      density: 0.8
      weight: 0.1
      epsilon: 0.1
  - model: 'B:\24B\!models--ConicCat--Mistral-Small-3.2-AntiRep-24B'
    parameters:
      density: 0.8
      weight: 0.15
      epsilon: 0.1
  - model: 'B:\24B\!models--Undi95--MistralThinker-v1.1'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  - model: 'B:\24B\!models--CrucibleLab--M3.2-24B-Loki-V2'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  - model: 'B:\24B\!models--Darkhn--M3.2-24B-Animus-V7.1'
    parameters:
      density: 0.8
      weight: 0.1
      epsilon: 0.1
  - model: 'B:\24B\Morax-24B-v1'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  - model: 'B:\24B\!models--FlareRebellion--WeirdCompound-v1.7-24b'
    parameters:
      density: 0.8
      weight: 0.1
      epsilon: 0.1
  # - model: 'B:\24B\!models--aixonlab--Eurydice-24b-v3.5'
  #   parameters:
  #     density: 0.8
  #     weight: 0.08
  #     epsilon: 0.1
  - model: 'B:\24B\!models--allura-forge--ms32-final-TEXTONLY'
    parameters:
      density: 0.8
      weight: 0.15
      epsilon: 0.1
  - model: 'B:\24B\!models--Delta-Vector--Rei-24B-KTO'
    parameters:
      density: 0.8
      weight: 0.15
      epsilon: 0.1
  - model: 'B:\24B\!models--Doctor-Shotgun--MS3.2-24B-Magnum-Diamond'
    parameters:
      density: 0.8
      weight: 0.15
      epsilon: 0.1
  - model: 'B:\24B\!models--ReadyArt--MS3.2-The-Omega-Directive-24B-Unslop-v2.1'
    parameters:
      density: 0.8
      weight: 0.15
      epsilon: 0.1
  # - model: 'B:\24B\!models--Gryphe--Codex-24B-Small-3.2'
  #   parameters:
  #     density: 0.8
  #     weight: 0.1
  #     epsilon: 0.1
  # - model: 'B:\24B\!models--CrucibleLab--M3.2-24B-Loki-V1.3'
  #   parameters:
  #     density: 0.8
  #     weight: 0.15
  #     epsilon: 0.1
  - model: 'B:\24B\!models--arcee-ai--Arcee-Blitz'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  - model: 'B:\24B\!models--ArliAI--Mistral-Small-24B-ArliAI-RPMax-v1.4'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  # - model: 'B:\24B\!models--PocketDoc--Dans-PersonalityEngine-V1.3.0-24b'
  #   parameters:
  #     density: 0.8
  #     weight: 0.1
  #     epsilon: 0.1
  - model: 'B:\24B\!models--ReadyArt--Dark-Nexus-24B-v2.0'
    parameters:
      density: 0.8
      weight: 0.2
      epsilon: 0.1
  - model: 'B:\24B\!models--Darkhn--M3.2-24B-Animus-V5.1-Pro'
    parameters:
      density: 0.8
      weight: 0.15
      epsilon: 0.1
  - model: 'B:\24B\!models--dphn--Dolphin-Mistral-24B-Venice-Edition'
    parameters:
      density: 0.8
      weight: 0.01
      epsilon: 0.1
  - model: 'B:\24B\!models--TroyDoesAI--BlackSheep-24B'
    parameters:
      density: 0.8
      weight: 0.01
      epsilon: 0.1
  - model: 'B:\24B\!models--TheDrummer--Cydonia-24B-v2'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  - model: 'B:\24B\!models--PocketDoc--Dans-DangerousWinds-V1.1.1-24b'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  - model: 'B:\24B\!models--trashpanda-org--MS-24B-Instruct-Mullein-v0'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
  - model: 'B:\24B\!models--OddTheGreat--Circuitry_24B_V.3'
    parameters:
      density: 0.8
      weight: 0.1
      epsilon: 0.1
  - model: 'B:\24B\!models--spacewars123--Space-Wars-24B-v1.00a'
    parameters:
      density: 0.8
      weight: 0.02
      epsilon: 0.1
# Total Donors: 33  # NOTE(review): 32 entries carry weights; 33 only if the base is counted — confirm
# Total Weights: 3.3  # verified: the 32 weights above sum to exactly 3.30
# Seed: 420
merge_method: della
base_model: 'B:\24B\!models--anthracite-core--Mistral-Small-3.2-24B-Instruct-2506-Text-Only'
parameters:
  lambda: 1.0
  normalize: true  # key variable to test
  int8_mask: false
dtype: float32
out_dtype: bfloat16
tokenizer:
  source: base
  # chat_template: auto
name: ๐ Goetia-24B-v1.3
