---
library_name: transformers
tags:
- merge
- mergekit
- lazymergekit
base_model:
- liminerity/M7-7b
- Kukedlc/Neural4gsm8k
- Kukedlc/Jupiter-k-7B-slerp
- Kukedlc/NeuralMaxime-7B-slerp
- Kukedlc/NeuralFusion-7b-Dare-Ties
- Kukedlc/Neural-Krishna-Multiverse-7b-v3
- Kukedlc/NeuTrixOmniBe-DPO
- Kukedlc/NeuralSirKrishna-7b
pipeline_tag: text-generation
---
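This model is a DARE-TIES merge of the models listed above, built with mergekit (via LazyMergekit) using the configuration below. For each model, `weight` sets its relative contribution to the merge and `density` the fraction of its delta parameters (differences from the base model) that are retained; `normalize: true` rescales the weights to sum to 1, and `int8_mask: true` reduces memory use during merging.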
```yaml
models:
  - model: Kukedlc/NeuralSirKrishna-7b
    # no parameters necessary for base model
  - model: liminerity/M7-7b
    parameters:
      weight: 0.1
      density: 0.88
  - model: Kukedlc/Neural4gsm8k
    parameters:
      weight: 0.1
      density: 0.66
  - model: Kukedlc/Jupiter-k-7B-slerp
    parameters:
      weight: 0.1
      density: 0.66
  - model: Kukedlc/NeuralMaxime-7B-slerp
    parameters:
      weight: 0.1
      density: 0.44
  - model: Kukedlc/NeuralFusion-7b-Dare-Ties
    parameters:
      weight: 0.1
      density: 0.44
  - model: Kukedlc/Neural-Krishna-Multiverse-7b-v3
    parameters:
      weight: 0.2
      density: 0.66
  - model: Kukedlc/NeuTrixOmniBe-DPO
    parameters:
      weight: 0.1
      density: 0.33
  - model: Kukedlc/NeuralSirKrishna-7b
    parameters:
      weight: 0.2
      density: 0.88
merge_method: dare_ties
base_model: Kukedlc/NeuralSirKrishna-7b
parameters:
  int8_mask: true
  normalize: true
dtype: bfloat16
```
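To reproduce the merge, the configuration above can be saved to a file and run through mergekit. A minimal sketch using mergekit's Python API (the file name `config.yaml` and output path `./merged` are assumptions, not part of the card):

```python
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the DARE-TIES configuration shown above (assumed saved as config.yaml).
with open("config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Write the merged model to ./merged; set cuda=True if a GPU is available.
run_merge(
    merge_config,
    "./merged",
    options=MergeOptions(cuda=False, copy_tokenizer=True, lazy_unpickle=False),
)
```

The card does not include an inference example, so here is a minimal sketch with Hugging Face Transformers; the model ID is a placeholder for this repository's name, and the sampling parameters are illustrative:

```python
import torch
from transformers import AutoTokenizer, pipeline

model_id = "Kukedlc/<this-repository>"  # placeholder: replace with the actual repo ID

tokenizer = AutoTokenizer.from_pretrained(model_id)
messages = [{"role": "user", "content": "What is a large language model?"}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

generator = pipeline(
    "text-generation",
    model=model_id,
    torch_dtype=torch.bfloat16,  # matches the merge dtype above
    device_map="auto",
)
outputs = generator(
    prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_p=0.95
)
print(outputs[0]["generated_text"])
```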