Dataset schema (40 columns; for numeric columns the range is min to max, for string-length columns the length range, and for class columns the number of distinct values):

| column | dtype | range / classes |
| --- | --- | --- |
| index | int64 | 0 to 125k |
| modelId | stringlengths | 6 to 115 |
| config_model_type | stringlengths | 2 to 46 |
| config_architectures | stringlengths | 2 to 91 |
| config_vocab_size | stringlengths | 1 to 8 |
| config_torch_dtype | stringclasses | 7 values |
| config_transformers_version | stringclasses | 228 values |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | stringclasses | 47 values |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | stringclasses | 19 values |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | stringclasses | 3 values |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | stringclasses | 158 values |
| config_eos_token_id | stringclasses | 339 values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | stringclasses | 4 values |
| context_category | stringclasses | 4 values |
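The schema is a flat per-model table: Hugging Face config.json fields under a config_ prefix plus derived family flags and categories. A minimal sketch of how such a slice could be inspected with pandas, assuming the data has been exported locally (model_configs.parquet is a hypothetical file name, not part of the source):

```python
import pandas as pd

# Hypothetical local export of this slice; not a file named in the dataset.
df = pd.read_parquet("model_configs.parquet")

# Most columns mirror config.json fields under a "config_" prefix.
print(df[["modelId", "config_model_type", "config_approx_params_billions"]].head())

# Numeric config fields are nullable (see the gpt2 and ppo-Huggy rows below),
# so drop NaNs before filtering on them.
gqa = df[df["config_gqa_ratio"].notna() & (df["config_gqa_ratio"] > 1)]
print(f"{len(gqa)} rows have more attention heads than key/value heads")
```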
Rows with index 13,700 through 13,799 (one row per model; null marks missing values):

| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 13,700 | Haary/TinyLlama-1.1B-id-Unsloth | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.41.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 13,701 | Haary/haryra-7b-llama2-id | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.37.2 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 13,702 | Habaznya/p_model | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.40.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,703 | Habib-Rehman/gemma-Code-Instruct-Finetune-test | gemma | ["GemmaForCausalLM"] | 256000 | float16 | 4.38.0 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 13,704 | Habib94/gpt3-chat-kidney-desease | gpt2 | ["GPT2LMHeadModel"] | 50259 | float32 | 4.45.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50257 | 50258 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 13,705 | Habiba-Eid/xlm-roberta-base-finetuned-panx-all | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.44.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,706 | Habiba-Eid/xlm-roberta-base-finetuned-panx-de | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.44.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,707 | Habiba-Eid/xlm-roberta-base-finetuned-panx-en | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.44.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,708 | Habiba-Eid/xlm-roberta-base-finetuned-panx-it | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.44.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,709 | Habiba9/mpnet-base-all-nli-triplet | mpnet | ["MPNetModel"] | 30527 | float32 | 4.41.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,710 | HabibaElbehairy/Fastest-Roberta-model | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.49.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,711 | HabibaElbehairy/codebert-multitask-similarity | roberta | ["RobertaForSequenceClassification"] | 50265 | null | null | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 13,712 | HabibiBear/TimmTry | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 13,713 | HaceHazretleri/distilhubert-finetuned-gtzan | hubert | ["HubertForSequenceClassification"] | 32 | float32 | 4.46.0.dev0 | 768 | 3,072 | 2 | 12 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.014156 | small | null |
| 13,714 | HaceHazretleri/ppo-Huggy_default | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 13,715 | HachiML/Bit-Llama2-jp-123M | llama | ["LlamaForCausalLM"] | 43176 | float32 | 4.36.2 | 1,024 | 2,048 | 4 | 32 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 32 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.050332 | small | short |
| 13,716 | HachiML/Bit-Llama2-jp-123M-test-1 | llama | ["LlamaForCausalLM"] | 43176 | float32 | 4.36.2 | 1,024 | 2,048 | 4 | 32 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 32 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.050332 | small | short |
| 13,717 | HachiML/Bit-Llama2-jp-127M-test-1 | llama | ["LlamaForCausalLM"] | 43176 | float32 | 4.36.2 | 768 | 1,536 | 12 | 12 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.084935 | small | short |
| 13,718 | HachiML/BitLlama2-jp-127M-optim-6 | bit_llama | ["BitLlamaForCausalLM"] | 43176 | float32 | 4.38.2 | 768 | 1,536 | 12 | 12 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.084935 | small | short |
| 13,719 | HachiML/Llama2-jp-123M | llama | ["LlamaForCausalLM"] | 43176 | float32 | 4.38.2 | 1,024 | 2,048 | 4 | 32 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 32 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.050332 | small | short |
| 13,720 | HachiML/Mistral-7B-Instruct-v0.3-dpo-lora | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.41.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 13,721 | HachiML/Mistral-7B-v0.3-m2-lora | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.41.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 13,722 | HachiML/Mists-7B-v01-simple-projector-trained | mists | ["MistsForConditionalGeneration"] | null | float32 | 4.42.3 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 13,723 | HachiML/QwQ-CoT-0.5B-JA-v1.0 | qwen2 | ["Qwen2ForCausalLM"] | 151669 | bfloat16 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,724 | HachiML/QwQ-CoT-0.5B-JA-v1.1 | qwen2 | ["Qwen2ForCausalLM"] | 151669 | bfloat16 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,725 | HachiML/SkillTree-Chat-LAB-Mistral-7B-v0.1 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 13,726 | HachiML/Swallow-MS-7b-instruct-v0.1 | mistral | ["MistralForCausalLM"] | 42800 | bfloat16 | 4.39.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | medium |
| 13,727 | HachiML/Swallow-MS-7b-v0.1-ChatSkill-LAB-Evo-v0.6 | mistral | ["MistralForCausalLM"] | 42800 | bfloat16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | medium |
| 13,728 | HachiML/Swallow-MS-7b-v0.1-ChatSkill-LAB-Evo-v0.7 | mistral | ["MistralForCausalLM"] | 42800 | bfloat16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | medium |
| 13,729 | HachiML/Swallow-MS-7b-v0.1-ChatSkill-LAB-Evo-v0.8 | mistral | ["MistralForCausalLM"] | 42800 | bfloat16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | medium |
| 13,730 | HachiML/Swallow-MS-7b-v0.1-ChatSkill-LAB-Evo-v0.9 | mistral | ["MistralForCausalLM"] | 42800 | bfloat16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | medium |
| 13,731 | HachiML/mistral_2x7b_v0.1 | mixtral | ["MixtralForCausalLM"] | 32000 | bfloat16 | 4.40.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | 2 | 2 | false | false | false | false | false | false | false | true | true | true | 6.442451 | medium | very_long |
| 13,732 | HachiML/myBit-Llama2-jp-127M-test-23 | bit_llama | ["BitLlamaForCausalLM"] | 43176 | float32 | 4.38.2 | 768 | 1,536 | 12 | 12 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.084935 | small | short |
| 13,733 | HachiML/myBit-Llama2-jp-127M-test-24 | bit_llama | ["BitLlamaForCausalLM"] | 43176 | float32 | 4.38.2 | 768 | 1,536 | 12 | 12 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.084935 | small | short |
| 13,734 | HachiML/myBit-Llama2-jp-127M-test-25 | bit_llama | ["BitLlamaForCausalLM"] | 43176 | float32 | 4.38.2 | 768 | 1,536 | 12 | 12 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.084935 | small | short |
| 13,735 | HachiML/myBit-Llama2-jp-127M-test-26 | bit_llama | ["BitLlamaForCausalLM"] | 43176 | float32 | 4.38.2 | 768 | 1,536 | 12 | 12 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.084935 | small | short |
| 13,736 | HachiML/myBit-Llama2-jp-127M-test-27 | bit_llama | ["BitLlamaForCausalLM"] | 43176 | float32 | 4.38.2 | 768 | 1,536 | 12 | 12 | 4 | silu | 0 | True | 1,024 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.084935 | small | short |
| 13,737 | Hachipo/Llama3-8B_MIFT-En_opencoder-edu_EnTrans | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,738 | Hachipo/Llama3-8B_MIFT-En_opencoder-edu_MIFT-ja | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,739 | Hachipo/Llama3-8B_MIFT-En_opencoder-edu_MIFT-ja_5000 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,740 | Hachipo/Llama3-8B_MIFT-En_opencoder-edu_PIFT-jaen | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,741 | Hachipo/OpenCoder-8B-Base-MIFT-en_newbase_v1 | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,742 | Hachipo/OpenCoder-8B-Base-MIFT-en_newbase_v1-MIFT-en_1000 | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,743 | Hachipo/OpenCoder-8B-Base_MIFT-En_opencoder-edu_CoTRFT_10000 | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,744 | Hachipo/OpenCoder-8B-Base_MIFT-En_opencoder-edu_EnTrans_1000 | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,745 | Hachipo/OpenCoder-8B-Base_MIFT-En_opencoder-edu_PIFT-enja | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,746 | Hachipo/OpenCoder-8B-Base_MIFT-En_opencoder-edu_PIFT-enja_1000 | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,747 | Hachipo/OpenCoder-8B-Base_MIFT-En_opencoder-edu_PIFT-jaen | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,748 | Hachipo/OpenCoder-8B-Base_MIFT-En_opencoder-edu_PIFT-jaen_1000 | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,749 | Hachipo/Qwen2.5-0.5B_MIFT-en_250 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,750 | Hachipo/Qwen2.5-0.5B_MIFT-en_8250 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,751 | Hachipo/Qwen2.5-0.5B_MIFT-ja_250 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,752 | Hachipo/Qwen2.5-0.5B_MIFT-ja_8250 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,753 | Hachipo/Qwen2.5-7B_MIFT-En_opencoder-edu_MIFT-en_MIFT-ja_10000 | llama | ["LlamaForCausalLM"] | 96640 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,754 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_MIFT-en_manywords_6000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,755 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_MIFT-en_manywords_8000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,756 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_MIFT-ja_manywords_2000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,757 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_MIFT-ja_manywords_6000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,758 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_MIFT-ja_manywords_8000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,759 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_PIFT-enja_manywords_2000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,760 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_PIFT-enja_manywords_6000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,761 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_PIFT-enja_manywords_8000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,762 | Hachipo/Qwen2.5-Coder-0.5B-Instruct_PIFT-jaen_manywords_8000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,763 | Hachipo/llama3-8B-Instruct_MIFT-en_manywords_4000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,764 | Hachipo/llama3-8B-Instruct_MIFT-en_manywords_6000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,765 | Hachipo/llama3-8B-Instruct_MIFT-en_manywords_8000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,766 | Hachipo/llama3-8B-Instruct_MIFT-ja_manywords_4000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,767 | Hachipo/llama3-8B-Instruct_MIFT-ja_manywords_6000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,768 | Hachipo/llama3-8B-Instruct_PIFT-enja_manywords_4000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,769 | Hachipo/llama3-8B-Instruct_PIFT-enja_manywords_6000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,770 | Hachipo/llama3-8B-Instruct_PIFT-jaen_manywords_6000 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 13,771 | Hachipo/qwen-0.5b-2epoch_inst | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,772 | Hachipo/qwen-0.5b-4epoch_inst | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,773 | Hachipo/qwen-0.5b-8epoch_inst | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,774 | Hachipo/qwen2.5-0.5B_MIFT-ja_manywords_1000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,775 | Hachipo/qwen2.5-0.5B_PIFT-enja_manywords_1000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,776 | Hachipo/qwen2.5-0.5B_PIFT-jaen_manywords_1000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,777 | Hachipo/qwen2.5-0.5B_educational_instruct_selec10000_pythonblock_dataselection_en | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,778 | Hachipo/qwen2.5-0.5B_educational_instruct_selec10000_pythonblock_dataselection_enja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,779 | Hachipo/qwen2.5-0.5B_educational_instruct_selec10000_pythonblock_dataselection_ja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,780 | Hachipo/qwen2.5-0.5B_educational_instruct_selec10000_pythonblock_dataselection_jaen | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,781 | Hachipo/qwen2.5-0.5B_educational_instruct_selec1000_pythonblock_en | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,782 | Hachipo/qwen2.5-0.5B_educational_instruct_selec1000_pythonblock_en_ja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,783 | Hachipo/qwen2.5-0.5B_educational_instruct_selec1000_pythonblock_ja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,784 | Hachipo/qwen2.5-0.5B_educational_instruct_selec1000_pythonblock_ja_en | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,785 | Hachipo/qwen2.5-0.5B_educational_instruct_selec5000_pythonblock_dataselection_en | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,786 | Hachipo/qwen2.5-0.5B_educational_instruct_selec5000_pythonblock_dataselection_enja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,787 | Hachipo/qwen2.5-0.5B_educational_instruct_selec5000_pythonblock_dataselection_ja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,788 | Hachipo/qwen2.5-0.5B_educational_instruct_selec5000_pythonblock_dataselection_jaen | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,789 | Hachipo/qwen2.5-0.5B_educational_instruct_top1000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,790 | Hachipo/qwen2.5-0.5B_educational_instruct_top10000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,791 | Hachipo/qwen2.5-0.5B_educational_instruct_top2000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,792 | Hachipo/qwen2.5-0.5B_educational_instruct_top3000_en-ja-2 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,793 | Hachipo/qwen2.5-0.5B_educational_instruct_top3000_en-ja_noncreated | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,794 | Hachipo/qwen2.5-0.5B_educational_instruct_top3000_pythonblock_ja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,795 | Hachipo/qwen2.5-0.5B_educational_instruct_top3000_pythonblock_ja-en | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,796 | Hachipo/qwen2.5-0.5B_educational_instruct_top6000 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.46.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,797 | Hachipo/qwen2.5-0.5B_educational_instruct_top_2000_pythonblock_en_ja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,798 | Hachipo/qwen2.5-0.5B_educational_instruct_top_3000_codeonly_en | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 13,799 | Hachipo/qwen2.5-0.5B_educational_instruct_top_5000_pythonblock_en_ja | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
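The derived columns are consistent with simple ratios of the raw config fields: in every row above, config_head_dimension equals config_hidden_size divided by config_num_attention_heads, and config_gqa_ratio equals config_num_attention_heads divided by config_num_key_value_heads, with uses_gqa set when that ratio exceeds 1. A small sketch of these relations (an inference from the rows shown, not a documented formula for this dataset):

```python
# Inferred from the rows above, not a documented formula for this dataset.
def head_dimension(hidden_size: float, num_attention_heads: float) -> float:
    # e.g. row 13,700 (TinyLlama): 2048 / 32 = 64, matching config_head_dimension
    return hidden_size / num_attention_heads

def gqa_ratio(num_attention_heads: float, num_key_value_heads: float) -> float:
    # e.g. row 13,700: 32 / 4 = 8; a ratio above 1 lines up with uses_gqa = true
    return num_attention_heads / num_key_value_heads

assert head_dimension(2048, 32) == 64   # row 13,700
assert gqa_ratio(14, 2) == 7            # Qwen2 rows, e.g. 13,723
assert gqa_ratio(32, 32) == 1           # row 13,701: plain MHA, uses_gqa = false
```

config_approx_params_billions and the size/context categories look like further derived estimates, but their exact formulas and cutoffs are not recoverable from this slice alone.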