Schema (column, dtype, displayed min–max or class count):

column | dtype | min–max / classes
index | int64 | 0 – 125k
modelId | stringlengths | 6 – 115
config_model_type | stringlengths | 2 – 46
config_architectures | stringlengths | 2 – 91
config_vocab_size | stringlengths | 1 – 8
config_torch_dtype | stringclasses | 7 values
config_transformers_version | stringclasses | 228 values
config_hidden_size | float64 | 0 – 18.4k
config_intermediate_size | float64 | 0 – 25.2M
config_num_hidden_layers | float64 | -1 – 260
config_num_attention_heads | stringclasses | 47 values
config_num_key_value_heads | float64 | 0 – 4.1k
config_hidden_act | stringclasses | 19 values
config_attention_dropout | float64 | 0 – 0.5
config_use_cache | stringclasses | 3 values
config_max_position_embeddings | float64 | -1 – 10.5M
config_rope_theta | float64 | 256 – 100B
config_rms_norm_eps | float64 | 0 – 0
config_initializer_range | float64 | 0 – 2
config_bos_token_id | stringclasses | 158 values
config_eos_token_id | stringclasses | 339 values
config_tie_word_embeddings | bool | 2 classes
config_head_dimension | float64 | 0.5 – 3.07k
config_gqa_ratio | float64 | 0.5 – 64
config_moe_enabled | bool | 1 class
config_n_routed_experts | float64 | 1 – 384
config_num_experts_per_tok | float64 | 1 – 64
is_llama_family | bool | 2 classes
is_bert_family | bool | 2 classes
is_gpt_family | bool | 2 classes
is_t5_family | bool | 2 classes
is_whisper_family | bool | 2 classes
is_deepseek_family | bool | 2 classes
is_mistral_family | bool | 2 classes
uses_moe | bool | 2 classes
uses_gqa | bool | 2 classes
uses_rope | bool | 2 classes
config_approx_params_billions | float64 | -0.2 – 606
size_category | stringclasses | 4 values
context_category | stringclasses | 4 values
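In the rows of this slice where the raw fields are non-null, the three derived numeric columns are consistent with simple functions of the raw config fields: config_head_dimension matches hidden_size / num_attention_heads, config_gqa_ratio matches num_attention_heads / num_key_value_heads, and config_approx_params_billions matches the common rough estimate 12 * num_hidden_layers * hidden_size^2 / 1e9 (the Llama-3-8B-shaped rows below give 12 * 32 * 4096^2 / 1e9 ≈ 6.442451, which is the value shown). The pandas sketch below recomputes them under that assumption; the formulas are inferred from the rows shown here, not taken from the dataset's documented build procedure.

```python
import pandas as pd

def add_derived_columns(df: pd.DataFrame) -> pd.DataFrame:
    """Recompute the derived columns from the raw config fields.

    Assumption: the formulas are inferred from the rows in this slice,
    not taken from the dataset's own build script.
    """
    out = df.copy()
    # num_attention_heads is a string column in the schema, so coerce it first.
    heads = pd.to_numeric(out["config_num_attention_heads"], errors="coerce")

    # Width of each attention head: d_model split across the query heads.
    out["config_head_dimension"] = out["config_hidden_size"] / heads
    # Grouped-query attention ratio: query heads per key/value head (1 = plain MHA).
    out["config_gqa_ratio"] = heads / out["config_num_key_value_heads"]
    # Rough parameter count: 12 * L * d_model^2, ignoring embeddings and biases.
    out["config_approx_params_billions"] = (
        12 * out["config_num_hidden_layers"] * out["config_hidden_size"] ** 2 / 1e9
    )
    return out

# Check against the Llama-3-8B-shaped rows in the table below
# (expected: head_dimension 128, gqa_ratio 4, approx_params_billions 6.442451).
row = pd.DataFrame({
    "config_hidden_size": [4096.0],
    "config_num_attention_heads": ["32"],
    "config_num_key_value_heads": [8.0],
    "config_num_hidden_layers": [32.0],
})
print(add_derived_columns(row).round(6).to_dict("records"))
```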
Rows 14,600 – 14,699 (one record per line; columns follow the schema above in order, with the config_ prefix omitted from the header):

index | modelId | model_type | architectures | vocab_size | torch_dtype | transformers_version | hidden_size | intermediate_size | num_hidden_layers | num_attention_heads | num_key_value_heads | hidden_act | attention_dropout | use_cache | max_position_embeddings | rope_theta | rms_norm_eps | initializer_range | bos_token_id | eos_token_id | tie_word_embeddings | head_dimension | gqa_ratio | moe_enabled | n_routed_experts | num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | approx_params_billions | size_category | context_category
14,600 | Heoni/Aguie_v0.1 | llama | ["LlamaForCausalLM"] | 56252 | bfloat16 | 4.33.1 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium
14,601 | Heoni/Llama-3-Open-Ko-8B-Aguie_ep1_proto | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,602 | Heoni/Llama-3-Open-Ko-8B-Aguie_ep2_proto | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,603 | Heoni/Llama-3-Open-Ko-8B-Aguie_ep3_proto | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,604 | Heoni/SNR_gemma-3-1b-Op_OR1r_20250518_10ep | gemma3_text | ["Gemma3ForCausalLM"] | 262144 | bfloat16 | 4.51.3 | 1,152 | 6,912 | 26 | 4 | 1 | null | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 2 | [1, 106] | null | 288 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.414056 | small | very_long
14,605 | Heoni/SNR_gemma-3-1b-Op_Or_20250518_10ep | gemma3_text | ["Gemma3ForCausalLM"] | 262144 | bfloat16 | 4.51.3 | 1,152 | 6,912 | 26 | 4 | 1 | null | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 2 | [1, 106] | null | 288 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.414056 | small | very_long
14,606 | Heoni/SNR_gemma-3-1b-Rp_RR1r_20250518_10ep | gemma3_text | ["Gemma3ForCausalLM"] | 262144 | bfloat16 | 4.51.3 | 1,152 | 6,912 | 26 | 4 | 1 | null | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 2 | [1, 106] | null | 288 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.414056 | small | very_long
14,607 | Heoni/SNR_gemma-3-4b-Op_OR1r_20250518_10ep | gemma3_text | ["Gemma3ForCausalLM"] | 262208 | bfloat16 | 4.51.3 | 2,560 | 10,240 | 34 | 8 | 4 | null | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 2 | 1 | null | 320 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.673869 | medium | very_long
14,608 | Heoni/SNR_gemma-3-4b-Op_Or_20250518_10ep | gemma3_text | ["Gemma3ForCausalLM"] | 262208 | bfloat16 | 4.51.3 | 2,560 | 10,240 | 34 | 8 | 4 | null | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 2 | 1 | null | 320 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.673869 | medium | very_long
14,609 | Heoni/SNR_gemma-3-4b-Rp_RR1r_20250518_10ep | gemma3_text | ["Gemma3ForCausalLM"] | 262208 | bfloat16 | 4.51.3 | 2,560 | 10,240 | 34 | 8 | 4 | null | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 2 | 1 | null | 320 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.673869 | medium | very_long
14,610 | Heoni/llama-3-KoEn-8b_sft_ep3_merged_red_teaming_20240614 | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,611 | Heoni/llama-3-KoEn-8b_sft_ep4_merged_red_teaming_20240614 | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,612 | Heoni/llama-3-KoEn-8b_sft_ep5_merged_red_teaming_20240614 | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,613 | Heoni/llama-3-KoEn-8b_sft_ep5_merged_red_teaming_20240623_final_data | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,614 | Heoni/old_v2_1_pt_ep1_sft_ep3_merged_model_based_on_llama3_20240717 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.40.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,615 | Heoni/wo_packing_v2_1_pt_ep1_sft_ep4_merged_model_based_on_llama3_our_inst_rt_lgk_aug_20240724 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,616 | Heoni/wo_packing_v2_1_pt_ep1_sft_ep5_merged_model_based_on_llama3_our_inst_rt_lgk_aug_20240724 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,617 | Heoni/wo_packing_v2_1_pt_ep1_sft_ept1_merged_model_based_on_llama3_our_inst_rt_lgk_aug_20240724 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,618 | Heoni/wo_packing_v2_1_pt_ep1_sft_ept2_merged_model_based_on_llama3_our_inst_rt_lgk_aug_20240724 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,619 | Heoni/wo_packing_v2_1_pt_ep1_sft_ept3_merged_model_based_on_llama3_our_inst_rt_lgk_aug_20240724 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,620 | HeraiHench/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,621 | Heralax/Augmental-Unholy-Chai-Submission | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.33.3 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium
14,622 | Heralax/Augmentoolkit-DataSpecialist-v0.1 | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.51.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,623 | Heralax/llama-Augmentoolkit-MilitaryModel-Demo-NotUndertrained | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.52.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,624 | Heralax/llama-gRPo-emotions-nothoughts | mistral | ["MistralForCausalLM"] | 32001 | float16 | 4.52.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,625 | Hercule66/glessi-yoyo_v2 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.44.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,626 | HereBeCode/deberta-fine-tuned-abortion-stance-detect | deberta | ["DebertaForSequenceClassification"] | 50265 | null | 4.20.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,627 | Herluberlu/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,628 | Hermann-Meier/Bert_Bourse_new | bert | ["BertForPreTraining"] | 42000 | null | 4.46.0.dev0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,629 | HermesPenn/athena_model | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,630 | Hermi2023/doc2query-ppo-msmarco-128-1024 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.29.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,631 | Hermi2023/doc2query-ppo-msmarco-128-2048 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.29.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,632 | Hermi2023/doc2query-ppo-msmarco-128-4096 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.29.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,633 | Herry443/LLaMA2-ko-7B-KNUT-v0.1 | llama | ["LlamaForCausalLM"] | 46336 | float16 | 4.35.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium
14,634 | Herry443/Mistral-7B-KNUT-ref | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,635 | Herry443/Mistral-7B-KNUT-ref-100-HellaSWAG | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,636 | Herry443/Mistral-7B-KNUT-ref-en-arc-1 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,637 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.1-final | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,638 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.3-final | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,639 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.5-final | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,640 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.6 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,641 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.6-final | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,642 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.7 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.39.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,643 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.8-final | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,644 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-0.9-final | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,645 | Herry443/Mistral-7B-KNUT-ref-en-mmlu-1-final | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,646 | Herry443/Mistral-7B-KNUT-v0.1-100-HellaSWAG | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,647 | Herry443/mistralai-Code-Instruct | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.35.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,648 | Herteg/03_SIS_100v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,649 | Herteg/03_SIS_250v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,650 | Herteg/03_SIS_500v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,651 | Herteg/03_SIS_50v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,652 | Herteg/03_SR_500v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,653 | Herteg/05_LWTR_100v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,654 | Herteg/05_LWTR_250v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,655 | Herteg/05_LWTR_500v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,656 | Herteg/05_LWTR_50v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,657 | Herteg/05_SIS_100v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,658 | Herteg/05_SIS_250v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,659 | Herteg/05_SIS_500v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,660 | Herteg/05_SIS_50v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,661 | Herteg/05_SR_100v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,662 | Herteg/05_SR_250v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,663 | Herteg/05_SR_500v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,664 | Herteg/05_SR_50v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,665 | Herteg/07_LWTR_100v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,666 | Herteg/07_LWTR_250v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,667 | Herteg/07_LWTR_50v0_NER_Model_3Epochs_AUGMENTED | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.27.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,668 | HeshamHaroon/llama-3-instruct-slerp-arabic | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.41.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long
14,669 | HeshamMamdouh/arabart-finetune-sum-v5-fine-tuned | mbart | ["MBartForConditionalGeneration"] | 50002 | float32 | 4.30.2 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,670 | HeshamMamdouh/arabart-finetune-sum-v6-fine-tuned | mbart | ["MBartForConditionalGeneration"] | 50002 | float32 | 4.30.2 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,671 | HeshamMamdouh/arabart-finetune-sum-v7-fine-tuned | mbart | ["MBartForConditionalGeneration"] | 50002 | float32 | 4.30.2 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,672 | HeshamMamdouh/bart-large-cnn-sum-fine-tuned | bart | ["BartForConditionalGeneration"] | 50264 | null | 4.30.2 | null | null | 12 | null | null | null | 0 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,673 | HeshamMamdouh/mbart-finetune-ar-xlsum-fine-tuned | mbart | ["MBartForConditionalGeneration"] | 250054 | float32 | 4.30.2 | null | null | 12 | null | null | null | 0 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,674 | HeshamMamdouh/mt5-small-v8-sum-fine-tuned | mbart | ["MBartForConditionalGeneration"] | 50002 | float32 | 4.30.2 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,675 | HeshamMamdouh/mt5-small-v9-sum-fine-tuned | mbart | ["MBartForConditionalGeneration"] | 50002 | float32 | 4.30.2 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,676 | Hevagog/ppo-PandaSlide-v3 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null
14,677 | Hevagog/tqc-PandaPickAndPlace-v3 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null
14,678 | Hevhs726kshv/oldEngTranslator | gpt_neo | ["GPTNeoForCausalLM"] | 50259 | float32 | 4.31.0.dev0 | 768 | null | null | null | null | null | 0 | True | 2,048 | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | medium
14,679 | HexHands/finishSTUDIO | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.30.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null
14,680 | HeydarS/Llama-2-7b-chat-hf_peft_v1 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.37.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium
14,681 | HeydarS/Llama-2-7b-chat-hf_peft_v2 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.37.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium
14,682 | HeydarS/flan-t5-base_no_peft_v25 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.37.0.dev0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,683 | Hezam/ArabicT5-49GB-small-classification-generation | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.26.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,684 | Hgatsadrtasd/attempt1 | bert | ["BertForQuestionAnswering"] | 28996 | float32 | 4.27.4 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,685 | Hgatsadrtasd/attempt2 | bert | ["BertForQuestionAnswering"] | 28996 | float32 | 4.27.4 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,686 | Hghanem96/Llama-2-7b-chat-finetuned | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium
14,687 | Hgkang00/FT-label-aug-consent-10 | bert | ["BertModel"] | 30522 | float32 | 4.41.1 | 384 | 1,536 | 6 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.010617 | small | short
14,688 | Hgkang00/FT-triple-2 | bert | ["BertModel"] | 30522 | float32 | 4.41.1 | 384 | 1,536 | 6 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.010617 | small | short
14,689 | Hhorse/document-generation | qwen2 | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,690 | Hi-Q/Qwen2.5-7B-Instruct_1129_k1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.44.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long
14,691 | Hi-Q/Qwen2.5-7B-Instruct_1202_k1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.44.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long
14,692 | Hi-Q/krx_1203_test_model_2 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.44.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long
14,693 | Hi-Q/krx_1203_test_model_3 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.44.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long
14,694 | Hi-Q/krx_gemma-2-9b-it_1025 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.44.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long
14,695 | Hi-Q/krx_gemma_9B_it_1122_4 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.44.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | false | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long
14,696 | Hi-Q/krx_gemma_9B_it_1122_5 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.44.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | false | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long
14,697 | Hi-Q/krx_gemma_9B_it_1125-3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.44.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | false | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long
14,698 | Hi-Q/krx_qwen-2.5-7b-it_1128 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.44.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long
14,699 | Hi-Q/krx_qwen_2-7b-it_1103 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.44.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long
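A minimal pandas sketch for working with a slice like this follows; the file name is a placeholder (an assumption), while the column names and categorical values are taken from the rows above.

```python
import pandas as pd

# Placeholder path; point this at an actual export of the table above.
df = pd.read_parquet("model_configs.parquet")

# Example filter: grouped-query-attention models with a long or very long
# context window, using the boolean and categorical columns shown above.
subset = df[df["uses_gqa"] & df["context_category"].isin(["long", "very_long"])]
print(
    subset[
        ["modelId", "config_model_type", "config_gqa_ratio",
         "config_max_position_embeddings", "config_approx_params_billions"]
    ].head()
)
```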