Dataset schema (40 columns, with observed ranges or distinct-value counts):

| Column | Type | Values |
| --- | --- | --- |
| index | int64 | 0 to 125k |
| modelId | string | lengths 6 to 115 |
| config_model_type | string | lengths 2 to 46 |
| config_architectures | string | lengths 2 to 91 |
| config_vocab_size | string | lengths 1 to 8 |
| config_torch_dtype | string | 7 distinct values |
| config_transformers_version | string | 228 distinct values |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | string | 47 distinct values |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | string | 19 distinct values |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | string | 3 distinct values |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | string | 158 distinct values |
| config_eos_token_id | string | 339 distinct values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | string | 4 distinct values |
| context_category | string | 4 distinct values |
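Several of the columns above are derived from the raw `config.json` fields rather than read directly: `config_head_dimension`, `config_gqa_ratio`, and `config_approx_params_billions` follow simple relationships that are consistent with every row shown below where the underlying fields are populated. A minimal sketch of those relationships (the helper below is illustrative, not the dataset's actual build script):

```python
def derived_fields(hidden_size, num_attention_heads, num_key_value_heads, num_hidden_layers):
    """Recompute the derived columns from raw config values (illustrative only)."""
    head_dimension = hidden_size / num_attention_heads        # config_head_dimension
    gqa_ratio = num_attention_heads / num_key_value_heads     # config_gqa_ratio
    # Rough non-embedding parameter estimate: 12 * layers * hidden_size^2.
    approx_params_billions = 12 * num_hidden_layers * hidden_size**2 / 1e9
    return head_dimension, gqa_ratio, approx_params_billions

# Example: the Qwen2 0.5B rows below (hidden 896, 14 heads, 2 KV heads, 24 layers).
print(derived_fields(896, 14, 2, 24))  # (64.0, 7.0, 0.231211008)
```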
Rows 16,500 to 16,599:

| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 16,500 | JayHyeon/Qwen_0.5-IRPO_5e-5-100ep_1alp_0lam | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.48.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,501 | JayHyeon/Qwen_0.5-IRPO_5e-7-3ep_0.1alp_0.5bdpo_lam_0dpop_lam | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,502 | JayHyeon/Qwen_0.5-IRPO_5e-7-3ep_10alp_0.5bdpo_lam_0dpop_lam | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.53.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,503 | JayHyeon/Qwen_0.5-IRPO_5e-7-3ep_1alp_0lam | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,504 | JayHyeon/Qwen_0.5-MDPO_0.3_3e-6-3ep_0alp_0lam | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,505 | JayHyeon/Qwen_0.5-MDPO_0.7_5e-7-3ep_0alp_0lam | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.1 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,506 | JayHyeon/Qwen_0.5-ORPO-5e-7-3ep | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,507 | JayHyeon/Qwen_0.5-SimPO-1e-6-3ep | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,508 | JayHyeon/Qwen_0.5-SimPO-5e-7-3ep | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,509 | JayHyeon/Qwen_0.5-VDPO_3e-6-1ep_0vpo_const_exp | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,510 | JayHyeon/Qwen_0.5-VDPO_5e-7-1ep_10vpo_const | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,511 | JayHyeon/Qwen_0.5-VDPO_5e-7-1ep_1vpo_const | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,512 | JayHyeon/Qwen_0.5-VDPO_5e-7-1ep_30vpo_const | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,513 | JayHyeon/Qwen_0.5-VDPO_5e-7-1ep_3vpo_const | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,514 | JayHyeon/Qwen_0.5-VIPO_3e-6-1ep_0.3vpo_const_exp | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,515 | JayHyeon/Qwen_0.5-VIPO_5e-7-1ep_10vpo_const | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,516 | JayHyeon/Qwen_0.5-VIPO_5e-7-1ep_30vpo_const | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,517 | JayHyeon/Qwen_0.5-VIPO_5e-7-3ep_30vpo_const | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,518 | JayHyeon/Qwen_0.5-rDPO_5e-7-1ep_0vpo_const_0.1 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,519 | JayHyeon/Qwen_1.5B-BDPO_5e-7-3ep_0.3bdpo_lambda | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.50.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 16,520 | JayHyeon/Qwen_1.5B-math-IPO_5e-7_1.0vpo_constant-5ep | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.50.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 4,096 | 10,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | medium |
| 16,521 | JayHyeon/Qwen_math-MDPO_0.5_5e-7-1ep_0alp_0lam | qwen2 | ["Qwen2Model"] | 151936 | float32 | 4.47.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,522 | JayHyeon/pythia-2.8b-IPO_5e-7_1.0vpo_const-1ep | gpt_neox | ["GPTNeoXForCausalLM"] | 50304 | bfloat16 | 4.47.0.dev0 | 2,560 | 10,240 | 32 | 32 | null | gelu | 0 | False | 2,048 | 10,000 | null | 0.02 | 0 | 0 | false | 80 | null | false | null | null | false | false | true | false | false | false | false | false | false | true | 2.516582 | medium | medium |
| 16,523 | JayLee131/vqbet_pusht2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,524 | JayND/cnn_news_summary_model_trained_on_reduced_data | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,525 | JayNauti/whisper-small-hi | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.40.1 | null | null | 12 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 16,526 | JayRevolinsky/climategpt_7b_miniplatypus | llama | ["LlamaForCausalLM"] | 32128 | float16 | 4.45.2 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 16,527 | JayShah07/ppo-LunarLander-v1 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,528 | JayYH/whisper-small-ko | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.42.3 | null | null | 12 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 16,529 | JayaDevi/ma_mistral | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.37.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 16,530 | Jayanshu05/IOT | bart | ["BartForConditionalGeneration"] | 50265 | float32 | 4.51.3 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 16,531 | Jayant9928/derma_llava_v2 | llava_phi | ["LlavaPhiForCausalLM"] | 32064 | bfloat16 | 4.41.0.dev0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | medium |
| 16,532 | Jayantez/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,533 | Jayanth-V/Llama-2-7b-chat-finetune | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 16,534 | Jayanth231/codeparrot-ds | gpt2 | ["GPT2LMHeadModel"] | 50000 | float32 | 4.31.0.dev0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 0 | 0 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 16,535 | Jayantjivi/orpo_med_v5 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.37.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128256 | 128257 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,536 | Jayaprakash-JPVL/qa_model_finance_domain_fiqa_v6 | roberta | ["RobertaForQuestionAnswering"] | 50265 | float32 | 4.29.2 | 768 | 3,072 | 6 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.042467 | small | short |
| 16,537 | Jayavardhan7/dummy-model | camembert | ["CamembertForMaskedLM"] | 32005 | float32 | 4.47.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 5 | 6 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,538 | Jayem-11/afrispeech_small_A100 | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0 | null | null | 4 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 16,539 | Jayem-11/mistral_7b_malawi | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,540 | Jayem-11/wav2vec2-large-xlsr-swahili | wav2vec2 | ["Wav2Vec2ForCTC"] | 43 | float32 | 4.33.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | null |
| 16,541 | Jayem-11/whisper-small-swahili | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0 | null | null | 12 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 16,542 | Jayem-11/zephyr-7b-beta_assistant_v0.2_merged | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.37.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,543 | Jayesh1310/t5-base-custom | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 16,544 | JayhC/Llama-3-Soliloquy-8B-v2-8bpw-h8-exl2 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.40.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 24,576 | 4,000,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,545 | JayhC/Llama-3-Soliloquy-Max-70B-v1-3bpw-h6-exl2 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.40.1 | 8,192 | 28,672 | 80 | 64 | 8 | silu | 0 | True | 32,768 | 16,000,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 64.424509 | large | very_long |
| 16,546 | JayhC/Miqu-MS-70B-3.2bpw-h6-exl2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.39.2 | 8,192 | 28,672 | 80 | 64 | 8 | silu | 0 | True | 32,764 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 64.424509 | large | long |
| 16,547 | JayhC/NoromaidxOpenGPT4-1-4.5bpw-h6-exl2 | mixtral | ["MixtralForCausalLM"] | 32000 | float16 | 4.39.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | 8 | 2 | false | false | false | false | false | false | false | true | true | true | 6.442451 | medium | very_long |
| 16,548 | JayhC/NoromaidxOpenGPT4-2-4.5bpw-h6-exl2 | mixtral | ["MixtralForCausalLM"] | 32000 | bfloat16 | 4.39.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | 8 | 2 | false | false | false | false | false | false | false | true | true | true | 6.442451 | medium | very_long |
| 16,549 | JayhC/kuno-kunoichi-v1-DPO-v2-SLERP-7B-8bpw-h8-exl2 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | long |
| 16,550 | JayhC/kuno-kunoichi-v1-DPO-v2-SLERP-7B-8bpw-h8-exl2-rpcal | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | long |
| 16,551 | Jayicebear/gpt2-imdb-negative-v2 | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.31.0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 16,552 | Jayicebear/medcpt-article-encoder | bert | ["BertModel"] | 30522 | float32 | 4.43.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,553 | Jaylin3691/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-elusive_wild_snail | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,554 | Jaylin3691/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-lumbering_freckled_walrus | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,555 | Jaymax/llama3_FDA_qnabot-sft-test-push1 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.41.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,556 | Jaymin123321/investor_update | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.53.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,557 | Jayveersinh-Raj/Assamese_hatespeech_extension | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.31.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,558 | Jayveersinh-Raj/Bodo_hatespeech_extension | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.31.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,559 | Jayveersinh-Raj/Gujarati_hatespeech_extension | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.32.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,560 | Jayveersinh-Raj/Indo-Aryan-abuse-detection | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.31.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,561 | Jayveersinh-Raj/bengali_hatespeech_extension | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.31.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,562 | Jayveersinh-Raj/hindi-summarizer-small | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.34.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,563 | Jayveersinh-Raj/mixed_lang_context_hatespeech | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.32.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,564 | Jayveersinh-Raj/sinhala_hatespeech_extension | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.32.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,565 | JayxC/pegasus-samsum | pegasus | ["PegasusForConditionalGeneration"] | 96103 | float32 | 4.34.0 | null | null | 16 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 1 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 16,566 | Jazlynn0095/intern_study_L0_4 | internlm2 | ["InternLM2ForCausalLM"] | 92544 | bfloat16 | 4.41.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | True | 262,144 | 50,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 16,567 | Jbbok/LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,568 | Jbot/a2c-AntBulletEnv-v0 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,569 | Jbot/a2c-PandaReachDense-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,570 | Jbot/poca-SoccerTwos | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,571 | Jbot/ppo-PyramidsTraining | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,572 | Jbot/ppo-SnowballTarget | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,573 | Jbot/rl_course_vizdoom_health_gathering_supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,574 | Jccc-l/Qwen2.5-0.5B-GPTQ-Int8 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,575 | Jccc-l/Qwen2.5-0.5B-GPTQ-Int8_batch_2 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,576 | Jccqqqqq/clip-roberta-finetuned | vision-text-dual-encoder | ["VisionTextDualEncoderModel"] | null | float32 | 4.42.0.dev0 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,577 | Jccqqqqq/shudfie0 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,578 | JeSSSeL/pegasus-samsum | pegasus | ["PegasusForConditionalGeneration"] | 96103 | float32 | 4.51.1 | null | null | 16 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 1 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 16,579 | JeaHyung/bert-phishing-classifier_student | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.44.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 16,580 | Jean-Baptiste/lunar-lander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,581 | Jean-Baptiste/roberta-large-financial-news-topics-en | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.16.2 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 16,582 | JeanAbbiateci/autotrain-dqwq9-raj8z | mpnet | ["MPNetForSequenceClassification"] | 30527 | float32 | 4.46.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,583 | JeanFaootMaia/the_prince__niccolo | gpt2 | ["GPT2LMHeadModel"] | 50000 | float32 | 4.28.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 0 | 0 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 16,584 | JeanFaootMaia/vaz_de_camoes | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.28.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 0 | 0 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 16,585 | JeanM45/0ce6d2aa | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.40.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 16,586 | JeanM45/f76195c9 | phi | ["PhiForCausalLM"] | 51200 | float32 | 4.40.0 | 2,560 | 10,240 | 32 | 32 | 32 | gelu_new | 0 | True | 2,048 | 10,000 | null | 0.02 | 50256 | 50256 | false | 80 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 2.516582 | medium | medium |
| 16,587 | JeanPaulLePape/MasterCAMPDataetIAModelGoogleTrained | vit | ["ViTForImageClassification"] | null | float32 | 4.53.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,588 | JeanneNdzana/TP_LMN | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,589 | Jebadiah/Aria-7b-128k-v1 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,590 | Jebadiah/Aria-7b-128k-v2 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,591 | Jebadiah/Aria-7b-128k-v3 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,592 | Jebadiah/Aria-7b-128k-v4 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,593 | Jebadiah/Aria-CodeQwen-stone | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,594 | Jebadiah/Aria-Hermes-stone-l3-8b | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,595 | Jebadiah/Aria-Laylelemon-sand-stone | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,596 | Jebadiah/Aria-coder-7b | mistral | ["MistralForCausalLM"] | 32002 | bfloat16 | 4.49.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,597 | Jebadiah/Aria-dolphin-1m-sand-stone | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,598 | Jebadiah/Aria-dolphin-stone | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,599 | Jebadiah/Aria-flammen22x-stone-l3-8b | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
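A minimal sketch of loading and slicing records like the ones above with pandas, assuming they have been exported to a CSV file (`model_configs.csv` is a hypothetical filename, not one given in this excerpt):

```python
import pandas as pd

# Hypothetical export of the rows above; the real dataset file is not named here.
df = pd.read_csv("model_configs.csv")

# Slice by the string-valued columns from the schema.
small_qwen2 = df[(df["config_model_type"] == "qwen2") & (df["size_category"] == "small")]
very_long_context = df[df["context_category"] == "very_long"]

print(len(small_qwen2), len(very_long_context))
```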