| Column | Dtype | Range / values |
|---|---|---|
| index | int64 | 0 to 125k |
| modelId | stringlengths | 6 to 115 |
| config_model_type | stringlengths | 2 to 46 |
| config_architectures | stringlengths | 2 to 91 |
| config_vocab_size | stringlengths | 1 to 8 |
| config_torch_dtype | stringclasses | 7 values |
| config_transformers_version | stringclasses | 228 values |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | stringclasses | 47 values |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | stringclasses | 19 values |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | stringclasses | 3 values |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | stringclasses | 158 values |
| config_eos_token_id | stringclasses | 339 values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | stringclasses | 4 values |
| context_category | stringclasses | 4 values |
| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 1,400 | AMindToThink/gemma-2-2b-it_RMU_s100_a100_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,401 | AMindToThink/gemma-2-2b-it_RMU_s100_a100_layer15 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,402 | AMindToThink/gemma-2-2b-it_RMU_s100_a100_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,403 | AMindToThink/gemma-2-2b-it_RMU_s100_a1200_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,404 | AMindToThink/gemma-2-2b-it_RMU_s100_a1200_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,405 | AMindToThink/gemma-2-2b-it_RMU_s100_a1200_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,406 | AMindToThink/gemma-2-2b-it_RMU_s100_a300_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,407 | AMindToThink/gemma-2-2b-it_RMU_s100_a300_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,408 | AMindToThink/gemma-2-2b-it_RMU_s100_a500_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,409 | AMindToThink/gemma-2-2b-it_RMU_s100_a500_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,410 | AMindToThink/gemma-2-2b-it_RMU_s200_a100_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,411 | AMindToThink/gemma-2-2b-it_RMU_s200_a100_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,412 | AMindToThink/gemma-2-2b-it_RMU_s200_a100_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,413 | AMindToThink/gemma-2-2b-it_RMU_s200_a1200_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,414 | AMindToThink/gemma-2-2b-it_RMU_s200_a1200_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,415 | AMindToThink/gemma-2-2b-it_RMU_s200_a1200_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,416 | AMindToThink/gemma-2-2b-it_RMU_s200_a300_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,417 | AMindToThink/gemma-2-2b-it_RMU_s200_a300_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,418 | AMindToThink/gemma-2-2b-it_RMU_s200_a300_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,419 | AMindToThink/gemma-2-2b-it_RMU_s200_a500_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,420 | AMindToThink/gemma-2-2b-it_RMU_s200_a500_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,421 | AMindToThink/gemma-2-2b-it_RMU_s200_a500_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,422 | AMindToThink/gemma-2-2b-it_RMU_s400_a100_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,423 | AMindToThink/gemma-2-2b-it_RMU_s400_a100_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,424 | AMindToThink/gemma-2-2b-it_RMU_s400_a1200_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,425 | AMindToThink/gemma-2-2b-it_RMU_s400_a1200_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,426 | AMindToThink/gemma-2-2b-it_RMU_s400_a300_layer11 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,427 | AMindToThink/gemma-2-2b-it_RMU_s400_a300_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,428 | AMindToThink/gemma-2-2b-it_RMU_s400_a300_layer7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | [1, 107] | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,429 | AMindToThink/gemma-2-2b_RMU_cyber-forget-corpus_s100_a100_layer3 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.48.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 1,430 | AMompo/AICustomerTinyLlama-Full2 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.35.0.dev0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 1,431 | ANGKJ1995/GIST-small-Embedding-v0-checkthat-fitset-128 | bert | ["BertModel"] | 30522 | float32 | 4.40.2 | 384 | 1,536 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.021234 | small | short |
| 1,432 | ANGKJ1995/GIST-small-Embedding-v0-checkthat-fitset-256 | bert | ["BertModel"] | 30522 | float32 | 4.40.2 | 384 | 1,536 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.021234 | small | short |
| 1,433 | ANGKJ1995/distilroberta-base-checkthat | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.40.2 | 768 | 3,072 | 6 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.042467 | small | short |
| 1,434 | ANGKJ1995/my_awesome_model | distilbert | ["DistilBertForSequenceClassification"] | 30522 | null | 4.57.3 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 1,435 | ANHKKK/E2TP | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 1,436 | ANHKKK/E2TP00 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 1,437 | ANHKKK/E2TP11 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 1,438 | ANHKKK/E2TPASTE | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 1,439 | ANISH-j/finetuned_gptneo | gpt_neo | ["GPTNeoForCausalLM"] | 16384 | float32 | 4.47.1 | 768 | null | null | null | null | null | 0.2 | True | 512 | null | null | 0.02 | 1 | 1 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | short |
| 1,440 | ANT1P4T1C4/generate_charade_prompt | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.45.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 1,441 | APIPROJECT2/educational-random-chatter-classifier | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.44.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,442 | APJ23/MultiHeaded_Sentiment_Analysis_Model | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.28.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,443 | APLunch/ppo-Huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,444 | APLunch/rl_course_vizdoom_health_gathering_supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,445 | APMIC/DistilGPT-2-TPU-Fine-tune | gpt2 | ["GPT2LMHeadModel"] | 50257 | null | 4.28.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 1,446 | APP0001/rl_course_vizdoom_health_gathering_supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,447 | APP0001/sd-class-butterflies-32 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,448 | APaul/multi-class-full-data | bert | ["BertForTokenClassification"] | 197285 | null | 4.44.2 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 1,449 | APauli/Persuasive_language_in_pairs | deberta-v2 | ["DebertaV2ForSequenceClassification"] | 128100 | float32 | 4.40.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 1,450 | APudding/bigcode-starcoderbase-1b-finetuned-defect-detection | gpt_bigcode | ["GPTBigCodeForSequenceClassification"] | 49152 | float32 | 4.39.3 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 0 | 0 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 1,451 | ARAIcorp/sn29-0016m1-v002-beta | llama | ["LlamaForCausalLM"] | 100288 | bfloat16 | 4.44.0 | 4,096 | 18,752 | 48 | 32 | 8 | silu | 0 | False | 4,096 | 500,000 | 0.00001 | 0.02 | 100257 | 100257 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | medium |
| 1,452 | ARAIcorp/sn29-harley-v27-alfa | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.44.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128040 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 1,453 | ARC4N3/experiment-model-bertbase | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.38.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,454 | ARC4N3/experiment-model-bertweet | roberta | ["RobertaForSequenceClassification"] | 64001 | float32 | 4.38.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 130 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,455 | ARC4N3/roberta-hate-speech-olid | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.38.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,456 | ARCQUB/aya-vision-8b-4bit-BPMN | aya_vision | ["AyaVisionForConditionalGeneration"] | null | bfloat16 | 4.51.1 | null | null | null | null | null | null | null | null | null | null | null | null | 5 | 255001 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 1,457 | ARCQUB/llama3.2-vision-11B-4bit-BPMN | mllama | ["MllamaForConditionalGeneration"] | null | bfloat16 | 4.50.2 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | true | null | null | null |
| 1,458 | ARG-NCTU/detr-resnet-50-finetuned-600-epochs-GuardBoat-dataset | detr | ["DetrForObjectDetection"] | null | float32 | 4.46.3 | null | null | 6 | null | null | null | 0 | null | 1,024 | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 1,459 | ARG-NCTU/detr-resnet-50-finetuned-600-epochs-KS-Buoy-dataset | detr | ["DetrForObjectDetection"] | null | float32 | 4.46.3 | null | null | 6 | null | null | null | 0 | null | 1,024 | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 1,460 | ARG-NCTU/detr-resnet-50-finetuned-federated-600-epochs-Kaohsiung_Port_dataset_2024 | detr | ["DetrForObjectDetection"] | null | float32 | 4.52.4 | null | null | 6 | null | null | null | 0 | null | 1,024 | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 1,461 | ARKON-Scientist/ppo-Huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,462 | ARM-Development/Llama-3.1-8B-text-full-1.0 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.50.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 1,463 | AROOJNCBC/llama-3.2-3b-it-wheat-ChatBot | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.49.0 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
| 1,464 | ARTmOnAh/t5-small-finetuned-xsum | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.42.4 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 1,465 | ARUNNIVAAS7299/IMDB-Distiltbert-sentiment-classifier | distilbert | ["DistilBertForSequenceClassification"] | 30522 | null | 4.52.4 | null | null | null | null | null | null | 0.4 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 1,466 | ARahul2003/opt-125M-4bit-gptq | opt | ["OPTForCausalLM"] | 50272 | float16 | 4.33.2 | 768 | null | 12 | 12 | null | null | 0 | True | 2,048 | null | null | null | 2 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | medium |
| 1,467 | ARicci/PrithviTest2 | vit_mae | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,468 | ARtOrias11/swin-tiny-patch4-window7-224-finetuned-eurosat | swin | ["SwinForImageClassification"] | null | float32 | 4.42.3 | 768 | null | null | null | null | gelu | null | null | null | null | null | 0.02 | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,469 | ARtOrias11/videomae-base-finetuned-ucf101-subset | videomae | ["VideoMAEForVideoClassification"] | null | float32 | 4.42.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 1,470 | ASCCCCCCCC/PENGMENGJIE-finetuned-mix_info | bert | ["BertForSequenceClassification"] | 21128 | float32 | 4.20.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,471 | ASCCCCCCCC/PENGMENGJIE-finetuned-sms | bert | ["BertForSequenceClassification"] | 21128 | float32 | 4.16.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,472 | ASID/GPT-JT-6B-v1-float16 | gptj | ["GPTJForCausalLM"] | 50400 | float16 | 4.26.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | false | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 1,473 | ASaska/TAMASI-200-ft | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 1,474 | ASaska/TAMASI-ft | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 1,475 | ASaska/tamasi-5000-ft | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 1,476 | ASethi04/Qwen-Qwen2.5-7B-gsm8k-first-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,477 | ASethi04/Qwen-Qwen2.5-7B-gsm8k-second-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,478 | ASethi04/Qwen-Qwen2.5-7B-gsm8k-third-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,479 | ASethi04/Qwen-Qwen2.5-7B-hellaswag-first-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,480 | ASethi04/Qwen-Qwen2.5-7B-legalbench-first-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,481 | ASethi04/Qwen-Qwen2.5-7B-legalbench-second-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,482 | ASethi04/Qwen-Qwen2.5-7B-legalbench-third-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,483 | ASethi04/Qwen-Qwen2.5-7B-opc-sft-third-full-parameter-1-1e-05-num-epochs-1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.51.2 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 1,484 | ASethi04/google-gemma-2-9b-opc-sft-first-full-parameter-1-1e-05-num-epochs-1 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.51.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 1,485 | ASethi04/google-gemma-2-9b-opc-sft-second-full-parameter-1-1e-05-num-epochs-1 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.51.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 1,486 | ASethi04/google-gemma-2-9b-opc-sft-third-full-parameter-1-1e-05-num-epochs-1 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.51.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 1,487 | ASethi04/meta-llama-Llama-3.1-8B-pubmedqa-second-full-parameter-4-1e-05 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.51.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 1,488 | ASethi04/meta-llama-Llama-3.1-8B-pubmedqa-third-full-parameter-4-1e-05 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.51.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 1,489 | ASibenaler/llama-2-7b-miniguanaco | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 1,490 | ATSiem/sd-class-butterflies-32 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 1,491 | ATYOSHIDA/20231227_Q1_split_40_train_40_bert-base-japanese-v3 | bert | ["BertForSequenceClassification"] | 32768 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,492 | ATYOSHIDA/Q1_v1 | bert | ["BertForSequenceClassification"] | 32768 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,493 | ATYOSHIDA/Q1_v2_seed42_20231226 | bert | ["BertForSequenceClassification"] | 32768 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,494 | ATYOSHIDA/Q1_v2_seed42_test | bert | ["BertForSequenceClassification"] | 32768 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,495 | ATYOSHIDA/testBERT | bert | ["BertForSequenceClassification"] | 32768 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,496 | AUEB-NLP/ByT5_g2g | t5 | ["T5ForConditionalGeneration"] | 384 | float32 | 4.30.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 1,497 | AV10/distilbert-base-uncased-emotions-fintuned | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.28.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 1,498 | AVAhug/layoutxlm-finetuned-xfund-fr | layoutlmv2 | ["LayoutLMv2ForTokenClassification"] | 250002 | float32 | 4.40.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 1,499 | AWBHASS/distilbert-base-uncased-finetuned-cola | distilbert | ["DistilBertForSequenceClassification"] | 30522 | null | 4.35.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
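For working with a table of this shape programmatically, a sketch along the following lines would apply. The file name `model_configs.parquet` is a hypothetical local export of the rows above, since the source does not give a download path or dataset identifier.

```python
# Sketch: loading and slicing a table with these columns using pandas.
# "model_configs.parquet" is a hypothetical local export, not a real path.
import pandas as pd

df = pd.read_parquet("model_configs.parquet")

# Decoder-only Llama-family models that use grouped-query attention (GQA).
llama_gqa = df[df["is_llama_family"] & df["uses_gqa"]]
print(llama_gqa[["modelId", "config_num_attention_heads",
                 "config_num_key_value_heads", "config_gqa_ratio"]])

# Rough parameter-count distribution per size bucket across the catalogue.
print(df.groupby("size_category")["config_approx_params_billions"].describe())
```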