| column | dtype | range / classes |
| --- | --- | --- |
| index | int64 | 0 to 125k |
| modelId | stringlengths | 6 to 115 |
| config_model_type | stringlengths | 2 to 46 |
| config_architectures | stringlengths | 2 to 91 |
| config_vocab_size | stringlengths | 1 to 8 |
| config_torch_dtype | stringclasses | 7 values |
| config_transformers_version | stringclasses | 228 values |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | stringclasses | 47 values |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | stringclasses | 19 values |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | stringclasses | 3 values |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | stringclasses | 158 values |
| config_eos_token_id | stringclasses | 339 values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | stringclasses | 4 values |
| context_category | stringclasses | 4 values |
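The column names above come straight from the schema. As a minimal sketch of how this table can be queried once exported, assuming a local CSV export (the filename `model_configs.csv` is hypothetical, not part of the dataset):

```python
import pandas as pd

# A minimal sketch, assuming the table has been exported to a local CSV;
# the filename "model_configs.csv" is an assumption.
df = pd.read_csv("model_configs.csv")

# Rows without a parsed config (e.g. the RL checkpoints below) have a null config_model_type.
with_config = df[df["config_model_type"].notna()]

# Example queries against columns from the schema above.
qwen2 = with_config[with_config["config_model_type"] == "qwen2"]
by_size = with_config.groupby("size_category")["config_approx_params_billions"].describe()

print(len(qwen2))
print(by_size)
```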
| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 16,000 | JUNEYEOB/FT_batch16_lyric_con_sent | roberta | ["RobertaForSequenceClassification"] | 32000 | float32 | 4.21.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 16,001 | JUNEYEOB/FT_batch32_lyric_con_sent | roberta | ["RobertaForSequenceClassification"] | 32000 | float32 | 4.21.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 16,002 | JUNEYEOB/FT_batch32_lyric_con_sent_origin | roberta | ["RobertaForSequenceClassification"] | 32000 | float32 | 4.21.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 16,003 | JUNEYEOB/FT_lcs_adafactor_lr1e_6 | roberta | ["RobertaForSequenceClassification"] | 32000 | float32 | 4.21.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 16,004 | JUNGU/a2c-AntBulletEnv-v0 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,005 | JV007/Qwen2.5-1.5B-Open-R1-Distill | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 16,006 | JVBrothers/krx-qwen-2.5-v1 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float16 | 4.46.1 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 16,007 | JVBrothers/krx-qwen-v2 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float16 | 4.46.1 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 16,008 | JVNH/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,009 | JVtest1/results | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.53.1 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 16,010 | JW17/L32-1B-UC-BatchSum-seed51 | llama | ["LlamaForSequenceClassification"] | 128256 | float32 | 4.48.0 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 16,011 | JW17/L32-3B-UC-BatchSum-seed51 | llama | ["LlamaForSequenceClassification"] | 128256 | float32 | 4.48.0 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
| 16,012 | JW17/Q25-1.5B-UC-BatchSum-seed90 | qwen2 | ["Qwen2ForSequenceClassification"] | 151936 | float32 | 4.48.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 16,013 | JW17/Q25-3B-UC-BatchSum-seed51 | qwen2 | ["Qwen2ForSequenceClassification"] | 151936 | float32 | 4.48.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 16,014 | JW17/Q25-3B-UC-BatchSum-seed78 | qwen2 | ["Qwen2ForSequenceClassification"] | 151936 | float32 | 4.48.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 16,015 | JW17/Q25-3B-UC-BatchSum-seed90 | qwen2 | ["Qwen2ForSequenceClassification"] | 151936 | float32 | 4.48.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 16,016 | JW17/Q3-4B-MOO-b1e1-ckpt1000 | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.52.4 | 2,560 | 9,728 | 36 | 32 | 8 | silu | 0 | False | 40,960 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 80 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.831155 | medium | very_long |
| 16,017 | JW17/Q3-4B-MOO-b1e1-ckpt1400 | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.52.4 | 2,560 | 9,728 | 36 | 32 | 8 | silu | 0 | False | 40,960 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 80 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.831155 | medium | very_long |
| 16,018 | JW17/Q3-4B-MOO-b1e1-ckpt500 | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.52.4 | 2,560 | 9,728 | 36 | 32 | 8 | silu | 0 | False | 40,960 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 80 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.831155 | medium | very_long |
| 16,019 | JW17/Q3-4B-MOO-b1e2-ckpt1000 | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.52.4 | 2,560 | 9,728 | 36 | 32 | 8 | silu | 0 | False | 40,960 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 80 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.831155 | medium | very_long |
| 16,020 | JW17/Q3-4B-MOO-b1e2-ckpt1400 | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.52.4 | 2,560 | 9,728 | 36 | 32 | 8 | silu | 0 | False | 40,960 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 80 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.831155 | medium | very_long |
| 16,021 | JW17/Q3-4B-MOO-b1e2-ckpt500 | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.52.4 | 2,560 | 9,728 | 36 | 32 | 8 | silu | 0 | False | 40,960 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 80 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.831155 | medium | very_long |
| 16,022 | JW17/Untied-49K-tokenized-20B-v0.1 | llama | ["LlamaForCausalLM"] | 50280 | float32 | 4.49.0 | 768 | 2,048 | 12 | 12 | 12 | silu | 0 | True | 2,048 | 100,000 | 0.00001 | 0.02 | 0 | 0 | false | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.084935 | small | medium |
| 16,023 | JY623/KoSOLAR-10.7B-merge-v3.0 | llama | ["LlamaForCausalLM"] | 40960 | float16 | 4.39.2 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | medium |
| 16,024 | JY623/KoSOLRA-10.7B-merge-v2.2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.39.2 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | medium |
| 16,025 | JYC333/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,026 | JYC333/ppo-PyramidsTraining | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,027 | JYL480/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,028 | JYumeko/my_awesome_billsum_model | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.27.4 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,029 | Ja-ck/KoMultiGen-General-Llama3-8B | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.40.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,030 | Ja-ck/Mistral-instruct-DPO-Y24-v2 | mistral | ["MistralForCausalLM"] | 32002 | bfloat16 | 4.35.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 32001 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,031 | Ja-ck/llama-2-13b-DPO-Y24-v2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.35.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 16,032 | Ja-ck/llama-2-13b-instruct-Y24-v1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.35.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 16,033 | Ja-ck/llama-2-13b-instruct-Y24-v2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.35.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 16,034 | JaJaYU/esm2_t12_35M_UR50D-pretrained-evaluation-new-data | esm | ["EsmForSequenceClassification"] | 33 | float32 | 4.35.2 | 480 | 1,920 | 12 | 20 | null | gelu | null | True | 1,026 | null | null | 0.02 | null | null | null | 24 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.033178 | small | short |
| 16,035 | JaJaYU/esm2_t12_35M_UR50D-pretrained-evaluation-new-data2 | esm | ["EsmForSequenceClassification"] | 33 | float32 | 4.35.2 | 320 | 1,280 | 6 | 20 | null | gelu | null | True | 1,026 | null | null | 0.02 | null | null | null | 16 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.007373 | small | short |
| 16,036 | JaJaYU/esm2_t12_35M_UR50D-pretrained-evaluation-new-data3 | esm | ["EsmForSequenceClassification"] | 33 | float32 | 4.35.2 | 480 | 1,920 | 12 | 20 | null | gelu | null | True | 1,026 | null | null | 0.02 | null | null | null | 24 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.033178 | small | short |
| 16,037 | JaJaYU/esm2_t12_35M_UR50D-pretrained-evaluation-new-data4 | esm | ["EsmForSequenceClassification"] | 33 | float32 | 4.35.2 | 320 | 1,280 | 6 | 20 | null | gelu | null | True | 1,026 | null | null | 0.02 | null | null | null | 16 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.007373 | small | short |
| 16,038 | Jac-Zac/thesis_test_donut | vision-encoder-decoder | ["VisionEncoderDecoderModel"] | null | float32 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,039 | Jack010/quadratic-solver | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.42.4 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,040 | Jack200133/whisper-large-v2-es-es | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.34.0.dev0 | null | null | 32 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 16,041 | Jack200133/whisper-small_es | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 12 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 16,042 | JackBAI/crate-base | crate | ["CrateForMaskedLM"] | 50265 | float32 | 4.32.0.dev0 | 768 | 0 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,043 | JackComming/chatglm2-6b-smartline | chatglm | ["ChatGLMForConditionalGeneration"] | 65024 | float16 | 4.30.2 | 4,096 | null | null | 32 | null | null | 0 | True | null | null | null | null | null | 2 | false | 128 | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 16,044 | JackFRost0703/R3GE_Quen2.5_7B | qwen2 | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,045 | JackFram/llama-160m | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.31.0.dev0 | 768 | 3,072 | 12 | 12 | 12 | silu | null | True | 2,048 | null | 0.000001 | 0.02 | 1 | 2 | false | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | false | 0.084935 | small | medium |
| 16,046 | JackFram/llama-160m-cbt-1 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.28.0.dev0 | 768 | 3,072 | 12 | 12 | null | silu | null | False | 2,048 | null | 0.000001 | 0.02 | 0 | 2 | false | 64 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 0.084935 | small | medium |
| 16,047 | JackFram/llama-160m-cbt-2 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.28.0.dev0 | 768 | 3,072 | 12 | 12 | null | silu | null | False | 2,048 | null | 0.000001 | 0.02 | 0 | 2 | false | 64 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 0.084935 | small | medium |
| 16,048 | JackFram/llama-160m-cbt-3 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.28.0.dev0 | 768 | 3,072 | 12 | 12 | null | silu | null | False | 2,048 | null | 0.000001 | 0.02 | 0 | 2 | false | 64 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 0.084935 | small | medium |
| 16,049 | JackFram/llama-160m-cbt-4 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.28.0.dev0 | 768 | 3,072 | 12 | 12 | null | silu | null | False | 2,048 | null | 0.000001 | 0.02 | 0 | 2 | false | 64 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 0.084935 | small | medium |
| 16,050 | JackLorAgain/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-hardy_alert_viper | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,051 | JackNotLor/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-waddling_energetic_boar | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,052 | JackWong0911/timesformer-base-finetuned-k400-finetuned-kinetic400-subset-epoch6-num_frame_10 | timesformer | ["TimesformerForVideoClassification"] | null | float32 | 4.38.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,053 | JackWong0911/timesformer-base-finetuned-k400-kinetic400-subset-epoch6real-num_frame_10_myViT2_more_data_b8 | timesformer | ["MyTimesformerForVideoClassification"] | null | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,054 | JackWong0911/vivit-b-16x2-kinetics400-finetuned-kinectic | vivit | ["VivitForVideoClassification"] | null | float32 | 4.38.1 | 768 | 3,072 | 12 | 12 | null | gelu_fast | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,055 | JackZhao1998/GPT2_ARTDrug | gpt2 | ["GPT2LMHeadModel"] | 50261 | float32 | 4.38.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 16,056 | Jackay/__ | vit | ["ViTForImageClassification"] | null | float32 | 4.30.0.dev0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,057 | Jackett/subject_classifier | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.16.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,058 | JackismyShephard/speecht5_tts-finetuned-nst-da | speecht5 | ["SpeechT5ForTextToSpeech"] | 81 | float32 | 4.37.2 | 768 | null | null | null | null | gelu | 0.1 | True | null | null | null | 0.02 | 0 | 2 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,059 | Jacklor/Qwen2.5-0.5B-Instruct-Gensyn-Swarm-graceful_finicky_rabbit | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,060 | Jacklu0831/procreate-diffusion-one-piece | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,061 | Jackmin108/a2c-AntBulletEnv-v0 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,062 | Jackmin108/a2c-PandaReachDense-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,063 | Jackmin108/poca-SoccerTwos | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,064 | Jackmin108/ppo1-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,065 | Jackmin108/rl_course_vizdoom_health_gathering_supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,066 | JacksonBrune/07a755e8-f651-4c59-999b-f7f80d36fdb7 | llama | ["LlamaForCausalLM"] | 46336 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 16,067 | JacksonBrune/0c9557f5-908c-43d8-8b08-4ce68b41e583 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 16,068 | JacksonBrune/0e987a56-97ef-4ebb-993c-62a8c77a762d | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,069 | JacksonBrune/1c754c9b-a667-4782-a0e4-ffdb4bf273c0 | llama | ["LlamaForCausalLM"] | 32001 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 16,070 | JacksonBrune/2021398f-9348-4adf-a28b-2ada71f5b7e4 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 16,071 | JacksonBrune/205f120f-ad0f-4439-96cf-25c6ad071c21 | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 3,200 | 8,640 | 26 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 100 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 3.19488 | medium | medium |
| 16,072 | JacksonBrune/2060194c-9586-4e05-b846-ba1c669c0cb7 | llama | ["LlamaForCausalLM"] | 37632 | float16 | 4.46.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 16,073 | JacksonBrune/214119ba-864b-470c-af5f-e7f99690bbc8 | bloom | ["BloomForCausalLM"] | 250880 | null | 4.46.0 | 1,024 | null | null | null | null | null | 0 | False | null | null | null | 0.02 | 1 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,074 | JacksonBrune/259316b6-6986-4175-8219-8f68b1d78bf7 | llama | ["LlamaForCausalLM"] | 49152 | bfloat16 | 4.46.0 | 960 | 2,560 | 32 | 15 | 5 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.353894 | small | medium |
| 16,075 | JacksonBrune/27e8d516-4181-4044-9769-b5a44ffdf4b7 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 16,076 | JacksonBrune/287a8465-9bfc-497d-bd49-61779d729702 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,077 | JacksonBrune/28928d35-8e63-4c59-aded-fa09074c4ace | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 16,078 | JacksonBrune/30557acd-a0e6-4a69-9bd7-327a3a60b800 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,079 | JacksonBrune/334f3d0d-a624-43a4-8502-9c59ac0be48e | gemma | ["GemmaForCausalLM"] | 256000 | float32 | 4.46.0 | 32 | 2 | 1 | 2 | 1 | gelu | 0 | False | 512 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 16 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.000012 | small | short |
| 16,080 | JacksonBrune/338aec7d-1a10-480b-a0e4-bc05240ed560 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 768 | 3,072 | 12 | 12 | 12 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.084935 | small | medium |
| 16,081 | JacksonBrune/35d494fe-27b6-4ca9-aa5f-026005f51eb0 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
| 16,082 | JacksonBrune/37368bd3-c8e0-4450-8105-f1afbdf85ce5 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,083 | JacksonBrune/38a8cc2a-2e85-4e28-a0fa-29d14e1e6fdc | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 16 | 64 | 2 | 4 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 0 | 2 | false | 4 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.000006 | small | medium |
| 16,084 | JacksonBrune/3a0ac261-8f7b-4ee8-8bad-79f8b8713365 | phi3 | ["Phi3ForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | medium |
| 16,085 | JacksonBrune/3afbae7e-57ce-4027-a005-0170c0a250c3 | gpt_neo | ["GPTNeoForCausalLM"] | 50257 | null | 4.46.0 | 768 | null | null | null | null | null | 0 | False | 2,048 | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | medium |
| 16,086 | JacksonBrune/3fc3e7d7-e130-4c28-89ea-70f37e073bea | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 16,087 | JacksonBrune/40e87e58-92db-4527-b00c-75587ae893a6 | mistral | ["MistralForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | true | false | false | true | 3.623879 | medium | medium |
| 16,088 | JacksonBrune/4b84a001-ad63-4291-8382-1647448aab21 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,089 | JacksonBrune/4c8aaf3e-5d06-420b-8c53-d5bf0ebee061 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 65,536 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | very_long |
| 16,090 | JacksonBrune/4ed21031-accc-43a8-8907-776a79e968b3 | mistral | ["MistralForCausalLM"] | 131072 | bfloat16 | 4.46.0 | 5,120 | 14,336 | 40 | 32 | 8 | silu | 0 | False | 1,024,000 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 160 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | very_long |
| 16,091 | JacksonBrune/52698a00-d1cb-4638-9e88-a0b75e57f62f | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 16,092 | JacksonBrune/5788bd53-e993-4d3f-a683-f9eeae76153b | gpt_neox | ["GPTNeoXForCausalLM"] | 50304 | float16 | 4.46.0 | 2,048 | 8,192 | 16 | 8 | null | gelu | 0 | False | 2,048 | 10,000 | null | 0.02 | 0 | 0 | false | 256 | null | false | null | null | false | false | true | false | false | false | false | false | false | true | 0.805306 | small | medium |
| 16,093 | JacksonBrune/5ac36028-b5a9-483a-9738-b31a525f0bbc | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 48 | 32 | 8 | silu | 0 | False | 65,536 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 9.663676 | large | very_long |
| 16,094 | JacksonBrune/65ef4413-4a54-47f3-ac0f-7fade2e64248 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,095 | JacksonBrune/6beac2fb-cca5-4ee8-8230-6a30f67116cc | mistral | ["MistralForCausalLM"] | 32002 | float16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,096 | JacksonBrune/6f19de1e-df77-4e1f-b05c-f2c8f60dac1a | falcon | ["FalconForCausalLM"] | 50304 | bfloat16 | 4.46.0 | 2,048 | null | 24 | 32 | null | null | 0 | False | null | null | null | 0.02 | 50256 | 50256 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 1.20796 | medium | null |
| 16,097 | JacksonBrune/742f3f40-84a4-4497-9586-ee4567525536 | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 16,098 | JacksonBrune/74ac6f54-df18-45af-bc6b-6dc84d97c706 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 16,099 | JacksonBrune/77268c54-d3e5-41a6-9ab7-55b156760100 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
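The derived columns in these rows are mutually consistent with the raw config fields: config_head_dimension matches config_hidden_size / config_num_attention_heads, config_gqa_ratio matches config_num_attention_heads / config_num_key_value_heads, and config_approx_params_billions matches the standard 12 * layers * hidden_size^2 dense-transformer estimate (for the Qwen2.5-1.5B rows, 12 * 28 * 1536^2 is approximately 0.7927B). The sketch below recomputes them as a consistency check; it illustrates the relationships visible in the rows above and is not the dataset's own build code.

```python
import pandas as pd

def recompute_derived(row: pd.Series) -> pd.Series:
    """Recompute the derived columns from the raw config fields.

    Relationships inferred from the rows above (a consistency check,
    not the dataset's original pipeline). Assumes numeric inputs.
    """
    hidden = row["config_hidden_size"]
    heads = row["config_num_attention_heads"]
    kv_heads = row["config_num_key_value_heads"]
    layers = row["config_num_hidden_layers"]

    head_dim = hidden / heads if heads else None
    gqa_ratio = heads / kv_heads if kv_heads else None
    # Standard dense-transformer estimate: 12 * L * d^2 parameters.
    approx_params_b = 12 * layers * hidden**2 / 1e9 if layers and hidden else None

    return pd.Series(
        {
            "head_dimension": head_dim,
            "gqa_ratio": gqa_ratio,
            "approx_params_billions": approx_params_b,
        }
    )

# Example: the Qwen2.5-1.5B rows -> head_dim 128, gqa_ratio 6, ~0.7927B params.
row = pd.Series(
    {
        "config_hidden_size": 1536,
        "config_num_attention_heads": 12,
        "config_num_key_value_heads": 2,
        "config_num_hidden_layers": 28,
    }
)
print(recompute_derived(row))
```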