Dataset schema (40 columns, ~125k rows). For each column: dtype, plus the observed value range, string-length range, or number of distinct values as reported by the dataset viewer.

| Column | dtype | Range / distinct values |
| --- | --- | --- |
| index | int64 | 0 to 125k |
| modelId | string | lengths 6 to 115 |
| config_model_type | string | lengths 2 to 46 |
| config_architectures | string | lengths 2 to 91 |
| config_vocab_size | string | lengths 1 to 8 |
| config_torch_dtype | string | 7 values |
| config_transformers_version | string | 228 values |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | string | 47 values |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | string | 19 values |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | string | 3 values |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | string | 158 values |
| config_eos_token_id | string | 339 values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | string | 4 values |
| context_category | string | 4 values |
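The table mixes raw `config_*` fields scraped from each model's `config.json` with derived family flags (`is_*_family`, `uses_*`) and bucketed size/context categories. A minimal sketch of loading the export and sanity-checking it against this schema, assuming a local parquet file (the name `model_configs.parquet` is hypothetical; substitute the real export or dataset ID):

```python
import pandas as pd

# Hypothetical local export of this table; substitute the real file
# or Hugging Face dataset ID when loading.
df = pd.read_parquet("model_configs.parquet")

# dtypes and null rates should line up with the schema table above.
print(df.dtypes)
print(df.isna().mean().sort_values(ascending=False).head(10))

# Rows without a usable config.json (e.g. the diffusers pipelines in
# the preview below) leave config_model_type and most config_* fields null.
print(df["config_model_type"].value_counts().head(10))
```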
Preview rows 2,800 to 2,899:

| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 2,800 | AlokBharadwaj/sd-class-butterflies-32_ddpm_pipeline_with_ssim_loss | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 2,801 | AlokBharadwaj/sd-class-butterflies-32_using_mse_loss | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 2,802 | Alokprasad/llama-3-8b-Instruct-bnb-4bit-aiaustin-demo | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
| 2,803 | Aloksik/llama-2-7b-domain-tuned | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.35.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 2,804 | AlonCohen/RuSentNE-iter-3 | deberta-v2 | ["DebertaV2ForSequenceClassification"] | 128100 | float32 | 4.38.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 2,805 | Alorel/0414-sd-class-butterflies-64 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 2,806 | Alpaca618/audio-scam-detection | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.44.2 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 2,807 | Alpaca618/audio-scam-detection-v2 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.44.2 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 2,808 | Alpaca69B/phi2-2b-whatsapp-app-reviews-absa | phi | ["PhiForCausalLM"] | 51200 | float16 | 4.40.1 | 2,560 | 10,240 | 32 | 32 | 32 | gelu_new | 0 | True | 2,048 | 10,000 | null | 0.02 | 50256 | 50256 | false | 80 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 2.516582 | medium | medium |
| 2,809 | AlpacaAlice/t5-end2end-questions-generation | t5 | ["T5ForConditionalGeneration"] | 32101 | float32 | 4.28.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 2,810 | AlpachinoNLP/Baichuan-7B-Instruction | baichuan | ["BaiChuanForCausalLM"] | 64000 | float16 | 4.29.2 | 4,096 | 11,008 | 32 | 32 | null | silu | null | True | 4,096 | null | 0.000001 | 0.02 | 1 | 2 | false | 128 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 6.442451 | medium | medium |
| 2,811 | Alperens1/turna-gec-1 | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 2,812 | AlphaGaO/Deepseek-Coder-V2-Lite-Instruct-Fused-16E-5B-preview | deepseek_v2 | ["DeepseekV2ForCausalLM"] | 102400 | bfloat16 | 4.47.1 | 2,048 | 10,944 | 27 | 16 | 16 | silu | 0 | True | 163,840 | 10,000 | 0.000001 | 0.02 | 100000 | 100001 | false | 128 | 1 | false | 64 | 6 | false | false | false | false | false | true | false | true | false | true | 1.358954 | medium | very_long |
| 2,813 | AlphaGaO/Deepseek-Coder-V2-Lite-Instruct-Fused-2E-2B-preview-Unhealed | deepseek_v2 | ["DeepseekV2ForCausalLM"] | 102400 | bfloat16 | 4.47.1 | 2,048 | 10,944 | 27 | 16 | 16 | silu | 0 | True | 163,840 | 10,000 | 0.000001 | 0.02 | 100000 | 100001 | false | 128 | 1 | false | 64 | 6 | false | false | false | false | false | true | false | true | false | true | 1.358954 | medium | very_long |
| 2,814 | AlphaGaO/Deepseek-Coder-V2-Lite-Instruct-Fused-4E-2_5B-preview | deepseek_v2 | ["DeepseekV2ForCausalLM"] | 102400 | bfloat16 | 4.47.1 | 2,048 | 10,944 | 27 | 16 | 16 | silu | 0 | True | 163,840 | 10,000 | 0.000001 | 0.02 | 100000 | 100001 | false | 128 | 1 | false | 64 | 6 | false | false | false | false | false | true | false | true | false | true | 1.358954 | medium | very_long |
| 2,815 | AlphaGaO/Deepseek-Coder-V2-Lite-Instruct-Fused-4E-2_5B-preview-Unhealed | deepseek_v2 | ["DeepseekV2ForCausalLM"] | 102400 | bfloat16 | 4.47.1 | 2,048 | 10,944 | 27 | 16 | 16 | silu | 0 | True | 163,840 | 10,000 | 0.000001 | 0.02 | 100000 | 100001 | false | 128 | 1 | false | 64 | 6 | false | false | false | false | false | true | false | true | false | true | 1.358954 | medium | very_long |
| 2,816 | AlphaGaO/Deepseek-Coder-V2-Lite-Instruct-Fused-8E-3B-preview | deepseek_v2 | ["DeepseekV2ForCausalLM"] | 102400 | bfloat16 | 4.47.1 | 2,048 | 10,944 | 27 | 16 | 16 | silu | 0 | True | 163,840 | 10,000 | 0.000001 | 0.02 | 100000 | 100001 | false | 128 | 1 | false | 64 | 6 | false | false | false | false | false | true | false | true | false | true | 1.358954 | medium | very_long |
| 2,817 | AlphaNinja27/wav2vec2-large-xls-r-300m-panjabi-colab | wav2vec2 | ["Wav2Vec2ForCTC"] | 64 | float32 | 4.11.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | 0 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | null |
| 2,818 | AlphaRandy/WhelanBot | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.41.0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 2,819 | AlphaRandy/WhelanChatBot | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.41.0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 2,820 | AlphaZetta/finetuning-sentiment-model-3000-samples | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.19.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 2,821 | AlphaZetta/finetuning-sentiment-model-finefood | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.19.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 2,822 | Alphacode-AI/AlphaMist7B-slr-v1 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.38.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 2,823 | Alphacode-AI/AlphaMist7B-slr-v2 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.38.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 2,824 | Alphacode-AI/AlphaMist7B-slr-v3 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.38.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 2,825 | Alphacode-AI/AlphaMist7B-slr-v4-slow | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.37.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 2,826 | Alphatao/00aff3f4-8870-46e2-89af-c990bc1cadd4 | llama | ["LlamaForCausalLM"] | 32001 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 2,827 | Alphatao/02a997a8-f130-435d-91f6-5c0150428bed | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 2,828 | Alphatao/03f640f1-7d7d-40be-98fe-9441de6cce1a | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 2,829 | Alphatao/0419f7aa-3f7a-481c-a936-45cbb3185803 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 2,830 | Alphatao/08aa7501-b5ba-4336-bddc-a2396da2289a | llama | ["LlamaForCausalLM"] | 46336 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 2,831 | Alphatao/08abc274-88b9-4abc-ac3d-92c20a2045e1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 65,536 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | very_long |
| 2,832 | Alphatao/0933d393-e86a-4fcf-9f39-300d0ed0f91d | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 2,833 | Alphatao/0958c411-90d3-40a5-a490-6afc6d4097e6 | mistral | ["MistralForCausalLM"] | 32032 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 2,834 | Alphatao/0bcc6ef6-ca67-4163-90ce-285b8209e7fc | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 2,835 | Alphatao/0c09debd-ecdd-434d-96dd-2e212f69950a | mistral | ["MistralForCausalLM"] | 131072 | bfloat16 | 4.46.0 | 5,120 | 14,336 | 40 | 32 | 8 | silu | 0 | False | 131,072 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 160 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | very_long |
| 2,836 | Alphatao/0c36c7c1-fd7f-46d4-9ca3-caedcf8fa82b | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 2,837 | Alphatao/0d4f449b-d698-4462-8fb8-d0004afee73d | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.51.3 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,838 | Alphatao/0fafe251-0be2-4a05-adea-ee1c62ea98fc | falcon | ["FalconForCausalLM"] | 50304 | bfloat16 | 4.46.0 | 2,048 | null | 24 | 32 | null | null | 0 | False | null | null | null | 0.02 | 50256 | 50256 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 1.20796 | medium | null |
| 2,839 | Alphatao/10fc8940-78f2-4977-ac00-7b2cba146a90 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 2,840 | Alphatao/11440172-6d70-4ed6-845c-530ac8d5414f | falcon | ["FalconForCausalLM"] | 65024 | float32 | 4.46.0 | 32 | null | 2 | 2 | null | null | 0 | False | 2,048 | 10,000 | null | 0.02 | null | 11 | null | 16 | null | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | medium |
| 2,841 | Alphatao/11a99da6-e19a-4dc7-82a1-029204f1b39e | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 2,842 | Alphatao/11e268cf-0f18-4821-9f37-2ca530141172 | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.51.3 | 5,120 | 17,408 | 40 | 40 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 5 | false | null | null | false | false | false | false | false | false | false | false | true | true | 12.582912 | large | very_long |
| 2,843 | Alphatao/1200b65c-885d-4242-9990-175b9dc6e085 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 768 | 3,072 | 12 | 12 | 12 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.084935 | small | medium |
| 2,844 | Alphatao/12e9cd75-79d8-4e24-8278-2edb1aef1492 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,845 | Alphatao/1572bed8-2f8c-49ac-a263-1ef05ba7f37b | mistral | ["MistralForCausalLM"] | 32002 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 2,846 | Alphatao/16c1d32b-edf2-4ace-b7bf-71beb96e49f3 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 4,096 | 10,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | medium |
| 2,847 | Alphatao/17601283-8ff6-4150-b0e7-88c52d727bb0 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.51.3 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 2,848 | Alphatao/18296fec-2f76-47ef-bb0a-88e0f2654578 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 2,849 | Alphatao/196dc12d-39ef-488b-b644-e5cf3412fb73 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 16 | 64 | 2 | 4 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 0 | 2 | false | 4 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.000006 | small | medium |
| 2,850 | Alphatao/19770576-fb74-4d69-8194-f4067376e2d9 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 5,120 | 13,824 | 48 | 40 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 5 | false | null | null | false | false | false | false | false | false | false | false | true | true | 15.099494 | large | very_long |
| 2,851 | Alphatao/198e1d62-fda2-4d74-be93-83eff417e097 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 2,852 | Alphatao/1ac3e33c-02cc-4610-b34b-dded37a658c2 | falcon | ["FalconForCausalLM"] | 50304 | bfloat16 | 4.46.0 | 2,048 | null | 24 | 32 | null | null | 0 | False | null | null | null | 0.02 | 50256 | 50256 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 1.20796 | medium | null |
| 2,853 | Alphatao/1bcc6ec7-7c88-4f35-9e2a-e7f34825d853 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.46.0 | 16 | 64 | 2 | 4 | 4 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 4 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.000006 | small | very_long |
| 2,854 | Alphatao/1ef00755-13ff-40f0-8362-77fa5f1c470d | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 2,855 | Alphatao/2081438a-078a-42bf-980c-2dd8069065f5 | phi3 | ["Phi3ForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | medium |
| 2,856 | Alphatao/20e470e9-9fc9-450e-b276-91f7c69ea743 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.51.3 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,857 | Alphatao/225892d5-fc4b-4e5d-982b-9b9758f1d465 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 2,858 | Alphatao/22727b12-d31f-4226-a2a9-02abb2c6e793 | bloom | ["BloomForCausalLM"] | 250880 | null | 4.46.0 | 1,024 | null | null | null | null | null | 0 | False | null | null | null | 0.02 | 1 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 2,859 | Alphatao/22fc1f97-d307-49a9-b93a-5f765f9aee9d | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 2,860 | Alphatao/241c5ab9-7636-45af-9161-fe8c0b21355c | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 2,861 | Alphatao/2458f818-7459-4a93-a501-6d419378fe2c | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 16 | 64 | 2 | 4 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 0 | 2 | false | 4 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.000006 | small | medium |
| 2,862 | Alphatao/249af8c9-6851-41fb-8aa5-fc1055b7a552 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 2,863 | Alphatao/2562a60b-fe04-4520-8dab-1683bd77f3a0 | mistral | ["MistralForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 5,120 | 17,920 | 40 | 40 | 10 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | medium |
| 2,864 | Alphatao/2698a0db-bfe6-4e30-b5f0-58402ad13657 | gpt_neox | ["GPTNeoXForCausalLM"] | 50304 | float32 | 4.51.3 | 512 | 2,048 | 6 | 8 | null | gelu | 0 | False | 2,048 | 10,000 | null | 0.02 | 0 | 0 | false | 64 | null | false | null | null | false | false | true | false | false | false | false | false | false | true | 0.018874 | small | medium |
| 2,865 | Alphatao/27a551d1-a7c0-4897-bb7b-65c20f1195de | mistral | ["MistralForCausalLM"] | 32002 | float32 | 4.51.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | long |
| 2,866 | Alphatao/293e4a57-d575-48f4-92a8-69fe91273569 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.51.3 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,867 | Alphatao/29782ae8-4df1-471d-8738-f2ed5142c158 | mistral | ["MistralForCausalLM"] | 32002 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 2,868 | Alphatao/2aa189bb-435b-4119-b1f2-d6be4664ac5f | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 3,200 | 8,640 | 26 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 100 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 3.19488 | medium | medium |
| 2,869 | Alphatao/2b983e1e-ebac-4043-92f9-a78f6570351f | llama | ["LlamaForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | False | 131,072 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 3.623879 | medium | very_long |
| 2,870 | Alphatao/2bd7a580-4152-4b7c-bebe-ce07d33d1236 | llama | ["LlamaForCausalLM"] | 46336 | bfloat16 | 4.51.3 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 2,871 | Alphatao/2e0948d6-4b7e-41d6-a2b0-acf2767be028 | llama | ["LlamaForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | False | 131,072 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 3.623879 | medium | very_long |
| 2,872 | Alphatao/2e1b7483-03a1-498a-8551-07488877dfd0 | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.46.0 | 16 | 64 | 2 | 4 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 0 | 2 | false | 4 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.000006 | small | medium |
| 2,873 | Alphatao/2ea1ae9d-34e6-4b6b-a4a0-167a6d35d1fd | llama | ["LlamaForCausalLM"] | 128257 | bfloat16 | 4.51.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128003 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 2,874 | Alphatao/2fe2cdf6-f516-4bc8-b3fb-525fe7322ef4 | gptj | ["GPTJForCausalLM"] | 50401 | float32 | 4.46.0 | null | null | null | null | null | null | null | False | null | null | null | 0.02 | 50256 | 50256 | false | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 2,875 | Alphatao/320081e7-b5fe-4e92-be2a-5495b48b0e48 | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.46.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | False | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 2,876 | Alphatao/32401fc5-3903-472d-a4fc-931658e0b867 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.51.3 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,877 | Alphatao/33a67d72-294d-41fe-ac5a-72d9bec0fe73 | llama | ["LlamaForCausalLM"] | 49153 | bfloat16 | 4.46.0 | 576 | 1,536 | 30 | 9 | 3 | silu | 0 | False | 8,192 | 100,000 | 0.00001 | 0.041667 | 0 | 0 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.119439 | small | long |
| 2,878 | Alphatao/340f3449-b6b8-4098-98b2-8a2e0fc24f48 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.51.3 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 2,879 | Alphatao/34d6a6ba-7678-40c2-a66d-869b877274d0 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.46.0 | 768 | 3,072 | 12 | 12 | 12 | silu | 0 | False | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 0.084935 | small | medium |
| 2,880 | Alphatao/35637418-8679-494e-94bc-ad19a92ddc90 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 2,881 | Alphatao/35d415d1-f59c-4c09-95be-7d6a5a982eb7 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 5,120 | 13,824 | 48 | 40 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 5 | false | null | null | false | false | false | false | false | false | false | false | true | true | 15.099494 | large | very_long |
| 2,882 | Alphatao/364eddac-dd3f-4796-ae45-7cb409bf76c9 | phi3 | ["Phi3ForCausalLM"] | 32064 | bfloat16 | 4.46.0 | 5,120 | 17,920 | 40 | 40 | 10 | silu | 0 | False | 131,072 | 10,000 | 0.00001 | 0.02 | 1 | 32011 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 12.582912 | large | very_long |
| 2,883 | Alphatao/37fb3c3c-0d51-45cc-88ad-367b16fb033e | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.46.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
| 2,884 | Alphatao/38cb98ae-0ffb-48ac-ad87-f7ed000a43cb | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,885 | Alphatao/3929e0de-46be-4914-930d-187c2948a97d | phi | ["PhiForCausalLM"] | 1025 | float32 | 4.46.0 | 32 | 37 | 2 | 4 | 4 | gelu | 0 | False | 1,024 | 10,000 | null | 0.02 | 0 | 0 | false | 8 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.000025 | small | short |
| 2,886 | Alphatao/3937545e-747c-449c-a489-e67e0542cd28 | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 2,887 | Alphatao/3ab7b1fc-9eff-4121-8684-8a2d421754c3 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
| 2,888 | Alphatao/3d7b69f8-f0bf-457d-98a1-fcd1e96750b7 | bloom | ["BloomForCausalLM"] | 250880 | float32 | 4.51.3 | 1,024 | null | null | null | null | null | 0 | False | null | null | null | 0.02 | 1 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 2,889 | Alphatao/3ddc5d8a-29f8-4ae0-972a-90b91294d047 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 2,890 | Alphatao/3e4cd70a-b068-4e21-9938-5b50ea85eb6b | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,891 | Alphatao/4016ce59-35ce-4393-aae3-8f8c29ee2ba5 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,892 | Alphatao/408783fd-13a4-4aea-97e1-2e76bfb1afc4 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 2,893 | Alphatao/42152b55-76a3-40d1-9cfa-1e54395e4c62 | llama | ["LlamaForCausalLM"] | 49153 | float32 | 4.51.3 | 2,048 | 8,192 | 24 | 32 | 32 | silu | 0 | False | 8,192 | 130,000 | 0.00001 | 0.02 | 0 | 0 | true | 64 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 1.20796 | medium | long |
| 2,894 | Alphatao/429bbbf9-b789-4c75-87d4-52ca48f73ec8 | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.46.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 2,895 | Alphatao/462f49d7-2146-4f55-bd6a-2a616a953bb1 | gptj | ["GPTJForCausalLM"] | 50401 | float32 | 4.46.0 | null | null | null | null | null | null | null | False | null | null | null | 0.02 | 50256 | 50256 | false | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 2,896 | Alphatao/47026014-f1fa-422c-996a-ec264e4643e7 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
| 2,897 | Alphatao/4b69da7d-0f13-417a-885c-d0a6fe2c2e12 | gemma2 | ["Gemma2ForCausalLM"] | 256000 | bfloat16 | 4.46.0 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 2,898 | Alphatao/4bb1fb3b-9da9-4331-b85b-36f0c5175da8 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.46.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 2,899 | Alphatao/4bd0e166-5b61-4d08-a398-85127d66fdcf | falcon | ["FalconForCausalLM"] | 65024 | bfloat16 | 4.46.0 | 4,544 | null | 32 | 71 | null | null | 0 | False | null | null | null | 0.02 | null | 11 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 7.928807 | large | null |
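Across every fully populated row above, the derived columns are consistent with simple functions of the raw config fields: `config_head_dimension` = `hidden_size / num_attention_heads`, `config_gqa_ratio` = `num_attention_heads / num_key_value_heads` (with `uses_gqa` true when the ratio exceeds 1), and `config_approx_params_billions` matches the standard 12·L·d² dense-transformer estimate (12 × 32 × 4096² ≈ 6.442451B for the Llama-2-7B-shaped rows). A sketch of plausible derivations follows; the size and context bucket thresholds are inferred from this page of rows, not taken from the dataset's own code, and the fourth tier of each category is not visible here:

```python
def derive(hidden_size: int, num_layers: int, num_heads: int,
           num_kv_heads: int, max_position_embeddings: int) -> dict:
    """Recompute the derived columns from raw config fields.

    The 12*L*d^2 estimate checks out against the rows above; the
    category thresholds are assumptions inferred from this sample.
    """
    head_dim = hidden_size / num_heads                 # 4096 / 32 -> 128
    gqa_ratio = num_heads / num_kv_heads               # 32 / 8 -> 4
    params_b = 12 * num_layers * hidden_size**2 / 1e9  # 12*32*4096^2 -> 6.442451
    size = ("small" if params_b < 1 else
            "medium" if params_b < 7 else "large")     # boundaries guessed from sample
    ctx = ("short" if max_position_embeddings <= 1024 else
           "medium" if max_position_embeddings <= 4096 else
           "long" if max_position_embeddings <= 16384 else "very_long")
    return {
        "config_head_dimension": head_dim,
        "config_gqa_ratio": gqa_ratio,
        "uses_gqa": gqa_ratio > 1,
        "config_approx_params_billions": round(params_b, 6),
        "size_category": size,
        "context_category": ctx,
    }

# A Llama-2-7B-shaped config reproduces row 2,803 above:
print(derive(4096, 32, 32, 32, 4096))
# {'config_head_dimension': 128.0, 'config_gqa_ratio': 1.0, 'uses_gqa': False,
#  'config_approx_params_billions': 6.442451, 'size_category': 'medium',
#  'context_category': 'medium'}
```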