Schema (40 columns; `index` runs 0 to 125k over the full dataset):

| Column | Dtype | Range / distinct values |
| --- | --- | --- |
| index | int64 | 0 to 125k |
| modelId | string | lengths 6 to 115 |
| config_model_type | string | lengths 2 to 46 |
| config_architectures | string | lengths 2 to 91 |
| config_vocab_size | string | lengths 1 to 8 |
| config_torch_dtype | string | 7 classes |
| config_transformers_version | string | 228 classes |
| config_hidden_size | float64 | 0 to 18.4k |
| config_intermediate_size | float64 | 0 to 25.2M |
| config_num_hidden_layers | float64 | -1 to 260 |
| config_num_attention_heads | string | 47 classes |
| config_num_key_value_heads | float64 | 0 to 4.1k |
| config_hidden_act | string | 19 classes |
| config_attention_dropout | float64 | 0 to 0.5 |
| config_use_cache | string | 3 classes |
| config_max_position_embeddings | float64 | -1 to 10.5M |
| config_rope_theta | float64 | 256 to 100B |
| config_rms_norm_eps | float64 | 0 to 0 |
| config_initializer_range | float64 | 0 to 2 |
| config_bos_token_id | string | 158 classes |
| config_eos_token_id | string | 339 classes |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 to 3.07k |
| config_gqa_ratio | float64 | 0.5 to 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 to 384 |
| config_num_experts_per_tok | float64 | 1 to 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 to 606 |
| size_category | string | 4 classes |
| context_category | string | 4 classes |
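The derived geometry columns can be reproduced from the raw config fields. In every preview row below, `config_head_dimension` equals `config_hidden_size / config_num_attention_heads` (e.g. 1,024 / 16 = 64 for the XLM-R rows, 2,304 / 8 = 288 for the gemma2 row), `config_gqa_ratio` equals `config_num_attention_heads / config_num_key_value_heads` (e.g. 32 / 8 = 4 for the Llama-3 rows), and `uses_gqa` is set exactly when the key/value head count is present and smaller than the attention head count. A minimal pandas sketch, assuming those are indeed the generating rules (the dataset itself does not state them):

```python
import pandas as pd

def add_derived_columns(df: pd.DataFrame) -> pd.DataFrame:
    """Recompute head dimension, GQA ratio, and the uses_gqa flag.

    Assumed rules, inferred from the preview rows; not documented by the dataset.
    """
    # Some count columns are stored as strings ("stringclasses"), so coerce first.
    heads = pd.to_numeric(df["config_num_attention_heads"], errors="coerce")
    kv_heads = pd.to_numeric(df["config_num_key_value_heads"], errors="coerce")
    hidden = pd.to_numeric(df["config_hidden_size"], errors="coerce")

    out = df.copy()
    out["config_head_dimension"] = hidden / heads  # NaN (null) if either side is missing
    out["config_gqa_ratio"] = heads / kv_heads     # 1.0 = plain MHA, >1 = grouped-query
    out["uses_gqa"] = kv_heads.notna() & (kv_heads < heads)
    return out
```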
Preview rows 16,200 to 16,299:

| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 16,200 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_110 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,201 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_120 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,202 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_140 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,203 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_150 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,204 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_160 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,205 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_170 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,206 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_180 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,207 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_190 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,208 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_20 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,209 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_200 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,210 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_30 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,211 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_40 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,212 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_50 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,213 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_60 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,214 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_70 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,215 | JaehyeokLee/preliminary_one_source_gist_checkpoint_epoch_1_step_90 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,216 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_10 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,217 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_100 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,218 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_110 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,219 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_120 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,220 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_130 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,221 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_140 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,222 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_150 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,223 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_160 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,224 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_170 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,225 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_180 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,226 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_190 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,227 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_20 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,228 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_200 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,229 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_30 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,230 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_40 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,231 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_50 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,232 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_60 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,233 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_70 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,234 | JaehyeokLee/preliminary_one_source_infonce_checkpoint_epoch_1_step_90 | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 8,194 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | long |
| 16,235 | Jaewoo1/Polyglot-12.8B-korean100k-epoch2 | gpt_neox | ["GPTNeoXForCausalLM"] | 30003 | float16 | 4.28.1 | 5,120 | 20,480 | 40 | 40 | null | gelu | null | True | 2,048 | null | null | 0.02 | 0 | 0 | false | 128 | null | false | null | null | false | false | true | false | false | false | false | false | false | false | 12.582912 | large | medium |
| 16,236 | Jaewoo1/Vicuna-13B_test_step1_epoch_0.5 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.28.1 | 5,120 | 13,824 | 40 | 40 | null | silu | null | True | 2,048 | null | 0.000001 | 0.02 | 0 | 1 | false | 128 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 12.582912 | large | medium |
| 16,237 | Jaewoo1/Vicuna-13B_test_step1_epoch_1 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.28.1 | 5,120 | 13,824 | 40 | 40 | null | silu | null | True | 2,048 | null | 0.000001 | 0.02 | 0 | 1 | false | 128 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 12.582912 | large | medium |
| 16,238 | Jaewoo1/Vicuna-13B_test_step1_epoch_2 | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.28.1 | 5,120 | 13,824 | 40 | 40 | null | silu | null | True | 2,048 | null | 0.000001 | 0.02 | 0 | 1 | false | 128 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 12.582912 | large | medium |
| 16,239 | JaganAI/JaganAI | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.49.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 16,240 | JaganAI/distilbert-finetuned | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.49.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 16,241 | Jagobaemeka/my_awesome_food_model | vit | ["ViTForImageClassification"] | null | float32 | 4.44.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,242 | Jagrati/cve_detail_prediction_model | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.51.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 16,243 | Jags96/codeparrot | gpt2 | ["GPT2LMHeadModel"] | 32768 | float32 | 4.38.0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 16,244 | Jaguar7788/bert-base-uncased-train-glue-mrpc | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.37.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,245 | JahBless/distilbert | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.40.1 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 16,246 | JahBless/modernbert | deberta-v2 | ["DebertaV2ForSequenceClassification"] | 128100 | float32 | 4.40.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,247 | JahBless/roberta_base | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.40.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,248 | JahBless/roberta_base_3c_bias_3c | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.40.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,249 | JahBless/roberta_large | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.40.1 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
| 16,250 | JahBless/tinybert | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.40.1 | 768 | 3,072 | 6 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.042467 | small | short |
| 16,251 | Jahid05/Gemma-2-9b-it-chat-prompt-genenator-v2 | gemma2 | ["Gemma2ForCausalLM"] | 256002 | float16 | 4.44.2 | 3,584 | 14,336 | 42 | 16 | 8 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 256000 | 256001 | null | 224 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.473908 | medium | long |
| 16,252 | Jahid05/meta-Llama3-8b-fine-tune-QLoRA | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.33.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,253 | JahnaviKumar/7BCodeLLama_PyCdSmry_Hetro_Central_LoRA | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.37.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 16,254 | JaimeArboleda/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,255 | JaimeT/finetuning-sentiment-model-3000-samples | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.48.3 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 16,256 | JairamKanna/armms-xlsr | wav2vec2 | ["Wav2Vec2ForCTC"] | 54 | float32 | 4.35.2 | 1,024 | 4,096 | 24 | 16 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | null |
| 16,257 | JairamKanna/whisper-small.en-hi | whisper | ["WhisperForConditionalGeneration"] | 51864 | float32 | 4.35.0 | null | null | 12 | null | null | null | 0 | True | null | null | null | null | 50257 | 50256 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 16,258 | Jairnetojp/phi-1_5-finetuned-sql | mixformer-sequential | ["MixFormerSequentialForCausalLM"] | 51200 | float32 | 4.33.2 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,259 | JairoDanielMT/Llama2-Fine-Tuning-python-codes-25k | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
| 16,260 | JakeOh/finetune-llama-3.1-8b-mbpp | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 16,261 | JakeOh/star-plus-step-1 | llama | ["LlamaForCausalLM"] | 128256 | float32 | 4.48.1 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 16,262 | JakeTurner616/Adonalsium-gpt2 | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.37.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 16,263 | JakeYunwooKim/mt5-small-finetuned-amazon-en-es | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.28.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,264 | Jakir057/augmented18k_15epoch | vit | ["ViTForImageClassification"] | null | float32 | 4.32.1 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | null |
| 16,265 | Jakir057/augmented18k_base | vit | ["ViTForImageClassification"] | null | float32 | 4.32.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,266 | Jakir057/banknote18k | vit | ["ViTForImageClassification"] | null | float32 | 4.32.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,267 | Jakir057/finetuned-indian-food | vit | ["ViTForImageClassification"] | null | float32 | 4.32.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,268 | Jakobaby/tester1 | bert | [] | 21128 | float32 | 4.18.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,269 | Jakobaby/trendy1 | bert | [] | 17892 | float32 | 4.18.0 | null | null | 12 | 12 | null | null | null | True | null | null | null | null | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | null |
| 16,270 | JalilH/fine_tuned_gemma | gemma2 | ["Gemma2ForCausalLM"] | 256000 | float32 | 4.46.0 | 2,304 | 9,216 | 26 | 8 | 4 | gelu_pytorch_tanh | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 288 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.656226 | medium | long |
| 16,271 | JamAndTeaStudios/DeepSeek-R1-0528-Qwen3-8B-FP8-Dynamic | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.55.2 | 4,096 | 12,288 | 36 | 32 | 8 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 7.247757 | large | very_long |
| 16,272 | JamAndTeaStudios/gemma-3-12b-it-FP8-Dynamic | gemma3 | ["Gemma3ForConditionalGeneration"] | null | bfloat16 | 4.51.3 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | null | [1, 106] | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 16,273 | Jaman/gemma_qa-Test-Finetune | gemma | ["GemmaForCausalLM"] | 256000 | float16 | 4.38.0 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 16,274 | Jaman/wav2vec2-base-finetuned-flight-control-SpeechCommands | wav2vec2 | ["Wav2Vec2ForSequenceClassification"] | 32 | float32 | 4.49.0 | 768 | 3,072 | 12 | 12 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,275 | James-WYang/BigTranslate | llama | ["LLaMAForCausalLM"] | 53613 | float16 | 4.21.0 | 5,120 | 13,824 | 40 | 40 | null | silu | null | False | null | null | 0.000001 | 0.02 | 0 | 1 | false | 128 | null | false | null | null | true | false | false | false | false | false | false | false | false | false | 12.582912 | large | null |
| 16,276 | James-WYang/X-Instruction-13b-fi | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.33.3 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 16,277 | James-WYang/X-Instruction-13b-ta | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.33.3 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 16,278 | James-WYang/X-Instruction-13b-tr | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.33.3 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 16,279 | James-WYang/X-Instruction-7b-10langs | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.2 | 3,072 | 24,576 | 28 | 16 | 16 | gelu | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 16,280 | James-WYang/X-Instruction-7b-ta | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.2 | 3,072 | 24,576 | 28 | 16 | 16 | gelu | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 16,281 | James-WYang/X-Instruction-8b-fi | llama | ["LlamaForCausalLM"] | 128258 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,282 | James-WYang/X-Instruction-8b-sw | llama | ["LlamaForCausalLM"] | 128258 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 16,283 | James-kc-min/AGT_Roberta | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.18.0 | 768 | 3,072 | 6 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.042467 | small | short |
| 16,284 | James-kc-min/AGT_Roberta2 | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.18.0 | 768 | 3,072 | 6 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.042467 | small | short |
| 16,285 | James2313123/L3-Darker-Planet-12.15B_4bpw-h6-EXL2 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.43.3 | 4,096 | 14,336 | 51 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 10.267656 | large | long |
| 16,286 | James2313123/MN-GRAND-Gutenberg-Lyra4-Lyra-12B-DARKNESS-EXL2-3bpw | mistral | ["MistralForCausalLM"] | 131072 | bfloat16 | 4.43.3 | 5,120 | 14,336 | 40 | 32 | 8 | silu | 0 | True | 1,024,000 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 160 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | very_long |
| 16,287 | James449/nlp-t5-qa-model | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.38.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
| 16,288 | JamesBegin/Llama-3.1-8B-Instruct-Pause-Tuned | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.49.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 16,289 | JamesCS462/JamesCS462 | vit | ["ViTForImageClassification"] | null | null | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,290 | JamesCS462/JamesCS462_cifar100 | vit | ["ViTForImageClassification"] | null | null | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,291 | JamesEJarvis/rl_course_vizdoom_health_gathering_supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 16,292 | JamesKTChen/wav2vec2-base-finetuned-ks | wav2vec2 | ["Wav2Vec2ForSequenceClassification"] | 32 | float32 | 4.41.1 | 768 | 3,072 | 12 | 12 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
| 16,293 | JamesKim/Mistral-7B-v0.3-q4 | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.42.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,294 | JamesKim/distil_bert-base-multilingual-cased-en | bert | ["BertModel"] | 119547 | float32 | 4.43.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 16,295 | JamesKim/mistral-7b-qlora-alpaca-1k-0508_HybridHPO | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.40.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,296 | JamesKim/mistral-7b-qlora-alpaca-1k-0509_ARC-Train | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.40.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,297 | JamesKim/mistral-7b-qlora-alpaca-sample-0.5k-r3 | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.40.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,298 | JamesKim/mistral-7b-qlora-alpaca-sample-0.5k-v0.1 | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.40.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
| 16,299 | JamesVorder/PPO-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
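Within this preview the two categorical labels track the numeric columns cleanly: rows up to about 0.9B approximate parameters are `small`, roughly 1.7B to 6.5B are `medium`, and 7B upward are `large`, while `context_category` steps from `short` (512/514) through `medium` (2,048 to 4,096) and `long` (8,192 to 16,384) to `very_long` (32,768 and beyond). Those cut points are inferred from the rows shown here, not documented. A hypothetical slicing sketch, assuming the preview has been exported to a local CSV (the file name is illustrative, not a real artifact of this dataset):

```python
import pandas as pd

# Illustrative file name: assumes the preview table above was exported to CSV first.
df = pd.read_csv("model_configs_preview.csv")

# Booleans may arrive as the strings "true"/"false"; normalize before filtering.
uses_gqa = df["uses_gqa"].astype(str).str.lower() == "true"

# Decoder checkpoints that use grouped-query attention, smallest first.
gqa_models = (
    df[uses_gqa]
    .sort_values("config_approx_params_billions")
    .loc[:, ["modelId", "config_model_type", "config_gqa_ratio", "size_category"]]
)
print(gqa_models.to_string(index=False))
```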