Schema: 40 columns. Numeric columns list the reported min and max; string columns list either string-length bounds or a distinct-value count; bool columns list the number of classes.

column | dtype | range / distinct values
index | int64 | 0 to 125k
modelId | string | lengths 6 to 115
config_model_type | string | lengths 2 to 46
config_architectures | string | lengths 2 to 91
config_vocab_size | string | lengths 1 to 8
config_torch_dtype | string | 7 values
config_transformers_version | string | 228 values
config_hidden_size | float64 | 0 to 18.4k
config_intermediate_size | float64 | 0 to 25.2M
config_num_hidden_layers | float64 | -1 to 260
config_num_attention_heads | string | 47 values
config_num_key_value_heads | float64 | 0 to 4.1k
config_hidden_act | string | 19 values
config_attention_dropout | float64 | 0 to 0.5
config_use_cache | string | 3 values
config_max_position_embeddings | float64 | -1 to 10.5M
config_rope_theta | float64 | 256 to 100B
config_rms_norm_eps | float64 | 0 to 0
config_initializer_range | float64 | 0 to 2
config_bos_token_id | string | 158 values
config_eos_token_id | string | 339 values
config_tie_word_embeddings | bool | 2 classes
config_head_dimension | float64 | 0.5 to 3.07k
config_gqa_ratio | float64 | 0.5 to 64
config_moe_enabled | bool | 1 class
config_n_routed_experts | float64 | 1 to 384
config_num_experts_per_tok | float64 | 1 to 64
is_llama_family | bool | 2 classes
is_bert_family | bool | 2 classes
is_gpt_family | bool | 2 classes
is_t5_family | bool | 2 classes
is_whisper_family | bool | 2 classes
is_deepseek_family | bool | 2 classes
is_mistral_family | bool | 2 classes
uses_moe | bool | 2 classes
uses_gqa | bool | 2 classes
uses_rope | bool | 2 classes
config_approx_params_billions | float64 | -0.2 to 606
size_category | string | 4 values
context_category | string | 4 values
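Several of the trailing columns are not raw config.json fields but values derived from them. The dump does not state the formulas, but they can be recovered consistently from the rows below: config_head_dimension is hidden_size divided by attention heads (768/12 = 64), config_gqa_ratio is attention heads divided by KV heads (32/8 = 4 on the Llama 3.1 8B row), and config_approx_params_billions matches the classic dense-transformer estimate of 12 * L * d_model^2 parameters exactly (12*12*768^2 gives the 0.084935 of the BERT-base rows, 12*32*4096^2 the 6.442451 of the 7B Llama rows). Below is a minimal pandas sketch that reproduces them under those inferred assumptions; the category bin edges and the fourth size label are guesses that merely fit this slice.

```python
import pandas as pd

def add_derived_columns(df: pd.DataFrame) -> pd.DataFrame:
    """Recompute the derived columns from the raw config_* fields.

    Every formula here is reverse-engineered from this slice of rows;
    these are assumptions, not a documented spec.
    """
    hidden = df["config_hidden_size"]
    layers = df["config_num_hidden_layers"]
    # The heads column is typed as a string in the schema, so coerce it.
    heads = pd.to_numeric(df["config_num_attention_heads"], errors="coerce")

    # head_dimension = hidden_size / num_attention_heads (768/12 = 64).
    df["config_head_dimension"] = hidden / heads

    # gqa_ratio = query heads per KV head; uses_gqa appears to be true
    # exactly when that ratio exceeds 1 (Phi-3 with 32/32 stays false).
    df["config_gqa_ratio"] = heads / df["config_num_key_value_heads"]
    df["uses_gqa"] = df["config_gqa_ratio"] > 1

    # uses_rope looks like a simple presence test on rope_theta.
    df["uses_rope"] = df["config_rope_theta"].notna()

    # Family flags appear to test model_type against hand-maintained sets;
    # in this slice the bert family covers bert, distilbert, roberta,
    # xlm-roberta and deberta-v2, but notably not mpnet.
    bert_like = {"bert", "distilbert", "roberta", "xlm-roberta", "deberta-v2"}
    df["is_bert_family"] = df["config_model_type"].isin(bert_like)

    # Classic ~12 * L * d_model^2 dense-transformer parameter estimate.
    df["config_approx_params_billions"] = 12 * layers * hidden**2 / 1e9

    # Bin edges are guesses that merely fit this slice (0.805B -> small,
    # 1.107B -> medium, 12.58B -> large); the fourth label is hypothetical
    # because it never appears in these rows.
    df["size_category"] = pd.cut(
        df["config_approx_params_billions"],
        bins=[0, 1, 10, 100, float("inf")],
        labels=["small", "medium", "large", "xlarge"],
    )

    # Likewise inferred: <=1k short, 2k-8k medium, 16k long, >=32k very_long.
    df["context_category"] = pd.cut(
        df["config_max_position_embeddings"],
        bins=[0, 1024, 8192, 16384, float("inf")],
        labels=["short", "medium", "long", "very_long"],
    )
    return df
```

On the rows in this slice, these recomputed values agree with the stored ones wherever the raw fields are present.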
Rows with index 14,700 to 14,799, one record per line:

index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category
14,700 | HiAmNear/bert-base-multilingual-uncased-ViISA | bert | ["BertForSequenceClassification"] | 105879 | float32 | 4.46.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,701 | HiAmNear/vibert-base-cased-ViFE | bert | ["BertForSequenceClassification"] | 38168 | float32 | 4.46.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,702 | HiAmNear/xlm-roberta-base-ViISA | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.46.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,703 | HiHelloTW/my_awesome_qa_model | distilbert | ["DistilBertForQuestionAnswering"] | 30522 | float32 | 4.44.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short
14,704 | HiTZ/EriBERTa-base | roberta | ["RobertaForMaskedLM"] | 64000 | null | 4.5.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,705 | HiTZ/GoLLIE-7B | llama | ["LlamaForCausalLM"] | 32016 | bfloat16 | 4.33.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long
14,706 | HiTZ/Hermes-3-Llama-3.1-8B_ODESIA | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128040 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long
14,707 | HiTZ/latxa-13b-v1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium
14,708 | HiTZ/latxa-13b-v1.1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium
14,709 | HiTZ/latxa-13b-v1.2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium
14,710 | HiTZ/latxa-70b-v1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 8,192 | 28,672 | 80 | 64 | 8 | silu | null | True | 4,096 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 64.424509 | large | medium
14,711 | HiTZ/latxa-70b-v1.1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 8,192 | 28,672 | 80 | 64 | 8 | silu | null | True | 4,096 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 64.424509 | large | medium
14,712 | HiTZ/latxa-70b-v1.2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 8,192 | 28,672 | 80 | 64 | 8 | silu | null | True | 4,096 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 64.424509 | large | medium
14,713 | HiTZ/latxa-7b-v1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium
14,714 | HiTZ/latxa-7b-v1.1 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium
14,715 | HiTZ/latxa-7b-v1.2 | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium
14,716 | HiTZ/lmloss-opt-rm-1.3b | opt | ["OPTForCausalLM"] | 50272 | float16 | 4.23.1 | 2,048 | null | 24 | 32 | null | null | 0 | True | 2,048 | null | null | null | 2 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 1.20796 | medium | medium
14,717 | HiTZ/medical_en-eu | marian | ["MarianMTModel"] | 8001 | float16 | 4.12.3 | null | null | 6 | null | null | null | 0 | True | 512 | null | null | null | 0 | 0 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,718 | HiTZ/medical_enes-eu | marian | ["MarianMTModel"] | 8001 | float16 | 4.12.3 | null | null | 6 | null | null | null | 0 | True | 512 | null | null | null | 0 | 0 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,719 | HiTZ/medical_es-eu | marian | ["MarianMTModel"] | 8001 | float16 | 4.12.3 | null | null | 6 | null | null | null | 0 | True | 512 | null | null | null | 0 | 0 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,720 | HiTZ/mt-hitz-en-eu | marian | ["MarianMTModel"] | 8001 | float16 | 4.12.3 | null | null | 6 | null | null | null | 0 | True | 512 | null | null | null | 0 | 0 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,721 | HiTZ/mt-hitz-eu-en | marian | ["MarianMTModel"] | 8001 | float16 | 4.12.3 | null | null | 6 | null | null | null | 0 | True | 512 | null | null | null | 0 | 0 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,722 | HiTZ/whisper-base-eu | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 6 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,723 | HiTZ/whisper-base-gl | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.37.2 | null | null | 6 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,724 | HiTZ/whisper-large-eu | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 32 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,725 | HiTZ/whisper-large-v3-eu | whisper | ["WhisperForConditionalGeneration"] | 51866 | float32 | 4.37.2 | null | null | 32 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,726 | HiTZ/whisper-medium-ca | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 24 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,727 | HiTZ/whisper-medium-eu | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 24 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,728 | HiTZ/whisper-small-ca | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 12 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,729 | HiTZ/whisper-small-gl | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 12 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,730 | HiTZ/whisper-tiny-eu | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.33.0.dev0 | null | null | 4 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,731 | HiTZ/whisper-tiny-gl | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.37.2 | null | null | 4 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,732 | HiTZ/xlm-roberta-large-lemma-cz | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.20.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short
14,733 | HiTZ/xlm-roberta-large-lemma-en | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.20.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short
14,734 | HiTZ/xlm-roberta-large-lemma-es | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.20.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short
14,735 | HiTZ/xlm-roberta-large-lemma-pl | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.20.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short
14,736 | HiTZ/xlm-roberta-large-lemma-ru | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.20.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short
14,737 | HiTZ/xlm-roberta-large-lemma-tr | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.20.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short
14,738 | HiTruong/Sailor_film_assistant | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.42.3 | 2,048 | 5,504 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 1.20796 | medium | very_long
14,739 | Hiariel/huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,740 | Hiba03/Qwen3_1.7B-GRPO-math-reasoning | qwen3 | ["Qwen3ForCausalLM"] | 151936 | float16 | 4.51.3 | 2,048 | 6,144 | 28 | 16 | 8 | silu | 0 | True | 40,960 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.409286 | medium | very_long
14,741 | Hiba03/alatlas_instruct_lora | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long
14,742 | Hichnick/ex_bot | gpt2 | ["GPT2LMHeadModel"] | 50261 | float32 | 4.11.3 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null
14,743 | Hickey8/sd-class-butterflies-32 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,744 | Hielke/deberta-v3-finetuned-t5-copanl | deberta-v2 | ["DebertaV2ForSequenceClassification"] | 128100 | float32 | 4.44.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,745 | Hielke/deberta-v3-finetuned-t5-dbrs | deberta-v2 | ["DebertaV2ForSequenceClassification"] | 128100 | float32 | 4.44.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,746 | Hieom/tiny-bert-sst2-distilled | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.25.1 | 128 | 512 | 2 | 2 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.000393 | small | short
14,747 | Hieu-Hien/freelb-bart-base-cv2 | bart | ["BartForConditionalGeneration"] | 50268 | float32 | 4.44.2 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,748 | Hieu-Hien/mulgat-t5-small-cv2-7000 | t5 | ["T5ForConditionalGeneration"] | 32103 | float32 | 4.44.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,749 | Hieu-Hien/mulgat-t5-small-cv2-token | t5 | ["T5ForConditionalGeneration"] | 32103 | float32 | 4.44.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,750 | Hieu-Hien/tcab-bart-base-word | bart | ["BartForConditionalGeneration"] | 50265 | float32 | 4.44.2 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,751 | Hieu-Hien/ungat-bart-base-cv2 | bart | ["BartForConditionalGeneration"] | 50268 | float32 | 4.46.0 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,752 | HieuAnh/travel-classify | roberta | ["RobertaForSequenceClassification"] | 64001 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 258 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,753 | HighCWu/sd-control-lora-face-landmarks | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,754 | HikariLight/Llama_3.2_1B_COMP_ACI_DAMT_SFT_Merged | llama | ["LlamaForCausalLM"] | 128258 | bfloat16 | 4.51.3 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long
14,755 | HikariLight/Mistral-7B-v0.3_SFT_Merged | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.42.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,756 | HikariLight/Mistral_7B_v0.3_ACI_DAMT_SFT_Merged | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.48.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,757 | HikariLight/Mistral_7B_v0.3_COMP_ACI_DAMT_SFT_Merged | mistral | ["MistralForCausalLM"] | 32770 | bfloat16 | 4.51.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,758 | HikariLight/Mistral_ACI_Bench_SFT | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.41.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long
14,759 | HikaruBear/distilbert-base-uncased-finetuned-squad | distilbert | ["DistilBertForQuestionAnswering"] | 30522 | float32 | 4.24.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short
14,760 | Hikerell/shine-FT-20230414-on-liuli | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.18.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null
14,761 | Hikkiray69/promptgptv2 | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.47.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null
14,762 | HikmaLabs/ukhbert_narrator_linking | bert | ["BertForTokenClassification"] | 119547 | float32 | 4.49.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,763 | Hilal2782/KTU_TFLai_Turkish-Alpaca | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null
14,764 | HilariusJeremy/disaster_tweet_distilbert | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.36.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short
14,765 | HildaPark0214/results | roberta | ["RobertaForSequenceClassification"] | 32000 | float32 | 4.44.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,766 | HillPhelmuth/Llama-3.2-3B-Instruct-Reason-FT | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null
14,767 | HillPhelmuth/Qwen3_4B_Chess_Explain_GGUF_Quants | qwen3 | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,768 | HillPhelmuth/Qwen3_4B_Chess_Explain_vllm | qwen3 | ["Qwen3ForCausalLM"] | 151936 | bfloat16 | 4.51.3 | 2,560 | 9,728 | 36 | 32 | 8 | silu | 0 | True | 40,960 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 80 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 2.831155 | medium | very_long
14,769 | Hillal-titouh/Whisper-kabyle-Arabic-French-code-switching | whisper | ["WhisperForConditionalGeneration"] | 51918 | float32 | 4.52.4 | null | null | 12 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null
14,770 | Hilurex/Nahida | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,771 | Himanshu9178/lora-peft-llama-3.1-8b-finetuned-771 | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null
14,772 | Himanshu99001/results | mpnet | ["MPNetForSequenceClassification"] | 30527 | float32 | 4.45.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,773 | HimanshuTyagi/CodeGenWithLLAMA2 | codegen | ["CodeGenForCausalLM"] | 50295 | float16 | 4.38.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 1 | 50256 | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,774 | Himmler1/Phi-3-mini-4k-instruct-Fined-Tuned-2 | phi3 | ["Phi3ForCausalLM"] | 32011 | bfloat16 | 4.48.3 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | medium
14,775 | Himmler1/phi3.transcript | phi3 | ["Phi3ForCausalLM"] | 32011 | bfloat16 | 4.48.3 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | medium
14,776 | Himmler1/phi3.transcript_2 | phi3 | ["Phi3ForCausalLM"] | 32011 | bfloat16 | 4.48.3 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | medium
14,777 | Hina541/fineTuningXLMRoberta-TokenClassification-latest | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.44.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,778 | Hinno/fineTuneIncoderWithPrompt | xglm | ["XGLMForCausalLM"] | 50518 | float32 | 4.40.0 | null | null | null | null | null | null | 0.1 | True | 2,048 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | medium
14,779 | Hinno/incoder-1B-flutter-finetuned | xglm | ["XGLMForCausalLM"] | 50518 | float32 | 4.40.0 | null | null | null | null | null | null | 0.1 | True | 2,048 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | medium
14,780 | Hinova/ppo-PyramidRND | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,781 | Hinova/ppo-SnowballTarget | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null
14,782 | Hippopoto0/akkadian-marianMT | marian | ["MarianMTModel"] | 62834 | float32 | 4.48.2 | null | null | 6 | null | null | null | 0 | True | 512 | null | null | null | 0 | 0 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short
14,783 | Hiranmai49/CFDistilGPT | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.46.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null
14,784 | Hiranya2001/coral_comment_classifier | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.42.4 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,785 | Hiranya2001/distilbert_semantic_classifier | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.42.4 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short
14,786 | HiredScore/tiny-llama-full-schema-50K-english-synthetic-long-lora-english | llama | ["LlamaForCausalLM"] | 32001 | bfloat16 | 4.35.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium
14,787 | HiredScore/tinyllama_multi | llama | ["LlamaForCausalLM"] | 32004 | bfloat16 | 4.34.0.dev0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium
14,788 | His-Wardship/sarcasm-detector | deberta-v2 | ["DebertaV2ForSequenceClassification"] | 128100 | float32 | 4.38.0.dev0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short
14,789 | Hitisha/orpo-phi3 | phi3 | ["Phi3ForCausalLM"] | 32013 | float16 | 4.42.4 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 32011 | 32012 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | medium
14,790 | Hiveurban/multilingual-e5-large-pooled | xlm-roberta | ["XLMRobertaModel"] | 250002 | float32 | 4.29.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short
14,791 | Hjgugugjhuhjggg/mergekit-dare_linear-caalfar | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.45.1 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long
14,792 | Hjgugugjhuhjggg/mergekit-linear-sffnqed | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.2 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long
14,793 | Hjgugugjhuhjggg/mergekit-passthrough-bmwtwlm | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.45.1 | 3,072 | 8,192 | 1 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.113246 | small | very_long
14,794 | Hjgugugjhuhjggg/mergekit-passthrough-obzzfql | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.45.1 | 3,072 | 8,192 | 10 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.132462 | medium | very_long
14,795 | Hjgugugjhuhjggg/mergekit-passthrough-sppyqor | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.46.1 | 2,048 | 8,192 | 1 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.050332 | small | very_long
14,796 | Hjgugugjhuhjggg/mergekit-passthrough-vnrjyuc | gpt2 | ["GPT2LMHeadModel"] | 50257 | bfloat16 | 4.46.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null
14,797 | Hjgugugjhuhjggg/mergekit-ties-ahvmzcm | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.45.1 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long
14,798 | Hjgugugjhuhjggg/mergekit-ties-dhpqgnv | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.45.1 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long
14,799 | Hjgugugjhuhjggg/mergekit-ties-fhzafeq | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.46.2 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long
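To work with rows like these downstream, a minimal loading-and-filtering sketch follows; models_slice.csv is a hypothetical CSV export of the table above (the real dataset may ship as parquet or JSON lines instead), with null read as a missing value.

```python
import pandas as pd

# Hypothetical flat-file export of this slice; "null" marks missing values
# and "," is the thousands separator used in the rendering above.
df = pd.read_csv("models_slice.csv", na_values=["null"], thousands=",")

# Be defensive about the flag columns: force real booleans whether they were
# parsed as bool dtype or left as "true"/"false" strings.
flag_cols = [c for c in df.columns if c.startswith(("is_", "uses_"))]
for col in flag_cols:
    df[col] = df[col].astype(str).str.lower().eq("true")

# Example query: Llama-family checkpoints in this slice that use
# grouped-query attention, smallest first.
gqa_llamas = (
    df[df["is_llama_family"] & df["uses_gqa"]]
    .sort_values("config_approx_params_billions")
)
print(gqa_llamas[["modelId", "config_gqa_ratio", "config_approx_params_billions"]])
```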