index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2,300 | Akzhannn/multi-label_classification | roberta | ["RobertaForSequenceClassification"] | 52000 | float32 | 4.42.4 | 768 | 3,072 | 6 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.042467 | small | short |
2,301 | Al00000/my_awesome_eli5_mlm_model | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.42.4 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,302 | Al020198zee/ppo-CarRacing-v0 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,303 | Al020198zee/ppo-Walker2DBulletEnv-v0 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,304 | Al1111123232323/pur | gpt2 | ["GPT2LMHeadModel"] | 50257 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
2,305 | Al1Kay/dementia_classification_wavlm-base-plus | wavlm | ["WavLMForSequenceClassification"] | 32 | float32 | 4.51.3 | 768 | 3,072 | 12 | 12 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
2,306 | Al1Kay/wav2vec2-large-xls-r-300m-dm32 | wav2vec2 | ["Wav2Vec2ForSpeechClassification"] | 32 | float32 | 4.48.2 | 1,024 | 4,096 | 24 | 16 | null | gelu | 0.1 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | null |
2,307 | Al3ksandra/distilbert-base-uncased-finetuned-emotion | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.24.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
2,308 | AlGe/distilBERT-domain-adapted-5e | distilbert | ["DistilBertForMaskedLM"] | 30522 | float32 | 4.38.0.dev0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
2,309 | AlIshaq/IndoBART-faq-pondok | bart | ["BartForCausalLM"] | 50264 | float32 | 4.52.4 | null | null | 12 | null | null | null | 0 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
2,310 | AlSamCur123/Elmer | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
2,311 | AlSamCur123/LlamaChatml | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
2,312 | AlSamCur123/Ministral-8B | mistral | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | true | false | false | false | null | null | null |
2,313 | AlSamCur123/Ministral-8BContinuedFine | mistral | ["MistralForCausalLM"] | 131072 | float16 | 4.47.1 | 4,096 | 12,288 | 36 | 32 | 8 | silu | 0 | True | 32,768 | 100,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 7.247757 | large | very_long |
2,314 | AlSamCur123/Mistral-NeMo-Minitron-8B-Instruct | mistral | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | true | false | false | false | null | null | null |
2,315 | AlSamCur123/Mistral-NeMo-Minitron-8B-InstructContinuedFine | mistral | ["MistralForCausalLM"] | 131072 | float16 | 4.46.3 | 4,096 | 11,520 | 40 | 32 | 8 | silu | 0 | True | 32,000 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 8.053064 | large | long |
2,316 | AlSamCur123/Mistral-Small-Instruct | mistral | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | true | false | false | false | null | null | null |
2,317 | AlSamCur123/Mistral-Small-InstructContinuedFine | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.47.1 | 6,144 | 16,384 | 56 | 48 | 8 | silu | 0 | True | 131,072 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 6 | false | null | null | false | false | false | false | false | false | true | false | true | true | 25.367151 | large | very_long |
2,318 | AlSamCur123/Mistral3R1 | mistral | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | true | false | false | false | null | null | null |
2,319 | AlSamCur123/Mistral3R1ContinuedFine | mistral | ["MistralForCausalLM"] | 32768 | float16 | 4.50.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
2,320 | AlSamCur123/Nemo-Instruct | mistral | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | true | false | false | false | null | null | null |
2,321 | AlSamCur123/Nemo-InstructContinuedFine | mistral | ["MistralForCausalLM"] | 131072 | bfloat16 | 4.46.3 | 5,120 | 14,336 | 40 | 32 | 8 | silu | 0 | True | 1,024,000 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 160 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | very_long |
2,322 | AlSamCur123/NemoTunerR1ContinuedFine | mistral | ["MistralForCausalLM"] | 131072 | bfloat16 | 4.51.3 | 5,120 | 14,336 | 40 | 32 | 8 | silu | 0 | True | 1,024,000 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 160 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | very_long |
2,323 | AlSamCur123/NotDolphin3.0-Llama3.1-8ContinuedFine | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.48.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128256, 128001, 128008, 128009] | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
2,324 | AlSamCur123/mistral-7b-instruct-v0.3ContinuedFine | mistral | ["MistralForCausalLM"] | 32768 | bfloat16 | 4.50.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
2,325 | AlTPha/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,326 | AlaGrine/distilgpt2-finetuned-3GPP-5G | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.36.0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
2,327 | Alaa18/Assistant_chatbot_Mistral | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.40.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
2,328 | Alaa18/mistral_7b_chatbot | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
2,329 | Alaa67/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,330 | AlaaHussien/dinov2-small-Skin | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,331 | AlaguAlex/distilbert-base-uncased-swag | bert | ["BertForMultipleChoice"] | 30522 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,332 | Alainhby666/qwen2.5_sft_full_2025-05-22_0509 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float16 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
2,333 | Alainhby666/qwen_sft_2025-05-22_1853 | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.51.3 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
2,334 | Alamerton/open-instruct-flan-1k-finetuning-steps | llama | ["LlamaForCausalLM"] | 32001 | float32 | 4.34.1 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 2,048 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
2,335 | Alan1402/ppo-lunarlander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,336 | AlanHou/distilbert-base-uncased-finetuned-emotion | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.38.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
2,337 | AlanHou/news-classification-18-llama-2-7b | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
2,338 | AlanHou/xlm-roberta-base-finetuned-panx-all | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,339 | AlanHou/xlm-roberta-base-finetuned-panx-de | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,340 | AlanHou/xlm-roberta-base-finetuned-panx-de-fr | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,341 | AlanHou/xlm-roberta-base-finetuned-panx-en | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,342 | AlanHou/xlm-roberta-base-finetuned-panx-fr | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,343 | AlanHou/xlm-roberta-base-finetuned-panx-it | xlm-roberta | ["XLMRobertaForTokenClassification"] | 250002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,344 | AlanKwook/DeepSeek-R1-Medical-COT | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.50.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
2,345 | AlanRobotics/bert-sentiment | bert | ["BertForSequenceClassification"] | 119547 | float32 | 4.26.0.dev0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,346 | AlanRobotics/bert_q_a_test | bert | ["BertForQuestionAnswering"] | 119547 | null | 4.24.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,347 | AlanRobotics/opt-ru | opt | ["OPTForCausalLM"] | 32000 | float32 | 4.33.0 | 768 | null | 12 | 12 | null | null | 0 | True | 2,048 | null | null | null | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | medium |
2,348 | AlanRobotics/ruT5-base | t5 | ["T5ForConditionalGeneration"] | 25662 | float32 | 4.26.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,349 | AlanRobotics/rubert-siamese | AutoModel | ["SiamseNNModel"] | null | float32 | 4.24.0 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,350 | Alaninfant/OrpoLlama3-8B | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.41.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128256 | 128257 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
2,351 | Alannikos768/BanterBot-7b-chat | internlm2 | ["InternLM2ForCausalLM"] | 92544 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
2,352 | Alannikos768/BanterBot-7b-chat-w4a16-4bit | internlm2 | ["InternLM2ForCausalLM"] | 92544 | float16 | 4.46.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
2,353 | Alannikos768/BanterBot_1_8b-chat | internlm2 | ["InternLM2ForCausalLM"] | 92544 | float16 | 4.39.3 | 2,048 | 8,192 | 24 | 16 | 8 | silu | null | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.20796 | medium | very_long |
2,354 | Alannikos768/BanterBot_1_8b-chat-w4a16-4bit | internlm2 | ["InternLM2ForCausalLM"] | 92544 | float16 | 4.46.2 | 2,048 | 8,192 | 24 | 16 | 8 | silu | null | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.20796 | medium | very_long |
2,355 | Alannikos768/BoostBot-7b-chat | internlm2 | ["InternLM2ForCausalLM"] | 92544 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
2,356 | Alannikos768/BoostBot_1_8b-chat-w4a16-4bit | internlm2 | ["InternLM2ForCausalLM"] | 92544 | float16 | 4.46.2 | 2,048 | 8,192 | 24 | 16 | 8 | silu | null | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 2 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.20796 | medium | very_long |
2,357 | Albaeld/mistral-7b-16bit-aics-classifier | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
2,358 | Albaeld/mistral-7b-4bit-aics-classifier | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
2,359 | Albe/test-category | vit | ["ViTForImageClassification"] | null | float32 | 4.22.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
2,360 | AlbelTec/phi-1_5-finetuned-ner-albel | mixformer-sequential | ["MixFormerSequentialForCausalLM"] | 51200 | float32 | 4.33.2 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,361 | AlbelTec/phi-1_5-finetuned-orca | mixformer-sequential | ["MixFormerSequentialForCausalLM"] | 51200 | float32 | 4.34.0 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,362 | AlbelTec/phi-1_5-finetuned-sql | mixformer-sequential | ["MixFormerSequentialForCausalLM"] | 51200 | float32 | 4.34.1 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,363 | AlbertBik/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,364 | AlbertG3/BankStockEmbed | bert | ["BertModel"] | 30522 | float32 | 4.38.2 | 384 | 1,536 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.021234 | small | short |
2,365 | AlbertWang8192/2025-07-07_0 | mistral | ["MistralForCausalLM"] | 131072 | bfloat16 | 4.52.4 | 5,120 | 14,336 | 40 | 32 | 8 | silu | 0 | False | 131,072 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 160 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | very_long |
2,366 | AlbertWang8192/V24 | mistral | ["MistralForCausalLM"] | 131072 | bfloat16 | 4.46.0 | 5,120 | 14,336 | 40 | 32 | 8 | silu | 0 | False | 131,072 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 160 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 12.582912 | large | very_long |
2,367 | AlbertoImmune/Robotic1 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,368 | AlbertoImmune/Robotic5-2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,369 | AlbertoImmune/Robotic6-PandaReachDense-v3 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,370 | AlbertoImmune/rl_course_vizdoom_health_gathering_supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,371 | Alchemist-mohan/douyin_bert-chinese-word | bert | ["BertForSequenceClassification"] | 21128 | float32 | 4.46.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,372 | Alchemy5/detr-resnet-50_finetuned_cppe5 | detr | ["DetrForObjectDetection"] | null | float32 | 4.35.2 | null | null | 6 | null | null | null | 0 | null | 1,024 | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
2,373 | Alchemy5/latexify2 | vision-encoder-decoder | ["VisionEncoderDecoderModel"] | null | float32 | 4.35.2 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,374 | AlcoholMan/t5-small-finetuned-xsum | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.26.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,375 | Aldelard/Indication_PYT_v2 | gpt_neox | ["GPTNeoXForCausalLM"] | 50304 | float16 | 4.29.2 | 2,560 | 10,240 | 32 | 32 | null | gelu | null | True | 2,048 | null | null | 0.02 | 0 | 0 | false | 80 | null | false | null | null | false | false | true | false | false | false | false | false | false | false | 2.516582 | medium | medium |
2,376 | Aldo789/8968b5a0-f988-4ffa-a15d-b42a7487af8c | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float16 | 4.52.4 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 1,000,000 | 0.000001 | 0.02 | null | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
2,377 | AldoSN/biomedical_model_aldosn | vit | ["ViTForImageClassification"] | null | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | null |
2,378 | Aldraz/distilbert-base-uncased-finetuned-emotion | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.17.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
2,379 | Aldrich12/my-fine-tuned-model-ppo | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.44.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
2,380 | Ale902/ppo-Pyramids | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,381 | AleBurzio/bloom-560M-riddles | bloom | ["BloomForCausalLM"] | 250880 | float32 | 4.25.0.dev0 | 1,024 | null | null | null | null | null | 0 | True | null | null | null | 0.02 | 1 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
2,382 | AleRams/app_prova2 | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,383 | AleRams/app_prova3 | bert | ["MyBertForSequenceClassification"] | 30522 | float32 | 4.36.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,384 | AleRams/test-trainer | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,385 | AleRams/test-trainer2 | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,386 | AleRams/test-trainer_2 | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
2,387 | Aleereza/tinnyllama_fatokenizer_max_nonQu_5 | llama | ["LlamaForCausalLM"] | 33924 | float32 | 4.35.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
2,388 | Aleereza/tinnyllama_fatokenizer_mean_nonQu_5 | llama | ["LlamaForCausalLM"] | 33924 | float32 | 4.35.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
2,389 | Aleereza/tinnyllama_fatokenizer_sum_nonQu_5 | llama | ["LlamaForCausalLM"] | 33924 | float32 | 4.35.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
2,390 | Aleereza/tinnyllama_prtokenizer_mean_4 | llama | ["LlamaForCausalLM"] | 33924 | float16 | 4.35.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
2,391 | Aleereza/tiny_llama_with_m2Tokenizer | llama | ["LlamaForCausalLM"] | 36000 | float32 | 4.35.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | False | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
2,392 | Alefiah/UrduSum10 | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.28.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,393 | Alefiah/UrduSum10k | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.28.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,394 | Alefiah/UrduSum5k | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.28.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,395 | Alefiah/UrduSum6 | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.28.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,396 | Alefiah/UrduSum7 | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.28.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,397 | Alefiah/UrduSum9 | mt5 | ["MT5ForConditionalGeneration"] | 250112 | float32 | 4.28.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
2,398 | AlejandroOlmedo/DeepScaleR-1.5B-Preview-mlx | qwen2 | ["Qwen2ForCausalLM"] | 151936 | float32 | 4.47.1 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | True | 131,072 | 10,000 | 0.000001 | 0.02 | 151646 | 151643 | false | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
2,399 | AlekHesa/testing-llama2-v2 | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
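
The derived columns at the right of the table can be reproduced from the raw config fields. The sketch below is an assumption inferred from the listed values (for example, `config_approx_params_billions` consistently equals `12 * hidden_size^2 * num_hidden_layers / 1e9`, e.g. 12 × 4096² × 32 ≈ 6.442451 B), not a documented pipeline; the function and variable names are hypothetical.

```python
# Minimal sketch (assumption): how the derived columns in this table appear to be
# computed from the raw config fields. The 12 * hidden^2 * layers parameter
# estimate is inferred by matching the listed values, not taken from any
# documented extraction script.

def derive_columns(hidden_size: int, num_attention_heads: int,
                   num_key_value_heads: int, num_hidden_layers: int) -> dict:
    head_dimension = hidden_size / num_attention_heads      # e.g. 4096 / 32 = 128
    gqa_ratio = num_attention_heads / num_key_value_heads   # e.g. 32 / 8 = 4
    approx_params_billions = 12 * hidden_size ** 2 * num_hidden_layers / 1e9
    return {
        "config_head_dimension": head_dimension,
        "config_gqa_ratio": gqa_ratio,
        "uses_gqa": gqa_ratio > 1,
        "config_approx_params_billions": round(approx_params_billions, 6),
    }

# Matches row 2,313 (AlSamCur123/Ministral-8BContinuedFine):
# head_dimension 128, gqa_ratio 4, uses_gqa True, approx_params 7.247757
print(derive_columns(4096, 32, 8, 36))
```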