| index (int64) | modelId (string) | config_model_type (string) | config_architectures (string) | config_vocab_size (string) | config_torch_dtype (string) | config_transformers_version (string) | config_hidden_size (float64) | config_intermediate_size (float64) | config_num_hidden_layers (float64) | config_num_attention_heads (string) | config_num_key_value_heads (float64) | config_hidden_act (string) | config_attention_dropout (float64) | config_use_cache (string) | config_max_position_embeddings (float64) | config_rope_theta (float64) | config_rms_norm_eps (float64) | config_initializer_range (float64) | config_bos_token_id (string) | config_eos_token_id (string) | config_tie_word_embeddings (bool) | config_head_dimension (float64) | config_gqa_ratio (float64) | config_moe_enabled (bool) | config_n_routed_experts (float64) | config_num_experts_per_tok (float64) | is_llama_family (bool) | is_bert_family (bool) | is_gpt_family (bool) | is_t5_family (bool) | is_whisper_family (bool) | is_deepseek_family (bool) | is_mistral_family (bool) | uses_moe (bool) | uses_gqa (bool) | uses_rope (bool) | config_approx_params_billions (float64) | size_category (string) | context_category (string) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1,600 | AbdullahKnn/results_t5base | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.44.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,601 | Abdulmannan/llama-ft-f | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.41.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128256 | 128257 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
1,602 | Abdulmateen/mt5-small-finetuned-amazon-en-es | mt5 | ["MT5ForConditionalGeneration"] | 250112 | null | 4.26.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,603 | AbdulxoliqMirzaev/whisper-uz | whisper | ["WhisperForConditionalGeneration"] | 51865 | null | 4.57.1 | null | null | 6 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
1,604 | Abe13/juniper-all-mpnet-base-v2 | mpnet | ["MPNetModel"] | 30527 | float32 | 4.36.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,605 | Abe13/juniper-bge-large-en-v1.5 | bert | ["BertModel"] | 30522 | float32 | 4.36.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
1,606 | AbeDijkstra/distilbert-base-uncased-finetuned-ner | distilbert | ["DistilBertForTokenClassification"] | 30522 | float32 | 4.46.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
1,607 | AbeShinzo0708/Japanese-Starling-ChatV-7B-exl2 | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | medium |
1,608 | AbeShinzo0708/so_vits_svc4_AbeShinzo | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,609 | Abeee/Synthetic4 | opt | ["OPTForCausalLM"] | 50272 | float16 | 4.35.2 | 2,048 | null | 24 | 32 | null | null | 0 | True | 2,048 | null | null | null | 2 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 1.20796 | medium | medium |
1,610 | AbeerFatima/test8_doc-splitter-llama-3-2-3B-20-epoch | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.52.4 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
1,611 | Abel-05/ppo-Huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,612 | AbelKidane/abelqwen4 | qwen | ["QWenLMHeadModel"] | 151936 | bfloat16 | 4.38.1 | 4,096 | 22,016 | 32 | 32 | null | null | null | True | 32,768 | null | null | 0.02 | null | null | false | 128 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 6.442451 | medium | very_long |
1,613 | AbelKidane/qwen-awq | qwen | ["QWenLMHeadModel"] | 151936 | bfloat16 | 4.38.1 | 4,096 | 22,016 | 32 | 32 | null | null | null | True | 32,768 | null | null | 0.02 | null | null | false | 128 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 6.442451 | medium | very_long |
1,614 | Abgegrieft/autotrain-qiyqv-nm11e | bert | ["BertForSequenceClassification"] | 30000 | float32 | 4.48.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,615 | Abgegrieft/autotrain-xd3au-9vekc | bert | ["BertForSequenceClassification"] | 30000 | float32 | 4.48.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,616 | Abgegrieft/test-toxicity-modell | bert | ["BertForSequenceClassification"] | 30000 | float32 | 4.48.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,617 | Abhay06102003/My-AwesomeFinance_Model | gemma | ["GemmaForCausalLM"] | 256000 | float16 | 4.41.2 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
1,618 | Abhay06102003/My-Awesome_Model | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.41.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
1,619 | AbhayaHanuma/pegasus-samsum-model | pegasus | ["PegasusForConditionalGeneration"] | 96103 | float32 | 4.37.2 | null | null | 16 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 1 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
1,620 | AbhayaHanuma/unsloth_finetune | qwen2_vl | ["Qwen2VLModel"] | 151936 | float16 | 4.51.1 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | null | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
1,621 | Abhaykoul/Friday-Latest | stablelm | ["StableLmForCausalLM"] | 50280 | float16 | 4.43.3 | 2,560 | 6,912 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | null | 0.02 | 50278 | 50279 | false | 80 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 2.516582 | medium | medium |
1,622 | Abhaykoul/Qwen1.5-0.5B-vortex | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.37.2 | 1,024 | 2,816 | 24 | 16 | 16 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151643 | true | 64 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | very_long |
1,623 | Abhaykoul/Wise-Qwen | qwen2 | ["Qwen2ForCausalLM"] | 151646 | float16 | 4.41.2 | 1,536 | 8,960 | 28 | 12 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151644 | 151645 | true | 128 | 6 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.792723 | small | very_long |
1,624 | Abhi-cyber/llama-3-8b-Instruct-bnb-4bit-aiaustin-demo | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
1,625 | Abhi2254/mentalhealth_model | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
1,626 | AbhiKa01/modernbert-llm-router-9k | modernbert | ["ModernBertForSequenceClassification"] | 50368 | float32 | 4.48.0.dev0 | 768 | 1,152 | 22 | 12 | null | null | 0 | null | 8,192 | null | null | 0.02 | 50281 | 50282 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | true | 0.155714 | small | long |
1,627 | AbhiKand/LLaVA-Med-Plus-Plus-PathVQA | llava_llama | ["LlavaLlamaForCausalLM"] | 128257 | bfloat16 | 4.37.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
1,628 | AbhiPemmaraju/finetuned_LAMA2 | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
1,629 | AbhiPemmaraju/gpt_model | gpt2 | ["GPT2LMHeadModel"] | 50259 | float32 | 4.37.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
1,630 | Abhibeats95/flan-t5-small-5000_questions_gt_3_5 | t5 | ["T5ForSequenceClassification"] | 32128 | float32 | 4.35.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,631 | Abhibeats95/flan-t5-small-5000_questions_gt_3_5epochs | t5 | ["T5ForSequenceClassification"] | 32128 | float32 | 4.35.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,632 | Abhikhade/stella_en_400M_v5_aquabotica | new | ["NewModel"] | 30528 | float32 | 4.46.2 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | null | 8,192 | 160,000 | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | true | 0.30199 | small | long |
1,633 | Abhilashvj/CIRCL_website_classifier_test | resnet | ["ResNetForImageClassification"] | null | float32 | 4.29.2 | null | null | null | null | null | relu | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,634 | Abhilashvj/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,635 | Abhilashvj/stable-diffusion-inpainting-copy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,636 | Abhimanyu9539/fine_tuned_t5_small_model | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.38.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,637 | Abhimanyu9539/test-ner | bert | ["BertForTokenClassification"] | 30522 | float32 | 4.49.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,638 | Abhimanyunain/Fine-tuned_Token_Classification_for_Distil-Bert-uncased | distilbert | ["DistilBertForTokenClassification"] | 30522 | float32 | 4.52.4 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
1,639 | Abhinandha/t5-small-finetuned-xsum | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.40.1 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,640 | Abhinav2499/gpt2-token-class | gpt2 | ["GPT2ForTokenClassification"] | 50257 | float32 | 4.29.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
1,641 | Abhinav28/large-v3-hi-test-2-integrated | whisper | ["WhisperForConditionalGeneration"] | 51866 | float16 | 4.36.2 | null | null | 32 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
1,642 | Abhinav7/mistral-7b-andrew-ctcv2-merged | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
1,643 | AbhinavD/Llama-2-7b-chat-finetune-A10-summary | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
1,644 | Abhinavanand21/gita-text-generation-gpt2 | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.44.2 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
1,645 | Abhinay0926/ppo-Huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,646 | Abhinay45/LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,647 | Abhinay45/ML-Agents-Pyramids | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,648 | Abhinay45/ML-Agents-SnowballTarget | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,649 | Abhinay45/ML-Agents-SoccerTwos | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,650 | Abhinay45/PandaReachDense | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
1,651 | Abhinay45/doom_health_gathering_supreme | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,652 | Abhinay45/ppo-huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,653 | Abhinit/HW2-reward | gpt2 | ["GPT2ForSequenceClassification"] | 50257 | float32 | 4.51.3 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
1,654 | Abhinit/mistral-7b-finetuned-wiki | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.51.3 | 768 | 3,072 | 4 | 16 | 8 | silu | 0 | True | 512 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 48 | 2 | false | null | null | false | false | false | false | false | false | true | false | true | true | 0.028312 | small | short |
1,655 | AbhirajAmbasta/simple_food_model | vit | ["ViTForImageClassification"] | null | float32 | 4.52.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | null | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
1,656 | AbhirajAmbasta/simple_qa_model | distilbert | ["DistilBertForQuestionAnswering"] | 30522 | float32 | 4.52.3 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
1,657 | Abhiram4/t5-large-finetuned-lam-to-eng | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.44.0 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | null | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,658 | Abhishek107/AppMetric | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.38.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
1,659 | Abhishek107/Azure_metric_log | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.38.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
1,660 | Abhishek107/Ofd_v1 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.42.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
1,661 | Abhishek291004/LunarLanderV2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,662 | Abhishek9998/llama2-resume-summary | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.33.0.dev0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
1,663 | AbhishekG13/gemma-bnf-qna | gemma | ["GemmaForCausalLM"] | 256000 | float16 | 4.39.3 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
1,664 | Abhishekcr448/Tiny-Hinglish-Chat-21M | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.46.1 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
1,665 | Abhishekgowda/my-awesome-model | llama | ["LlamaForCausalLM"] | 128258 | float32 | 4.40.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128256 | 128257 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
1,666 | AbhishekkV19/bert-base-cased-5k-vul-hyp-exp | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,667 | AbhishekkV19/bert-base-cased-5k-vul-hyp-exp-10ep | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,668 | AbhishekkV19/bert-base-cased-5kvul-10aug-3nsfw-exp-10ep | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.35.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,669 | AbhishekkV19/bert-base-cased-ft5-3ep-s42 | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,670 | AbhishekkV19/bert-base-cased-ft5-6ep-s42 | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,671 | AbhishekkV19/bert-base-cased-ft6-3ep-s42-2 | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,672 | AbhishekkV19/bert-base-cased-ft6-3ep-s42-exp3 | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.36.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,673 | AbhishekkV19/bert-large-cased-5kvul-10aug-3nsfw-exp-10ep | bert | ["BertForSequenceClassification"] | 28996 | float32 | 4.35.2 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | short |
1,674 | Abhishtgarg/whisper-small-en | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.46.3 | null | null | 12 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
1,675 | Abhrant/LoRA_openhathi_samvaad_base_ssf | llama | ["LlamaForCausalLM"] | 48064 | float16 | 4.35.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
1,676 | AbidHasan95/movieHunt3-ner | distilbert | ["DistilBertForTokenClassification"] | 30522 | float32 | 4.21.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
1,677 | AbidHasan95/movieHunt4-ner | distilbert | ["DistilBertForTokenClassification"] | 30522 | float32 | 4.21.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
1,678 | AbidHasan95/smsner_model | bert | [] | 30522 | null | null | 512 | 2,048 | 2 | 8 | null | gelu | null | null | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.006291 | small | short |
1,679 | Abida4ka/fastapi-deploying-model | roberta | ["RobertaForSequenceClassification"] | 50265 | float32 | 4.49.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,680 | Abiggj99/stock-summary-model | bart | ["BartForConditionalGeneration"] | 50265 | float32 | 4.44.1 | null | null | 6 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
1,681 | Abin7/mergekit-slerp-eutevwk | mistral | ["MistralForCausalLM"] | 32002 | bfloat16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
1,682 | Abirami1213/Llama3.2 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.44.2 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
1,683 | Abo3Adel/deepseekmerge-7b-ties | llama | ["LlamaForCausalLM"] | 102400 | float16 | 4.38.1 | 4,096 | 11,008 | 30 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.000001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.039798 | medium | medium |
1,684 | Abobus227/bert-finetuned-ner41 | bert | ["BertForTokenClassification"] | 28996 | float32 | 4.40.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,685 | AbrahamSanders/opt-2.7b-realtime-chat | opt | ["OPTForCausalLM"] | 50265 | float32 | 4.24.0 | 2,560 | null | 32 | 32 | null | null | 0 | True | 2,048 | null | null | null | 2 | 2 | null | 80 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 2.516582 | medium | medium |
1,686 | AbrorBalxiyev/text-classification | xlm-roberta | ["XLMRobertaForSequenceClassification"] | 250002 | float32 | 4.47.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
1,687 | AbrorBalxiyev/whisper-small-dv | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.40.1 | null | null | 12 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
1,688 | Abrumu/output | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,689 | Absie/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,690 | AbstractPerspective/Phi-2_GDPR_Mix_DARE | phi | ["PhiForCausalLM"] | 51200 | bfloat16 | 4.37.0 | 2,560 | 10,240 | 32 | 32 | 32 | gelu_new | 0 | True | 2,048 | 10,000 | null | 0.02 | 50256 | 50256 | false | 80 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 2.516582 | medium | medium |
1,691 | AbstractPerspective/Phi-2_MoE_test | phi-msft | ["PhiForCausalLM"] | 51200 | float16 | 4.37.0 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | null | null | false | null | null | false | 2 | 1 | false | false | false | false | false | false | false | true | false | false | null | null | null |
1,692 | AbstractPerspective/phi-2_base | phi-msft | ["PhiForCausalLM"] | 51200 | float16 | 4.35.2 | null | null | null | null | null | null | null | null | null | null | null | 0.02 | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,693 | AbstractPhil/omega-vit-l-reformed-fp32 | clip | ["CLIPModel"] | null | float32 | 4.44.2 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
1,694 | AbstractPhil/t5xxl-unchained | t5 | [] | 69328 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
1,695 | AbstractQbit/electra_large_imdb_regression_htsplice | electra | ["ElectraForSequenceClassification"] | 30522 | float32 | 4.31.0 | 1,024 | 4,096 | 24 | 16 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | short |
1,696 | Abstraction-of-Thought/Llama-3-8B-AoT | llama | ["LLaMAForCausalLM"] | 128384 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 4,096 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | medium |
1,697 | Abstractly/pegasus-finetuned-abstractive-summarization | pegasus | ["PegasusForConditionalGeneration"] | 96103 | float32 | 4.46.3 | null | null | 16 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 1 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
1,698 | Absurdist/my-finetuned-smolLM | llama | ["LlamaForCausalLM"] | 49152 | bfloat16 | 4.47.1 | 576 | 1,536 | 30 | 9 | 3 | silu | 0 | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | true | 64 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.119439 | small | medium |
1,699 | Abubakkar-01/urdu-abusive-detector | bert | ["BertForSequenceClassification"] | 197285 | float32 | 4.17.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
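Several columns are derived rather than copied from each model's `config.json`. Judging from the rows above, `config_head_dimension` matches `hidden_size / num_attention_heads`, `config_gqa_ratio` matches `num_attention_heads / num_key_value_heads`, and `config_approx_params_billions` matches `12 * hidden_size^2 * num_hidden_layers / 1e9` (e.g. the Llama-3-8B-style rows give 12 × 4096² × 32 ≈ 6.44B). The sketch below recomputes these features from a raw config dict; the formulas are inferred from the displayed rows and are an assumption, not the dataset's documented pipeline.

```python
# Minimal sketch (assumption, not the dataset's actual build script): recompute the
# derived columns from a transformers-style config dict. Formulas are inferred from
# the rows above (4096 / 32 heads -> head_dim 128; 12 * 4096^2 * 32 -> ~6.44B params).
def derive_features(cfg: dict) -> dict:
    hidden = cfg.get("hidden_size")
    layers = cfg.get("num_hidden_layers")
    heads = cfg.get("num_attention_heads")
    kv_heads = cfg.get("num_key_value_heads")

    head_dim = hidden / heads if hidden and heads else None
    gqa_ratio = heads / kv_heads if heads and kv_heads else None
    # Rough dense-transformer estimate: ~12 * hidden^2 parameters per layer
    # (attention + MLP), which reproduces config_approx_params_billions for the
    # Llama/Mistral/BERT rows shown above.
    params_b = 12 * hidden * hidden * layers / 1e9 if hidden and layers else None

    return {
        "config_head_dimension": head_dim,
        "config_gqa_ratio": gqa_ratio,
        "uses_gqa": bool(gqa_ratio and gqa_ratio > 1),
        "config_approx_params_billions": round(params_b, 6) if params_b else None,
    }

# Example with the Llama-3-8B-style values from the rows above:
print(derive_features({"hidden_size": 4096, "num_hidden_layers": 32,
                       "num_attention_heads": 32, "num_key_value_heads": 8}))
# -> {'config_head_dimension': 128.0, 'config_gqa_ratio': 4.0, 'uses_gqa': True,
#     'config_approx_params_billions': 6.442451}
```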