index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3,800 | AntoDono/DialoGPT-Harry | gpt2 | ["GPT2LMHeadModel"] | 50257 | float32 | 4.18.0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
3,801 | Antoine101/detr-resnet-50-dc5-finetuned-license-plates | detr | ["DetrForObjectDetection"] | null | float32 | 4.53.0 | null | null | 6 | null | null | null | 0 | null | 1,024 | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
3,802 | AntoineD/MiniLM_classification_tools_fr | bert | ["BertForSequenceClassification"] | 250037 | float32 | 4.34.0 | 384 | 1,536 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.021234 | small | short |
3,803 | AntoineD/MiniLM_uncased_classification_tools_classifier-only_fr | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.34.0 | 384 | 1,536 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.021234 | small | short |
3,804 | AntoineD/MiniLM_uncased_classification_tools_fr | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.34.0 | 384 | 1,536 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 32 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.021234 | small | short |
3,805 | AntoineD/TinyLlama-1.1B_vocal_multimodal_chatbot_fr_V2 | llama | ["LlamaForCausalLM"] | 32001 | float32 | 4.34.0 | 2,048 | 5,632 | 22 | 32 | 4 | silu | null | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
3,806 | AntoineD/camembert_ccnet_classification_tools_classifier-only_fr | camembert | ["CamembertForSequenceClassification"] | 32005 | float32 | 4.34.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,807 | AntoineD/camembert_ccnet_classification_tools_classifier-only_fr-p0.2 | camembert | ["CamembertForSequenceClassification"] | 32005 | float32 | 4.34.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,808 | AntoineD/camembert_ccnet_classification_tools_classifier-only_fr_lr1e-3_V3 | camembert | ["CamembertForSequenceClassification"] | 32005 | float32 | 4.34.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,809 | AntoineD/camembert_ccnet_classification_tools_fr | camembert | ["CamembertForSequenceClassification"] | 32005 | float32 | 4.34.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,810 | AntoineD/camembert_question_answering_tools_fr | camembert | ["CamembertForQuestionAnswering"] | 32005 | float32 | 4.34.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 5 | 6 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,811 | AntoineD/camembert_squadFR_question_answering_tools_fr | camembert | ["CamembertForQuestionAnswering"] | 32005 | float32 | 4.34.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 5 | 6 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,812 | AntoineGourru/Drome_2_2e5 | mistral | ["MistralForCausalLM"] | 32000 | float32 | 4.35.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
3,813 | AntoineGourru/Enccr_balanced | roberta | ["RobertaForSequenceClassification"] | 32768 | float32 | 4.42.4 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,814 | AntoineGourru/Mistral_drome_full | mistral | ["MistralForCausalLM"] | 32000 | bfloat16 | 4.38.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
3,815 | AntoineGourru/f_x | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.51.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,816 | AntoineGourru/g_x_new | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.51.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,817 | AntoineSchutz/BestMCQA | llama | ["LlamaForCausalLM"] | 32000 | float32 | 4.40.2 | 2,048 | 5,632 | 22 | 32 | 4 | silu | 0 | True | 2,048 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 64 | 8 | false | null | null | true | false | false | false | false | false | false | false | true | true | 1.107296 | medium | medium |
3,818 | Antoinegg1/Llama-2-13b-hf_0.25 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,819 | Antoinegg1/Llama-2-13b-hf_0.25to0.5 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,820 | Antoinegg1/Llama-2-13b-hf_0.25to0.75 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,821 | Antoinegg1/Llama-2-13b-hf_0.5 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,822 | Antoinegg1/Llama-2-13b-hf_0.5to0.25 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,823 | Antoinegg1/Llama-2-13b-hf_0.5to0.75 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,824 | Antoinegg1/Llama-2-13b-hf_0.75to0.5 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,825 | Antoinegg1/Meta-Llama-3-8B_0.25 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,826 | Antoinegg1/Meta-Llama-3-8B_0.25to0.75 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,827 | Antoinegg1/Meta-Llama-3-8B_0.5 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,828 | Antoinegg1/Meta-Llama-3-8B_0.5to0.25 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,829 | Antoinegg1/Meta-Llama-3-8B_0.75 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,830 | Antoinegg1/Meta-Llama-3-8B_0.75to0.25 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,831 | Antoinegg1/PM-14B-10k | qwen2 | ["Qwen2ForCausalLM"] | 151646 | float16 | 4.40.0 | 5,120 | 13,696 | 40 | 40 | 40 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 128245 | 151643 | false | 128 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 12.582912 | large | very_long |
3,832 | Antoinegg1/llama-2-13b_truthful_0.25 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,833 | Antoinegg1/llama-2-13b_truthful_0.5 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,834 | Antoinegg1/llama-2-13b_truthful_0.5to0.25_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,835 | Antoinegg1/llama-2-13b_truthful_0.5to0.75_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
3,836 | Antoinegg1/llama-2-7b-0.25 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,837 | Antoinegg1/llama-2-7b-0.25to0.5 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,838 | Antoinegg1/llama-2-7b-0.25to0.75 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,839 | Antoinegg1/llama-2-7b-0.5 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,840 | Antoinegg1/llama-2-7b-0.5to0.25 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,841 | Antoinegg1/llama-2-7b-0.5to0.75 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,842 | Antoinegg1/llama-2-7b-0.75 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,843 | Antoinegg1/llama-2-7b-0.75to0.5 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,844 | Antoinegg1/llama-2-7b_safe_0.25to0.5_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,845 | Antoinegg1/llama-2-7b_safe_0.25to0.75_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,846 | Antoinegg1/llama-2-7b_safe_0.5to0.25_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,847 | Antoinegg1/llama-2-7b_safe_0.75to0.25_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,848 | Antoinegg1/llama-2-7b_truthful_0.25to0.5_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,849 | Antoinegg1/llama-2-7b_truthful_0.5to0.75_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,850 | Antoinegg1/llama-2-7b_truthful_0.75 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,851 | Antoinegg1/llama-2-7b_truthful_0.75to0.5_1 | llama | ["LlamaForCausalLM"] | 32001 | float16 | 4.40.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,852 | Antoinegg1/llama-3-8b_safe_0.5 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,853 | Antoinegg1/llama-3-8b_safe_0.75 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
3,854 | AntonAndreenko/gemma-3-1B-it-ot_expert | gemma3_text | ["Gemma3ForCausalLM"] | 262144 | bfloat16 | 4.50.0.dev0 | 1,152 | 6,912 | 26 | 4 | 1 | null | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 2 | [1, 106] | null | 288 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.414056 | small | very_long |
3,855 | AntonKorznikov/facebook-opt-350m-SFT-korz14 | opt | ["OPTForCausalLM"] | 50272 | float16 | 4.31.0 | 1,024 | null | 24 | 16 | null | null | 0 | True | 2,048 | null | null | null | 2 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | medium |
3,856 | AntonLu/Pyramids | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
3,857 | AntonShchur/llama3.1-8B-math-3000-v3-gguf | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
3,858 | AntonV/mamba2-2.7b-hf | mamba2 | [] | 50288 | null | 4.46.0.dev0 | 2,560 | null | 64 | null | null | silu | null | True | null | null | null | 0.1 | 0 | 0 | true | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 5.033165 | medium | null |
3,859 | AntonVoronko/poca-SoccerTwos | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
3,860 | AntonVoronko/ppo-Huggy | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
3,861 | AntonXue/codebert-base | roberta | ["RobertaForCausalLM"] | 50265 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,862 | Antonello003/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
3,863 | Antonio06188/MCQA-SciQ | t5 | ["T5ForConditionalGeneration"] | 32128 | float32 | 4.41.2 | null | null | null | null | null | null | null | True | null | null | null | null | null | 1 | false | null | null | false | null | null | false | false | false | true | false | false | false | false | false | false | null | null | null |
3,864 | Antonio49/Personal | bert | ["BertForQuestionAnswering"] | 31002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,865 | Antonio88/TaliML-PHI3-128K-ITA-V.1.0.FINAL | phi3 | ["Phi3ForCausalLM"] | 32064 | bfloat16 | 4.41.0.dev0 | 3,072 | 8,192 | 32 | 32 | 32 | silu | 0 | False | 131,072 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 96 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.623879 | medium | very_long |
3,866 | AntonioGSC/bert-hate-speech-test | bert | ["BertForSequenceClassification"] | 30522 | float32 | 4.50.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,867 | AntonioTH/Layout-finetuned-fr-model-100instancesV1 | layoutlmv2 | ["LayoutLMv2ForQuestionAnswering"] | 250002 | float32 | 4.38.2 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,868 | AntonioTH/Layout-finetuned-fr-model-300instances100-4epochs-5e5lr-V2 | layoutlmv2 | ["LayoutLMv2ForQuestionAnswering"] | 250002 | float32 | 4.48.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,869 | AntonioTH/Layout-finetuned-fr-model-50instances20-100epochs-5e-05lr-V2 | layoutlmv2 | ["LayoutLMv2ForQuestionAnswering"] | 250002 | float32 | 4.48.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | null | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,870 | AntonioZhang/Candy_detector | detr | ["DetrForObjectDetection"] | null | float32 | 4.35.0 | null | null | 6 | null | null | null | 0 | null | 1,024 | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
3,871 | Antont2024/DeepSeek-R1-Fine-tuned-Sales-KPI | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.47.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
3,872 | Anu99/gpt2-poem-generator-new | gpt_neo | ["GPTNeoForCausalLM"] | 50259 | float32 | 4.42.4 | 768 | null | null | null | null | null | 0 | True | 2,048 | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | medium |
3,873 | Anu99/poem-generator | gpt_neo | ["GPTNeoForCausalLM"] | 50259 | float32 | 4.41.2 | 768 | null | null | null | null | null | 0 | True | 2,048 | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | medium |
3,874 | AnuAna14/BRAG-Llama-3.1-8b-v0.1-Q4_0-GGUF | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.43.1 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
3,875 | Anubhav-tripathi/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
3,876 | Anudev08/model_3 | gpt2 | ["GPT2ForSequenceClassification"] | 50257 | float32 | 4.15.0 | null | null | null | null | null | null | null | True | null | null | null | 0.02 | 50256 | 50256 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
3,877 | Anujgr8/Whisper-Anuj-Medum-Marathi | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.41.2 | null | null | 24 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
3,878 | Anujgr8/Whisper-Anuj-Medum-Medium-lalo | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.42.3 | null | null | 24 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
3,879 | Anujgr8/Whisper-Anuj-small-Tamil | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.41.2 | null | null | 12 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
3,880 | Anujgr8/w2v-bert-malawi | wav2vec2-bert | ["Wav2Vec2BertForCTC"] | 73 | float32 | 4.48.3 | 1,024 | 4,096 | 24 | 16 | null | swish | 0 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.30199 | small | null |
3,881 | Anujgr8/wav2vec2-Odia-large-xlsr53 | wav2vec2 | ["Wav2Vec2ForCTC"] | 71 | float32 | 4.41.2 | 1,024 | 4,096 | 24 | 16 | null | gelu | 0 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.30199 | small | null |
3,882 | Anujgr8/wav2vec2-base-Malawi-small | wav2vec2 | ["Wav2Vec2ForCTC"] | 75 | float32 | 4.44.2 | 768 | 3,072 | 12 | 12 | null | gelu | 0 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
3,883 | Anujgr8/wav2vec2-base-Tamil-large | wav2vec2 | ["Wav2Vec2ForCTC"] | 58 | float32 | 4.41.2 | 768 | 3,072 | 12 | 12 | null | gelu | 0 | null | null | null | null | 0.02 | 1 | 2 | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | null |
3,884 | Anukriti13/OpenHermes-2.5-Mistral-7B-DSStackexchange-dpoV1 | mistral | ["MistralForCausalLM"] | 32002 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 32000 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
3,885 | Anupam199949/LLama2finetunedby31 | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,886 | AnuradhaPoddar/seq_classification | bart | ["BartForSequenceClassification"] | 50265 | float32 | 4.38.0 | null | null | 12 | null | null | null | 0 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
3,887 | Anurag-Tiwari/hindi_updated-llama-model | llama | ["LlamaForCausalLM"] | 172432 | float16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
3,888 | Anurag-Tiwari/hindi_updated-llama-tokenizer | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.43.0.dev0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
3,889 | Anurag0961/try-out-model-amc1 | distilbert | ["DistilBertForSequenceClassification"] | 30522 | float32 | 4.26.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
3,890 | AnuragVohra/testTinyLlama | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
3,891 | Anuragdhirubhai/my_awesome_qa_model | distilbert | ["DistilBertForQuestionAnswering"] | 30522 | null | 4.29.2 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
3,892 | Anusha64/mistral-7b-v0.2-mar27-v0 | mistral | ["MistralForCausalLM"] | 32000 | float16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | true | false | true | true | 6.442451 | medium | very_long |
3,893 | AnushaPalle/my_awesome_open_llama_3b_clm-model | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 3,200 | 8,640 | 26 | 32 | 32 | silu | null | True | 2,048 | null | 0.000001 | 0.02 | 1 | 2 | false | 100 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 3.19488 | medium | medium |
3,894 | Anushka-103/llama-2-7b-ilm | llama | ["LlamaForCausalLM"] | 32000 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | null | True | 4,096 | null | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,895 | Anushka-103/llama-2-7b-ilm-kan | llama | ["LlamaForCausalLM"] | 49600 | float16 | 4.31.0 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | False | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | medium |
3,896 | AnveshSK/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
3,897 | Anwaarma/Improved-Arabert-twitter-sentiment-No-dropout | bert | ["BertForSequenceClassification"] | 64000 | float32 | 4.34.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,898 | Anwaarma/Improved-Arabert-twitter-sentiment-No-dropout-Twitter | bert | ["BertForSequenceClassification"] | 64000 | float32 | 4.34.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
3,899 | Anwaarma/Improved-Arabert-twitter-sentiment-Twitter | bert | ["BertForSequenceClassification"] | 64000 | float32 | 4.34.1 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
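The derived columns above follow directly from the raw config fields: `config_head_dimension` matches `hidden_size / num_attention_heads` (e.g. 2048/32 = 64 for the TinyLlama row), `config_gqa_ratio` matches `num_attention_heads / num_key_value_heads` (32/4 = 8), and `config_approx_params_billions` matches the standard 12 · num_hidden_layers · hidden_size² transformer estimate (12 · 32 · 4096² ≈ 6.442B for the 7B-class llama rows). Below is a minimal pandas sketch that recomputes these columns and runs an example filter; it assumes the split has been exported to a file named `model_configs.parquet` (a hypothetical path) with the column names shown in the header, and that `uses_gqa` simply means a GQA ratio above 1, which is consistent with every row shown here but is an inference, not documented behavior.

```python
import pandas as pd

# Hypothetical export of this split; the real dataset may live elsewhere.
df = pd.read_parquet("model_configs.parquet")

# config_num_attention_heads is stored as a string column in the viewer,
# so coerce it to numeric before deriving anything from it.
heads = pd.to_numeric(df["config_num_attention_heads"], errors="coerce")
kv_heads = df["config_num_key_value_heads"]

# Recompute the derived geometry columns from the raw config fields.
df["head_dimension_check"] = df["config_hidden_size"] / heads
df["gqa_ratio_check"] = heads / kv_heads

# Assumed definition: GQA is in use whenever there are fewer KV heads than
# query heads (ratio > 1). NaN ratios compare as False, matching the
# all-null rows above.
df["uses_gqa_check"] = df["gqa_ratio_check"] > 1

# Parameter estimate that reproduces config_approx_params_billions for the
# rows above: 12 * layers * hidden_size^2 (e.g. 12 * 32 * 4096^2 = 6.442B).
df["approx_params_billions_check"] = (
    12 * df["config_num_hidden_layers"] * df["config_hidden_size"] ** 2 / 1e9
)

# Example query: float16 llama-family models with GQA and >= 8k context.
subset = df[
    df["is_llama_family"]
    & df["uses_gqa"]
    & (df["config_torch_dtype"] == "float16")
    & (df["config_max_position_embeddings"] >= 8192)
]
print(subset[["modelId", "config_gqa_ratio", "config_approx_params_billions"]])
```

On the rows shown in this page, that filter would match the `Antoinegg1/Meta-Llama-3-8B_*` and `Antoinegg1/llama-3-8b_safe_*` entries; the bfloat16 Llama-3 variants and the non-GQA Llama-2 rows would be excluded.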