model stringlengths 13 48 | base_model stringclasses 3 values | revision stringlengths 4 40 | private bool 1 class | precision stringclasses 4 values | weight_type stringclasses 2 values | status stringclasses 6 values | submitted_time timestamp[s]date 2024-09-20 21:47:19 2026-02-20 15:41:14 | model_type stringclasses 8 values | likes int64 0 1.31k | params float64 1 70 β | license stringclasses 10 values | architecture stringclasses 13 values | sender stringclasses 16 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
01-ai/Yi-1.5-9B-Chat | main | false | bfloat16 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 8.83 | custom | mariagrandury | ||
01-ai/Yi-1.5-9B | main | false | bfloat16 | Original | FINISHED | 2024-09-21T10:47:19 | π’ : pretrained | 44 | 7.25 | apache-2.0 | LlamaForCausalLM | mariagrandury | |
AIDC-AI/Marco-LLM-ES | main | false | bfloat16 | Original | FAILED | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 7.62 | custom | ChenyangLyu | ||
Almawave/Velvet-14B | main | false | bfloat16 | Original | RUNNING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 14.1 | custom | mariagrandury | ||
Almawave/Velvet-2B | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 14.1 | custom | mariagrandury | ||
BSC-LT/ALIA-40b-instruct-2601 | 84b2d90faf67fcc7356a32d82c2482cff744e808 | false | bfloat16 | Original | FINISHED | 2026-02-18T09:08:59 | π¦ : RL-tuned | 3 | 40.434 | apache-2.0 | LlamaForCausalLM | juliafalcao | |
BSC-LT/salamandra-2b-instruct | main | false | bfloat16 | Original | 2024-11-03T13:14:16 | instruction-tuned | 0 | 2.25 | custom | mariagrandury | |||
BSC-LT/salamandra-2b | 08316c1f2051a61dadb556a63f7a6796a3d1dbed | false | bfloat16 | Original | FINISHED | 2024-10-02T12:03:38 | π’ : pretrained | 5 | 2.253 | apache-2.0 | LlamaForCausalLM | mariagrandury | |
BSC-LT/salamandra-7b-instruct | main | false | bfloat16 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 7.77 | custom | mariagrandury | ||
BSC-LT/salamandra-7b | 968d2c40c21134ba201122737be98556a7da6727 | false | bfloat16 | Original | FINISHED | 2024-10-03T15:19:26 | π’ : pretrained | 8 | 7.768 | apache-2.0 | LlamaForCausalLM | mariagrandury | |
CohereForAI/aya-expanse-32b | main | false | float16 | Original | FAILED | 2026-02-20T14:08:03 | instruction-tuned | 0 | 32.3 | custom | mariagrandury | ||
CohereForAI/aya-expanse-8b | b9848575c8731981dfcf2e1f3bfbcb917a2e585d | false | float16 | Original | FINISHED | 2024-10-24T17:12:46 | π’ : pretrained | 44 | 8.028 | cc-by-nc-4.0 | CohereForCausalLM | mariagrandury | |
CohereLabs/c4ai-command-a-03-2025 | main | false | bfloat16 | Original | RUNNING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 3 | custom | mariagrandury | ||
CohereLabs/tiny-aya-base | main | false | nan | Original | FAILED | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 3 | custom | mariagrandury | ||
CohereLabs/tiny-aya-global | main | false | nan | Original | FAILED | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 3 | custom | mariagrandury | ||
CohereLabs/tiny-aya-water | main | false | nan | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 3 | custom | mariagrandury | ||
Danielbrdz/Barcenas-27b | b29599acbef4cde587ea49725d7d825063bad077 | false | float32 | Original | PENDING | 2024-10-02T17:48:43 | πΆ : fine-tuned | 0 | 27.227 | gemma | Gemma2ForCausalLM | Danielbrdz | |
HiTZ/Latxa-Llama-3.1-8B-Instruct | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | null | custom | mariagrandury | ||
HiTZ/Latxa-Llama-3.1-8B | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | null | custom | mariagrandury | ||
HiTZ/latxa-13b-v1.2 | c727529f9e3f0d5fc2d5082ea3b229f5f96684b7 | false | bfloat16 | Original | PENDING | 2024-10-24T13:50:13 | π’ : pretrained | 1 | 13 | llama2 | LlamaForCausalLM | Iker | |
HiTZ/latxa-70b-v1.2 | 4a6a823da0f796248c3eebbea68430c147b25b62 | false | bfloat16 | Original | PENDING | 2024-10-24T13:50:24 | π’ : pretrained | 0 | 70 | llama2 | LlamaForCausalLM | Iker | |
HiTZ/latxa-7b-v1.2 | 5a798bd370d604bab91645c12428b2fbf35525d2 | false | bfloat16 | Original | FINISHED | 2024-10-24T13:50:31 | π’ : pretrained | 1 | 7 | llama2 | LlamaForCausalLM | Iker | |
HuggingFaceTB/SmolLM2-1.7B-Instruct | main | false | bfloat16 | Original | SUCCESS | 2025-07-21T20:10:27 | instruction-tuned | 0 | 1.71 | custom | mariagrandury | ||
HuggingFaceTB/SmolLM2-1.7B | main | false | bfloat16 | Original | SUCCESS | 2025-06-21T13:46:00 | π’ : pretrained | 0 | 1.71 | custom | mariagrandury | ||
HuggingFaceTB/SmolLM3-3B-Base | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | π’ : pretrained | 0 | 3.08 | custom | mariagrandury | ||
HuggingFaceTB/SmolLM3-3B | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | instruction-tuned | 0 | 3.08 | custom | mariagrandury | ||
IIC/RigoChat-7b-v2 | cf2568400d142850d5f0295b3b2bbd798c2fd60e | false | bfloat16 | Original | FINISHED | 2025-01-16T20:01:41 | π¦ : RL-tuned | 8 | 7.616 | other | Qwen2ForCausalLM | albarji | |
Iker/Llama-3-Instruct-Neurona-8b-v2 | 92b369f60dae263d14f496e7215ee89c80473660 | false | bfloat16 | Original | PENDING | 2024-09-26T08:53:03 | β : instruction-tuned | 2 | 8.03 | llama3 | LlamaForCausalLM | Iker | |
LenguajeNaturalAI/leniachat-gemma-2b-v0 | main | false | bfloat16 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 2.51 | custom | mariagrandury | ||
LenguajeNaturalAI/leniachat-qwen2-1.5B-v0 | 031a2efebb3cc1150e46f42ba0bea9fa7b855436 | false | bfloat16 | Original | FINISHED | 2024-11-27T06:30:13 | β : instruction-tuned | 19 | 1.543 | apache-2.0 | Qwen2ForCausalLM | avacaondata | |
Nos-PT/Llama-Carvalho-GL | 009c2522b8f41a0b0f93645a1382404d969fcd79 | false | float16 | Original | PENDING | 2025-04-01T10:56:12 | π’ : pretrained | 0 | 8.03 | llama3.1 | LlamaForCausalLM | pablo-rf | |
Nos-PT/Llama-Carvalho-PT-GL | main | false | float16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 8.03 | custom | pablo-rf | ||
Qwen/Qwen2.5-1.5B-Instruct | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | instruction-tuned | 0 | 1.54 | custom | mariagrandury | ||
Qwen/Qwen2.5-1.5B | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | π’ : pretrained | 0 | 1.54 | custom | mariagrandury | ||
Qwen/Qwen2.5-14B-Instruct-GPTQ-Int8 | main | false | float16 | Original | SUCCESS | 2025-07-21T20:10:27 | instruction-tuned | 0 | 4.99 | custom | mariagrandury | ||
Qwen/Qwen2.5-32B-Instruct-GPTQ-Int4 | main | false | float16 | Original | PENDING | 2025-06-21T18:13:32 | instruction-tuned | 0 | 5.74 | custom | mariagrandury | ||
Qwen/Qwen2.5-3B-Instruct | main | false | bfloat16 | Original | SUCCESS | 2025-06-21T13:46:00 | instruction-tuned | 0 | 3.09 | custom | mariagrandury | ||
Qwen/Qwen2.5-3B | main | false | bfloat16 | Original | SUCCESS | 2025-06-21T13:46:00 | π’ : pretrained | 0 | 3.09 | custom | mariagrandury | ||
Qwen/Qwen2.5-7B-Instruct | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | instruction-tuned | 0 | 7.62 | custom | mariagrandury | ||
Qwen/Qwen2.5-7B | d149729398750b98c0af14eb82c78cfe92750796 | false | bfloat16 | Original | FINISHED | 2024-11-25T13:02:51 | π’ : pretrained | 71 | 7.616 | apache-2.0 | Qwen2ForCausalLM | ChenyangLyu | |
Qwen/Qwen3-1.7B-Base | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 2.03 | custom | mariagrandury | ||
Qwen/Qwen3-1.7B | main | false | bfloat16 | Original | SUCCESS | 2025-07-21T20:10:27 | instruction-tuned | 0 | 2.03 | custom | mariagrandury | ||
Qwen/Qwen3-14B-AWQ | main | false | float16 | Original | RUNNING | 2025-07-21T20:10:27 | instruction-tuned | 0 | 3.32 | custom | mariagrandury | ||
Qwen/Qwen3-14B-Base | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 14.8 | custom | mariagrandury | ||
Qwen/Qwen3-14B | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 14.8 | custom | mariagrandury | ||
Qwen/Qwen3-32B-Base | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 32.8 | custom | mariagrandury | ||
Qwen/Qwen3-32B | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 32.8 | custom | mariagrandury | ||
Qwen/Qwen3-4B-Base | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 4.02 | custom | mariagrandury | ||
Qwen/Qwen3-4B | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | instruction-tuned | 0 | 4.02 | custom | mariagrandury | ||
Qwen/Qwen3-8B-Base | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | instruction-tuned | 0 | 8.19 | custom | mariagrandury | ||
Qwen/Qwen3-8B | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | instruction-tuned | 0 | 8.19 | custom | mariagrandury | ||
TheBloke/Llama-2-13B-chat-GPTQ | main | false | float16 | Original | PENDING | 2025-06-21T18:13:32 | instruction-tuned | 0 | 2.03 | custom | mariagrandury | ||
allenai/OLMo-2-1124-7B-Instruct | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | instruction-tuned | 0 | 7.3 | custom | mariagrandury | ||
allenai/OLMo-2-1124-7B | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | π’ : pretrained | 0 | 7.3 | custom | mariagrandury | ||
bertin-project/Gromenauer-7B-Instruct | 4cee56ca72ee98ec67f4532010851c14d6b0d4e9 | false | float32 | Original | FINISHED | 2024-09-25T21:55:47 | β : instruction-tuned | 2 | 7.242 | apache-2.0 | MistralForCausalLM | alvp | |
bertin-project/Gromenauer-7B | aaff2b37b64b0cdf4ed5694ea5ee483b898a6c77 | false | float32 | Original | FINISHED | 2024-09-25T21:55:26 | π’ : pretrained | 2 | 7.242 | apache-2.0 | MistralForCausalLM | alvp | |
bertin-project/bertin-gpt-j-6B | main | false | float32 | Original | FINISHED | 2024-09-21T08:47:19 | π’ : pretrained | 17 | 6 | apache-2.0 | GPTJForCausalLM | mariagrandury | |
deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | instruction-tuned | 0 | 1.78 | custom | mariagrandury | ||
deepseek-ai/DeepSeek-R1-Distill-Qwen-7B | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | instruction-tuned | 0 | 7.62 | custom | mariagrandury | ||
google/gemma-2-2b-it | main | false | bfloat16 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 2.61 | custom | mariagrandury | ||
google/gemma2-2b | main | false | float32 | Original | FINISHED | 2024-09-20T21:47:19 | π’ : pretrained | 342 | 2.61 | gemma | Gemma2ForCausalLM | mariagrandury | |
google/gemma-2-9b-it | main | false | bfloat16 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 9.24 | custom | mariagrandury | ||
google/gemma-2-9b | main | false | float32 | Original | FINISHED | 2024-11-03T13:14:16 | π’ : pretrained | 0 | 9.24 | custom | mariagrandury | ||
google/gemma-3-12b-it | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 12.2 | custom | mariagrandury | ||
google/gemma-3-12b-pt | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 12.2 | custom | mariagrandury | ||
google/gemma-3-1b-it | main | false | bfloat16 | Original | SUCCESS | 2025-07-21T20:10:27 | instruction-tuned | 0 | 1 | custom | mariagrandury | ||
google/gemma-3-1b-pt | main | false | bfloat16 | Original | SUCCESS | 2026-01-19T16:50:33 | π’ : pretrained | 0 | 1 | custom | mariagrandury | ||
google/gemma-3-27b-it | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 27.4 | custom | mariagrandury | ||
google/gemma-3-27b-pt | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 27.4 | custom | mariagrandury | ||
google/gemma-3-4b-it | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | instruction-tuned | 0 | 4.3 | custom | mariagrandury | ||
google/gemma-3-4b-pt | main | false | bfloat16 | Original | SUCCESS | 2025-07-25T13:14:02 | π’ : pretrained | 0 | 4.3 | custom | mariagrandury | ||
gplsi/Aitana-6.3B | main | false | bfloat16 | Original | FINISHED | 2024-09-21T05:19:20 | π’ : pretrained | 0 | 6.25 | apache-2.0 | BloomForCausalLM | mariagrandury | |
ibm-granite/granite-3.0-8b-base | 23357b69523bd98523496a5aba1f48bdea04a137 | false | float32 | Original | PENDING | 2024-12-03T11:36:41 | π’ : pretrained | 21 | 8.171 | apache-2.0 | GraniteForCausalLM | asier-gutierrez | |
ibm-granite/granite-3.0-8b-instruct | 8fe1e202a17f7763bd0af471253e00cc846d1c05 | false | float32 | Original | PENDING | 2024-12-03T11:36:17 | β : instruction-tuned | 180 | 8.171 | apache-2.0 | GraniteForCausalLM | asier-gutierrez | |
ibm-granite/granite-4.0-h-tiny-base | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | null | custom | mariagrandury | ||
ibm-granite/granite-4.0-h-tiny | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | null | custom | mariagrandury | ||
internlm/internlm3-8b-instruct | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | null | custom | mariagrandury | ||
marianbasti/Llama-2-13b-fp16-alpaca-spanish | 1f96ecd9c3f05e50c7d865f7718dc80b7c7369d2 | false | float32 | Original | FAILED | 2024-09-25T16:08:28 | πΆ : fine-tuned | 1 | 13 | llama2 | LlamaForCausalLM | marianbasti | |
meta-llama/Llama-3.2-1B-Instruct | main | false | bfloat16 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 1.24 | custom | mariagrandury | ||
meta-llama/Llama-3.2-1B | 221e3535e1ac4840bdf061a12b634139c84e144c | false | bfloat16 | Original | FINISHED | 2024-10-10T14:36:19 | π’ : pretrained | 532 | 1.24 | llama3.2 | LlamaForCausalLM | mariagrandury | |
meta-llama/Llama-3.2-3B-Instruct | main | false | bfloat16 | Original | SUCCESS | 2025-06-21T13:46:00 | instruction-tuned | 0 | 3.21 | custom | mariagrandury | ||
meta-llama/Llama-3.2-3B | main | false | bfloat16 | Original | SUCCESS | 2025-06-21T13:46:00 | π’ : pretrained | 0 | 3.21 | custom | mariagrandury | ||
meta-llama/Meta-Llama-3.1-8B-Instruct | main | false | bfloat16 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 8.03 | custom | mariagrandury | ||
meta-llama/Meta-Llama-3.1-8B | main | false | bfloat16 | Original | FINISHED | 2024-09-21T09:47:19 | π’ : pretrained | 877 | 8.03 | llama3.1 | LlamaForCausalLM | mariagrandury | |
microsoft/Phi-3.5-mini-instruct | af0dfb8029e8a74545d0736d30cb6b58d2f0f3f0 | false | float32 | Original | PENDING | 2024-12-03T11:35:21 | β : instruction-tuned | 664 | 3.821 | mit | Phi3ForCausalLM | asier-gutierrez | |
microsoft/Phi-4-mini-instruct | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | 14.7 | custom | mariagrandury | ||
microsoft/phi-1_5 | main | false | float16 | Original | FINISHED | 2024-09-21T12:47:19 | π’ : pretrained | 1,310 | 1.42 | mit | PhiForCausalLM | mariagrandury | |
microsoft/phi-4 | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | 14.7 | custom | mariagrandury | ||
mistralai/Ministral-3-14B-Base-2512 | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | null | custom | mariagrandury | ||
mistralai/Ministral-3-14B-Instruct-2512 | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | null | custom | mariagrandury | ||
mistralai/Ministral-3-3B-Base-2512 | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | null | custom | mariagrandury | ||
mistralai/Ministral-3-3B-Instruct-2512 | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | null | custom | mariagrandury | ||
mistralai/Ministral-3-8B-Base-2512 | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | π’ : pretrained | 0 | null | custom | mariagrandury | ||
mistralai/Ministral-3-8B-Instruct-2512 | main | false | bfloat16 | Original | PENDING | 2026-02-20T14:08:03 | instruction-tuned | 0 | null | custom | mariagrandury | ||
mistralai/Mistral-7B-Instruct-v0.3 | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | instruction-tuned | 0 | 7.25 | custom | mariagrandury | ||
mistralai/Mistral-7B-v0.3 | main | false | bfloat16 | Original | FINISHED | 2024-09-21T10:47:19 | π’ : pretrained | 363 | 7.25 | apache-2.0 | MistralForCausalLM | mariagrandury | |
occiglot/occiglot-7b-es-en-instruct | main | false | float32 | Original | FINISHED | 2024-11-03T13:14:16 | instruction-tuned | 0 | 7.24 | custom | mariagrandury | ||
occiglot/occiglot-7b-es-en | main | false | float32 | Original | FINISHED | 2024-09-21T03:47:19 | π’ : pretrained | 4 | 7.24 | apache-2.0 | MistralForCausalLM | mariagrandury | |
occiglot/occiglot-7b-eu5-instruct | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | instruction-tuned | 0 | 7.24 | custom | mariagrandury | ||
occiglot/occiglot-7b-eu5 | main | false | bfloat16 | Original | FINISHED | 2025-02-01T22:16:50 | π’ : pretrained | 0 | 7.24 | custom | mariagrandury
End of preview. Expand in Data Studio.
README.md exists but content is empty.
- Downloads last month
- 1,768
Size of downloaded dataset files:
49 kB
Size of the auto-converted Parquet files:
386 kB
Number of rows:
128