Spaces:
Running
on
Zero
Running
on
Zero
remove previously added breeze models (as it didn't work), add smollm 135m taiwan
Browse files
app.py
CHANGED
|
@@ -29,13 +29,17 @@ MODELS = {
|
|
| 29 |
# … your existing entries …
|
| 30 |
"Qwen2.5-Taiwan-1.5B-Instruct": {"repo_id": "benchang1110/Qwen2.5-Taiwan-1.5B-Instruct", "description": "Qwen2.5-Taiwan-1.5B-Instruct"},
|
| 31 |
# ─── New Taiwan-tuned variants ───
|
| 32 |
-
"MediaTek-Research/Llama-Breeze2-3B-Instruct":{
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
},
|
| 36 |
-
"MediaTek-Research/Llama-Breeze2-8B-Instruct":{
|
| 37 |
-
|
| 38 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 39 |
},
|
| 40 |
"Llama-3.2-Taiwan-1B": {
|
| 41 |
"repo_id": "lianghsun/Llama-3.2-Taiwan-1B",
|
|
|
|
| 29 |
# … your existing entries …
|
| 30 |
"Qwen2.5-Taiwan-1.5B-Instruct": {"repo_id": "benchang1110/Qwen2.5-Taiwan-1.5B-Instruct", "description": "Qwen2.5-Taiwan-1.5B-Instruct"},
|
| 31 |
# ─── New Taiwan-tuned variants ───
|
| 32 |
+
# "MediaTek-Research/Llama-Breeze2-3B-Instruct":{
|
| 33 |
+
# "repo_id":"MediaTek-Research/Llama-Breeze2-3B-Instruct",
|
| 34 |
+
# "description":"The Breeze 2 Herd of Models: Traditional Chinese LLMs Based on LLaMA with Vision-Aware and Function-Calling Capabilities",
|
| 35 |
+
# },
|
| 36 |
+
# "MediaTek-Research/Llama-Breeze2-8B-Instruct":{
|
| 37 |
+
# "repo_id":"MediaTek-Research/Llama-Breeze2-8B-Instruct",
|
| 38 |
+
# "description":"The Breeze 2 Herd of Models: Traditional Chinese LLMs Based on LLaMA with Vision-Aware and Function-Calling Capabilities",
|
| 39 |
+
# },
|
| 40 |
+
"SmolLM-135M-Taiwan-Instruct-v1.0": {
|
| 41 |
+
"repo_id": "benchang1110/SmolLM-135M-Taiwan-Instruct-v1.0",
|
| 42 |
+
"description": "135-million-parameter F32 safetensors instruction-finetuned variant of SmolLM-135M-Taiwan, trained on the 416 k-example ChatTaiwan dataset for Traditional Chinese conversational and instruction-following tasks"
|
| 43 |
},
|
| 44 |
"Llama-3.2-Taiwan-1B": {
|
| 45 |
"repo_id": "lianghsun/Llama-3.2-Taiwan-1B",
|