Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -4,7 +4,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
|
|
| 4 |
import torch
|
| 5 |
|
| 6 |
# 初始化 Qwen 模型與 tokenizer(加上 trust_remote_code)
|
| 7 |
-
model_id = "TinyLlama-1.1B"
|
| 8 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 9 |
|
| 10 |
print(f"🚀 載入模型:{model_id} on {device}")
|
|
|
|
| 4 |
import torch
|
| 5 |
|
| 6 |
# 初始化 TinyLlama 模型與 tokenizer(加上 trust_remote_code)
|
| 7 |
+
model_id = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
|
| 8 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 9 |
|
| 10 |
print(f"🚀 載入模型:{model_id} on {device}")
|