Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -70,11 +70,9 @@ import os
|
|
| 70 |
if os.path.exists("./llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf"):
|
| 71 |
pass
|
| 72 |
else:
|
| 73 |
-
req=requests.get("https://huggingface.co/peterpeter8585/Llama-3-Open-Ko-8B-Instruct-preview-Q5_K_M-GGUF/resolve/main/llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf"
|
| 74 |
with open("./llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf","wb") as f:
|
| 75 |
-
|
| 76 |
-
f.write(i)
|
| 77 |
-
|
| 78 |
#from transformers import pipeline,AutoModelForCausalLM as M,AutoTokenizer as T
|
| 79 |
#m=M.from_pretrained("peterpeter8585/syai4.3")
|
| 80 |
#t=T.from_pretrained("peterpeter8585/syai4.3")
|
|
|
|
| 70 |
# Fetch the quantized Llama-3 Open-Ko 8B GGUF on first run so the Space can
# serve it locally; skip the download when the file is already on disk.
MODEL_PATH = "./llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf"
MODEL_URL = (
    "https://huggingface.co/peterpeter8585/"
    "Llama-3-Open-Ko-8B-Instruct-preview-Q5_K_M-GGUF/resolve/main/"
    "llama-3-open-ko-8b-instruct-preview-q5_k_m.gguf"
)

if not os.path.exists(MODEL_PATH):
    # Stream the multi-GB model to disk in 1 MiB chunks instead of
    # `req.content`, which would buffer the entire file in RAM.
    with requests.get(MODEL_URL, stream=True, timeout=60) as req:
        # Fail loudly on HTTP errors (404/403) rather than silently
        # writing an HTML error page to disk as the "model".
        req.raise_for_status()
        with open(MODEL_PATH, "wb") as f:
            for chunk in req.iter_content(chunk_size=1 << 20):
                f.write(chunk)
|
|
|
|
|
|
|
| 76 |
#from transformers import pipeline,AutoModelForCausalLM as M,AutoTokenizer as T
|
| 77 |
#m=M.from_pretrained("peterpeter8585/syai4.3")
|
| 78 |
#t=T.from_pretrained("peterpeter8585/syai4.3")
|