Spaces:
Sleeping
Sleeping
app.py
CHANGED
|
@@ -9,4 +9,6 @@ install("llama-cpp-python")
|
|
| 9 |
from llama_cpp import Llama

# Load the model once and reuse it across Streamlit reruns: without caching,
# every chat message would re-read the multi-gigabyte GGUF file from disk.
# (`st` is Streamlit, imported earlier in the file.)
@st.cache_resource
def _load_llm() -> Llama:
    """Return a cached Llama instance for the local GGUF model file."""
    return Llama(model_path="Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf")


prompt = st.chat_input("Say something")
if prompt:
    llm = _load_llm()
    # max_tokens caps the completion length; the call returns an
    # OpenAI-completion-style dict with the text at choices[0]["text"].
    r = llm(prompt, max_tokens=1000)
    # Single quotes inside the f-string expression: reusing the outer double
    # quotes (r["choices"]...) is a SyntaxError on Python < 3.12 (PEP 701).
    st.write(
        f"User has sent the following prompt: {prompt} "
        f"with response: {r['choices'][0]['text']} "
    )
|