Spaces:
Sleeping
Sleeping
update
Browse files
app.py
CHANGED
|
@@ -11,4 +11,9 @@ prompt = st.chat_input("Say something")
|
|
| 11 |
if prompt:
|
| 12 |
llm = Llama(model_path="Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf")
|
| 13 |
r=llm(prompt, max_tokens=1000)
|
| 14 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
# Handle a submitted chat prompt: run it through the local Llama model and
# render the prompt together with the model's response in the Streamlit app.
if prompt:
    # NOTE(review): the GGUF model is (re)loaded from disk on every submitted
    # prompt, which is expensive — consider constructing the Llama instance
    # once (e.g. behind @st.cache_resource) and reusing it. Left as-is here
    # because restructuring would change the script's shape.
    llm = Llama(model_path="Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf")
    completion = llm(prompt, max_tokens=1000)

    # Fallback text shown if the completion payload lacks the expected shape.
    response_text = "Nothing"
    try:
        # llama-cpp-python returns an OpenAI-style completion dict:
        # {"choices": [{"text": ...}, ...]} — presumably; confirm against the
        # installed llama_cpp version.
        response_text = completion["choices"][0]["text"]
    except (KeyError, IndexError, TypeError) as e:
        # Narrowed from a blanket `except Exception`: only payload-shape
        # errors are expected here; anything else should surface instead of
        # being silently swallowed. Best-effort log keeps the app alive.
        print(e)

    st.write(f"User has sent the following prompt: {prompt} with response: {response_text} ")
|