Update app.py
app.py CHANGED

@@ -1,5 +1,5 @@
 import gradio as gr
-import
+import copy
 import time
 import ctypes #to run on C api directly
 import llama_cpp

@@ -21,12 +21,11 @@ def generate_text(input_text, history):
     input_text_with_history += f"Q: {input_text} \n A:"
     print("new input", input_text_with_history)
     output = llm(input_text_with_history, max_tokens=1024, stop=["Q:", "\n"], stream=True)
-    response = output['choices'][0]['text'] + "\n"
-    print("response", response)
     history =["init",input_text_with_history]
     for out in output:
-
-
+        stream = copy.deepcopy(out)
+        yield print(stream["choice"][0]["text"])
+

 demo = gr.ChatInterface(generate_text)
 demo.queue(concurrency_count=1, max_size=5)
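Note that the commit fixes the bare `import` statement (now `import copy`) and switches the handler to streaming, but the new loop body still has two bugs: the llama-cpp-python completion chunks use the key "choices" (plural), not "choice", and `yield print(...)` yields None because `print()` returns None, while gr.ChatInterface expects the partial response string itself. Below is a minimal sketch of a corrected handler, assuming llama-cpp-python's streaming completion API; the model_path value and the prompt construction are placeholders, and `copy.deepcopy` is kept only to mirror the commit (it is not strictly required).

import copy

import gradio as gr
import llama_cpp

# Hypothetical model path; substitute the GGUF file the Space actually loads.
llm = llama_cpp.Llama(model_path="model.gguf")

def generate_text(input_text, history):
    # Simplified prompt; the real app also folds `history` into the prompt.
    input_text_with_history = f"Q: {input_text} \n A:"
    output = llm(input_text_with_history, max_tokens=1024, stop=["Q:", "\n"], stream=True)
    response = ""
    for out in output:
        stream = copy.deepcopy(out)
        # "choices" (plural) is the correct key, and ChatInterface expects the
        # accumulated partial response to be yielded on each step.
        response += stream["choices"][0]["text"]
        yield response

demo = gr.ChatInterface(generate_text)
demo.queue(concurrency_count=1, max_size=5)
demo.launch()

Yielding the growing `response` string on every chunk is what makes the reply appear token by token in the chat window; yielding only the newest fragment would make each partial message overwrite the last.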