# Source scraped from a Hugging Face Space file page.
# Author: Gerry — "initial commit" (commit 1089b8a, verified), 868 bytes.
import gradio as gr
import time
def echo(message, history, system_prompt, tokens):
    """Stream an echo of the system prompt and user message, one char at a time.

    Args:
        message: The user's chat message.
        history: Chat history supplied by gr.ChatInterface (unused here).
        system_prompt: Text from the "System Prompt" textbox input.
        tokens: Slider value capping how many characters are streamed.

    Yields:
        Progressively longer prefixes of the response string, simulating
        token-by-token generation.
    """
    response = f"System prompt: {system_prompt}\n Message: {message}."
    # Cap output at the slider value; hoist the int() conversion out of the loop.
    limit = min(len(response), int(tokens))
    for i in range(limit):
        time.sleep(0.05)  # simulate per-token latency
        yield response[: i + 1]
# Build the two extra controls shown beneath the chat box.
system_prompt_box = gr.Textbox("You are helpful AI.", label="System Prompt")
token_slider = gr.Slider(10, 100)

# Wire the streaming echo generator into a chat UI.
demo = gr.ChatInterface(
    echo,
    type="messages",
    additional_inputs=[system_prompt_box, token_slider],
)

# Start the app; surface server-side errors in the browser for easier debugging.
demo.launch(show_error=True)