# Hugging Face Space: Gradio chat app backed by meta-llama/Meta-Llama-3-8B-Instruct.
import gradio as gr
from huggingface_hub import InferenceClient
import json
# Initialize the Hugging Face inference client for the Llama-3 8B Instruct model
client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
def chat_llama(chat_history):
    """Ask the model to continue *chat_history* and append its reply.

    chat_history is a list of {"role": ..., "content": ...} dicts. The
    assistant's answer is appended in place and the same list is returned.
    """
    completion = client.chat_completion(
        messages=chat_history,
        max_tokens=500,
    )
    reply = completion.choices[0].message.content
    chat_history.append({"role": "assistant", "content": reply})
    return chat_history
def chat_mem(message, chat_history):
    """Handle one chat turn with memory for the Gradio Chatbot component.

    Rebuilds the full role-tagged message list from the (user, assistant)
    pair history, queries the model, and returns ("", updated_pairs) so
    the textbox is cleared and the Chatbot receives the new history.
    """
    roles = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_turn, bot_turn in (chat_history or []):
        roles.append({"role": "user", "content": user_turn})
        roles.append({"role": "assistant", "content": bot_turn})
    roles.append({"role": "user", "content": message})

    completion = client.chat_completion(
        messages=roles,
        max_tokens=500,
    )
    answer = completion.choices[0].message.content
    roles.append({"role": "assistant", "content": answer})

    # Drop the leading system message, then pair up alternating
    # user/assistant contents into (user, assistant) tuples.
    contents = [turn["content"] for turn in roles]
    pairs = list(zip(contents[1::2], contents[2::2]))
    return "", pairs
def process_json(json_input):
    """Parse a JSON chat history, run it through the model, return JSON text.

    json_input must be a JSON array of {"role": ..., "content": ...} message
    dicts. On success the updated history is returned pretty-printed as JSON;
    on a parse/validation failure a human-readable error string is returned.
    """
    try:
        chat_history = json.loads(json_input)
        if not isinstance(chat_history, list):
            raise ValueError("Input should be a list of message dictionaries.")
    except (json.JSONDecodeError, ValueError) as e:
        # Bug fix: the click handler wires exactly ONE output component
        # (json_output), but this function previously returned a 2-tuple,
        # which makes Gradio raise "too many output values" at runtime.
        # Return a single string on both paths.
        return f"Error parsing JSON: {str(e)}"
    chat_history = chat_llama(chat_history)
    return json.dumps(chat_history, indent=2)
# Define the Gradio interface: chat UI on the left, raw-JSON tools on the right.
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot()
            msg = gr.Textbox(interactive=True)
            with gr.Row():
                clear = gr.ClearButton([msg, chatbot])
                send_btn = gr.Button("Send", variant='primary')
            # Both Enter-in-textbox and the Send button trigger the same handler.
            msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
            send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
        with gr.Column():
            json_input = gr.Textbox(placeholder='Input JSON here...', interactive=True, lines=10)
            json_output = gr.Textbox(label='Output JSON', interactive=False, lines=10)
            process_btn = gr.Button("Process JSON", variant='primary')
            process_btn.click(fn=process_json, inputs=json_input, outputs=[json_output])

# Launch the Gradio app and expose it (and its API) on all interfaces.
# Fix: removed a stray trailing "|" that made this line a syntax error.
demo.launch(server_name="0.0.0.0", server_port=7860, share=False)