Spaces:
Sleeping
Sleeping
File size: 3,525 Bytes
74c485e a83fbed 74c485e f86db8c 74c485e 5d7dfa0 74c485e f86db8c 1133401 f86db8c 1635dbd f86db8c 1635dbd f86db8c fdb8106 1635dbd 12bc661 f86db8c 1635dbd f86db8c fdb8106 1635dbd e3d560c 1635dbd f86db8c 1635dbd f86db8c e3d560c f86db8c 74c485e e3d560c 74c485e f86db8c 74c485e f86db8c 74c485e f86db8c 74c485e f86db8c 74c485e f86db8c 74c485e f86db8c 74c485e f86db8c cc6442f 74c485e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 |
from flask import Flask, request, jsonify
import gradio as gr
from huggingface_hub import InferenceClient
import json
app = Flask(__name__)

# Initialize the HuggingFace inference client (the Gradio UI is defined below).
client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
def chat_llama(chat_history):
    """Send an OpenAI-style message list to the model and append its reply.

    Parameters
    ----------
    chat_history : list[dict]
        Messages of the form ``{"role": ..., "content": ...}``.

    Returns
    -------
    list[dict]
        A new list: the input messages plus the assistant's reply.
    """
    completion = client.chat_completion(
        messages=chat_history,
        max_tokens=500,
    )
    reply = completion.choices[0].message.content
    # Return a fresh list instead of mutating the caller's argument in place.
    return chat_history + [{"role": "assistant", "content": reply}]
def chat_mem(message, chat_history):
    """Gradio chat handler: answer *message* given the prior (user, bot) pairs.

    Parameters
    ----------
    message : str
        The new user message.
    chat_history : list[tuple[str, str]] | list[list[str]] | None
        Prior (user, assistant) exchanges as shown in the Chatbot component.

    Returns
    -------
    tuple[str, list[tuple[str, str]]]
        An empty string (clears the textbox) and the updated history.
    """
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    # Flatten the (user, assistant) pairs into role-tagged messages.
    for user_msg, assistant_msg in chat_history or []:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})
    completion = client.chat_completion(
        messages=messages,
        max_tokens=500,
    )
    reply = completion.choices[0].message.content
    # Build the updated history directly from the inputs rather than
    # reconstructing it from the role list with index arithmetic.
    updated = [tuple(pair) for pair in (chat_history or [])]
    updated.append((message, reply))
    return "", updated
def process_json(json_input):
    """Run a raw chat-history JSON string through the model.

    Parameters
    ----------
    json_input : str
        A JSON array of ``{"role": ..., "content": ...}`` message dicts.

    Returns
    -------
    str
        The updated history as pretty-printed JSON, or an error description
        when the input cannot be parsed.
    """
    try:
        chat_history = json.loads(json_input)
        if not isinstance(chat_history, list):
            raise ValueError("Input should be a list of message dictionaries.")
    except (json.JSONDecodeError, ValueError) as e:
        # Single return value so the arity matches the one Gradio output
        # component wired to this handler (outputs=[json_output]).
        return f"Error parsing JSON: {str(e)}"
    chat_history = chat_llama(chat_history)
    return json.dumps(chat_history, indent=2)
# Define the Gradio interface: chat UI in the left column, raw-JSON tools on
# the right. Component nesting follows the Blocks context managers.
gr_interface = gr.Blocks()
with gr_interface:
    with gr.Row():
        with gr.Column():
            # Interactive chat: chat_mem returns ("", history), so submitting
            # clears the textbox and refreshes the Chatbot.
            chatbot = gr.Chatbot()
            msg = gr.Textbox(interactive=True)
            with gr.Row():
                clear = gr.ClearButton([msg, chatbot])
                send_btn = gr.Button("Send", variant='primary')
            # Enter in the textbox and the Send button trigger the same handler.
            msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
            send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
        with gr.Column():
            # Raw JSON passthrough: paste a message list, get the completed
            # history back in the output box.
            json_input = gr.Textbox(placeholder='Input JSON here...', interactive=True, lines=10)
            json_output = gr.Textbox(label='Output JSON', interactive=False, lines=10)
            process_btn = gr.Button("Process JSON", variant='primary')
            # NOTE(review): process_json's return arity must match the outputs
            # list here — verify it returns exactly one value per component.
            process_btn.click(fn=process_json, inputs=json_input, outputs=[json_output])
@app.route("/", methods=["GET"])
def home():
    # NOTE(review): Blocks.launch() starts its own server and its return value
    # is not a Flask response object — this route likely misbehaves when hit.
    # Consider serving the Gradio app separately or via a proper mount helper
    # instead of launching inside a request handler. TODO confirm intent.
    return gr_interface.launch(inline=True, share=False)
@app.route("/chat_llama", methods=["POST"])
def chat_llama_endpoint():
    """POST /chat_llama — body: {"chat_history": [...]}; returns the completed history."""
    # silent=True: a missing or malformed JSON body yields None instead of a
    # 400/415 abort; fall back to an empty dict so .get() cannot AttributeError.
    data = request.get_json(silent=True) or {}
    chat_history = data.get('chat_history', [])
    response = chat_llama(chat_history)
    return jsonify(response)
@app.route("/chat_mem", methods=["POST"])
def chat_mem_endpoint():
    """POST /chat_mem — body: {"message": str, "chat_history": [...]}."""
    # silent=True guards against missing/malformed JSON bodies (request.json
    # would raise or return None, breaking the .get() calls below).
    data = request.get_json(silent=True) or {}
    message = data.get('message', '')
    chat_history = data.get('chat_history', [])
    response = chat_mem(message, chat_history)
    return jsonify(response)
@app.route("/process_json", methods=["POST"])
def process_json_endpoint():
    """POST /process_json — body: {"json_input": str}; returns model output or an error string."""
    # silent=True guards against missing/malformed JSON bodies (request.json
    # would raise or return None, breaking the .get() call below).
    data = request.get_json(silent=True) or {}
    json_input = data.get('json_input', '')
    response = process_json(json_input)
    return jsonify(response)
# Run the Flask dev server; port 7860 is the Hugging Face Spaces default.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)