Nexchan committed on
Commit
74c485e
·
verified ·
1 Parent(s): 5d7dfa0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -53
app.py CHANGED
@@ -1,23 +1,11 @@
1
- from fastapi import FastAPI, Request
2
- from fastapi.responses import HTMLResponse
3
- from fastapi.middleware.cors import CORSMiddleware
4
- from huggingface_hub import InferenceClient
5
  import gradio as gr
6
- import uvicorn
7
-
8
- # Inisialisasi FastAPI
9
- app = FastAPI()
10
 
11
- # Izinkan CORS (jika perlu)
12
- app.add_middleware(
13
- CORSMiddleware,
14
- allow_origins=["*"],
15
- allow_credentials=True,
16
- allow_methods=["*"],
17
- allow_headers=["*"],
18
- )
19
 
20
- # Inisialisasi Gradio
21
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
22
 
23
  def chat_llama(chat_history):
@@ -60,52 +48,49 @@ def process_json(json_input):
60
  return json.dumps(chat_history, indent=2), ""
61
 
62
  # Definisikan antarmuka Gradio
63
- def gradio_interface():
64
- with gr.Blocks() as demo:
65
- with gr.Row():
66
- with gr.Column():
67
- chatbot = gr.Chatbot()
68
- msg = gr.Textbox(interactive=True)
69
- with gr.Row():
70
- clear = gr.ClearButton([msg, chatbot])
71
- send_btn = gr.Button("Send", variant='primary')
72
- msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
73
- send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
74
-
75
- with gr.Column():
76
- json_input = gr.Textbox(placeholder='Input JSON here...', interactive=True, lines=10)
77
- json_output = gr.Textbox(label='Output JSON', interactive=False, lines=10)
78
- process_btn = gr.Button("Process JSON", variant='primary')
79
- process_btn.click(fn=process_json, inputs=json_input, outputs=[json_output])
80
-
81
- return demo
82
 
83
- @app.get("/", response_class=HTMLResponse)
84
- async def read_root():
85
- return gr.Interface(fn=process_json, inputs="text", outputs="text").launch(inline=True, share=False)
86
 
87
- @app.post("/chat_llama")
88
- async def chat_llama_endpoint(request: Request):
89
- data = await request.json()
90
  chat_history = data.get('chat_history', [])
91
  response = chat_llama(chat_history)
92
- return response
93
 
94
- @app.post("/chat_mem")
95
- async def chat_mem_endpoint(request: Request):
96
- data = await request.json()
97
  message = data.get('message', '')
98
  chat_history = data.get('chat_history', [])
99
  response = chat_mem(message, chat_history)
100
- return response
101
 
102
- @app.post("/process_json")
103
- async def process_json_endpoint(request: Request):
104
- data = await request.json()
105
  json_input = data.get('json_input', '')
106
  response = process_json(json_input)
107
- return response
108
 
109
- # Jalankan server FastAPI
110
  if __name__ == "__main__":
111
- uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
+ from flask import Flask, request, jsonify
 
 
 
2
  import gradio as gr
3
+ from huggingface_hub import InferenceClient
4
+ import json
 
 
5
 
6
+ app = Flask(__name__)
 
 
 
 
 
 
 
7
 
8
+ # Inisialisasi Gradio dan HuggingFace client
9
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
10
 
11
  def chat_llama(chat_history):
 
48
  return json.dumps(chat_history, indent=2), ""
49
 
50
  # Definisikan antarmuka Gradio
51
+ gr_interface = gr.Blocks()
52
+ with gr_interface:
53
+ with gr.Row():
54
+ with gr.Column():
55
+ chatbot = gr.Chatbot()
56
+ msg = gr.Textbox(interactive=True)
57
+ with gr.Row():
58
+ clear = gr.ClearButton([msg, chatbot])
59
+ send_btn = gr.Button("Send", variant='primary')
60
+ msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
61
+ send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
62
+
63
+ with gr.Column():
64
+ json_input = gr.Textbox(placeholder='Input JSON here...', interactive=True, lines=10)
65
+ json_output = gr.Textbox(label='Output JSON', interactive=False, lines=10)
66
+ process_btn = gr.Button("Process JSON", variant='primary')
67
+ process_btn.click(fn=process_json, inputs=json_input, outputs=[json_output])
 
 
68
 
69
+ @app.route("/", methods=["GET"])
70
+ def home():
71
+ return gr_interface.launch(inline=True, share=False)
72
 
73
+ @app.route("/chat_llama", methods=["POST"])
74
+ def chat_llama_endpoint():
75
+ data = request.json
76
  chat_history = data.get('chat_history', [])
77
  response = chat_llama(chat_history)
78
+ return jsonify(response)
79
 
80
+ @app.route("/chat_mem", methods=["POST"])
81
+ def chat_mem_endpoint():
82
+ data = request.json
83
  message = data.get('message', '')
84
  chat_history = data.get('chat_history', [])
85
  response = chat_mem(message, chat_history)
86
+ return jsonify(response)
87
 
88
+ @app.route("/process_json", methods=["POST"])
89
+ def process_json_endpoint():
90
+ data = request.json
91
  json_input = data.get('json_input', '')
92
  response = process_json(json_input)
93
+ return jsonify(response)
94
 
 
95
  if __name__ == "__main__":
96
+ app.run(host="0.0.0.0", port=7860)