Nexchan committed on
Commit
c7c31e8
·
verified ·
1 Parent(s): f86db8c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -22
app.py CHANGED
@@ -1,9 +1,9 @@
1
- from fastapi import FastAPI, Request, Response
2
  from fastapi.responses import HTMLResponse
3
- from fastapi.staticfiles import StaticFiles
4
- from huggingface_hub import InferenceClient
5
  import gradio as gr
 
6
  import json
 
7
 
8
  # Inisialisasi FastAPI
9
  app = FastAPI()
@@ -51,27 +51,30 @@ def process_json(json_input):
51
  return json.dumps(chat_history, indent=2), ""
52
 
53
  # Definisikan antarmuka Gradio
54
- gr_interface = gr.Blocks()
55
- with gr_interface:
56
- with gr.Row():
57
- with gr.Column():
58
- chatbot = gr.Chatbot()
59
- msg = gr.Textbox(interactive=True)
60
- with gr.Row():
61
- clear = gr.ClearButton([msg, chatbot])
62
- send_btn = gr.Button("Send", variant='primary')
63
- msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
64
- send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
65
-
66
- with gr.Column():
67
- json_input = gr.Textbox(placeholder='Input JSON here...', interactive=True, lines=10)
68
- json_output = gr.Textbox(label='Output JSON', interactive=False, lines=10)
69
- process_btn = gr.Button("Process JSON", variant='primary')
70
- process_btn.click(fn=process_json, inputs=json_input, outputs=[json_output])
 
 
71
 
72
  @app.get("/", response_class=HTMLResponse)
73
  async def read_root():
74
- return gr_interface.launch(inline=True, share=False)
 
75
 
76
  @app.post("/chat_llama")
77
  async def chat_llama_endpoint(request: Request):
@@ -97,5 +100,4 @@ async def process_json_endpoint(request: Request):
97
 
98
  # Jalankan server FastAPI
99
  if __name__ == "__main__":
100
- import uvicorn
101
  uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
+ from fastapi import FastAPI, Request
2
  from fastapi.responses import HTMLResponse
 
 
3
  import gradio as gr
4
+ from huggingface_hub import InferenceClient
5
  import json
6
+ import uvicorn
7
 
8
  # Inisialisasi FastAPI
9
  app = FastAPI()
 
51
  return json.dumps(chat_history, indent=2), ""
52
 
53
  # Definisikan antarmuka Gradio
54
def gradio_interface():
    """Build and return the Gradio Blocks UI.

    Left column: a chat panel (Chatbot + Textbox) wired to ``chat_mem`` via
    both Enter-submit and a Send button, plus a ClearButton that resets both
    widgets. Right column: a JSON input/output pair wired to ``process_json``.
    """
    with gr.Blocks() as ui:
        with gr.Row():
            # --- chat panel -------------------------------------------------
            with gr.Column():
                chat_display = gr.Chatbot()
                user_input = gr.Textbox(interactive=True)
                with gr.Row():
                    # ClearButton registers its own reset handler on creation.
                    clear_btn = gr.ClearButton([user_input, chat_display])
                    submit_btn = gr.Button("Send", variant='primary')
                # Enter key and Send button trigger the same callback.
                user_input.submit(fn=chat_mem, inputs=[user_input, chat_display],
                                  outputs=[user_input, chat_display])
                submit_btn.click(fn=chat_mem, inputs=[user_input, chat_display],
                                 outputs=[user_input, chat_display])

            # --- JSON panel -------------------------------------------------
            with gr.Column():
                json_input = gr.Textbox(placeholder='Input JSON here...', interactive=True, lines=10)
                json_output = gr.Textbox(label='Output JSON', interactive=False, lines=10)
                process_btn = gr.Button("Process JSON", variant='primary')
                process_btn.click(fn=process_json, inputs=json_input, outputs=[json_output])

    return ui
73
 
74
@app.get("/", response_class=HTMLResponse)
async def read_root():
    """Serve the Gradio UI at the root path.

    NOTE(review): ``Blocks.launch()`` starts a separate Gradio server and its
    return value is not an HTML document, so returning it from a FastAPI
    handler declared with ``response_class=HTMLResponse`` may not render as
    intended; the supported integration is ``gr.mount_gradio_app`` — TODO
    confirm against the Gradio/FastAPI docs.
    """
    # Rebuilds the interface on every request (matches original behavior).
    demo = gradio_interface()
    return demo.launch(inline=True, share=False)
78
 
79
  @app.post("/chat_llama")
80
  async def chat_llama_endpoint(request: Request):
 
100
 
101
# Run the FastAPI server when this file is executed directly.
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)