Nexchan committed on
Commit
5d7dfa0
·
verified ·
1 Parent(s): a83fbed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -9
app.py CHANGED
@@ -1,14 +1,22 @@
1
  from fastapi import FastAPI, Request
2
  from fastapi.responses import HTMLResponse
 
3
  from huggingface_hub import InferenceClient
4
  import gradio as gr
5
- import json
6
  import uvicorn
7
- import threading
8
 
9
  # Inisialisasi FastAPI
10
  app = FastAPI()
11
 
 
 
 
 
 
 
 
 
 
12
  # Inisialisasi Gradio
13
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
14
 
@@ -72,13 +80,9 @@ def gradio_interface():
72
 
73
  return demo
74
 
75
def run_gradio():
    """Build the Gradio UI and serve it on port 7861, bound to all interfaces.

    share=False keeps the app local (no public gradio.live tunnel).
    """
    ui = gradio_interface()
    ui.launch(server_port=7861, share=False, server_name="0.0.0.0")
78
-
79
@app.get("/", response_class=HTMLResponse)
async def read_root():
    """Landing page: confirm the API is up and point at the Gradio UI port."""
    status_message = "FastAPI is running. Gradio is accessible on port 7861."
    return status_message
82
 
83
  @app.post("/chat_llama")
84
  async def chat_llama_endpoint(request: Request):
@@ -104,6 +108,4 @@ async def process_json_endpoint(request: Request):
104
 
105
# Entry point: start the Gradio UI on a background thread, then block on
# the uvicorn server for the FastAPI app in the main thread.
if __name__ == "__main__":
    ui_thread = threading.Thread(target=run_gradio)
    ui_thread.start()
    uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
  from fastapi import FastAPI, Request
2
  from fastapi.responses import HTMLResponse
3
+ from fastapi.middleware.cors import CORSMiddleware
4
  from huggingface_hub import InferenceClient
5
  import gradio as gr
 
6
  import uvicorn
 
7
 
8
  # Inisialisasi FastAPI
9
  app = FastAPI()
10
 
11
# Allow cross-origin requests so browser clients on other domains can call
# the API.
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# rejected by browsers for credentialed requests per the CORS spec — confirm
# whether credentials are actually needed here.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
    allow_credentials=True,
)
19
+
20
  # Inisialisasi Gradio
21
  client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
22
 
 
80
 
81
  return demo
82
 
 
 
 
 
83
@app.get("/", response_class=HTMLResponse)
async def read_root():
    """Serve a static HTML landing page for the API root.

    Bug fixed: the previous body returned
    ``gr.Interface(fn=process_json, ...).launch(inline=True, share=False)``,
    which attempts to start a brand-new Gradio server on every GET request
    and returns a non-HTML object, so the endpoint never produced a valid
    page under ``response_class=HTMLResponse``. A GET handler must return
    HTML; if an embedded Gradio UI is wanted, mount it once at startup
    (e.g. ``gr.mount_gradio_app(app, demo, path=...)``) instead of calling
    ``launch()`` per request.
    """
    return (
        "<h1>FastAPI is running.</h1>"
        "<p>Use POST /chat_llama or POST /process_json.</p>"
    )
86
 
87
  @app.post("/chat_llama")
88
  async def chat_llama_endpoint(request: Request):
 
108
 
109
# Entry point: serve the FastAPI app with uvicorn.
if __name__ == "__main__":
    bind_host, bind_port = "0.0.0.0", 7860
    uvicorn.run(app, host=bind_host, port=bind_port)