Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -3,13 +3,11 @@ from fastapi import FastAPI, UploadFile, File
|
|
| 3 |
from fastapi.middleware.cors import CORSMiddleware
|
| 4 |
import requests
|
| 5 |
import uvicorn
|
| 6 |
-
|
| 7 |
-
import io
|
| 8 |
-
import base64
|
| 9 |
|
| 10 |
app = FastAPI()
|
| 11 |
|
| 12 |
-
# CORS
|
| 13 |
app.add_middleware(
|
| 14 |
CORSMiddleware,
|
| 15 |
allow_origins=["*"],
|
|
@@ -20,16 +18,13 @@ app.add_middleware(
|
|
| 20 |
|
| 21 |
# HuggingFace 모델 엔드포인트
|
| 22 |
HF_API_URL = "https://api-inference.huggingface.co/models/keremberke/yolov8n-concrete-crack"
|
| 23 |
-
HF_TOKEN = "YOUR_HF_TOKEN"  # 반드시 입력 필요
|
| 24 |
-
|
| 25 |
-
headers = {
|
| 26 |
-
"Authorization": f"Bearer {HF_TOKEN}"
|
| 27 |
-
}
|
| 28 |
|
|
|
|
|
|
|
|
|
|
| 29 |
|
| 30 |
@app.post("/predict")
|
| 31 |
async def predict(img: UploadFile = File(...)):
|
| 32 |
-
# 이미지 읽기
|
| 33 |
bytes_data = await img.read()
|
| 34 |
|
| 35 |
response = requests.post(
|
|
@@ -43,11 +38,9 @@ async def predict(img: UploadFile = File(...)):
|
|
| 43 |
except:
|
| 44 |
return {"data": [{"label": "normal", "confidence": 1.0}]}
|
| 45 |
|
| 46 |
-
# 결과가 bounding box 리스트
|
| 47 |
if not isinstance(results, list) or len(results) == 0:
|
| 48 |
return {"data": [{"label": "normal", "confidence": 1.0}]}
|
| 49 |
|
| 50 |
-
# confidence 최고값 찾기
|
| 51 |
max_conf = max(item.get("score", 0) for item in results)
|
| 52 |
|
| 53 |
return {
|
|
@@ -59,6 +52,5 @@ async def predict(img: UploadFile = File(...)):
|
|
| 59 |
]
|
| 60 |
}
|
| 61 |
|
| 62 |
-
|
| 63 |
if __name__ == "__main__":
|
| 64 |
uvicorn.run(app, host="0.0.0.0", port=7860)
|
|
|
|
| 3 |
from fastapi.middleware.cors import CORSMiddleware
|
| 4 |
import requests
|
| 5 |
import uvicorn
|
| 6 |
+
import os
|
|
|
|
|
|
|
| 7 |
|
| 8 |
app = FastAPI()
|
| 9 |
|
| 10 |
+
# CORS 설정
|
| 11 |
app.add_middleware(
|
| 12 |
CORSMiddleware,
|
| 13 |
allow_origins=["*"],
|
|
|
|
| 18 |
|
| 19 |
# HuggingFace 모델 엔드포인트
|
| 20 |
HF_API_URL = "https://api-inference.huggingface.co/models/keremberke/yolov8n-concrete-crack"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
|
| 22 |
+
# ⚠ 토큰을 절대로 문자열로 넣으면 안 됨. 환경변수로만!
|
| 23 |
+
HF_TOKEN = os.getenv("HF_TOKEN")
|
| 24 |
+
headers = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}
|
| 25 |
|
| 26 |
@app.post("/predict")
|
| 27 |
async def predict(img: UploadFile = File(...)):
|
|
|
|
| 28 |
bytes_data = await img.read()
|
| 29 |
|
| 30 |
response = requests.post(
|
|
|
|
| 38 |
except:
|
| 39 |
return {"data": [{"label": "normal", "confidence": 1.0}]}
|
| 40 |
|
|
|
|
| 41 |
if not isinstance(results, list) or len(results) == 0:
|
| 42 |
return {"data": [{"label": "normal", "confidence": 1.0}]}
|
| 43 |
|
|
|
|
| 44 |
max_conf = max(item.get("score", 0) for item in results)
|
| 45 |
|
| 46 |
return {
|
|
|
|
| 52 |
]
|
| 53 |
}
|
| 54 |
|
|
|
|
| 55 |
if __name__ == "__main__":
|
| 56 |
uvicorn.run(app, host="0.0.0.0", port=7860)
|