Spaces:
Running
Running
hueonsu654-arch commited on
Commit ยท
9a8fd3e
1
Parent(s): c77e45a
- .dockerignore +8 -0
- .gitignore +7 -0
- Dockerfile +32 -0
- Dockerfile_BK_260210.txt +27 -0
- Dockerfile_bk_260211.txt +32 -0
- app.py +50 -0
- app_bk.py +7 -0
- bee_wasp_efficientnetv2-s_aio.pth +3 -0
- data/8 Data Backup Security Tips for Ransomware Response.txt +37 -0
- data/Cyber โโThreat Trends Report for the First Half of 2025.txt +41 -0
- data/Detailed Guide to Analyzing and Assessing Technical Vulnerabilities in Critical Information and Communication Infrastructure.txt +64 -0
- data/Guide to Key Information and Communication Infrastructure Management, Physical Vulnerability Analysis.txt +125 -0
- data/Hacking Diagnostic Tool Utilization Plan #4 Taking Control of the AD Environment Through Exposed SMB File Servers.txt +47 -0
- data/Information and Communications Field_Breach_Incident_Response_Guide_Revised_Version.txt +194 -0
- data/Report on Trends in Small and Medium-Sized Enterprise Intrusion Damage Support Services (First Half of 2025) - Cases of Spear Phishing Targeting Businesses and Response Strategies.txt +29 -0
- data/myragdata1.txt +25 -0
- efficientnet_b0_chihuahua_muffin.json +1 -0
- efficientnet_b0_chihuahua_muffin.pt +3 -0
- efficientnet_b0_chihuahua_muffin_fsgmdef.json +1 -0
- efficientnet_b0_chihuahua_muffin_fsgmdef.pt +3 -0
- efficientnet_v2_s_plantforestdisease.json +1 -0
- efficientnet_v2_s_plantforestdisease.pt +3 -0
- effinet_basic.py +111 -0
- effinet_basic_compo.py +126 -0
- labels_map.txt +1 -0
- requirements.txt +27 -0
- requirements_bk_260210.txt +31 -0
- requirements_bk_260211.txt +32 -0
- router/cnn_router.py +534 -0
- router/llamindex_router.py +275 -0
- router/llamindex_router_bk_20251231.py +413 -0
- router/llamindex_router_bk_2025_12_10.py +184 -0
- router/llamindex_router_bk_251231V2.py +419 -0
- router/llamindex_router_bk_260120.py +245 -0
.dockerignore
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__pycache__/
|
| 2 |
+
*.pyc
|
| 3 |
+
*.pyo
|
| 4 |
+
*.pyd
|
| 5 |
+
*.log
|
| 6 |
+
.git
|
| 7 |
+
.gitignore
|
| 8 |
+
.venv/
|
.gitignore
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__pycache__/
|
| 2 |
+
*.pyc
|
| 3 |
+
*.pyo
|
| 4 |
+
*.pyd
|
| 5 |
+
*.log
|
| 6 |
+
.venv/
|
| 7 |
+
.env
|
Dockerfile
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Dockerfile
|
| 2 |
+
FROM python:3.10-slim
|
| 3 |
+
|
| 4 |
+
# 1. ์์คํ
ํจํค์ง ์ค์น ๋ฐ ์๊ฐ๋ ์ค์ (Root ๊ถํ)
|
| 5 |
+
RUN apt-get update && \
|
| 6 |
+
apt-get install -y tzdata && \
|
| 7 |
+
ln -sf /usr/share/zoneinfo/Asia/Seoul /etc/localtime && \
|
| 8 |
+
echo "Asia/Seoul" > /etc/timezone && \
|
| 9 |
+
apt-get clean && \
|
| 10 |
+
rm -rf /var/lib/apt/lists/*
|
| 11 |
+
|
| 12 |
+
WORKDIR /app
|
| 13 |
+
|
| 14 |
+
# 2. ์์กด์ฑ ํ์ผ ๋ณต์ฌ
|
| 15 |
+
COPY requirements.txt .
|
| 16 |
+
|
| 17 |
+
# 3. 'uv'๋ฅผ ์ฌ์ฉํ์ฌ ํจํค์ง ์ค์น (ํต์ฌ ํด๊ฒฐ์ฑ
)
|
| 18 |
+
# pip ๋์ uv๋ฅผ ์ฌ์ฉํ๋ฉด resolution-too-deep ์๋ฌ๋ฅผ ํด๊ฒฐํ ์ ์์ต๋๋ค.
|
| 19 |
+
# --system ์ต์
์ผ๋ก ์์คํ
ํ์ด์ฌ ํ๊ฒฝ์ ์ง์ ์ค์นํฉ๋๋ค (๋์ปค ๋ด๋ถ๋ ๊ฒฉ๋ฆฌ๋์ด ์์ผ๋ฏ๋ก ์์ ํจ).
|
| 20 |
+
RUN pip install uv && \
|
| 21 |
+
uv pip install --system --no-cache-dir -r requirements.txt
|
| 22 |
+
|
| 23 |
+
# 4. ๋ณด์์ ์ํด ์ ์ ์์ฑ ๋ฐ ์ ํ
|
| 24 |
+
RUN useradd -m -u 1000 user
|
| 25 |
+
USER user
|
| 26 |
+
ENV PATH="/home/user/.local/bin:$PATH"
|
| 27 |
+
|
| 28 |
+
# 5. ์์ค ์ฝ๋ ๋ณต์ฌ (์ ์ ๊ถํ์ผ๋ก)
|
| 29 |
+
COPY --chown=user . /app
|
| 30 |
+
|
| 31 |
+
# 6. ์๋ฒ ์คํ
|
| 32 |
+
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
Dockerfile_BK_260210.txt
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Dockerfile
|
| 2 |
+
FROM python:3.10-slim
|
| 3 |
+
|
| 4 |
+
# ํ์ํ ํจํค์ง ์ค์น ๋ฐ ์๊ฐ๋ ์ค์ (root ๊ถํ ํ์)
|
| 5 |
+
# 'tzdata'๋ฅผ ์ค์นํ๊ณ 'Asia/Seoul'๋ก ์๊ฐ๋๋ฅผ ์ค์
|
| 6 |
+
RUN apt-get update && \
|
| 7 |
+
apt-get install -y tzdata && \
|
| 8 |
+
ln -sf /usr/share/zoneinfo/Asia/Seoul /etc/localtime && \
|
| 9 |
+
echo "Asia/Seoul" > /etc/timezone && \
|
| 10 |
+
apt-get clean && \
|
| 11 |
+
rm -rf /var/lib/apt/lists/*
|
| 12 |
+
|
| 13 |
+
RUN useradd -m -u 1000 user
|
| 14 |
+
USER user
|
| 15 |
+
ENV PATH="/home/user/.local/bin:$PATH"
|
| 16 |
+
|
| 17 |
+
WORKDIR /app
|
| 18 |
+
|
| 19 |
+
COPY --chown=user ./requirements.txt requirements.txt
|
| 20 |
+
RUN pip install --upgrade pip && \
|
| 21 |
+
pip install --no-cache-dir --upgrade -r requirements.txt
|
| 22 |
+
|
| 23 |
+
COPY --chown=user . /app
|
| 24 |
+
|
| 25 |
+
#์๋ฒ ์คํ ๋ช
๋ น: ์ปจํ
์ด๋๊ฐ ์์๋ ๋ FastAPI ์๋ฒ๋ฅผ ์คํํ๋๋ก ๋ช
๋ น์ ์ ์ํฉ๋๋ค.
|
| 26 |
+
#app.py ํ์ผ์ app ๊ฐ์ฒด๋ฅผ 7860 ํฌํธ๋ก ์ด์ด์ค๋๋ค.
|
| 27 |
+
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
Dockerfile_bk_260211.txt
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Dockerfile
|
| 2 |
+
FROM python:3.10-slim
|
| 3 |
+
|
| 4 |
+
# 1. ์์คํ
ํจํค์ง ์ค์น ๋ฐ ์๊ฐ๋ ์ค์ (Root ๊ถํ)
|
| 5 |
+
RUN apt-get update && \
|
| 6 |
+
apt-get install -y tzdata && \
|
| 7 |
+
ln -sf /usr/share/zoneinfo/Asia/Seoul /etc/localtime && \
|
| 8 |
+
echo "Asia/Seoul" > /etc/timezone && \
|
| 9 |
+
apt-get clean && \
|
| 10 |
+
rm -rf /var/lib/apt/lists/*
|
| 11 |
+
|
| 12 |
+
WORKDIR /app
|
| 13 |
+
|
| 14 |
+
# 2. ์์กด์ฑ ํ์ผ ๋ณต์ฌ
|
| 15 |
+
COPY requirements.txt .
|
| 16 |
+
|
| 17 |
+
# 3. 'uv'๋ฅผ ์ฌ์ฉํ์ฌ ํจํค์ง ์ค์น (ํต์ฌ ํด๊ฒฐ์ฑ
)
|
| 18 |
+
# pip ๋์ uv๋ฅผ ์ฌ์ฉํ๋ฉด resolution-too-deep ์๋ฌ๋ฅผ ํด๊ฒฐํ ์ ์์ต๋๋ค.
|
| 19 |
+
# --system ์ต์
์ผ๋ก ์์คํ
ํ์ด์ฌ ํ๊ฒฝ์ ์ง์ ์ค์นํฉ๋๋ค (๋์ปค ๋ด๋ถ๋ ๊ฒฉ๋ฆฌ๋์ด ์์ผ๋ฏ๋ก ์์ ํจ).
|
| 20 |
+
RUN pip install uv && \
|
| 21 |
+
uv pip install --system --no-cache-dir -r requirements.txt
|
| 22 |
+
|
| 23 |
+
# 4. ๋ณด์์ ์ํด ์ ์ ์์ฑ ๋ฐ ์ ํ
|
| 24 |
+
RUN useradd -m -u 1000 user
|
| 25 |
+
USER user
|
| 26 |
+
ENV PATH="/home/user/.local/bin:$PATH"
|
| 27 |
+
|
| 28 |
+
# 5. ์์ค ์ฝ๋ ๋ณต์ฌ (์ ์ ๊ถํ์ผ๋ก)
|
| 29 |
+
COPY --chown=user . /app
|
| 30 |
+
|
| 31 |
+
# 6. ์๋ฒ ์คํ
|
| 32 |
+
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
app.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI
|
| 2 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 3 |
+
import uvicorn
|
| 4 |
+
from contextlib import asynccontextmanager
|
| 5 |
+
|
| 6 |
+
#from router import llamindex_router
|
| 7 |
+
from router import cnn_router
|
| 8 |
+
|
| 9 |
+
@asynccontextmanager
|
| 10 |
+
async def lifespan_manager(app: FastAPI):
|
| 11 |
+
"""
|
| 12 |
+
์๋ฒ ์์ ์ ๋ชจ๋ธ์ ๋ก๋ํ๊ณ ์ข
๋ฃ ์ ์ ๋ฆฌํฉ๋๋ค.
|
| 13 |
+
"""
|
| 14 |
+
# 1. CNN ๋ชจ๋ธ ๋ก๋ (cnn_router์ ์์)
|
| 15 |
+
await cnn_router.load_models()
|
| 16 |
+
|
| 17 |
+
# ์๋ฒ๊ฐ ์์ฒญ ์ฒ๋ฆฌ๋ฅผ ์์ํ๋๋ก ์ ์ด๊ถ์ ๋๊ฒจ์ค๋๋ค.
|
| 18 |
+
yield
|
| 19 |
+
|
| 20 |
+
# ์๋ฒ ์ข
๋ฃ ์ ์คํ๋๋ ์ ๋ฆฌ(shutdown) ๋ก์ง
|
| 21 |
+
cnn_router.shutdown_models()
|
| 22 |
+
|
| 23 |
+
# FastAPI ์ ํ๋ฆฌ์ผ์ด์
์ด๊ธฐํ
|
| 24 |
+
app = FastAPI(
|
| 25 |
+
title="EfficientNetB0 ์ด๋ฏธ์ง ๋ถ๋ฅ API",
|
| 26 |
+
description="Fine-tuned EfficientNetB0 ๋ชจ๋ธ์ ์ฌ์ฉํ์ฌ ์ด๋ฏธ์ง๋ฅผ ์์ธกํฉ๋๋ค.",
|
| 27 |
+
lifespan=lifespan_manager
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
# CORS ์ค์ (๋ชจ๋ ๋๋ฉ์ธ ํ์ฉ)
|
| 31 |
+
app.add_middleware(
|
| 32 |
+
CORSMiddleware,
|
| 33 |
+
allow_origins=["*"],
|
| 34 |
+
allow_credentials=True,
|
| 35 |
+
allow_methods=["*"],
|
| 36 |
+
allow_headers=["*"],
|
| 37 |
+
)
|
| 38 |
+
|
| 39 |
+
#app.include_router(llamindex_router.router, prefix="/llama_index")
|
| 40 |
+
app.include_router(cnn_router.router) # CNN ๋ผ์ฐํฐ ์ถ๊ฐ
|
| 41 |
+
|
| 42 |
+
# ํฌ์ค ์ฒดํฌ์ฉ ๊ธฐ๋ณธ ์๋ํฌ์ธํธ
|
| 43 |
+
@app.get("/", summary="API ํฌ์ค ์ฒดํฌ")
|
| 44 |
+
def read_root():
|
| 45 |
+
"""API ์๋ฒ๊ฐ ์ ์์ ์ผ๋ก ์๋ํ๋์ง ํ์ธํฉ๋๋ค."""
|
| 46 |
+
return {"message": "EfficientNetB0 Classification API is running successfully."}
|
| 47 |
+
|
| 48 |
+
if __name__ == "__main__":
|
| 49 |
+
# --reload ์ต์
์ ์ถ๊ฐํ์ฌ ์ฝ๋๊ฐ ๋ณ๊ฒฝ๋ ๋๋ง๋ค ์๋ ์ฌ์์๋๊ฒ ์ค์ ํฉ๋๋ค.
|
| 50 |
+
uvicorn.run("app:app", host="0.0.0.0", port=8000, reload=True)
|
app_bk.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI
|
| 2 |
+
|
| 3 |
+
app = FastAPI()
|
| 4 |
+
|
| 5 |
+
@app.get("/")
|
| 6 |
+
def greet_json():
|
| 7 |
+
return {"Hello": "World!"}
|
bee_wasp_efficientnetv2-s_aio.pth
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:093f24660259f92883976b0265824ebd63a98ffedbdd98d826b6ef51f9365835
|
| 3 |
+
size 81642267
|
data/8 Data Backup Security Tips for Ransomware Response.txt
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ๋์ฌ์จ์ด ๋์์ ์ํ ๋ฐ์ดํฐ ๋ฐฑ์
8๋ ๋ณด์ ์์น
|
| 2 |
+
|
| 3 |
+
## 1. ์คํ์ฌ์ดํธ ์ด์
|
| 4 |
+
- ์ค๋ช
: ์ค์ ๋ฐ์ดํฐ๋ ๋ฐ๋์ ์๋น์ค ๋ง๊ณผ ๋ถ๋ฆฌ๋ ์คํ์ฌ์ดํธ(ํด๋ผ์ฐ๋, ์ธ๋ถ ์ ์ฅ์ ๋๋ ์คํ๋ผ์ธ)์ ๋ฐฑ์
ํ์ฌ ์ด์ํฉ๋๋ค.
|
| 5 |
+
- ํค์๋: ์คํ์ฌ์ดํธ ๋ฐฑ์
, ๋ฐ์ดํฐ ๋ถ๋ฆฌ, ํด๋ผ์ฐ๋ ๋ฐฑ์
|
| 6 |
+
|
| 7 |
+
## 2. 3-2-1 ๋ณด๊ด ์ ๋ต
|
| 8 |
+
- ์ค๋ช
: ์ค์ ๋ฐ์ดํฐ 3๊ฐ ์ฌ๋ณธ์ ๋ณด์ ํ๊ณ , 2๊ฐ๋ ์๋ก ๋ค๋ฅธ ์ ์ฅ๋งค์ฒด์, 1๊ฐ๋ ์คํ์ฌ์ดํธ์ ๋ฐฑ์
๋ฐ ์ด์ํฉ๋๋ค.
|
| 9 |
+
- ํค์๋: 3-2-1 ์ ๋ต, ๋ฐ์ดํฐ ์ฌ๋ณธ, ๋ค์ค ์ ์ฅ๋งค์ฒด
|
| 10 |
+
|
| 11 |
+
## 3. ์ ๊ทผ ํต์ ๋ฐ ๊ถํ ๊ด๋ฆฌ
|
| 12 |
+
- ์ค๋ช
: ๋ฐฑ์
์ ์ฅ์์ ๋ํ ์ต์ ์ ๊ทผ ๊ถํ์ ์ ์ฉํ๊ณ , ๋ฐฑ์
๋ด๋น์ ์ธ์ ์ ๊ทผ์ ์ฐจ๋จํฉ๋๋ค.
|
| 13 |
+
- ํค์๋: ์ ๊ทผ ํต์ , ์ต์ ๊ถํ ์์น, ๋ฐฑ์
๋ณด์
|
| 14 |
+
|
| 15 |
+
## 4. ๋ฐฑ์
์๋ฒ ๋ชจ๋ํฐ๋ง
|
| 16 |
+
- ์ค๋ช
: ๋ฐฑ์
์๋ฒ์ ๋ฐ์ดํฐ์ ๋ฌด๊ฒฐ์ฑ์ ์ ๊ฒํ ์ ์๋ ๋ฐฑ์ ๋๋ EDR ์ค์น ๋ฑ ๋ณด์ ๋ชจ๋ํฐ๋ง ์ฒด๊ณ๋ฅผ ๊ตฌ์ถํฉ๋๋ค.
|
| 17 |
+
- ํค์๋: ์๋ฒ ๋ชจ๋ํฐ๋ง, ๋ฐ์ดํฐ ๋ฌด๊ฒฐ์ฑ, EDR, ๋ฐฑ์
|
| 18 |
+
|
| 19 |
+
## 5. ์ ๊ธฐ์ ๋ณต๊ตฌ ํ๋ จ
|
| 20 |
+
- ์ค๋ช
: ์ฐ 1ํ ์ด์ ๋ณต๊ตฌ ํ๋ จ์ ํตํด ์ค์ ๋ณต๊ตฌ ๊ฐ๋ฅ์ฑ์ ๊ฒ์ฆํฉ๋๋ค.
|
| 21 |
+
- ํค์๋: ๋ณต๊ตฌ ํ๋ จ, ์ฌํด ๋ณต๊ตฌ, ๋ฐ์ดํฐ ๋ณต์
|
| 22 |
+
|
| 23 |
+
## 6. ์ต์ ๋ณด์ ํจ์น ์ ์ฉ
|
| 24 |
+
- ์ค๋ช
: ๋ฐฑ์
์๋ฒ์ SW๋ ๋ณด์ ์
๋ฐ์ดํธ ๋ฐ ํจ์น๋ฅผ ์ ์ํ๊ฒ ์ ์ฉํฉ๋๋ค.
|
| 25 |
+
- ํค์๋: ๋ณด์ ํจ์น, ์์คํ
์
๋ฐ์ดํธ, ์ทจ์ฝ์ ๊ด๋ฆฌ
|
| 26 |
+
|
| 27 |
+
## 7. ๋ฐฑ์
์ ๋ฌด๊ฒฐ์ฑ ๊ฒ์ฆ
|
| 28 |
+
- ์ค๋ช
: ๊ฐ์ผ๋ ์๋ณธ์ด ๋ฐฑ์
์ ๋ฎ์ด์ฐ์ง ์๋๋ก ๋ฐฑ์
์ ๋ฌด๊ฒฐ์ฑ์ ๊ฒ์ฆํฉ๋๋ค.
|
| 29 |
+
- ํค์๋: ๋ฌด๊ฒฐ์ฑ ๊ฒ์ฆ, ๋์ฌ์จ์ด ๊ฐ์ผ ๋ฐฉ์ง, ๋ฐฑ์
๋ฐ์ดํฐ ๊ฒ์ฌ
|
| 30 |
+
|
| 31 |
+
## 8. ์๋ ๋ฐฑ์
์ฒด๊ณ ์ด์
|
| 32 |
+
- ์ค๋ช
: ์ผ๊ฐ/์ฃผ๊ฐ/์๊ฐ ๋ฐฑ์
์๋ํ๋ฅผ ํตํด ์ค์๋ ๋๋ฝ์ ๋ฐฉ์งํฉ๋๋ค.
|
| 33 |
+
- ํค์๋: ๋ฐฑ์
์๋ํ, ๋ฐฑ์
์ค์ผ์ค๋ง, ๋ฐ์ดํฐ ๋๋ฝ ๋ฐฉ์ง
|
| 34 |
+
|
| 35 |
+
## ๋ฌธ์
|
| 36 |
+
- ํ๊ตญ์ธํฐ๋ท์งํฅ์ ์ฌ์ด๋ฒ๋ฏผ์์ผํฐ: ๊ตญ๋ฒ์์ด 118
|
| 37 |
+
- ์นจํด์ฌ๊ณ ๋ฐ์ ์ ์นจํด์ฌ๊ณ ์ ๊ณ (๋ณดํธ๋๋ผ > ์นจํด์ฌ๊ณ ์ ๊ณ )
|
data/Cyber โโThreat Trends Report for the First Half of 2025.txt
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2025๋
์๋ฐ๊ธฐ ์ฌ์ด๋ฒ ์ํ ๋ํฅ ๋ณด๊ณ ์ (KISA)
|
| 2 |
+
1. ๊ฐ์ ๋ฐ ํต๊ณ (2025๋
์๋ฐ๊ธฐ ํํฉ)
|
| 3 |
+
๋ณด๊ณ ์ ๋ช
์นญ: 2025๋
์๋ฐ๊ธฐ ์ฌ์ด๋ฒ ์ํ ๋ํฅ ๋ณด๊ณ ์
|
| 4 |
+
๋ฐํ ๊ธฐ๊ด: ๊ณผํ๊ธฐ์ ์ ๋ณดํต์ ๋ถ, ํ๊ตญ์ธํฐ๋ท์งํฅ์(KISA)
|
| 5 |
+
์นจํด์ฌ๊ณ ์ ๊ณ ํํฉ: 2025๋
์๋ฐ๊ธฐ ์ด 1,034๊ฑด (์ ๋
์๋ฐ๊ธฐ ๋๋น 15% ์ฆ๊ฐ)
|
| 6 |
+
๊ณต๊ฒฉ ์ ํ๋ณ ๋น์ค: ์๋ฒ ํดํน(51.4%), DDoS ๊ณต๊ฒฉ(23.0%), ์
์ฑ์ฝ๋ ๊ฐ์ผ(11.1%)
|
| 7 |
+
DDoS ๊ณต๊ฒฉ ๊ธ์ฆ: ์ ๋
์๋ฐ๊ธฐ ๋๋น 55.5% ์ฆ๊ฐ (์ฃผ๋ก ์ ๋ณดํต์ ์
ํ๊ฒ, DNS Query Flooding ์ ํ ๋ค์)
|
| 8 |
+
์
์ข
๋ณ ํํฉ: ์ ๋ณดํต์ ์
(37.7%)์ด ๊ฐ์ฅ ๋์ผ๋ฉฐ, ์ ์กฐ์
(15.2%), ๋์๋งค์
(12.8%) ์
|
| 9 |
+
๋์ฌ์จ์ด ๋ํฅ: ์ค๊ฒฌ๊ธฐ์
์ ๊ณ ๋ 21% ์ฆ๊ฐํ์ผ๋ ์ค์๊ธฐ์
์ 19% ๊ฐ์. ๋จ, ๋ฐฑ์
์์คํ
๊น์ง ๊ฐ์ผ๋๋ ์ฌ๋ก๊ฐ ๋์ด ์ค์ง์ ํผํด๋ ์ฌํ๋จ (๋ฐฑ์
๋ณธ ๊ฐ์ผ๋ฅ 44.4%).
|
| 10 |
+
2. ์ฃผ์ ์นจํด์ฌ๊ณ ์ฌ๋ก ๋ถ์
|
| 11 |
+
๋๊ท๋ชจ ์ ๋ณด ์ ์ถ:
|
| 12 |
+
SKํ
๋ ์ฝค(4์): ๋๊ท๋ชจ ์ ์ฌ(USIM) ์ ๋ณด ๋ฐ ๊ฐ์ธ์ ๋ณด ์ ์ถ. ์๋ฒ ๊ณ์ ๊ด๋ฆฌ ๋ถ์ค ๋ฐ ์ํธํ ์กฐ์น ๋ฏธํก์ด ์์ธ.
|
| 13 |
+
GS25/์๋ฐ๋ชฌ/ํฐ๋จธ๋: ํฌ๋ฆฌ๋ด์
์คํฐํ(Credential Stuffing) ๊ณต๊ฒฉ์ผ๋ก ์ธํ ํ์ ์ ๋ณด ์ ์ถ. ํ ์ฌ์ดํธ์์ ์ ์ถ๋ ๊ณ์ ์ ๋ณด๋ฅผ ์ฌ์ฌ์ฉํ์ฌ ๊ณต๊ฒฉ.
|
| 14 |
+
๊ณต๊ธ๋ง ๋ฐ ๊ฐ์์์ฐ ํดํน:
|
| 15 |
+
๋ฐ์ด๋นํธ(Bybit, 2์): ์ฝ 2์กฐ 700์ต ์ ๊ท๋ชจ ํ์ทจ. ์ง๊ฐ ๋ณด์ ์๋ฃจ์
๊ฐ๋ฐ์ PC ํดํน์ ํตํ ์๋ฐ์คํฌ๋ฆฝํธ ์ฝ๋ ์ฝ์
(๊ณต๊ธ๋ง ๊ณต๊ฒฉ).
|
| 16 |
+
์๋ฏน์ค(WEMIX, 2์): NFT ์๋น์ค(๋์ผ) ์ธ์ฆํค ํดํน์ ํตํ ๊ฐ์์์ฐ ์ ์ถ.
|
| 17 |
+
๋ฒ์ธ๋ณดํ๋๋ฆฌ์ (GA, 4์): ์ ์ง๋ณด์ ์
์ฒด ๊ฐ๋ฐ์ PC ๊ฐ์ผ์ ํตํ ๊ณ ๊ฐ ์ ๋ณด ์ ์ถ.
|
| 18 |
+
๋์ฌ์จ์ด ์ฌ๋ก:
|
| 19 |
+
YES24(6์): ๋์/ํฐ์ผ ์๋น์ค 5์ผ๊ฐ ์ค๋จ. ์ฝ 100์ต ์ ์์ค ๋ฐ 2,000๋ง ๋ช
ํ์ ์ ๋ณด ์ ์ถ ์ ํฉ. ์คํ์ฌ์ดํธ ๋ฐฑ์
๋ถ์ฌ๊ฐ ํผํด ํค์.
|
| 20 |
+
3. ์ ๋ ๋ฐ ๋ฒ๊ท: AI ๊ธฐ๋ณธ๋ฒ (2024.12 ๊ตญํ ํต๊ณผ)
|
| 21 |
+
์ ์ ๋ช
์นญ: ์ธ๊ณต์ง๋ฅ์ ๋ฐ์ ๊ณผ ์ ๋ขฐ ๊ธฐ๋ฐ ์กฐ์ฑ ๋ฑ์ ๊ดํ ๊ธฐ๋ณธ๋ฒ
|
| 22 |
+
ํต์ฌ ๋ด์ฉ: AI ์ฐ์
์ก์ฑ๊ณผ ์ ๋ขฐ์ฑ ํ๋ณด์ ๊ท ํ ๊ฐ์กฐ.
|
| 23 |
+
๊ณ ์ํฅ AI: ์๋ช
, ์ ์ฒด ์์ , ๊ธฐ๋ณธ๊ถ์ ์ค๋ํ ์ํฅ์ ๋ฏธ์น๋ AI๋ก ์ ์ํ๊ณ ๊ท์ ์ ์ฉ.
|
| 24 |
+
์์ฑํ AI: ํฌ๋ช
์ฑ ํ๋ณด ์๋ฌด(AI ์์ฑ๋ฌผ ํ์ ๋ฑ) ๋ถ๊ณผ.
|
| 25 |
+
์ฐ์
๋ณ ์ํฅ: ๊ธ์ต(์ ์ฉํ๊ฐ), ์์จ์ฃผํ(์์ ์ธ์ฆ), ์๋ฃ(์์ ๊ฒ์ฆ), ๋ก๋ณดํฑ์ค(์ค๋ฆฌ ๋ชจ๋) ๋ถ์ผ์์ ๊ธฐ์ ํ์คํ ๋ฐ ์์ ์ฑ ๊ฒ์ฆ ์ง์ ์ฒด๊ณ ๋ง๋ จ.
|
| 26 |
+
EU AI Act ๋น๊ต: EU๋ ์ํ ๊ธฐ๋ฐ์ ๊ฐ๋ ฅํ ๊ท์ ์ค์ฌ์ธ ๋ฐ๋ฉด, ํ๊ตญ์ ์์จ์ฑ๊ณผ ์ํ๊ณ ์กฐ์ฑ์ ๊ฐ์กฐํ๋ ์งํฅ ์ค์ฌ์ ์ฑ๊ฒฉ์ด ๊ฐํจ.
|
| 27 |
+
4. ์ ๊ท ๊ธฐ์ ํธ๋ ๋: MCP ๋ฐ A2A ํ๋กํ ์ฝ
|
| 28 |
+
MCP (Model Context Protocol): Anthropic ๊ฐ๋ฐ. LLM์ด API๋ฅผ ํตํด ์ธ๋ถ ์๋น์ค(์ด๋ฉ์ผ, ์คํ ๋ฆฌ์ง ๋ฑ)์ ์ํธ์์ฉํ๋ ํ์ค ๋ฐฉ์. "AI๋ฅผ ์ํ USB-C"๋ก ๋ถ๋ฆผ.
|
| 29 |
+
A2A (Agent-to-Agent): Google ๊ฐ๋ฐ. ์๋ก ๋ค๋ฅธ AI ์์ด์ ํธ ๊ฐ์ ํ์
๋ฐ ์ํต์ ์ํ ํ๋กํ ์ฝ.
|
| 30 |
+
๋ณด์ ์ํ:
|
| 31 |
+
๋ฌธ๋งฅ ์กฐ์(Context Injection): ์์กฐ๋ ๋ฌธ๋งฅ์ ์ฃผ์
ํด LLM ์ค์๋ ์ ๋.
|
| 32 |
+
TPA (Tool Poisoning Attack): ๋๊ตฌ ์ค๋ช
์ ์
์ฑ ๋ช
๋ น์ ์ฝ์
ํ์ฌ ์ฌ์ฉ์ ๋ชจ๋ฅด๊ฒ ์ ๋ณด ํ์ทจ.
|
| 33 |
+
์ฌ๋ก: ์์ฌ๋(Asana) MCP ์๋ฒ ๋ฒ๊ทธ๋ก ์ธํ ์ฌ์ฉ์ ๋ฐ์ดํฐ ๋
ธ์ถ ์ฌ๊ณ ๋ฐ์.
|
| 34 |
+
5. ๋์ ์ ๋ต ๋ฐ ๊ถ๊ณ ์ฌํญ
|
| 35 |
+
๋ฐ์ดํฐ ๋ฐฑ์
8๋ ๋ณด์ ์์น: ์คํ์ฌ์ดํธ ์ด์, 3-2-1 ๋ณด๊ด ์ ๋ต, ์ ๊ทผ ํต์ , ๋ฌด๊ฒฐ์ฑ ๊ฒ์ฆ, ์ ๊ธฐ ๋ณต๊ตฌ ํ๋ จ ๋ฑ.
|
| 36 |
+
ํ๋์ ๋ฐฉ์ด ์ฒด๊ณ ์ ํ:
|
| 37 |
+
์ ๋กํธ๋ฌ์คํธ(Zero Trust): '์ ๋ ์ ๋ขฐํ์ง ๋ง๊ณ ํญ์ ๊ฒ์ฆ'ํ๋ ์ํคํ
์ฒ ๋์
. ๋ค์ค ์ธ์ฆ(MFA) ํ์ํ.
|
| 38 |
+
์ค์๊ฐ ํ์ง ๋ฐ ๋์: EDR/XDR ๋์
, AI ๊ธฐ๋ฐ ์ด์ํ์ ๋ถ์(UEBA) ํ์ฉ.
|
| 39 |
+
MDR (Managed Detection & Response): ๋ณด์ ์ธ๋ ฅ์ด ๋ถ์กฑํ ๊ธฐ์
์ ์ํ ๊ด๋ฆฌํ ํ์ง ๋ฐ ๋์ ์๋น์ค ํ์ฉ ๊ถ๊ณ .
|
| 40 |
+
๋จ๊ณ๋ณ ๋ก๋๋งต: 1๋จ๊ณ(MFA, ๋ฐฑ์
), 2๋จ๊ณ(EDR/MDR ๋์
), 3๋จ๊ณ(์ง๋ฅํ ํ์ง ์ฒด๊ณ), 4๋จ๊ณ(AI ๊ธฐ๋ฐ ์์จ ๋ณด์).
|
| 41 |
+
ํค์๋: #์ฌ์ด๋ฒ๋ณด์ #2025์ฌ์ด๋ฒ์ํ #KISA #AI๊ธฐ๋ณธ๋ฒ #๋์ฌ์จ์ด #DDoS #MCP #์ ๋กํธ๋ฌ์คํธ #๊ฐ์ธ์ ๋ณด์ ์ถ #๊ณต๊ธ๋ง๊ณต๊ฒฉ
|
data/Detailed Guide to Analyzing and Assessing Technical Vulnerabilities in Critical Information and Communication Infrastructure.txt
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
# 2026 ์ฃผ์์ ๋ณดํต์ ๊ธฐ๋ฐ์์ค ๊ธฐ์ ์ ์ทจ์ฝ์ ๋ถ์ยทํ๊ฐ ๊ฐ์ด๋ (RAG์ฉ ๋ฐ์ดํฐ๋ฒ ์ด์ค)
|
| 3 |
+
|
| 4 |
+
[ํญ๋ชฉ ์ฝ๋] U-01
|
| 5 |
+
[๋ถ๋ฅ] Unix ์๋ฒ > ๊ณ์ ๊ด๋ฆฌ
|
| 6 |
+
[์ ๋ชฉ] root ๊ณ์ ์๊ฒฉ ์ ์ ์ ํ
|
| 7 |
+
[์ค์๋] ์
|
| 8 |
+
[ํญ๋ชฉ ์ค๋ช
] ์์คํ
์ ์ฑ
์ root ๊ณ์ ์ ์๊ฒฉํฐ๋ฏธ๋ ์ ์ ์ฐจ๋จ ์ค์ ์ด ์ ์ฉ ์ฌ๋ถ ์ ๊ฒ
|
| 9 |
+
[์ ๊ฒ ๋ชฉ์ ] ๊ด๋ฆฌ์ ๊ณ์ ํ์ทจ๋ก ์ธํ ์์คํ
์ฅ์
์ ๋ฐฉ์งํ๊ธฐ ์ํด ์ธ๋ถ ๋น์ธ๊ฐ์์ root ๊ณ์ ์ ๊ทผ ์๋๋ฅผ ์์ฒ์ ์ผ๋ก ์ฐจ๋จํ๊ธฐ ์ํจ
|
| 10 |
+
[๋ณด์ ์ํ] root ๊ณ์ ์ ์ด์์ฒด์ ์ ๋ชจ๋ ๊ธฐ๋ฅ์ ์ค์ ๋ฐ ๋ณ๊ฒฝ์ด ๊ฐ๋ฅํ์ฌ root ๊ณ์ ์ ํ์ทจํ์ฌ ์ธ๋ถ์์ ์๊ฒฉ์ ์ด์ฉํ ์์คํ
์ฅ์
๋ฐ ๊ฐ์ข
๊ณต๊ฒฉ์ผ๋ก ์ธํ ์ํ์ด ์กด์ฌํจ
|
| 11 |
+
[ํ๋จ ๊ธฐ์ค]
|
| 12 |
+
- ์ํธ: ์๊ฒฉํฐ๋ฏธ๋ ์๋น์ค๋ฅผ ์ฌ์ฉํ์ง ์๊ฑฐ๋, ์ฌ์ฉ ์ root ์ง์ ์ ์์ ์ฐจ๋จํ ๊ฒฝ์ฐ
|
| 13 |
+
- ์ทจ์ฝ: ์๊ฒฉํฐ๋ฏธ๋ ์๋น์ค ์ฌ์ฉ ์ root ์ง์ ์ ์์ ํ์ฉํ ๊ฒฝ์ฐ
|
| 14 |
+
[์กฐ์น ๋ฐฉ๋ฒ] ์๊ฒฉ ์ ์ ์ root ๊ณ์ ์ผ๋ก ์ ์ํ ์ ์๋๋ก ํ์ผ ๋ด์ฉ ์ค์
|
| 15 |
+
- LINUX: /etc/ssh/sshd_config ํ์ผ ๋ด์ PermitRootLogin No ์ค์ , Telnet ์ฌ์ฉ ์ /etc/securetty ํ์ผ ๋ด pts/x ์ค์ ์ฃผ์ ์ฒ๋ฆฌ
|
| 16 |
+
- SOLARIS: /etc/default/login ํ์ผ ๋ด CONSOLE=/dev/console ์ค์
|
| 17 |
+
- AIX: /etc/security/user ํ์ผ์ rlogin = false ์ค์
|
| 18 |
+
|
| 19 |
+
--------------------------------------------------
|
| 20 |
+
|
| 21 |
+
[ํญ๋ชฉ ์ฝ๋] U-02
|
| 22 |
+
[๋ถ๋ฅ] Unix ์๋ฒ > ๊ณ์ ๊ด๋ฆฌ
|
| 23 |
+
[์ ๋ชฉ] ๋น๋ฐ๋ฒํธ ๊ด๋ฆฌ์ ์ฑ
์ค์
|
| 24 |
+
[์ค์๋] ์
|
| 25 |
+
[ํญ๋ชฉ ์ค๋ช
] ๋น๋ฐ๋ฒํธ ๊ด๋ฆฌ ์ ์ฑ
์ค์ ์ฌ๋ถ ์ ๊ฒ
|
| 26 |
+
[์ ๊ฒ ๋ชฉ์ ] ์ฌ์ฉ์์ ๋น๋ฐ๋ฒํธ ๋ณต์ก์ฑ๊ณผ ์ฃผ๊ธฐ์ ๋ณ๊ฒฝ์ ํตํด ์์คํ
๋ณด์์ ๊ฐํํ๊ธฐ ์ํจ
|
| 27 |
+
[๋ณด์ ์ํ] ๋น๋ฐ๋ฒํธ ๊ด๋ จ ์ ์ฑ
์ด ์ค์ ๋์ง ์์ ๊ฒฝ์ฐ, ๋น์ธ๊ฐ์์ ๊ฐ์ข
๊ณต๊ฒฉ์ ์ํด ๋น๋ฐ๋ฒํธ๊ฐ ๋
ธ์ถ๋ ์ํ์ด ์กด์ฌํจ
|
| 28 |
+
[ํ๋จ ๊ธฐ์ค]
|
| 29 |
+
- ์ํธ: ๋น๋ฐ๋ฒํธ ๊ด๋ฆฌ ์ ์ฑ
์ด ์ค์ ๋ ๊ฒฝ์ฐ
|
| 30 |
+
- ์ทจ์ฝ: ๋น๋ฐ๋ฒํธ ๊ด๋ฆฌ ์ ์ฑ
์ด ์ค์ ๋์ง ์์ ๊ฒฝ์ฐ
|
| 31 |
+
[์กฐ์น ๋ฐฉ๋ฒ] ์ฌ์ฉ์ ๊ณ์ ์ ๋น๋ฐ๋ฒํธ๋ฅผ ์๋ฌธ, ์ซ์, ํน์๋ฌธ์ ํฌํจ ์ต์ 8์๋ฆฌ ์ด์, ์ต์ ์ฌ์ฉ ๊ธฐ๊ฐ 1์ผ, ์ต๋ ์ฌ์ฉ ๊ธฐ๊ฐ 90์ผ, ์ต๊ทผ ๊ธฐ์ต 4ํ ์ด์์ผ๋ก ์ค์
|
| 32 |
+
- LINUX (Redhat): /etc/login.defs ํ์ผ์ PASS_MAX_DAYS 90, PASS_MIN_DAYS 1 ์ค์ , /etc/security/pwquality.conf ์ minlen = 8, dcredit = -1 ๋ฑ ์ค์
|
| 33 |
+
|
| 34 |
+
--------------------------------------------------
|
| 35 |
+
|
| 36 |
+
[ํญ๋ชฉ ์ฝ๋] W-01
|
| 37 |
+
[๋ถ๋ฅ] Windows ์๋ฒ > ๊ณ์ ๊ด๋ฆฌ
|
| 38 |
+
[์ ๋ชฉ] Administrator ๊ณ์ ์ด๋ฆ ๋ณ๊ฒฝ ๋ฑ ๋ณด์์ฑ ๊ฐํ
|
| 39 |
+
[์ค์๋] ์
|
| 40 |
+
[ํญ๋ชฉ ์ค๋ช
] Administrator์ ๊ณ์ ๋ช
๋ณ๊ฒฝ ๋๋ ๋ณด์์ ๊ณ ๋ คํ ๋น๋ฐ๋ฒํธ ์ค์ ์ฌ๋ถ ์ ๊ฒ
|
| 41 |
+
[์ ๊ฒ ๋ชฉ์ ] ์ ์๋ ค์ง ๊ด๋ฆฌ์ ๊ณ์ ์ ํตํ ์
์์ ์ธ ํจ์ค์๋ ์ถ์ธก ๊ณต๊ฒฉ์ ์ฐจ๋จํ๊ธฐ ์ํจ
|
| 42 |
+
[๋ณด์ ์ํ] Administrator ๊ณ์ ์ ๊ธฐ๋ณธ์ ์ผ๋ก ์ญ์ ํ๊ฑฐ๋ ์ ๊ธ ์ ์์ด ๊ณต๊ฒฉ์์ ์ฃผ์ ํ๊ฒ์ด ๋จ
|
| 43 |
+
[ํ๋จ ๊ธฐ์ค]
|
| 44 |
+
- ์ํธ: Administrator ๊ธฐ๋ณธ ๊ณ์ ์ด๋ฆ์ ๋ณ๊ฒฝํ๊ฑฐ๋ ๊ฐํ๋ ๋น๋ฐ๋ฒํธ๋ฅผ ์ ์ฉํ ๊ฒฝ์ฐ
|
| 45 |
+
- ์ทจ์ฝ: Administrator ๊ธฐ๋ณธ ๊ณ์ ์ด๋ฆ์ ๋ณ๊ฒฝํ์ง ์๊ฑฐ๋ ๋จ์ ๋น๋ฐ๋ฒํธ๋ฅผ ์ ์ฉํ ๊ฒฝ์ฐ
|
| 46 |
+
[์กฐ์น ๋ฐฉ๋ฒ] ๋ก์ปฌ ๋ณด์ ์ ์ฑ
์์ Administrator ๊ณ์ ์ด๋ฆ์ ์ ์ถํ๊ธฐ ์ด๋ ค์ด ์ด๋ฆ์ผ๋ก ๋ณ๊ฒฝ
|
| 47 |
+
- Windows 2012/2016/2019/2022: ์ ์ดํ > ๊ด๋ฆฌ ๋๊ตฌ > ๋ก์ปฌ ๋ณด์ ์ ์ฑ
> ๋ก์ปฌ ์ ์ฑ
> ๋ณด์ ์ต์
> โ๊ณ์ : Administrator ๊ณ์ ์ด๋ฆ ๋ฐ๊พธ๊ธฐโ ์ ํ ํ ์ด๋ฆ ๋ณ๊ฒฝ
|
| 48 |
+
|
| 49 |
+
--------------------------------------------------
|
| 50 |
+
|
| 51 |
+
[ํญ๋ชฉ ์ฝ๋] WEB-04
|
| 52 |
+
[๋ถ๋ฅ] ์น ์๋น์ค > ์๋น์ค ๊ด๋ฆฌ
|
| 53 |
+
[์ ๋ชฉ] ์น ์๋น์ค ๋๋ ํฐ๋ฆฌ ๋ฆฌ์คํ
๋ฐฉ์ง ์ค์
|
| 54 |
+
[์ค์๋] ์
|
| 55 |
+
[ํญ๋ชฉ ์ค๋ช
] ๋๋ ํฐ๋ฆฌ ๋ฆฌ์คํ
๊ธฐ๋ฅ ์ฐจ๋จ ์ฌ๋ถ ์ ๊ฒ
|
| 56 |
+
[์ ๊ฒ ๋ชฉ์ ] ๋๋ ํฐ๋ฆฌ ๋ด์ ๋ชจ๋ ํ์ผ์ ๋ํ ์ ๊ทผ ๋ฐ ์ ๋ณด ๋
ธ์ถ์ ์ฐจ๋จํ๊ธฐ ์ํจ
|
| 57 |
+
[๋ณด์ ์ํ] ์ฐจ๋จ๋์ง ์์ ๊ฒฝ์ฐ ๋ฐฑ์
ํ์ผ์ด๋ ์์ค ํ์ผ ๋ฑ ๊ณต๊ฐ๋๋ฉด ์ ๋๋ ์ค์ ํ์ผ๋ค์ด ๋
ธ์ถ๋จ
|
| 58 |
+
[ํ๋จ ๊ธฐ์ค]
|
| 59 |
+
- ์ํธ: ๋๋ ํฐ๋ฆฌ ๋ฆฌ์คํ
์ด ์ค์ ๋์ง ์์ ๊ฒฝ์ฐ
|
| 60 |
+
- ์ทจ์ฝ: ๋๋ ํฐ๋ฆฌ ๋ฆฌ์คํ
์ด ์ค์ ๋ ๊ฒฝ์ฐ
|
| 61 |
+
[์กฐ์น ๋ฐฉ๋ฒ] ์น ์๋ฒ ์ค์ ํ์ผ์์ ์ธ๋ฑ์ฑ ์ต์
์ ๊ฑฐ
|
| 62 |
+
- Apache: httpd.conf ํ์ผ ๋ด Options ์ง์์์์ Indexes ์ ๊ฑฐ (๋๋ -Indexes)
|
| 63 |
+
- Nginx: nginx.conf ํ์ผ ๋ด autoindex off ์ค์
|
| 64 |
+
- IIS: ์ธํฐ๋ท ์ ๋ณด ์๋น์ค(IIS) ๊ด๋ฆฌ์ > ๋๋ ํฐ๋ฆฌ ๊ฒ์ > ์ฌ์ฉ ์ ํจ
|
data/Guide to Key Information and Communication Infrastructure Management, Physical Vulnerability Analysis.txt
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
====================================================
|
| 2 |
+
DOCUMENT: 2026 ์ฃผ์์ ๋ณดํต์ ๊ธฐ๋ฐ์์ค ๊ด๋ฆฌยท๋ฌผ๋ฆฌ์ ์ทจ์ฝ์ ๋ถ์ยทํ๊ฐ๋ฐฉ๋ฒ ์๋ด์
|
| 3 |
+
YEAR: 2026 | AUTHOR: ๊ณผํ๊ธฐ์ ์ ๋ณดํต์ ๋ถ / ํ๊ตญ์ธํฐ๋ท์งํฅ์(KISA)
|
| 4 |
+
====================================================
|
| 5 |
+
|
| 6 |
+
[SECTION: ์๋ด์ ๊ฐ์ ๋ฐ ํ๊ฐ ์ฒด๊ณ]
|
| 7 |
+
Description: ์ ๋ณดํต์ ๊ธฐ๋ฐ ๋ณดํธ๋ฒ์ ๋ฐ๋ฅธ ์ ๊ธฐ์ ์ทจ์ฝ์ ๋ถ์ ๋ฐ ํ๊ฐ์ ๋ชฉ์ ๊ณผ ์ํ ์ ์ฐจ๋ฅผ ์ ์ํฉ๋๋ค.
|
| 8 |
+
----------------------------------------------------
|
| 9 |
+
โถ SUBSECTION: ๋ฒ์ ๊ทผ๊ฑฐ ๋ฐ ๋ชฉ์
|
| 10 |
+
- SUMMARY: ์ ๋ณดํต์ ๊ธฐ๋ฐ ๋ณดํธ๋ฒ ์ 9์กฐ์ ๊ทผ๊ฑฐํ์ฌ ๊ด๋ฆฌ๊ธฐ๊ด์ ์ฅ์ด ์ ๊ธฐ์ ์ผ๋ก ์๊ด ์์ค์ ์ทจ์ฝ์ ์ ๋ถ์ยทํ๊ฐํ๋๋ก ์๋ฌดํํ๊ณ ์์ต๋๋ค.
|
| 11 |
+
- KEY POINTS:
|
| 12 |
+
1. ์ฃผ์์ ๋ณดํต์ ๊ธฐ๋ฐ์์ค ๋ณดํธ ์ญ๋ ๊ฐํ
|
| 13 |
+
2. ๋งค๋
์ ๊ธฐ ํ๊ฐ ์ค์ ์๋ฌด
|
| 14 |
+
3. ์ค๋ํ ๋ณํ ๋ฐ์ ์ ์์ ํ๊ฐ ๋ช
๋ น ๊ฐ๋ฅ
|
| 15 |
+
- REFERENCES: ์ ๋ณดํต์ ๊ธฐ๋ฐ ๋ณดํธ๋ฒ ์ 9์กฐ
|
| 16 |
+
|
| 17 |
+
โถ SUBSECTION: ์ ๊ฒ ์๋ น ๋ฐ ๋ฑ๊ธ ์ฐ์
|
| 18 |
+
- SUMMARY: ํ๊ฐ ๊ฒฐ๊ณผ๋ '์ํธ', '๋ถ๋ถ ์ดํ', '์ทจ์ฝ'์ 3๋จ๊ณ๋ก ๊ตฌ๋ถํ์ฌ ๊ฐ๊ด์ฑ์ ํ๋ณดํฉ๋๋ค.
|
| 19 |
+
- KEY POINTS:
|
| 20 |
+
1. ์ํธ: ์ ๊ฒ ํญ๋ชฉ์ ๋ช
ํํ ๋ถํฉ
|
| 21 |
+
2. ๋ถ๋ถ ์ดํ: ์ผ๋ถ ๋ง์กฑํ๋ ๊ฐ์ ํ์
|
| 22 |
+
3. ์ทจ์ฝ: ํญ๋ชฉ ๋ถํฉ ์คํจ
|
| 23 |
+
- REFERENCES: ์ ๊ฒ ์๋ น ์ 3์
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
[SECTION: ์ ๋ณด๋ณดํธ ๊ฑฐ๋ฒ๋์ค ๋ฐ ์ ์ฑ
๊ด๋ฆฌ]
|
| 27 |
+
Description: ์กฐ์ง ๋ด ์ ๋ณด๋ณดํธ ์ ์ฑ
์๋ฆฝ, ์ ๋ด ์กฐ์ง ๊ตฌ์ฑ ๋ฐ ์์ํ ์ด์์ ๊ดํ ์ง์นจ์ ๋ค๋ฃน๋๋ค.
|
| 28 |
+
----------------------------------------------------
|
| 29 |
+
โถ SUBSECTION: ์ ๋ณด๋ณดํธ ์ ์ฑ
์๋ฆฝ ๋ฐ ์ํ
|
| 30 |
+
- SUMMARY: ์ต๊ณ ๊ฒฝ์์ ์น์ธ์ ํ๋ํ ์ต์์ ์ ์ฑ
์ ์๋ฆฝํ๊ณ , ์ด๋ฅผ ๊ตฌ์ฒดํํ ์ง์นจ, ์ ์ฐจ, ๋งค๋ด์ผ์ ๋ฌธ์ํํ์ฌ์ผ ํฉ๋๋ค.
|
| 31 |
+
- KEY POINTS:
|
| 32 |
+
1. ์ต๊ณ ๊ฒฝ์์(๊ธฐ๊ด์ฅ) ๊ณต์ ์น์ธ ํ์
|
| 33 |
+
2. ์์ง์ ๋ฐ ๊ด๋ จ์ ์ ๊ทผ ์ฉ์ด์ฑ ํ๋ณด
|
| 34 |
+
3. ์ฐ 1ํ ์ด์ ํ๋น์ฑ ๊ฒํ ๋ฐ ์ค๋ ๋ณํ ์ ๊ฐ์
|
| 35 |
+
- REFERENCES: A-1, A-2, A-3, A-4, A-5
|
| 36 |
+
|
| 37 |
+
โถ SUBSECTION: ์ ๋ณด๋ณดํธ ์กฐ์ง ๋ฐ ์ธ๋ ฅ ๋ณด์
|
| 38 |
+
- SUMMARY: ์ ๋ณด๋ณดํธ์ฑ
์์(CISO) ์ง์ ๋ฐ ์ ๋ด ์กฐ์ง์ ๊ตฌ์ฑํ๊ณ , ์ธ๋ ฅ ์ฑ์ฉ๋ถํฐ ํด์ง๊น์ง์ ๋ณด์ ์ ์ฐจ๋ฅผ ์๋ฆฝํฉ๋๋ค.
|
| 39 |
+
- KEY POINTS:
|
| 40 |
+
1. ์ ๋ณด๋ณดํธ์์ํ ๊ตฌ์ฑ ๋ฐ ์ญํ ๋ช
๋ฌธํ
|
| 41 |
+
2. ์ ์ ํ์ธ ๋ฐ ์ ๊ฒฉ์ฌ์ฌ ์ํ
|
| 42 |
+
3. ๋น๋ฐ์ ์ง ํ์ฝ์ ์ง๊ตฌ ๋ฐ ํด์ง ์ ๊ถํ ์ฆ์ ํ์
|
| 43 |
+
- REFERENCES: A-8, A-9, A-21, A-25
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
[SECTION: ์์ฐ ๊ด๋ฆฌ ๋ฐ ์ด์ ๋ณด์]
|
| 47 |
+
Description: ์ ๋ณด์์ฐ์ ์๋ณ, ๋ถ๋ฅ์ ๋๋ถ์ด ์์คํ
์ ๋์
, ๋ณ๊ฒฝ, ํ๊ธฐ ๋ฑ ์์ ์ฃผ๊ธฐ ๋ณด์์ ๊ด๋ฆฌํฉ๋๋ค.
|
| 48 |
+
----------------------------------------------------
|
| 49 |
+
โถ SUBSECTION: ์์ฐ ์๋ณ ๋ฐ ์ํ ๊ด๋ฆฌ
|
| 50 |
+
- SUMMARY: ์ธ๋ ฅ, ์์ค, ์ฅ๋น ๋ฑ ๋ชจ๋ ์์ฐ์ ์๋ณํ์ฌ ๋ชฉ๋กํํ๊ณ ์ฐ 1ํ ์ด์ ์ ๊ธฐ์ ์ํ ํ๊ฐ๋ฅผ ์ํํฉ๋๋ค.
|
| 51 |
+
- KEY POINTS:
|
| 52 |
+
1. ์์ฐ ์ค์๋(๊ธฐ๋ฐ์ฑ, ๋ฌด๊ฒฐ์ฑ, ๊ฐ์ฉ์ฑ) ํ๊ฐ ๋ฐ ๋ฑ๊ธ ๋ถ์ฌ
|
| 53 |
+
2. ์ํ ์ฒ๋ฆฌ ์ ๋ต(๊ฐ์, ํํผ, ์ ๊ฐ, ์์ฉ) ์๋ฆฝ
|
| 54 |
+
3. ๋ชฉํ ์ํ ์์ค(DoA) ์ค์
|
| 55 |
+
- REFERENCES: A-10, A-15, A-16, A-17
|
| 56 |
+
|
| 57 |
+
โถ SUBSECTION: ์ด์ ๋ฐ ๋ณ๊ฒฝ ๋ณด์
|
| 58 |
+
- SUMMARY: ์์คํ
๋์
์ ๋ณด์์ฑ ๊ฒํ ๋ฅผ ์ค์ํ๊ณ , ๋ณ๊ฒฝ ์ฌํญ์ ๋ํ ๊ณต์ ์น์ธ ์ ์ฐจ๋ฅผ ์ด์ํฉ๋๋ค.
|
| 59 |
+
- KEY POINTS:
|
| 60 |
+
1. ๊ตญ๋ด์ฉ CC์ธ์ฆ ์ ํ ๋์
๊ถ๊ณ
|
| 61 |
+
2. ๊ฐ๋ฐ-ํ
์คํธ-์ด์ ํ๊ฒฝ์ ๋ฌผ๋ฆฌ์ /๋
ผ๋ฆฌ์ ๋ถ๋ฆฌ
|
| 62 |
+
3. ์์ค์ฝ๋ ๋ณด์ ์ฝ์ ์ ๊ฒ ๋ฐ ํ์ ๊ด๋ฆฌ
|
| 63 |
+
- REFERENCES: A-58, A-61, A-63, A-66
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
[SECTION: ์ ๊ทผ ํต์ ๋ฐ ๋คํธ์ํฌ ๋ณด์]
|
| 67 |
+
Description: ๊ณ์ ๊ด๋ฆฌ, ์ธ์ฆ ๋ฐฉ์, ๋คํธ์ํฌ ๋ถ๋ฆฌ ๋ฐ ์ธ๋ถ ์ ์ ํต์ ์ ๋ต์ ์ ์ํฉ๋๋ค.
|
| 68 |
+
----------------------------------------------------
|
| 69 |
+
โถ SUBSECTION: ์ธ์ฆ ๋ฐ ๊ถํ ๊ด๋ฆฌ
|
| 70 |
+
- SUMMARY: ์ง๋ฌด๋ณ ์ ๊ทผ ๊ถํ ๋ถ์ฌ์ ๋ค์ค ์ธ์ฆ(MFA) ๋ฑ ์์ ํ ์ธ์ฆ ์ฒด๊ณ๋ฅผ ๊ตฌ์ถํฉ๋๋ค.
|
| 71 |
+
- KEY POINTS:
|
| 72 |
+
1. ์ ํ์(Need-to-Know) ๋ฐ ํ ํ์(Need-to-Do) ์์น
|
| 73 |
+
2. ๋น๋ฐ๋ฒํธ ๋ณต์ก๋ ์ค์ ๋ฐ ์ฃผ๊ธฐ์ ๋ณ๊ฒฝ
|
| 74 |
+
3. ์ฅ๊ธฐ ๋ฏธ์ฌ์ฉ ๊ณ์ (3๊ฐ์ ์ดํ) ํํฉ ์กฐ์ฌ ๋ฐ ์กฐ์น
|
| 75 |
+
- REFERENCES: A-39, A-41, A-42
|
| 76 |
+
|
| 77 |
+
โถ SUBSECTION: ๋คํธ์ํฌ ๋ฐ ๋ง๋ถ๋ฆฌ
|
| 78 |
+
- SUMMARY: ๋ด๋ถ๋ง๊ณผ ์ธ๋ถ๋ง์ ๋ถ๋ฆฌํ๊ณ ์ผ๋ฐฉํฅ ์ ์ก ์ฅ๋น ๋ฑ์ ํตํด ์์ ํ ๋ฐ์ดํฐ ์ ์ก์ ๋ณด์ฅํฉ๋๋ค.
|
| 79 |
+
- KEY POINTS:
|
| 80 |
+
1. ์
๋ฌด๋ง๊ณผ ์ธํฐ๋ท๋ง์ ๋ฌผ๋ฆฌ์ /๋
ผ๋ฆฌ์ ๋ถ๋ฆฌ
|
| 81 |
+
2. VPN ๋ฑ ์์ ํ ์๊ฒฉ ์ ์ ์๋จ ์ ์ฉ
|
| 82 |
+
3. ๋น์ธ๊ฐ ๋ฌด์ AP ํ์ง ๋ฐ ์ฐจ๋จ
|
| 83 |
+
- REFERENCES: A-51, A-52, A-53, A-57
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
[SECTION: ์นจํด์ฌ๊ณ ๋์ ๋ฐ ์
๋ฌด ์ฐ์์ฑ]
|
| 87 |
+
Description: ์ฌ์ด๋ฒ ๊ณต๊ฒฉ ๋ฐ์ ์ ๋์ ์ฒด๊ณ์ ์ฌ๋ ์ ์๋น์ค ๊ฐ์ฉ์ฑ ๋ณด์ฅ ๊ณํ์ ๋ค๋ฃน๋๋ค.
|
| 88 |
+
----------------------------------------------------
|
| 89 |
+
โถ SUBSECTION: ์นจํด์ฌ๊ณ ๋์ ์ฒด๊ณ(CERT)
|
| 90 |
+
- SUMMARY: ์ฌ๊ณ ์๋ฐฉ, ํ์ง, ๋์, ๋ณต๊ตฌ ๋ฐ ๋ณด๊ณ ์ ์ฐจ๋ฅผ ํฌํจํ CERT ์กฐ์ง์ ๊ตฌ์ฑํ๊ณ ํ๋ จํฉ๋๋ค.
|
| 91 |
+
- KEY POINTS:
|
| 92 |
+
1. ์ฌ์ด๋ฒ์๊ธฐ ๊ฒฝ๋ณด ๋จ๊ณ๋ณ ํ๋์๋ น ์๋ฆฝ
|
| 93 |
+
2. ์นจํด์ฌ๊ณ ๋ฐ์ ์ 24์๊ฐ ์ด๋ด ๊ด๊ณ๊ธฐ๊ด ํต์ง
|
| 94 |
+
3. ์ฌ๋ฐ ๋ฐฉ์ง ๋์ฑ
์๋ฆฝ ๋ฐ ์ฆ๊ฑฐ ์๋ฃ ํ๋ณด
|
| 95 |
+
- REFERENCES: A-104, A-107, A-110, A-113
|
| 96 |
+
|
| 97 |
+
โถ SUBSECTION: ๋น์ฆ๋์ค ์ฐ์์ฑ ๊ณํ(BCP)
|
| 98 |
+
- SUMMARY: ํต์ฌ ์๋น์ค์ ๋ณต๊ตฌ ๋ชฉํ ์๊ฐ(RTO)๊ณผ ์์ (RPO)์ ์ ์ํ๊ณ ์์คํ
์ด์คํ๋ฅผ ๊ด๋ฆฌํฉ๋๋ค.
|
| 99 |
+
- KEY POINTS:
|
| 100 |
+
1. ์
๋ฌด์ํฅ๋ถ์(BIA) ์ํ ๋ฐ ํต์ฌ ์์คํ
์๋ณ
|
| 101 |
+
2. ์ ๊ธฐ์ ๋ณต๊ตฌ ํ
์คํธ ๋ฐ ๋ชจ์ ํ๋ จ ์ค์
|
| 102 |
+
3. ์ค์ ๋ฐ์ดํฐ์ ์๊ฒฉ์ง ์์ฐ ๋ฐฑ์
|
| 103 |
+
- REFERENCES: A-114, A-115, A-116, A-118
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
[SECTION: ๋ฌผ๋ฆฌ์ ๋ณด์ ๋ฐ ํ๊ฒฝ ํต์ ]
|
| 107 |
+
Description: ๋ณดํธ๊ตฌ์ญ ์ง์ , ์ถ์
ํต์ ์ฅ์น ์ด์ ๋ฐ ํ๊ฒฝ ์ฌํด ๋๋น ์ค๋น๋ฅผ ๊ด๋ฆฌํฉ๋๋ค.
|
| 108 |
+
----------------------------------------------------
|
| 109 |
+
โถ SUBSECTION: ๋ณดํธ๊ตฌ์ญ ๊ด๋ฆฌ
|
| 110 |
+
- SUMMARY: ์ ํ๊ตฌ์ญ๊ณผ ํต์ ๊ตฌ์ญ์ ๊ตฌ๋ถํ์ฌ ์ถ์
์๊ฒฉ์ ์๊ฒฉํ ์ ํํ๊ณ ๊ธฐ๋ก์ ๋ณด๊ดํฉ๋๋ค.
|
| 111 |
+
- KEY POINTS:
|
| 112 |
+
1. ๋ค๋จ๊ณ ์ถ์
ํต์ (ID์นด๋, ์์ฒด ์ธ์ ๋ฑ)
|
| 113 |
+
2. ์ถ์
๊ธฐ๋ก ์ต์ 2๊ฐ์ ์ด์ ๋ณด๊ด
|
| 114 |
+
3. ๋ณดํธ๊ตฌ์ญ ๋ด CCTV ์ค์น ๋ฐ ๋ชจ๋ํฐ๋ง
|
| 115 |
+
- REFERENCES: P-1, P-2, P-6, P-17
|
| 116 |
+
|
| 117 |
+
โถ SUBSECTION: ์์ค ๋ณดํธ ๋ฐ ์ฌํด ๋๋น
|
| 118 |
+
- SUMMARY: ํ์ฌ, ์นจ์, ์ ๋ ฅ ์ค๋จ ๋ฑ ํ๊ฒฝ์ ์ํ์ผ๋ก๋ถํฐ ์ฅ๋น๋ฅผ ๋ณดํธํ๊ธฐ ์ํ ์ค๋น๋ฅผ ๊ตฌ์ถํฉ๋๋ค.
|
| 119 |
+
- KEY POINTS:
|
| 120 |
+
1. UPS ๋ฐ ๋น์ ๋ฐ์ ๊ธฐ ๋ฑ ๋น์ ์ ์ ์ค๋น
|
| 121 |
+
2. 24์๊ฐ ํญ์จํญ์ต ๋ฐ ๋์ ๊ฐ์ง ์ผ์
|
| 122 |
+
3. ๋ดํ ๊ตฌ์กฐ ๊ฑด์ถ ์์ฌ ์ฌ์ฉ ๋ฐ ์ํด ๋ฐฉ์ง ์์ค
|
| 123 |
+
- REFERENCES: P-10, P-11, P-15, P-14
|
| 124 |
+
|
| 125 |
+
|
data/Hacking Diagnostic Tool Utilization Plan #4 Taking Control of the AD Environment Through Exposed SMB File Servers.txt
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[์ ์ฒด ๋ฌธ์ ๊ฐ์]
|
| 2 |
+
์ ๋ชฉ: ํดํน์ง๋จ๋๊ตฌ ํ์ฉ ๋ฐฉ์ #4: ๋
ธ์ถ๋ SMB ํ์ผ ์๋ฒ๋ฅผ ํตํ AD ํ๊ฒฝ ์ฅ์
|
| 3 |
+
์์ฑ: ํ๊ตญ์ธํฐ๋ท์งํฅ์(KISA) ์ํ๋ถ์๋จ ํฌ๋ ์๋ถ์ํ
|
| 4 |
+
์ค๋ช
: ๋ณด์ ์ธ๋ ฅ์ด ๋ถ์กฑํ ์ค์๊ธฐ์
์ ๋์์ผ๋ก, SMB ์ทจ์ฝ์ ์ ํตํ ๋ด๋ถ ์นจํฌ ๋ฐ Active Directory(AD) ๊ถํ ์ฅ์
์๋๋ฆฌ์ค๋ฅผ ๋ถ์ํ๊ณ ํดํน์ง๋จ๋๊ตฌ๋ฅผ ํ์ฉํ ํ์ง ๋ฐ ๋์ ๋ฐฉ์์ ์ ์ํจ.
|
| 5 |
+
๋ถ์ ๋์: ํ์ผ ์๋ฒ(Ubuntu 20.04 LTS), ๋๋ฉ์ธ ์ปจํธ๋กค๋ฌ(Windows Server 2019)
|
| 6 |
+
|
| 7 |
+
==================================================
|
| 8 |
+
|
| 9 |
+
[1. ํดํน์ง๋จ๋๊ตฌ ๊ฐ์ ๋ฐ ๋ชฉ์ ]
|
| 10 |
+
ํต์ฌ ์์ฝ: ์ค์๊ธฐ์
์ด ์นจํด์ฌ๊ณ ์ ์ต์ข
ํผํด ๋ฐ์ ์ ํดํน ์๋๋ฅผ ์ค์ค๋ก ์๋ณํ๊ณ ๋์ํ ์ ์๋๋ก ์ง์ํ๋ ์๊ฐ์ง๋จ ๋๊ตฌ ํ์ฉ ๊ฐ์ด๋.
|
| 11 |
+
์์ธ ๋ด์ฉ: ํดํน์ง๋จ๋๊ตฌ๋ ๋ณด์ ์ ๋ด ์ธ๋ ฅ๊ณผ ์์ฐ์ด ๋ถ์กฑํ ๊ธฐ์
๋ค์ด ์นจํด์ฌ๊ณ ๋ฅผ ์กฐ๊ธฐ์ ์ธ์งํ ์ ์๋๋ก ๊ฐ๋ฐ๋์์ต๋๋ค. ์๊ฐ์ง๋จ -> ์กฐ๊ธฐ ์๋ณ -> ํผํด ์ต์ํ์ ์ ์ํ ๊ตฌ์กฐ๋ฅผ ๋ง๋๋ ๊ฒ์ด ์ฃผ ๋ชฉ์ ์
๋๋ค.
|
| 12 |
+
|
| 13 |
+
--------------------------------------------------
|
| 14 |
+
|
| 15 |
+
[2. ์นจํด์ฌ๊ณ ๊ณต๊ฒฉ ์๋๋ฆฌ์ค ์์ธ]
|
| 16 |
+
ํต์ฌ ์์ฝ: SMB ๊ณต์ ํด๋์ ์ ๊ทผ ์ ์ด ๋ฏธํก์ ์ด์ฉํ ์ด๊ธฐ ์นจํฌ๋ถํฐ AD ์๋ฒ ์ฅ์
๊น์ง์ ๋จ๊ณ๋ณ ๊ณต๊ฒฉ ํ๋ฆ.
|
| 17 |
+
์์ธ ๋ด์ฉ: ๊ณต๊ฒฉ ํ๋ฆ: ํฌํธ ์ค์บ๋(์ทจ์ฝ ํ์ผ ์๋ฒ ์๋ณ) -> SMB ๊ณต์ ์ ๊ทผ(๊ณ์ ์ ๋ณด ํ๋) -> rlogin ์๊ฒฉ ์ ์ -> ์
์ฑ ์ฝ๋(Plague) ๋ฐฐํฌ ๋ฐ ๋ฐฑ๋์ด ์ค์น -> ๋ด๋ถ๋ง ์ด๋(Lateral Movement) -> DC ์๋ฒ Brute Force ๊ณต๊ฒฉ -> RDP ์ ์ ๋ฐ ๊ด๋ฆฌ์ ๊ณ์ ์์ฑ -> PowerShell์ ์ด์ฉํ ๋ณด์ ์ค์ (UAC ๋ฑ) ํด์ -> ํ์ ์ ๊ฑฐ.
|
| 18 |
+
|
| 19 |
+
--------------------------------------------------
|
| 20 |
+
|
| 21 |
+
[3. ํ์ผ ์๋ฒ(Linux) ํ์ง ๋ฐ ๋์]
|
| 22 |
+
ํต์ฌ ์์ฝ: ๋ฆฌ๋
์ค ๊ธฐ๋ฐ ํ์ผ ์๋ฒ์์ ๋ฐ๊ฒฌ๋ ์
์ฑ์ฝ๋ ๋์, ์๊ฒฉ ๋ช
๋ น์ด ์ฌ์ฉ, ๊ณ์ ์์ฑ ๋ฐ ๋ฐ์ดํฐ ์ ์ถ ํ์ ํ์ง.
|
| 23 |
+
์์ธ ๋ด์ฉ: ์ฃผ์ ํ์ง ํญ๋ชฉ:
|
| 24 |
+
- [PLAGUE]_01: Plague ์
์ฑ์ฝ๋ ๋ฐ์ด๋๋ฆฌ ๊ฐ ํ์ธ ๋ฐ ld.so.preload ์ฝ์
ํ์ง.
|
| 25 |
+
- [PS]_03: rlogin, rexec ๋ฑ ์๊ฒฉ ๋ช
๋ น ์๋น์ค ๋์ ํ์ธ.
|
| 26 |
+
- [EVT]_01: ๋ฆฌ๋
์ค ๋ก๊ทธ ๋ด ๋น์ ์ ๊ณ์ ์์ฑ ํ์ธ.
|
| 27 |
+
- [EVT]_02: ๊ณต๊ฒฉ ์๋์ ์ํ ํ์ผ ์ญ์ ํ์ ํ์ง.
|
| 28 |
+
- [EVT]_05: scp, wget ๋ฑ์ ์ด์ฉํ ๋ฐ์ดํฐ ์ ์ถ ๋๊ตฌ ์คํ ํ์ธ.
|
| 29 |
+
|
| 30 |
+
--------------------------------------------------
|
| 31 |
+
|
| 32 |
+
[4. ๋๋ฉ์ธ ์ปจํธ๋กค๋ฌ(Windows) ํ์ง ๋ฐ ๋์]
|
| 33 |
+
ํต์ฌ ์์ฝ: ์๋์ฐ ์๋ฒ ํ๊ฒฝ์์์ ๋น์ ์ ๊ด๋ฆฌ์ ๊ณ์ ์์ฑ, ๋ฐฑ์ ๊ธฐ๋ฅ ๋ฌด๋ ฅํ ๋ฐ ๊ถํ ์์น ํ์ง.
|
| 34 |
+
์์ธ ๋ด์ฉ: ์ฃผ์ ํ์ง ํญ๋ชฉ:
|
| 35 |
+
- [EVT]_09: ๋น์ ์์ ์ผ๋ก ์์ฑ๋ ์ฌ์ฉ์(๊ด๋ฆฌ์) ๊ณ์ ํ์ง.
|
| 36 |
+
- [EVT]_14: Windows Defender ์ค์๊ฐ ๊ฐ์ ๊ธฐ๋ฅ ๋นํ์ฑํ ํ์ง.
|
| 37 |
+
- [EVT]_07: ๊ณต๊ฒฉ ์๋์ ์ํ ๊ณ์ ์ญ์ ํ์ ํ์ง.
|
| 38 |
+
- [REG]_04: UAC(์ฌ์ฉ์ ๊ณ์ ์ปจํธ๋กค) ๊ฒฝ๊ณ ๊ธฐ๋ฅ ๊บผ์ง(EnableLUA=0) ํ์ง.
|
| 39 |
+
|
| 40 |
+
--------------------------------------------------
|
| 41 |
+
|
| 42 |
+
[5. ๋๊ตฌ ์ง์ ๋ฒ์ ๋ฐ ์คํ ๋ฐฉ๋ฒ]
|
| 43 |
+
ํต์ฌ ์์ฝ: ์ง์ํ๋ ์ด์์ฒด์ (Windows Server 2008~2025, ์ฃผ์ ๋ฆฌ๋
์ค ๋ฐฐํฌํ) ๋ชฉ๋ก ๋ฐ ์ํคํ
์ฒ๋ณ ์ค์น ๊ฐ์ด๋.
|
| 44 |
+
์์ธ ๋ด์ฉ: ์ง์ ํ๊ฒฝ: Windows Server ์ ๋ฒ์ , Ubuntu, CentOS, RedHat, Rocky, Oracle, Amazon Linux ๋ฑ ์ฃผ์ ๋ฆฌ๋
์ค. ์คํ ๋ฐฉ์: ํฌํฐ๋ธ ์คํํ์ผ ํํ๋ก ์ ๊ณต๋๋ฉฐ 32/64๋นํธ ์ํคํ
์ฒ์ ๋ง๋ ํ์ผ์ ์ ํํ์ฌ ์คํ.
|
| 45 |
+
|
| 46 |
+
--------------------------------------------------
|
| 47 |
+
|
data/Information and Communications Field_Breach_Incident_Response_Guide_Revised_Version.txt
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
==================================================
|
| 3 |
+
DOCUMENT OVERVIEW
|
| 4 |
+
==================================================
|
| 5 |
+
๋ฌธ์ ์ ๋ชฉ: ์ ๋ณดํต์ ๋ถ์ผ ์นจํด์ฌ๊ณ ๋์ ์๋ด์
|
| 6 |
+
๋ฐํ ๊ธฐ๊ด: ๊ณผํ๊ธฐ์ ์ ๋ณดํต์ ๋ถ, KISA(ํ๊ตญ์ธํฐ๋ท์งํฅ์)
|
| 7 |
+
๋ฐํ ์ผ์: 2025๋
08์
|
| 8 |
+
๋ฌธ์ ์์ฝ:
|
| 9 |
+
์ด ๋ฌธ์๋ ์ ๋ณดํต์ ๋ถ์ผ์์ ๋ฐ์ํ๋ ์นจํด์ฌ๊ณ ์ ๋ํ ์๋ฐฉ ๋ฐ ๋์ ์๋ น์ ๋ค๋ฃน๋๋ค. ICT ๊ฐ๊ตญ์ธ ํ๊ตญ์ ๋์งํธ ํ๊ฒฝ ๋ฐฐ๊ฒฝ๊ณผ ํจ๊ป, ์ ๋ณดํต์ ๋ง๋ฒ ๋ฐ ๊ฐ์ธ์ ๋ณด๋ณดํธ๋ฒ์ ๋ฐ๋ฅธ ๋ฒ์ ์ ๊ณ ์๋ฌด, ์ฌ๊ณ ๋ฐ์ ์ 7๋จ๊ณ ๋์ ์ ์ฐจ, ๊ทธ๋ฆฌ๊ณ ๊ฐ์ธ ๋ฐ ๊ธฐ์
(์๋ฒ, ๋คํธ์ํฌ, DB ๋ฑ)์ด ์ํํด์ผ ํ ๊ตฌ์ฒด์ ์ธ ๋ณด์ ์ ๊ฒ ํญ๋ชฉ์ ํฌํจํ๊ณ ์์ต๋๋ค.
|
| 10 |
+
|
| 11 |
+
==================================================
|
| 12 |
+
[Section 1]
|
| 13 |
+
[Section Title]
|
| 14 |
+
๊ฐ์ ๋ฐ ๋ฐฐ๊ฒฝ: ์ ๋ณดํต์ ๋ถ์ผ ์นจํด์ฌ๊ณ ๋์์ ํ์์ฑ
|
| 15 |
+
|
| 16 |
+
[Summary]
|
| 17 |
+
์๋ด์์ ๋ฐํ ๋ฐฐ๊ฒฝ๊ณผ ๋ชฉ์ ์ ์ค๋ช
ํฉ๋๋ค. OECD ๋ณด๊ณ ์์ ๋ฐ๋ฅธ ํ๊ตญ์ ICT ์์, ๋์งํธ ์ ํ ๊ฐ์ํ์ ๋ฐ๋ฅธ ์ฌ์ด๋ฒ ์ํ(ํดํน, ์
์ฑ์ฝ๋, DDoS)์ ์ฆ๊ฐ ์ถ์ธ, ๊ทธ๋ฆฌ๊ณ ๊ตญ๊ฐ ์์ ๋ณด์ฅ์ ์ํ ๋๋น ํ์์ฑ์ ๋ค๋ฃน๋๋ค.
|
| 18 |
+
|
| 19 |
+
[Keywords]
|
| 20 |
+
๊ฐ์, ๋ฐฐ๊ฒฝ, ๋์งํธ ์ ํ, ์ฌ์ด๋ฒ ์ํ, ํดํน, ์
์ฑ์ฝ๋, DDoS, ๊ตญ๊ฐ ์์ , ์๋ด์ ๋ชฉ์ , ๊ณผํ๊ธฐ์ ์ ๋ณดํต์ ๋ถ, KISA
|
| 21 |
+
|
| 22 |
+
[Body]
|
| 23 |
+
1. ๋ฐฐ๊ฒฝ
|
| 24 |
+
OECD ๋์งํธ๊ฒฝ์ ์ ๋ง๋ณด๊ณ ์2024์ ๋ฐ๋ฅด๋ฉด ์ฐ๋ฆฌ๋๋ผ๋ IoT, ๋น
๋ฐ์ดํฐ, AI ๋ฑ ํ์ ๊ธฐ์ ๋ถ์ผ์์ ๊ฐ์ฅ ๋น ๋ฅธ ๋์
๋ฅ ์ ๋ํ๋ด๋ ICT ๊ฐ๊ตญ์
๋๋ค. ์ฝ๋ก๋19 ์ดํ ๋์งํธ ์ ํ ๊ฐ์ํ๋ก 2023๋
๊ฐ๊ตฌ ์ธํฐ๋ท ์ ์๋ฅ ์ 99.9%์ ๋๋ฌํ๋ฉฐ ์ด์ฐ๊ฒฐ ์๋์ ์ง์
ํ์ต๋๋ค. ์ด์ ๋ฐ๋ผ ํดํน, ์
์ฑ์ฝ๋, DDoS ๋ฑ ์นจํด์ฌ๊ณ ๊ณต๊ฒฉ ์ ํ์ด ๊ฐ์๋ก ์ง๋ฅํ, ๊ณ ๋ํ๋๊ณ ์์ผ๋ฉฐ ๋งค๋
์ฆ๊ฐ ์ถ์ธ์ ์์ต๋๋ค. ๊ฐ์ธ์ ๋ณด ์นจํด, ์ฐ์
๊ธฐ๋ฐ ์ ์ถ ๋ฑ ์ฌ์ด๋ฒ ์ํ์ ์ง์์ ์ผ๋ก ๋๋นํ์ฌ ๊ตญ๊ฐ ์์ ์ ๋ณด์ฅํด์ผ ํฉ๋๋ค.
|
| 25 |
+
|
| 26 |
+
2. ๋ชฉ์ ๋ฐ ๊ตฌ์ฑ
|
| 27 |
+
๋ณธ ์๋ด์๋ ๊ธฐ์
๋ฐ ๊ฐ์ธ ์ด์ฉ์๊ฐ ์์์ผ ํ ์นจํด์ฌ๊ณ ์๋ฐฉ ๋ฐ ๋์์๋ น์ ์ ๊ณตํฉ๋๋ค.
|
| 28 |
+
- ์ 1์ฅ ๊ฐ์: ๋ฐฐ๊ฒฝ, ๋ชฉ์ ๋ฐ ๊ตฌ์ฑ ์ค๋ช
|
| 29 |
+
- ์ 2์ฅ ์นจํด์ฌ๊ณ ์ ๊ณ : ์ ๋ณดํต์ ๋ง๋ฒ ๋ฐ ๊ฐ์ธ์ ๋ณด๋ณดํธ๋ฒ์ ๊ทผ๊ฑฐํ ์ ๊ณ ์๋ฌด์ ๋ฐฉ๋ฒ
|
| 30 |
+
- ์ 3์ฅ ์นจํด์ฌ๊ณ ์กฐ์น ๊ฐ์ด๋: ์ฌ๊ณ ์ ํ๋ณ ๋์์ ์ฐจ, ์์คํ
์ ๊ฒํญ๋ชฉ ๋ฐ ์ทจ์ฝ์ ์กฐ์น ๋ฐฉ์
|
| 31 |
+
- ๋ถ๋ก: ์ฃผ์ ์ฉ์ด ์ ๋ฆฌ
|
| 32 |
+
|
| 33 |
+
==================================================
|
| 34 |
+
[Section 2]
|
| 35 |
+
[Section Title]
|
| 36 |
+
์นจํด์ฌ๊ณ ์ ๊ณ ์๋ฌด ๋ฐ ์ ์ฐจ (์ ๋ณดํต์ ๋ง๋ฒ)
|
| 37 |
+
|
| 38 |
+
[Summary]
|
| 39 |
+
์ ๋ณดํต์ ๋ง๋ฒ ์ 48์กฐ์3์ ๊ทผ๊ฑฐํ ์นจํด์ฌ๊ณ ์ ์ ์์ ์ ๊ณ ์๋ฌด๋ฅผ ์ค๋ช
ํฉ๋๋ค. ์ฌ๊ณ ๋ฐ์ ์ธ์ง ํ 24์๊ฐ ์ด๋ด ์ ๊ณ ๊ท์ , ์๋ฐ ์ ๊ณผํ๋ฃ, ๊ทธ๋ฆฌ๊ณ ๊ตฌ์ฒด์ ์ธ ์ ๊ณ ๋ฐฉ๋ฒ(๋ณดํธ๋๋ผ, 118 ๋ฑ)์ ํฌํจํฉ๋๋ค.
|
| 40 |
+
|
| 41 |
+
[Keywords]
|
| 42 |
+
์นจํด์ฌ๊ณ ์ ๊ณ , ์ ๋ณดํต์ ๋ง๋ฒ, ์ ๊ณ ์๋ฌด, 24์๊ฐ ์ด๋ด, ๊ณผํ๋ฃ, KISA ์ ๊ณ , ๋ณดํธ๋๋ผ, 118, ํดํน ์ ๊ณ
|
| 43 |
+
|
| 44 |
+
[Body]
|
| 45 |
+
1. ์นจํด์ฌ๊ณ ๋ฐ์ ์ ์ ๊ณ
|
| 46 |
+
|
| 47 |
+
๊ฐ. ์ ์
|
| 48 |
+
ํดํน, ๋ฐ์ด๋ฌ์ค, ์๋น์ค ๊ฑฐ๋ถ(DDoS) ๋ฑ์ ๋ฐฉ๋ฒ์ผ๋ก ์ ๋ณดํต์ ๋ง์ ๊ณต๊ฒฉํ๋ ํ์๋ก ์ธํด ๋ฐ์ํ ์ฌํ๋ฅผ ๋งํฉ๋๋ค.
|
| 49 |
+
|
| 50 |
+
๋. ๊ด๋ จ ๋ฒ๋ฅ ๋ฐ ์๋ฌด
|
| 51 |
+
์ ๋ณดํต์ ๋ง ์ด์ฉ์ด์ง ๋ฐ ์ ๋ณด๋ณดํธ ๋ฑ์ ๊ดํ ๋ฒ๋ฅ (์ ๋ณดํต์ ๋ง๋ฒ) ์ 48์กฐ์3์ ๋ฐ๋ผ ์ฌ๊ณ ๋ฐ์ ์ฆ์(์นจํด์ฌ๊ณ ๋ฅผ ์๊ฒ ๋ ๋๋ก๋ถํฐ 24์๊ฐ ์ด๋ด) ๊ณผํ๊ธฐ์ ์ ๋ณดํต์ ๋ถ ๋๋ KISA(ํ๊ตญ์ธํฐ๋ท์งํฅ์)์ ์ ๊ณ ํด์ผ ํฉ๋๋ค.
|
| 52 |
+
- ๋ฒ์น: ์ ๊ณ ์๋ฌด ์๋ฐ ์ 3์ฒ๋ง์ ์ดํ์ ๊ณผํ๋ฃ๊ฐ ๋ถ๊ณผ๋ฉ๋๋ค.
|
| 53 |
+
|
| 54 |
+
๋ค. ์ ๊ณ ๋ฐฉ๋ฒ
|
| 55 |
+
- ํํ์ด์ง: ๋ณดํธ๋๋ผ&KrCERT (http://www.boho.or.kr)
|
| 56 |
+
- ์ ์์ฐํธ: certgen@krcert.or.kr
|
| 57 |
+
- ์ ํ: 118 (๊ตญ๋ฒ ์์ด)
|
| 58 |
+
- ๊ธฐํ: ์๋ฉด ์ ๊ณ ๋ฑ
|
| 59 |
+
|
| 60 |
+
==================================================
|
| 61 |
+
[Section 3]
|
| 62 |
+
[Section Title]
|
| 63 |
+
๊ฐ์ธ์ ๋ณด ์ ์ถ์ฌ๊ณ ์ ๊ณ ๋ฐ ํต์ง (๊ฐ์ธ์ ๋ณด ๋ณดํธ๋ฒ)
|
| 64 |
+
|
| 65 |
+
[Summary]
|
| 66 |
+
๊ฐ์ธ์ ๋ณด ๋ณดํธ๋ฒ ์ 34์กฐ์ ๋ฐ๋ฅธ ๊ฐ์ธ์ ๋ณด ์ ์ถ์ ์ ์์ ๋์ ์๋ฌด๋ฅผ ์ค๋ช
ํฉ๋๋ค. ์ ๋ณด์ฃผ์ฒด์ ๋ํ ์ง์ฒด ์๋ ํต์ง ์๋ฌด์ 72์๊ฐ ์ด๋ด ๊ด๊ณ ๊ธฐ๊ด(๊ฐ์ธ์ ๋ณด๋ณดํธ์์ํ, KISA) ์ ๊ณ ์ ์ฐจ๋ฅผ ๋ค๋ฃน๋๋ค.
|
| 67 |
+
|
| 68 |
+
[Keywords]
|
| 69 |
+
๊ฐ์ธ์ ๋ณด ์ ์ถ, ๊ฐ์ธ์ ๋ณด ๋ณดํธ๋ฒ, ์ ์ถ ์ ๊ณ , 72์๊ฐ ์ด๋ด, ์ ๋ณด์ฃผ์ฒด ํต์ง, ๊ฐ์ธ์ ๋ณด๋ณดํธ์์ํ, ํผํด ๊ตฌ์
|
| 70 |
+
|
| 71 |
+
[Body]
|
| 72 |
+
2. ๊ฐ์ธ์ ๋ณด ์ ์ถ์ฌ๊ณ ๋ฐ์ ์ ์ ๊ณ
|
| 73 |
+
|
| 74 |
+
๊ฐ. ์ ์
|
| 75 |
+
๊ฐ์ธ์ ๋ณด์ฒ๋ฆฌ์์ ๊ด๋ฆฌ, ํต์ ๊ถ์ ๋ฒ์ด๋ ์ 3์๊ฐ ๋ด์ฉ์ ์ ์ ์๋ ์ํ์ ์ด๋ฅธ ๊ฒ์ ์๋ฏธํฉ๋๋ค.
|
| 76 |
+
|
| 77 |
+
๋. ๋ฒ์ ์๋ฌด
|
| 78 |
+
๊ฐ์ธ์ ๋ณด ๋ณดํธ๋ฒ ์ 34์กฐ์ ๋ฐ๋ผ ๋ค์์ ์กฐ์น๋ฅผ ์ทจํด์ผ ํฉ๋๋ค.
|
| 79 |
+
1) ์ ๋ณด์ฃผ์ฒด ํต์ง: ์ ์ถ ์ฌ์ค์ ์๊ฒ ๋ ํ ์ง์ฒด ์์ด ์ ๋ณด์ฃผ์ฒด์๊ฒ ํต์งํด์ผ ํฉ๋๋ค.
|
| 80 |
+
2) ๊ธฐ๊ด ์ ๊ณ : ์ ์ถ ์ฌ์ค์ ์๊ฒ ๋ ํ 72์๊ฐ ์ด๋ด์ ๊ฐ์ธ์ ๋ณด๋ณดํธ์์ํ ๋๋ KISA(ํ๊ตญ์ธํฐ๋ท์งํฅ์)์ ์ ๊ณ ํด์ผ ํฉ๋๋ค.
|
| 81 |
+
|
| 82 |
+
๋ค. ํต์ง ๋ด์ฉ
|
| 83 |
+
์ ๋ณด์ฃผ์ฒด์๊ฒ ํต์งํ ๋๋ ๋ค์์ ๋ด์ฉ์ ํฌํจํด์ผ ํฉ๋๋ค.
|
| 84 |
+
- ์ ์ถ๋ ๊ฐ์ธ์ ๋ณด์ ํญ๋ชฉ
|
| 85 |
+
- ์ ์ถ๋ ์์ ๊ณผ ๊ทธ ๊ฒฝ์
|
| 86 |
+
- ์ ์ถ๋ก ์ธํ์ฌ ๋ฐ์ํ ์ ์๋ ํผํด๋ฅผ ์ต์ํํ๊ธฐ ์ํ์ฌ ์ ๋ณด์ฃผ์ฒด๊ฐ ํ ์ ์๋ ๋ฐฉ๋ฒ ๋ฑ์ ๊ดํ ์ ๋ณด
|
| 87 |
+
- ๊ฐ์ธ์ ๋ณด์ฒ๋ฆฌ์์ ๋์์กฐ์น ๋ฐ ํผํด ๊ตฌ์ ์ ์ฐจ
|
| 88 |
+
- ์ ๋ณด์ฃผ์ฒด์๊ฒ ํผํด๊ฐ ๋ฐ์ํ ๊ฒฝ์ฐ ์ ๊ณ ๋ฑ์ ์ ์ํ ์ ์๋ ๋ด๋น๋ถ์ ๋ฐ ์ฐ๋ฝ์ฒ
|
| 89 |
+
|
| 90 |
+
==================================================
|
| 91 |
+
[Section 4]
|
| 92 |
+
[Section Title]
|
| 93 |
+
์นจํด์ฌ๊ณ ๋์ 7๋จ๊ณ ํ๋ก์ธ์ค
|
| 94 |
+
|
| 95 |
+
[Summary]
|
| 96 |
+
์นจํด์ฌ๊ณ ๋ฐ์ ์ ์ฒด๊ณ์ ์ธ ๋์์ ์ํ 7๋จ๊ณ ์ ์ฐจ(์ค๋น, ํ์ง, ์ด๊ธฐ ๋์, ์ ๋ต ์ฒด๊ณํ, ์กฐ์ฌ, ๋ณด๊ณ ์, ํด๊ฒฐ)๋ฅผ ์์ธํ ์ค๋ช
ํฉ๋๋ค.
|
| 97 |
+
|
| 98 |
+
[Keywords]
|
| 99 |
+
์ฌ๊ณ ๋์ 7๋จ๊ณ, ์ฌ๊ณ ํ์ง, ์ด๊ธฐ ๋์, ์ฌ๊ณ ์กฐ์ฌ, ํฌ๋ ์, ๋์ ์ ๋ต, ์ฌ๋ฐ ๋ฐฉ์ง, ๋ณด์ ์ ์ฑ
|
| 100 |
+
|
| 101 |
+
[Body]
|
| 102 |
+
1. ์ฌ๊ณ ๋์ 7๋จ๊ณ
|
| 103 |
+
|
| 104 |
+
โ 1๋จ๊ณ: ์ฌ๊ณ ์ ์ค๋น
|
| 105 |
+
- ์ฌ๊ณ ๋์ํ ๊ตฌ์ฑ ๋ฐ ์กฐ์ง์ ๋์ ์ฒด๊ณ ์ค๋น
|
| 106 |
+
|
| 107 |
+
โก 2๋จ๊ณ: ์ฌ๊ณ ํ์ง
|
| 108 |
+
- ๋ณด์ ์ฅ๋น ๋ชจ๋ํฐ๋ง ๋ฑ์ ํตํ ์ด์ ์งํ ํ์ง ๋ฐ ์ค์ ์ฌ๊ณ ์ฌ๋ถ ์๋ณ
|
| 109 |
+
|
| 110 |
+
โข 3๋จ๊ณ: ์ด๊ธฐ ๋์
|
| 111 |
+
- ์ฌ๊ณ ์ ๊ธฐ๋ณธ ์ธ๋ถ์ฌํญ ๊ธฐ๋ก
|
| 112 |
+
- ๊ด๋ จ ๋ถ์ ๋ฐ ๋ด๋น์์๊ฒ ์ ์ํ ํต์ง
|
| 113 |
+
|
| 114 |
+
โฃ 4๋จ๊ณ: ๋์ ์ ๋ต ์ฒด๊ณํ
|
| 115 |
+
- ๋ฒ์ ๋์(์์ก) ์ฌ๋ถ ํ๋จ
|
| 116 |
+
- ์์ฌ๊ธฐ๊ด ๊ณต์กฐ ํ์์ฑ ๋ฐ ๊ฒฐ์
|
| 117 |
+
|
| 118 |
+
โค 5๋จ๊ณ: ์ฌ๊ณ ์กฐ์ฌ
|
| 119 |
+
- ๋ก๊ทธ, ์์คํ
์ด๋ฏธ์ง ๋ฑ ๋ฐ์ดํฐ ์์ง ๋ฐ ๋ถ์
|
| 120 |
+
- ์กํ์์น(์ธ์ , ๋๊ฐ, ์ด๋ป๊ฒ ๋ฑ)์ ๋ฐ๋ฅธ ์์ธ ๋ถ์ ์ํ
|
| 121 |
+
|
| 122 |
+
โฅ 6๋จ๊ณ: ๋ณด๊ณ ์ ์์ฑ
|
| 123 |
+
- ๊ฒฝ์์ง ๋ฑ ์์ฌ๊ฒฐ์ ๊ถ์๊ฐ ์ดํดํ ์ ์๋ ํํ์ ๊ฒฐ๊ณผ ๋ณด๊ณ ์ ์์ฑ
|
| 124 |
+
|
| 125 |
+
โฆ 7๋จ๊ณ: ํด๊ฒฐ
|
| 126 |
+
- ๋ณด์ ์ ์ฑ
์๋ฆฝ ๋ฐ ์ ์ฐจ ๋ณ๊ฒฝ
|
| 127 |
+
- ์ทจ์ฝ์ ํจ์น ๋ฑ ์ฐจ๊ธฐ ๊ณต๊ฒฉ ์๋ฐฉ ์กฐ์น ์ํ
|
| 128 |
+
|
| 129 |
+
==================================================
|
| 130 |
+
[Section 5]
|
| 131 |
+
[Section Title]
|
| 132 |
+
๋์๋ณ ๋ณด์ ์ ๊ฒํญ๋ชฉ ๋ฐ ์กฐ์น๋ฐฉ์ (๊ฐ์ธ/๊ธฐ์
)
|
| 133 |
+
|
| 134 |
+
[Summary]
|
| 135 |
+
๊ฐ์ธ ์ด์ฉ์์ ๊ธฐ์
(์น ์๋ฒ, ๋คํธ์ํฌ, DB, ์ ํ๋ฆฌ์ผ์ด์
)์ด ์ํํด์ผ ํ ๊ตฌ์ฒด์ ์ธ ๋ณด์ ์ ๊ฒ ๋ฆฌ์คํธ์ ๊ธฐ์ ์ ์กฐ์น ๋ฐฉ์์ ์ ๊ณตํฉ๋๋ค.
|
| 136 |
+
|
| 137 |
+
[Keywords]
|
| 138 |
+
๋ณด์ ์ ๊ฒ, ์กฐ์น ๋ฐฉ์, ์น ์๋ฒ ๋ณด์, ๋คํธ์ํฌ ๋ณด์, DB ๋ณด์, SQL Injection, XSS, ํจ์ค์๋ ๊ด๋ฆฌ, ๋ฐฑ์
, ์ทจ์ฝ์ ์กฐ์น
|
| 139 |
+
|
| 140 |
+
[Body]
|
| 141 |
+
2. ์ ๊ฒํญ๋ชฉ ๋ฐ ์กฐ์น๋ฐฉ์
|
| 142 |
+
|
| 143 |
+
[๊ฐ์ธ ์ด์ฉ์]
|
| 144 |
+
- ์ ํ ์ํํธ์จ์ด(SW) ์ฌ์ฉ
|
| 145 |
+
- ์ด์์ฒด์ (OS) ๋ฐ SW ์ต์ ๋ณด์ ํจ์น ์ ์ฉ
|
| 146 |
+
- ๋ฐฑ์ ํ๋ก๊ทธ๋จ ์ค์น ๋ฐ ์ค์๊ฐ ๊ฐ์
|
| 147 |
+
- ๋น๋ฐ๋ฒํธ ๊ด๋ฆฌ: ๊ณต์ ๊ธฐ ๋ฐ WIFI ๋น๋ฐ๋ฒํธ ์ค์ , ์ฃผ๊ธฐ์ ๋ณ๊ฒฝ
|
| 148 |
+
- ๋์ฌ์จ์ด ๋๋น ์ค์ ๋ฐ์ดํฐ ์ ๊ธฐ ๋ฐฑ์
|
| 149 |
+
|
| 150 |
+
[๊ธฐ์
- ์์คํ
๋ณ ์กฐ์น]
|
| 151 |
+
๊ฐ. ์น ์๋ฒ
|
| 152 |
+
- ํดํน์ง๋จ๋๊ตฌ(ํ์ฌ, ์บ์ฌ ๋ฑ) ํ์ฉํ์ฌ ์น์ ๋ฑ ์ ๊ฒ
|
| 153 |
+
- OS ๋ฐ ์๋ฒ ์ํํธ์จ์ด ์ต์ ๋ณด์ ํจ์น ์ ์ฉ
|
| 154 |
+
- ๊ด๋ฆฌ์ ํ์ด์ง ์ ๊ทผ ์ ์ด ์ค์
|
| 155 |
+
- ์ถ์ธกํ๊ธฐ ์ด๋ ค์ด ๊ฐ๋ ฅํ ํจ์ค์๋ ์ฌ์ฉ
|
| 156 |
+
- ๋ก๊ทธ ํ์ผ ๋ณดํธ ์ค์ ๋ฐ 6๊ฐ์ ์ด์ ๋ณด๊ด
|
| 157 |
+
- ๋ถํ์ํ ํธ๋ํฝ ์ ํ ์ค์
|
| 158 |
+
|
| 159 |
+
๋. ๋คํธ์ํฌ
|
| 160 |
+
- ์๊ฒฉ ์ ๊ทผ(Telnet, FTP ๋ฑ) ์ ํ ๋ฐ ๋ณด์ ํ๋กํ ์ฝ(SSH ๋ฑ) ์ฌ์ฉ
|
| 161 |
+
- SNMP ์ค์ ๋ณ๊ฒฝ (Community String ๋ณต์กํ๊ฒ ์ค์ )
|
| 162 |
+
- ๋ถํ์ํ ์๋น์ค ๋ฐ ํฌํธ ์ค๋จ
|
| 163 |
+
- ๋ก๊ทธ์ธ ์๋ ํ์ ๋ฐ ์๊ฐ ์ ํ ์ค์
|
| 164 |
+
|
| 165 |
+
๋ค. ๋ฐ์ดํฐ๋ฒ ์ด์ค(DB)
|
| 166 |
+
- My-SQL ๋ฑ DBMS ์ค์น ์ ๊ธฐ๋ณธ(Default) ํจ์ค์๋ ๋ณ๊ฒฝ
|
| 167 |
+
- ์ธ๋ถ๋ก๋ถํฐ์ ์๊ฒฉ ์ ์ ์ฐจ๋จ (๋ก์ปฌ์์๋ง ์ ์ ํ์ฉ ๋ฑ)
|
| 168 |
+
- ์ฌ์ฉ์๋ณ ๊ถํ ์ต์ํ ๋ถ์ฌ
|
| 169 |
+
- ์ต์ ๋ณด์ ํจ์น ์ ์ฉ
|
| 170 |
+
|
| 171 |
+
๋ผ. ์ดํ๋ฆฌ์ผ์ด์
์ทจ์ฝ์
|
| 172 |
+
- ์ฃผ์ ์ทจ์ฝ์ : SQL Injection, XSS, CSRF, ๋ฒํผ ์ค๋ฒํ๋ก์ฐ ๋ฑ
|
| 173 |
+
- ์กฐ์น ๋ฐฉ์: ์ฌ์ฉ์ ์
๋ ฅ๊ฐ์ ๋ํ ์ฒ ์ ํ ๊ฒ์ฆ ๋ก์ง ๊ตฌํ ๋ฐ ์ํ์ด ์ฝ๋ฉ ์ ์ฉ
|
| 174 |
+
|
| 175 |
+
==================================================
|
| 176 |
+
[Section 6]
|
| 177 |
+
[Section Title]
|
| 178 |
+
๋ถ๋ก: ์ ๋ณด๋ณดํธ ์ฃผ์ ์ฉ์ด ์ ์
|
| 179 |
+
|
| 180 |
+
[Summary]
|
| 181 |
+
์นจํด์ฌ๊ณ ๋์๊ณผ ๊ด๋ จ๋ ์ฃผ์ ๊ธฐ์ ์ฉ์ด(DDoS, ๋์ฌ์จ์ด, ๋ฃจํธํท, APT, ์น์
ธ, ์ด๋ฏธ์ง)์ ์ ์๋ฅผ ์ค๋ช
ํฉ๋๋ค.
|
| 182 |
+
|
| 183 |
+
[Keywords]
|
| 184 |
+
์ฉ์ด ์ฌ์ , DDoS, ๋์ฌ์จ์ด, ๋ฃจํธํท, APT, ์ง๋ฅํ ์ง์ ์ํ, ์น์
ธ, ์ด๋ฏธ์ง, ํฌ๋ ์ ์ฉ์ด
|
| 185 |
+
|
| 186 |
+
[Body]
|
| 187 |
+
๋ถ๋ก: ์ฃผ์ ์ฉ์ด
|
| 188 |
+
|
| 189 |
+
- DDoS (Distributed Denial of Service): ๋ถ์ฐ ์๋น์ค ๊ฑฐ๋ถ ๊ณต๊ฒฉ. ๋๋์ ํธ๋ํฝ์ ์ ์กํ์ฌ ์์คํ
์ ๊ฐ์ฉ์ฑ์ ๋ง๋น์ํค๊ณ ์๋น์ค๋ฅผ ๋ฐฉํดํ๋ ๊ณต๊ฒฉ.
|
| 190 |
+
- ๋์ฌ์จ์ด (Ransomware): ์ปดํจํฐ ์์คํ
์ ๊ฐ์ผ์์ผ ๋ฐ์ดํฐ๋ ํ์ผ์ ์ํธํํ ํ, ์ด๋ฅผ ๋ณต๊ตฌํด ์ฃผ๋ ๋๊ฐ๋ก ๊ธ์ ์ ์๊ตฌํ๋ ์
์ฑ ํ๋ก๊ทธ๋จ.
|
| 191 |
+
- ๋ฃจํธํท (Rootkit): ์์คํ
์ ์นจ์
ํ ์ฌ์ค์ ์จ๊ธฐ๊ณ , ๋์ค์ ๋ค์ ์นจ์
ํ๊ธฐ ์ํด ๋ฐฑ๋์ด ๋ฑ์ ์ค์นํ๋ฉฐ ์
์ฑ์ฝ๋๋ฅผ ํ์ง๋์ง ์๊ฒ ์จ๊ฒจ์ฃผ๋ ๋๊ตฌ ๋ชจ์.
|
| 192 |
+
- APT (Advanced Persistent Threat): ์ง๋ฅํ ์ง์ ์ํ. ํน์ ๋์์ ๊ฒจ๋ฅํ์ฌ ๋ค์ํ ๊ณต๊ฒฉ ๊ธฐ๋ฒ์ ์ด์ฉํด ์ง์์ ์ด๊ณ ์๋ฐํ๊ฒ ๊ณต๊ฒฉํ๋ ๊ธฐ๋ฒ.
|
| 193 |
+
- ์น์
ธ (Webshell): ๊ณต๊ฒฉ์๊ฐ ์๊ฒฉ์ผ๋ก ์น ์๋ฒ์ ๋ช
๋ น์ ์คํํ ์ ์๋๋ก ์
๋ก๋ํ๋ ์
์ฑ ์คํฌ๋ฆฝํธ ํ์ผ.
|
| 194 |
+
- ์ด๋ฏธ์ง ๋ถ์ (์ด๋ฏธ์ง): ๋์งํธ ํฌ๋ ์ ๊ณผ์ ์์ ์๋ณธ ์ฆ๊ฑฐ๊ฐ ํผ์๋์ง ์๋๋ก ๋์คํฌ์ ๋ณต์ ๋ณธ(์ฌ๋ณธ)์ ๋ง๋๋ ๊ณผ์ .
|
data/Report on Trends in Small and Medium-Sized Enterprise Intrusion Damage Support Services (First Half of 2025) - Cases of Spear Phishing Targeting Businesses and Response Strategies.txt
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[RAG ๋ฌธ์ ๋ฐ์ดํฐ๋ฒ ์ด์ค]
|
| 2 |
+
์ ๋ชฉ: ์ค์๊ธฐ์
์นจํด์ฌ๊ณ ํผํด์ง์ ์๋น์ค ๋ํฅ ๋ณด๊ณ ์ (2025๋
์๋ฐ๊ธฐ) - ๊ธฐ์
ํ๊นํ ์คํผ์ด ํผ์ฑ ์ฌ๋ก ๋ฐ ๋์๋ฐฉ์
|
| 3 |
+
================================================
|
| 4 |
+
[์ ์ฒด ๊ฐ์]
|
| 5 |
+
์ด ๋ฌธ์๋ 2025๋
์๋ฐ๊ธฐ ์ค์๊ธฐ์
์ ๋์์ผ๋ก ๋ฐ์ํ ์ง๋ฅํ ์ฌ์ด๋ฒ ๊ณต๊ฒฉ์ธ '์คํผ์ด ํผ์ฑ(Spear Phishing)'์ ๋ํฅ์ ๋ถ์ํ๊ณ , ์ค์ ์ฌ๋ก์ ์์ธ์ ํ์
ํ์ฌ ๊ธฐ์
๋ฐ ์ฌ์ฉ์๊ฐ ์ทจํด์ผ ํ ์ค์ง์ ์ธ ๋์ ๋ฐฉ์์ ์ ์ํ๋ RAG ์์คํ
์ฉ ๋ฌธ์์
๋๋ค.
|
| 6 |
+
|
| 7 |
+
[์น์
1: 1. ์๋ก ๋ฐ 2025๋
์๋ฐ๊ธฐ ์นจํด์ฌ๊ณ ํต๊ณ]
|
| 8 |
+
- ํต์ฌ ์์ฝ: ๋์งํธ ์ ํ ๊ฐ์ํ์ ๋ฐ๋ผ ์ค์๊ธฐ์
๋์ ์ฌ์ด๋ฒ ๊ณต๊ฒฉ์ด ์ง๋ฅํ๋๊ณ ์์ผ๋ฉฐ, ํนํ 2025๋
2๋ถ๊ธฐ์ ํผ์ฑ ์ฌ๊ณ ๊ฐ 1๋ถ๊ธฐ ๋๋น 24๊ฑด ์ฆ๊ฐํ๋ฉฐ ๊ฐ์ฅ ๋์ ์ฆ๊ฐ์ธ๋ฅผ ๋ณด์์ต๋๋ค. ์ฃผ์ ํผํด๋ ์
์ฑ ๋ฉ์ผ์ ํตํ ๊ณ์ ํ์ทจ ๋ฐ ์
์ฑ์ฝ๋ ๊ฐ์ผ์
๋๋ค.
|
| 9 |
+
- ์ฃผ์ ํค์๋: ๋์งํธ ์ ํ, ํผ์ฑ ํต๊ณ, 2025 ์๋ฐ๊ธฐ, ์ค์๊ธฐ์
์ํ
|
| 10 |
+
|
| 11 |
+
[์น์
2: 2. ํผ์ฑ ์นจํด์ฌ๊ณ ์ ์งํ ๋ฐ ์ฃผ์ ์ ํ]
|
| 12 |
+
- ํต์ฌ ์์ฝ: ๊ณต๊ฒฉ ๋ฐฉ์์ด ๋จ์ ์คํธ์์ ๊ธฐ์
๋ฉ์ผ ์๋ฒ ์นจํฌ ํ ์ค์ ๋ฉ์ผ ์ฃผ์๋ฅผ ๋์ฉํ๊ฑฐ๋, ๊ธฐ์กด ๋ํ์ '๋ต์ฅ'ํ๋ ์ ๊ตํ ํํ๋ก ์งํํ์ต๋๋ค. ์ฃผ์ ์ ํ์ผ๋ก e-์ปค๋จธ์ค ๊ณ์ ํ์ทจ๋ฅผ ํตํ ํ๋งค ์ํ ์ ๋ณด ์์กฐ์ ๊ฑฐ๋์ฒ ์ฌ์นญ ๋น์ฆ๋์ค ๋ฉ์ผ(BEC)์ ํตํ ๊ฑฐ๋ ๋๊ธ ํ์ทจ๊ฐ ๋ณด๊ณ ๋์์ต๋๋ค.
|
| 13 |
+
- ์ฃผ์ ํค์๋: ์คํผ์ด ํผ์ฑ, ๋ฉ์ผ ์๋ฒ ์นจํฌ, e-์ปค๋จธ์ค ํ์ทจ, ๊ฑฐ๋๋๊ธ ํธ์ทจ
|
| 14 |
+
|
| 15 |
+
[์น์
3: 3. ์นจํด์ฌ๊ณ ์ ์ฃผ์ ๊ธฐ์ ์ ์์ธ]
|
| 16 |
+
- ํต์ฌ ์์ฝ: ๊ณต๊ฒฉ์๋ค์ ๋ฌด์ฐจ๋ณ ๋์
๊ณต๊ฒฉ(Brute Force), ์ ์ถ๋ ๊ณ์ ์ ๋ณด๋ฅผ ์ฌ์ฌ์ฉํ๋ ํฌ๋ฆฌ๋ด์
์คํฐํ(Credential Stuffing), ๊ทธ๋ฆฌ๊ณ ์
๋ฌด ๋ฉ์ผ๋ก ์์ฅํ ์
์ฑ์ฝ๋๋ฅผ ํตํ ์ ๋ณด ํ์ทจ๋ฅผ ํตํด ๊ธฐ์
์์คํ
์ ์นจํฌํฉ๋๋ค.
|
| 17 |
+
- ์ฃผ์ ํค์๋: ๋ฌด์ฐจ๋ณ ๋์
, ํฌ๋ฆฌ๋ด์
์คํฐํ, ์
์ฑ์ฝ๋, C2 ์๋ฒ
|
| 18 |
+
|
| 19 |
+
[์น์
4: 4. ์ฌ์ฉ์ ๋ฐ ๊ธฐ์
๋์ ๋ฐฉ์]
|
| 20 |
+
- ํต์ฌ ์์ฝ: ์ผ๋ฐ ์ฌ์ฉ์๋ ์์ฌ์ค๋ฌ์ด ๋ฉ์ผ ์ฃผ์ ํ์ธ ๋ฐ ์ฒจ๋ถํ์ผ ์คํ ์ฃผ์๊ฐ ํ์ํ๋ฉฐ, ๊ธฐ์
์ 2๋จ๊ณ ์ธ์ฆ(MFA) ๋์
, ๋ฉ์ผ ๋ณด์ ์๋ฃจ์
(DMARC, SPF, DKIM) ๊ตฌ์ถ, ์กฐ์ง ๋ด ๋ณด์ ๋ฌธํ ์ ์ฐฉ ๋ฐ ๋ชจ์ ํ๋ จ ์ค์๊ฐ ํ์์ ์
๋๋ค.
|
| 21 |
+
- ์ฃผ์ ํค์๋: 2๋จ๊ณ ์ธ์ฆ(MFA), DMARC, ๋ณด์ ๋ชจ์ํ๋ จ, ๋ณด์ ํจ์น
|
| 22 |
+
|
| 23 |
+
[์น์
5: 5. ์ฌ๊ณ ๋ฐ์ ์ ์กฐ์น ์๋ น ๋ฐ ๊ฒฐ๋ก ]
|
| 24 |
+
- ํต์ฌ ์์ฝ: ์ฌ๊ณ ๋ฐ์ ์ฆ์ ์ธํฐ๋ท์ ์ฐจ๋จํ๊ณ ์ฆ๊ฑฐ๋ฅผ ๋ณด์ ํ๋ฉฐ KISA(118)์ ์ ๊ณ ํด์ผ ํฉ๋๋ค. ๊ธฐ์ ์ ์กฐ์น๋งํผ์ด๋ ์ฌ์ฉ์์ ๋ณด์ ์ธ์ ๊ฐํ๊ฐ ์นจํด์ฌ๊ณ ์๋ฐฉ์ ํต์ฌ์ด๋ฉฐ, ๋ณด์ ์์น์ ์ฒ ์ ํ ์ค์๊ฐ ํ์ํฉ๋๋ค.
|
| 25 |
+
- ์ฃผ์ ํค์๋: ์ด๊ธฐ ๋์, KISA ์ ๊ณ , ๋ณด์ ์ธ์, ์ธํฐ๋ท ์ฐจ๋จ
|
| 26 |
+
|
| 27 |
+
================================================
|
| 28 |
+
๋ฌธ์ ์์ฑ์ผ: 2025-08-20
|
| 29 |
+
์ถ์ฒ: KISA ์ค์๊ธฐ์
์นจํด์ฌ๊ณ ํผํด์ง์ ์๋น์ค ๋ณด๊ณ ์
|
data/myragdata1.txt
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[์ค์ ๊ธฐ๋ฐ ์ ๋ณด]
|
| 2 |
+
ํ๋ก์ ํธ ์ฝ๋๋ช
: 'Blue Horizon'
|
| 3 |
+
์ถ์ ์์ ์ผ: 2025๋
12์ 25์ผ
|
| 4 |
+
์ฃผ์ ๊ธฐ๋ฅ: ์์ ์ํธํ ํต์ , 6G ๋คํธ์ํฌ ์ง์.
|
| 5 |
+
๋ด๋น์: ๊น์ฒ ์ ์์ ์ฐ๊ตฌ์.
|
| 6 |
+
|
| 7 |
+
[ํ์ฌ์์ ํค์ฐ๋ ๋๋ฌผ๋ค]
|
| 8 |
+
1. ๊ฐ. ์ด๋ฆ: ๊ฐ๋ผ์ง, ํ์ข
: ์๊ณจ์ก์ข
, ๋์ด: 1๋
.
|
| 9 |
+
2. ์ด๋ชจ์ฌ. ์ด๋ฆ: ๋ฐ์ดํผ, ํ์ข
: ๋ฑ, ๋์ด: 1๋
.
|
| 10 |
+
|
| 11 |
+
[ํ์ฌ์์ ํ๋ ํ๋ชฉ]
|
| 12 |
+
- ๋
ธํธ๋ถ: 150๋ง์
|
| 13 |
+
- ์ค๋งํธํฐ: 100๋ง์
|
| 14 |
+
- ํ๋ธ๋ฆฟ: 80๋ง์
|
| 15 |
+
- ํน์ฝ๋ธ๋ผ : 500๋ง์
|
| 16 |
+
- ์๋ฐํธ๋ก์ค: 1000๋ง์
|
| 17 |
+
- ํด๋ฌ: 300๋ง์
|
| 18 |
+
- ์๋ฌ: 400๋ง์
|
| 19 |
+
|
| 20 |
+
- ์ฃฝ์์ฅ:1๋ง์
|
| 21 |
+
- ์ด์์๋ ์ฅ:2๋ง์
|
| 22 |
+
- ํ ๋ผ: 2๋ง์
|
| 23 |
+
- ์ด์์๋ ํ ๋ผ: 4๋ง์
|
| 24 |
+
- ์ฃฝ์ ๊ณ ์์ด: 1๋ง์
|
| 25 |
+
- ์ด์์๋ ๊ณ ์์ด: 2๋ง์
|
efficientnet_b0_chihuahua_muffin.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
["chihuahua", "muffin"]
|
efficientnet_b0_chihuahua_muffin.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d93f94d7cf902593bf19f9bd9315235dee476509029711880333b0cbc8bfbbe8
|
| 3 |
+
size 16348525
|
efficientnet_b0_chihuahua_muffin_fsgmdef.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
["chihuahua", "muffin"]
|
efficientnet_b0_chihuahua_muffin_fsgmdef.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:82280e17f0ce15bf897ccd2d92314de3e1e749f7b6b59fe67cfe5ee8f884999f
|
| 3 |
+
size 16358049
|
efficientnet_v2_s_plantforestdisease.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
["Aphid", "Black Rust", "Blast", "Brown Rust", "Fusarium Head Blight", "Healthy Wheat", "Leaf Blight", "Mildew", "Mite", "Septoria", "Smut", "Stem fly", "Tan spot", "Yellow Rust"]
|
efficientnet_v2_s_plantforestdisease.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:a3ff9aea0459fe346d687d108873057bb77741fb7e47dad47b2ba08281a37031
|
| 3 |
+
size 81704423
|
effinet_basic.py
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from torchvision.models import efficientnet_b0, EfficientNet_B0_Weights
|
| 3 |
+
import torchvision.transforms as transforms
|
| 4 |
+
from PIL import Image
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import json
|
| 7 |
+
import os
|
| 8 |
+
import sys # ํ์ผ ๋๋ฝ ์ ์ข
๋ฃ๋ฅผ ์ํด ์ถ๊ฐ
|
| 9 |
+
|
| 10 |
+
# ==============================================================================
# 0. Load ImageNet class names
# ==============================================================================
CLASS_MAP_FILENAME = 'labels_map.txt'
class_name_map = None  # module-level {index: class name}; stays None until loaded

try:
    if not os.path.exists(CLASS_MAP_FILENAME):
        print(f"[์ค๋ฅ] ํด๋์ค ์ด๋ฆ ํ์ผ('{CLASS_MAP_FILENAME}')์ ์ฐพ์ ์ ์์ต๋๋ค.")
        print("ํ์ผ์ ํ์ฌ ๋๋ ํ ๋ฆฌ์ ์ ์ฅํ๋์ง ํ์ธํด ์ฃผ์ธ์.")
        sys.exit(1)  # the classifier is useless without the label file, so stop here

    # 1. Load the label file (JSON, e.g. {"0": "tench, Tinca tinca", ...}).
    with open(CLASS_MAP_FILENAME, 'r') as f:
        class_map_json = json.load(f)

    # 2. Collect the class names for indices 0..999 in order.
    #    NOTE(review): assumes every value is the class-name string itself
    #    (not a [wnid, name] list) — this matches the bundled labels_map.txt,
    #    but will fail (KeyError/shape) on other label-map formats.
    labels_list = [class_map_json[str(i)] for i in range(1000)]

    # 3. Index -> name dictionary for O(1) lookup by predicted class id.
    class_name_map = {i: name for i, name in enumerate(labels_list)}

    print(f"ImageNet ํด๋์ค ์ด๋ฆ ({len(class_name_map)}๊ฐ) ๋ก๋ ์๋ฃ.")

except Exception as e:
    print(f"[์ค๋ฅ] ํด๋์ค ํ์ผ ๋ก๋ ๋๋ ์ฒ๋ฆฌ ์ค ์ค๋ฅ ๋ฐ์: {e}")
    sys.exit(1)  # abort on any parse/shape error as well
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
# ==============================================================================
# 1. Setup and model loading
# ==============================================================================
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(f"์ฌ์ฉ ์ฅ์น: {device}")

# EfficientNet-B0 pretrained on ImageNet (weights are downloaded on first run).
print("์ฌ์ ํ๋ จ๋ EfficientNetB0 ๋ชจ๋ธ ๋ก๋ ์ค...")
model = efficientnet_b0(weights=EfficientNet_B0_Weights.IMAGENET1K_V1)
model.eval()  # inference mode: disables dropout / batch-norm updates
model = model.to(device)
print("๋ชจ๋ธ ๋ก๋ ๋ฐ ํ๊ฐ ๋ชจ๋ ์ค์ ์๋ฃ.")

# ==============================================================================
# 2. Mandatory preprocessing pipeline
# ==============================================================================
# Standard ImageNet evaluation transform: resize shorter side to 256,
# center-crop to 224x224, then normalize with the ImageNet channel statistics.
preprocess = transforms.Compose([
    transforms.Resize(256),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
|
| 62 |
+
|
| 63 |
+
# ==============================================================================
# 3. Image classification / reporting function
# ==============================================================================

def classify_image(image_path_string):
    """Classify the image at the given path with EfficientNet-B0 and print
    the top-5 predictions (class name, class index, probability).

    All failures (missing file, inference error) are reported on stdout;
    the function always returns None.
    """
    try:
        # Load the image and force 3-channel RGB (handles grayscale/RGBA).
        image = Image.open(image_path_string).convert('RGB')
        print(f"\n[INFO] ์ด๋ฏธ์ง ๋ก๋ ์ฑ๊ณต: {image_path_string}")

        # Preprocess and add the batch dimension expected by the model.
        batch = preprocess(image).unsqueeze(0).to(device)

        # Forward pass without gradient tracking.
        with torch.no_grad():
            logits = model(batch)

        # Convert logits to probabilities and keep the five best candidates.
        scores = F.softmax(logits[0], dim=0)
        top_prob, top_catid = torch.topk(scores, 5)

        print("\n--- ๋ถ๋ฅ ๊ฒฐ๊ณผ (Top-5) ---")

        for rank, (prob, catid) in enumerate(zip(top_prob, top_catid), start=1):
            idx = catid.item()
            # Resolve the human-readable name; fall back to a placeholder.
            class_name = class_name_map.get(idx, f"์ ์ ์๋ ํด๋์ค (ID: {idx})")

            print(f"์์ {rank}:")
            print(f" - ํด๋์ค ์ด๋ฆ: **{class_name}**")
            print(f" - ํด๋์ค ์ธ๋ฑ์ค (ID): {idx}")
            print(f" - ํ๋ฅ : {prob.item():.4f}")

    except FileNotFoundError:
        print(f"\n[์ค๋ฅ] ์ด๋ฏธ์ง ํ์ผ์ ์ฐพ์ ์ ์์ต๋๋ค: {image_path_string}")
        print("๊ฒฝ๋ก๋ฅผ ๋ค์ ํ์ธํด์ฃผ์ธ์.")
    except Exception as e:
        print(f"\n[์ค๋ฅ] ๋ถ๋ฅ ์ค ๋ฌธ์ ๊ฐ ๋ฐ์ํ์ต๋๋ค: {e}")
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# --- Execution ---
# Path of the image to classify (adjust to your own environment!).
# NOTE(review): hard-coded Windows path; consider reading it from sys.argv.
CLASSIFY_TARGET_PATH = 'C:/Users/itg/Pictures/muffin1.png'

# Run the classifier once at script run time.
classify_image(CLASSIFY_TARGET_PATH)
|
effinet_basic_compo.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from torchvision.models import efficientnet_b0, EfficientNet_B0_Weights
|
| 3 |
+
import torchvision.transforms as transforms
|
| 4 |
+
from PIL import Image
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import json
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
# ==============================================================================
|
| 11 |
+
# 0. ImageNet ํด๋์ค ์ด๋ฆ ๋ก๋
|
| 12 |
+
# ==============================================================================
|
| 13 |
+
CLASS_MAP_FILENAME = 'labels_map.txt'
|
| 14 |
+
class_name_map = None
|
| 15 |
+
|
| 16 |
+
# API ์๋ฒ ์์ ์ ImageNet ํด๋์ค ๋งต์ ๋ฉ๋ชจ๋ฆฌ์ ๋ก๋
|
| 17 |
+
try:
|
| 18 |
+
if not os.path.exists(CLASS_MAP_FILENAME):
|
| 19 |
+
# NOTE: API ํ๊ฒฝ์์๋ sys.exit ๋์ ์์ธ๋ฅผ ๋ฐ์์์ผ์ผ ํฉ๋๋ค.
|
| 20 |
+
raise FileNotFoundError(f"[์ค๋ฅ] ํด๋์ค ์ด๋ฆ ํ์ผ('{CLASS_MAP_FILENAME}')์ ์ฐพ์ ์ ์์ต๋๋ค. API ์๋ฒ๋ฅผ ์์ํ ์ ์์ต๋๋ค.")
|
| 21 |
+
|
| 22 |
+
with open(CLASS_MAP_FILENAME, 'r') as f:
|
| 23 |
+
class_map_json = json.load(f)
|
| 24 |
+
|
| 25 |
+
# ๐จ๐จ๐จ ์ด ๋ถ๋ถ์ด ์์ ๋์์ต๋๋ค. ๐จ๐จ๐จ
|
| 26 |
+
# ๊ฐ(Value)์ด ๋ฌธ์์ด์ธ ๊ฒฝ์ฐ: v ์์ฒด๊ฐ ํด๋์ค ์ด๋ฆ์
๋๋ค.
|
| 27 |
+
# ๊ฐ(Value)์ด ๋ฆฌ์คํธ์ธ ๊ฒฝ์ฐ: ๋ฆฌ์คํธ์ ๋ง์ง๋ง ์์(์ผ๋ฐ์ ์ผ๋ก ์ธ๋ฑ์ค 1)๋ฅผ ํด๋์ค ์ด๋ฆ์ผ๋ก ๊ฐ์ ํฉ๋๋ค.
|
| 28 |
+
labels_list = []
|
| 29 |
+
for k, v in class_map_json.items():
|
| 30 |
+
if k.isdigit() and 0 <= int(k) < 1000:
|
| 31 |
+
if isinstance(v, list) and len(v) > 1:
|
| 32 |
+
labels_list.append(v[1]) # ๋ฆฌ์คํธ์ผ ๊ฒฝ์ฐ ๋ ๋ฒ์งธ ์์ (์ด์ ์ฝ๋ ์ ์ง)
|
| 33 |
+
elif isinstance(v, str):
|
| 34 |
+
labels_list.append(v) # ๋ฌธ์์ด์ผ ๊ฒฝ์ฐ ์ ์ฒด ๋ฌธ์์ด ์ฌ์ฉ (์์ ๋ ํต์ฌ)
|
| 35 |
+
else:
|
| 36 |
+
# ์ ์ ์๋ ํ์์ ๋ฌด์ํ๊ฑฐ๋, ๊ธฐ๋ณธ๊ฐ ์ค์
|
| 37 |
+
labels_list.append(f"Unknown Class Index {k}")
|
| 38 |
+
|
| 39 |
+
# ์ธ๋ฑ์ค์ ์ด๋ฆ ๋งคํ ๋์
๋๋ฆฌ๋ก ๋ณํ
|
| 40 |
+
# labels_list์ ์์๊ฐ ๋ชจ๋ธ์ ์ถ๋ ฅ ์ธ๋ฑ์ค (0~999)์ ์ผ์นํด์ผ ํฉ๋๋ค.
|
| 41 |
+
class_name_map = {i: name for i, name in enumerate(labels_list)}
|
| 42 |
+
|
| 43 |
+
# ํด๋์ค ๋งต์ด 1000๊ฐ๊ฐ ๋ง๋์ง ํ์ธ (ImageNet ๊ธฐ์ค)
|
| 44 |
+
if len(class_name_map) != 1000:
|
| 45 |
+
print(f"[๊ฒฝ๊ณ ] ๋ก๋๋ ํด๋์ค ์: {len(class_name_map)}๊ฐ. ImageNet (1000๊ฐ)๊ณผ ๋ค๋ฆ
๋๋ค. ํ์ธํด ์ฃผ์ธ์.")
|
| 46 |
+
|
| 47 |
+
print(f"ImageNet ํด๋์ค ๋งต ๋ก๋ ์ฑ๊ณต. (์ด {len(class_name_map)}๊ฐ)")
|
| 48 |
+
|
| 49 |
+
except FileNotFoundError as e:
|
| 50 |
+
# API ์๋ฒ ์์์ ๋ง๊ธฐ ์ํด ๋ฐ์๋ ์ค๋ฅ๋ฅผ ๋ค์ ๋ฐ์
|
| 51 |
+
raise e
|
| 52 |
+
except Exception as e:
|
| 53 |
+
# JSON ํ์ฑ ์ค๋ฅ ๋ฑ ๊ธฐํ ๋ก๋ฉ ์ค๋ฅ
|
| 54 |
+
print(f"[์ค๋ฅ] ํด๋์ค ๋งต ๋ก๋ ์ค ์๊ธฐ์น ์์ ์ค๋ฅ ๋ฐ์: {e}")
|
| 55 |
+
class_name_map = None # ๋ก๋ ์คํจ ์ None ์ ์ง
|
| 56 |
+
# API ์๋ฒ ์์์ ๋ง๊ธฐ ์ํด RuntimeError ๋ฐ์
|
| 57 |
+
raise RuntimeError(f"ํด๋์ค ๋งต ๋ก๋ ์ค๋ฅ: {e}")
|
| 58 |
+
|
| 59 |
+
# ==============================================================================
# 1. Load model and preprocessing pipeline (runs once, globally)
# ==============================================================================

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Pretrained EfficientNet-B0. Loading can fail (download, disk, CUDA),
# so the whole step is guarded.
try:
    # The weights object also bundles the matching eval-time transforms.
    weights = EfficientNet_B0_Weights.DEFAULT
    model = efficientnet_b0(weights=weights).to(device).eval()  # inference mode
    preprocess = weights.transforms()  # resize / crop / normalize pipeline
    print("EfficientNetB0 ๋ชจ๋ธ ๋ฐ ์ ์ฒ๋ฆฌ ํ์ดํ๋ผ์ธ ๋ก๋ ์ฑ๊ณต.")
except Exception as e:
    print(f"[์ค๋ฅ] EfficientNetB0 ๋ชจ๋ธ ๋ก๋ ์ค ์ค๋ฅ ๋ฐ์: {e}")
    # Leave both unset, then abort startup so the API never serves requests
    # with a half-initialized model.
    model = None
    preprocess = None
    raise RuntimeError(f"๋ชจ๋ธ ๋ก๋ ์ค๋ฅ: {e}")
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
# ==============================================================================
# 2. Classification function (used by the API)
# ==============================================================================

def classify_image_pil(img: Image.Image) -> list:
    """Run EfficientNet-B0 on a PIL image and return the top-5 predictions.

    Each entry of the returned list is a dict with the keys ``rank``,
    ``class_name``, ``class_index`` and ``probability``.

    Raises:
        RuntimeError: if the model/class map is not loaded yet, or if any
            error occurs during preprocessing or inference.
    """
    if class_name_map is None or not model:
        raise RuntimeError("๋ชจ๋ธ ๋๋ ํด๋์ค ๋งต์ด ์์ง ๋ก๋๋์ง ์์์ต๋๋ค.")

    try:
        # Normalize to RGB, preprocess, and add the batch dimension.
        batch = preprocess(img.convert('RGB')).unsqueeze(0).to(device)

        # Forward pass without gradient bookkeeping.
        with torch.no_grad():
            logits = model(batch)

        # Softmax over the class axis, then keep the five best candidates.
        scores = F.softmax(logits[0], dim=0)
        top_prob, top_catid = torch.topk(scores, 5)

        results = []
        for rank, (prob, catid) in enumerate(zip(top_prob, top_catid), start=1):
            idx = catid.item()
            results.append({
                "rank": rank,
                "class_name": class_name_map.get(idx, f"์ ์ ์๋ ํด๋์ค (ID: {idx})"),
                "class_index": idx,
                "probability": prob.item()
            })

        return results

    except Exception as e:
        # Surface every classification failure to the caller (app.py)
        # as a RuntimeError.
        raise RuntimeError(f"์ด๋ฏธ์ง ๋ถ๋ฅ ์ค PyTorch/CUDA ์ค๋ฅ ๋ฐ์: {e}")
|
labels_map.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"0": "tench, Tinca tinca", "1": "goldfish, Carassius auratus", "2": "great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias", "3": "tiger shark, Galeocerdo cuvieri", "4": "hammerhead, hammerhead shark", "5": "electric ray, crampfish, numbfish, torpedo", "6": "stingray", "7": "cock", "8": "hen", "9": "ostrich, Struthio camelus", "10": "brambling, Fringilla montifringilla", "11": "goldfinch, Carduelis carduelis", "12": "house finch, linnet, Carpodacus mexicanus", "13": "junco, snowbird", "14": "indigo bunting, indigo finch, indigo bird, Passerina cyanea", "15": "robin, American robin, Turdus migratorius", "16": "bulbul", "17": "jay", "18": "magpie", "19": "chickadee", "20": "water ouzel, dipper", "21": "kite", "22": "bald eagle, American eagle, Haliaeetus leucocephalus", "23": "vulture", "24": "great grey owl, great gray owl, Strix nebulosa", "25": "European fire salamander, Salamandra salamandra", "26": "common newt, Triturus vulgaris", "27": "eft", "28": "spotted salamander, Ambystoma maculatum", "29": "axolotl, mud puppy, Ambystoma mexicanum", "30": "bullfrog, Rana catesbeiana", "31": "tree frog, tree-frog", "32": "tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui", "33": "loggerhead, loggerhead turtle, Caretta caretta", "34": "leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea", "35": "mud turtle", "36": "terrapin", "37": "box turtle, box tortoise", "38": "banded gecko", "39": "common iguana, iguana, Iguana iguana", "40": "American chameleon, anole, Anolis carolinensis", "41": "whiptail, whiptail lizard", "42": "agama", "43": "frilled lizard, Chlamydosaurus kingi", "44": "alligator lizard", "45": "Gila monster, Heloderma suspectum", "46": "green lizard, Lacerta viridis", "47": "African chameleon, Chamaeleo chamaeleon", "48": "Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis", "49": "African crocodile, Nile crocodile, Crocodylus niloticus", "50": "American alligator, 
Alligator mississipiensis", "51": "triceratops", "52": "thunder snake, worm snake, Carphophis amoenus", "53": "ringneck snake, ring-necked snake, ring snake", "54": "hognose snake, puff adder, sand viper", "55": "green snake, grass snake", "56": "king snake, kingsnake", "57": "garter snake, grass snake", "58": "water snake", "59": "vine snake", "60": "night snake, Hypsiglena torquata", "61": "boa constrictor, Constrictor constrictor", "62": "rock python, rock snake, Python sebae", "63": "Indian cobra, Naja naja", "64": "green mamba", "65": "sea snake", "66": "horned viper, cerastes, sand viper, horned asp, Cerastes cornutus", "67": "diamondback, diamondback rattlesnake, Crotalus adamanteus", "68": "sidewinder, horned rattlesnake, Crotalus cerastes", "69": "trilobite", "70": "harvestman, daddy longlegs, Phalangium opilio", "71": "scorpion", "72": "black and gold garden spider, Argiope aurantia", "73": "barn spider, Araneus cavaticus", "74": "garden spider, Aranea diademata", "75": "black widow, Latrodectus mactans", "76": "tarantula", "77": "wolf spider, hunting spider", "78": "tick", "79": "centipede", "80": "black grouse", "81": "ptarmigan", "82": "ruffed grouse, partridge, Bonasa umbellus", "83": "prairie chicken, prairie grouse, prairie fowl", "84": "peacock", "85": "quail", "86": "partridge", "87": "African grey, African gray, Psittacus erithacus", "88": "macaw", "89": "sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita", "90": "lorikeet", "91": "coucal", "92": "bee eater", "93": "hornbill", "94": "hummingbird", "95": "jacamar", "96": "toucan", "97": "drake", "98": "red-breasted merganser, Mergus serrator", "99": "goose", "100": "black swan, Cygnus atratus", "101": "tusker", "102": "echidna, spiny anteater, anteater", "103": "platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus", "104": "wallaby, brush kangaroo", "105": "koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus", "106": "wombat", "107": 
"jellyfish", "108": "sea anemone, anemone", "109": "brain coral", "110": "flatworm, platyhelminth", "111": "nematode, nematode worm, roundworm", "112": "conch", "113": "snail", "114": "slug", "115": "sea slug, nudibranch", "116": "chiton, coat-of-mail shell, sea cradle, polyplacophore", "117": "chambered nautilus, pearly nautilus, nautilus", "118": "Dungeness crab, Cancer magister", "119": "rock crab, Cancer irroratus", "120": "fiddler crab", "121": "king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica", "122": "American lobster, Northern lobster, Maine lobster, Homarus americanus", "123": "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "124": "crayfish, crawfish, crawdad, crawdaddy", "125": "hermit crab", "126": "isopod", "127": "white stork, Ciconia ciconia", "128": "black stork, Ciconia nigra", "129": "spoonbill", "130": "flamingo", "131": "little blue heron, Egretta caerulea", "132": "American egret, great white heron, Egretta albus", "133": "bittern", "134": "crane", "135": "limpkin, Aramus pictus", "136": "European gallinule, Porphyrio porphyrio", "137": "American coot, marsh hen, mud hen, water hen, Fulica americana", "138": "bustard", "139": "ruddy turnstone, Arenaria interpres", "140": "red-backed sandpiper, dunlin, Erolia alpina", "141": "redshank, Tringa totanus", "142": "dowitcher", "143": "oystercatcher, oyster catcher", "144": "pelican", "145": "king penguin, Aptenodytes patagonica", "146": "albatross, mollymawk", "147": "grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus", "148": "killer whale, killer, orca, grampus, sea wolf, Orcinus orca", "149": "dugong, Dugong dugon", "150": "sea lion", "151": "Chihuahua", "152": "Japanese spaniel", "153": "Maltese dog, Maltese terrier, Maltese", "154": "Pekinese, Pekingese, Peke", "155": "Shih-Tzu", "156": "Blenheim spaniel", "157": "papillon", "158": "toy terrier", "159": "Rhodesian ridgeback", "160": "Afghan hound, 
Afghan", "161": "basset, basset hound", "162": "beagle", "163": "bloodhound, sleuthhound", "164": "bluetick", "165": "black-and-tan coonhound", "166": "Walker hound, Walker foxhound", "167": "English foxhound", "168": "redbone", "169": "borzoi, Russian wolfhound", "170": "Irish wolfhound", "171": "Italian greyhound", "172": "whippet", "173": "Ibizan hound, Ibizan Podenco", "174": "Norwegian elkhound, elkhound", "175": "otterhound, otter hound", "176": "Saluki, gazelle hound", "177": "Scottish deerhound, deerhound", "178": "Weimaraner", "179": "Staffordshire bullterrier, Staffordshire bull terrier", "180": "American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier", "181": "Bedlington terrier", "182": "Border terrier", "183": "Kerry blue terrier", "184": "Irish terrier", "185": "Norfolk terrier", "186": "Norwich terrier", "187": "Yorkshire terrier", "188": "wire-haired fox terrier", "189": "Lakeland terrier", "190": "Sealyham terrier, Sealyham", "191": "Airedale, Airedale terrier", "192": "cairn, cairn terrier", "193": "Australian terrier", "194": "Dandie Dinmont, Dandie Dinmont terrier", "195": "Boston bull, Boston terrier", "196": "miniature schnauzer", "197": "giant schnauzer", "198": "standard schnauzer", "199": "Scotch terrier, Scottish terrier, Scottie", "200": "Tibetan terrier, chrysanthemum dog", "201": "silky terrier, Sydney silky", "202": "soft-coated wheaten terrier", "203": "West Highland white terrier", "204": "Lhasa, Lhasa apso", "205": "flat-coated retriever", "206": "curly-coated retriever", "207": "golden retriever", "208": "Labrador retriever", "209": "Chesapeake Bay retriever", "210": "German short-haired pointer", "211": "vizsla, Hungarian pointer", "212": "English setter", "213": "Irish setter, red setter", "214": "Gordon setter", "215": "Brittany spaniel", "216": "clumber, clumber spaniel", "217": "English springer, English springer spaniel", "218": "Welsh springer spaniel", "219": "cocker spaniel, 
English cocker spaniel, cocker", "220": "Sussex spaniel", "221": "Irish water spaniel", "222": "kuvasz", "223": "schipperke", "224": "groenendael", "225": "malinois", "226": "briard", "227": "kelpie", "228": "komondor", "229": "Old English sheepdog, bobtail", "230": "Shetland sheepdog, Shetland sheep dog, Shetland", "231": "collie", "232": "Border collie", "233": "Bouvier des Flandres, Bouviers des Flandres", "234": "Rottweiler", "235": "German shepherd, German shepherd dog, German police dog, alsatian", "236": "Doberman, Doberman pinscher", "237": "miniature pinscher", "238": "Greater Swiss Mountain dog", "239": "Bernese mountain dog", "240": "Appenzeller", "241": "EntleBucher", "242": "boxer", "243": "bull mastiff", "244": "Tibetan mastiff", "245": "French bulldog", "246": "Great Dane", "247": "Saint Bernard, St Bernard", "248": "Eskimo dog, husky", "249": "malamute, malemute, Alaskan malamute", "250": "Siberian husky", "251": "dalmatian, coach dog, carriage dog", "252": "affenpinscher, monkey pinscher, monkey dog", "253": "basenji", "254": "pug, pug-dog", "255": "Leonberg", "256": "Newfoundland, Newfoundland dog", "257": "Great Pyrenees", "258": "Samoyed, Samoyede", "259": "Pomeranian", "260": "chow, chow chow", "261": "keeshond", "262": "Brabancon griffon", "263": "Pembroke, Pembroke Welsh corgi", "264": "Cardigan, Cardigan Welsh corgi", "265": "toy poodle", "266": "miniature poodle", "267": "standard poodle", "268": "Mexican hairless", "269": "timber wolf, grey wolf, gray wolf, Canis lupus", "270": "white wolf, Arctic wolf, Canis lupus tundrarum", "271": "red wolf, maned wolf, Canis rufus, Canis niger", "272": "coyote, prairie wolf, brush wolf, Canis latrans", "273": "dingo, warrigal, warragal, Canis dingo", "274": "dhole, Cuon alpinus", "275": "African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus", "276": "hyena, hyaena", "277": "red fox, Vulpes vulpes", "278": "kit fox, Vulpes macrotis", "279": "Arctic fox, white fox, Alopex lagopus", "280": "grey 
fox, gray fox, Urocyon cinereoargenteus", "281": "tabby, tabby cat", "282": "tiger cat", "283": "Persian cat", "284": "Siamese cat, Siamese", "285": "Egyptian cat", "286": "cougar, puma, catamount, mountain lion, painter, panther, Felis concolor", "287": "lynx, catamount", "288": "leopard, Panthera pardus", "289": "snow leopard, ounce, Panthera uncia", "290": "jaguar, panther, Panthera onca, Felis onca", "291": "lion, king of beasts, Panthera leo", "292": "tiger, Panthera tigris", "293": "cheetah, chetah, Acinonyx jubatus", "294": "brown bear, bruin, Ursus arctos", "295": "American black bear, black bear, Ursus americanus, Euarctos americanus", "296": "ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus", "297": "sloth bear, Melursus ursinus, Ursus ursinus", "298": "mongoose", "299": "meerkat, mierkat", "300": "tiger beetle", "301": "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "302": "ground beetle, carabid beetle", "303": "long-horned beetle, longicorn, longicorn beetle", "304": "leaf beetle, chrysomelid", "305": "dung beetle", "306": "rhinoceros beetle", "307": "weevil", "308": "fly", "309": "bee", "310": "ant, emmet, pismire", "311": "grasshopper, hopper", "312": "cricket", "313": "walking stick, walkingstick, stick insect", "314": "cockroach, roach", "315": "mantis, mantid", "316": "cicada, cicala", "317": "leafhopper", "318": "lacewing, lacewing fly", "319": "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", "320": "damselfly", "321": "admiral", "322": "ringlet, ringlet butterfly", "323": "monarch, monarch butterfly, milkweed butterfly, Danaus plexippus", "324": "cabbage butterfly", "325": "sulphur butterfly, sulfur butterfly", "326": "lycaenid, lycaenid butterfly", "327": "starfish, sea star", "328": "sea urchin", "329": "sea cucumber, holothurian", "330": "wood rabbit, cottontail, cottontail rabbit", "331": "hare", "332": "Angora, Angora rabbit", "333": 
"hamster", "334": "porcupine, hedgehog", "335": "fox squirrel, eastern fox squirrel, Sciurus niger", "336": "marmot", "337": "beaver", "338": "guinea pig, Cavia cobaya", "339": "sorrel", "340": "zebra", "341": "hog, pig, grunter, squealer, Sus scrofa", "342": "wild boar, boar, Sus scrofa", "343": "warthog", "344": "hippopotamus, hippo, river horse, Hippopotamus amphibius", "345": "ox", "346": "water buffalo, water ox, Asiatic buffalo, Bubalus bubalis", "347": "bison", "348": "ram, tup", "349": "bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis", "350": "ibex, Capra ibex", "351": "hartebeest", "352": "impala, Aepyceros melampus", "353": "gazelle", "354": "Arabian camel, dromedary, Camelus dromedarius", "355": "llama", "356": "weasel", "357": "mink", "358": "polecat, fitch, foulmart, foumart, Mustela putorius", "359": "black-footed ferret, ferret, Mustela nigripes", "360": "otter", "361": "skunk, polecat, wood pussy", "362": "badger", "363": "armadillo", "364": "three-toed sloth, ai, Bradypus tridactylus", "365": "orangutan, orang, orangutang, Pongo pygmaeus", "366": "gorilla, Gorilla gorilla", "367": "chimpanzee, chimp, Pan troglodytes", "368": "gibbon, Hylobates lar", "369": "siamang, Hylobates syndactylus, Symphalangus syndactylus", "370": "guenon, guenon monkey", "371": "patas, hussar monkey, Erythrocebus patas", "372": "baboon", "373": "macaque", "374": "langur", "375": "colobus, colobus monkey", "376": "proboscis monkey, Nasalis larvatus", "377": "marmoset", "378": "capuchin, ringtail, Cebus capucinus", "379": "howler monkey, howler", "380": "titi, titi monkey", "381": "spider monkey, Ateles geoffroyi", "382": "squirrel monkey, Saimiri sciureus", "383": "Madagascar cat, ring-tailed lemur, Lemur catta", "384": "indri, indris, Indri indri, Indri brevicaudatus", "385": "Indian elephant, Elephas maximus", "386": "African elephant, Loxodonta africana", "387": "lesser panda, red panda, panda, bear cat, cat bear, Ailurus 
fulgens", "388": "giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca", "389": "barracouta, snoek", "390": "eel", "391": "coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch", "392": "rock beauty, Holocanthus tricolor", "393": "anemone fish", "394": "sturgeon", "395": "gar, garfish, garpike, billfish, Lepisosteus osseus", "396": "lionfish", "397": "puffer, pufferfish, blowfish, globefish", "398": "abacus", "399": "abaya", "400": "academic gown, academic robe, judge's robe", "401": "accordion, piano accordion, squeeze box", "402": "acoustic guitar", "403": "aircraft carrier, carrier, flattop, attack aircraft carrier", "404": "airliner", "405": "airship, dirigible", "406": "altar", "407": "ambulance", "408": "amphibian, amphibious vehicle", "409": "analog clock", "410": "apiary, bee house", "411": "apron", "412": "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "413": "assault rifle, assault gun", "414": "backpack, back pack, knapsack, packsack, rucksack, haversack", "415": "bakery, bakeshop, bakehouse", "416": "balance beam, beam", "417": "balloon", "418": "ballpoint, ballpoint pen, ballpen, Biro", "419": "Band Aid", "420": "banjo", "421": "bannister, banister, balustrade, balusters, handrail", "422": "barbell", "423": "barber chair", "424": "barbershop", "425": "barn", "426": "barometer", "427": "barrel, cask", "428": "barrow, garden cart, lawn cart, wheelbarrow", "429": "baseball", "430": "basketball", "431": "bassinet", "432": "bassoon", "433": "bathing cap, swimming cap", "434": "bath towel", "435": "bathtub, bathing tub, bath, tub", "436": "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "437": "beacon, lighthouse, beacon light, pharos", "438": "beaker", "439": "bearskin, busby, shako", "440": "beer bottle", "441": "beer glass", "442": "bell cote, bell cot", "443": "bib", "444": "bicycle-built-for-two, tandem bicycle, tandem", "445": 
"bikini, two-piece", "446": "binder, ring-binder", "447": "binoculars, field glasses, opera glasses", "448": "birdhouse", "449": "boathouse", "450": "bobsled, bobsleigh, bob", "451": "bolo tie, bolo, bola tie, bola", "452": "bonnet, poke bonnet", "453": "bookcase", "454": "bookshop, bookstore, bookstall", "455": "bottlecap", "456": "bow", "457": "bow tie, bow-tie, bowtie", "458": "brass, memorial tablet, plaque", "459": "brassiere, bra, bandeau", "460": "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "461": "breastplate, aegis, egis", "462": "broom", "463": "bucket, pail", "464": "buckle", "465": "bulletproof vest", "466": "bullet train, bullet", "467": "butcher shop, meat market", "468": "cab, hack, taxi, taxicab", "469": "caldron, cauldron", "470": "candle, taper, wax light", "471": "cannon", "472": "canoe", "473": "can opener, tin opener", "474": "cardigan", "475": "car mirror", "476": "carousel, carrousel, merry-go-round, roundabout, whirligig", "477": "carpenter's kit, tool kit", "478": "carton", "479": "car wheel", "480": "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM", "481": "cassette", "482": "cassette player", "483": "castle", "484": "catamaran", "485": "CD player", "486": "cello, violoncello", "487": "cellular telephone, cellular phone, cellphone, cell, mobile phone", "488": "chain", "489": "chainlink fence", "490": "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour", "491": "chain saw, chainsaw", "492": "chest", "493": "chiffonier, commode", "494": "chime, bell, gong", "495": "china cabinet, china closet", "496": "Christmas stocking", "497": "church, church building", "498": "cinema, movie theater, movie theatre, movie house, picture palace", "499": "cleaver, meat cleaver, chopper", "500": "cliff dwelling", "501": "cloak", "502": "clog, geta, patten, sabot", "503": "cocktail shaker", "504": "coffee mug", "505": "coffeepot", "506": "coil, 
spiral, volute, whorl, helix", "507": "combination lock", "508": "computer keyboard, keypad", "509": "confectionery, confectionary, candy store", "510": "container ship, containership, container vessel", "511": "convertible", "512": "corkscrew, bottle screw", "513": "cornet, horn, trumpet, trump", "514": "cowboy boot", "515": "cowboy hat, ten-gallon hat", "516": "cradle", "517": "crane", "518": "crash helmet", "519": "crate", "520": "crib, cot", "521": "Crock Pot", "522": "croquet ball", "523": "crutch", "524": "cuirass", "525": "dam, dike, dyke", "526": "desk", "527": "desktop computer", "528": "dial telephone, dial phone", "529": "diaper, nappy, napkin", "530": "digital clock", "531": "digital watch", "532": "dining table, board", "533": "dishrag, dishcloth", "534": "dishwasher, dish washer, dishwashing machine", "535": "disk brake, disc brake", "536": "dock, dockage, docking facility", "537": "dogsled, dog sled, dog sleigh", "538": "dome", "539": "doormat, welcome mat", "540": "drilling platform, offshore rig", "541": "drum, membranophone, tympan", "542": "drumstick", "543": "dumbbell", "544": "Dutch oven", "545": "electric fan, blower", "546": "electric guitar", "547": "electric locomotive", "548": "entertainment center", "549": "envelope", "550": "espresso maker", "551": "face powder", "552": "feather boa, boa", "553": "file, file cabinet, filing cabinet", "554": "fireboat", "555": "fire engine, fire truck", "556": "fire screen, fireguard", "557": "flagpole, flagstaff", "558": "flute, transverse flute", "559": "folding chair", "560": "football helmet", "561": "forklift", "562": "fountain", "563": "fountain pen", "564": "four-poster", "565": "freight car", "566": "French horn, horn", "567": "frying pan, frypan, skillet", "568": "fur coat", "569": "garbage truck, dustcart", "570": "gasmask, respirator, gas helmet", "571": "gas pump, gasoline pump, petrol pump, island dispenser", "572": "goblet", "573": "go-kart", "574": "golf ball", "575": "golfcart, golf cart", 
"576": "gondola", "577": "gong, tam-tam", "578": "gown", "579": "grand piano, grand", "580": "greenhouse, nursery, glasshouse", "581": "grille, radiator grille", "582": "grocery store, grocery, food market, market", "583": "guillotine", "584": "hair slide", "585": "hair spray", "586": "half track", "587": "hammer", "588": "hamper", "589": "hand blower, blow dryer, blow drier, hair dryer, hair drier", "590": "hand-held computer, hand-held microcomputer", "591": "handkerchief, hankie, hanky, hankey", "592": "hard disc, hard disk, fixed disk", "593": "harmonica, mouth organ, harp, mouth harp", "594": "harp", "595": "harvester, reaper", "596": "hatchet", "597": "holster", "598": "home theater, home theatre", "599": "honeycomb", "600": "hook, claw", "601": "hoopskirt, crinoline", "602": "horizontal bar, high bar", "603": "horse cart, horse-cart", "604": "hourglass", "605": "iPod", "606": "iron, smoothing iron", "607": "jack-o'-lantern", "608": "jean, blue jean, denim", "609": "jeep, landrover", "610": "jersey, T-shirt, tee shirt", "611": "jigsaw puzzle", "612": "jinrikisha, ricksha, rickshaw", "613": "joystick", "614": "kimono", "615": "knee pad", "616": "knot", "617": "lab coat, laboratory coat", "618": "ladle", "619": "lampshade, lamp shade", "620": "laptop, laptop computer", "621": "lawn mower, mower", "622": "lens cap, lens cover", "623": "letter opener, paper knife, paperknife", "624": "library", "625": "lifeboat", "626": "lighter, light, igniter, ignitor", "627": "limousine, limo", "628": "liner, ocean liner", "629": "lipstick, lip rouge", "630": "Loafer", "631": "lotion", "632": "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "633": "loupe, jeweler's loupe", "634": "lumbermill, sawmill", "635": "magnetic compass", "636": "mailbag, postbag", "637": "mailbox, letter box", "638": "maillot", "639": "maillot, tank suit", "640": "manhole cover", "641": "maraca", "642": "marimba, xylophone", "643": "mask", "644": "matchstick", "645": "maypole", 
"646": "maze, labyrinth", "647": "measuring cup", "648": "medicine chest, medicine cabinet", "649": "megalith, megalithic structure", "650": "microphone, mike", "651": "microwave, microwave oven", "652": "military uniform", "653": "milk can", "654": "minibus", "655": "miniskirt, mini", "656": "minivan", "657": "missile", "658": "mitten", "659": "mixing bowl", "660": "mobile home, manufactured home", "661": "Model T", "662": "modem", "663": "monastery", "664": "monitor", "665": "moped", "666": "mortar", "667": "mortarboard", "668": "mosque", "669": "mosquito net", "670": "motor scooter, scooter", "671": "mountain bike, all-terrain bike, off-roader", "672": "mountain tent", "673": "mouse, computer mouse", "674": "mousetrap", "675": "moving van", "676": "muzzle", "677": "nail", "678": "neck brace", "679": "necklace", "680": "nipple", "681": "notebook, notebook computer", "682": "obelisk", "683": "oboe, hautboy, hautbois", "684": "ocarina, sweet potato", "685": "odometer, hodometer, mileometer, milometer", "686": "oil filter", "687": "organ, pipe organ", "688": "oscilloscope, scope, cathode-ray oscilloscope, CRO", "689": "overskirt", "690": "oxcart", "691": "oxygen mask", "692": "packet", "693": "paddle, boat paddle", "694": "paddlewheel, paddle wheel", "695": "padlock", "696": "paintbrush", "697": "pajama, pyjama, pj's, jammies", "698": "palace", "699": "panpipe, pandean pipe, syrinx", "700": "paper towel", "701": "parachute, chute", "702": "parallel bars, bars", "703": "park bench", "704": "parking meter", "705": "passenger car, coach, carriage", "706": "patio, terrace", "707": "pay-phone, pay-station", "708": "pedestal, plinth, footstall", "709": "pencil box, pencil case", "710": "pencil sharpener", "711": "perfume, essence", "712": "Petri dish", "713": "photocopier", "714": "pick, plectrum, plectron", "715": "pickelhaube", "716": "picket fence, paling", "717": "pickup, pickup truck", "718": "pier", "719": "piggy bank, penny bank", "720": "pill bottle", "721": 
"pillow", "722": "ping-pong ball", "723": "pinwheel", "724": "pirate, pirate ship", "725": "pitcher, ewer", "726": "plane, carpenter's plane, woodworking plane", "727": "planetarium", "728": "plastic bag", "729": "plate rack", "730": "plow, plough", "731": "plunger, plumber's helper", "732": "Polaroid camera, Polaroid Land camera", "733": "pole", "734": "police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria", "735": "poncho", "736": "pool table, billiard table, snooker table", "737": "pop bottle, soda bottle", "738": "pot, flowerpot", "739": "potter's wheel", "740": "power drill", "741": "prayer rug, prayer mat", "742": "printer", "743": "prison, prison house", "744": "projectile, missile", "745": "projector", "746": "puck, hockey puck", "747": "punching bag, punch bag, punching ball, punchball", "748": "purse", "749": "quill, quill pen", "750": "quilt, comforter, comfort, puff", "751": "racer, race car, racing car", "752": "racket, racquet", "753": "radiator", "754": "radio, wireless", "755": "radio telescope, radio reflector", "756": "rain barrel", "757": "recreational vehicle, RV, R.V.", "758": "reel", "759": "reflex camera", "760": "refrigerator, icebox", "761": "remote control, remote", "762": "restaurant, eating house, eating place, eatery", "763": "revolver, six-gun, six-shooter", "764": "rifle", "765": "rocking chair, rocker", "766": "rotisserie", "767": "rubber eraser, rubber, pencil eraser", "768": "rugby ball", "769": "rule, ruler", "770": "running shoe", "771": "safe", "772": "safety pin", "773": "saltshaker, salt shaker", "774": "sandal", "775": "sarong", "776": "sax, saxophone", "777": "scabbard", "778": "scale, weighing machine", "779": "school bus", "780": "schooner", "781": "scoreboard", "782": "screen, CRT screen", "783": "screw", "784": "screwdriver", "785": "seat belt, seatbelt", "786": "sewing machine", "787": "shield, buckler", "788": "shoe shop, shoe-shop, shoe store", "789": "shoji", "790": "shopping basket", "791": 
"shopping cart", "792": "shovel", "793": "shower cap", "794": "shower curtain", "795": "ski", "796": "ski mask", "797": "sleeping bag", "798": "slide rule, slipstick", "799": "sliding door", "800": "slot, one-armed bandit", "801": "snorkel", "802": "snowmobile", "803": "snowplow, snowplough", "804": "soap dispenser", "805": "soccer ball", "806": "sock", "807": "solar dish, solar collector, solar furnace", "808": "sombrero", "809": "soup bowl", "810": "space bar", "811": "space heater", "812": "space shuttle", "813": "spatula", "814": "speedboat", "815": "spider web, spider's web", "816": "spindle", "817": "sports car, sport car", "818": "spotlight, spot", "819": "stage", "820": "steam locomotive", "821": "steel arch bridge", "822": "steel drum", "823": "stethoscope", "824": "stole", "825": "stone wall", "826": "stopwatch, stop watch", "827": "stove", "828": "strainer", "829": "streetcar, tram, tramcar, trolley, trolley car", "830": "stretcher", "831": "studio couch, day bed", "832": "stupa, tope", "833": "submarine, pigboat, sub, U-boat", "834": "suit, suit of clothes", "835": "sundial", "836": "sunglass", "837": "sunglasses, dark glasses, shades", "838": "sunscreen, sunblock, sun blocker", "839": "suspension bridge", "840": "swab, swob, mop", "841": "sweatshirt", "842": "swimming trunks, bathing trunks", "843": "swing", "844": "switch, electric switch, electrical switch", "845": "syringe", "846": "table lamp", "847": "tank, army tank, armored combat vehicle, armoured combat vehicle", "848": "tape player", "849": "teapot", "850": "teddy, teddy bear", "851": "television, television system", "852": "tennis ball", "853": "thatch, thatched roof", "854": "theater curtain, theatre curtain", "855": "thimble", "856": "thresher, thrasher, threshing machine", "857": "throne", "858": "tile roof", "859": "toaster", "860": "tobacco shop, tobacconist shop, tobacconist", "861": "toilet seat", "862": "torch", "863": "totem pole", "864": "tow truck, tow car, wrecker", "865": 
"toyshop", "866": "tractor", "867": "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "868": "tray", "869": "trench coat", "870": "tricycle, trike, velocipede", "871": "trimaran", "872": "tripod", "873": "triumphal arch", "874": "trolleybus, trolley coach, trackless trolley", "875": "trombone", "876": "tub, vat", "877": "turnstile", "878": "typewriter keyboard", "879": "umbrella", "880": "unicycle, monocycle", "881": "upright, upright piano", "882": "vacuum, vacuum cleaner", "883": "vase", "884": "vault", "885": "velvet", "886": "vending machine", "887": "vestment", "888": "viaduct", "889": "violin, fiddle", "890": "volleyball", "891": "waffle iron", "892": "wall clock", "893": "wallet, billfold, notecase, pocketbook", "894": "wardrobe, closet, press", "895": "warplane, military plane", "896": "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "897": "washer, automatic washer, washing machine", "898": "water bottle", "899": "water jug", "900": "water tower", "901": "whiskey jug", "902": "whistle", "903": "wig", "904": "window screen", "905": "window shade", "906": "Windsor tie", "907": "wine bottle", "908": "wing", "909": "wok", "910": "wooden spoon", "911": "wool, woolen, woollen", "912": "worm fence, snake fence, snake-rail fence, Virginia fence", "913": "wreck", "914": "yawl", "915": "yurt", "916": "web site, website, internet site, site", "917": "comic book", "918": "crossword puzzle, crossword", "919": "street sign", "920": "traffic light, traffic signal, stoplight", "921": "book jacket, dust cover, dust jacket, dust wrapper", "922": "menu", "923": "plate", "924": "guacamole", "925": "consomme", "926": "hot pot, hotpot", "927": "trifle", "928": "ice cream, icecream", "929": "ice lolly, lolly, lollipop, popsicle", "930": "French loaf", "931": "bagel, beigel", "932": "pretzel", "933": "cheeseburger", "934": "hotdog, hot dog, red hot", "935": "mashed potato", "936": "head cabbage", "937": "broccoli", "938": "cauliflower", 
"939": "zucchini, courgette", "940": "spaghetti squash", "941": "acorn squash", "942": "butternut squash", "943": "cucumber, cuke", "944": "artichoke, globe artichoke", "945": "bell pepper", "946": "cardoon", "947": "mushroom", "948": "Granny Smith", "949": "strawberry", "950": "orange", "951": "lemon", "952": "fig", "953": "pineapple, ananas", "954": "banana", "955": "jackfruit, jak, jack", "956": "custard apple", "957": "pomegranate", "958": "hay", "959": "carbonara", "960": "chocolate sauce, chocolate syrup", "961": "dough", "962": "meat loaf, meatloaf", "963": "pizza, pizza pie", "964": "potpie", "965": "burrito", "966": "red wine", "967": "espresso", "968": "cup", "969": "eggnog", "970": "alp", "971": "bubble", "972": "cliff, drop, drop-off", "973": "coral reef", "974": "geyser", "975": "lakeside, lakeshore", "976": "promontory, headland, head, foreland", "977": "sandbar, sand bar", "978": "seashore, coast, seacoast, sea-coast", "979": "valley, vale", "980": "volcano", "981": "ballplayer, baseball player", "982": "groom, bridegroom", "983": "scuba diver", "984": "rapeseed", "985": "daisy", "986": "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", "987": "corn", "988": "acorn", "989": "hip, rose hip, rosehip", "990": "buckeye, horse chestnut, conker", "991": "coral fungus", "992": "agaric", "993": "gyromitra", "994": "stinkhorn, carrion fungus", "995": "earthstar", "996": "hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa", "997": "bolete", "998": "ear, spike, capitulum", "999": "toilet tissue, toilet paper, bathroom tissue"}
|
requirements.txt
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
fastapi
|
| 2 |
+
uvicorn[standard]
|
| 3 |
+
torch
|
| 4 |
+
torchvision
|
| 5 |
+
numpy
|
| 6 |
+
Pillow
|
| 7 |
+
requests
|
| 8 |
+
python-multipart
|
| 9 |
+
|
| 10 |
+
llama-index
|
| 11 |
+
llama-index-llms-gemini
|
| 12 |
+
llama-index-embeddings-huggingface
|
| 13 |
+
llama-index-multi-modal-llms-gemini
|
| 14 |
+
|
| 15 |
+
transformers
|
| 16 |
+
einops
|
| 17 |
+
accelerate
|
| 18 |
+
|
| 19 |
+
python-dotenv
|
| 20 |
+
google-generativeai
|
| 21 |
+
pypdf
|
| 22 |
+
faiss-cpu
|
| 23 |
+
|
| 24 |
+
langchain
|
| 25 |
+
langchain-community
|
| 26 |
+
langchain-huggingface
|
| 27 |
+
langchain-google-genai
|
requirements_bk_260210.txt
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
fastapi
|
| 2 |
+
uvicorn[standard]
|
| 3 |
+
torch
|
| 4 |
+
torchvision
|
| 5 |
+
numpy
|
| 6 |
+
Pillow
|
| 7 |
+
requests
|
| 8 |
+
python-multipart
|
| 9 |
+
|
| 10 |
+
# LlamaIndex ์ฝ์ด ๋ฐ ํ๋ฌ๊ทธ์ธ
|
| 11 |
+
llama-index
|
| 12 |
+
llama-index-core
|
| 13 |
+
llama-index-llms-gemini
|
| 14 |
+
llama-index-embeddings-huggingface
|
| 15 |
+
llama-index-multi-modal-llms-gemini
|
| 16 |
+
|
| 17 |
+
# Hugging Face ๋ฐ Torch ๊ด๋ จ (ebind-full ๊ตฌ๋์ฉ)
|
| 18 |
+
transformers
|
| 19 |
+
einops
|
| 20 |
+
accelerate
|
| 21 |
+
|
| 22 |
+
# ํ๊ฒฝ๋ณ์ ๊ด๋ฆฌ
|
| 23 |
+
python-dotenv
|
| 24 |
+
google-generativeai
|
| 25 |
+
pypdf
|
| 26 |
+
|
| 27 |
+
langchain
|
| 28 |
+
langchain-community
|
| 29 |
+
langchain-huggingface
|
| 30 |
+
langchain-google-genai
|
| 31 |
+
faiss-cpu
|
requirements_bk_260211.txt
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# requirements.txt ์ถ์ฒ ์์ ์ (์ค๋ณต ์ ๊ฑฐ)
|
| 2 |
+
fastapi
|
| 3 |
+
uvicorn[standard]
|
| 4 |
+
torch
|
| 5 |
+
torchvision
|
| 6 |
+
numpy
|
| 7 |
+
Pillow
|
| 8 |
+
requests
|
| 9 |
+
python-multipart
|
| 10 |
+
|
| 11 |
+
# LlamaIndex (core ๋ฑ์ ์๋ ํฌํจ๋๋ฏ๋ก ๋ฉ์ธ ํจํค์ง๋ง ๋ช
์)
|
| 12 |
+
llama-index
|
| 13 |
+
llama-index-llms-gemini
|
| 14 |
+
llama-index-embeddings-huggingface
|
| 15 |
+
llama-index-multi-modal-llms-gemini
|
| 16 |
+
|
| 17 |
+
# Hugging Face
|
| 18 |
+
transformers
|
| 19 |
+
einops
|
| 20 |
+
accelerate
|
| 21 |
+
|
| 22 |
+
# ์ ํธ๋ฆฌํฐ
|
| 23 |
+
python-dotenv
|
| 24 |
+
google-generativeai
|
| 25 |
+
pypdf
|
| 26 |
+
faiss-cpu
|
| 27 |
+
|
| 28 |
+
# LangChain (๋ฒ์ ๋ช
์ ๊ถ์ฅ)
|
| 29 |
+
langchain
|
| 30 |
+
langchain-community
|
| 31 |
+
langchain-huggingface
|
| 32 |
+
langchain-google-genai
|
router/cnn_router.py
ADDED
|
@@ -0,0 +1,534 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, UploadFile, File, HTTPException
from io import BytesIO
from PIL import Image
import torch
import torch.nn as nn
from torchvision import models, transforms
import json
import os
from typing import List, Dict, Any

# Import the shared classification helper from effinet_basic_compo.py.
# NOTE: importing that module may execute global initialisation code
# (model loading) at import time.
try:
    from effinet_basic_compo import classify_image_pil
except ImportError:
    # When this module lives inside the router/ package, the project root may
    # not be on sys.path yet; add the parent directory and retry the import.
    import sys
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    from effinet_basic_compo import classify_image_pil

router = APIRouter(tags=["CNN_Image_Classification"])

# ==============================================================================
# Configuration and module-level state
# ==============================================================================

# Checkpoint and class-map file names, resolved relative to the process CWD.
MODEL_PATH = "efficientnet_b0_chihuahua_muffin.pt"
CLASS_MAP_PATH = "efficientnet_b0_chihuahua_muffin.json"

FGSM_MODEL_PATH = "efficientnet_b0_chihuahua_muffin_fsgmdef.pt"
FGSM_CLASS_MAP_PATH = "efficientnet_b0_chihuahua_muffin_fsgmdef.json"

PLANTDISEASE_MODEL_PATH_2 = "efficientnet_v2_s_plantforestdisease.pt"
PLANTDISEASE_CLASS_MAP_PATH_2 = "efficientnet_v2_s_plantforestdisease.json"

BEE_WASP_MODEL_PATH = "bee_wasp_efficientnetv2-s_aio.pth"

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Global registry of loaded models, their preprocessing pipelines and
# index -> class-name mappings. Entries stay None until load_models()
# populates them at server startup; endpoints return HTTP 503 while None.
MODEL = {
    "model": None,
    "preprocess": None,
    "class_idx_to_name": None,

    "fgsm_model": None,
    "fgsm_preprocess": None,
    "fgsm_class_idx_to_name": None,

    "plantdisease_model_2": None,
    "plantdisease_preprocess_2": None,
    "plantdisease_class_idx_to_name_2": None,

    "bee_wasp_model": None,
    "bee_wasp_preprocess": None,
    "bee_wasp_class_names": None,
}
|
| 58 |
+
|
| 59 |
+
# ==============================================================================
|
| 60 |
+
# ๐ ๋ชจ๋ธ ์๋ช
์ฃผ๊ธฐ ๊ด๋ฆฌ (Load / Shutdown)
|
| 61 |
+
# ==============================================================================
|
| 62 |
+
|
| 63 |
+
def _load_class_map(path: str) -> Dict[int, str]:
    """Load an index -> class-name mapping from a JSON file.

    Accepts either a JSON list (position = index) or a JSON object whose keys
    are stringified integers. Any other shape raises ValueError so the failure
    is reported by the caller instead of surfacing later as a NameError
    (the original code left the mapping variable unbound in that case).
    """
    if not os.path.exists(path):
        raise FileNotFoundError(f"ํด๋์ค ๋งต ํ์ผ์ด ์์ต๋๋ค: {path}")
    with open(path, 'r', encoding='utf-8') as f:
        raw = json.load(f)
    if isinstance(raw, list):
        return {i: name for i, name in enumerate(raw)}
    if isinstance(raw, dict):
        return {int(k): v for k, v in raw.items()}
    raise ValueError(f"Unsupported class map format in {path}: {type(raw).__name__}")


def _extract_state_dict(ckpt):
    """Return the weights from a checkpoint that may wrap them under 'state_dict'."""
    if isinstance(ckpt, dict) and 'state_dict' in ckpt:
        return ckpt['state_dict']
    return ckpt


def _build_preprocess(size: int) -> transforms.Compose:
    """ImageNet-normalised preprocessing pipeline at the given square size.

    Resize only (no CenterCrop) to match the training/prediction code of
    every model served by this router.
    """
    return transforms.Compose([
        transforms.Resize((size, size)),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ])


async def load_models():
    """
    Load all CNN models into the global MODEL registry at server startup.

    Each model loads inside its own try/except so one broken checkpoint does
    not block the others; a failed model stays None and its endpoint answers
    HTTP 503 until the problem is fixed.
    """
    # ---- 1. EfficientNetB0 (chihuahua vs muffin) ----
    print(f"[MODEL INIT] EfficientNetB0 ๋ชจ๋ธ ๋ก๋ ์์... ({MODEL_PATH})")
    try:
        class_idx_to_name = _load_class_map(CLASS_MAP_PATH)
        MODEL["class_idx_to_name"] = class_idx_to_name

        if not os.path.exists(MODEL_PATH):
            raise FileNotFoundError(f"๋ชจ๋ธ ๊ฐ์ค์น ํ์ผ์ด ์์ต๋๋ค: {MODEL_PATH}")

        num_classes = len(class_idx_to_name)
        model = models.efficientnet_b0(weights=None)
        in_features = model.classifier[1].in_features
        model.classifier[1] = nn.Linear(in_features, num_classes)

        # strict=False tolerates auxiliary keys saved alongside the weights.
        state_dict = _extract_state_dict(torch.load(MODEL_PATH, map_location='cpu'))
        model.load_state_dict(state_dict, strict=False)
        model.to(device)
        model.eval()
        MODEL["model"] = model
        MODEL["preprocess"] = _build_preprocess(224)

        print(f"[MODEL INIT] EfficientNetB0 ๋ชจ๋ธ ๋ก๋ ์๋ฃ. ํด๋์ค ์: {num_classes}")
    except Exception as e:
        print(f"[ERROR] EfficientNetB0 ๋ชจ๋ธ ์ด๊ธฐํ ์คํจ. ์์ธ: {type(e).__name__} - {e}")

    # ---- 2. EfficientNetV2-S (plant/forest disease) ----
    print(f"[MODEL INIT] EfficientNetV2-S ๋ชจ๋ธ ๋ก๋ ์์... ({PLANTDISEASE_MODEL_PATH_2})")
    try:
        class_idx_to_name_2 = _load_class_map(PLANTDISEASE_CLASS_MAP_PATH_2)
        MODEL["plantdisease_class_idx_to_name_2"] = class_idx_to_name_2

        if not os.path.exists(PLANTDISEASE_MODEL_PATH_2):
            raise FileNotFoundError(f"ํ์ผ ์์: {PLANTDISEASE_MODEL_PATH_2}")

        num_classes_2 = len(class_idx_to_name_2)
        model_2 = models.efficientnet_v2_s(weights=None)
        in_features_2 = model_2.classifier[1].in_features
        model_2.classifier[1] = nn.Linear(in_features_2, num_classes_2)

        state_dict_2 = _extract_state_dict(torch.load(PLANTDISEASE_MODEL_PATH_2, map_location='cpu'))
        model_2.load_state_dict(state_dict_2, strict=False)
        model_2.to(device)
        model_2.eval()
        MODEL["plantdisease_model_2"] = model_2
        # 384x384 matches the training pipeline of this checkpoint.
        MODEL["plantdisease_preprocess_2"] = _build_preprocess(384)

        print(f"[MODEL INIT] EfficientNetV2-S (Plant) ๋ก๋ ์๋ฃ.")
    except Exception as e:
        print(f"[ERROR] EfficientNetV2-S ์ด๊ธฐํ ์คํจ: {e}")

    # ---- 3. EfficientNetV2-S (bee vs wasp) ----
    print(f"[MODEL INIT] 3. Bee/Wasp ๋ชจ๋ธ ๋ก๋ ์์... ({BEE_WASP_MODEL_PATH})")
    try:
        if not os.path.exists(BEE_WASP_MODEL_PATH):
            raise FileNotFoundError(f"ํ์ผ ์์: {BEE_WASP_MODEL_PATH}")

        # Unified checkpoint: {'model_state_dict': ..., 'class_names': ..., 'input_size': ...}
        checkpoint_bee = torch.load(BEE_WASP_MODEL_PATH, map_location=device)
        bee_classes = checkpoint_bee.get('class_names', [])
        input_size_bee = checkpoint_bee.get('input_size', 384)  # default 384
        if not bee_classes:
            raise ValueError("Checkpoint contains no 'class_names' metadata")
        MODEL["bee_wasp_class_names"] = bee_classes

        model_bee = models.efficientnet_v2_s(weights=None)
        in_features_bee = model_bee.classifier[1].in_features
        model_bee.classifier[1] = nn.Linear(in_features_bee, len(bee_classes))

        # This checkpoint stores its weights under 'model_state_dict'; strict
        # loading is intentional here because the head was rebuilt to match.
        model_bee.load_state_dict(checkpoint_bee['model_state_dict'])
        model_bee.to(device)
        model_bee.eval()
        MODEL["bee_wasp_model"] = model_bee
        MODEL["bee_wasp_preprocess"] = _build_preprocess(input_size_bee)

        print(f"[MODEL INIT] Bee/Wasp ๋ชจ๋ธ ๋ก๋ ์๋ฃ. (Classes: {len(bee_classes)})")
    except Exception as e:
        print(f"[ERROR] Bee/Wasp ๋ชจ๋ธ ์ด๊ธฐํ ์คํจ: {e}")

    # ---- 4. EfficientNetB0 (FGSM-defended, muffin vs chihuahua) ----
    print(f"[MODEL INIT] 4. FGSM ๋ฐฉ์ด ๋ชจ๋ธ ๋ก๋ ์์... ({FGSM_MODEL_PATH})")
    try:
        class_idx_to_name_fgsm = _load_class_map(FGSM_CLASS_MAP_PATH)
        MODEL["fgsm_class_idx_to_name"] = class_idx_to_name_fgsm

        num_classes_fgsm = len(class_idx_to_name_fgsm)
        model_fgsm = models.efficientnet_b0(weights=None)
        in_features_fgsm = model_fgsm.classifier[1].in_features
        model_fgsm.classifier[1] = nn.Linear(in_features_fgsm, num_classes_fgsm)

        state_dict_fgsm = _extract_state_dict(torch.load(FGSM_MODEL_PATH, map_location='cpu'))
        model_fgsm.load_state_dict(state_dict_fgsm, strict=False)
        model_fgsm.to(device)
        model_fgsm.eval()
        MODEL["fgsm_model"] = model_fgsm
        MODEL["fgsm_preprocess"] = _build_preprocess(224)

        print(f"[MODEL INIT] FGSM ๋ฐฉ์ด ๋ชจ๋ธ ๋ก๋ ์๋ฃ.")
    except Exception as e:
        print(f"[ERROR] FGSM ๋ฐฉ์ด ๋ชจ๋ธ ์ด๊ธฐํ ์คํจ: {e}")
| 263 |
+
|
| 264 |
+
def shutdown_models():
    """Drop every loaded model reference at server shutdown and free GPU memory."""
    print("[SERVER SHUTDOWN] ๋ชจ๋ธ ๋ฉ๋ชจ๋ฆฌ ํด์  ์ค...")
    for key in ("model", "plantdisease_model_2", "bee_wasp_model", "fgsm_model"):
        MODEL[key] = None
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
# ==============================================================================
|
| 277 |
+
# ๐ API Endpoints
|
| 278 |
+
# ==============================================================================
|
| 279 |
+
|
| 280 |
+
@router.post("/classify", summary="์ด๋ฏธ์ง ๋ถ๋ฅ (๊ธฐ๋ณธ)", response_description="์์ธก ๊ฒฐ๊ณผ ๋ฐ ์ฑ๊ณต ์ฌ๋ถ")
|
| 281 |
+
async def classify_image(file: UploadFile = File(..., description="๋ถ๋ฅํ ์ด๋ฏธ์ง ํ์ผ (JPG, PNG ๋ฑ)")):
|
| 282 |
+
"""
|
| 283 |
+
์ด๋ฏธ์ง ํ์ผ์ ์
๋ก๋ํ๊ณ EfficientNetB0 ๋ชจ๋ธ์ ์ฌ์ฉํ์ฌ
|
| 284 |
+
ImageNet ํด๋์ค์ ๋ํ Top-5 ๋ถ๋ฅ ๊ฒฐ๊ณผ๋ฅผ JSON ํ์์ผ๋ก ๋ฐํํฉ๋๋ค.
|
| 285 |
+
"""
|
| 286 |
+
|
| 287 |
+
# 1. ํ์ผ ํ์ ๊ฒ์ฆ (์ ํ์ )
|
| 288 |
+
if not file.content_type or not file.content_type.startswith('image/'):
|
| 289 |
+
return {
|
| 290 |
+
"success": False,
|
| 291 |
+
"msg": "์๋ชป๋ ํ์ผ ํ์์
๋๋ค. ์ด๋ฏธ์ง ํ์ผ์ ์
๋ก๋ํด ์ฃผ์ธ์."
|
| 292 |
+
}
|
| 293 |
+
|
| 294 |
+
try:
|
| 295 |
+
# 2. ์
๋ก๋๋ ํ์ผ์ ๋ด์ฉ์ ๋ฉ๋ชจ๋ฆฌ๋ก ์ฝ์ด์ต๋๋ค. (๋น๋๊ธฐ ์ฒ๋ฆฌ)
|
| 296 |
+
file_content = await file.read()
|
| 297 |
+
|
| 298 |
+
# 3. ๋ฐ์ดํธ ๋ฐ์ดํฐ๋ฅผ PIL Image ๊ฐ์ฒด๋ก ๋ณํ
|
| 299 |
+
try:
|
| 300 |
+
img = Image.open(BytesIO(file_content))
|
| 301 |
+
except IOError:
|
| 302 |
+
return {
|
| 303 |
+
"success": False,
|
| 304 |
+
"msg": "์
๋ก๋๋ ํ์ผ์ ๋ด์ฉ์ด ์ ํจํ ์ด๋ฏธ์ง ํ์์ด ์๋๋๋ค."
|
| 305 |
+
}
|
| 306 |
+
|
| 307 |
+
# 4. effinet_basic.py์ ๋ถ๋ฅ ํจ์ ํธ์ถ (์ฌ๊ธฐ์๋ ๊ธฐ์กด ๋ก์ง๋๋ก PIL ์ด๋ฏธ์ง๋ฅผ ๋๊น)
|
| 308 |
+
print(f"[API INFO] ํ์ผ '{file.filename}' ๋ถ๋ฅ ์์...")
|
| 309 |
+
classification_results = classify_image_pil(img)
|
| 310 |
+
print(f"[API INFO] ํ์ผ '{file.filename}' ๋ถ๋ฅ ์๋ฃ.")
|
| 311 |
+
|
| 312 |
+
# 5. ์ฑ๊ณต ๊ฒฐ๊ณผ ๋ฐํ
|
| 313 |
+
return {
|
| 314 |
+
"success": True,
|
| 315 |
+
"msg": "์ด๋ฏธ์ง ๋ถ๋ฅ์ ์ฑ๊ณตํ์ต๋๋ค.",
|
| 316 |
+
"filename": file.filename,
|
| 317 |
+
"predictions": classification_results
|
| 318 |
+
}
|
| 319 |
+
|
| 320 |
+
except RuntimeError as e:
|
| 321 |
+
error_msg = f"๋ชจ๋ธ ๋ถ๋ฅ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}"
|
| 322 |
+
print(f"[ERROR] Classification Runtime Error: {error_msg}")
|
| 323 |
+
return {"success": False, "msg": error_msg}
|
| 324 |
+
except Exception as e:
|
| 325 |
+
error_msg = f"์๋ฒ์์ ์์์น ๋ชปํ ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {type(e).__name__}"
|
| 326 |
+
print(f"[ERROR] An unexpected error occurred: {e}")
|
| 327 |
+
return {"success": False, "msg": error_msg}
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
@router.post("/classify_muffin_chihuahua", summary="๋จธํ/์น์์ ์ด๋ฏธ์ง ๋ถ๋ฅ", response_description="์์ธก ๊ฒฐ๊ณผ ๋ฐ ์ฑ๊ณต ์ฌ๋ถ")
|
| 331 |
+
async def classify_muffin_chihuahua(file: UploadFile = File(..., description="๋ถ๋ฅํ ์ด๋ฏธ์ง ํ์ผ (JPG, PNG ๋ฑ)"), top_k: int = 1):
|
| 332 |
+
"""
|
| 333 |
+
์
๋ก๋๋ ์ด๋ฏธ์ง ํ์ผ์ ๋ฉ๋ชจ๋ฆฌ์ ๋ก๋๋ EfficientNetB0 ๋ชจ๋ธ๋ก ๋ถ๋ฅํ๊ณ ๊ฒฐ๊ณผ๋ฅผ ๋ฐํํฉ๋๋ค.
|
| 334 |
+
"""
|
| 335 |
+
|
| 336 |
+
# 0. ๋ชจ๋ธ ๋ก๋ ์ํ ํ์ธ
|
| 337 |
+
model = MODEL["model"]
|
| 338 |
+
preprocess = MODEL["preprocess"]
|
| 339 |
+
class_idx_to_name = MODEL["class_idx_to_name"]
|
| 340 |
+
|
| 341 |
+
if model is None:
|
| 342 |
+
raise HTTPException(status_code=503, detail="๋ชจ๋ธ์ด ์๋ฒ์ ๋ก๋๋์ง ์์์ต๋๋ค. ๊ด๋ฆฌ์์๊ฒ ๋ฌธ์ํ์ธ์.")
|
| 343 |
+
|
| 344 |
+
# 1. ํ์ผ ํ์ ๊ฒ์ฆ
|
| 345 |
+
if not file.content_type or not file.content_type.startswith('image/'):
|
| 346 |
+
return {"success": False, "msg": "์๋ชป๋ ํ์ผ ํ์์
๋๋ค. ์ด๋ฏธ์ง ํ์ผ์ ์
๋ก๋ํด ์ฃผ์ธ์."}
|
| 347 |
+
|
| 348 |
+
try:
|
| 349 |
+
file_content = await file.read()
|
| 350 |
+
|
| 351 |
+
# 2. ๋ฐ์ดํธ ๋ฐ์ดํฐ๋ฅผ PIL Image ๊ฐ์ฒด๋ก ๋ณํ
|
| 352 |
+
try:
|
| 353 |
+
img = Image.open(BytesIO(file_content)).convert("RGB")
|
| 354 |
+
except IOError:
|
| 355 |
+
return {"success": False, "msg": "์
๋ก๋๋ ํ์ผ์ ๋ด์ฉ์ด ์ ํจํ ์ด๋ฏธ์ง ํ์์ด ์๋๋๋ค."}
|
| 356 |
+
|
| 357 |
+
# 3. ์ถ๋ก ๋ก์ง
|
| 358 |
+
input_tensor = preprocess(img)
|
| 359 |
+
input_batch = input_tensor.unsqueeze(0)
|
| 360 |
+
|
| 361 |
+
# ์ถ๋ก
|
| 362 |
+
with torch.no_grad():
|
| 363 |
+
device = next(model.parameters()).device
|
| 364 |
+
input_batch = input_batch.to(device)
|
| 365 |
+
output = model(input_batch)
|
| 366 |
+
|
| 367 |
+
# ํ๋ฅ ๋ฐ Top-K ์์ธก ๊ฒฐ๊ณผ ๊ณ์ฐ
|
| 368 |
+
probabilities = torch.nn.functional.softmax(output[0], dim=0)
|
| 369 |
+
top_prob, top_indices = torch.topk(probabilities, top_k)
|
| 370 |
+
|
| 371 |
+
# ๊ฒฐ๊ณผ ํฌ๋งคํ
|
| 372 |
+
results = []
|
| 373 |
+
for i in range(top_k):
|
| 374 |
+
index = top_indices[i].item()
|
| 375 |
+
probability = top_prob[i].item()
|
| 376 |
+
class_name = class_idx_to_name.get(index, f"Unknown Class Index {index}")
|
| 377 |
+
|
| 378 |
+
results.append({
|
| 379 |
+
"rank": i + 1,
|
| 380 |
+
"class_index": index,
|
| 381 |
+
"class_name": class_name,
|
| 382 |
+
"probability": f"{probability:.4f}"
|
| 383 |
+
})
|
| 384 |
+
|
| 385 |
+
# 4. ์ฑ๊ณต ๊ฒฐ๊ณผ ๋ฐํ
|
| 386 |
+
return {
|
| 387 |
+
"success": True,
|
| 388 |
+
"msg": "์ด๋ฏธ์ง ๋ถ๋ฅ์ ์ฑ๊ณตํ์ต๋๋ค.",
|
| 389 |
+
"filename": file.filename,
|
| 390 |
+
"predictions": results
|
| 391 |
+
}
|
| 392 |
+
|
| 393 |
+
except Exception as e:
|
| 394 |
+
error_msg = f"์๋ฒ์์ ์์์น ๋ชปํ ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {type(e).__name__} - {e}"
|
| 395 |
+
print(f"[ERROR] An unexpected error occurred: {e}")
|
| 396 |
+
return {"success": False, "msg": error_msg}
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
@router.post("/classify_muffin_chihuahua_fgsm", summary="๋จธํ/์น์์ ์ด๋ฏธ์ง ๋ถ๋ฅ", response_description="์์ธก ๊ฒฐ๊ณผ ๋ฐ ์ฑ๊ณต ์ฌ๋ถ")
|
| 401 |
+
async def classify_muffin_chihuahua_fgsm(file: UploadFile = File(...), top_k: int = 1):
|
| 402 |
+
model, preprocess, class_idx_to_name = MODEL["fgsm_model"], MODEL["fgsm_preprocess"], MODEL["fgsm_class_idx_to_name"]
|
| 403 |
+
if model is None: raise HTTPException(status_code=503, detail="๋ชจ๋ธ์ด ๋ก๋๋์ง ์์์ต๋๋ค.")
|
| 404 |
+
|
| 405 |
+
try:
|
| 406 |
+
img = Image.open(BytesIO(await file.read())).convert("RGB")
|
| 407 |
+
input_batch = preprocess(img).unsqueeze(0).to(next(model.parameters()).device)
|
| 408 |
+
with torch.no_grad():
|
| 409 |
+
output = model(input_batch)
|
| 410 |
+
top_prob, top_indices = torch.topk(torch.nn.functional.softmax(output[0], dim=0), top_k)
|
| 411 |
+
|
| 412 |
+
results = [{"rank": i + 1, "class_index": top_indices[i].item(), "class_name": class_idx_to_name.get(top_indices[i].item(), "Unknown"), "probability": f"{top_prob[i].item():.4f}"} for i in range(top_k)]
|
| 413 |
+
return {"success": True, "filename": file.filename, "predictions": results}
|
| 414 |
+
except Exception as e:
|
| 415 |
+
return {"success": False, "msg": str(e)}
|
| 416 |
+
|
| 417 |
+
@router.post("/classify_plant_disease", summary="์๋ฌผ๋ณ์ถฉํด ์ด๋ฏธ์ง ๋ถ๋ฅ", response_description="์์ธก ๊ฒฐ๊ณผ ๋ฐ ์ฑ๊ณต ์ฌ๋ถ")
|
| 418 |
+
async def classify_plant_disease(file: UploadFile = File(..., description="๋ถ๋ฅํ ์ด๋ฏธ์ง ํ์ผ (JPG, PNG ๋ฑ)"), top_k: int = 1):
|
| 419 |
+
"""
|
| 420 |
+
์
๋ก๋๋ ์ด๋ฏธ์ง ํ์ผ์ ๋ฉ๋ชจ๋ฆฌ์ ๋ก๋๋ EfficientNetV2-S ๋ชจ๋ธ๋ก ๋ถ๋ฅํ๊ณ ๊ฒฐ๊ณผ๋ฅผ ๋ฐํํฉ๋๋ค.
|
| 421 |
+
"""
|
| 422 |
+
# 0. ๋ชจ๋ธ ๋ก๋ ์ํ ํ์ธ (๋ณ์๋ช
์์ )
|
| 423 |
+
model = MODEL["plantdisease_model_2"]
|
| 424 |
+
preprocess = MODEL["plantdisease_preprocess_2"]
|
| 425 |
+
class_idx_to_name = MODEL["plantdisease_class_idx_to_name_2"]
|
| 426 |
+
|
| 427 |
+
if model is None:
|
| 428 |
+
raise HTTPException(status_code=503, detail="๋ชจ๋ธ์ด ์๋ฒ์ ๋ก๋๋์ง ์์์ต๋๋ค. ๊ด๋ฆฌ์์๊ฒ ๋ฌธ์ํ์ธ์.")
|
| 429 |
+
|
| 430 |
+
# 1. ํ์ผ ํ์ ๊ฒ์ฆ
|
| 431 |
+
if not file.content_type or not file.content_type.startswith('image/'):
|
| 432 |
+
return {"success": False, "msg": "์๋ชป๋ ํ์ผ ํ์์
๋๋ค. ์ด๋ฏธ์ง ํ์ผ์ ์
๋ก๋ํด ์ฃผ์ธ์."}
|
| 433 |
+
|
| 434 |
+
try:
|
| 435 |
+
file_content = await file.read()
|
| 436 |
+
|
| 437 |
+
# 2. ๋ฐ์ดํธ ๋ฐ์ดํฐ๋ฅผ PIL Image ๊ฐ์ฒด๋ก ๋ณํ
|
| 438 |
+
try:
|
| 439 |
+
img = Image.open(BytesIO(file_content)).convert("RGB")
|
| 440 |
+
except IOError:
|
| 441 |
+
return {"success": False, "msg": "์
๋ก๋๋ ํ์ผ์ ๋ด์ฉ์ด ์ ํจํ ์ด๋ฏธ์ง ํ์์ด ์๋๋๋ค."}
|
| 442 |
+
|
| 443 |
+
# 3. ์ถ๋ก ๋ก์ง
|
| 444 |
+
input_tensor = preprocess(img)
|
| 445 |
+
input_batch = input_tensor.unsqueeze(0)
|
| 446 |
+
|
| 447 |
+
# ์ถ๋ก
|
| 448 |
+
with torch.no_grad():
|
| 449 |
+
device = next(model.parameters()).device
|
| 450 |
+
input_batch = input_batch.to(device)
|
| 451 |
+
output = model(input_batch)
|
| 452 |
+
|
| 453 |
+
# ํ๋ฅ ๋ฐ Top-K ์์ธก ๊ฒฐ๊ณผ ๊ณ์ฐ
|
| 454 |
+
probabilities = torch.nn.functional.softmax(output[0], dim=0)
|
| 455 |
+
top_prob, top_indices = torch.topk(probabilities, top_k)
|
| 456 |
+
|
| 457 |
+
# ๊ฒฐ๊ณผ ํฌ๋งคํ
|
| 458 |
+
results = []
|
| 459 |
+
for i in range(top_k):
|
| 460 |
+
index = top_indices[i].item()
|
| 461 |
+
probability = top_prob[i].item()
|
| 462 |
+
class_name = class_idx_to_name.get(index, f"Unknown Class Index {index}")
|
| 463 |
+
|
| 464 |
+
results.append({
|
| 465 |
+
"rank": i + 1,
|
| 466 |
+
"class_index": index,
|
| 467 |
+
"class_name": class_name,
|
| 468 |
+
"probability": f"{probability:.4f}"
|
| 469 |
+
})
|
| 470 |
+
|
| 471 |
+
# 4. ์ฑ๊ณต ๊ฒฐ๊ณผ ๋ฐํ
|
| 472 |
+
return {
|
| 473 |
+
"success": True,
|
| 474 |
+
"msg": "์ด๋ฏธ์ง ๋ถ๋ฅ์ ์ฑ๊ณตํ์ต๋๋ค.",
|
| 475 |
+
"filename": file.filename,
|
| 476 |
+
"predictions": results
|
| 477 |
+
}
|
| 478 |
+
|
| 479 |
+
except Exception as e:
|
| 480 |
+
error_msg = f"์๋ฒ์์ ์์์น ๋ชปํ ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {type(e).__name__} - {e}"
|
| 481 |
+
print(f"[ERROR] An unexpected error occurred: {e}")
|
| 482 |
+
return {"success": False, "msg": error_msg}
|
| 483 |
+
|
| 484 |
+
@router.post("/classify_bee_wasp")
|
| 485 |
+
async def classify_bee_wasp(file: UploadFile = File(...)):
|
| 486 |
+
"""
|
| 487 |
+
๋ฒ(Bee) vs ๋ง๋ฒ(Wasp) ๋ฐ ๊ธฐํ ๊ณค์ถฉ ๋ถ๋ฅ ์๋ํฌ์ธํธ
|
| 488 |
+
- ๋ชจ๋ธ: EfficientNetV2-S
|
| 489 |
+
"""
|
| 490 |
+
bee_wasp_model = MODEL["bee_wasp_model"]
|
| 491 |
+
bee_wasp_transform = MODEL["bee_wasp_preprocess"]
|
| 492 |
+
bee_wasp_classes = MODEL["bee_wasp_class_names"]
|
| 493 |
+
if bee_wasp_model is None:
|
| 494 |
+
raise HTTPException(status_code=503, detail="๋ชจ๋ธ์ด ๋ก๋๋์ง ์์์ต๋๋ค.")
|
| 495 |
+
|
| 496 |
+
# 1. ์ด๋ฏธ์ง ์ฝ๊ธฐ
|
| 497 |
+
try:
|
| 498 |
+
contents = await file.read()
|
| 499 |
+
image = Image.open(BytesIO(contents)).convert("RGB")
|
| 500 |
+
except Exception:
|
| 501 |
+
raise HTTPException(status_code=400, detail="์ ํจํ์ง ์์ ์ด๋ฏธ์ง ํ์ผ์
๋๋ค.")
|
| 502 |
+
|
| 503 |
+
# 2. ์ ์ฒ๋ฆฌ
|
| 504 |
+
try:
|
| 505 |
+
input_tensor = bee_wasp_transform(image).unsqueeze(0).to(device)
|
| 506 |
+
except Exception as e:
|
| 507 |
+
raise HTTPException(status_code=500, detail=f"์ด๋ฏธ์ง ์ ์ฒ๋ฆฌ ์ค๋ฅ: {str(e)}")
|
| 508 |
+
|
| 509 |
+
# 3. ์ถ๋ก
|
| 510 |
+
with torch.no_grad():
|
| 511 |
+
outputs = bee_wasp_model(input_tensor)
|
| 512 |
+
probabilities = torch.nn.functional.softmax(outputs[0], dim=0)
|
| 513 |
+
|
| 514 |
+
# 4. ๊ฒฐ๊ณผ ํฌ๋งคํ
(Top 3 ํ๋ฅ ๋ฐํ)
|
| 515 |
+
top_k = min(3, len(bee_wasp_classes))
|
| 516 |
+
top_prob, top_indices = torch.topk(probabilities, top_k)
|
| 517 |
+
|
| 518 |
+
results = []
|
| 519 |
+
for i in range(top_k):
|
| 520 |
+
idx = top_indices[i].item()
|
| 521 |
+
score = top_prob[i].item()
|
| 522 |
+
|
| 523 |
+
results.append({
|
| 524 |
+
"rank": i + 1,
|
| 525 |
+
"class_name": bee_wasp_classes[idx],
|
| 526 |
+
"confidence": f"{score * 100:.2f}%",
|
| 527 |
+
"probability": score
|
| 528 |
+
})
|
| 529 |
+
|
| 530 |
+
return {
|
| 531 |
+
"filename": file.filename,
|
| 532 |
+
"top_prediction": results[0]["class_name"],
|
| 533 |
+
"predictions": results
|
| 534 |
+
}
|
router/llamindex_router.py
ADDED
|
@@ -0,0 +1,275 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
import base64
|
| 4 |
+
import time
|
| 5 |
+
from typing import Optional, Dict, List
|
| 6 |
+
|
| 7 |
+
from fastapi import APIRouter, UploadFile, File, Form, HTTPException
|
| 8 |
+
from fastapi.responses import StreamingResponse
|
| 9 |
+
|
| 10 |
+
# ๐ฆ LangChain Imports
|
| 11 |
+
from langchain_huggingface import HuggingFaceEmbeddings
|
| 12 |
+
from langchain_community.vectorstores import FAISS
|
| 13 |
+
from langchain_community.document_loaders import DirectoryLoader, TextLoader, UnstructuredFileLoader
|
| 14 |
+
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
| 15 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
| 16 |
+
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
|
| 17 |
+
from langchain_core.messages import HumanMessage, SystemMessage
|
| 18 |
+
from langchain_core.runnables.history import RunnableWithMessageHistory
|
| 19 |
+
from langchain_community.chat_message_histories import ChatMessageHistory
|
| 20 |
+
from langchain_core.runnables import ConfigurableFieldSpec
|
| 21 |
+
|
| 22 |
+
router = APIRouter(tags=["LangChain_Refactor"])
|
| 23 |
+
|
| 24 |
+
# ==============================================================================
|
| 25 |
+
# โ๏ธ 1. ์ค์ ๋ฐ ์ด๊ธฐํ (Embedding & LLM)
|
| 26 |
+
# ==============================================================================
|
| 27 |
+
|
| 28 |
+
# 1-1. Embedding Model (์์ฒญํ์ ๋ก์ปฌ ๋ชจ๋ธ ๊ทธ๋๋ก ์ ์ง)
|
| 29 |
+
# huggingface-cli login ํน์ HF_TOKEN ํ๊ฒฝ๋ณ์ ํ์
|
| 30 |
+
hf_token = os.getenv("HF_TOKEN")
|
| 31 |
+
|
| 32 |
+
embedding_model = HuggingFaceEmbeddings(
|
| 33 |
+
model_name="google/embeddinggemma-300m",
|
| 34 |
+
model_kwargs={
|
| 35 |
+
"device": "cpu", # CPU ์ฌ์ฉ
|
| 36 |
+
"trust_remote_code": True,
|
| 37 |
+
"token": hf_token
|
| 38 |
+
},
|
| 39 |
+
encode_kwargs={"normalize_embeddings": True}
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
# 1-2. LLM Model (Gemini 2.5 Flash)
|
| 43 |
+
llm = ChatGoogleGenerativeAI(
|
| 44 |
+
model="gemini-2.5-flash",
|
| 45 |
+
temperature=0.1,
|
| 46 |
+
google_api_key=os.getenv("GOOGLE_API_KEY"),
|
| 47 |
+
convert_system_message_to_human=True
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
# ==============================================================================
|
| 51 |
+
# ๐พ 2. ๋ฐ์ดํฐ ์ ์ฅ์ (Vector DB & Session Memory)
|
| 52 |
+
# ==============================================================================
|
| 53 |
+
|
| 54 |
+
# ์ ์ญ ๋ฒกํฐ ์คํ ์ด (FAISS)
|
| 55 |
+
VECTOR_STORE = None
|
| 56 |
+
DATA_DIR = "./data"
|
| 57 |
+
|
| 58 |
+
# ์ธ์
์ ์ฅ์: { "session_id": { "history": ChatMessageHistory, "last_access": timestamp } }
|
| 59 |
+
SESSION_STORE: Dict[str, Dict] = {}
|
| 60 |
+
SESSION_TIMEOUT = 3600 # 1์๊ฐ
|
| 61 |
+
|
| 62 |
+
def init_vector_db():
|
| 63 |
+
"""์๋ฒ ์์ ์ ๋๋ ํ์ ์ ๋ฒกํฐ DB ์ด๊ธฐํ"""
|
| 64 |
+
global VECTOR_STORE
|
| 65 |
+
|
| 66 |
+
if not os.path.exists(DATA_DIR):
|
| 67 |
+
os.makedirs(DATA_DIR)
|
| 68 |
+
with open(f"{DATA_DIR}/readme.txt", "w", encoding="utf-8") as f:
|
| 69 |
+
f.write("Initialize data directory.")
|
| 70 |
+
|
| 71 |
+
# ๋ฌธ์ ๋ก๋
|
| 72 |
+
print("๐ [LangChain] ๋ฌธ์ ๋ก๋ฉ ๋ฐ ์ธ๋ฑ์ฑ ์์...")
|
| 73 |
+
# DirectoryLoader๋ ํด๋ ๋ด ํ์ผ๋ค์ ์ค์บํฉ๋๋ค.
|
| 74 |
+
loader = DirectoryLoader(DATA_DIR, glob="*", show_progress=True, loader_cls=TextLoader)
|
| 75 |
+
try:
|
| 76 |
+
docs = loader.load()
|
| 77 |
+
except Exception:
|
| 78 |
+
# ํ
์คํธ ํ์ผ์ด ์๋ ๊ฒฝ์ฐ๋ฅผ ๋๋นํด ์์ธ์ฒ๋ฆฌ (์ค์ ๋ก UnstructuredLoader ๋ฑ ์ฌ์ฉ ๊ถ์ฅ)
|
| 79 |
+
docs = []
|
| 80 |
+
|
| 81 |
+
if not docs:
|
| 82 |
+
print("โ ๏ธ [LangChain] ๋ฌธ์๊ฐ ์์ต๋๋ค. ๋น ์ธ๋ฑ์ค๋ฅผ ์์ฑํฉ๋๋ค.")
|
| 83 |
+
# ๋น ์ธ๋ฑ์ค ์์ฑ ํธ๋ฆญ
|
| 84 |
+
texts = ["Initial document"]
|
| 85 |
+
VECTOR_STORE = FAISS.from_texts(texts, embedding_model)
|
| 86 |
+
return
|
| 87 |
+
|
| 88 |
+
# ์ฒญํน (Chunking)
|
| 89 |
+
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
|
| 90 |
+
splits = text_splitter.split_documents(docs)
|
| 91 |
+
|
| 92 |
+
# FAISS ์ธ๋ฑ์ค ์์ฑ
|
| 93 |
+
VECTOR_STORE = FAISS.from_documents(splits, embedding_model)
|
| 94 |
+
print("โ
[LangChain] FAISS ์ธ๋ฑ์ค ์์ฑ ์๋ฃ!")
|
| 95 |
+
|
| 96 |
+
def get_session_history(session_id: str):
|
| 97 |
+
"""
|
| 98 |
+
์ธ์
ID ๊ธฐ๋ฐ ํ์คํ ๋ฆฌ ๋ฐํ + ๋ง๋ฃ๋ ์ธ์
์ ๋ฆฌ (Garbage Collection)
|
| 99 |
+
"""
|
| 100 |
+
current_time = time.time()
|
| 101 |
+
|
| 102 |
+
# 1. ๋ง๋ฃ๋ ์ธ์
์ ๋ฆฌ (์์ฒญ์ด ๋ค์ด์ฌ ๋๋ง๋ค ์ฒดํฌ)
|
| 103 |
+
expired_sessions = [
|
| 104 |
+
sid for sid, data in SESSION_STORE.items()
|
| 105 |
+
if current_time - data["last_access"] > SESSION_TIMEOUT
|
| 106 |
+
]
|
| 107 |
+
for sid in expired_sessions:
|
| 108 |
+
del SESSION_STORE[sid]
|
| 109 |
+
print(f"๐๏ธ [System] Timeout ์ธ์
์ญ์ : {sid}")
|
| 110 |
+
|
| 111 |
+
# 2. ์ธ์
์กฐํ ๋๋ ์์ฑ
|
| 112 |
+
if session_id not in SESSION_STORE:
|
| 113 |
+
print(f"โจ [System] ์ ์ธ์
์์ฑ: {session_id}")
|
| 114 |
+
SESSION_STORE[session_id] = {
|
| 115 |
+
"history": ChatMessageHistory(),
|
| 116 |
+
"last_access": current_time
|
| 117 |
+
}
|
| 118 |
+
|
| 119 |
+
# 3. ๋ง์ง๋ง ์ ์ ์๊ฐ ๊ฐฑ์
|
| 120 |
+
SESSION_STORE[session_id]["last_access"] = current_time
|
| 121 |
+
|
| 122 |
+
return SESSION_STORE[session_id]["history"]
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
# ==============================================================================
|
| 126 |
+
# ๐ 3. API Endpoints
|
| 127 |
+
# ==============================================================================
|
| 128 |
+
|
| 129 |
+
@router.on_event("startup")
|
| 130 |
+
async def startup_event():
|
| 131 |
+
init_vector_db()
|
| 132 |
+
|
| 133 |
+
@router.post("/upload")
|
| 134 |
+
async def upload_document(file: UploadFile = File(...)):
|
| 135 |
+
"""๋ฌธ์๋ฅผ ์
๋ก๋ํ๊ณ ๋ฒก๏ฟฝ๏ฟฝ DB์ ์ฆ์ ์ถ๊ฐ"""
|
| 136 |
+
global VECTOR_STORE
|
| 137 |
+
try:
|
| 138 |
+
save_path = f"{DATA_DIR}/{file.filename}"
|
| 139 |
+
with open(save_path, "wb") as buffer:
|
| 140 |
+
shutil.copyfileobj(file.file, buffer)
|
| 141 |
+
|
| 142 |
+
print(f"๐พ [Upload] ์ ์ฅ ์๋ฃ: {save_path}")
|
| 143 |
+
|
| 144 |
+
# LangChain ๋ฐฉ์์ผ๋ก ๋ก๋ & ์ถ๊ฐ
|
| 145 |
+
loader = TextLoader(save_path) # TXT ๊ธฐ์ค, PDF๋ฉด PyPDFLoader ๋ฑ ์ฌ์ฉ
|
| 146 |
+
docs = loader.load()
|
| 147 |
+
|
| 148 |
+
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
|
| 149 |
+
splits = text_splitter.split_documents(docs)
|
| 150 |
+
|
| 151 |
+
if VECTOR_STORE is None:
|
| 152 |
+
VECTOR_STORE = FAISS.from_documents(splits, embedding_model)
|
| 153 |
+
else:
|
| 154 |
+
VECTOR_STORE.add_documents(splits) # ๐ ์คํ ์ค์ธ DB์ ์ถ๊ฐ
|
| 155 |
+
|
| 156 |
+
return {"success": True, "message": "ํ์ผ์ด ์ง์ ๋ฒ ์ด์ค์ ์ถ๊ฐ๋์์ต๋๋ค."}
|
| 157 |
+
|
| 158 |
+
except Exception as e:
|
| 159 |
+
return {"success": False, "msg": str(e)}
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
@router.post("/query_stream")
|
| 163 |
+
async def query_stream(
|
| 164 |
+
question: str = Form(...),
|
| 165 |
+
session_id: str = Form(...),
|
| 166 |
+
files: List[UploadFile] = File(None, alias="file")
|
| 167 |
+
):
|
| 168 |
+
"""
|
| 169 |
+
LangChain RAG + History + Multimodal Streaming Endpoint
|
| 170 |
+
"""
|
| 171 |
+
global VECTOR_STORE
|
| 172 |
+
|
| 173 |
+
# 1. RAG ๊ฒ์ (Context ์ถ์ถ)
|
| 174 |
+
context_text = ""
|
| 175 |
+
source_docs = []
|
| 176 |
+
|
| 177 |
+
if VECTOR_STORE:
|
| 178 |
+
retriever = VECTOR_STORE.as_retriever(search_kwargs={"k": 3})
|
| 179 |
+
source_docs = retriever.invoke(question)
|
| 180 |
+
context_text = "\n\n".join([doc.page_content for doc in source_docs])
|
| 181 |
+
|
| 182 |
+
# 2. ์์คํ
ํ๋กฌํํธ ๊ตฌ์ฑ (ํ๋ฅด์๋ + Context)
|
| 183 |
+
system_prompt_text = f"""
|
| 184 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 185 |
+
|
| 186 |
+
[์ง์ ์ฌํญ]
|
| 187 |
+
1. ์ฌ์ฉ์์ **์ง๋ฌธ**๊ณผ **์ฒจ๋ถ ํ์ผ(์ด๋ฏธ์ง/ํ
์คํธ)**, ๊ทธ๋ฆฌ๊ณ ์๋ **[์ฐธ๊ณ ์๋ฃ]**๋ฅผ ๋ชจ๋ ํ์ธํด.
|
| 188 |
+
2. **[์ฐธ๊ณ ์๋ฃ]๊ฐ ์ง๋ฌธ์ด๋ ํ์ผ๊ณผ ๊ด๋ จ์ด ์๋ค๋ฉด**, ์ด๋ฅผ ์ ๊ทน์ ์ผ๋ก ์ธ์ฉํด์ ์ ๋ฌธ์ ์ผ๋ก ๋ต๋ณํด. (ํ์ผ + RAG)
|
| 189 |
+
3. ๋ง์ฝ **[์ฐธ๊ณ ์๋ฃ]๊ฐ ์ง๋ฌธ๊ณผ ์ ํ ์๋ฑํ ๋ด์ฉ์ด๋ผ๋ฉด**, ๊ณผ๊ฐํ ๋ฌด์ํ๊ณ ํ์ผ ๋ด์ฉ๊ณผ ๋์ ๋ฐฐ๊ฒฝ์ง์๋ง์ผ๋ก ๋ต๋ณํด. (LLM ํ๋จ)
|
| 190 |
+
4. ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ, ๋ฐ๋ง๋ก ์น์ ํ๊ฒ ํด์ค.
|
| 191 |
+
5. ๋์ (์์) ๊ฐ์ ์ง๋ฌธ์ ์์ด์ค.
|
| 192 |
+
|
| 193 |
+
[๊ฐ์ /ํ๋ ํ๊ทธ ๊ท์น]
|
| 194 |
+
- ๋ต๋ณ ์ค๊ฐ/๋์ [[FaceSmile1]], [[DoJump]] ๊ฐ์ ํ๊ทธ๋ฅผ ์ ์ ํ ์์ด์ ์๋๊ฐ ์๊ฒ ํํํด.
|
| 195 |
+
|
| 196 |
+
[์ฐธ๊ณ ์๋ฃ(RAG Context)]:
|
| 197 |
+
{context_text}
|
| 198 |
+
"""
|
| 199 |
+
|
| 200 |
+
# 3. ์ฌ์ฉ์ ๋ฉ์์ง ๊ตฌ์ฑ (๋ฉํฐ๋ชจ๋ฌ ์ฒ๋ฆฌ)
|
| 201 |
+
user_content = [{"type": "text", "text": question}]
|
| 202 |
+
|
| 203 |
+
# ๋ค์ค ํ์ผ ์ฒ๋ฆฌ
|
| 204 |
+
if files:
|
| 205 |
+
for file in files:
|
| 206 |
+
content_type = file.content_type or "application/octet-stream"
|
| 207 |
+
|
| 208 |
+
# (A) ํ
์คํธ ํ์ผ ์ฒ๋ฆฌ (ํ ํฐ ์ ์ฝ์ ์ํด ํ
์คํธ๋ก ๋ณํํ์ฌ ์ ์ก)
|
| 209 |
+
if content_type.startswith("text/") or file.filename.endswith(
|
| 210 |
+
(".txt", ".md", ".py", ".json", ".csv", ".html", ".css",
|
| 211 |
+
".js", ".jsx", ".ts", ".tsx", ".yaml", ".yml", ".xml", ".ini",
|
| 212 |
+
".sh", ".bat", ".c", ".cpp", ".h", ".java", ".sql")
|
| 213 |
+
):
|
| 214 |
+
try:
|
| 215 |
+
text_bytes = await file.read()
|
| 216 |
+
# ์ธ์ฝ๋ฉ ์ถ๋ก (๊ธฐ๋ณธ utf-8)
|
| 217 |
+
text_content = text_bytes.decode("utf-8", errors="replace")
|
| 218 |
+
|
| 219 |
+
user_content.append({
|
| 220 |
+
"type": "text",
|
| 221 |
+
"text": f"\n\n[File: {file.filename}]\n{text_content}"
|
| 222 |
+
})
|
| 223 |
+
except Exception as e:
|
| 224 |
+
print(f"โ ๏ธ [File Error] ํ
์คํธ ํ์ผ ์ฝ๊ธฐ ์คํจ ({file.filename}): {e}")
|
| 225 |
+
|
| 226 |
+
# (B) ์ด๋ฏธ์ง, PDF, ๋น๋์ค ๋ฑ (Base64 ์ธ์ฝ๋ฉ ์ ์ก)
|
| 227 |
+
else:
|
| 228 |
+
# Gemini๋ PDF, Video, Audio ๋ฑ์ inline data๋ก ๋ฐ์ ์ ์์ (LangChain์ image_url๋ก ๋งคํ)
|
| 229 |
+
file_bytes = await file.read()
|
| 230 |
+
encoded_data = base64.b64encode(file_bytes).decode("utf-8")
|
| 231 |
+
|
| 232 |
+
user_content.append({
|
| 233 |
+
"type": "image_url",
|
| 234 |
+
"image_url": {"url": f"data:{content_type};base64,{encoded_data}"}
|
| 235 |
+
})
|
| 236 |
+
|
| 237 |
+
# 4. ํ๋กฌํํธ ํ
ํ๋ฆฟ ์ ์
|
| 238 |
+
prompt = ChatPromptTemplate.from_messages([
|
| 239 |
+
("system", system_prompt_text),
|
| 240 |
+
MessagesPlaceholder(variable_name="history"), # ๋ํ ๋ด์ญ
|
| 241 |
+
MessagesPlaceholder(variable_name="user_content"), # ๋ฉํฐ๋ชจ๋ฌ ๋ฉ์์ง๊ฐ ๋ค์ด๊ฐ ์๋ฆฌ
|
| 242 |
+
])
|
| 243 |
+
|
| 244 |
+
# 5. Chain ์์ฑ (Prompt -> LLM)
|
| 245 |
+
chain = prompt | llm
|
| 246 |
+
|
| 247 |
+
# 6. History ๊ธฐ๋ฅ์ด ์ถ๊ฐ๋ Runnable ์์ฑ
|
| 248 |
+
chain_with_history = RunnableWithMessageHistory(
|
| 249 |
+
chain,
|
| 250 |
+
get_session_history,
|
| 251 |
+
input_messages_key="user_content",
|
| 252 |
+
history_messages_key="history"
|
| 253 |
+
)
|
| 254 |
+
|
| 255 |
+
# 7. ์คํธ๋ฆฌ๋ฐ ์๋ต ์์ฑ
|
| 256 |
+
async def event_generator():
|
| 257 |
+
# (1) ๋ต๋ณ ์คํธ๋ฆฌ๋ฐ
|
| 258 |
+
# ๐ user_content ๋ฆฌ์คํธ๋ฅผ HumanMessage ๊ฐ์ฒด๋ก ๊ฐ์ธ์ ์ ๋ฌ
|
| 259 |
+
current_message = HumanMessage(content=user_content)
|
| 260 |
+
|
| 261 |
+
async for token in chain_with_history.astream(
|
| 262 |
+
{"user_content": [current_message]}, # ๐ ๋ฆฌ์คํธ ํํ๋ก ์ ๋ฌ
|
| 263 |
+
config={"configurable": {"session_id": session_id}}
|
| 264 |
+
):
|
| 265 |
+
# ChatGoogleGenerativeAI๋ AIMessageChunk๋ฅผ ๋ฐํํ๋ฏ๋ก content๋ง ์ถ์ถ
|
| 266 |
+
yield token.content
|
| 267 |
+
|
| 268 |
+
# (2) ์ฐธ๊ณ ์๋ฃ(Sources) ๋ถ์ด๊ธฐ
|
| 269 |
+
if source_docs:
|
| 270 |
+
yield "\n\n-------------------\n[์ฐธ๊ณ ์๋ฃ (LangChain RAG)]:\n"
|
| 271 |
+
for doc in source_docs:
|
| 272 |
+
preview = doc.page_content.replace("\n", " ")[:50]
|
| 273 |
+
yield f"- {preview}...\n"
|
| 274 |
+
|
| 275 |
+
return StreamingResponse(event_generator(), media_type="text/plain")
|
router/llamindex_router_bk_20251231.py
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, HTTPException, UploadFile, File, Form
|
| 2 |
+
from fastapi.responses import StreamingResponse
|
| 3 |
+
from typing import Optional
|
| 4 |
+
import os
|
| 5 |
+
import shutil
|
| 6 |
+
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings, PromptTemplate
|
| 7 |
+
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
|
| 8 |
+
from llama_index.llms.gemini import Gemini
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
from llama_index.core.memory import ChatMemoryBuffer
|
| 11 |
+
from llama_index.multi_modal_llms.gemini import GeminiMultiModal
|
| 12 |
+
from llama_index.core.llms import ChatMessage, MessageRole
|
| 13 |
+
import base64
|
| 14 |
+
from llama_index.core.schema import ImageDocument
|
| 15 |
+
import tempfile
|
| 16 |
+
import time
|
| 17 |
+
import google.generativeai as genai
|
| 18 |
+
from PIL import Image
|
| 19 |
+
import io
|
| 20 |
+
|
| 21 |
+
router = APIRouter(
|
| 22 |
+
tags=["LlamaIndex"]
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
| 26 |
+
|
| 27 |
+
# ๐ก [๋ณ๊ฒฝ] ์์ง ๋์ '์ธ๋ฑ์ค(Index)' ์์ฒด๋ฅผ ์ ์ญ ๋ณ์๋ก ์ ์ฅํฉ๋๋ค.
|
| 28 |
+
# ์ธ๋ฑ์ค๊ฐ ์์ด์ผ ์๋ก์ด ๋ฌธ์๋ฅผ insert ํ ์ ์๊ธฐ ๋๋ฌธ์
๋๋ค.
|
| 29 |
+
GLOBAL_INDEX = None
|
| 30 |
+
|
| 31 |
+
def get_or_create_index():
|
| 32 |
+
"""
|
| 33 |
+
์ธ๋ฑ์ค๊ฐ ์์ผ๋ฉด ์์ฑ(์ด๊ธฐํ)ํ๊ณ , ์์ผ๋ฉด ๋ฐํํฉ๋๋ค.
|
| 34 |
+
"""
|
| 35 |
+
global GLOBAL_INDEX
|
| 36 |
+
|
| 37 |
+
if GLOBAL_INDEX is not None:
|
| 38 |
+
return GLOBAL_INDEX
|
| 39 |
+
|
| 40 |
+
print("๐ [System] LlamaIndex ์ธ๋ฑ์ค ์ด๊ธฐํ ์์...")
|
| 41 |
+
|
| 42 |
+
google_api_key = os.getenv("GOOGLE_API_KEY")
|
| 43 |
+
if not google_api_key:
|
| 44 |
+
raise ValueError("Google API Key๊ฐ ์์ต๋๋ค.")
|
| 45 |
+
|
| 46 |
+
# 1. ๋ชจ๋ธ ์ค์
|
| 47 |
+
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-m3", device="cpu")
|
| 48 |
+
llm = Gemini(model="models/gemini-2.5-flash", api_key=google_api_key, temperature=0.1)
|
| 49 |
+
|
| 50 |
+
Settings.embed_model = embed_model
|
| 51 |
+
Settings.llm = llm
|
| 52 |
+
|
| 53 |
+
# 2. ๋ฐ์ดํฐ ํด๋ ํ์ธ ๋ฐ ๋๋ฏธ ๋ฐ์ดํฐ ์์ฑ
|
| 54 |
+
if not os.path.exists("./data"):
|
| 55 |
+
os.makedirs("./data")
|
| 56 |
+
# ์ด๊ธฐ ๋ฐ์ดํฐ๊ฐ ์์ผ๋ฉด ์๋ฌ๊ฐ ๋ ์ ์์ผ๋ฏ๋ก ๋๋ฏธ ํ์ผ ์์ฑ
|
| 57 |
+
if not os.listdir("./data"):
|
| 58 |
+
with open("./data/readme.txt", "w", encoding="utf-8") as f:
|
| 59 |
+
f.write("์ด๊ณณ์ ์
๋ก๋๋ ๋ฌธ์๊ฐ ์ ์ฅ๋๋ ๊ณณ์
๋๋ค.")
|
| 60 |
+
|
| 61 |
+
# 3. ๋ฌธ์ ๋ก๋ ๋ฐ ์ธ๋ฑ์ฑ
|
| 62 |
+
print("๐ [Data] ๊ธฐ์กด ๋ฌธ์ ๋ก๋ฉ ๋ฐ ์ธ๋ฑ์ฑ...")
|
| 63 |
+
documents = SimpleDirectoryReader("./data").load_data()
|
| 64 |
+
index = VectorStoreIndex.from_documents(documents)
|
| 65 |
+
|
| 66 |
+
print("โ
[System] ์ธ๋ฑ์ค ์์ฑ ์๋ฃ!")
|
| 67 |
+
GLOBAL_INDEX = index
|
| 68 |
+
return GLOBAL_INDEX
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
@router.get("/check-key")
|
| 72 |
+
async def check_key():
|
| 73 |
+
if os.getenv("GOOGLE_API_KEY"):
|
| 74 |
+
return {"status": "ok", "message": "API Key is loaded."}
|
| 75 |
+
return {"status": "error", "message": "API Key not found."}
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
# ==============================================================================
|
| 79 |
+
# ๐ [์ ๊ท ๊ธฐ๋ฅ] ๋ฌธ์ ์
๋ก๋ ๋ฐ ์ฆ์ ๋ฐ์ API
|
| 80 |
+
# ==============================================================================
|
| 81 |
+
@router.post("/upload")
|
| 82 |
+
async def upload_document(file: UploadFile = File(...)):
|
| 83 |
+
"""
|
| 84 |
+
ํ์ผ์ ์
๋ก๋ํ๋ฉด ./data ์ ์ ์ฅํ๊ณ , ์คํ ์ค์ธ LlamaIndex์ ์ฆ์ ์ถ๊ฐํฉ๋๋ค.
|
| 85 |
+
"""
|
| 86 |
+
try:
|
| 87 |
+
# 1. ํ์ผ ์์คํ
์ ์ ์ฅ (์ฌ์์ ์ ์ ์ง๋ฅผ ์ํด)
|
| 88 |
+
save_path = f"./data/{file.filename}"
|
| 89 |
+
|
| 90 |
+
# ํ์ผ ์ฐ๊ธฐ
|
| 91 |
+
with open(save_path, "wb") as buffer:
|
| 92 |
+
shutil.copyfileobj(file.file, buffer)
|
| 93 |
+
|
| 94 |
+
print(f"๐พ [Upload] ํ์ผ ์ ์ฅ ์๋ฃ: {save_path}")
|
| 95 |
+
|
| 96 |
+
# 2. ์คํ ์ค์ธ ์ธ๋ฑ์ค ๊ฐ์ ธ์ค๊ธฐ
|
| 97 |
+
index = get_or_create_index()
|
| 98 |
+
|
| 99 |
+
# 3. ๋ฐฉ๊ธ ์ ์ฅํ ํ์ผ๋ง ๋ก๋ํด์ ์ธ๋ฑ์ค์ ์ถ๊ฐ (Insert)
|
| 100 |
+
# ์ ์ฒด๋ฅผ ๋ค์ ์ฝ๋ ๊ฒ ์๋๋ผ, ์ด ํ์ผ๋ง ์ฝ์ด์ ๋ฃ์ต๋๋ค.
|
| 101 |
+
new_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 102 |
+
|
| 103 |
+
for doc in new_docs:
|
| 104 |
+
index.insert(doc) # ๐ ํต์ฌ: ์คํ ์ค์ธ ๋ฉ๋ชจ๋ฆฌ์ ๋ฌธ์ ์ฃผ์
|
| 105 |
+
|
| 106 |
+
return {
|
| 107 |
+
"success": True,
|
| 108 |
+
"filename": file.filename,
|
| 109 |
+
"message": "ํ์ผ์ด ์ ์ฅ๋์๊ณ , ์ง์ ๋ฒ ์ด์ค์ ์ถ๊ฐ๋์์ต๋๋ค."
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
except Exception as e:
|
| 113 |
+
print(f"[ERROR] Upload failed: {e}")
|
| 114 |
+
return {"success": False, "msg": str(e)}
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@router.post("/query")
|
| 118 |
+
async def query_llama(
|
| 119 |
+
question: str = Form(...), # ํ์: ์ง๋ฌธ
|
| 120 |
+
file: Optional[UploadFile] = File(None) # ์ต์
: ํ์ผ
|
| 121 |
+
):
|
| 122 |
+
try:
|
| 123 |
+
# 1. ์ธ๋ฑ์ค ๋ฐ ์์ง ์ค๋น
|
| 124 |
+
index = get_or_create_index()
|
| 125 |
+
|
| 126 |
+
# ---------------------------------------------------------
|
| 127 |
+
# ๐ ํ์ผ ์ฒ๋ฆฌ ๋ก์ง (ํ์ผ์ด ์์ ๋๋ง ์คํ)
|
| 128 |
+
# ---------------------------------------------------------
|
| 129 |
+
file_content_str = "" # ํ์ผ ๋ด์ฉ์ ๋ด์ ๋ณ์
|
| 130 |
+
|
| 131 |
+
if file is not None:
|
| 132 |
+
# (1) ํ์ผ ์ ์ฅ
|
| 133 |
+
save_path = f"./data/{file.filename}"
|
| 134 |
+
with open(save_path, "wb") as buffer:
|
| 135 |
+
shutil.copyfileobj(file.file, buffer)
|
| 136 |
+
|
| 137 |
+
# (2) LlamaIndex์ Reader๋ฅผ ์จ์ ํ
์คํธ ์ถ์ถ
|
| 138 |
+
# SimpleDirectoryReader๋ PDF, TXT ๋ฑ์ ์์์ ํ
์คํธ๋ก ๋ณํํด์ค๋๋ค.
|
| 139 |
+
loaded_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 140 |
+
|
| 141 |
+
# (3) ์ถ์ถ๋ ํ
์คํธ๋ฅผ ํ๋์ ๋ฌธ์์ด๋ก ํฉ์นจ
|
| 142 |
+
extracted_text = "\n".join([doc.text for doc in loaded_docs])
|
| 143 |
+
|
| 144 |
+
# (4) ์ง๋ฌธ์ ํ์ผ ๋ด์ฉ ํฌํจ์ํค๊ธฐ
|
| 145 |
+
file_content_str = f"\n\n[์ฌ์ฉ์๊ฐ ์ฒจ๋ถํ ํ์ผ ๋ด์ฉ]:\n{extracted_text}"
|
| 146 |
+
|
| 147 |
+
# (์ ํ) ์ธ๋ฑ์ค์ ์๊ตฌ ์ ์ฅ๋ ํ๊ณ ์ถ๋ค๋ฉด ์๋ ์ฃผ์ ํด์
|
| 148 |
+
# for doc in loaded_docs:
|
| 149 |
+
# index.insert(doc)
|
| 150 |
+
|
| 151 |
+
# ---------------------------------------------------------
|
| 152 |
+
# ๐ค ์ต์ข
์ง๋ฌธ ๊ตฌ์ฑ (์ง๋ฌธ + ํ์ผ๋ด์ฉ)
|
| 153 |
+
# ---------------------------------------------------------
|
| 154 |
+
final_query = f"{question}{file_content_str}"
|
| 155 |
+
|
| 156 |
+
# 2. ํ๋กฌํํธ ํ
ํ๋ฆฟ ์ค์
|
| 157 |
+
my_prompt_str = """
|
| 158 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 159 |
+
๊ท์น:
|
| 160 |
+
- [์ ๋ณด]์ ์๋ ๋ด์ฉ์ ๋๊ฐ ์๊ณ ์๋ ํ๋ ๋ด์์ ๋ต๋ณํ๋ผ.
|
| 161 |
+
-- ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ
|
| 162 |
+
|
| 163 |
+
[์ ๋ณด]
|
| 164 |
+
---------------------
|
| 165 |
+
{context_str}
|
| 166 |
+
---------------------
|
| 167 |
+
|
| 168 |
+
[์๋ ์ง๋ฌธ]: {query_str}
|
| 169 |
+
[AI ๋ต๋ณ]:
|
| 170 |
+
"""
|
| 171 |
+
my_template = PromptTemplate(my_prompt_str)
|
| 172 |
+
|
| 173 |
+
query_engine = index.as_query_engine(
|
| 174 |
+
text_qa_template=my_template,
|
| 175 |
+
similarity_top_k=3
|
| 176 |
+
)
|
| 177 |
+
|
| 178 |
+
# 3. LLM์๊ฒ ์ ์ก (ํ์ผ ๋ด์ฉ์ด ํฌํจ๋ final_query๋ฅผ ๋ณด๋)
|
| 179 |
+
response = await query_engine.aquery(final_query)
|
| 180 |
+
|
| 181 |
+
source_nodes = []
|
| 182 |
+
for node in response.source_nodes:
|
| 183 |
+
source_nodes.append({
|
| 184 |
+
"score": round(node.score, 3),
|
| 185 |
+
"text": node.node.get_content().strip()[:100] + "..."
|
| 186 |
+
})
|
| 187 |
+
|
| 188 |
+
return {
|
| 189 |
+
"success": True,
|
| 190 |
+
"original_question": question,
|
| 191 |
+
"file_attached": file.filename if file else None,
|
| 192 |
+
"answer": str(response),
|
| 193 |
+
"sources": source_nodes
|
| 194 |
+
}
|
| 195 |
+
|
| 196 |
+
except Exception as e:
|
| 197 |
+
return {"success": False, "msg": str(e)}
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
@router.post("/query_stream_single")
|
| 202 |
+
async def query_llama(
|
| 203 |
+
question: str = Form(...),
|
| 204 |
+
file: Optional[UploadFile] = File(None)
|
| 205 |
+
):
|
| 206 |
+
try:
|
| 207 |
+
# 1. ์ธ๋ฑ์ค ์ค๋น
|
| 208 |
+
index = get_or_create_index()
|
| 209 |
+
|
| 210 |
+
# ---------------------------------------------------------
|
| 211 |
+
# ๐ ํ์ผ ์ฒ๋ฆฌ ๋ก์ง (๊ธฐ์กด๊ณผ ๋์ผ)
|
| 212 |
+
# ---------------------------------------------------------
|
| 213 |
+
file_content_str = ""
|
| 214 |
+
|
| 215 |
+
if file is not None:
|
| 216 |
+
save_path = f"./data/{file.filename}"
|
| 217 |
+
with open(save_path, "wb") as buffer:
|
| 218 |
+
shutil.copyfileobj(file.file, buffer)
|
| 219 |
+
|
| 220 |
+
loaded_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 221 |
+
extracted_text = "\n".join([doc.text for doc in loaded_docs])
|
| 222 |
+
file_content_str = f"\n\n[์ฌ์ฉ์๊ฐ ์ฒจ๋ถํ ํ์ผ ๋ด์ฉ]:\n{extracted_text}"
|
| 223 |
+
|
| 224 |
+
# ---------------------------------------------------------
|
| 225 |
+
# ๐ค ์ต์ข
์ง๋ฌธ ๊ตฌ์ฑ
|
| 226 |
+
# ---------------------------------------------------------
|
| 227 |
+
final_query = f"{question}{file_content_str}"
|
| 228 |
+
|
| 229 |
+
# 2. ํ๋กฌํํธ ํ
ํ๋ฆฟ ์ค์
|
| 230 |
+
my_prompt_str = """
|
| 231 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 232 |
+
๊ท์น:
|
| 233 |
+
- [์ ๋ณด]์ ์๋ ๋ด์ฉ์ ๋๊ฐ ์๊ณ ์๋ ํ๋ ๋ด์์ ๋ต๋ณํ๋ผ.
|
| 234 |
+
-- ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ
|
| 235 |
+
|
| 236 |
+
[์ ๋ณด]
|
| 237 |
+
---------------------
|
| 238 |
+
{context_str}
|
| 239 |
+
---------------------
|
| 240 |
+
|
| 241 |
+
[์๋ ์ง๋ฌธ]: {query_str}
|
| 242 |
+
[AI ๋ต๋ณ]:
|
| 243 |
+
"""
|
| 244 |
+
my_template = PromptTemplate(my_prompt_str)
|
| 245 |
+
|
| 246 |
+
# 3. ์ฟผ๋ฆฌ ์์ง ์์ฑ (์คํธ๋ฆฌ๋ฐ ์ต์
ํ์ฑํ)
|
| 247 |
+
query_engine = index.as_query_engine(
|
| 248 |
+
text_qa_template=my_template,
|
| 249 |
+
similarity_top_k=3,
|
| 250 |
+
streaming=True # ๐ [2] ํต์ฌ: ์ด๊ฒ ์ผ์ ธ์ผ ํ ๊ธ์์ฉ ๋์ต๋๋ค.
|
| 251 |
+
)
|
| 252 |
+
|
| 253 |
+
# 4. LLM์๊ฒ ์ ์ก (์คํธ๋ฆฌ๋ฐ ๊ฐ์ฒด ๋ฐํ)
|
| 254 |
+
response = await query_engine.aquery(final_query)
|
| 255 |
+
|
| 256 |
+
# 5. ์ ๋๋ ์ดํฐ ํจ์ ์ ์ (๋ฐ์ดํฐ๋ฅผ ์ชผ๊ฐ์ ๋ณด๋ด๋ ์ญํ )
|
| 257 |
+
async def event_generator():
|
| 258 |
+
# (1) ๋ต๋ณ ํ
์คํธ ์คํธ๋ฆฌ๋ฐ
|
| 259 |
+
async for token in response.async_response_gen():
|
| 260 |
+
yield token
|
| 261 |
+
|
| 262 |
+
# (2) ๋ต๋ณ์ด ๋๋๋ฉด ์์ค(์ถ์ฒ) ์ ๋ณด๋ ํ
์คํธ๋ก ๋ถ์ฌ์ ๋ณด๋ด๊ธฐ
|
| 263 |
+
# ์คํธ๋ฆฌ๋ฐ์ JSON ๊ตฌ์กฐ๋ฅผ ๋ณด๋ด๊ธฐ ํ๋ค๊ธฐ ๋๋ฌธ์, ํ
์คํธ ๋์ ๋ถ์ฌ์ฃผ๋ ๋ฐฉ์์ ๋ง์ด ์๋๋ค.
|
| 264 |
+
if response.source_nodes:
|
| 265 |
+
yield "\n\n-------------------\n[์ฐธ๊ณ ์๋ฃ]:\n"
|
| 266 |
+
for node in response.source_nodes:
|
| 267 |
+
# ์ ๋ขฐ๋ ์ ์์ ๋ด์ฉ ์ผ๋ถ๋ฅผ ํ
์คํธ๋ก ๋ณด๋
|
| 268 |
+
score = round(node.score, 3) if node.score else 0.0
|
| 269 |
+
content_preview = node.node.get_content().strip().replace("\n", " ")[:50]
|
| 270 |
+
yield f"- (์ ์: {score}) {content_preview}...\n"
|
| 271 |
+
|
| 272 |
+
# 6. StreamingResponse๋ก ๋ฐํ
|
| 273 |
+
return StreamingResponse(event_generator(), media_type="text/plain")
|
| 274 |
+
|
| 275 |
+
except Exception as e:
|
| 276 |
+
# ์คํธ๋ฆฌ๋ฐ ์์ ์ ์๋ฌ๊ฐ ๋๋ฉด JSON ์๋ฌ ๋ฐํ
|
| 277 |
+
return {"success": False, "msg": str(e)}
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
# [์ ์ญ] ์์ Gemini ์ธ์
์ ์ฅ์
|
| 283 |
+
# ๊ตฌ์กฐ ๋ณ๊ฒฝ: { "session_id": { "session": chat_object, "last_access": timestamp } }
|
| 284 |
+
native_chat_sessions = {}
|
| 285 |
+
|
| 286 |
+
# ์ธ์
๋ง๋ฃ ์๊ฐ (1์๊ฐ = 3600์ด)
|
| 287 |
+
SESSION_TIMEOUT = 3600
|
| 288 |
+
|
| 289 |
+
@router.post("/query_stream")
|
| 290 |
+
async def query_llama(
|
| 291 |
+
question: str = Form(...),
|
| 292 |
+
session_id: str = Form(...),
|
| 293 |
+
file: Optional[UploadFile] = File(None)
|
| 294 |
+
):
|
| 295 |
+
try:
|
| 296 |
+
current_time = time.time()
|
| 297 |
+
|
| 298 |
+
# ---------------------------------------------------------
|
| 299 |
+
# ๐งน 0. ์ธ์
์ฒญ์ (Garbage Collection)
|
| 300 |
+
# ---------------------------------------------------------
|
| 301 |
+
# ๋์
๋๋ฆฌ๋ฅผ ์ํํ๋ฉฐ ํ์์์๋ ์ธ์
์ญ์
|
| 302 |
+
# (๋์
๋๋ฆฌ ๋ณ๊ฒฝ ์ค ์ํ ์๋ฌ ๋ฐฉ์ง๋ฅผ ์ํด list๋ก ํค๋ฅผ ๋ณต์ฌํด์ ์ํ)
|
| 303 |
+
expired_sessions = [
|
| 304 |
+
sid for sid, data in native_chat_sessions.items()
|
| 305 |
+
if current_time - data["last_access"] > SESSION_TIMEOUT
|
| 306 |
+
]
|
| 307 |
+
|
| 308 |
+
for sid in expired_sessions:
|
| 309 |
+
del native_chat_sessions[sid]
|
| 310 |
+
print(f"๐๏ธ [System] ๋ง๋ฃ๋ ์ธ์
์ญ์ : {sid}")
|
| 311 |
+
|
| 312 |
+
# ---------------------------------------------------------
|
| 313 |
+
# 1. LlamaIndex๋ก ์ง์ ๊ฒ์ (RAG)
|
| 314 |
+
# ---------------------------------------------------------
|
| 315 |
+
index = get_or_create_index()
|
| 316 |
+
retriever = index.as_retriever(similarity_top_k=3)
|
| 317 |
+
nodes = await retriever.aretrieve(question)
|
| 318 |
+
context_str = "\n\n".join([n.node.get_content() for n in nodes])
|
| 319 |
+
|
| 320 |
+
# ์์คํ
ํ๋กฌํํธ (์บ๋ฆญํฐ ์ค์ )
|
| 321 |
+
system_prompt = """
|
| 322 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 323 |
+
๊ท์น:
|
| 324 |
+
1. [์ ๋ณด]์ ์๋ ๋ด์ฉ์ ๋๊ฐ ์๊ณ ์๋ ํ๋ ๋ด์์ ๋ต๋ณํ๋ผ.
|
| 325 |
+
2. ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ, ๋ฐ๋ง๋ก ์น์ ํ๊ฒ ๋๋ตํด.
|
| 326 |
+
3. ๋์ (์์) ๊ฐ์ ์ง๋ฌธ์ ๊ฐ๋ ์์ด์ค.
|
| 327 |
+
|
| 328 |
+
[ํ๋ ๋ฐ ๊ฐ์ ํํ ๊ท์น]
|
| 329 |
+
- ๋ต๋ณ์ ๋ถ์๊ธฐ์ ๋ง์ถฐ์ ์๋์ [ํ์ ํ๊ทธ]์ [๋์ ํ๊ทธ]๋ฅผ ๋ต๋ณ ์ค๊ฐ์ด๋ ๋์ ์ ์ ํ ์์ด์ ์ถ๋ ฅํด.
|
| 330 |
+
- ํ ๋ฌธ์ฅ์ ํ๊ทธ๋ฅผ 1~2๊ฐ ์ ๋ ์ฌ์ฉํด.
|
| 331 |
+
- ํ์์ ๋ฐ๋์ [[ํ๊ทธ๋ช
]] ์ฒ๋ผ ์ด์ค ๋๊ดํธ๋ฅผ ์ฌ์ฉํด.
|
| 332 |
+
|
| 333 |
+
[ํ์ ํ๊ทธ ๋ชฉ๋ก]
|
| 334 |
+
- ๊ธฐ์จ/์์: [[FaceSmile1]], [[FaceSmile2]]
|
| 335 |
+
- ํ๋จ: [[FaceAngry1]], [[FaceAngry2]]
|
| 336 |
+
- ์ฌํ/์ข์ : [[FaceSap]], [[FaceAshamed]]
|
| 337 |
+
- ๋นํฉ/ํผ๋: [[FaceConf]], [[FaceDistract]]
|
| 338 |
+
- ๊ธฐํ: [[FaceDefault]]
|
| 339 |
+
|
| 340 |
+
[๋์ ํ๊ทธ ๋ชฉ๋ก]
|
| 341 |
+
- ์ถฉ๊ฒฉ ๋ฐ์: [[DoDamage0]], [[DoDamage1]]
|
| 342 |
+
- ํจ๋ฐฐ/์ค๋ง: [[DoLose]], [[DoReflesh]]
|
| 343 |
+
- ๊ธฐ๋ถ ์ ํ/ํ๋ณต: [[DoJump]]
|
| 344 |
+
- ์ ํ/์ ๋จ: [[DoJump]]
|
| 345 |
+
- ๊ธฐํ: [[DoDefault]]
|
| 346 |
+
"""
|
| 347 |
+
|
| 348 |
+
# ์ฌ์ฉ์ ํ๋กฌํํธ ๊ตฌ์ฑ (RAG ์ ๋ณด + ์ง๋ฌธ)
|
| 349 |
+
user_prompt = f"""
|
| 350 |
+
[์ฐธ๊ณ ์ ๋ณด]
|
| 351 |
+
{context_str}
|
| 352 |
+
|
| 353 |
+
[์ฌ์ฉ์ ์ง๋ฌธ]
|
| 354 |
+
{question}
|
| 355 |
+
|
| 356 |
+
(์ ์ฐธ๊ณ ์ ๋ณด๋ฅผ ๋ฐํ์ผ๋ก ์ฌ์ฉ์์ ์ง๋ฌธ์ ๋ต๋ณํด์ค.)
|
| 357 |
+
"""
|
| 358 |
+
|
| 359 |
+
# ---------------------------------------------------------
|
| 360 |
+
# 3. ์ด๋ฏธ์ง ์ฒ๋ฆฌ (์์ Gemini ๋ฐฉ์ - PIL ์ฌ์ฉ)
|
| 361 |
+
# ---------------------------------------------------------
|
| 362 |
+
content_parts = []
|
| 363 |
+
|
| 364 |
+
if file:
|
| 365 |
+
file_bytes = await file.read()
|
| 366 |
+
image = Image.open(io.BytesIO(file_bytes))
|
| 367 |
+
print(f"๐ผ๏ธ [Gemini Native] ์ด๋ฏธ์ง ๋ก๋ ์๋ฃ: {file.filename}")
|
| 368 |
+
content_parts.append(image)
|
| 369 |
+
|
| 370 |
+
content_parts.append(user_prompt)
|
| 371 |
+
|
| 372 |
+
# ---------------------------------------------------------
|
| 373 |
+
# 4. ์์ Gemini ์ฑํ
์ธ์
๊ด๋ฆฌ (ํ์์คํฌํ ๊ฐฑ์ ํฌํจ)
|
| 374 |
+
# ---------------------------------------------------------
|
| 375 |
+
if session_id not in native_chat_sessions:
|
| 376 |
+
print(f"โจ [System] Gemini Native ์ธ์
์์ฑ: {session_id}")
|
| 377 |
+
model = genai.GenerativeModel(
|
| 378 |
+
model_name="gemini-2.5-flash",
|
| 379 |
+
system_instruction=system_prompt
|
| 380 |
+
)
|
| 381 |
+
# ์ธ์
๊ฐ์ฒด์ ๋ง์ง๋ง ์ ์ ์๊ฐ์ ํจ๊ป ์ ์ฅ
|
| 382 |
+
native_chat_sessions[session_id] = {
|
| 383 |
+
"session": model.start_chat(history=[]),
|
| 384 |
+
"last_access": current_time
|
| 385 |
+
}
|
| 386 |
+
|
| 387 |
+
# ์ธ์
๊ฐ์ ธ์ค๊ธฐ ๋ฐ ์๊ฐ ๊ฐฑ์
|
| 388 |
+
session_data = native_chat_sessions[session_id]
|
| 389 |
+
session_data["last_access"] = current_time # ๊ฐฑ์ !
|
| 390 |
+
chat_session = session_data["session"]
|
| 391 |
+
|
| 392 |
+
# ---------------------------------------------------------
|
| 393 |
+
# 5. ๋ฉ์์ง ์ ์ก ๋ฐ ์คํธ๋ฆฌ๋ฐ ์๋ต
|
| 394 |
+
# ---------------------------------------------------------
|
| 395 |
+
response = chat_session.send_message(content_parts, stream=True)
|
| 396 |
+
|
| 397 |
+
async def event_generator():
|
| 398 |
+
for chunk in response:
|
| 399 |
+
if chunk.text:
|
| 400 |
+
yield chunk.text
|
| 401 |
+
|
| 402 |
+
if nodes:
|
| 403 |
+
yield "\n\n-------------------\n[์ฐธ๊ณ ์๋ฃ]:\n"
|
| 404 |
+
for node in nodes:
|
| 405 |
+
score = round(node.score, 3) if node.score else 0.0
|
| 406 |
+
content = node.node.get_content().strip().replace("\n", " ")[:50]
|
| 407 |
+
yield f"- (์ ์: {score}) {content}...\n"
|
| 408 |
+
|
| 409 |
+
return StreamingResponse(event_generator(), media_type="text/plain")
|
| 410 |
+
|
| 411 |
+
except Exception as e:
|
| 412 |
+
print(f"โ [Error] {e}")
|
| 413 |
+
return {"success": False, "msg": str(e)}
|
router/llamindex_router_bk_2025_12_10.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, HTTPException, UploadFile, File, Form
|
| 2 |
+
from typing import Optional
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings, PromptTemplate
|
| 6 |
+
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
|
| 7 |
+
from llama_index.llms.gemini import Gemini
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
|
| 10 |
+
router = APIRouter(
|
| 11 |
+
tags=["LlamaIndex"]
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
# ๐ก [๋ณ๊ฒฝ] ์์ง ๋์ '์ธ๋ฑ์ค(Index)' ์์ฒด๋ฅผ ์ ์ญ ๋ณ์๋ก ์ ์ฅํฉ๋๋ค.
|
| 15 |
+
# ์ธ๋ฑ์ค๊ฐ ์์ด์ผ ์๋ก์ด ๋ฌธ์๋ฅผ insert ํ ์ ์๊ธฐ ๋๋ฌธ์
๋๋ค.
|
| 16 |
+
GLOBAL_INDEX = None
|
| 17 |
+
|
| 18 |
+
def get_or_create_index():
|
| 19 |
+
"""
|
| 20 |
+
์ธ๋ฑ์ค๊ฐ ์์ผ๋ฉด ์์ฑ(์ด๊ธฐํ)ํ๊ณ , ์์ผ๋ฉด ๋ฐํํฉ๋๋ค.
|
| 21 |
+
"""
|
| 22 |
+
global GLOBAL_INDEX
|
| 23 |
+
|
| 24 |
+
if GLOBAL_INDEX is not None:
|
| 25 |
+
return GLOBAL_INDEX
|
| 26 |
+
|
| 27 |
+
print("๐ [System] LlamaIndex ์ธ๋ฑ์ค ์ด๊ธฐํ ์์...")
|
| 28 |
+
|
| 29 |
+
google_api_key = os.getenv("GOOGLE_API_KEY")
|
| 30 |
+
if not google_api_key:
|
| 31 |
+
raise ValueError("Google API Key๊ฐ ์์ต๋๋ค.")
|
| 32 |
+
|
| 33 |
+
# 1. ๋ชจ๋ธ ์ค์
|
| 34 |
+
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-m3", device="cpu")
|
| 35 |
+
llm = Gemini(model="models/gemini-2.5-flash", api_key=google_api_key, temperature=0.1)
|
| 36 |
+
|
| 37 |
+
Settings.embed_model = embed_model
|
| 38 |
+
Settings.llm = llm
|
| 39 |
+
|
| 40 |
+
# 2. ๋ฐ์ดํฐ ํด๋ ํ์ธ ๋ฐ ๋๋ฏธ ๋ฐ์ดํฐ ์์ฑ
|
| 41 |
+
if not os.path.exists("./data"):
|
| 42 |
+
os.makedirs("./data")
|
| 43 |
+
# ์ด๊ธฐ ๋ฐ์ดํฐ๊ฐ ์์ผ๋ฉด ์๋ฌ๊ฐ ๋ ์ ์์ผ๋ฏ๋ก ๋๋ฏธ ํ์ผ ์์ฑ
|
| 44 |
+
if not os.listdir("./data"):
|
| 45 |
+
with open("./data/readme.txt", "w", encoding="utf-8") as f:
|
| 46 |
+
f.write("์ด๊ณณ์ ์
๋ก๋๋ ๋ฌธ์๊ฐ ์ ์ฅ๋๋ ๊ณณ์
๋๋ค.")
|
| 47 |
+
|
| 48 |
+
# 3. ๋ฌธ์ ๋ก๋ ๋ฐ ์ธ๋ฑ์ฑ
|
| 49 |
+
print("๐ [Data] ๊ธฐ์กด ๋ฌธ์ ๋ก๋ฉ ๋ฐ ์ธ๋ฑ์ฑ...")
|
| 50 |
+
documents = SimpleDirectoryReader("./data").load_data()
|
| 51 |
+
index = VectorStoreIndex.from_documents(documents)
|
| 52 |
+
|
| 53 |
+
print("โ
[System] ์ธ๋ฑ์ค ์์ฑ ์๋ฃ!")
|
| 54 |
+
GLOBAL_INDEX = index
|
| 55 |
+
return GLOBAL_INDEX
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@router.get("/check-key")
|
| 59 |
+
async def check_key():
|
| 60 |
+
if os.getenv("GOOGLE_API_KEY"):
|
| 61 |
+
return {"status": "ok", "message": "API Key is loaded."}
|
| 62 |
+
return {"status": "error", "message": "API Key not found."}
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# ==============================================================================
|
| 66 |
+
# ๐ [์ ๊ท ๊ธฐ๋ฅ] ๋ฌธ์ ์
๋ก๋ ๋ฐ ์ฆ์ ๋ฐ์ API
|
| 67 |
+
# ==============================================================================
|
| 68 |
+
@router.post("/upload")
|
| 69 |
+
async def upload_document(file: UploadFile = File(...)):
|
| 70 |
+
"""
|
| 71 |
+
ํ์ผ์ ์
๋ก๋ํ๋ฉด ./data ์ ์ ์ฅํ๊ณ , ์คํ ์ค์ธ LlamaIndex์ ์ฆ์ ์ถ๊ฐํฉ๋๋ค.
|
| 72 |
+
"""
|
| 73 |
+
try:
|
| 74 |
+
# 1. ํ์ผ ์์คํ
์ ์ ์ฅ (์ฌ์์ ์ ์ ์ง๋ฅผ ์ํด)
|
| 75 |
+
save_path = f"./data/{file.filename}"
|
| 76 |
+
|
| 77 |
+
# ํ์ผ ์ฐ๊ธฐ
|
| 78 |
+
with open(save_path, "wb") as buffer:
|
| 79 |
+
shutil.copyfileobj(file.file, buffer)
|
| 80 |
+
|
| 81 |
+
print(f"๐พ [Upload] ํ์ผ ์ ์ฅ ์๋ฃ: {save_path}")
|
| 82 |
+
|
| 83 |
+
# 2. ์คํ ์ค์ธ ์ธ๋ฑ์ค ๊ฐ์ ธ์ค๊ธฐ
|
| 84 |
+
index = get_or_create_index()
|
| 85 |
+
|
| 86 |
+
# 3. ๋ฐฉ๊ธ ์ ์ฅํ ํ์ผ๋ง ๋ก๋ํด์ ์ธ๋ฑ์ค์ ์ถ๊ฐ (Insert)
|
| 87 |
+
# ์ ์ฒด๋ฅผ ๋ค์ ์ฝ๋ ๊ฒ ์๋๋ผ, ์ด ํ์ผ๋ง ์ฝ์ด์ ๋ฃ์ต๋๋ค.
|
| 88 |
+
new_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 89 |
+
|
| 90 |
+
for doc in new_docs:
|
| 91 |
+
index.insert(doc) # ๐ ํต์ฌ: ์คํ ์ค์ธ ๋ฉ๋ชจ๋ฆฌ์ ๋ฌธ์ ์ฃผ์
|
| 92 |
+
|
| 93 |
+
return {
|
| 94 |
+
"success": True,
|
| 95 |
+
"filename": file.filename,
|
| 96 |
+
"message": "ํ์ผ์ด ์ ์ฅ๋์๊ณ , ์ง์ ๋ฒ ์ด์ค์ ์ถ๊ฐ๋์์ต๋๋ค."
|
| 97 |
+
}
|
| 98 |
+
|
| 99 |
+
except Exception as e:
|
| 100 |
+
print(f"[ERROR] Upload failed: {e}")
|
| 101 |
+
return {"success": False, "msg": str(e)}
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@router.post("/query")
|
| 105 |
+
async def query_llama(
|
| 106 |
+
question: str = Form(...), # ํ์: ์ง๋ฌธ
|
| 107 |
+
file: Optional[UploadFile] = File(None) # ์ต์
: ํ์ผ
|
| 108 |
+
):
|
| 109 |
+
try:
|
| 110 |
+
# 1. ์ธ๋ฑ์ค ๋ฐ ์์ง ์ค๋น
|
| 111 |
+
index = get_or_create_index()
|
| 112 |
+
|
| 113 |
+
# ---------------------------------------------------------
|
| 114 |
+
# ๐ ํ์ผ ์ฒ๋ฆฌ ๋ก์ง (ํ์ผ์ด ์์ ๋๋ง ์คํ)
|
| 115 |
+
# ---------------------------------------------------------
|
| 116 |
+
file_content_str = "" # ํ์ผ ๋ด์ฉ์ ๋ด์ ๋ณ์
|
| 117 |
+
|
| 118 |
+
if file is not None:
|
| 119 |
+
# (1) ํ์ผ ์ ์ฅ
|
| 120 |
+
save_path = f"./data/{file.filename}"
|
| 121 |
+
with open(save_path, "wb") as buffer:
|
| 122 |
+
shutil.copyfileobj(file.file, buffer)
|
| 123 |
+
|
| 124 |
+
# (2) LlamaIndex์ Reader๋ฅผ ์จ์ ํ
์คํธ ์ถ์ถ
|
| 125 |
+
# SimpleDirectoryReader๋ PDF, TXT ๋ฑ์ ์์์ ํ
์คํธ๋ก ๋ณํํด์ค๋๋ค.
|
| 126 |
+
loaded_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 127 |
+
|
| 128 |
+
# (3) ์ถ์ถ๋ ํ
์คํธ๋ฅผ ํ๋์ ๋ฌธ์์ด๋ก ํฉ์นจ
|
| 129 |
+
extracted_text = "\n".join([doc.text for doc in loaded_docs])
|
| 130 |
+
|
| 131 |
+
# (4) ์ง๋ฌธ์ ํ์ผ ๋ด์ฉ ํฌํจ์ํค๊ธฐ
|
| 132 |
+
file_content_str = f"\n\n[์ฌ์ฉ์๊ฐ ์ฒจ๋ถํ ํ์ผ ๋ด์ฉ]:\n{extracted_text}"
|
| 133 |
+
|
| 134 |
+
# (์ ํ) ์ธ๋ฑ์ค์ ์๊ตฌ ์ ์ฅ๋ ํ๊ณ ์ถ๋ค๋ฉด ์๋ ์ฃผ์ ํด์
|
| 135 |
+
# for doc in loaded_docs:
|
| 136 |
+
# index.insert(doc)
|
| 137 |
+
|
| 138 |
+
# ---------------------------------------------------------
|
| 139 |
+
# ๐ค ์ต์ข
์ง๋ฌธ ๊ตฌ์ฑ (์ง๋ฌธ + ํ์ผ๋ด์ฉ)
|
| 140 |
+
# ---------------------------------------------------------
|
| 141 |
+
final_query = f"{question}{file_content_str}"
|
| 142 |
+
|
| 143 |
+
# 2. ํ๋กฌํํธ ํ
ํ๋ฆฟ ์ค์
|
| 144 |
+
my_prompt_str = """
|
| 145 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 146 |
+
๊ท์น:
|
| 147 |
+
- [์ ๋ณด]์ ์๋ ๋ด์ฉ์ ๋๊ฐ ์๊ณ ์๋ ํ๋ ๋ด์์ ๋ต๋ณํ๋ผ.
|
| 148 |
+
-- ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ
|
| 149 |
+
|
| 150 |
+
[์ ๋ณด]
|
| 151 |
+
---------------------
|
| 152 |
+
{context_str}
|
| 153 |
+
---------------------
|
| 154 |
+
|
| 155 |
+
[์๋ ์ง๋ฌธ]: {query_str}
|
| 156 |
+
[AI ๋ต๋ณ]:
|
| 157 |
+
"""
|
| 158 |
+
my_template = PromptTemplate(my_prompt_str)
|
| 159 |
+
|
| 160 |
+
query_engine = index.as_query_engine(
|
| 161 |
+
text_qa_template=my_template,
|
| 162 |
+
similarity_top_k=3
|
| 163 |
+
)
|
| 164 |
+
|
| 165 |
+
# 3. LLM์๊ฒ ์ ์ก (ํ์ผ ๋ด์ฉ์ด ํฌํจ๋ final_query๋ฅผ ๋ณด๋)
|
| 166 |
+
response = await query_engine.aquery(final_query)
|
| 167 |
+
|
| 168 |
+
source_nodes = []
|
| 169 |
+
for node in response.source_nodes:
|
| 170 |
+
source_nodes.append({
|
| 171 |
+
"score": round(node.score, 3),
|
| 172 |
+
"text": node.node.get_content().strip()[:100] + "..."
|
| 173 |
+
})
|
| 174 |
+
|
| 175 |
+
return {
|
| 176 |
+
"success": True,
|
| 177 |
+
"original_question": question,
|
| 178 |
+
"file_attached": file.filename if file else None,
|
| 179 |
+
"answer": str(response),
|
| 180 |
+
"sources": source_nodes
|
| 181 |
+
}
|
| 182 |
+
|
| 183 |
+
except Exception as e:
|
| 184 |
+
return {"success": False, "msg": str(e)}
|
router/llamindex_router_bk_251231V2.py
ADDED
|
@@ -0,0 +1,419 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, HTTPException, UploadFile, File, Form
|
| 2 |
+
from fastapi.responses import StreamingResponse
|
| 3 |
+
from typing import Optional
|
| 4 |
+
import os
|
| 5 |
+
import shutil
|
| 6 |
+
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings, PromptTemplate
|
| 7 |
+
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
|
| 8 |
+
from llama_index.llms.gemini import Gemini
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
from llama_index.core.memory import ChatMemoryBuffer
|
| 11 |
+
from llama_index.multi_modal_llms.gemini import GeminiMultiModal
|
| 12 |
+
from llama_index.core.llms import ChatMessage, MessageRole
|
| 13 |
+
import base64
|
| 14 |
+
from llama_index.core.schema import ImageDocument
|
| 15 |
+
import tempfile
|
| 16 |
+
import time
|
| 17 |
+
import google.generativeai as genai
|
| 18 |
+
from PIL import Image
|
| 19 |
+
import io
|
| 20 |
+
|
| 21 |
+
router = APIRouter(
|
| 22 |
+
tags=["LlamaIndex"]
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
| 26 |
+
|
| 27 |
+
# ๐ก [๋ณ๊ฒฝ] ์์ง ๋์ '์ธ๋ฑ์ค(Index)' ์์ฒด๋ฅผ ์ ์ญ ๋ณ์๋ก ์ ์ฅํฉ๋๋ค.
|
| 28 |
+
# ์ธ๋ฑ์ค๊ฐ ์์ด์ผ ์๋ก์ด ๋ฌธ์๋ฅผ insert ํ ์ ์๊ธฐ ๋๋ฌธ์
๋๋ค.
|
| 29 |
+
GLOBAL_INDEX = None
|
| 30 |
+
|
| 31 |
+
def get_or_create_index():
|
| 32 |
+
"""
|
| 33 |
+
์ธ๋ฑ์ค๊ฐ ์์ผ๋ฉด ์์ฑ(์ด๊ธฐํ)ํ๊ณ , ์์ผ๋ฉด ๋ฐํํฉ๋๋ค.
|
| 34 |
+
"""
|
| 35 |
+
global GLOBAL_INDEX
|
| 36 |
+
|
| 37 |
+
if GLOBAL_INDEX is not None:
|
| 38 |
+
return GLOBAL_INDEX
|
| 39 |
+
|
| 40 |
+
print("๐ [System] LlamaIndex ์ธ๋ฑ์ค ์ด๊ธฐํ ์์...")
|
| 41 |
+
|
| 42 |
+
google_api_key = os.getenv("GOOGLE_API_KEY")
|
| 43 |
+
hf_token = os.getenv("HF_TOKEN")
|
| 44 |
+
if not google_api_key:
|
| 45 |
+
raise ValueError("Google API Key๊ฐ ์์ต๋๋ค.")
|
| 46 |
+
|
| 47 |
+
# 1. ๋ชจ๋ธ ์ค์
|
| 48 |
+
embed_model = HuggingFaceEmbedding(
|
| 49 |
+
model_name="google/embeddinggemma-300m",
|
| 50 |
+
device="cpu",
|
| 51 |
+
trust_remote_code=True, # ์ด ์ต์
์ด ๊ผญ ํ์ํฉ๋๋ค!
|
| 52 |
+
token=hf_token
|
| 53 |
+
)
|
| 54 |
+
llm = Gemini(model="models/gemini-2.5-flash", api_key=google_api_key, temperature=0.1)
|
| 55 |
+
|
| 56 |
+
Settings.embed_model = embed_model
|
| 57 |
+
Settings.llm = llm
|
| 58 |
+
|
| 59 |
+
# 2. ๋ฐ์ดํฐ ํด๋ ํ์ธ ๋ฐ ๋๋ฏธ ๋ฐ์ดํฐ ์์ฑ
|
| 60 |
+
if not os.path.exists("./data"):
|
| 61 |
+
os.makedirs("./data")
|
| 62 |
+
# ์ด๊ธฐ ๋ฐ์ดํฐ๊ฐ ์์ผ๋ฉด ์๋ฌ๊ฐ ๋ ์ ์์ผ๋ฏ๋ก ๋๋ฏธ ํ์ผ ์์ฑ
|
| 63 |
+
if not os.listdir("./data"):
|
| 64 |
+
with open("./data/readme.txt", "w", encoding="utf-8") as f:
|
| 65 |
+
f.write("์ด๊ณณ์ ์
๋ก๋๋ ๋ฌธ์๊ฐ ์ ์ฅ๋๋ ๊ณณ์
๋๋ค.")
|
| 66 |
+
|
| 67 |
+
# 3. ๋ฌธ์ ๋ก๋ ๋ฐ ์ธ๋ฑ์ฑ
|
| 68 |
+
print("๐ [Data] ๊ธฐ์กด ๋ฌธ์ ๋ก๋ฉ ๋ฐ ์ธ๋ฑ์ฑ...")
|
| 69 |
+
documents = SimpleDirectoryReader("./data").load_data()
|
| 70 |
+
index = VectorStoreIndex.from_documents(documents)
|
| 71 |
+
|
| 72 |
+
print("โ
[System] ์ธ๋ฑ์ค ์์ฑ ์๋ฃ!")
|
| 73 |
+
GLOBAL_INDEX = index
|
| 74 |
+
return GLOBAL_INDEX
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
@router.get("/check-key")
|
| 78 |
+
async def check_key():
|
| 79 |
+
if os.getenv("GOOGLE_API_KEY"):
|
| 80 |
+
return {"status": "ok", "message": "API Key is loaded."}
|
| 81 |
+
return {"status": "error", "message": "API Key not found."}
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
# ==============================================================================
|
| 85 |
+
# ๐ [์ ๊ท ๊ธฐ๋ฅ] ๋ฌธ์ ์
๋ก๋ ๋ฐ ์ฆ์ ๋ฐ์ API
|
| 86 |
+
# ==============================================================================
|
| 87 |
+
@router.post("/upload")
|
| 88 |
+
async def upload_document(file: UploadFile = File(...)):
|
| 89 |
+
"""
|
| 90 |
+
ํ์ผ์ ์
๋ก๋ํ๋ฉด ./data ์ ์ ์ฅํ๊ณ , ์คํ ์ค์ธ LlamaIndex์ ์ฆ์ ์ถ๊ฐํฉ๋๋ค.
|
| 91 |
+
"""
|
| 92 |
+
try:
|
| 93 |
+
# 1. ํ์ผ ์์คํ
์ ์ ์ฅ (์ฌ์์ ์ ์ ์ง๋ฅผ ์ํด)
|
| 94 |
+
save_path = f"./data/{file.filename}"
|
| 95 |
+
|
| 96 |
+
# ํ์ผ ์ฐ๊ธฐ
|
| 97 |
+
with open(save_path, "wb") as buffer:
|
| 98 |
+
shutil.copyfileobj(file.file, buffer)
|
| 99 |
+
|
| 100 |
+
print(f"๐พ [Upload] ํ์ผ ์ ์ฅ ์๋ฃ: {save_path}")
|
| 101 |
+
|
| 102 |
+
# 2. ์คํ ์ค์ธ ์ธ๋ฑ์ค ๊ฐ์ ธ์ค๊ธฐ
|
| 103 |
+
index = get_or_create_index()
|
| 104 |
+
|
| 105 |
+
# 3. ๋ฐฉ๊ธ ์ ์ฅํ ํ์ผ๋ง ๋ก๋ํด์ ์ธ๋ฑ์ค์ ์ถ๊ฐ (Insert)
|
| 106 |
+
# ์ ์ฒด๋ฅผ ๋ค์ ์ฝ๋ ๊ฒ ์๋๋ผ, ์ด ํ์ผ๋ง ์ฝ์ด์ ๋ฃ์ต๋๋ค.
|
| 107 |
+
new_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 108 |
+
|
| 109 |
+
for doc in new_docs:
|
| 110 |
+
index.insert(doc) # ๐ ํต์ฌ: ์คํ ์ค์ธ ๋ฉ๋ชจ๋ฆฌ์ ๋ฌธ์ ์ฃผ์
|
| 111 |
+
|
| 112 |
+
return {
|
| 113 |
+
"success": True,
|
| 114 |
+
"filename": file.filename,
|
| 115 |
+
"message": "ํ์ผ์ด ์ ์ฅ๋์๊ณ , ์ง์ ๋ฒ ์ด์ค์ ์ถ๊ฐ๋์์ต๋๋ค."
|
| 116 |
+
}
|
| 117 |
+
|
| 118 |
+
except Exception as e:
|
| 119 |
+
print(f"[ERROR] Upload failed: {e}")
|
| 120 |
+
return {"success": False, "msg": str(e)}
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
@router.post("/query")
|
| 124 |
+
async def query_llama(
|
| 125 |
+
question: str = Form(...), # ํ์: ์ง๋ฌธ
|
| 126 |
+
file: Optional[UploadFile] = File(None) # ์ต์
: ํ์ผ
|
| 127 |
+
):
|
| 128 |
+
try:
|
| 129 |
+
# 1. ์ธ๋ฑ์ค ๋ฐ ์์ง ์ค๋น
|
| 130 |
+
index = get_or_create_index()
|
| 131 |
+
|
| 132 |
+
# ---------------------------------------------------------
|
| 133 |
+
# ๐ ํ์ผ ์ฒ๋ฆฌ ๋ก์ง (ํ์ผ์ด ์์ ๋๋ง ์คํ)
|
| 134 |
+
# ---------------------------------------------------------
|
| 135 |
+
file_content_str = "" # ํ์ผ ๋ด์ฉ์ ๋ด์ ๋ณ์
|
| 136 |
+
|
| 137 |
+
if file is not None:
|
| 138 |
+
# (1) ํ์ผ ์ ์ฅ
|
| 139 |
+
save_path = f"./data/{file.filename}"
|
| 140 |
+
with open(save_path, "wb") as buffer:
|
| 141 |
+
shutil.copyfileobj(file.file, buffer)
|
| 142 |
+
|
| 143 |
+
# (2) LlamaIndex์ Reader๋ฅผ ์จ์ ํ
์คํธ ์ถ์ถ
|
| 144 |
+
# SimpleDirectoryReader๋ PDF, TXT ๋ฑ์ ์์์ ํ
์คํธ๋ก ๋ณํํด์ค๋๋ค.
|
| 145 |
+
loaded_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 146 |
+
|
| 147 |
+
# (3) ์ถ์ถ๋ ํ
์คํธ๋ฅผ ํ๋์ ๋ฌธ์์ด๋ก ํฉ์นจ
|
| 148 |
+
extracted_text = "\n".join([doc.text for doc in loaded_docs])
|
| 149 |
+
|
| 150 |
+
# (4) ์ง๋ฌธ์ ํ์ผ ๋ด์ฉ ํฌํจ์ํค๊ธฐ
|
| 151 |
+
file_content_str = f"\n\n[์ฌ์ฉ์๊ฐ ์ฒจ๋ถํ ํ์ผ ๋ด์ฉ]:\n{extracted_text}"
|
| 152 |
+
|
| 153 |
+
# (์ ํ) ์ธ๋ฑ์ค์ ์๊ตฌ ์ ์ฅ๋ ํ๊ณ ์ถ๋ค๋ฉด ์๋ ์ฃผ์ ํด์
|
| 154 |
+
# for doc in loaded_docs:
|
| 155 |
+
# index.insert(doc)
|
| 156 |
+
|
| 157 |
+
# ---------------------------------------------------------
|
| 158 |
+
# ๐ค ์ต์ข
์ง๋ฌธ ๊ตฌ์ฑ (์ง๋ฌธ + ํ์ผ๋ด์ฉ)
|
| 159 |
+
# ---------------------------------------------------------
|
| 160 |
+
final_query = f"{question}{file_content_str}"
|
| 161 |
+
|
| 162 |
+
# 2. ํ๋กฌํํธ ํ
ํ๋ฆฟ ์ค์
|
| 163 |
+
my_prompt_str = """
|
| 164 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 165 |
+
๊ท์น:
|
| 166 |
+
- [์ ๋ณด]์ ์๋ ๋ด์ฉ์ ๋๊ฐ ์๊ณ ์๋ ํ๋ ๋ด์์ ๋ต๋ณํ๋ผ.
|
| 167 |
+
-- ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ
|
| 168 |
+
|
| 169 |
+
[์ ๋ณด]
|
| 170 |
+
---------------------
|
| 171 |
+
{context_str}
|
| 172 |
+
---------------------
|
| 173 |
+
|
| 174 |
+
[์๋ ์ง๋ฌธ]: {query_str}
|
| 175 |
+
[AI ๋ต๋ณ]:
|
| 176 |
+
"""
|
| 177 |
+
my_template = PromptTemplate(my_prompt_str)
|
| 178 |
+
|
| 179 |
+
query_engine = index.as_query_engine(
|
| 180 |
+
text_qa_template=my_template,
|
| 181 |
+
similarity_top_k=3
|
| 182 |
+
)
|
| 183 |
+
|
| 184 |
+
# 3. LLM์๊ฒ ์ ์ก (ํ์ผ ๋ด์ฉ์ด ํฌํจ๋ final_query๋ฅผ ๋ณด๋)
|
| 185 |
+
response = await query_engine.aquery(final_query)
|
| 186 |
+
|
| 187 |
+
source_nodes = []
|
| 188 |
+
for node in response.source_nodes:
|
| 189 |
+
source_nodes.append({
|
| 190 |
+
"score": round(node.score, 3),
|
| 191 |
+
"text": node.node.get_content().strip()[:100] + "..."
|
| 192 |
+
})
|
| 193 |
+
|
| 194 |
+
return {
|
| 195 |
+
"success": True,
|
| 196 |
+
"original_question": question,
|
| 197 |
+
"file_attached": file.filename if file else None,
|
| 198 |
+
"answer": str(response),
|
| 199 |
+
"sources": source_nodes
|
| 200 |
+
}
|
| 201 |
+
|
| 202 |
+
except Exception as e:
|
| 203 |
+
return {"success": False, "msg": str(e)}
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
@router.post("/query_stream_single")
|
| 208 |
+
async def query_llama(
|
| 209 |
+
question: str = Form(...),
|
| 210 |
+
file: Optional[UploadFile] = File(None)
|
| 211 |
+
):
|
| 212 |
+
try:
|
| 213 |
+
# 1. ์ธ๋ฑ์ค ์ค๋น
|
| 214 |
+
index = get_or_create_index()
|
| 215 |
+
|
| 216 |
+
# ---------------------------------------------------------
|
| 217 |
+
# ๐ ํ์ผ ์ฒ๋ฆฌ ๋ก์ง (๊ธฐ์กด๊ณผ ๋์ผ)
|
| 218 |
+
# ---------------------------------------------------------
|
| 219 |
+
file_content_str = ""
|
| 220 |
+
|
| 221 |
+
if file is not None:
|
| 222 |
+
save_path = f"./data/{file.filename}"
|
| 223 |
+
with open(save_path, "wb") as buffer:
|
| 224 |
+
shutil.copyfileobj(file.file, buffer)
|
| 225 |
+
|
| 226 |
+
loaded_docs = SimpleDirectoryReader(input_files=[save_path]).load_data()
|
| 227 |
+
extracted_text = "\n".join([doc.text for doc in loaded_docs])
|
| 228 |
+
file_content_str = f"\n\n[์ฌ์ฉ์๊ฐ ์ฒจ๋ถํ ํ์ผ ๋ด์ฉ]:\n{extracted_text}"
|
| 229 |
+
|
| 230 |
+
# ---------------------------------------------------------
|
| 231 |
+
# ๐ค ์ต์ข
์ง๋ฌธ ๊ตฌ์ฑ
|
| 232 |
+
# ---------------------------------------------------------
|
| 233 |
+
final_query = f"{question}{file_content_str}"
|
| 234 |
+
|
| 235 |
+
# 2. ํ๋กฌํํธ ํ
ํ๋ฆฟ ์ค์
|
| 236 |
+
my_prompt_str = """
|
| 237 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 238 |
+
๊ท์น:
|
| 239 |
+
- [์ ๋ณด]์ ์๋ ๋ด์ฉ์ ๋๊ฐ ์๊ณ ์๋ ํ๋ ๋ด์์ ๋ต๋ณํ๋ผ.
|
| 240 |
+
-- ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ
|
| 241 |
+
|
| 242 |
+
[์ ๋ณด]
|
| 243 |
+
---------------------
|
| 244 |
+
{context_str}
|
| 245 |
+
---------------------
|
| 246 |
+
|
| 247 |
+
[์๋ ์ง๋ฌธ]: {query_str}
|
| 248 |
+
[AI ๋ต๋ณ]:
|
| 249 |
+
"""
|
| 250 |
+
my_template = PromptTemplate(my_prompt_str)
|
| 251 |
+
|
| 252 |
+
# 3. ์ฟผ๋ฆฌ ์์ง ์์ฑ (์คํธ๋ฆฌ๋ฐ ์ต์
ํ์ฑํ)
|
| 253 |
+
query_engine = index.as_query_engine(
|
| 254 |
+
text_qa_template=my_template,
|
| 255 |
+
similarity_top_k=3,
|
| 256 |
+
streaming=True # ๐ [2] ํต์ฌ: ์ด๊ฒ ์ผ์ ธ์ผ ํ ๊ธ์์ฉ ๋์ต๋๋ค.
|
| 257 |
+
)
|
| 258 |
+
|
| 259 |
+
# 4. LLM์๊ฒ ์ ์ก (์คํธ๋ฆฌ๋ฐ ๊ฐ์ฒด ๋ฐํ)
|
| 260 |
+
response = await query_engine.aquery(final_query)
|
| 261 |
+
|
| 262 |
+
# 5. ์ ๋๋ ์ดํฐ ํจ์ ์ ์ (๋ฐ์ดํฐ๋ฅผ ์ชผ๊ฐ์ ๋ณด๋ด๋ ์ญํ )
|
| 263 |
+
async def event_generator():
|
| 264 |
+
# (1) ๋ต๋ณ ํ
์คํธ ์คํธ๋ฆฌ๋ฐ
|
| 265 |
+
async for token in response.async_response_gen():
|
| 266 |
+
yield token
|
| 267 |
+
|
| 268 |
+
# (2) ๋ต๋ณ์ด ๋๋๋ฉด ์์ค(์ถ์ฒ) ์ ๋ณด๋ ํ
์คํธ๋ก ๋ถ์ฌ์ ๋ณด๋ด๊ธฐ
|
| 269 |
+
# ์คํธ๋ฆฌ๋ฐ์ JSON ๊ตฌ์กฐ๋ฅผ ๋ณด๋ด๊ธฐ ํ๋ค๊ธฐ ๋๋ฌธ์, ํ
์คํธ ๋์ ๋ถ์ฌ์ฃผ๋ ๋ฐฉ์์ ๋ง์ด ์๋๋ค.
|
| 270 |
+
if response.source_nodes:
|
| 271 |
+
yield "\n\n-------------------\n[์ฐธ๊ณ ์๋ฃ]:\n"
|
| 272 |
+
for node in response.source_nodes:
|
| 273 |
+
# ์ ๋ขฐ๋ ์ ์์ ๋ด์ฉ ์ผ๋ถ๋ฅผ ํ
์คํธ๋ก ๋ณด๋
|
| 274 |
+
score = round(node.score, 3) if node.score else 0.0
|
| 275 |
+
content_preview = node.node.get_content().strip().replace("\n", " ")[:50]
|
| 276 |
+
yield f"- (์ ์: {score}) {content_preview}...\n"
|
| 277 |
+
|
| 278 |
+
# 6. StreamingResponse๋ก ๋ฐํ
|
| 279 |
+
return StreamingResponse(event_generator(), media_type="text/plain")
|
| 280 |
+
|
| 281 |
+
except Exception as e:
|
| 282 |
+
# ์คํธ๋ฆฌ๋ฐ ์์ ์ ์๋ฌ๊ฐ ๋๋ฉด JSON ์๋ฌ ๋ฐํ
|
| 283 |
+
return {"success": False, "msg": str(e)}
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
# [์ ์ญ] ์์ Gemini ์ธ์
์ ์ฅ์
|
| 289 |
+
# ๊ตฌ์กฐ ๋ณ๊ฒฝ: { "session_id": { "session": chat_object, "last_access": timestamp } }
|
| 290 |
+
native_chat_sessions = {}
|
| 291 |
+
|
| 292 |
+
# ์ธ์
๋ง๋ฃ ์๊ฐ (1์๊ฐ = 3600์ด)
|
| 293 |
+
SESSION_TIMEOUT = 3600
|
| 294 |
+
|
| 295 |
+
@router.post("/query_stream")
|
| 296 |
+
async def query_llama(
|
| 297 |
+
question: str = Form(...),
|
| 298 |
+
session_id: str = Form(...),
|
| 299 |
+
file: Optional[UploadFile] = File(None)
|
| 300 |
+
):
|
| 301 |
+
try:
|
| 302 |
+
current_time = time.time()
|
| 303 |
+
|
| 304 |
+
# ---------------------------------------------------------
|
| 305 |
+
# ๐งน 0. ์ธ์
์ฒญ์ (Garbage Collection)
|
| 306 |
+
# ---------------------------------------------------------
|
| 307 |
+
# ๋์
๋๋ฆฌ๋ฅผ ์ํํ๋ฉฐ ํ์์์๋ ์ธ์
์ญ์
|
| 308 |
+
# (๋์
๋๋ฆฌ ๋ณ๊ฒฝ ์ค ์ํ ์๋ฌ ๋ฐฉ์ง๋ฅผ ์ํด list๋ก ํค๋ฅผ ๋ณต์ฌํด์ ์ํ)
|
| 309 |
+
expired_sessions = [
|
| 310 |
+
sid for sid, data in native_chat_sessions.items()
|
| 311 |
+
if current_time - data["last_access"] > SESSION_TIMEOUT
|
| 312 |
+
]
|
| 313 |
+
|
| 314 |
+
for sid in expired_sessions:
|
| 315 |
+
del native_chat_sessions[sid]
|
| 316 |
+
print(f"๐๏ธ [System] ๋ง๋ฃ๋ ์ธ์
์ญ์ : {sid}")
|
| 317 |
+
|
| 318 |
+
# ---------------------------------------------------------
|
| 319 |
+
# 1. LlamaIndex๋ก ์ง์ ๊ฒ์ (RAG)
|
| 320 |
+
# ---------------------------------------------------------
|
| 321 |
+
index = get_or_create_index()
|
| 322 |
+
retriever = index.as_retriever(similarity_top_k=3)
|
| 323 |
+
nodes = await retriever.aretrieve(question)
|
| 324 |
+
context_str = "\n\n".join([n.node.get_content() for n in nodes])
|
| 325 |
+
|
| 326 |
+
# ์์คํ
ํ๋กฌํํธ (์บ๋ฆญํฐ ์ค์ )
|
| 327 |
+
system_prompt = """
|
| 328 |
+
๋๋ 2d ๋ฏธ์๋
์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.
|
| 329 |
+
๊ท์น:
|
| 330 |
+
1. [์ ๋ณด]์ ์๋ ๋ด์ฉ์ ๋๊ฐ ์๊ณ ์๋ ํ๋ ๋ด์์ ๋ต๋ณํ๋ผ.
|
| 331 |
+
2. ์ค๋ช
์ ์์ฃผ ์ฝ๊ฒ, ๋ฐ๋ง๋ก ์น์ ํ๊ฒ ๋๋ตํด.
|
| 332 |
+
3. ๋์ (์์) ๊ฐ์ ์ง๋ฌธ์ ๊ฐ๋ ์์ด์ค.
|
| 333 |
+
|
| 334 |
+
[ํ๋ ๋ฐ ๊ฐ์ ํํ ๊ท์น]
|
| 335 |
+
- ๋ต๋ณ์ ๋ถ์๊ธฐ์ ๋ง์ถฐ์ ์๋์ [ํ์ ํ๊ทธ]์ [๋์ ํ๊ทธ]๋ฅผ ๋ต๋ณ ์ค๊ฐ์ด๋ ๋์ ์ ์ ํ ์์ด์ ์ถ๋ ฅํด.
|
| 336 |
+
- ํ ๋ฌธ์ฅ์ ํ๊ทธ๋ฅผ 1~2๊ฐ ์ ๋ ์ฌ์ฉํด.
|
| 337 |
+
- ํ์์ ๋ฐ๋์ [[ํ๊ทธ๋ช
]] ์ฒ๋ผ ์ด์ค ๋๊ดํธ๋ฅผ ์ฌ์ฉํด.
|
| 338 |
+
|
| 339 |
+
[ํ์ ํ๊ทธ ๋ชฉ๋ก]
|
| 340 |
+
- ๊ธฐ์จ/์์: [[FaceSmile1]], [[FaceSmile2]]
|
| 341 |
+
- ํ๋จ: [[FaceAngry1]], [[FaceAngry2]]
|
| 342 |
+
- ์ฌํ/์ข์ : [[FaceSap]], [[FaceAshamed]]
|
| 343 |
+
- ๋นํฉ/ํผ๋: [[FaceConf]], [[FaceDistract]]
|
| 344 |
+
- ๊ธฐํ: [[FaceDefault]]
|
| 345 |
+
|
| 346 |
+
[๋์ ํ๊ทธ ๋ชฉ๋ก]
|
| 347 |
+
- ์ถฉ๊ฒฉ ๋ฐ์: [[DoDamage0]], [[DoDamage1]]
|
| 348 |
+
- ํจ๋ฐฐ/์ค๋ง: [[DoLose]], [[DoReflesh]]
|
| 349 |
+
- ๊ธฐ๋ถ ์ ํ/ํ๋ณต: [[DoJump]]
|
| 350 |
+
- ์ ํ/์ ๋จ: [[DoJump]]
|
| 351 |
+
- ๊ธฐํ: [[DoDefault]]
|
| 352 |
+
"""
|
| 353 |
+
|
| 354 |
+
# ์ฌ์ฉ์ ํ๋กฌํํธ ๊ตฌ์ฑ (RAG ์ ๋ณด + ์ง๋ฌธ)
|
| 355 |
+
user_prompt = f"""
|
| 356 |
+
[์ฐธ๊ณ ์ ๋ณด]
|
| 357 |
+
{context_str}
|
| 358 |
+
|
| 359 |
+
[์ฌ์ฉ์ ์ง๋ฌธ]
|
| 360 |
+
{question}
|
| 361 |
+
|
| 362 |
+
(์ ์ฐธ๊ณ ์ ๋ณด๋ฅผ ๋ฐํ์ผ๋ก ์ฌ์ฉ์์ ์ง๋ฌธ์ ๋ต๋ณํด์ค.)
|
| 363 |
+
"""
|
| 364 |
+
|
| 365 |
+
# ---------------------------------------------------------
|
| 366 |
+
# 3. ์ด๋ฏธ์ง ์ฒ๋ฆฌ (์์ Gemini ๋ฐฉ์ - PIL ์ฌ์ฉ)
|
| 367 |
+
# ---------------------------------------------------------
|
| 368 |
+
content_parts = []
|
| 369 |
+
|
| 370 |
+
if file:
|
| 371 |
+
file_bytes = await file.read()
|
| 372 |
+
image = Image.open(io.BytesIO(file_bytes))
|
| 373 |
+
print(f"๐ผ๏ธ [Gemini Native] ์ด๋ฏธ์ง ๋ก๋ ์๋ฃ: {file.filename}")
|
| 374 |
+
content_parts.append(image)
|
| 375 |
+
|
| 376 |
+
content_parts.append(user_prompt)
|
| 377 |
+
|
| 378 |
+
# ---------------------------------------------------------
|
| 379 |
+
# 4. ์์ Gemini ์ฑํ
์ธ์
๊ด๋ฆฌ (ํ์์คํฌํ ๊ฐฑ์ ํฌํจ)
|
| 380 |
+
# ---------------------------------------------------------
|
| 381 |
+
if session_id not in native_chat_sessions:
|
| 382 |
+
print(f"โจ [System] Gemini Native ์ธ์
์์ฑ: {session_id}")
|
| 383 |
+
model = genai.GenerativeModel(
|
| 384 |
+
model_name="gemini-2.5-flash",
|
| 385 |
+
system_instruction=system_prompt
|
| 386 |
+
)
|
| 387 |
+
# ์ธ์
๊ฐ์ฒด์ ๋ง๏ฟฝ๏ฟฝ๋ง ์ ์ ์๊ฐ์ ํจ๊ป ์ ์ฅ
|
| 388 |
+
native_chat_sessions[session_id] = {
|
| 389 |
+
"session": model.start_chat(history=[]),
|
| 390 |
+
"last_access": current_time
|
| 391 |
+
}
|
| 392 |
+
|
| 393 |
+
# ์ธ์
๊ฐ์ ธ์ค๊ธฐ ๋ฐ ์๊ฐ ๊ฐฑ์
|
| 394 |
+
session_data = native_chat_sessions[session_id]
|
| 395 |
+
session_data["last_access"] = current_time # ๊ฐฑ์ !
|
| 396 |
+
chat_session = session_data["session"]
|
| 397 |
+
|
| 398 |
+
# ---------------------------------------------------------
|
| 399 |
+
# 5. ๋ฉ์์ง ์ ์ก ๋ฐ ์คํธ๋ฆฌ๋ฐ ์๋ต
|
| 400 |
+
# ---------------------------------------------------------
|
| 401 |
+
response = chat_session.send_message(content_parts, stream=True)
|
| 402 |
+
|
| 403 |
+
async def event_generator():
|
| 404 |
+
for chunk in response:
|
| 405 |
+
if chunk.text:
|
| 406 |
+
yield chunk.text
|
| 407 |
+
|
| 408 |
+
if nodes:
|
| 409 |
+
yield "\n\n-------------------\n[์ฐธ๊ณ ์๋ฃ]:\n"
|
| 410 |
+
for node in nodes:
|
| 411 |
+
score = round(node.score, 3) if node.score else 0.0
|
| 412 |
+
content = node.node.get_content().strip().replace("\n", " ")[:50]
|
| 413 |
+
yield f"- (์ ์: {score}) {content}...\n"
|
| 414 |
+
|
| 415 |
+
return StreamingResponse(event_generator(), media_type="text/plain")
|
| 416 |
+
|
| 417 |
+
except Exception as e:
|
| 418 |
+
print(f"โ [Error] {e}")
|
| 419 |
+
return {"success": False, "msg": str(e)}
|
router/llamindex_router_bk_260120.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
import base64
|
| 4 |
+
import time
|
| 5 |
+
from typing import Optional, Dict
|
| 6 |
+
|
| 7 |
+
from fastapi import APIRouter, UploadFile, File, Form, HTTPException
|
| 8 |
+
from fastapi.responses import StreamingResponse
|
| 9 |
+
|
| 10 |
+
# ๐ฆ LangChain Imports
|
| 11 |
+
from langchain_huggingface import HuggingFaceEmbeddings
|
| 12 |
+
from langchain_community.vectorstores import FAISS
|
| 13 |
+
from langchain_community.document_loaders import DirectoryLoader, TextLoader, UnstructuredFileLoader
|
| 14 |
+
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
| 15 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
| 16 |
+
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
|
| 17 |
+
from langchain_core.messages import HumanMessage, SystemMessage
|
| 18 |
+
from langchain_core.runnables.history import RunnableWithMessageHistory
|
| 19 |
+
from langchain_community.chat_message_histories import ChatMessageHistory
|
| 20 |
+
from langchain_core.runnables import ConfigurableFieldSpec
|
| 21 |
+
|
| 22 |
+
router = APIRouter(tags=["LangChain_Refactor"])
|
| 23 |
+
|
| 24 |
+
# ==============================================================================
|
| 25 |
+
# โ๏ธ 1. ์ค์ ๋ฐ ์ด๊ธฐํ (Embedding & LLM)
|
| 26 |
+
# ==============================================================================
|
| 27 |
+
|
| 28 |
+
# 1-1. Embedding Model (์์ฒญํ์ ๋ก์ปฌ ๋ชจ๋ธ ๊ทธ๋๋ก ์ ์ง)
|
| 29 |
+
# huggingface-cli login ํน์ HF_TOKEN ํ๊ฒฝ๋ณ์ ํ์
|
| 30 |
+
hf_token = os.getenv("HF_TOKEN")
|
| 31 |
+
|
| 32 |
+
embedding_model = HuggingFaceEmbeddings(
|
| 33 |
+
model_name="google/embeddinggemma-300m",
|
| 34 |
+
model_kwargs={
|
| 35 |
+
"device": "cpu", # CPU ์ฌ์ฉ
|
| 36 |
+
"trust_remote_code": True,
|
| 37 |
+
"token": hf_token
|
| 38 |
+
},
|
| 39 |
+
encode_kwargs={"normalize_embeddings": True}
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
# 1-2. LLM Model (Gemini 2.5 Flash)
|
| 43 |
+
llm = ChatGoogleGenerativeAI(
|
| 44 |
+
model="gemini-2.5-flash",
|
| 45 |
+
temperature=0.1,
|
| 46 |
+
google_api_key=os.getenv("GOOGLE_API_KEY"),
|
| 47 |
+
convert_system_message_to_human=True
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
# ==============================================================================
|
| 51 |
+
# ๐พ 2. ๋ฐ์ดํฐ ์ ์ฅ์ (Vector DB & Session Memory)
|
| 52 |
+
# ==============================================================================
|
| 53 |
+
|
| 54 |
+
# ์ ์ญ ๋ฒกํฐ ์คํ ์ด (FAISS)
|
| 55 |
+
VECTOR_STORE = None
|
| 56 |
+
DATA_DIR = "./data"
|
| 57 |
+
|
| 58 |
+
# ์ธ์
์ ์ฅ์: { "session_id": { "history": ChatMessageHistory, "last_access": timestamp } }
|
| 59 |
+
SESSION_STORE: Dict[str, Dict] = {}
|
| 60 |
+
SESSION_TIMEOUT = 3600 # 1์๊ฐ
|
| 61 |
+
|
| 62 |
+
def init_vector_db():
|
| 63 |
+
"""์๋ฒ ์์ ์ ๋๋ ํ์ ์ ๋ฒกํฐ DB ์ด๊ธฐํ"""
|
| 64 |
+
global VECTOR_STORE
|
| 65 |
+
|
| 66 |
+
if not os.path.exists(DATA_DIR):
|
| 67 |
+
os.makedirs(DATA_DIR)
|
| 68 |
+
with open(f"{DATA_DIR}/readme.txt", "w", encoding="utf-8") as f:
|
| 69 |
+
f.write("Initialize data directory.")
|
| 70 |
+
|
| 71 |
+
# ๋ฌธ์ ๋ก๋
|
| 72 |
+
print("๐ [LangChain] ๋ฌธ์ ๋ก๋ฉ ๋ฐ ์ธ๋ฑ์ฑ ์์...")
|
| 73 |
+
# DirectoryLoader๋ ํด๋ ๋ด ํ์ผ๋ค์ ์ค์บํฉ๋๋ค.
|
| 74 |
+
loader = DirectoryLoader(DATA_DIR, glob="*", show_progress=True, loader_cls=TextLoader)
|
| 75 |
+
try:
|
| 76 |
+
docs = loader.load()
|
| 77 |
+
except Exception:
|
| 78 |
+
# ํ
์คํธ ํ์ผ์ด ์๋ ๊ฒฝ์ฐ๋ฅผ ๋๋นํด ์์ธ์ฒ๋ฆฌ (์ค์ ๋ก UnstructuredLoader ๋ฑ ์ฌ์ฉ ๊ถ์ฅ)
|
| 79 |
+
docs = []
|
| 80 |
+
|
| 81 |
+
if not docs:
|
| 82 |
+
print("โ ๏ธ [LangChain] ๋ฌธ์๊ฐ ์์ต๋๋ค. ๋น ์ธ๋ฑ์ค๋ฅผ ์์ฑํฉ๋๋ค.")
|
| 83 |
+
# ๋น ์ธ๋ฑ์ค ์์ฑ ํธ๋ฆญ
|
| 84 |
+
texts = ["Initial document"]
|
| 85 |
+
VECTOR_STORE = FAISS.from_texts(texts, embedding_model)
|
| 86 |
+
return
|
| 87 |
+
|
| 88 |
+
# ์ฒญํน (Chunking)
|
| 89 |
+
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
|
| 90 |
+
splits = text_splitter.split_documents(docs)
|
| 91 |
+
|
| 92 |
+
# FAISS ์ธ๋ฑ์ค ์์ฑ
|
| 93 |
+
VECTOR_STORE = FAISS.from_documents(splits, embedding_model)
|
| 94 |
+
print("โ
[LangChain] FAISS ์ธ๋ฑ์ค ์์ฑ ์๋ฃ!")
|
| 95 |
+
|
| 96 |
+
def get_session_history(session_id: str):
    """Return the ChatMessageHistory for `session_id`, creating a fresh entry
    when the session is unknown, and lazily evict any session whose last
    access is older than SESSION_TIMEOUT (garbage collection on every call).
    """
    now = time.time()

    # 1. Lazy GC: each incoming request sweeps out expired sessions.
    stale = [sid for sid, entry in SESSION_STORE.items()
             if now - entry["last_access"] > SESSION_TIMEOUT]
    for sid in stale:
        del SESSION_STORE[sid]
        print(f"๐๏ธ [System] Timeout ์ธ์ ์ญ์ : {sid}")

    # 2. Fetch the session entry, creating it on first sight.
    entry = SESSION_STORE.get(session_id)
    if entry is None:
        print(f"โจ [System] ์ ์ธ์ ์์ฑ: {session_id}")
        entry = {"history": ChatMessageHistory(), "last_access": now}
        SESSION_STORE[session_id] = entry

    # 3. Refresh the last-access timestamp on every lookup.
    entry["last_access"] = now

    return entry["history"]
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
# ==============================================================================
# 3. API Endpoints
# ==============================================================================

# NOTE(review): @router.on_event is deprecated in recent FastAPI releases in
# favour of lifespan handlers — confirm the pinned FastAPI version still
# supports it before upgrading.
@router.on_event("startup")
async def startup_event():
    # Build the FAISS index once, before the first request is served.
    init_vector_db()
|
| 132 |
+
|
| 133 |
+
@router.post("/upload")
async def upload_document(file: UploadFile = File(...)):
    """Persist an uploaded text document and index it into the live vector DB.

    The file is written under DATA_DIR and immediately split, embedded and
    added to the running FAISS index, so new content is searchable without a
    server restart.

    Returns:
        {"success": True, "message": ...} on success;
        {"success": False, "msg": <error text>} on failure (the legacy "msg"
        key is kept so existing clients do not break).
    """
    global VECTOR_STORE
    try:
        # Security: file.filename is client-controlled.  Strip any directory
        # components so a name like "../../etc/x" cannot escape DATA_DIR.
        safe_name = os.path.basename(file.filename or "upload.txt")
        save_path = f"{DATA_DIR}/{safe_name}"
        with open(save_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)

        print(f"๐พ [Upload] ์ ์ฅ ์๋ฃ: {save_path}")

        # Load & split with LangChain.  TextLoader assumes plain text; use
        # PyPDFLoader or similar for other formats.
        loader = TextLoader(save_path)
        docs = loader.load()

        text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
        splits = text_splitter.split_documents(docs)

        if VECTOR_STORE is None:
            VECTOR_STORE = FAISS.from_documents(splits, embedding_model)
        else:
            VECTOR_STORE.add_documents(splits)  # extend the running index in place

        return {"success": True, "message": "ํ์ผ์ด ์ง์ ๋ฒ ์ด์ค์ ์ถ๊ฐ๋์์ต๋๋ค."}

    except Exception as e:
        return {"success": False, "msg": str(e)}
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
@router.post("/query_stream")
async def query_stream(
    question: str = Form(...),
    session_id: str = Form(...),
    file: Optional[UploadFile] = File(None)
):
    """Stream a chat answer built from RAG context, per-session history and an
    optional image attachment.

    Form fields:
        question:   the user's question (plain text).
        session_id: key into SESSION_STORE selecting the conversation history.
        file:       optional image; forwarded to the LLM as a base64 data URL
                    (assumed JPEG — the data URL hardcodes image/jpeg; TODO
                    confirm callers only send JPEG).

    Returns a text/plain StreamingResponse: answer tokens first, then a short
    "sources" footer when retrieval produced documents.
    """
    global VECTOR_STORE

    # 1. RAG retrieval — collect context chunks for the system prompt.
    context_text = ""
    source_docs = []

    if VECTOR_STORE:
        # Top-3 nearest chunks from the FAISS index.
        retriever = VECTOR_STORE.as_retriever(search_kwargs={"k": 3})
        source_docs = retriever.invoke(question)
        context_text = "\n\n".join([doc.page_content for doc in source_docs])

    # 2. System prompt (persona rules + retrieved context).
    system_prompt_text = f"""
๋๋ 2d ๋ฏธ์๋ ์บ๋ฆญ ๋ฉ์ด๋ ๋น์๋ค.

[๊ท์น]
1. ์๋ [์ ๋ณด]๋ฅผ ๋ฐํ์ผ๋ก ๋ต๋ณํ๊ณ , ์์ผ๋ฉด ์๋ ๋๋ก ๋ตํด.
2. ์ค๋ช ์ ์์ฃผ ์ฝ๊ฒ, ๋ฐ๋ง๋ก ์น์ ํ๊ฒ.
3. ๋์ (์์) ๊ฐ์ ์ง๋ฌธ์ ์์ด์ค.

[๊ฐ์ /ํ๋ ํ๊ทธ ๊ท์น]
- ๋ต๋ณ ์ค๊ฐ/๋์ [[FaceSmile1]], [[DoJump]] ๊ฐ์ ํ๊ทธ๋ฅผ ์ฌ์ฉํด.

[์ ๋ณด]:
{context_text}
"""

    # 3. User message content (multimodal: text part + optional image part).
    user_content = [{"type": "text", "text": question}]

    if file:
        # Inline the image as a base64 data URL for the multimodal LLM.
        file_bytes = await file.read()
        encoded_image = base64.b64encode(file_bytes).decode("utf-8")
        user_content.append({
            "type": "image_url",
            "image_url": {"url": f"data:image/jpeg;base64,{encoded_image}"}
        })

    # 4. Prompt template: system rules, then prior turns, then the new turn.
    prompt = ChatPromptTemplate.from_messages([
        ("system", system_prompt_text),
        MessagesPlaceholder(variable_name="history"),  # conversation history is injected here
        ("human", "{user_content}"),
    ])

    # 5. Chain composition (Prompt -> LLM).
    chain = prompt | llm

    # 6. Wrap the chain so per-session history is read/written automatically.
    chain_with_history = RunnableWithMessageHistory(
        chain,
        get_session_history,
        input_messages_key="user_content",
        history_messages_key="history"
    )

    # 7. Streaming response generator.
    async def event_generator():
        # (1) Stream answer tokens as they arrive.
        async for token in chain_with_history.astream(
            {"user_content": user_content},
            config={"configurable": {"session_id": session_id}}
        ):
            # The chat model yields AIMessageChunk objects; emit only the text.
            yield token.content

        # (2) Append the retrieved-sources footer (first 50 chars per doc).
        if source_docs:
            yield "\n\n-------------------\n[์ฐธ๊ณ ์๋ฃ (LangChain RAG)]:\n"
            for doc in source_docs:
                preview = doc.page_content.replace("\n", " ")[:50]
                yield f"- {preview}...\n"

    return StreamingResponse(event_generator(), media_type="text/plain")
|