Update app/llm.py
Browse files- app/llm.py +9 -0
app/llm.py
CHANGED
|
@@ -123,7 +123,16 @@ llm_router = APIRouter(prefix="/llm")
|
|
| 123 |
@llm_router.get("/health", tags=["llm"])
def health():
    """Liveness probe: report that the LLM router is up and serving."""
    return dict(status="ok")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 126 |
|
|
|
|
| 127 |
# Chat Completion API
|
| 128 |
@llm_router.post("/chat/", tags=["llm"])
|
| 129 |
async def chat(chatm:ChatModel):#, user: schemas.BaseUser = fastapi.Depends(current_active_user)):
|
|
|
|
| 123 |
@llm_router.get("/health", tags=["llm"])
|
| 124 |
def health():
|
| 125 |
return {"status": "ok"}
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
# RAG Chat API
@llm_router.post("/rag/", tags=["llm"])
async def ragchat(chatm: ChatModel):  # , user: schemas.BaseUser = fastapi.Depends(current_active_user)):
    """Answer ``chatm.question`` via retrieval-augmented generation.

    Delegates to ``RagChat().chat`` and returns its result as the HTTP
    response body.
    """
    # BUG FIX: the original read `chatml.question` — an undefined name
    # (the parameter is `chatm`) — which would raise NameError on every call.
    r = RagChat().chat(chatm.question)
    print(r)
    # BUG FIX: the original never returned the answer, so the endpoint
    # responded with null; return the RAG result to the caller.
    return r
|
| 134 |
|
| 135 |
+
|
| 136 |
# Chat Completion API
|
| 137 |
@llm_router.post("/chat/", tags=["llm"])
|
| 138 |
async def chat(chatm:ChatModel):#, user: schemas.BaseUser = fastapi.Depends(current_active_user)):
|