from fastapi import FastAPI
from pydantic import BaseModel, Field
from typing import Optional
from fastapi.middleware.cors import CORSMiddleware
from graph import app as workflow
from fastapi.responses import JSONResponse
from langchain_core.messages import HumanMessage, AIMessage
# FastAPI application; the interactive Swagger docs are served at the root path.
app = FastAPI(docs_url="/")

# Wide-open CORS policy: any origin, method, and header is accepted.
# NOTE(review): fine for development — consider restricting origins in production.
app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],
    allow_headers=["*"],
    allow_methods=["*"],
)
class Item(BaseModel):
    """Request body for the /chat endpoint.

    Carries the full conversation as a list of message dicts plus the
    language the reply should be produced in.
    """

    # Each element is a dict with "type" ("human" or "ai") and "content" keys;
    # see convert_message for how they are turned into LangChain messages.
    messages: list = Field(..., description="List of messages")
    # BCP-47-ish language code; defaults to Vietnamese ("vi").
    language: Optional[str] = Field("vi", description="Language of the messages")

    class Config:
        # Example payload shown in the OpenAPI/Swagger docs.
        json_schema_extra = {
            "example": {
                "messages": [
                    {"type": "human", "content": "Chào bạn"},
                    {"type": "ai", "content": "Bạn muốn tìm job gì?"},
                    {
                        "type": "human",
                        "content": "Tôi muốn tìm job dạy học cho trung cấp",
                    },
                ],
                "language": "vi",
            }
        }
def convert_message(messages):
    """Convert raw message dicts into LangChain message objects.

    Args:
        messages: Iterable of dicts, each with a "type" key and a "content"
            key. Type "human" maps to HumanMessage; every other type is
            treated as an AI turn and mapped to AIMessage (matching the
            original behavior — there is no separate handling for e.g.
            "system" messages).

    Returns:
        A list of HumanMessage/AIMessage objects in the original order.
    """
    return [
        HumanMessage(content=m["content"])
        if m["type"] == "human"
        else AIMessage(content=m["content"])
        for m in messages
    ]
@app.post("/chat")
async def Chat(item: Item):
    """Run the LangGraph workflow on the latest user message.

    The last element of ``item.messages`` is taken as the current user query;
    everything before it is passed as conversation history.

    Returns:
        200 with the workflow's ``llm_response`` on success,
        400 if the message list is empty,
        500 with the error text if the workflow raises.
    """
    # Guard: an empty conversation has no query to answer — previously this
    # surfaced as an IndexError turned into an opaque 500.
    if not item.messages:
        return JSONResponse(
            content={"error": "messages must not be empty"}, status_code=400
        )
    messages = convert_message(item.messages)
    # Everything except the final (current) message is history; slicing a
    # one-element list already yields [], so no length check is needed.
    history = messages[:-1]
    try:
        response = workflow.invoke(
            {
                "user_query": messages[-1],
                "messages_history": history,
                "language": item.language,
            }
        )["llm_response"]
        return JSONResponse(content=response, status_code=200)
    except Exception as e:
        # Top-level API boundary: report the failure instead of crashing the app.
        return JSONResponse(content={"error": str(e)}, status_code=500)
if __name__ == "__main__":
    # Local import so uvicorn is only required when running as a script.
    import uvicorn
    # Dev server; uvicorn defaults to http://127.0.0.1:8000.
    uvicorn.run("app:app")