ABAO77 committed on
Commit
ab6f843
·
verified ·
1 Parent(s): 7365dcb

Upload 42 files

Browse files
src/apis/interfaces/__pycache__/chat_interface.cpython-311.pyc CHANGED
Binary files a/src/apis/interfaces/__pycache__/chat_interface.cpython-311.pyc and b/src/apis/interfaces/__pycache__/chat_interface.cpython-311.pyc differ
 
src/apis/interfaces/chat_interface.py CHANGED
@@ -7,7 +7,6 @@ class ChatBody(BaseModel):
7
  history: Optional[list] = Field(None, title="Chat history")
8
  language: Optional[str] = Field("en", title="Language")
9
  topic: Optional[str] = Field("education", title="Topic")
10
- filter: Optional[dict] = Field(None, title="Filter")
11
 
12
  model_config = {
13
  "json_schema_extra": {
@@ -22,7 +21,31 @@ class ChatBody(BaseModel):
22
  ],
23
  "language": "Vietnamese",
24
  "topic": "education",
25
- "filter": {"session_number": 1, "lesson_id": "L01"},
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
  }
27
  }
28
  }
 
7
  history: Optional[list] = Field(None, title="Chat history")
8
  language: Optional[str] = Field("en", title="Language")
9
  topic: Optional[str] = Field("education", title="Topic")
 
10
 
11
  model_config = {
12
  "json_schema_extra": {
 
21
  ],
22
  "language": "Vietnamese",
23
  "topic": "education",
24
+ }
25
+ }
26
+ }
27
+
28
+
29
+ class PrimaryChatBody(ChatBody):
30
+ pass
31
+
32
+
33
+ class TutorChatBody(ChatBody):
34
+ filter: Optional[dict] = Field(None, title="Filter")
35
+ model_config = {
36
+ "json_schema_extra": {
37
+ "example": {
38
+ "query": "Vai trò của tri thức lịch sử là gì",
39
+ "history": [
40
+ {"content": "Môn này là gì", "type": "human"},
41
+ {
42
+ "content": "Lịch sử về Châu Á",
43
+ "type": "ai",
44
+ },
45
+ ],
46
+ "language": "Vietnamese",
47
+ "topic": "education",
48
+ "filter": {"lesson_id": "L01"},
49
  }
50
  }
51
  }
src/apis/routers/__pycache__/chat_router.cpython-311.pyc CHANGED
Binary files a/src/apis/routers/__pycache__/chat_router.cpython-311.pyc and b/src/apis/routers/__pycache__/chat_router.cpython-311.pyc differ
 
src/apis/routers/chat_router.py CHANGED
@@ -1,7 +1,12 @@
1
  from fastapi import APIRouter, status, Depends
2
  from fastapi.responses import JSONResponse
3
  from typing import Annotated
4
- from src.apis.interfaces.chat_interface import ChatBody, HighlightExplainBody
 
 
 
 
 
5
  from src.apis.interfaces.entrance_eval_interface import TestResultsBody
6
  from src.agents.primary_chatbot.flow import rag_accuracy, rag_speed
7
  from src.agents.highlight_explain_agent.flow import highlight_workflow
@@ -10,26 +15,25 @@ from src.agents.entrance_eval_agent.flow import entrance_eval_agent
10
  router = APIRouter(prefix="/ai", tags=["AI"])
11
 
12
 
13
- @router.post("/rag_accuracy")
14
- async def primary_chat_accuracy(body: ChatBody):
15
  print("topic", body.topic)
16
- response = await rag_accuracy.ainvoke(
17
  {
18
  "user_query": body.query,
19
  "messages_history": body.history,
20
  "language": body.language,
21
  "topic": body.topic,
22
- "filter": body.filter,
23
  }
24
  )
25
  final_response = response["llm_response"]
26
  return JSONResponse(status_code=status.HTTP_200_OK, content=final_response)
27
 
28
 
29
- @router.post("/rag_speed")
30
- async def primary_chat_speed(body: ChatBody):
31
- print("filter", body.filter)
32
- response = await rag_speed.ainvoke(
33
  {
34
  "user_query": body.query,
35
  "messages_history": body.history,
 
1
  from fastapi import APIRouter, status, Depends
2
  from fastapi.responses import JSONResponse
3
  from typing import Annotated
4
+ from src.apis.interfaces.chat_interface import (
5
+ PrimaryChatBody,
6
+ ChatBody,
7
+ HighlightExplainBody,
8
+ TutorChatBody,
9
+ )
10
  from src.apis.interfaces.entrance_eval_interface import TestResultsBody
11
  from src.agents.primary_chatbot.flow import rag_accuracy, rag_speed
12
  from src.agents.highlight_explain_agent.flow import highlight_workflow
 
15
  router = APIRouter(prefix="/ai", tags=["AI"])
16
 
17
 
18
+ @router.post("/primary_chat")
19
+ async def primary_chat(body: PrimaryChatBody):
20
  print("topic", body.topic)
21
+ response = await rag_speed.ainvoke(
22
  {
23
  "user_query": body.query,
24
  "messages_history": body.history,
25
  "language": body.language,
26
  "topic": body.topic,
27
+ "filter": None,
28
  }
29
  )
30
  final_response = response["llm_response"]
31
  return JSONResponse(status_code=status.HTTP_200_OK, content=final_response)
32
 
33
 
34
+ @router.post("/tutor_chat")
35
+ async def tutor_chat(body: TutorChatBody):
36
+ response = await rag_accuracy.ainvoke(
 
37
  {
38
  "user_query": body.query,
39
  "messages_history": body.history,