ABAO77 committed on
Commit
0cfb154
·
verified ·
1 Parent(s): 83a18c0

Upload 52 files

Browse files
src/agents/highlight_explain_agent/__pycache__/flow.cpython-311.pyc CHANGED
Binary files a/src/agents/highlight_explain_agent/__pycache__/flow.cpython-311.pyc and b/src/agents/highlight_explain_agent/__pycache__/flow.cpython-311.pyc differ
 
src/agents/highlight_explain_agent/__pycache__/func.cpython-311.pyc CHANGED
Binary files a/src/agents/highlight_explain_agent/__pycache__/func.cpython-311.pyc and b/src/agents/highlight_explain_agent/__pycache__/func.cpython-311.pyc differ
 
src/agents/highlight_explain_agent/__pycache__/prompt.cpython-311.pyc CHANGED
Binary files a/src/agents/highlight_explain_agent/__pycache__/prompt.cpython-311.pyc and b/src/agents/highlight_explain_agent/__pycache__/prompt.cpython-311.pyc differ
 
src/agents/highlight_explain_agent/flow.py CHANGED
@@ -1,27 +0,0 @@
1
- from langgraph.graph import StateGraph, START, END
2
- from .func import State, highlight_explain
3
- from langgraph.graph.state import CompiledStateGraph
4
-
5
-
6
- class HighlightExplainAgent:
7
- def __init__(self):
8
- self.builder = StateGraph(State)
9
-
10
- @staticmethod
11
- def routing(state: State):
12
- pass
13
-
14
- def node(self):
15
- self.builder.add_node("highlight_explain", highlight_explain)
16
-
17
- def edge(self):
18
- self.builder.add_edge(START, "highlight_explain")
19
- self.builder.add_edge("highlight_explain", END)
20
-
21
- def __call__(self) -> CompiledStateGraph:
22
- self.node()
23
- self.edge()
24
- return self.builder.compile()
25
-
26
-
27
- highlight_workflow = HighlightExplainAgent()()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
src/agents/highlight_explain_agent/func.py CHANGED
@@ -1,33 +1,57 @@
1
  from typing import TypedDict, AnyStr
2
 
3
- from .prompt import highlight_explain_chain
4
 
5
 
6
- class State(TypedDict):
7
- domain: AnyStr
8
- highlight_terms: AnyStr
9
- before_highlight_paragraph: AnyStr
10
- after_highlight_paragraph: AnyStr
11
- question: AnyStr
12
- explanation: AnyStr
13
- language: AnyStr
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
15
 
16
- async def highlight_explain(state: State):
 
 
 
 
 
 
 
17
  adjacent_paragraphs = (
18
- state["before_highlight_paragraph"]
19
  + "**"
20
- + state["highlight_terms"]
21
  + "**"
22
- + state["after_highlight_paragraph"]
23
  )
24
- response = await highlight_explain_chain.ainvoke(
25
  {
26
- "domain": state["domain"],
27
- "highlight_terms": state["highlight_terms"],
28
  "adjacent_paragraphs": adjacent_paragraphs,
29
- "question": state["question"],
30
- "language": state["language"],
31
  }
32
  )
33
- return {"explanation": response["explanation"]}
 
1
  from typing import TypedDict, AnyStr
2
 
3
+ from .prompt import highlight_explain_chain, highlight_explain_question_generate_chain
4
 
5
 
6
+ async def highlight_explain(
7
+ domain,
8
+ question,
9
+ highlight_terms,
10
+ before_highlight_paragraph,
11
+ after_highlight_paragraph,
12
+ language,
13
+ ):
14
+ adjacent_paragraphs = (
15
+ before_highlight_paragraph
16
+ + "**"
17
+ + highlight_terms
18
+ + "**"
19
+ + after_highlight_paragraph
20
+ )
21
+ response = await highlight_explain_chain.ainvoke(
22
+ {
23
+ "domain": domain,
24
+ "highlight_terms": highlight_terms,
25
+ "adjacent_paragraphs": adjacent_paragraphs,
26
+ "question": question,
27
+ "language": language,
28
+ }
29
+ )
30
+ return response.explanation
31
 
32
 
33
+ async def highlight_explain_question_generate(
34
+ domain,
35
+ question,
36
+ highlight_terms,
37
+ before_highlight_paragraph,
38
+ after_highlight_paragraph,
39
+ language,
40
+ ):
41
  adjacent_paragraphs = (
42
+ before_highlight_paragraph
43
  + "**"
44
+ + highlight_terms
45
  + "**"
46
+ + after_highlight_paragraph
47
  )
48
+ response = await highlight_explain_question_generate_chain.ainvoke(
49
  {
50
+ "domain": domain,
51
+ "highlight_terms": highlight_terms,
52
  "adjacent_paragraphs": adjacent_paragraphs,
53
+ "question": question,
54
+ "language": language,
55
  }
56
  )
57
+ return response.questions
src/agents/highlight_explain_agent/prompt.py CHANGED
@@ -1,13 +1,21 @@
1
  from pydantic import BaseModel, Field
2
  from langchain_core.prompts import ChatPromptTemplate
3
- from typing import Literal, Annotated, AnyStr, TypedDict
4
  from src.config.llm import llm_2_0 as llm
5
 
6
 
7
- class HighlightExplain(TypedDict):
8
  """Explain the highlight terms in a concise and easy to understand manner."""
9
 
10
- explanation: Annotated[AnyStr, "The explanation of the highlight terms."]
 
 
 
 
 
 
 
 
11
 
12
 
13
  highlight_explain_prompt = ChatPromptTemplate(
@@ -33,7 +41,33 @@ The explanation must be primarily in {language}. But you can use {domain} domain
33
  ]
34
  )
35
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
 
37
  highlight_explain_chain = highlight_explain_prompt | llm.with_structured_output(
38
  HighlightExplain
39
  )
 
 
 
 
 
1
  from pydantic import BaseModel, Field
2
  from langchain_core.prompts import ChatPromptTemplate
3
+ from typing import List
4
  from src.config.llm import llm_2_0 as llm
5
 
6
 
7
+ class HighlightExplain(BaseModel):
8
  """Explain the highlight terms in a concise and easy to understand manner."""
9
 
10
+ explanation: str = Field(description="The explanation of the highlight terms.")
11
+
12
+
13
+ class HighlightExplainQuestionGenerate(BaseModel):
14
+ """Gợi ý 3 câu hỏi liên quan"""
15
+
16
+ questions: List[str] = Field(
17
+ description="Các câu hỏi gợi ý liên quan"
18
+ )
19
 
20
 
21
  highlight_explain_prompt = ChatPromptTemplate(
 
41
  ]
42
  )
43
 
44
+ highlight_explain_question_generate_prompt = ChatPromptTemplate(
45
+ [
46
+ (
47
+ "system",
48
+ """Bạn là chuyên gia gợi ý câu hỏi ở lĩnh vực {domain}.
49
+ Bạn được cung cấp với các từ khóa được nhấn và các đoạn văn xung quanh từ khóa.
50
+ Bạn cần gợi ý các câu hỏi phù hợp với các từ khóa được nhấn và các đoạn văn xung quanh từ khóa.
51
+ Gen ra 3 gợi ý
52
+
53
+ Câu hỏi phải được viết chủ yếu bằng {language}. Nhưng bạn có thể sử dụng các từ khóa trong lĩnh vực {domain}.
54
+ """,
55
+ ),
56
+ (
57
+ "human",
58
+ """
59
+ Highlight terms: {highlight_terms}
60
+ Câu xung quanh highlight terms: {adjacent_paragraphs}
61
+ """,
62
+ ),
63
+ ]
64
+ )
65
+
66
 
67
  highlight_explain_chain = highlight_explain_prompt | llm.with_structured_output(
68
  HighlightExplain
69
  )
70
+ highlight_explain_question_generate_chain = (
71
+ highlight_explain_question_generate_prompt
72
+ | llm.with_structured_output(HighlightExplainQuestionGenerate)
73
+ )
src/agents/primary_chatbot/__pycache__/prompt.cpython-311.pyc CHANGED
Binary files a/src/agents/primary_chatbot/__pycache__/prompt.cpython-311.pyc and b/src/agents/primary_chatbot/__pycache__/prompt.cpython-311.pyc differ
 
src/agents/primary_chatbot/prompt.py CHANGED
@@ -2,7 +2,6 @@ from pydantic import BaseModel, Field
2
  from langchain_core.prompts import ChatPromptTemplate
3
  from typing import Literal
4
 
5
- # from src.config.llm import llm_2_0 as llm
6
  from src.config.llm import llm_2_0 as llm
7
  from typing import Optional
8
 
 
2
  from langchain_core.prompts import ChatPromptTemplate
3
  from typing import Literal
4
 
 
5
  from src.config.llm import llm_2_0 as llm
6
  from typing import Optional
7
 
src/apis/routers/__pycache__/chat_router.cpython-311.pyc CHANGED
Binary files a/src/apis/routers/__pycache__/chat_router.cpython-311.pyc and b/src/apis/routers/__pycache__/chat_router.cpython-311.pyc differ
 
src/apis/routers/chat_router.py CHANGED
@@ -9,7 +9,10 @@ from src.apis.interfaces.chat_interface import (
9
  )
10
  from src.apis.interfaces.entrance_eval_interface import TestResultsBody
11
  from src.agents.primary_chatbot.flow import primary_chat_agent, tutor_chat_agent
12
- from src.agents.highlight_explain_agent.flow import highlight_workflow
 
 
 
13
  from src.agents.entrance_eval_agent.flow import entrance_eval_agent
14
 
15
  router = APIRouter(prefix="/ai", tags=["AI"])
@@ -48,19 +51,29 @@ async def tutor_chat(body: TutorChatBody):
48
 
49
 
50
  @router.post("/highlight_explain")
51
- async def highlight_explain(body: HighlightExplainBody):
52
- response = await highlight_workflow.ainvoke(
53
- {
54
- "domain": body.domain,
55
- "question": body.question,
56
- "highlight_terms": body.highlight_terms,
57
- "before_highlight_paragraph": body.before_highlight_paragraph,
58
- "after_highlight_paragraph": body.after_highlight_paragraph,
59
- "language": body.language,
60
- }
61
  )
62
- final_response = response["explanation"]
63
- return JSONResponse(status_code=status.HTTP_200_OK, content=final_response)
 
 
 
 
 
 
 
 
 
 
 
 
64
 
65
 
66
  @router.post("/entrance_eval")
 
9
  )
10
  from src.apis.interfaces.entrance_eval_interface import TestResultsBody
11
  from src.agents.primary_chatbot.flow import primary_chat_agent, tutor_chat_agent
12
+ from src.agents.highlight_explain_agent.func import (
13
+ highlight_explain,
14
+ highlight_explain_question_generate,
15
+ )
16
  from src.agents.entrance_eval_agent.flow import entrance_eval_agent
17
 
18
  router = APIRouter(prefix="/ai", tags=["AI"])
 
51
 
52
 
53
  @router.post("/highlight_explain")
54
+ async def highlight_explain_router(body: HighlightExplainBody):
55
+ response = await highlight_explain(
56
+ body.domain,
57
+ body.question,
58
+ body.highlight_terms,
59
+ body.before_highlight_paragraph,
60
+ body.after_highlight_paragraph,
61
+ body.language,
 
 
62
  )
63
+ return JSONResponse(status_code=status.HTTP_200_OK, content=response)
64
+
65
+
66
+ @router.post("/highlight_explain_question_generate")
67
+ async def highlight_explain_question_generate_router(body: HighlightExplainBody):
68
+ response = await highlight_explain_question_generate(
69
+ body.domain,
70
+ body.question,
71
+ body.highlight_terms,
72
+ body.before_highlight_paragraph,
73
+ body.after_highlight_paragraph,
74
+ body.language,
75
+ )
76
+ return JSONResponse(status_code=status.HTTP_200_OK, content=response)
77
 
78
 
79
  @router.post("/entrance_eval")
src/config/__pycache__/llm.cpython-311.pyc CHANGED
Binary files a/src/config/__pycache__/llm.cpython-311.pyc and b/src/config/__pycache__/llm.cpython-311.pyc differ