Dinesh310 committed on
Commit
f35d7bb
·
verified ·
1 Parent(s): f68c145

Update src/node/nodes.py

Browse files
Files changed (1) hide show
  1. src/node/nodes.py +64 -62
src/node/nodes.py CHANGED
@@ -1,63 +1,65 @@
1
- """LangGraph nodes for RAG workflow"""
2
-
3
- from src.state.rag_state import RAGState
4
-
5
class RAGNodes:
    """Node functions wired into the RAG workflow graph."""

    def __init__(self, retriever, llm):
        """Keep references to the components the nodes need.

        Args:
            retriever: Document retriever instance.
            llm: Language model instance.
        """
        self.retriever = retriever
        self.llm = llm

    def retrieve_docs(self, state: RAGState) -> RAGState:
        """Look up documents relevant to the question in *state*.

        Args:
            state: Current RAG state.

        Returns:
            A fresh RAGState carrying the question and the retrieved documents.
        """
        matches = self.retriever.invoke(state.question)
        return RAGState(question=state.question, retrieved_docs=matches)

    def generate_answer(self, state: RAGState) -> RAGState:
        """Ask the LLM to answer the question from the retrieved documents.

        Args:
            state: Current RAG state with retrieved documents populated.

        Returns:
            A fresh RAGState that also carries the generated answer.
        """
        # Stitch the document bodies together to form the prompt context.
        # NOTE(review): assumes every retrieved doc exposes `page_content`
        # (LangChain Document convention) — confirm against the retriever.
        context = "\n\n".join(doc.page_content for doc in state.retrieved_docs)

        # Build the grounding prompt around the context and question.
        prompt = f"""Answer the question based on the context.

Context:
{context}

Question: {state.question}"""

        # Invoke the model and unwrap its message content.
        response = self.llm.invoke(prompt)

        return RAGState(
            question=state.question,
            retrieved_docs=state.retrieved_docs,
            answer=response.content,
        )
 
1
+ """LangGraph nodes for RAG workflow"""
2
+
3
+ from src.state.rag_state import RAGState
4
+
5
class RAGNodes:
    """Node functions wired into the RAG workflow graph."""

    def __init__(self, retriever, llm):
        """Keep references to the components the nodes need.

        Args:
            retriever: Document retriever instance.
            llm: Language model instance.
        """
        self.retriever = retriever
        self.llm = llm

    def retrieve_docs(self, state: RAGState) -> RAGState:
        """Look up documents relevant to the question in *state*.

        Args:
            state: Current RAG state.

        Returns:
            A fresh RAGState carrying the question and the retrieved documents.
        """
        matches = self.retriever.invoke(state.question)
        return RAGState(question=state.question, retrieved_docs=matches)

    def generate_answer(self, state: RAGState) -> RAGState:
        """Ask the LLM to answer the question from the retrieved documents.

        Args:
            state: Current RAG state with retrieved documents populated.

        Returns:
            A fresh RAGState that also carries the generated answer.
        """
        # Stitch the document bodies together to form the prompt context.
        # NOTE(review): assumes every retrieved doc exposes `page_content`
        # (LangChain Document convention) — confirm against the retriever.
        context = "\n\n".join(doc.page_content for doc in state.retrieved_docs)

        # Persona + grounding instructions around the context and question.
        prompt = f"""You are a professional Project Analyst.
Answer strictly using the context.
If unknown, say you don't know.

Context:
{context}

Question: {state.question}"""

        # Invoke the model and unwrap its message content.
        response = self.llm.invoke(prompt)

        return RAGState(
            question=state.question,
            retrieved_docs=state.retrieved_docs,
            answer=response.content,
        )