File size: 656 Bytes
f154798
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
from loggers.logger import logger
from template.response_prompt import GENERATE_CHAIN

import sys
sys.dont_write_bytecode = True

def generate(state):
    """
    Generate the final answer for the current graph state.

    Args:
        state (dict): The current graph state. Must contain 'question';
            may optionally contain 'context' (defaults to " " when absent).

    Returns:
        dict: A partial state update with one new key, 'generation',
        holding the output of GENERATE_CHAIN.
    """
    logger.info("STEP: GENERATING FINAL RESPONSE")

    question = state['question']

    # A missing 'context' key is an expected case (e.g. retrieval was
    # skipped) — default it explicitly instead of a bare `except:` that
    # would also swallow KeyboardInterrupt/SystemExit and real bugs.
    context = state.get('context', " ")

    generation = GENERATE_CHAIN.invoke(
        {
            "context": context,
            "question": question,
        }
    )
    return {
        "generation": generation
    }