File size: 1,189 Bytes
af2a153
 
63c4985
af2a153
 
 
63c4985
af2a153
 
 
 
 
 
 
 
 
 
 
63c4985
 
 
af2a153
 
 
63c4985
 
af2a153
 
 
 
 
 
 
 
 
63c4985
af2a153
 
 
 
 
 
 
63c4985
 
af2a153
63c4985
af2a153
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
from typing import List, Literal, Optional, Tuple

from .groq_client import groq_chat
from .utils import get_answer

def build_prompt(question: str,
                mode: Literal["cot", "base"] = "base",
                exampler: Optional[List[Tuple[str, str]]] = None,
                zero_shot: bool = False
                ) -> str:
    """
    Build a Q/A-style prompt for the model, ending with "A:" to complete.

    Args:
        question: The question to ask the model.
        mode: "cot" enables few-shot chain-of-thought prompting when
            exemplars are supplied; "base" always yields a plain prompt.
        exampler: Optional (question, answer) exemplar pairs prepended to
            the prompt in few-shot "cot" mode. Ignored in "base" or
            zero-shot mode.
        zero_shot: When True in "cot" mode, skip the exemplars and emit a
            plain prompt.

    Returns:
        The formatted prompt string, ending with "A:" for the model to
        continue.
    """
    if mode == "cot" and not zero_shot and exampler:
        # Join exemplars with blank lines. The previous version appended
        # them with no separator at all, so each answer ran directly into
        # the next "Q:" (e.g. "...A:a1Q: q2..."), corrupting the few-shot
        # prompt the model sees.
        shots = "\n\n".join(f"Q: {q}\nA:{a}" for q, a in exampler)
        return f"{shots}\n\nQ: {question}\nA:"

    # Base mode, zero-shot CoT, or CoT with no exemplars: plain prompt.
    return f"Q: {question}\nA:"

def generate_answer(
        question: str,
        model_id: str = "llama3-8b-8192",
        temperature: float = 0.5,
        max_tokens: int = 200,
        mode: Literal["cot", "base"] = "base",
        exampler: Optional[List[Tuple[str, str]]] = None,
        zero_shot: bool = False
        ) -> Tuple[str, str]:
    """
    Query the Groq chat model for a question and extract its final answer.

    Args:
        question: The question to answer.
        model_id: Groq model identifier to query.
        temperature: Sampling temperature forwarded to the model.
        max_tokens: Maximum number of tokens to generate.
        mode: Prompting strategy forwarded to build_prompt ("cot" or "base").
        exampler: Optional (question, answer) exemplar pairs for few-shot
            "cot" mode.
        zero_shot: Forwarded to build_prompt; skips exemplars in "cot" mode.

    Returns:
        A (reasoning, last_line) tuple: the full model completion text and
        the answer extracted from it by get_answer.
    """
    prompt = build_prompt(question, mode, exampler, zero_shot)
    # Full completion text; may contain chain-of-thought reasoning in
    # "cot" mode, so the caller gets both the raw text and the parsed answer.
    reasoning = groq_chat(prompt, model_id, temperature, max_tokens)
    last_line = get_answer(reasoning)

    return reasoning, last_line