File size: 1,271 Bytes
cd3cabd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5e94c26
cd3cabd
 
 
 
 
 
 
 
 
 
 
531ae36
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
from transformers import pipeline
from langchain_huggingface import HuggingFacePipeline
from langchain.prompts import PromptTemplate
from transformers.utils.logging import set_verbosity_error

## Model setup
# Silence transformers' informational logging so only errors surface.
set_verbosity_error()

# Text-generation model used for step-by-step math solving.
# NOTE(review): device=0 assumes a CUDA GPU is present — confirm, or consider
# device_map="auto" / device=-1 for CPU-only environments.
math_pipeline = pipeline(
    "text-generation",
    model="microsoft/phi-2",  # alternative tried: hjskhan/gemma-2b-fine-tuned-math
    device=0,
    max_new_tokens=256,  # large enough for a full worked explanation
    temperature=0.7,
    do_sample=True,
)

# Wrap the raw HF pipeline so it can participate in a LangChain runnable chain.
math_solver = HuggingFacePipeline(pipeline=math_pipeline)

# Extractive QA model, run on CPU (device=-1).
qa_pipeline = pipeline(
    "question-answering",
    model="bert-large-uncased-whole-word-masking-finetuned-squad",
    device=-1,
)

# Prompt that forces step-by-step reasoning in the generated solution.
math_template = PromptTemplate.from_template(
    "You are a math and physics tutor with great didactic methods. Solve the following problem step-by-step and explain clearly:\n\n{problem}\n\nSolution:"
)

# Chain definition: prompt template -> LLM.
math_chain = math_template | math_solver

def ask_math_problem(problem):
    """Solve a math problem via the configured LLM chain.

    Parameters
    ----------
    problem : str
        The math/physics problem statement to solve.

    Returns
    -------
    str
        The model's generated step-by-step solution text.
    """
    # Delegate straight to the prompt -> model chain.
    return math_chain.invoke({"problem": problem})