File size: 747 Bytes
345000b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
from langchain_mistralai import ChatMistralAI
from langchain_core.messages import SystemMessage, HumanMessage
from dotenv import load_dotenv

# Load environment variables from a local .env file.
# NOTE(review): presumably this supplies MISTRAL_API_KEY for the client
# below — confirm the .env contents.
load_dotenv()

# -------------------------------------------------
# Initialize Mistral (API-based, no local model)
# -------------------------------------------------

# Module-level chat client shared by run_llm(). Low temperature (0.2)
# keeps outputs mostly deterministic.
llm = ChatMistralAI(
    model="mistral-small-latest",
    temperature=0.2,
)

# -------------------------------------------------
# LLM Runner (LangChain-native)
# -------------------------------------------------

def run_llm(prompt: str) -> str:
    """Send ``prompt`` to the module-level Mistral chat model and return its reply.

    Parameters
    ----------
    prompt : str
        User message forwarded verbatim as a single ``HumanMessage``.

    Returns
    -------
    str
        The model's response content.
        NOTE(review): ``response.content`` can be a list of content parts for
        some LangChain chat models — confirm ChatMistralAI always returns str.
    """
    # Removed leftover debug print ("the type of the reponse ...") that
    # wrote to stdout on every call.
    response = llm.invoke([
        HumanMessage(content=prompt),
    ])
    return response.content