# MossaicMan's picture
# Upload 33 files
# 345000b verified
# raw
# history blame contribute delete
# 747 Bytes
from langchain_mistralai import ChatMistralAI
from langchain_core.messages import SystemMessage, HumanMessage
from dotenv import load_dotenv
load_dotenv()
# -------------------------------------------------
# Initialize Mistral (API-based, no local model)
# -------------------------------------------------
# Shared chat-model client for this module. API-based: the Mistral key is
# read from the environment (populated above by load_dotenv()), so no local
# model weights are needed. Low temperature keeps answers mostly deterministic.
llm = ChatMistralAI(model="mistral-small-latest", temperature=0.2)
# -------------------------------------------------
# LLM Runner (LangChain-native)
# -------------------------------------------------
def run_llm(prompt: str) -> str:
    """Send *prompt* to the module-level Mistral chat model and return the reply text.

    Args:
        prompt: User text, forwarded verbatim as a single ``HumanMessage``.

    Returns:
        The model response's ``content`` attribute.
        NOTE(review): for some providers/multimodal replies ``.content`` can be
        a list of content parts rather than ``str`` — confirm against usage.
    """
    # Single-turn call: no system message, no history.
    response = llm.invoke([
        HumanMessage(content=prompt),
    ])
    # Removed leftover debug print ("the type of the reponse ...") that wrote
    # to stdout on every call.
    return response.content