from langchain_groq import ChatGroq
import os
from dotenv import load_dotenv
class GroqLLM:
    """Factory for Groq-hosted chat models configured from the environment.

    Reads ``GROQ_API_KEY`` from the environment (``.env`` files are loaded
    via ``python-dotenv`` at construction time). Raises ``ValueError`` from
    the getters when the key is missing or model construction fails.
    """

    def __init__(self):
        # Load variables from a .env file into os.environ (no-op if absent).
        load_dotenv()

    def _build_llm(self, model: str, **kwargs) -> "ChatGroq":
        """Resolve the API key and construct a ChatGroq client for *model*.

        Raises:
            ValueError: if GROQ_API_KEY is unset/empty or ChatGroq fails.
        """
        try:
            # NOTE: never print/log the API key — security risk.
            api_key = os.getenv("GROQ_API_KEY")
            if not api_key:
                # Assigning None into os.environ would raise an opaque
                # TypeError; fail with a clear message instead.
                raise ValueError("GROQ_API_KEY is not set in the environment")
            # Preserve original side effects: cache on the instance and
            # re-export into the process environment for downstream code.
            os.environ["GROQ_API_KEY"] = self.groq_api_key = api_key
            return ChatGroq(api_key=self.groq_api_key, model=model, **kwargs)
        except ValueError:
            raise
        except Exception as e:
            # Chain the original cause so the traceback stays diagnosable.
            raise ValueError(f"Error occurred with exception: {e}") from e

    def get_llm(self):
        """Return a llama-3.3-70b ChatGroq client tuned for structured output."""
        # Low temperature for more consistent structured (JSON) output.
        return self._build_llm(
            "llama-3.3-70b-versatile",
            streaming=False,
            temperature=0.1,
        )

    def get_moon(self):
        """Return a Moonshot Kimi-K2 ChatGroq client."""
        return self._build_llm(
            "moonshotai/kimi-k2-instruct",
            streaming=False,
        )