# src/genai/utils/models_loader.py
# Provenance (removed Hugging Face file-viewer chrome so the module parses):
#   repo: trygithubactions, commit 583f6dd ("Included CI CD") by subashpoudel, 1.92 kB
import os
import requests
import numpy as np
from langchain_groq import ChatGroq
from langchain_openai import ChatOpenAI
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_anthropic import ChatAnthropic
from langchain_openai import OpenAIEmbeddings
from huggingface_hub import login
from dotenv import load_dotenv
# Load variables from a local .env file into the process environment.
load_dotenv()

# Propagate credentials from .env into os.environ for downstream libraries.
# The original `os.environ[k] = os.getenv(k)` raises TypeError when the
# variable is missing (os.environ values must be str, not None), so guard.
_hf_token = os.getenv('HUGGINGFACEHUB_ACCESS_TOKEN')
if _hf_token:
    os.environ['HUGGINGFACEHUB_ACCESS_TOKEN'] = _hf_token
    # Authenticate the Hugging Face Hub client (import-time side effect,
    # preserved from the original module; skipped when no token is set).
    login(_hf_token)

_groq_key = os.getenv('GROQ_API_KEY')
if _groq_key:
    os.environ['GROQ_API_KEY'] = _groq_key

# Shared embedding model and chat LLMs used across the project.
embedding_model = OpenAIEmbeddings(model="text-embedding-3-small", dimensions=1536)
llm_anthropic = ChatAnthropic(model='claude-3-5-sonnet-20241022', temperature=0.7, max_tokens=500)
llm_gemini = ChatGoogleGenerativeAI(model="gemini-1.5-flash")
llm = ChatGroq(
    model="llama-3.1-8b-instant",
    temperature=0.7,
)
llm_gpt = ChatOpenAI(
    model="gpt-4o-mini",
    temperature=0.3,
)
class HFEmbeddingAPI:
    """Thin HTTP client for a Hugging Face Inference API feature-extraction endpoint.

    Mimics the `SentenceTransformer.encode` interface so it can stand in for
    a locally loaded embedding model.
    """

    def __init__(self, api_url, token=None):
        """Store the endpoint URL and build the auth header.

        Args:
            api_url: Full URL of the feature-extraction inference endpoint.
            token: HF access token. Bug fix: the original accepted `token`
                but ignored it, always reading the env var; now the passed
                token takes precedence, with the env var as fallback.
        """
        self.api_url = api_url
        self.headers = {
            "Authorization": f"Bearer {token or os.environ.get('HUGGINGFACEHUB_ACCESS_TOKEN')}",
        }

    def encode(self, texts):
        """Embed one string or a list of strings via the remote endpoint.

        Returns a 1-D np.array for a single input, else a 2-D np.array.
        Raises requests.HTTPError on a non-2xx response.
        """
        if isinstance(texts, str):
            texts = [texts]
        response = requests.post(
            self.api_url,
            headers=self.headers,
            json={"inputs": texts},
        )
        response.raise_for_status()
        embeddings = response.json()
        # Single input: unwrap the outer list so callers get a flat vector.
        return np.array(embeddings[0]) if len(embeddings) == 1 else np.array(embeddings)
# API-backed stand-in for a local SentenceTransformer model.
_MXBAI_ENDPOINT = (
    "https://router.huggingface.co/hf-inference/models/"
    "mixedbread-ai/mxbai-embed-large-v1/pipeline/feature-extraction"
)
ST = HFEmbeddingAPI(
    api_url=_MXBAI_ENDPOINT,
    token=os.environ.get('HUGGINGFACEHUB_ACCESS_TOKEN'),
)

# Every agent role currently shares the same Anthropic model.
improver_llm = ideator_llm = critic_llm = validator_llm = llm_anthropic