File size: 1,915 Bytes
6c655a3
 
 
be3a5c4
3f2f8aa
6874dac
6c655a3
 
be3a5c4
 
 
 
 
 
 
6c655a3
 
fbc17f4
6874dac
c636895
6874dac
3c1150c
be3a5c4
 
3c1150c
 
 
 
c636895
3f2f8aa
07387fb
 
 
 
32131c3
07387fb
 
 
 
 
 
 
 
 
 
 
94962e5
 
07387fb
 
b4fb6ac
 
 
 
eb40d68
6c655a3
 
 
 
eb40d68
b623e6c
eb40d68
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import os
import requests
import numpy as np
from langchain_groq import ChatGroq
from langchain_openai import ChatOpenAI
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_anthropic import ChatAnthropic
from langchain_openai import OpenAIEmbeddings
from huggingface_hub import login
from dotenv import load_dotenv
# Load variables from a local .env file into the process environment.
load_dotenv()

# Fail fast with an actionable message when a required secret is missing:
# the original `os.environ['X'] = os.getenv('X')` pattern raises an opaque
# "TypeError: str expected, not NoneType" if the variable is unset.
_hf_token = os.getenv('HUGGINGFACEHUB_ACCESS_TOKEN')
if not _hf_token:
    raise RuntimeError(
        "HUGGINGFACEHUB_ACCESS_TOKEN is not set; add it to your .env file."
    )
os.environ['HUGGINGFACEHUB_ACCESS_TOKEN'] = _hf_token
# Authenticate the huggingface_hub client for this session (network call).
login(_hf_token)

_groq_key = os.getenv('GROQ_API_KEY')
if not _groq_key:
    raise RuntimeError("GROQ_API_KEY is not set; add it to your .env file.")
os.environ['GROQ_API_KEY'] = _groq_key

# ---------------------------------------------------------------------------
# Shared model handles used by the rest of the pipeline.
# ---------------------------------------------------------------------------

# OpenAI embedding model producing 1536-dimensional vectors.
embedding_model = OpenAIEmbeddings(
    model="text-embedding-3-small",
    dimensions=1536,
)

# Anthropic chat model, capped at 500 output tokens per response.
llm_anthropic = ChatAnthropic(
    model='claude-3-5-sonnet-20241022',
    temperature=0.7,
    max_tokens=500,
)

# Google Gemini chat model with library-default generation settings.
llm_gemini = ChatGoogleGenerativeAI(model="gemini-1.5-flash")

# Groq-hosted Llama 3.1 8B Instant.
llm = ChatGroq(model="llama-3.1-8b-instant", temperature=0.7)

# OpenAI GPT-4o-mini at a lower temperature for more deterministic output.
llm_gpt = ChatOpenAI(model="gpt-4o-mini", temperature=0.3)


class HFEmbeddingAPI:
    """Minimal client for a Hugging Face Inference feature-extraction endpoint.

    Mimics the sentence-transformers ``encode`` interface: pass a single
    string or a list of strings, get back a numpy array of embeddings.
    """

    def __init__(self, api_url, token=None):
        """
        Args:
            api_url: Full URL of the feature-extraction inference endpoint.
            token: Hugging Face API token. Falls back to the
                HUGGINGFACEHUB_ACCESS_TOKEN environment variable when omitted.
        """
        self.api_url = api_url
        # Bug fix: the original accepted ``token`` but ignored it and always
        # read the environment variable; honor the explicit argument first.
        self.headers = {
            "Authorization": f"Bearer {token or os.environ.get('HUGGINGFACEHUB_ACCESS_TOKEN')}",
        }

    def encode(self, texts):
        """Embed ``texts`` via the remote inference API.

        Args:
            texts: A single string or a list of strings.

        Returns:
            np.ndarray: a 1-D embedding when exactly one input was sent,
            otherwise a 2-D array with one row per input.

        Raises:
            requests.HTTPError: if the API responds with an error status.
            requests.Timeout: if the request exceeds the timeout.
        """
        if isinstance(texts, str):
            texts = [texts]
        response = requests.post(
            self.api_url,
            headers=self.headers,
            json={"inputs": texts},
            timeout=60,  # without a timeout, a stalled connection hangs forever
        )
        response.raise_for_status()
        embeddings = response.json()
        # Unwrap the batch dimension for single-input calls.
        return np.array(embeddings[0]) if len(embeddings) == 1 else np.array(embeddings)

# API-backed drop-in replacement for a local SentenceTransformer model.
_MXBAI_ENDPOINT = (
    "https://router.huggingface.co/hf-inference/models/"
    "mixedbread-ai/mxbai-embed-large-v1/pipeline/feature-extraction"
)
ST = HFEmbeddingAPI(
    api_url=_MXBAI_ENDPOINT,
    token=os.environ.get('HUGGINGFACEHUB_ACCESS_TOKEN'),
)

# Every pipeline role currently shares the same Anthropic model; rebind any
# individual name to a different handle above to change one role at a time.
improver_llm = ideator_llm = critic_llm = validator_llm = llm_anthropic