File size: 2,075 Bytes
6b50ab8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
import os
from typing import Union
# from azure.identity import DefaultAzureCredential
from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings
from langchain_ollama import OllamaEmbeddings
from langchain_huggingface import HuggingFaceEmbeddings


class EmbeddingFactory:
    """
    A static utility class to create and return LLM Embedding instances based on the input type.
    """

    @staticmethod
    def get_llm(
        llm_type: str,
    ) -> "Union[AzureOpenAIEmbeddings, OpenAIEmbeddings, OllamaEmbeddings, HuggingFaceEmbeddings]":
        """
        Return an embedding-model instance based on the specified type.

        Parameters:
            llm_type (str): The embedding backend to use. Valid values are
                'azure', 'openai', 'ollama', or 'hf' (case-insensitive).

        Returns:
            Union[AzureOpenAIEmbeddings, OpenAIEmbeddings, OllamaEmbeddings,
            HuggingFaceEmbeddings]: The embedding instance for the requested
            backend.

        Raises:
            NotImplementedError: If 'azure' is requested — that backend is
                currently disabled.
            ValueError: If llm_type is not one of the supported values.
        """
        # Normalize once instead of calling .lower() in every branch.
        kind = llm_type.lower()

        if kind == "azure":
            # The Azure backend (DefaultAzureCredential + AzureOpenAIEmbeddings)
            # is intentionally disabled; the old implementation lives in version
            # control. Fail loudly rather than silently returning None, which
            # would surface later as an AttributeError at the call site.
            raise NotImplementedError(
                "The 'azure' embedding backend is currently disabled."
            )
        elif kind == "openai":
            return OpenAIEmbeddings(
                api_key=os.environ["OPENAI_API_KEY"],
                model="text-embedding-3-large",
            )
        elif kind == "ollama":
            # Requires a local Ollama server with this model already pulled.
            return OllamaEmbeddings(model="gemma:2b")
        elif kind == "hf":
            # Requires a HuggingFace token in the HF_TOKEN environment variable.
            return HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
        else:
            # Keep the message in sync with every branch above.
            raise ValueError(
                "Invalid llm_type. Use 'azure', 'openai', 'ollama', or 'hf'."
            )