"""Central LLM client configuration.

Loads credentials from the project .env file and exposes one configured
``AzureChatOpenAI`` client per deployment:

* ``model_4o_2``  — env-var suffix ``4o_2``
* ``model_4omini`` — env-var suffix ``4omini``
* ``model_5mini`` — env-var suffix ``5mini``
* ``model_5_1``   — env-var suffix ``51``
"""

import os

from dotenv import load_dotenv

from config.env_constant import EnvFilepath

# Must run before any os.environ.get() below, so the .env values are visible.
load_dotenv(EnvFilepath.ENVPATH)

from langchain_openai import AzureChatOpenAI  # noqa: E402  (after load_dotenv on purpose)


def _azure_chat_model(suffix: str) -> AzureChatOpenAI:
    """Build an ``AzureChatOpenAI`` client from env vars keyed by ``suffix``.

    Reads the following environment variables (all loaded from the .env file
    above); missing variables yield ``None`` and will fail at first use:

    * ``azureai__endpoint__url__<suffix>``
    * ``azureai__api__version__<suffix>``
    * ``azureai__deployment__name__<suffix>``
    * ``azureai__api_key__<suffix>``
    """
    return AzureChatOpenAI(
        azure_endpoint=os.environ.get(f"azureai__endpoint__url__{suffix}"),
        openai_api_version=os.environ.get(f"azureai__api__version__{suffix}"),
        deployment_name=os.environ.get(f"azureai__deployment__name__{suffix}"),
        openai_api_key=os.environ.get(f"azureai__api_key__{suffix}"),
        openai_api_type="azure",
        max_retries=2,
        disable_streaming=True,
    )


#######
# LLM
#######
model_4o_2 = _azure_chat_model("4o_2")
model_4omini = _azure_chat_model("4omini")
model_5mini = _azure_chat_model("5mini")
model_5_1 = _azure_chat_model("51")