# Gdoc / src/tools/llms.py
# Author: adrien.aribaut-gaudin
# Commit: "push on my interface" (c626d10)
"""LLM client setup for the Gdoc tools package.

Exposes ``openai_llm``, a deterministic (temperature=0) langchain OpenAI
completion LLM, and exports the OpenAI / SerpAPI keys into the process
environment for downstream langchain components.
"""
from langchain.llms import OpenAI
# from transformers import AutoTokenizer, AutoModelForCausalLM
import os

# SECURITY: the API keys below are committed in plain text and must be
# treated as compromised — revoke them and supply replacements through the
# environment instead of source code.
# ``setdefault`` keeps the historical hardcoded fallback but no longer
# clobbers a key that the deployment environment already provides (the
# original unconditionally overwrote OPENAI_API_KEY / SERPAPI_API_KEY).
OpenAI_KEY = "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"
os.environ.setdefault("OPENAI_API_KEY", OpenAI_KEY)

# Deterministic completion model shared by the tools in this package.
openai_llm = OpenAI(temperature=0)  # OpenAI completion model
# llm_model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")  # LLaMA model (disabled)

SERPAPI_API_KEY = "dba90c4ecfa942f37e2b9eb2e7c6600ef7fb5c02ab8bbfacef426773df14c06b"
os.environ.setdefault("SERPAPI_API_KEY", SERPAPI_API_KEY)

# Disabled HuggingFace Hub alternative (flan-t5-small). NOTE(review): the
# HF token inside this dead block is likewise leaked and must be revoked
# before this code is ever re-enabled.
"""
HF_API_KEY = "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"
hf_llm = HuggingFaceHub(repo_id="google/flan-t5-small",
model_kwargs={"temperature": 0, "max_length": 1000},
huggingfacehub_api_token=HF_API_KEY)
"""