import os

import dotenv
from huggingface_hub import login
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain_community.llms import HuggingFacePipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Load variables from a local .env file so HUGGINGFACEHUB_API_TOKEN can be
# supplied via .env during local development as well as real env vars.
dotenv.load_dotenv()

# Authenticate with the Hugging Face Hub up front so model downloads below
# (and any gated/private repos) don't fail with an opaque 401 later.
token = os.getenv("HUGGINGFACEHUB_API_TOKEN")

if token:
    login(token=token)
else:
    # Fail fast with an actionable message. Bug fix: the message previously
    # named "HUGGINGFACE_HUB_TOKEN", which does not match the variable
    # actually read above — users following it could never fix the error.
    raise EnvironmentError(
        "⚠️ Environment variable HUGGINGFACEHUB_API_TOKEN is not set in this space."
    )
|
|
def load_openchat():
    """Build and return a Hugging Face text-generation pipeline.

    NOTE(review): despite the function name, this loads
    "microsoft/DialoGPT-small", not an OpenChat checkpoint — confirm which
    model is actually intended before renaming either side.

    Returns:
        transformers.Pipeline: a "text-generation" pipeline wrapping the
        causal-LM weights and the tokenizer from the same checkpoint.
    """
    model_name = "microsoft/DialoGPT-small"
    # Load the weights and the tokenizer from the same checkpoint so the
    # vocabulary and special tokens are guaranteed to match.
    model = AutoModelForCausalLM.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)

    return pipeline("text-generation", model=model, tokenizer=tokenizer)
|
|