Spaces:
Sleeping
Sleeping
Update SimpleChatbot.py
Browse files- SimpleChatbot.py +2 -7
SimpleChatbot.py
CHANGED
|
@@ -1,10 +1,8 @@
|
|
| 1 |
# Q&A Chatbot
|
|
|
|
| 2 |
from langchain_community.llms import HuggingFaceEndpoint
|
| 3 |
from langchain.chains import LLMChain
|
| 4 |
from langchain.prompts import PromptTemplate
|
| 5 |
-
|
| 6 |
-
# from langchain.llms import OpenAI
|
| 7 |
-
# Uncomment the line above once OpenAI credits are available
|
| 8 |
from dotenv import load_dotenv
|
| 9 |
load_dotenv() # take environment variables from .env
|
| 10 |
|
|
@@ -15,10 +13,7 @@ import os
|
|
| 15 |
|
| 16 |
def get_model_response(question):
|
| 17 |
llm = HuggingFaceEndpoint(
|
| 18 |
-
repo_id="mistralai/Mistral-7B-Instruct-v0.2", max_length=128, temperature=0.5
|
| 19 |
-
|
| 20 |
-
# llm = OpenAI(openai_api_key=os.getenv("OPENAI_API_KEY"), model_name="text-davinci-003", temperature=0.6, max_tokens=64)
|
| 21 |
-
# Uncomment the line above and use it instead of HuggingFaceEndpoint once OpenAI credits are available.
|
| 22 |
template = """Question: {question}
|
| 23 |
Answer:"""
|
| 24 |
prompt = PromptTemplate.from_template(template)
|
|
|
|
| 1 |
# Q&A Chatbot
|
| 2 |
+
import langchain_community
|
| 3 |
from langchain_community.llms import HuggingFaceEndpoint
|
| 4 |
from langchain.chains import LLMChain
|
| 5 |
from langchain.prompts import PromptTemplate
|
|
|
|
|
|
|
|
|
|
| 6 |
from dotenv import load_dotenv
|
| 7 |
load_dotenv() # take environment variables from .env
|
| 8 |
|
|
|
|
| 13 |
|
| 14 |
def get_model_response(question):
|
| 15 |
llm = HuggingFaceEndpoint(
|
| 16 |
+
repo_id="mistralai/Mistral-7B-Instruct-v0.2", max_length=128, temperature=0.5)
|
|
|
|
|
|
|
|
|
|
| 17 |
template = """Question: {question}
|
| 18 |
Answer:"""
|
| 19 |
prompt = PromptTemplate.from_template(template)
|