Spaces:
Paused
Paused
mock
Browse files
api/question_answering/qa_model.py
CHANGED
|
@@ -13,6 +13,7 @@ from langchain.llms import HuggingFacePipeline
|
|
| 13 |
from langchain.llms.base import LLM
|
| 14 |
from langchain.embeddings import HuggingFaceEmbeddings, HuggingFaceHubEmbeddings, HuggingFaceInstructEmbeddings
|
| 15 |
from langchain.vectorstores import FAISS
|
|
|
|
| 16 |
|
| 17 |
from api.logger import logger
|
| 18 |
from api.question_answering.response import Response
|
|
@@ -182,7 +183,7 @@ class QAModel():
|
|
| 182 |
)
|
| 183 |
else:
|
| 184 |
logger.info('using transformers pipeline model')
|
| 185 |
- self.llm_model =
|
| 186 |
model_id=llm_model_id
|
| 187 |
)
|
| 188 |
|
|
|
|
| 13 |
from langchain.llms.base import LLM
|
| 14 |
from langchain.embeddings import HuggingFaceEmbeddings, HuggingFaceHubEmbeddings, HuggingFaceInstructEmbeddings
|
| 15 |
from langchain.vectorstores import FAISS
|
| 16 |
+ from mocks import MockLocalBinaryModel
|
| 17 |
|
| 18 |
from api.logger import logger
|
| 19 |
from api.question_answering.response import Response
|
|
|
|
| 183 |
)
|
| 184 |
else:
|
| 185 |
logger.info('using transformers pipeline model')
|
| 186 |
+ self.llm_model = MockLocalBinaryModel(
|
| 187 |
model_id=llm_model_id
|
| 188 |
)
|
| 189 |
|