Spaces:
Runtime error
Runtime error
| import enum | |
| import os | |
| from core.helper import LifecycleHelper | |
| from faq.robot_manager import FAQRobotManager, AzureFAQRobotManager | |
| from multiprocessing import Lock | |
# Module-level mutex that serializes FAQRobotManagerFactory cache population.
# NOTE(review): this is multiprocessing.Lock — it works for threads within one
# process too, but threading.Lock would be the usual choice for guarding a
# per-process dict; confirm cross-process use was actually intended.
lock = Lock()
class FAQRobotRevision(enum.Enum):
    """Enumerates the FAQ robot implementations the factory knows how to build."""

    # Azure OpenAI-backed baseline implementation.
    SIMPLE_OPENAI_VERSION_0 = 1
    # HuggingFace Chinese-optimized implementation.
    HUGGINGFACE_VERSION_0 = 2
class FAQRobotManagerFactory:
    """Process-wide factory and cache of FAQ robot managers, keyed by revision.

    Managers are built lazily on first request and reused afterwards.
    Example cache state::

        CAPABLE: dict[FAQRobotRevision, FAQRobotManager] =
            {FAQRobotRevision.SIMPLE_OPENAI_VERSION_0: FAQRobotManager()}
    """

    # Lazily-populated cache; guarded by the module-level ``lock``.
    CAPABLE = dict()  # type: dict[FAQRobotRevision, FAQRobotManager]

    @classmethod
    def get_or_create(cls, revision: FAQRobotRevision) -> FAQRobotManager:
        """Return the cached manager for *revision*, building it on first use.

        Args:
            revision: which robot implementation to obtain.

        Returns:
            The (possibly newly created) ``FAQRobotManager`` for *revision*.

        Raises:
            ValueError: if *revision* is not a supported revision.
        """
        # Hold the lock for the whole check-then-build sequence so two
        # concurrent callers cannot both construct (and start) a manager.
        with lock:
            cached = cls.CAPABLE.get(revision)
            if cached is not None:
                return cached
            if revision == FAQRobotRevision.SIMPLE_OPENAI_VERSION_0:
                manager = cls.create_simple_openai_version_0()
            elif revision == FAQRobotRevision.HUGGINGFACE_VERSION_0:
                manager = cls.create_huggingface_version_0()
            else:
                # The original fell through to a NameError on `manager`;
                # fail explicitly instead.
                raise ValueError(f"Unsupported FAQ robot revision: {revision!r}")
            cls.CAPABLE[revision] = manager
            return manager

    @classmethod
    def _build_robot_manager(cls, service_context_manager) -> AzureFAQRobotManager:
        """Wire a service context into storage + robot managers and start them.

        Shared tail of both ``create_*`` builders (previously duplicated).
        """
        # Local import: defer the heavy llama dependency until a build happens.
        from llama.storage_context import LocalStorageContextManager

        dataset_path = os.getenv("FAQ_DATASET_PATH", "./dataset/faq")
        storage_context_manager = LocalStorageContextManager(
            dataset_path=dataset_path, service_context_manager=service_context_manager
        )
        robot_manager = AzureFAQRobotManager(
            service_context_manager=service_context_manager,
            storage_context_manager=storage_context_manager,
        )
        LifecycleHelper.initialize_if_possible(robot_manager)
        LifecycleHelper.start_if_possible(robot_manager)
        return robot_manager

    @classmethod
    def create_simple_openai_version_0(cls) -> AzureFAQRobotManager:
        """Build and start the Azure OpenAI-backed manager (SIMPLE_OPENAI_VERSION_0)."""
        from llama.service_context import AzureServiceContextManager
        from langchain_manager.manager import LangChainAzureManager

        return cls._build_robot_manager(
            AzureServiceContextManager(lc_manager=LangChainAzureManager())
        )

    @classmethod
    def create_huggingface_version_0(cls) -> AzureFAQRobotManager:
        """Build and start the HuggingFace-backed manager (HUGGINGFACE_VERSION_0)."""
        from llama.service_context import HuggingFaceChineseOptServiceContextManager
        from langchain_manager.manager import LangChainAzureManager

        return cls._build_robot_manager(
            HuggingFaceChineseOptServiceContextManager(
                lc_manager=LangChainAzureManager()
            )
        )