text
stringlengths
0
5.92k
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._sql_repository import _SQLRepository
from ._cycle_converter import _CycleConverter
from ._cycle_model import _CycleModel


class _CycleSQLRepository(_SQLRepository):
    """SQL-backed repository for cycle entities.

    Thin wiring class: plugs the cycle model and its entity/model converter
    into the generic SQL repository machinery.
    """

    def __init__(self):
        super().__init__(model_type=_CycleModel, converter=_CycleConverter)
from typing import NewType

# Distinct identifier type for `Cycle^` entities. A NewType lets static
# checkers flag plain strings passed where a cycle id is expected, while
# behaving exactly like `str` at runtime.
CycleId = NewType("CycleId", str)
CycleId.__doc__ = """Type that holds a `Cycle^` identifier."""
from dataclasses import dataclass
from typing import Any, Dict

from sqlalchemy import JSON, Column, Enum, String, Table

from taipy.config.common.frequency import Frequency

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from .cycle_id import CycleId


@mapper_registry.mapped
@dataclass
class _CycleModel(_BaseModel):
    """Serializable representation of a cycle, mapped onto the `cycle` table.

    Dates are persisted as ISO-8601 strings; `properties` is stored as JSON.
    """

    __table__ = Table(
        "cycle",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("name", String),
        Column("frequency", Enum(Frequency)),
        Column("properties", JSON),
        Column("creation_date", String),
        Column("start_date", String),
        Column("end_date", String),
    )
    id: CycleId
    name: str
    frequency: Frequency
    properties: Dict[str, Any]
    creation_date: str
    start_date: str
    end_date: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a raw row dict, reviving frequency and properties."""
        return _CycleModel(
            id=data["id"],
            name=data["name"],
            # Frequency was persisted via repr(); revive it symmetrically.
            frequency=Frequency._from_repr(data["frequency"]),
            properties=_BaseModel._deserialize_attribute(data["properties"]),
            creation_date=data["creation_date"],
            start_date=data["start_date"],
            end_date=data["end_date"],
        )

    def to_list(self):
        """Return the column values in declared table order."""
        return [
            self.id,
            self.name,
            repr(self.frequency),
            _BaseModel._serialize_attribute(self.properties),
            self.creation_date,
            self.start_date,
            self.end_date,
        ]
from datetime import datetime

from .._repository._abstract_converter import _AbstractConverter
from ..cycle._cycle_model import _CycleModel
from ..cycle.cycle import Cycle


class _CycleConverter(_AbstractConverter):
    """Converts between `Cycle` entities and `_CycleModel` rows."""

    @classmethod
    def _entity_to_model(cls, cycle: Cycle) -> _CycleModel:
        """Serialize a cycle; datetimes become ISO-8601 strings."""
        return _CycleModel(
            id=cycle.id,
            name=cycle._name,
            frequency=cycle._frequency,
            creation_date=cycle._creation_date.isoformat(),
            start_date=cycle._start_date.isoformat(),
            end_date=cycle._end_date.isoformat(),
            properties=cycle._properties.data,
        )

    @classmethod
    def _model_to_entity(cls, model: _CycleModel) -> Cycle:
        """Rebuild a cycle, parsing the stored ISO-8601 strings back to datetimes."""
        parse = datetime.fromisoformat
        return Cycle(
            id=model.id,
            name=model.name,
            frequency=model.frequency,
            properties=model.properties,
            creation_date=parse(model.creation_date),
            start_date=parse(model.start_date),
            end_date=parse(model.end_date),
        )
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ..cycle._cycle_manager import _CycleManager
from ._cycle_fs_repository import _CycleFSRepository
from ._cycle_sql_repository import _CycleSQLRepository


class _CycleManagerFactory(_ManagerFactory):
    """Builds the cycle manager, choosing the enterprise or community edition."""

    __REPOSITORY_MAP = {"default": _CycleFSRepository, "sql": _CycleSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_CycleManager]:  # type: ignore
        """Return the cycle manager class with its repository attached."""
        if cls._using_enterprise():
            # Enterprise edition overrides both the manager and its repository.
            manager = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager", "_CycleManager"
            )  # type: ignore
            repository_builder = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager_factory", "_CycleManagerFactory"
            )._build_repository  # type: ignore
        else:
            manager = _CycleManager
            repository_builder = cls._build_repository
        manager._repository = repository_builder()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository configured for this run ("default" or "sql")."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from abc import abstractmethod
from importlib import util
from typing import Type

from taipy.config import Config

from ._manager import _Manager


class _ManagerFactory:
    """Base class for factories that wire a `_Manager` to its repository."""

    _TAIPY_ENTERPRISE_MODULE = "taipy.enterprise"
    _TAIPY_ENTERPRISE_CORE_MODULE = _TAIPY_ENTERPRISE_MODULE + ".core"

    @classmethod
    @abstractmethod
    def _build_manager(cls) -> Type[_Manager]:  # type: ignore
        raise NotImplementedError

    @classmethod
    def _build_repository(cls):
        raise NotImplementedError

    @classmethod
    def _using_enterprise(cls) -> bool:
        """True when the taipy enterprise package is importable (without importing it)."""
        return util.find_spec(cls._TAIPY_ENTERPRISE_MODULE) is not None

    @staticmethod
    def _get_repository_with_repo_map(repository_map: dict):
        """Pick the repository class matching `Config.core.repository_type`,
        falling back to the map's "default" entry."""
        return repository_map.get(Config.core.repository_type, repository_map.get("default"))
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import pathlib
from importlib import metadata
from typing import Dict, Generic, Iterable, List, Optional, TypeVar, Union

from taipy.logger._taipy_logger import _TaipyLogger

from .._entity._entity_ids import _EntityIds
from .._repository._abstract_repository import _AbstractRepository
from ..exceptions.exceptions import ModelNotFound
from ..notification import Event, EventOperation, Notifier

EntityType = TypeVar("EntityType")


class _Manager(Generic[EntityType]):
    """Generic CRUD manager delegating persistence to a repository.

    Subclasses set `_repository` (and optionally `_EVENT_ENTITY_TYPE`); when
    the latter is present, deletion operations publish notification events.
    """

    _repository: _AbstractRepository
    _logger = _TaipyLogger._get_logger()
    _ENTITY_NAME: str = "Entity"  # Used in "not found" log messages.

    @classmethod
    def _delete_all(cls):
        """
        Deletes all entities.
        """
        cls._repository._delete_all()
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,
                    EventOperation.DELETION,
                    metadata={"delete_all": True},
                )
            )

    @classmethod
    def _delete_many(cls, ids: Iterable):
        """
        Deletes entities by a list of ids.
        """
        cls._repository._delete_many(ids)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            # One event per deleted entity.
            for entity_id in ids:
                Notifier.publish(
                    Event(
                        cls._EVENT_ENTITY_TYPE,  # type: ignore
                        EventOperation.DELETION,
                        entity_id=entity_id,
                        # NOTE(review): the "delete_all" flag here looks
                        # copy-pasted from _delete_all — confirm it is the
                        # intended metadata for a per-id deletion.
                        metadata={"delete_all": True},
                    )
                )

    @classmethod
    def _delete_by_version(cls, version_number: str):
        """
        Deletes entities by version number.
        """
        cls._repository._delete_by(attribute="version", value=version_number)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,  # type: ignore
                    EventOperation.DELETION,
                    metadata={"delete_by_version": version_number},
                )
            )

    @classmethod
    def _delete(cls, id):
        """
        Deletes an entity by id.
        """
        cls._repository._delete(id)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,
                    EventOperation.DELETION,
                    entity_id=id,
                )
            )

    @classmethod
    def _set(cls, entity: EntityType):
        """
        Save or update an entity.
        """
        cls._repository._save(entity)

    @classmethod
    def _get_all(cls, version_number: Optional[str] = "all") -> List[EntityType]:
        """
        Returns all entities.
        """
        # NOTE(review): `version_number` is accepted but not used here — no
        # version filter is built. Version-aware subclasses appear to build
        # their own filters; confirm this is intentional.
        filters: List[Dict] = []
        return cls._repository._load_all(filters)

    @classmethod
    def _get_all_by(cls, filters: Optional[List[Dict]] = None) -> List[EntityType]:
        """
        Returns all entities based on a criteria.
        """
        if not filters:
            filters = []
        return cls._repository._load_all(filters)

    @classmethod
    def _get(cls, entity: Union[str, EntityType], default=None) -> EntityType:
        """
        Returns an entity by id or reference.

        Returns `default` (and logs an error) when the entity is not found.
        """
        entity_id = entity if isinstance(entity, str) else entity.id  # type: ignore
        try:
            return cls._repository._load(entity_id)
        except ModelNotFound:
            cls._logger.error(f"{cls._ENTITY_NAME} not found: {entity_id}")
            return default

    @classmethod
    def _exists(cls, entity_id: str) -> bool:
        """
        Returns True if the entity id exists.
        """
        return cls._repository._exists(entity_id)

    @classmethod
    def _delete_entities_of_multiple_types(cls, _entity_ids: _EntityIds):
        """
        Deletes entities of multiple types.
        """
        # Imported locally to avoid circular imports between managers.
        from ..cycle._cycle_manager_factory import _CycleManagerFactory
        from ..data._data_manager_factory import _DataManagerFactory
        from ..job._job_manager_factory import _JobManagerFactory
        from ..scenario._scenario_manager_factory import _ScenarioManagerFactory
        from ..sequence._sequence_manager_factory import _SequenceManagerFactory
        from ..submission._submission_manager_factory import _SubmissionManagerFactory
        from ..task._task_manager_factory import _TaskManagerFactory

        _CycleManagerFactory._build_manager()._delete_many(_entity_ids.cycle_ids)
        _SequenceManagerFactory._build_manager()._delete_many(_entity_ids.sequence_ids)
        _ScenarioManagerFactory._build_manager()._delete_many(_entity_ids.scenario_ids)
        _TaskManagerFactory._build_manager()._delete_many(_entity_ids.task_ids)
        _JobManagerFactory._build_manager()._delete_many(_entity_ids.job_ids)
        _DataManagerFactory._build_manager()._delete_many(_entity_ids.data_node_ids)
        _SubmissionManagerFactory._build_manager()._delete_many(_entity_ids.submission_ids)

    @classmethod
    def _export(cls, id: str, folder_path: Union[str, pathlib.Path]):
        """
        Export an entity.
        """
        return cls._repository._export(id, folder_path)

    @classmethod
    def _is_editable(cls, entity: Union[EntityType, _EntityIds]) -> bool:
        # Community edition: everything is editable; enterprise may override.
        return True

    @classmethod
    def _is_readable(cls, entity: Union[EntityType, _EntityIds]) -> bool:
        # Community edition: everything is readable; enterprise may override.
        return True
from datetime import datetime
from typing import Any

from taipy.config import Config
from taipy.config._config import _Config

from .._entity._entity import _Entity


class _Version(_Entity):
    """A named snapshot of the application configuration."""

    def __init__(self, id: str, config: Any) -> None:
        self.id: str = id
        self.config: _Config = config
        self.creation_date: datetime = datetime.now()

    def __eq__(self, other):
        # Equal when ids match and both configs serialize to the same string.
        if self.id != other.id:
            return False
        serialize = Config._serializer._str
        return serialize(self.config) == serialize(other.config)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Dict, List

from .._version._version_manager_factory import _VersionManagerFactory


class _VersionMixin:
    """Mixin adding version-filtering helpers to entity managers."""

    _version_manager = _VersionManagerFactory._build_manager()

    @classmethod
    def __fetch_version_number(cls, version_number):
        """Normalize a version specifier into a list of concrete version numbers.

        Returns an empty list when the specifier resolves to nothing.
        """
        version_number = _VersionManagerFactory._build_manager()._replace_version_number(version_number)
        # Fix: `isinstance(..., typing.List)` relies on deprecated runtime
        # behavior of typing aliases; test against the builtin `list`.
        if not isinstance(version_number, list):
            version_number = [version_number] if version_number else []
        return version_number

    @classmethod
    def _build_filters_with_version(cls, version_number) -> List[Dict]:
        """Build repository filters from a version specifier.

        An empty return value means "no version filtering".
        """
        filters = []
        if versions := cls.__fetch_version_number(version_number):
            filters = [{"version": version} for version in versions]
        return filters

    @classmethod
    def _get_latest_version(cls):
        """Return the latest known version number."""
        return cls._version_manager._get_latest_version()
from .._manager._manager_factory import _ManagerFactory
from ..common import _utils
from ._version_fs_repository import _VersionFSRepository
from ._version_manager import _VersionManager
from ._version_sql_repository import _VersionSQLRepository


class _VersionManagerFactory(_ManagerFactory):
    """Builds the version manager, choosing the enterprise or community edition."""

    __REPOSITORY_MAP = {"default": _VersionFSRepository, "sql": _VersionSQLRepository}

    @classmethod
    def _build_manager(cls) -> _VersionManager:  # type: ignore
        """Return the version manager class with its repository attached."""
        if cls._using_enterprise():
            # Enterprise edition overrides both the manager and its repository.
            manager = _utils._load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version._version_manager", "_VersionManager"
            )  # type: ignore
            repository_builder = _utils._load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version._version_manager_factory", "_VersionManagerFactory"
            )._build_repository  # type: ignore
        else:
            manager = _VersionManager
            repository_builder = cls._build_repository
        manager._repository = repository_builder()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository configured for this run ("default" or "sql")."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from abc import ABC, abstractmethod class _VersionRepositoryInterface(ABC): _LATEST_VERSION_KEY = "latest_version" _DEVELOPMENT_VERSION_KEY = "development_version" _PRODUCTION_VERSION_KEY = "production_version" @abstractmethod def _set_latest_version(self, version_number): raise NotImplementedError @abstractmethod def _get_latest_version(self): raise NotImplementedError @abstractmethod def _set_development_version(self, version_number): raise NotImplementedError @abstractmethod def _get_development_version(self): raise NotImplementedError @abstractmethod def _set_production_version(self, version_number): raise NotImplementedError @abstractmethod def _get_production_versions(self): raise NotImplementedError @abstractmethod def _delete_production_version(self, version_number): raise NotImplementedError
import json
from typing import List

from taipy.logger._taipy_logger import _TaipyLogger

from .._repository._filesystem_repository import _FileSystemRepository
from ..exceptions.exceptions import VersionIsNotProductionVersion
from ._version_converter import _VersionConverter
from ._version_model import _VersionModel
from ._version_repository_interface import _VersionRepositoryInterface


class _VersionFSRepository(_FileSystemRepository, _VersionRepositoryInterface):
    """Filesystem repository for versions.

    Latest/development/production tracking state is kept in `version.json`
    inside the storage folder.
    """

    def __init__(self):
        super().__init__(model_type=_VersionModel, converter=_VersionConverter, dir_name="version")

    @property
    def _version_file_path(self):
        return super()._storage_folder / "version.json"

    def __read_tracking_file(self):
        # Load the {latest, development, production} tracking dict.
        with open(self._version_file_path, "r") as f:
            return json.load(f)

    def __write_tracking_file(self, file_content) -> None:
        # indent=0 keeps one key per line with no extra padding.
        self._version_file_path.write_text(
            json.dumps(
                file_content,
                ensure_ascii=False,
                indent=0,
            )
        )

    def _delete_all(self):
        super()._delete_all()
        if self._version_file_path.exists():
            self._version_file_path.unlink()

    def _set_latest_version(self, version_number):
        if self._version_file_path.exists():
            file_content = self.__read_tracking_file()
            file_content[self._LATEST_VERSION_KEY] = version_number
        else:
            self.dir_path.mkdir(parents=True, exist_ok=True)
            file_content = {
                self._LATEST_VERSION_KEY: version_number,
                self._DEVELOPMENT_VERSION_KEY: "",
                self._PRODUCTION_VERSION_KEY: [],
            }
        self.__write_tracking_file(file_content)

    def _get_latest_version(self) -> str:
        return self.__read_tracking_file()[self._LATEST_VERSION_KEY]

    def _set_development_version(self, version_number):
        if self._version_file_path.exists():
            file_content = self.__read_tracking_file()
            # The development version is always also the latest one.
            file_content[self._DEVELOPMENT_VERSION_KEY] = version_number
            file_content[self._LATEST_VERSION_KEY] = version_number
        else:
            self.dir_path.mkdir(parents=True, exist_ok=True)
            file_content = {
                self._LATEST_VERSION_KEY: version_number,
                self._DEVELOPMENT_VERSION_KEY: version_number,
                self._PRODUCTION_VERSION_KEY: [],
            }
        self.__write_tracking_file(file_content)

    def _get_development_version(self) -> str:
        return self.__read_tracking_file()[self._DEVELOPMENT_VERSION_KEY]

    def _set_production_version(self, version_number):
        if self._version_file_path.exists():
            file_content = self.__read_tracking_file()
            file_content[self._LATEST_VERSION_KEY] = version_number
            if version_number not in file_content[self._PRODUCTION_VERSION_KEY]:
                file_content[self._PRODUCTION_VERSION_KEY].append(version_number)
            else:
                _TaipyLogger._get_logger().info(f"Version {version_number} is already a production version.")
        else:
            self.dir_path.mkdir(parents=True, exist_ok=True)
            file_content = {
                self._LATEST_VERSION_KEY: version_number,
                self._DEVELOPMENT_VERSION_KEY: "",
                self._PRODUCTION_VERSION_KEY: [version_number],
            }
        self.__write_tracking_file(file_content)

    def _get_production_versions(self) -> List[str]:
        return self.__read_tracking_file()[self._PRODUCTION_VERSION_KEY]

    def _delete_production_version(self, version_number):
        try:
            file_content = self.__read_tracking_file()
            if version_number not in file_content[self._PRODUCTION_VERSION_KEY]:
                raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
            file_content[self._PRODUCTION_VERSION_KEY].remove(version_number)
            self.__write_tracking_file(file_content)
        except FileNotFoundError:
            # No tracking file at all: nothing has ever been marked production.
            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
from sqlalchemy.dialects import sqlite

from .._repository._sql_repository import _SQLRepository
from ..exceptions.exceptions import ModelNotFound, VersionIsNotProductionVersion
from ._version_converter import _VersionConverter
from ._version_model import _VersionModel
from ._version_repository_interface import _VersionRepositoryInterface


class _VersionSQLRepository(_SQLRepository, _VersionRepositoryInterface):
    """SQL repository for versions.

    Latest/development/production state is stored as boolean flags on each
    version row (`is_latest`, `is_development`, `is_production`).
    """

    def __init__(self):
        super().__init__(model_type=_VersionModel, converter=_VersionConverter)

    def _set_latest_version(self, version_number):
        # Demote the current "latest" row, if any, then promote the target.
        # NOTE(review): this select is stringified WITHOUT compiling for the
        # sqlite dialect, unlike the getters below — confirm both forms render
        # SQL the underlying driver accepts.
        if old_latest := self.db.execute(str(self.table.select().filter_by(is_latest=True))).fetchone():
            old_latest = self.model_type.from_dict(old_latest)
            old_latest.is_latest = False
            self._update_entry(old_latest)
        version = self.__get_by_id(version_number)
        version.is_latest = True
        self._update_entry(version)

    def _get_latest_version(self):
        if latest := self.db.execute(
            str(self.table.select().filter_by(is_latest=True).compile(dialect=sqlite.dialect()))
        ).fetchone():
            return latest["id"]
        raise ModelNotFound(self.model_type, "")

    def _set_development_version(self, version_number):
        # Demote the current development row, promote the target, and make it
        # the latest version as well.
        # NOTE(review): same un-compiled stringification as in
        # _set_latest_version — confirm intended.
        if old_development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
            old_development = self.model_type.from_dict(old_development)
            old_development.is_development = False
            self._update_entry(old_development)
        version = self.__get_by_id(version_number)
        version.is_development = True
        self._update_entry(version)
        self._set_latest_version(version_number)

    def _get_development_version(self):
        if development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
            return development["id"]
        raise ModelNotFound(self.model_type, "")

    def _set_production_version(self, version_number):
        # A production version also becomes the latest version.
        version = self.__get_by_id(version_number)
        version.is_production = True
        self._update_entry(version)
        self._set_latest_version(version_number)

    def _get_production_versions(self):
        if productions := self.db.execute(
            str(self.table.select().filter_by(is_production=True).compile(dialect=sqlite.dialect())),
        ).fetchall():
            return [p["id"] for p in productions]
        return []

    def _delete_production_version(self, version_number):
        version = self.__get_by_id(version_number)
        if not version or not version.is_production:
            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
        version.is_production = False
        self._update_entry(version)

    def __get_by_id(self, version_id):
        # The compiled query carries a bound placeholder for the id; the
        # parameter list supplies its value. Returns None when no row matches.
        query = str(self.table.select().filter_by(id=version_id).compile(dialect=sqlite.dialect()))
        entry = self.db.execute(query, [version_id]).fetchone()
        return self.model_type.from_dict(entry) if entry else None
from datetime import datetime

from taipy.config import Config

from .._repository._abstract_converter import _AbstractConverter
from .._version._version import _Version
from .._version._version_model import _VersionModel


class _VersionConverter(_AbstractConverter):
    """Converts between `_Version` entities and `_VersionModel` rows."""

    @classmethod
    def _entity_to_model(cls, version: _Version) -> _VersionModel:
        """Serialize: config becomes JSON, creation date an ISO-8601 string."""
        serialized_config = Config._to_json(version.config)
        return _VersionModel(
            id=version.id,
            config=serialized_config,
            creation_date=version.creation_date.isoformat(),
        )

    @classmethod
    def _model_to_entity(cls, model: _VersionModel) -> _Version:
        """Rebuild the entity, restoring its original creation date."""
        version = _Version(id=model.id, config=Config._from_json(model.config))
        # Overwrite the datetime.now() set by the constructor.
        version.creation_date = datetime.fromisoformat(model.creation_date)
        return version
from dataclasses import dataclass
from typing import Any, Dict

from sqlalchemy import Boolean, Column, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry

# Boolean status flags that are set dynamically (not dataclass fields).
_STATUS_FLAGS = ("is_production", "is_development", "is_latest")


@mapper_registry.mapped
@dataclass
class _VersionModel(_BaseModel):
    """Serializable representation of a version, mapped onto the `version` table."""

    __table__ = Table(
        "version",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("config", String),  # config is stored as a JSON string
        Column("creation_date", String),
        Column("is_production", Boolean),
        Column("is_development", Boolean),
        Column("is_latest", Boolean),
    )
    id: str
    config: Dict[str, Any]
    creation_date: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a raw row dict, including the status flags.

        NOTE(review): the status flags are plain attributes, not dataclass
        fields, so `to_list` only works on instances built through this
        factory — confirm direct construction never reaches `to_list`.
        """
        model = _VersionModel(
            id=data["id"],
            config=data["config"],
            creation_date=data["creation_date"],
        )
        for flag in _STATUS_FLAGS:
            setattr(model, flag, data.get(flag))  # type: ignore
        return model

    def to_list(self):
        """Return the column values in declared table order."""
        return [self.id, self.config, self.creation_date] + [getattr(self, flag) for flag in _STATUS_FLAGS]
from typing import Callable, List

from taipy.config.config import Config

from .._entity._reload import _Reloader
from ..config import MigrationConfig
from ._version_manager_factory import _VersionManagerFactory


def _migrate_entity(entity):
    """Apply registered migration functions to bring `entity` up to the latest
    production version. Returns the (possibly migrated) entity."""
    latest_version = _VersionManagerFactory._build_manager()._get_latest_version()
    # Only migrate when the latest version is itself a production version.
    if latest_version in _VersionManagerFactory._build_manager()._get_production_versions():
        migration_fcts = __get_migration_fcts_to_latest(entity._version, entity.config_id)
        if migration_fcts:
            # Run migrations without triggering entity reload/persist hooks.
            with _Reloader():
                for migrate in migration_fcts:
                    entity = migrate(entity)
                entity._version = latest_version
    return entity


def __get_migration_fcts_to_latest(source_version: str, config_id: str) -> List[Callable]:
    """Collect, in order, the migration callables registered for every
    production version that comes after `source_version`.

    Returns an empty list when `source_version` is not a known production
    version.
    """
    production_versions = _VersionManagerFactory._build_manager()._get_production_versions()
    try:
        start_index = production_versions.index(source_version) + 1
    except ValueError:
        return []
    registered = Config.unique_sections[MigrationConfig.name].migration_fcts
    return [
        fct
        for version in production_versions[start_index:]
        if (fct := registered.get(version, {}).get(config_id))
    ]
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import os import sys def _vt_codes_enabled_in_windows_registry(): """ Check the Windows Registry to see if VT code handling has been enabled by default, see https://superuser.com/a/1300251/447564. """ try: # winreg is only available on Windows. import winreg except ImportError: return False else: try: reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Console") reg_key_value, _ = winreg.QueryValueEx(reg_key, "VirtualTerminalLevel") except FileNotFoundError: return False else: return reg_key_value == 1 def _is_color_supported(): """ Return True if the running system's terminal supports color, and False otherwise. """ is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() return is_a_tty and ( sys.platform != "win32" or "ANSICON" in os.environ or "WT_SESSION" in os.environ # Windows Terminal supports VT codes. or os.environ.get("TERM_PROGRAM") == "vscode" # VSCode's built-in terminal supports colors. or _vt_codes_enabled_in_windows_registry() ) class _Bcolors: PURPLE = "\033[95m" if _is_color_supported() else "" BLUE = "\033[94m" if _is_color_supported() else "" CYAN = "\033[96m" if _is_color_supported() else "" GREEN = "\033[92m" if _is_color_supported() else "" BOLD = "\033[1m" if _is_color_supported() else "" UNDERLINE = "\033[4m" if _is_color_supported() else "" END = "\033[0m" if _is_color_supported() else ""
from .exceptions import *
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
from __future__ import annotations

import abc
from typing import Any, Callable, List, Optional, Set, Union

import networkx as nx

from ..common._listattributes import _ListAttributes
from ..common._utils import _Subscriber
from ..data.data_node import DataNode
from ..job.job import Job
from ..task.task import Task
from ._dag import _DAG


class Submittable:
    """Instance of an entity that can be submitted for execution.

    A submittable holds functions that can be used to build the execution
    directed acyclic graph (DAG) of tasks and data nodes.

    Attributes:
        subscribers (List[Callable]): The list of callbacks to be called on
            `Job^`'s status change.
    """

    def __init__(self, subscribers: Optional[List[_Subscriber]] = None):
        self._subscribers = _ListAttributes(self, subscribers or list())

    @abc.abstractmethod
    def submit(
        self,
        callbacks: Optional[List[Callable]] = None,
        force: bool = False,
        wait: bool = False,
        timeout: Optional[Union[float, int]] = None,
    ):
        raise NotImplementedError

    def get_inputs(self) -> Set[DataNode]:
        """Return the set of input data nodes of the submittable entity.

        Returns:
            The set of input data nodes.
        """
        dag = self._build_dag()
        return self.__get_inputs(dag)

    def __get_inputs(self, dag: nx.DiGraph) -> Set[DataNode]:
        # Inputs are data nodes with no incoming edge (nothing produces them).
        return {node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)}

    def get_outputs(self) -> Set[DataNode]:
        """Return the set of output data nodes of the submittable entity.

        Returns:
            The set of output data nodes.
        """
        dag = self._build_dag()
        return self.__get_outputs(dag)

    def __get_outputs(self, dag: nx.DiGraph) -> Set[DataNode]:
        # Outputs are data nodes with no outgoing edge (nothing consumes them).
        return {node for node, degree in dict(dag.out_degree).items() if degree == 0 and isinstance(node, DataNode)}

    def get_intermediate(self) -> Set[DataNode]:
        """Return the set of intermediate data nodes of the submittable entity.

        Returns:
            The set of intermediate data nodes (neither inputs nor outputs).
        """
        dag = self._build_dag()
        all_data_nodes_in_dag = {node for node in dag.nodes if isinstance(node, DataNode)}
        return all_data_nodes_in_dag - self.__get_inputs(dag) - self.__get_outputs(dag)

    def is_ready_to_run(self) -> bool:
        """Indicate if the entity is ready to be run.

        Returns:
            True if the given entity is ready to be run. False otherwise.
        """
        return all(dn.is_ready_for_reading for dn in self.get_inputs())

    def data_nodes_being_edited(self) -> Set[DataNode]:
        """Return the set of data nodes of the submittable entity that are being edited.

        Returns:
            The set of data nodes that are being edited.
        """
        dag = self._build_dag()
        return {node for node in dag.nodes if isinstance(node, DataNode) and node.edit_in_progress}

    @abc.abstractmethod
    def subscribe(self, callback: Callable[[Submittable, Job], None], params: Optional[List[Any]] = None):
        raise NotImplementedError

    @abc.abstractmethod
    def unsubscribe(self, callback: Callable[[Submittable, Job], None], params: Optional[List[Any]] = None):
        raise NotImplementedError

    @abc.abstractmethod
    def _get_set_of_tasks(self) -> Set[Task]:
        raise NotImplementedError

    def _get_dag(self) -> _DAG:
        return _DAG(self._build_dag())

    def _build_dag(self) -> nx.DiGraph:
        # Build a bipartite-ish graph: data node -> task -> data node edges.
        # Tasks with neither inputs nor outputs are added as isolated nodes.
        graph = nx.DiGraph()
        tasks = self._get_set_of_tasks()
        for task in tasks:
            if has_input := task.input:
                for predecessor in task.input.values():
                    graph.add_edges_from([(predecessor, task)])
            if has_output := task.output:
                for successor in task.output.values():
                    graph.add_edges_from([(task, successor)])
            if not has_input and not has_output:
                graph.add_node(task)
        return graph

    def _get_sorted_tasks(self) -> List[List[Task]]:
        # Topologically sorted generations of tasks; source data nodes are
        # stripped first so the first generation is made of tasks, and
        # generations containing no Task are dropped.
        dag = self._build_dag()
        remove = [node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)]
        dag.remove_nodes_from(remove)
        return list(nodes for nodes in nx.topological_generations(dag) if (Task in (type(node) for node in nodes)))

    def _add_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
        params = [] if params is None else params
        self._subscribers.append(_Subscriber(callback=callback, params=params))

    def _remove_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
        # With params, remove the exact (callback, params) pair; without,
        # remove the first subscriber with a matching callback.
        if params is not None:
            self._subscribers.remove(_Subscriber(callback, params))
        else:
            elem = [x for x in self._subscribers if x.callback == callback]
            if not elem:
                raise ValueError
            self._subscribers.remove(elem[0])
from typing import List

from .._entity._reload import _get_manager
from ..notification import Notifier


class _Entity:
    """Base for managed entities; adds context-manager change buffering.

    While used as a context manager, property changes and their events are
    buffered, then applied/published in one batch on exit.
    """

    _MANAGER_NAME: str
    # True while the entity is inside a `with` block.
    _is_in_context = False
    # Events collected while in context, published on exit.
    _in_context_attributes_changed_collector: List

    def __enter__(self):
        self._is_in_context = True
        self._in_context_attributes_changed_collector = list()
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # If multiple entities is in context, the last to enter will be the first to exit
        self._is_in_context = False
        if hasattr(self, "_properties"):
            # Apply buffered property deletions and changes before saving.
            for to_delete_key in self._properties._pending_deletions:
                self._properties.data.pop(to_delete_key, None)
            self._properties.data.update(self._properties._pending_changes)
            _get_manager(self._MANAGER_NAME)._set(self)
        for event in self._in_context_attributes_changed_collector:
            Notifier.publish(event)
        # NOTE(review): the entity is saved a second time here even when the
        # properties branch above already saved it — confirm whether one of
        # the two `_set` calls is redundant.
        _get_manager(self._MANAGER_NAME)._set(self)
from collections import UserDict

from ..notification import _ENTITY_TO_EVENT_ENTITY_TYPE, EventOperation, Notifier, _make_event


class _Properties(UserDict):
    """Dict of entity properties that persists and notifies on mutation.

    Outside an entity context, every set/delete immediately saves the owning
    entity and publishes an UPDATE event. Inside a context (see `_Entity`),
    changes and deletions are buffered on `_pending_changes` /
    `_pending_deletions` and the events are collected for publication on exit.
    """

    __PROPERTIES_ATTRIBUTE_NAME = "properties"

    def __init__(self, entity_owner, **kwargs):
        super().__init__(**kwargs)
        self._entity_owner = entity_owner
        self._pending_changes = {}
        self._pending_deletions = set()

    def __setitem__(self, key, value):
        super(_Properties, self).__setitem__(key, value)

        # Imported lazily to avoid a circular import with the core package.
        from ... import core as tp

        if hasattr(self, "_entity_owner"):
            event = _make_event(
                self._entity_owner,
                EventOperation.UPDATE,
                attribute_name=self.__PROPERTIES_ATTRIBUTE_NAME,
                attribute_value=value,
            )
            if not self._entity_owner._is_in_context:
                # Persist and notify immediately.
                tp.set(self._entity_owner)
                Notifier.publish(event)
            else:
                # Buffer: a set cancels any pending deletion of the same key.
                if key in self._pending_deletions:
                    self._pending_deletions.remove(key)
                self._pending_changes[key] = value
                self._entity_owner._in_context_attributes_changed_collector.append(event)

    def __getitem__(self, key):
        from taipy.config.common._template_handler import _TemplateHandler as _tpl

        # Values may contain config templates; resolve them on read.
        return _tpl._replace_templates(super(_Properties, self).__getitem__(key))

    def __delitem__(self, key):
        super(_Properties, self).__delitem__(key)

        # Imported lazily to avoid a circular import with the core package.
        from ... import core as tp

        if hasattr(self, "_entity_owner"):
            event = _make_event(
                self._entity_owner,
                EventOperation.UPDATE,
                attribute_name=self.__PROPERTIES_ATTRIBUTE_NAME,
                attribute_value=None,
            )
            if not self._entity_owner._is_in_context:
                # Persist and notify immediately.
                tp.set(self._entity_owner)
                Notifier.publish(event)
            else:
                # Buffer: a deletion cancels any pending change of the same key.
                self._pending_changes.pop(key, None)
                self._pending_deletions.add(key)
                self._entity_owner._in_context_attributes_changed_collector.append(event)
from __future__ import annotations class _EntityIds: def __init__(self): self.data_node_ids = set() self.task_ids = set() self.scenario_ids = set() self.sequence_ids = set() self.job_ids = set() self.cycle_ids = set() self.submission_ids = set() def __add__(self, other: _EntityIds): self.data_node_ids.update(other.data_node_ids) self.task_ids.update(other.task_ids) self.scenario_ids.update(other.scenario_ids) self.sequence_ids.update(other.sequence_ids) self.job_ids.update(other.job_ids) self.cycle_ids.update(other.cycle_ids) self.submission_ids.update(other.submission_ids) return self def __iadd__(self, other: _EntityIds): self.__add__(other) return self
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import math
from functools import reduce
from typing import Any, Dict, List, Tuple

import networkx as nx


class _Node:
    """A positioned node of the displayed DAG, wrapping a core entity."""

    def __init__(self, entity: Any, x, y):
        self.type = entity.__class__.__name__  # class name used as the node kind
        self.entity = entity
        self.x = x  # topological level (column index)
        self.y = y  # vertical position on the shared grid

    
class _Edge:
    """A directed edge between two positioned nodes."""

    def __init__(self, src: _Node, dest: _Node):
        self.src = src
        self.dest = dest


class _DAG:
    """Computes display coordinates for a networkx DiGraph of entities.

    Nodes are grouped by topological generation (one column per generation)
    and spread vertically on a common grid sized so each generation can be
    evenly spaced.
    """

    def __init__(self, dag: nx.DiGraph):
        # One list of nodes per topological generation, in order.
        self._sorted_nodes = list(nodes for nodes in nx.topological_generations(dag))
        self._length, self._width = self.__compute_size()
        self._grid_length, self._grid_width = self.__compute_grid_size()
        self._nodes = self.__compute_nodes()
        self._edges = self.__compute_edges(dag)

    @property
    def width(self) -> int:
        # Size of the largest generation.
        return self._width

    @property
    def length(self) -> int:
        # Number of generations.
        return self._length

    @property
    def nodes(self) -> Dict[str, _Node]:
        return self._nodes

    @property
    def edges(self) -> List[_Edge]:
        return self._edges

    def __compute_size(self) -> Tuple[int, int]:
        # (number of generations, max nodes in a single generation)
        return len(self._sorted_nodes), max([len(i) for i in self._sorted_nodes])

    def __compute_grid_size(self) -> Tuple[int, int]:
        # Grid width is 1 + lcm of per-generation divisors so every generation
        # divides the grid evenly (widest generations use len-1, others len+1).
        if self._width == 1:
            grd_wdt = 1
        else:
            grd_wdt = self.__lcm(*[len(i) + 1 if len(i) != self._width else len(i) - 1 for i in self._sorted_nodes]) + 1
        return len(self._sorted_nodes), grd_wdt

    def __compute_nodes(self) -> Dict[str, _Node]:
        """Assign an (x, y) grid position to every node, keyed by entity id."""
        nodes = {}
        x = 0
        for same_lvl_nodes in self._sorted_nodes:
            lcl_wdt = len(same_lvl_nodes)
            # NOTE(review): despite its name, is_max is True when this level is
            # NOT the widest one — confirm the naming before relying on it.
            is_max = lcl_wdt != self.width
            if self.width != 1:
                # Non-widest levels get an extra margin slot on each side.
                y_incr = (self._grid_width - 1) / (lcl_wdt + 1) if is_max else (self._grid_width - 1) / (lcl_wdt - 1)
            else:
                y_incr = 1
            # Widest levels start below zero so the first increment lands on 0.
            y = 0 if is_max else -y_incr
            for node in same_lvl_nodes:
                y += y_incr
                nodes[node.id] = _Node(node, x, y)
            x += 1
        return nodes

    def __compute_edges(self, dag) -> List[_Edge]:
        # Map each graph edge onto the already-positioned nodes.
        edges = []
        for edge in dag.edges():
            edges.append(_Edge(self.nodes[edge[0].id], self.nodes[edge[1].id]))
        return edges

    @staticmethod
    def __lcm(*integers) -> int:
        # Function math.lcm is only implemented for Python 3.9+
        # For compatibility with Python 3.8 it has been re implemented.
        if 0 in integers:
            return 0
        return reduce(lambda x, y: (x * y) // math.gcd(x, y), integers)
import sys
from typing import List

from taipy._cli._base_cli import _CLI
from taipy.logger._taipy_logger import _TaipyLogger

from ._migrate import (
    _migrate_fs_entities,
    _migrate_mongo_entities,
    _migrate_sql_entities,
    _remove_backup_file_entities,
    _remove_backup_mongo_entities,
    _remove_backup_sql_entities,
    _restore_migrate_file_entities,
    _restore_migrate_mongo_entities,
    _restore_migrate_sql_entities,
)


class _MigrateCLI:
    """Implementation of the ``taipy migrate`` CLI sub-command.

    Supports three repository back ends (filesystem, sql, mongo) and three
    actions: migrate (the default, optionally preceded by a backup), restore
    from backup, and remove the backup. Every handled code path terminates
    the process via sys.exit.
    """

    __logger = _TaipyLogger._get_logger()

    @classmethod
    def create_parser(cls):
        """Register the "migrate" sub-parser and its arguments on the shared CLI."""
        migrate_parser = _CLI._add_subparser(
            "migrate",
            help="Migrate entities created from old taipy versions to be compatible with the current taipy version. "
            " The entity migration should be performed only after updating taipy code to the current version.",
        )
        migrate_parser.add_argument(
            "--repository-type",
            required=True,
            nargs="+",
            help="The type of repository to migrate. If filesystem or sql, a path to the database folder/.sqlite file "
            "should be informed. In case of mongo host, port, user and password must be informed, if left empty it "
            "is assumed default values",
        )
        migrate_parser.add_argument(
            "--skip-backup",
            action="store_true",
            help="Skip the backup of entities before migration.",
        )
        migrate_parser.add_argument(
            "--restore",
            action="store_true",
            help="Restore the migration of entities from backup folder.",
        )
        migrate_parser.add_argument(
            "--remove-backup",
            action="store_true",
            help="Remove the backup of entities. Only use this option if the migration was successful.",
        )

    @classmethod
    def parse_arguments(cls):
        """Dispatch on parsed CLI arguments; no-op unless the sub-command is "migrate"."""
        args = _CLI._parse()

        if getattr(args, "which", None) != "migrate":
            return

        # First value selects the back end; the remaining values are
        # back-end-specific (a path, or mongo connection parameters).
        # Defaults to [None] so indexing repository_args[0] is always safe.
        repository_type = args.repository_type[0]
        repository_args = args.repository_type[1:] if len(args.repository_type) > 1 else [None]

        # Both handlers below call sys.exit themselves, so --restore and
        # --remove-backup never fall through to the migration.
        if args.restore:
            cls.__handle_restore_backup(repository_type, repository_args)
        if args.remove_backup:
            cls.__handle_remove_backup(repository_type, repository_args)

        do_backup = False if args.skip_backup else True
        cls.__migrate_entities(repository_type, repository_args, do_backup)
        sys.exit(0)

    @classmethod
    def __handle_remove_backup(cls, repository_type: str, repository_args: List):
        # Exits 0 on success, 1 on failure or unknown repository type.
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _remove_backup_file_entities(path):
                sys.exit(1)
        elif repository_type == "sql":
            if not _remove_backup_sql_entities(repository_args[0]):
                sys.exit(1)
        elif repository_type == "mongo":
            if not _remove_backup_mongo_entities():
                sys.exit(1)
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)

        sys.exit(0)

    @classmethod
    def __handle_restore_backup(cls, repository_type: str, repository_args: List):
        # Exits 0 on success, 1 on failure or unknown repository type.
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _restore_migrate_file_entities(path):
                sys.exit(1)
        elif repository_type == "sql":
            if not _restore_migrate_sql_entities(repository_args[0]):
                sys.exit(1)
        elif repository_type == "mongo":
            # NOTE(review): the first extra argument is only tested for
            # truthiness and never forwarded ([1:5] skips it) — confirm this
            # slicing matches the documented mongo argument order.
            mongo_args = repository_args[1:5] if repository_args[0] else []
            if not _restore_migrate_mongo_entities(*mongo_args):
                sys.exit(1)
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)
        sys.exit(0)

    @classmethod
    def __migrate_entities(cls, repository_type: str, repository_args: List, do_backup: bool):
        # Runs the migration for the selected back end; exits 1 on failure.
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _migrate_fs_entities(path, do_backup):
                sys.exit(1)
        elif repository_type == "sql":
            if not _migrate_sql_entities(repository_args[0], do_backup):
                sys.exit(1)
        elif repository_type == "mongo":
            # Same argument slicing caveat as in __handle_restore_backup.
            mongo_args = repository_args[1:5] if repository_args[0] else []
            _migrate_mongo_entities(*mongo_args, backup=do_backup)  # type: ignore
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)
import functools

from ..notification import EventOperation, Notifier, _make_event


class _Reloader:
    """The _Reloader singleton class"""

    _instance = None
    _no_reload_context = False

    def __new__(class_, *args, **kwargs):
        # Classic singleton: create once, then always return the same instance.
        if not isinstance(class_._instance, class_):
            class_._instance = object.__new__(class_, *args, **kwargs)
        return class_._instance

    def _reload(self, manager: str, obj):
        """Return the freshest stored version of *obj* from its manager.

        Inside a "no reload" context (see __enter__), *obj* is returned as is.
        When *obj* is inside an entity ``with`` block, its buffered property
        changes/deletions are carried over onto the reloaded entity.
        """
        if self._no_reload_context:
            return obj

        # Falls back to obj itself when the manager has no stored version.
        entity = _get_manager(manager)._get(obj, obj)

        if obj._is_in_context and hasattr(entity, "_properties"):
            if obj._properties._pending_changes:
                entity._properties._pending_changes = obj._properties._pending_changes
            if obj._properties._pending_deletions:
                entity._properties._pending_deletions = obj._properties._pending_deletions
            entity._properties._entity_owner = obj
        return entity

    def __enter__(self):
        # Disable reloading for the duration of the context.
        self._no_reload_context = True
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self._no_reload_context = False


def _self_reload(manager):
    """Decorator factory: reload ``self`` from *manager* before running the method."""

    def __reload(fct):
        @functools.wraps(fct)
        def _do_reload(self, *args, **kwargs):
            self = _Reloader()._reload(manager, self)
            return fct(self, *args, **kwargs)

        return _do_reload

    return __reload


def _self_setter(manager):
    """Decorator factory for entity setters: persist the change and publish an UPDATE event.

    Inside an entity ``with`` block the event is only collected for
    publication on exit; outside, the setter is also re-applied to the freshly
    reloaded entity before saving it.
    """

    def __set_entity(fct):
        @functools.wraps(fct)
        def _do_set_entity(self, *args, **kwargs):
            fct(self, *args, **kwargs)
            entity_manager = _get_manager(manager)
            # A single positional argument becomes the event's attribute value;
            # multiple arguments are recorded as a tuple.
            if len(args) == 1:
                value = args[0]
            else:
                value = args
            event = _make_event(
                self,
                EventOperation.UPDATE,
                attribute_name=fct.__name__,
                attribute_value=value,
            )
            if not self._is_in_context:
                # NOTE(review): the setter runs twice here — once on self above
                # and once on the reloaded entity — confirm this double
                # application is intended.
                entity = _Reloader()._reload(manager, self)
                fct(entity, *args, **kwargs)
                entity_manager._set(entity)
                Notifier.publish(event)
            else:
                self._in_context_attributes_changed_collector.append(event)

        return _do_set_entity

    return __set_entity


@functools.lru_cache
def _get_manager(manager: str):
    """Resolve a manager instance from its short name (cached)."""
    # Imported lazily to avoid circular imports between core sub-packages.
    from ..cycle._cycle_manager_factory import _CycleManagerFactory
    from ..data._data_manager_factory import _DataManagerFactory
    from ..job._job_manager_factory import _JobManagerFactory
    from ..scenario._scenario_manager_factory import _ScenarioManagerFactory
    from ..sequence._sequence_manager_factory import _SequenceManagerFactory
    from ..submission._submission_manager_factory import _SubmissionManagerFactory
    from ..task._task_manager_factory import _TaskManagerFactory

    return {
        "scenario": _ScenarioManagerFactory._build_manager(),
        "sequence": _SequenceManagerFactory._build_manager(),
        "data": _DataManagerFactory._build_manager(),
        "cycle": _CycleManagerFactory._build_manager(),
        "job": _JobManagerFactory._build_manager(),
        "task": _TaskManagerFactory._build_manager(),
        "submission": _SubmissionManagerFactory._build_manager(),
    }[manager]
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import abc
from typing import Optional


class _Labeled:
    """Mixin providing human-readable display labels for core entities."""

    __LABEL_SEPARATOR = " > "

    @abc.abstractmethod
    def get_label(self) -> str:
        raise NotImplementedError

    def _get_label(self) -> str:
        """Returns the entity label made of the simple label prefixed by the owner label.

        Returns:
            The label of the entity as a string.
        """
        return self._get_explicit_label() or self._generate_label()

    @abc.abstractmethod
    def get_simple_label(self) -> str:
        raise NotImplementedError

    def _get_simple_label(self) -> str:
        """Returns the simple label.

        Returns:
            The simple label of the entity as a string.
        """
        return self._get_explicit_label() or self._generate_label(True)

    def _generate_label(self, simple=False) -> str:
        # Builds "owner label > entity label", or just the entity label when
        # simple is requested or the entity is its own owner.
        ls = []
        if not simple:
            if owner_id := self._get_owner_id():
                if getattr(self, "id") != owner_id:
                    # Imported lazily to avoid a circular import with core.
                    from ... import core as tp

                    owner = tp.get(owner_id)
                    ls.append(owner.get_label())
        ls.append(self._generate_entity_label())
        return self.__LABEL_SEPARATOR.join(ls)

    def _get_explicit_label(self) -> Optional[str]:
        # A user-defined "label" property always wins over generated labels.
        if hasattr(self, "_properties"):
            return getattr(self, "_properties").get("label")
        return None

    def _get_owner_id(self) -> Optional[str]:
        if hasattr(self, "owner_id"):
            return getattr(self, "owner_id")
        return None

    def _get_name(self) -> Optional[str]:
        # "name" may be a direct attribute or live in the properties dict.
        if hasattr(self, "name"):
            return getattr(self, "name")
        if hasattr(self, "_properties"):
            return getattr(self, "_properties").get("name")
        return None

    def _get_config_id(self) -> Optional[str]:
        if hasattr(self, "config_id"):
            return getattr(self, "config_id")
        return None

    def _generate_entity_label(self) -> str:
        # Preference order: name, then config_id, then the raw id.
        if name := self._get_name():
            return name
        if config_id := self._get_config_id():
            return config_id
        return getattr(self, "id")
import os
import shutil
from functools import lru_cache
from typing import Dict

import bson
import pymongo

from taipy.logger._taipy_logger import _TaipyLogger

from ._utils import _migrate

__logger = _TaipyLogger._get_logger()

# Collections as they existed before the migration ("pipeline" was dropped).
OLD_COLLECTIONS = [
    "cycle",
    "scenario",
    "pipeline",
    "task",
    "data_node",
    "job",
    "version",
]

# Collections of the current data model.
NEW_COLLECTIONS = [
    "cycle",
    "scenario",
    "task",
    "data_node",
    "job",
    "version",
]

DATABASE_NAME = "taipy"
MONGO_BACKUP_FOLDER = ".mongo_backup"


@lru_cache
def _connect_mongodb(db_host: str, db_port: int, db_username: str, db_password: str) -> pymongo.MongoClient:
    """Return a (cached) MongoClient for the given host/port/credentials."""
    auth_str = ""
    if db_username and db_password:
        auth_str = f"{db_username}:{db_password}@"

    connection_string = f"mongodb://{auth_str}{db_host}:{db_port}"

    return pymongo.MongoClient(connection_string)


def __load_all_entities_from_mongo(
    hostname: str,
    port: int,
    user: str,
    password: str,
):
    """Read every document of every legacy collection, keyed by document id."""
    client = _connect_mongodb(hostname, port, user, password)
    entities = {}
    for collection in OLD_COLLECTIONS:
        db = client[DATABASE_NAME]
        cursor = db[collection].find({})
        for document in cursor:
            entities[document["id"]] = {"data": document}
    return entities


def __write_entities_to_mongo(
    _entities: Dict,
    hostname: str,
    port: int,
    user: str,
    password: str,
):
    """Write migrated documents back, routed to their collection by id."""
    client = _connect_mongodb(hostname, port, user, password)
    for collection in NEW_COLLECTIONS:
        db = client[DATABASE_NAME]
        # NOTE(review): routing matches the collection name as a substring of
        # the document id — confirm ids always embed the collection name.
        documents = [entity["data"] for entity in _entities.values() if collection in entity["data"]["id"]]
        # Bug fix: pymongo's insert_many raises InvalidOperation when given an
        # empty document list, so skip collections with nothing to write.
        if documents:
            db[collection].insert_many(documents)


def _backup_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
) -> bool:
    """Dump every legacy collection to BSON files under MONGO_BACKUP_FOLDER.

    Returns:
        bool: always True.
    """
    client = _connect_mongodb(hostname, port, user, password)
    db = client[DATABASE_NAME]

    if not os.path.exists(MONGO_BACKUP_FOLDER):
        os.makedirs(MONGO_BACKUP_FOLDER, exist_ok=True)

    for collection in OLD_COLLECTIONS:
        with open(os.path.join(MONGO_BACKUP_FOLDER, f"{collection}.bson"), "wb+") as f:
            for doc in db[collection].find():
                f.write(bson.BSON.encode(doc))

    __logger.info(f"Backed up entities to folder '{MONGO_BACKUP_FOLDER}' before migration.")
    return True


def _restore_migrate_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
) -> bool:
    """Re-insert the BSON backup files into the database and delete the backup folder.

    Returns:
        bool: True on success, False when no backup folder exists.
    """
    client = _connect_mongodb(hostname, port, user, password)
    db = client[DATABASE_NAME]

    if not os.path.isdir(MONGO_BACKUP_FOLDER):
        __logger.info(f"The backup folder '{MONGO_BACKUP_FOLDER}' does not exist.")
        return False

    for collection in os.listdir(MONGO_BACKUP_FOLDER):
        if collection.endswith(".bson"):
            with open(os.path.join(MONGO_BACKUP_FOLDER, collection), "rb+") as f:
                # decode_all returns [] for empty files; skip those.
                if bson_data := bson.decode_all(f.read()):  # type: ignore
                    db[collection.split(".")[0]].insert_many(bson_data)

    shutil.rmtree(MONGO_BACKUP_FOLDER)
    __logger.info(f"Restored entities from the backup folder '{MONGO_BACKUP_FOLDER}'.")
    return True


def _remove_backup_mongo_entities() -> bool:
    """Delete the backup folder.

    Returns:
        bool: True on success, False when no backup folder exists.
    """
    if not os.path.isdir(MONGO_BACKUP_FOLDER):
        __logger.info(f"The backup folder '{MONGO_BACKUP_FOLDER}' does not exist.")
        return False

    shutil.rmtree(MONGO_BACKUP_FOLDER)
    __logger.info(f"Removed backup entities from the backup folder '{MONGO_BACKUP_FOLDER}'.")
    return True


def _migrate_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
    backup: bool = True,
) -> bool:
    """Migrate entities from mongodb to the current version.

    Args:
        hostname (str, optional): The hostname of the mongodb. Defaults to "localhost".
        port (int, optional): The port of the mongodb. Defaults to 27017.
        user (str, optional): The username of the mongodb. Defaults to "".
        password (str, optional): The password of the mongodb. Defaults to "".
        backup (bool, optional): Whether to backup the entities before migrating. Defaults to True.

    Returns:
        bool: True if the migration was successful, False otherwise.
    """
    if backup:
        _backup_mongo_entities(hostname=hostname, port=port, user=user, password=password)

    __logger.info(f"Starting entity migration from MongoDB {hostname}:{port}")

    entities = __load_all_entities_from_mongo(hostname, port, user, password)
    entities, _ = _migrate(entities)
    __write_entities_to_mongo(entities, hostname, port, user, password)

    __logger.info("Migration finished")
    return True
from ._migrate_fs import _migrate_fs_entities, _remove_backup_file_entities, _restore_migrate_file_entities from ._migrate_mongo import _migrate_mongo_entities, _remove_backup_mongo_entities, _restore_migrate_mongo_entities from ._migrate_sql import _migrate_sql_entities, _remove_backup_sql_entities, _restore_migrate_sql_entities
import json
import os
import shutil
from typing import Dict

from taipy.logger._taipy_logger import _TaipyLogger

from ._utils import _migrate

__logger = _TaipyLogger._get_logger()


def _load_all_entities_from_fs(root: str) -> Dict:
    """Walk *root* and load every .json entity file into a dict keyed by entity id."""
    entities = {}
    # Fix: the walk variable no longer shadows the `root` parameter (and the
    # unused directory list is explicitly discarded).
    for dirpath, _dirs, files in os.walk(root):
        for file in files:
            if file.endswith(".json"):
                with open(os.path.join(dirpath, file)) as f:
                    _id = file.split(".")[0]
                    # Files under a "version" folder get a prefixed id.
                    if "version" in dirpath:
                        _id = f"VERSION_{_id}"
                    entities[_id] = {
                        "data": json.load(f),
                        "path": os.path.join(dirpath, file),
                    }
    return entities


def __write_entities_to_fs(_entities: Dict, root: str):
    """Write migrated entities back to their original paths, dropping pipelines."""
    if not os.path.exists(root):
        os.makedirs(root, exist_ok=True)

    for _id, entity in _entities.items():
        # Do not write pipeline entities
        if "PIPELINE" in _id:
            continue

        with open(entity["path"], "w") as f:
            json.dump(entity["data"], f, indent=0)

    # Remove pipelines folder
    pipelines_path = os.path.join(root, "pipelines")
    if os.path.exists(pipelines_path):
        shutil.rmtree(pipelines_path)


def _restore_migrate_file_entities(path: str) -> bool:
    """Replace *path* with its "<path>_backup" sibling folder.

    Returns:
        bool: True on success, False when the backup folder does not exist.
    """
    backup_path = f"{path}_backup"
    if not os.path.exists(backup_path):
        __logger.error(f"The backup folder '{backup_path}' does not exist.")
        return False

    if os.path.exists(path):
        shutil.rmtree(path)
    else:
        __logger.warning(f"The original entities folder '{path}' does not exist.")

    os.rename(backup_path, path)
    __logger.info(f"Restored entities from the backup folder '{backup_path}' to '{path}'.")
    return True


def _remove_backup_file_entities(path: str) -> bool:
    """Delete the "<path>_backup" folder.

    Returns:
        bool: True on success, False when the backup folder does not exist.
    """
    backup_path = f"{path}_backup"
    if not os.path.exists(backup_path):
        __logger.error(f"The backup folder '{backup_path}' does not exist.")
        return False

    shutil.rmtree(backup_path)
    __logger.info(f"Removed backup entities from the backup folder '{backup_path}'.")
    return True


def _migrate_fs_entities(path: str, backup: bool = True) -> bool:
    """Migrate entities from filesystem to the current version.

    Args:
        path (str): The path to the folder containing the entities.
        backup (bool, optional): Whether to backup the entities before migrating. Defaults to True.

    Returns:
        bool: True if the migration was successful, False otherwise.
    """
    if not os.path.isdir(path):
        __logger.error(f"Folder '{path}' does not exist.")
        return False

    if backup:
        backup_path = f"{path}_backup"
        try:
            shutil.copytree(path, backup_path)
        except FileExistsError:
            # Never overwrite an existing backup: abort instead.
            __logger.warning(f"The backup folder '{backup_path}' already exists. Migration canceled.")
            return False
        else:
            __logger.info(f"Backed up entities from '{path}' to '{backup_path}' folder before migration.")

    __logger.info(f"Starting entity migration from '{path}' folder.")

    entities = _load_all_entities_from_fs(path)
    entities, _ = _migrate(entities)
    __write_entities_to_fs(entities, path)

    __logger.info("Migration finished")
    return True
from functools import lru_cache

import pymongo


@lru_cache
def _connect_mongodb(
    db_host: str, db_port: int, db_username: str, db_password: str, db_extra_args: frozenset, db_driver: str
) -> pymongo.MongoClient:
    """Create a connection to a Mongo database.

    The `"mongodb_extra_args"` passed by the user is originally a dictionary, but since `@lru_cache` wrapper
    only accepts hashable parameters, the `"mongodb_extra_args"` should be converted into a frozenset beforehand.

    Parameters:
        db_host (str): the database host.
        db_port (int): the database port.
        db_username (str): the database username.
        db_password (str): the database password.
        db_extra_args (frozenset): A frozenset converted from a dictionary of additional arguments to be passed into
            database connection string.
        db_driver (str): optional driver suffix appended to the "mongodb" scheme.

    Returns:
        pymongo.MongoClient
    """
    credentials = f"{db_username}:{db_password}@" if db_username and db_password else ""

    scheme = f"mongodb+{db_driver}" if db_driver else "mongodb"

    # When a driver suffix is given, the port is omitted from the URI.
    host_part = db_host if db_driver else f"{db_host}:{db_port}"

    query = "&".join(f"{k}={str(v)}" for k, v in db_extra_args)
    suffix = f"/?{query}" if query else ""

    return pymongo.MongoClient(f"{scheme}://{credentials}{host_part}{suffix}")
from taipy.config.common._validate_id import _validate_id


class MongoDefaultDocument:
    """The default class for \"custom_document\" property to configure a `MongoCollectionDataNode^`.

    Attributes:
        **kwargs: Attributes of the MongoDefaultDocument object.

    Example:
        - `document = MongoDefaultDocument(name="example", age=30)` will return a MongoDefaultDocument
          object so that `document.name` returns `"example"`, and `document.age` returns `30`.
        - `document = MongoDefaultDocument(date="12/24/2018", temperature=20)` will return a MongoDefaultDocument
          object so that `document.date` returns `"12/24/2018"`, and `document.temperature` returns `20`.
    """

    def __init__(self, **kwargs):
        # Each keyword name is passed through _validate_id before becoming an attribute.
        for attribute_name, value in kwargs.items():
            setattr(self, _validate_id(attribute_name), value)
from .mongo_default_document import MongoDefaultDocument
from collections import UserList


class _ListAttributes(UserList):
    """A list attribute of an entity that persists its parent entity on mutation.

    Every mutating operation (append, extend, remove, clear, +) re-saves the
    parent entity through ``tp.set`` so the change is not lost.
    """

    def __init__(self, parent, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._parent = parent

    def __add_iterable(self, iterable):
        # Append items one by one without triggering a save per item.
        for i in iterable:
            super(_ListAttributes, self).append(i)

    def __set_self(self):
        # Imported lazily to avoid a circular import with the core package.
        from ... import core as tp

        if hasattr(self, "_parent"):
            tp.set(self._parent)

    def __add__(self, value):
        # NOTE: mutates self in place and returns it (not a new list).
        if hasattr(value, "__iter__"):
            self.__add_iterable(value)
            # Bug fix: adding an iterable previously skipped persisting the
            # parent entity, unlike every other mutator of this class.
            self.__set_self()
        else:
            self.append(value)  # append already persists the parent
        return self

    def extend(self, value) -> None:
        super(_ListAttributes, self).extend(value)
        self.__set_self()

    def append(self, value) -> None:
        super(_ListAttributes, self).append(value)
        self.__set_self()

    def remove(self, value):
        super(_ListAttributes, self).remove(value)
        self.__set_self()

    def clear(self) -> None:
        super(_ListAttributes, self).clear()
        self.__set_self()
import functools import warnings from typing import Optional warnings.simplefilter("once", ResourceWarning) def _warn_deprecated(deprecated: str, suggest: Optional[str] = None, stacklevel: int = 3) -> None: category = DeprecationWarning message = f"{deprecated} is deprecated." if suggest: message += f" Use {suggest} instead." warnings.warn(message=message, category=category, stacklevel=stacklevel) def _warn_no_core_service(stacklevel: int = 3): def inner(f): @functools.wraps(f) def _check_if_core_service_is_running(*args, **kwargs): from .._orchestrator._orchestrator_factory import _OrchestratorFactory if not _OrchestratorFactory._dispatcher: message = "The Core service is NOT running" warnings.warn(message=message, category=ResourceWarning, stacklevel=stacklevel) return f(*args, **kwargs) return _check_if_core_service_is_running return inner
import functools from enum import Enum class _ReprEnum(Enum): @classmethod @functools.lru_cache def _from_repr(cls, repr_: str): return next(filter(lambda e: repr(e) == repr_, cls)) # type: ignore
from typing import Iterable

from taipy.logger._taipy_logger import _TaipyLogger

from ..data import DataNode


def _warn_if_inputs_not_ready(inputs: Iterable[DataNode]):
    """Log a warning for every input data node that has never been written.

    File-based data node types additionally get a hint that their configured
    path may be wrong.
    """
    # Imported lazily to avoid a circular import with the data package.
    from ..data import CSVDataNode, ExcelDataNode, JSONDataNode, ParquetDataNode, PickleDataNode
    from ..data._data_manager_factory import _DataManagerFactory

    logger = _TaipyLogger._get_logger()
    data_manager = _DataManagerFactory._build_manager()
    for dn in inputs:
        # Re-fetch the node to inspect its latest persisted state.
        dn = data_manager._get(dn.id)
        if dn.is_ready_for_reading is False and not dn._last_edit_date:
            if dn.storage_type() in [
                CSVDataNode.storage_type(),
                ExcelDataNode.storage_type(),
                JSONDataNode.storage_type(),
                PickleDataNode.storage_type(),
                ParquetDataNode.storage_type(),
            ]:
                logger.warning(
                    f"{dn.id} cannot be read because it has never been written. "
                    f"Hint: The data node may refer to a wrong path : {dn.path} "
                )
            else:
                logger.warning(f"{dn.id} cannot be read because it has never been written.")
from typing import TypeVar, Union

from .._repository._abstract_converter import _AbstractConverter
from .._repository._base_taipy_model import _BaseModel

# Type variable for repository model classes (the persisted form of an entity).
ModelType = TypeVar("ModelType", bound=_BaseModel)
# Type variable for the core entity a repository exposes to the application.
Entity = TypeVar("Entity")
# Type variable for the converter translating between model and entity.
Converter = TypeVar("Converter", bound=_AbstractConverter)
# A JSON-serializable value as handled by the repositories.
Json = Union[dict, list, str, int, float, bool]
import functools
import time
from collections import namedtuple
from importlib import import_module
from operator import attrgetter
from typing import Callable, Optional, Tuple

from taipy.config import Config


@functools.lru_cache
def _load_fct(module_name: str, fct_name: str) -> Callable:
    """Import *module_name* and return its (possibly dotted) attribute *fct_name*."""
    module = import_module(module_name)
    return attrgetter(fct_name)(module)


def _retry_read_entity(exceptions: Tuple, sleep_time: float = 0.2):
    """
    Retries the wrapped function/method if the exceptions listed in ``exceptions`` are thrown.

    The number of retries is defined by Config.core.read_entity_retry.

    Parameters:
        exceptions (tuple): Tuple of exceptions that trigger a retry attempt.
        sleep_time (float): Time to sleep between retries.
    """

    def decorator(func):
        # Bug fix: preserve the wrapped function's metadata (name/docstring),
        # consistent with the other decorators in this package.
        @functools.wraps(func)
        def newfn(*args, **kwargs):
            for _ in range(Config.core.read_entity_retry):
                try:
                    return func(*args, **kwargs)
                except exceptions:
                    time.sleep(sleep_time)
            # Final attempt: let any exception propagate to the caller.
            return func(*args, **kwargs)

        return newfn

    return decorator


@functools.lru_cache
def _get_fct_name(f) -> Optional[str]:
    # Mock function does not have __qualname__ attribute -> return __name__
    # Partial or anonymous function does not have __name__ or __qualname__ attribute -> return None
    name = getattr(f, "__qualname__", getattr(f, "__name__", None))
    return name


def _fct_to_dict(obj):
    """Serialize a callable or _Subscriber to a dict; None when it cannot be named."""
    params = []
    callback = obj

    if isinstance(obj, _Subscriber):
        callback = obj.callback
        params = obj.params

    fct_name = _get_fct_name(callback)
    if not fct_name:
        return None
    return {
        "fct_name": fct_name,
        "fct_params": params,
        "fct_module": callback.__module__,
    }


def _fcts_to_dict(objs):
    """Serialize a list of callables/_Subscribers, dropping unserializable ones."""
    return [d for obj in objs if (d := _fct_to_dict(obj)) is not None]


# Pairing of a subscriber callback with its extra parameters.
_Subscriber = namedtuple("_Subscriber", "callback params")
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._scenario_fs_repository import _ScenarioFSRepository
from ._scenario_manager import _ScenarioManager
from ._scenario_sql_repository import _ScenarioSQLRepository


class _ScenarioManagerFactory(_ManagerFactory):
    """Builds the scenario manager, preferring the enterprise implementation when installed."""

    __REPOSITORY_MAP = {"default": _ScenarioFSRepository, "sql": _ScenarioSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_ScenarioManager]:  # type: ignore
        if not cls._using_enterprise():
            manager = _ScenarioManager
            repository_factory = cls._build_repository
        else:
            # The enterprise edition ships its own manager and repository builder.
            manager = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".scenario._scenario_manager", "_ScenarioManager"
            )  # type: ignore
            repository_factory = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".scenario._scenario_manager_factory", "_ScenarioManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        # Repository implementation is selected via the shared repo map helper.
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

from sqlalchemy import JSON, Boolean, Column, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from ..cycle.cycle_id import CycleId
from ..data.data_node_id import DataNodeId
from ..task.task_id import TaskId
from .scenario_id import ScenarioId


@mapper_registry.mapped
@dataclass
class _ScenarioModel(_BaseModel):
    """Persistence model of a scenario (SQL table layout and serialized fields)."""

    __table__ = Table(
        "scenario",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("config_id", String),
        Column("tasks", JSON),
        Column("additional_data_nodes", JSON),
        Column("properties", JSON),
        Column("creation_date", String),
        Column("primary_scenario", Boolean),
        Column("subscribers", JSON),
        Column("tags", JSON),
        Column("version", String),
        Column("sequences", JSON),
        Column("cycle", String),
    )
    id: ScenarioId
    config_id: str
    tasks: List[TaskId]
    additional_data_nodes: List[DataNodeId]
    properties: Dict[str, Any]
    creation_date: str  # ISO-formatted datetime string
    primary_scenario: bool
    subscribers: List[Dict]
    tags: List[str]
    version: str
    sequences: Optional[Dict[str, Dict]] = None
    cycle: Optional[CycleId] = None

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a raw dict, tolerating absent optional keys."""
        return _ScenarioModel(
            id=data["id"],
            config_id=data["config_id"],
            tasks=_BaseModel._deserialize_attribute(data["tasks"]),
            additional_data_nodes=_BaseModel._deserialize_attribute(data["additional_data_nodes"]),
            properties=_BaseModel._deserialize_attribute(data["properties"]),
            creation_date=data["creation_date"],
            primary_scenario=data["primary_scenario"],
            subscribers=_BaseModel._deserialize_attribute(data["subscribers"]),
            tags=_BaseModel._deserialize_attribute(data["tags"]),
            version=data["version"],
            # Bug fix: "sequences" is an optional field (data written before it
            # existed lacks the key); guard the access like "cycle" below
            # instead of raising KeyError.
            sequences=_BaseModel._deserialize_attribute(data["sequences"]) if "sequences" in data else None,
            cycle=CycleId(data["cycle"]) if "cycle" in data else None,
        )

    def to_list(self):
        """Return the field values in table-column order."""
        return [
            self.id,
            self.config_id,
            _BaseModel._serialize_attribute(self.tasks),
            _BaseModel._serialize_attribute(self.additional_data_nodes),
            _BaseModel._serialize_attribute(self.properties),
            self.creation_date,
            self.primary_scenario,
            _BaseModel._serialize_attribute(self.subscribers),
            _BaseModel._serialize_attribute(self.tags),
            self.version,
            _BaseModel._serialize_attribute(self.sequences),
            self.cycle,
        ]
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._filesystem_repository import _FileSystemRepository from ._scenario_converter import _ScenarioConverter from ._scenario_model import _ScenarioModel class _ScenarioFSRepository(_FileSystemRepository): def __init__(self): super().__init__(model_type=_ScenarioModel, converter=_ScenarioConverter, dir_name="scenarios")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._sql_repository import _SQLRepository from ._scenario_converter import _ScenarioConverter from ._scenario_model import _ScenarioModel class _ScenarioSQLRepository(_SQLRepository): def __init__(self): super().__init__(model_type=_ScenarioModel, converter=_ScenarioConverter)
from datetime import datetime
from typing import Dict, List, Optional, Set, Union

from .._repository._abstract_converter import _AbstractConverter
from .._version._utils import _migrate_entity
from ..common import _utils
from ..cycle._cycle_manager_factory import _CycleManagerFactory
from ..cycle.cycle import Cycle, CycleId
from ..data.data_node import DataNode, DataNodeId
from ..scenario._scenario_model import _ScenarioModel
from ..scenario.scenario import Scenario
from ..task.task import Task, TaskId


class _ScenarioConverter(_AbstractConverter):
    """Converts between `Scenario^` entities and their `_ScenarioModel` representation."""

    @classmethod
    def _entity_to_model(cls, scenario: Scenario) -> _ScenarioModel:
        """Serialize a scenario (tasks, sequences, subscribers, cycle...) into a model."""
        # Sequences are stored by name; tasks inside them are reduced to their ids,
        # and subscriber callables are serialized to module/name dicts.
        sequences: Dict[str, Dict[str, Union[List[TaskId], Dict, List]]] = {}
        for p_name, sequence_data in scenario._sequences.items():
            sequences[p_name] = {
                Scenario._SEQUENCE_TASKS_KEY: [
                    t.id if isinstance(t, Task) else t for t in sequence_data.get("tasks", [])
                ],
                Scenario._SEQUENCE_PROPERTIES_KEY: sequence_data.get("properties", {}),
                Scenario._SEQUENCE_SUBSCRIBERS_KEY: _utils._fcts_to_dict(sequence_data.get("subscribers", [])),
            }
        return _ScenarioModel(
            id=scenario.id,
            config_id=scenario.config_id,
            # Tasks/data nodes may be entities or already-resolved ids; keep ids only.
            tasks=[task.id if isinstance(task, Task) else TaskId(str(task)) for task in list(scenario._tasks)],
            additional_data_nodes=[
                dn.id if isinstance(dn, DataNode) else DataNodeId(str(dn))
                for dn in list(scenario._additional_data_nodes)
            ],
            properties=scenario._properties.data,
            creation_date=scenario._creation_date.isoformat(),
            primary_scenario=scenario._primary_scenario,
            subscribers=_utils._fcts_to_dict(scenario._subscribers),
            tags=list(scenario._tags),
            version=scenario._version,
            cycle=scenario._cycle.id if scenario._cycle else None,
            sequences=sequences if sequences else None,
        )

    @classmethod
    def _model_to_entity(cls, model: _ScenarioModel) -> Scenario:
        """Rebuild a `Scenario^` entity from its model, re-importing subscriber functions."""
        tasks: Union[Set[TaskId], Set[Task], Set] = set()
        if model.tasks:
            tasks = set(model.tasks)
        if model.sequences:
            # Replace each serialized subscriber dict with a live _Subscriber whose
            # function is re-imported by module and name.
            # NOTE: this mutates model.sequences in place before passing it on.
            for sequence_name, sequence_data in model.sequences.items():
                if subscribers := sequence_data.get(Scenario._SEQUENCE_SUBSCRIBERS_KEY):
                    model.sequences[sequence_name][Scenario._SEQUENCE_SUBSCRIBERS_KEY] = [
                        _utils._Subscriber(_utils._load_fct(it["fct_module"], it["fct_name"]), it["fct_params"])
                        for it in subscribers
                    ]
        scenario = Scenario(
            scenario_id=model.id,
            config_id=model.config_id,
            tasks=tasks,
            additional_data_nodes=set(model.additional_data_nodes),
            properties=model.properties,
            creation_date=datetime.fromisoformat(model.creation_date),
            is_primary=model.primary_scenario,
            tags=set(model.tags),
            cycle=cls.__to_cycle(model.cycle),
            subscribers=[
                _utils._Subscriber(_utils._load_fct(it["fct_module"], it["fct_name"]), it["fct_params"])
                for it in model.subscribers
            ],
            version=model.version,
            sequences=model.sequences,
        )
        # Apply any pending version migrations before handing the entity back.
        return _migrate_entity(scenario)

    @staticmethod
    def __to_cycle(cycle_id: Optional[CycleId] = None) -> Optional[Cycle]:
        # Resolve the cycle id through the cycle manager; None stays None.
        return _CycleManagerFactory._build_manager()._get(cycle_id) if cycle_id else None
from typing import NewType

# Distinct alias over str so type checkers can tell scenario ids apart from plain strings.
ScenarioId = NewType("ScenarioId", str)
ScenarioId.__doc__ = """Type that holds a `Scenario^` identifier."""
from abc import abstractmethod
from typing import Callable, Iterable, List, Optional, Union

from ..job.job import Job
from ..task.task import Task


class _AbstractOrchestrator:
    """Creates, enqueues, and orchestrates jobs as instances of `Job^` class."""

    @classmethod
    @abstractmethod
    def initialize(cls):
        """Prepare the orchestrator's internal state before any submission."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def submit(
        cls,
        sequence,
        callbacks: Optional[Iterable[Callable]],
        force: bool = False,
        wait: bool = False,
        timeout: Optional[Union[float, int]] = None,
    ) -> List[Job]:
        """Submit a sequence for execution and return the created jobs.

        `callbacks` are invoked on job status changes; `force` bypasses
        skip logic; `wait`/`timeout` optionally block until completion.
        """
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def submit_task(
        cls,
        task: Task,
        callbacks: Optional[Iterable[Callable]] = None,
        force: bool = False,
        wait: bool = False,
        timeout: Optional[Union[float, int]] = None,
    ) -> Job:
        """Submit a single task for execution and return the created job."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def cancel_job(cls, job):
        """Cancel the given job."""
        raise NotImplementedError
from importlib import util
from typing import Optional, Type

from taipy.config.config import Config

from ..common._utils import _load_fct
from ..exceptions.exceptions import ModeNotAvailable, OrchestratorNotBuilt
from ._abstract_orchestrator import _AbstractOrchestrator
from ._dispatcher import _DevelopmentJobDispatcher, _JobDispatcher, _StandaloneJobDispatcher
from ._orchestrator import _Orchestrator


class _OrchestratorFactory:
    """Builds and caches the orchestrator and its job dispatcher.

    The enterprise implementations are preferred whenever the
    `taipy.enterprise` package is importable; otherwise the community
    classes in this package are used.
    """

    _TAIPY_ENTERPRISE_MODULE = "taipy.enterprise"
    _TAIPY_ENTERPRISE_CORE_ORCHESTRATOR_MODULE = _TAIPY_ENTERPRISE_MODULE + ".core._orchestrator._orchestrator"
    _TAIPY_ENTERPRISE_CORE_DISPATCHER_MODULE = _TAIPY_ENTERPRISE_MODULE + ".core._orchestrator._dispatcher"
    __TAIPY_ENTERPRISE_BUILD_DISPATCHER_METHOD = "_build_dispatcher"

    # Cached singletons, built lazily by the class methods below.
    _orchestrator: Optional[_Orchestrator] = None
    _dispatcher: Optional[_JobDispatcher] = None

    @classmethod
    def _build_orchestrator(cls) -> Type[_AbstractOrchestrator]:
        """Select (enterprise or community), initialize, cache, and return the orchestrator class."""
        if util.find_spec(cls._TAIPY_ENTERPRISE_MODULE) is not None:
            cls._orchestrator = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_ORCHESTRATOR_MODULE,
                "Orchestrator",
            )  # type: ignore
        else:
            cls._orchestrator = _Orchestrator  # type: ignore
        cls._orchestrator.initialize()  # type: ignore
        return cls._orchestrator  # type: ignore

    @classmethod
    def _build_dispatcher(cls, force_restart=False) -> Optional[_JobDispatcher]:
        """Build the dispatcher matching the configured job execution mode.

        Raises:
            OrchestratorNotBuilt: If `_build_orchestrator` was not called first.
            ModeNotAvailable: If the configured job mode is not recognized.
        """
        if not cls._orchestrator:
            raise OrchestratorNotBuilt
        if Config.job_config.is_standalone:
            cls.__build_standalone_job_dispatcher(force_restart=force_restart)
        elif Config.job_config.is_development:
            cls.__build_development_job_dispatcher()
        elif util.find_spec(cls._TAIPY_ENTERPRISE_MODULE):
            cls.__build_enterprise_job_dispatcher(force_restart=force_restart)
        else:
            raise ModeNotAvailable(f"Job mode {Config.job_config.mode} is not available.")
        return cls._dispatcher

    @classmethod
    def _remove_dispatcher(cls) -> Optional[_JobDispatcher]:
        """Stop and drop the cached dispatcher (development dispatchers are never stopped)."""
        if cls._dispatcher is not None and not isinstance(cls._dispatcher, _DevelopmentJobDispatcher):
            cls._dispatcher.stop()
        cls._dispatcher = None
        return cls._dispatcher

    @classmethod
    def __build_standalone_job_dispatcher(cls, force_restart=False):
        # Reuse the running standalone dispatcher unless a restart is forced.
        if isinstance(cls._dispatcher, _StandaloneJobDispatcher):
            if force_restart:
                cls._dispatcher.stop()
            else:
                return
        if util.find_spec(cls._TAIPY_ENTERPRISE_MODULE) is not None:
            cls._dispatcher = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_DISPATCHER_MODULE, cls.__TAIPY_ENTERPRISE_BUILD_DISPATCHER_METHOD
            )(cls._orchestrator)
        else:
            cls._dispatcher = _StandaloneJobDispatcher(cls._orchestrator)  # type: ignore
        cls._dispatcher.start()  # type: ignore

    @classmethod
    def __build_development_job_dispatcher(cls):
        # A standalone dispatcher may still be running when switching to development mode.
        if isinstance(cls._dispatcher, _StandaloneJobDispatcher):
            cls._dispatcher.stop()
        cls._dispatcher = _DevelopmentJobDispatcher(cls._orchestrator)  # type: ignore

    @classmethod
    def __build_enterprise_job_dispatcher(cls, force_restart=False):
        cls._dispatcher = _load_fct(
            cls._TAIPY_ENTERPRISE_CORE_DISPATCHER_MODULE, cls.__TAIPY_ENTERPRISE_BUILD_DISPATCHER_METHOD
        )(cls._orchestrator, force_restart)
        if cls._dispatcher:
            cls._dispatcher.start()
        else:
            raise ModeNotAvailable(f"Job mode {Config.job_config.mode} is not available.")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from ._development_job_dispatcher import _DevelopmentJobDispatcher from ._job_dispatcher import _JobDispatcher from ._standalone_job_dispatcher import _StandaloneJobDispatcher
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from typing import Optional from ...job.job import Job from .._abstract_orchestrator import _AbstractOrchestrator from ._job_dispatcher import _JobDispatcher class _DevelopmentJobDispatcher(_JobDispatcher): """Manages job dispatching (instances of `Job^` class) in a synchronous way.""" def __init__(self, orchestrator: Optional[_AbstractOrchestrator]): super().__init__(orchestrator) def start(self): raise NotImplementedError def is_running(self) -> bool: return True def stop(self): raise NotImplementedError def run(self): raise NotImplementedError def _dispatch(self, job: Job): """Dispatches the given `Job^` on an available worker for execution. Parameters: job (Job^): The job to submit on an executor with an available worker. """ rs = self._wrapped_function(job.id, job.task) self._update_job_status(job, rs)
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from typing import Optional

from taipy.config._serializer._toml_serializer import _TomlSerializer
from taipy.config.config import Config

from ...job.job import Job
from .._abstract_orchestrator import _AbstractOrchestrator
from ._job_dispatcher import _JobDispatcher


class _StandaloneJobDispatcher(_JobDispatcher):
    """Manages job dispatching (instances of `Job^` class) in an asynchronous way using a ProcessPoolExecutor."""

    def __init__(self, orchestrator: Optional[_AbstractOrchestrator]):
        super().__init__(orchestrator)
        # Keep at least one worker even when max_nb_of_workers is unset (None/0).
        self._executor = ProcessPoolExecutor(Config.job_config.max_nb_of_workers or 1)  # type: ignore
        self._nb_available_workers = self._executor._max_workers  # type: ignore

    def _dispatch(self, job: Job):
        """Dispatches the given `Job^` on an available worker for execution.

        Parameters:
            job (Job^): The job to submit on an executor with an available worker.
        """
        self._nb_available_workers -= 1
        # Worker processes don't share the parent's config: serialize it and
        # have the wrapped function re-apply it on the other side.
        config_as_string = _TomlSerializer()._serialize(Config._applied_config)
        future = self._executor.submit(self._wrapped_function_with_config_load, config_as_string, job.id, job.task)
        self._set_dispatched_processes(job.id, future)  # type: ignore
        # Callbacks fire in registration order: free the worker first, then record the result.
        future.add_done_callback(self._release_worker)
        future.add_done_callback(partial(self._update_job_status_from_future, job))

    def _release_worker(self, _):
        # NOTE(review): incremented from the executor callback thread without a lock —
        # presumably tolerable for this availability counter; confirm if exact accounting matters.
        self._nb_available_workers += 1

    def _update_job_status_from_future(self, job: Job, ft):
        # The future's result is the list of exceptions produced by the wrapped function.
        self._pop_dispatched_process(job.id)  # type: ignore
        self._update_job_status(job, ft.result())
import threading
from abc import abstractmethod
from typing import Dict, Optional

from taipy.config.config import Config
from taipy.logger._taipy_logger import _TaipyLogger

from ...data._data_manager_factory import _DataManagerFactory
from ...job._job_manager_factory import _JobManagerFactory
from ...job.job import Job
from ...task.task import Task
from .._abstract_orchestrator import _AbstractOrchestrator
from ._task_function_wrapper import _TaskFunctionWrapper


class _JobDispatcher(threading.Thread, _TaskFunctionWrapper):
    """Manages job dispatching (instances of `Job^` class) on executors."""

    _STOP_FLAG = False
    # job_id -> dispatched process/future; shared at class level across dispatchers.
    _dispatched_processes: Dict = {}
    __logger = _TaipyLogger._get_logger()
    _nb_available_workers: int = 1

    def __init__(self, orchestrator: Optional[_AbstractOrchestrator]):
        threading.Thread.__init__(self, name="Thread-Taipy-JobDispatcher")
        self.daemon = True  # do not block interpreter shutdown
        self.orchestrator = orchestrator
        self.lock = self.orchestrator.lock  # type: ignore
        Config.block_update()

    def start(self):
        """Start the dispatcher"""
        threading.Thread.start(self)

    def is_running(self) -> bool:
        """Return True if the dispatcher is running"""
        return self.is_alive()

    def stop(self):
        """Stop the dispatcher"""
        # Sets an instance-level flag shadowing the class default; the run
        # loop observes it on its next iteration.
        self._STOP_FLAG = True

    def run(self):
        """Poll the orchestrator queue and execute jobs until stopped."""
        _TaipyLogger._get_logger().info("Start job dispatcher...")
        while not self._STOP_FLAG:
            try:
                if self._can_execute():
                    with self.lock:
                        job = self.orchestrator.jobs_to_run.get(block=True, timeout=0.1)
                    self._execute_job(job)
            except Exception:  # In case the last job of the queue has been removed.
                pass

    def _can_execute(self) -> bool:
        """Returns True if the dispatcher have resources to execute a new job."""
        return self._nb_available_workers > 0

    def _execute_job(self, job: Job):
        """Run or skip the job depending on `force` and its skippability."""
        if job.force or self._needs_to_run(job.task):
            if job.force:
                self.__logger.info(f"job {job.id} is forced to be executed.")
            job.running()
            self._dispatch(job)
        else:
            job._unlock_edit_on_outputs()
            job.skipped()
            self.__logger.info(f"job {job.id} is skipped.")

    def _execute_jobs_synchronously(self):
        """Drain the queue of jobs to run, executing each in the calling thread."""
        while not self.orchestrator.jobs_to_run.empty():
            with self.lock:
                try:
                    job = self.orchestrator.jobs_to_run.get()
                except Exception:  # In case the last job of the queue has been removed.
                    # Bug fix: the previous version logged `job.id` here (with `job`
                    # unbound when get() fails) and then still called
                    # `_execute_job(job)`, raising NameError. Skip this iteration.
                    self.__logger.warning("The queue of jobs to run was emptied before a job could be fetched.")
                    continue
            self._execute_job(job)

    @staticmethod
    def _needs_to_run(task: Task) -> bool:
        """
        Returns True if the task has no output or if at least one input was modified since the latest run.

        Parameters:
            task (Task^): The task to run.
        Returns:
            True if the task needs to run. False otherwise.
        """
        if not task.skippable:
            return True
        data_manager = _DataManagerFactory._build_manager()
        if len(task.output) == 0:
            return True
        are_outputs_in_cache = all(data_manager._get(dn.id).is_valid for dn in task.output.values())
        if not are_outputs_in_cache:
            return True
        if len(task.input) == 0:
            return False
        # Re-run when any input was edited after the oldest output edit.
        input_last_edit = max(data_manager._get(dn.id).last_edit_date for dn in task.input.values())
        output_last_edit = min(data_manager._get(dn.id).last_edit_date for dn in task.output.values())
        return input_last_edit > output_last_edit

    @abstractmethod
    def _dispatch(self, job: Job):
        """
        Dispatches the given `Job^` on an available worker for execution.

        Parameters:
            job (Job^): The job to submit on an executor with an available worker.
        """
        raise NotImplementedError

    @staticmethod
    def _update_job_status(job: Job, exceptions):
        """Record the job's final status (from the exceptions list) and persist it."""
        job.update_status(exceptions)
        _JobManagerFactory._build_manager()._set(job)

    @classmethod
    def _set_dispatched_processes(cls, job_id, process):
        cls._dispatched_processes[job_id] = process

    @classmethod
    def _pop_dispatched_process(cls, job_id, default=None):
        return cls._dispatched_processes.pop(job_id, default)  # type: ignore
from typing import Any, List

from taipy.config._serializer._toml_serializer import _TomlSerializer
from taipy.config.config import Config

from ...data._data_manager_factory import _DataManagerFactory
from ...data.data_node import DataNode
from ...exceptions import DataNodeWritingError
from ...job.job_id import JobId
from ...task.task import Task


class _TaskFunctionWrapper:
    """Wraps the execution of a task's function: read inputs, run, write outputs."""

    @classmethod
    def _wrapped_function_with_config_load(cls, config_as_string, job_id: JobId, task: Task):
        # Entry point used by subprocess workers: re-apply the serialized config first.
        Config._applied_config._update(_TomlSerializer()._deserialize(config_as_string))
        Config.block_update()
        return cls._wrapped_function(job_id, task)

    @classmethod
    def _wrapped_function(cls, job_id: JobId, task: Task):
        """Run `task.function` on its inputs and persist the results.

        Returns a list of exceptions (empty on success) rather than raising,
        so the dispatcher can fold failures into the job status.
        """
        try:
            inputs: List[DataNode] = list(task.input.values())
            outputs: List[DataNode] = list(task.output.values())
            fct = task.function
            results = fct(*cls.__read_inputs(inputs))
            return cls.__write_data(outputs, results, job_id)
        except Exception as e:
            return [e]

    @classmethod
    def __read_inputs(cls, inputs: List[DataNode]) -> List[Any]:
        # Re-fetch each data node from the manager for fresh state before reading.
        data_manager = _DataManagerFactory._build_manager()
        return [data_manager._get(dn.id).read_or_raise() for dn in inputs]

    @classmethod
    def __write_data(cls, outputs: List[DataNode], results, job_id: JobId):
        # NOTE(review): returns None when there are no outputs — callers appear
        # to treat a falsy result as success; confirm before changing.
        data_manager = _DataManagerFactory._build_manager()
        try:
            if outputs:
                _results = cls.__extract_results(outputs, results)
                exceptions = []
                for res, dn in zip(_results, outputs):
                    try:
                        data_node = data_manager._get(dn.id)
                        data_node.write(res, job_id=job_id)
                        data_manager._set(data_node)
                    except Exception as e:
                        # Collect per-output failures so remaining outputs are still written.
                        exceptions.append(DataNodeWritingError(f"Error writing in datanode id {dn.id}: {e}"))
                return exceptions
        except Exception as e:
            return [e]

    @classmethod
    def __extract_results(cls, outputs: List[DataNode], results: Any) -> List[Any]:
        # Single output: the raw result is the value; otherwise expect one result per output.
        _results: List[Any] = [results] if len(outputs) == 1 else results
        if len(_results) != len(outputs):
            raise DataNodeWritingError("Error: wrong number of result or task output")
        return _results
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._task_fs_repository import _TaskFSRepository
from ._task_manager import _TaskManager
from ._task_sql_repository import _TaskSQLRepository


class _TaskManagerFactory(_ManagerFactory):
    """Builds the task manager, wiring in the configured repository implementation."""

    # Maps the configured repository type to its implementation class.
    __REPOSITORY_MAP = {"default": _TaskFSRepository, "sql": _TaskSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_TaskManager]:  # type: ignore
        """Return the task manager class with its `_repository` attribute set.

        The enterprise manager/repository are preferred when available.
        """
        if cls._using_enterprise():
            task_manager = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager", "_TaskManager"
            )  # type: ignore
            build_repository = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager_factory", "_TaskManagerFactory"
            )._build_repository  # type: ignore
        else:
            task_manager = _TaskManager
            build_repository = cls._build_repository
        task_manager._repository = build_repository()  # type: ignore
        return task_manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        # Instantiate the repository class selected by the current configuration.
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from .._repository._abstract_converter import _AbstractConverter
from .._version._utils import _migrate_entity
from ..common._utils import _load_fct
from ..data._data_manager_factory import _DataManagerFactory
from ..exceptions import NonExistingDataNode
from ..task._task_model import _TaskModel
from ..task.task import Task
from .task import TaskId


class _TaskConverter(_AbstractConverter):
    """Converts between `Task^` entities and their `_TaskModel` representation."""

    @classmethod
    def _entity_to_model(cls, task: Task) -> _TaskModel:
        """Serialize a task; its function is stored by module + name for re-import."""
        return _TaskModel(
            id=task.id,
            owner_id=task.owner_id,
            parent_ids=list(task._parent_ids),
            config_id=task.config_id,
            input_ids=cls.__to_ids(task.input.values()),
            function_name=task._function.__name__,
            function_module=task._function.__module__,
            output_ids=cls.__to_ids(task.output.values()),
            version=task._version,
            skippable=task._skippable,
            # Copy so later mutations of the entity's properties don't leak into the model.
            properties=task._properties.data.copy(),
        )

    @classmethod
    def _model_to_entity(cls, model: _TaskModel) -> Task:
        """Rebuild a `Task^` from its model, resolving data nodes and re-importing the function."""
        task = Task(
            id=TaskId(model.id),
            owner_id=model.owner_id,
            parent_ids=set(model.parent_ids),
            config_id=model.config_id,
            function=_load_fct(model.function_module, model.function_name),
            input=cls.__to_data_nodes(model.input_ids),
            output=cls.__to_data_nodes(model.output_ids),
            version=model.version,
            skippable=model.skippable,
            properties=model.properties,
        )
        # Apply any pending version migrations before handing the entity back.
        return _migrate_entity(task)

    @staticmethod
    def __to_ids(data_nodes):
        return [i.id for i in data_nodes]

    @staticmethod
    def __to_data_nodes(data_nodes_ids):
        """Resolve ids to data node entities; raise NonExistingDataNode for unknown ids."""
        data_nodes = []
        data_manager = _DataManagerFactory._build_manager()
        for _id in data_nodes_ids:
            if data_node := data_manager._get(_id):
                data_nodes.append(data_node)
            else:
                raise NonExistingDataNode(_id)
        return data_nodes
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._sql_repository import _SQLRepository
from ._task_converter import _TaskConverter
from ._task_model import _TaskModel


class _TaskSQLRepository(_SQLRepository):
    """SQL-backed repository for tasks.

    Persists `_TaskModel` rows and relies on `_TaskConverter` to map
    models to/from entities.
    """

    def __init__(self):
        super().__init__(
            model_type=_TaskModel,
            converter=_TaskConverter,
        )
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

from sqlalchemy import JSON, Boolean, Column, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry


@mapper_registry.mapped
@dataclass
class _TaskModel(_BaseModel):
    """SQLAlchemy-mapped dataclass persisting `Task^` entities in the "task" table."""

    __table__ = Table(
        "task",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("owner_id", String),
        Column("parent_ids", JSON),
        Column("config_id", String),
        Column("input_ids", JSON),
        Column("function_name", String),
        Column("function_module", String),
        Column("output_ids", JSON),
        Column("version", String),
        Column("skippable", Boolean),
        Column("properties", JSON),
    )
    # Dataclass fields mirror the mapped columns above.
    id: str
    owner_id: Optional[str]
    parent_ids: List[str]
    config_id: str
    input_ids: List[str]
    function_name: str
    function_module: str
    output_ids: List[str]
    version: str
    skippable: bool
    properties: Dict[str, Any]

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from its dict form (e.g. a filesystem-repository record)."""
        return _TaskModel(
            id=data["id"],
            owner_id=data.get("owner_id"),
            parent_ids=_BaseModel._deserialize_attribute(data.get("parent_ids", [])),
            config_id=data["config_id"],
            input_ids=_BaseModel._deserialize_attribute(data["input_ids"]),
            function_name=data["function_name"],
            function_module=data["function_module"],
            output_ids=_BaseModel._deserialize_attribute(data["output_ids"]),
            version=data["version"],
            skippable=data["skippable"],
            # Idiom fix: dict.get replaces the previous `data["properties"] if
            # "properties" in data.keys() else {}` double lookup.
            properties=_BaseModel._deserialize_attribute(data.get("properties", {})),
        )

    def to_list(self):
        """Serialize the model as a flat list, mirroring the column order of `__table__`."""
        return [
            self.id,
            self.owner_id,
            _BaseModel._serialize_attribute(self.parent_ids),
            self.config_id,
            _BaseModel._serialize_attribute(self.input_ids),
            self.function_name,
            self.function_module,
            _BaseModel._serialize_attribute(self.output_ids),
            self.version,
            self.skippable,
            _BaseModel._serialize_attribute(self.properties),
        ]
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._filesystem_repository import _FileSystemRepository
from ._task_converter import _TaskConverter
from ._task_model import _TaskModel


class _TaskFSRepository(_FileSystemRepository):
    """Filesystem-backed repository for tasks.

    Persists `_TaskModel` records under the "tasks" directory and relies
    on `_TaskConverter` to map models to/from entities.
    """

    def __init__(self):
        super().__init__(
            model_type=_TaskModel,
            converter=_TaskConverter,
            dir_name="tasks",
        )
from typing import NewType

# Distinct alias over str so type checkers can tell task ids apart from plain strings.
TaskId = NewType("TaskId", str)
TaskId.__doc__ = """Type that holds a `Task^` identifier."""
from dataclasses import dataclass
from typing import Any, Dict, List

from sqlalchemy import JSON, Boolean, Column, Enum, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from .job_id import JobId
from .status import Status


@mapper_registry.mapped
@dataclass
class _JobModel(_BaseModel):
    """SQLAlchemy-mapped dataclass persisting `Job^` entities in the "job" table."""

    __table__ = Table(
        "job",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("task_id", String),
        Column("status", Enum(Status)),
        Column("force", Boolean),
        Column("submit_id", String),
        Column("submit_entity_id", String),
        Column("creation_date", String),
        Column("subscribers", JSON),
        Column("stacktrace", JSON),
        Column("version", String),
    )
    # Dataclass fields mirror the mapped columns above.
    id: JobId
    task_id: str
    status: Status
    force: bool
    submit_id: str
    submit_entity_id: str
    creation_date: str
    subscribers: List[Dict]
    stacktrace: List[str]
    version: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from its dict form (e.g. a filesystem-repository record)."""
        return _JobModel(
            id=data["id"],
            task_id=data["task_id"],
            # Status is stored via repr(); _from_repr reverses it.
            status=Status._from_repr(data["status"]),
            force=data["force"],
            submit_id=data["submit_id"],
            submit_entity_id=data["submit_entity_id"],
            creation_date=data["creation_date"],
            subscribers=_BaseModel._deserialize_attribute(data["subscribers"]),
            stacktrace=_BaseModel._deserialize_attribute(data["stacktrace"]),
            version=data["version"],
        )

    def to_list(self):
        """Serialize the model as a flat list, mirroring the column order of `__table__`."""
        return [
            self.id,
            self.task_id,
            repr(self.status),
            self.force,
            self.submit_id,
            self.submit_entity_id,
            self.creation_date,
            _BaseModel._serialize_attribute(self.subscribers),
            _BaseModel._serialize_attribute(self.stacktrace),
            self.version,
        ]
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._filesystem_repository import _FileSystemRepository
from ._job_converter import _JobConverter
from ._job_model import _JobModel


class _JobFSRepository(_FileSystemRepository):
    """Filesystem-backed repository for jobs.

    Persists `_JobModel` records under the "jobs" directory and relies
    on `_JobConverter` to map models to/from entities.
    """

    def __init__(self):
        super().__init__(
            model_type=_JobModel,
            converter=_JobConverter,
            dir_name="jobs",
        )
import uuid
from typing import Callable, Iterable, List, Optional, Union

from .._manager._manager import _Manager
from .._repository._abstract_repository import _AbstractRepository
from .._version._version_manager_factory import _VersionManagerFactory
from .._version._version_mixin import _VersionMixin
from ..exceptions.exceptions import JobNotDeletedException
from ..notification import EventEntityType, EventOperation, Notifier, _make_event
from ..task.task import Task
from .job import Job
from .job_id import JobId


class _JobManager(_Manager[Job], _VersionMixin):
    """Manager in charge of the lifecycle (create/get/delete/cancel) of `Job^` entities."""

    _ENTITY_NAME = Job.__name__
    _ID_PREFIX = "JOB_"
    _repository: _AbstractRepository
    _EVENT_ENTITY_TYPE = EventEntityType.JOB

    @classmethod
    def _get_all(cls, version_number: Optional[str] = None) -> List[Job]:
        """
        Returns all entities.
        """
        filters = cls._build_filters_with_version(version_number)
        return cls._repository._load_all(filters)

    @classmethod
    def _create(
        cls, task: Task, callbacks: Iterable[Callable], submit_id: str, submit_entity_id: str, force=False
    ) -> Job:
        """Create, persist, and publish a new job for the given task submission."""
        version = _VersionManagerFactory._build_manager()._get_latest_version()
        job = Job(
            id=JobId(f"{Job._ID_PREFIX}_{task.config_id}_{uuid.uuid4()}"),
            task=task,
            submit_id=submit_id,
            submit_entity_id=submit_entity_id,
            force=force,
            version=version,
        )
        cls._set(job)
        Notifier.publish(_make_event(job, EventOperation.CREATION))
        # Register the status-change callbacks after creation has been published.
        job._on_status_change(*callbacks)
        return job

    @classmethod
    def _delete(cls, job: Job, force=False):
        """Delete a finished job (or any job when `force` is True).

        Raises:
            JobNotDeletedException: If the job is not finished and `force` is False.
        """
        if job.is_finished() or force:
            super()._delete(job.id)
            # Local import to avoid a circular dependency with the orchestrator package.
            from .._orchestrator._dispatcher._job_dispatcher import _JobDispatcher

            _JobDispatcher._pop_dispatched_process(job.id)
        else:
            err = JobNotDeletedException(job.id)
            cls._logger.warning(err)
            raise err

    @classmethod
    def _cancel(cls, job: Union[str, Job]):
        """Cancel the given job; accepts either a `Job^` or its id."""
        job = cls._get(job) if isinstance(job, str) else job

        # Local import to avoid a circular dependency with the orchestrator package.
        from .._orchestrator._orchestrator_factory import _OrchestratorFactory

        _OrchestratorFactory._build_orchestrator().cancel_job(job)

    @classmethod
    def _get_latest(cls, task: Task) -> Optional[Job]:
        """Return the most recent job created for `task`, or None if there is none."""
        jobs_of_task = list(filter(lambda job: task in job, cls._get_all()))
        if len(jobs_of_task) == 0:
            return None
        if len(jobs_of_task) == 1:
            return jobs_of_task[0]
        else:
            return max(jobs_of_task)

    @classmethod
    def _is_deletable(cls, job: Union[Job, JobId]) -> bool:
        """Return True if the job can be deleted, i.e. it is finished."""
        if isinstance(job, str):
            job = cls._get(job)
        # Simplified from the previous if/return-True/return-False chain.
        return job.is_finished()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import NewType

# Distinct alias over str so type checkers can tell job ids apart from plain strings.
JobId = NewType("JobId", str)
JobId.__doc__ = """Type that holds a `Job^` identifier."""
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._sql_repository import _SQLRepository
from ._job_converter import _JobConverter
from ._job_model import _JobModel


class _JobSQLRepository(_SQLRepository):
    """SQL-backed repository for jobs.

    Persists `_JobModel` rows and relies on `_JobConverter` to map
    models to/from entities.
    """

    def __init__(self):
        super().__init__(
            model_type=_JobModel,
            converter=_JobConverter,
        )
from datetime import datetime
from typing import List

from .._repository._abstract_converter import _AbstractConverter
from ..common._utils import _fcts_to_dict, _load_fct
from ..exceptions import InvalidSubscriber
from ..job._job_model import _JobModel
from ..job.job import Job
from ..task._task_manager_factory import _TaskManagerFactory


class _JobConverter(_AbstractConverter):
    """Converts between `Job^` entities and their `_JobModel` representation."""

    @classmethod
    def _entity_to_model(cls, job: Job) -> _JobModel:
        """Serialize a job; subscriber callables are stored as module/name dicts."""
        return _JobModel(
            job.id,
            job._task.id,
            job._status,
            job._force,
            job.submit_id,
            job.submit_entity_id,
            job._creation_date.isoformat(),
            cls.__serialize_subscribers(job._subscribers),
            job._stacktrace,
            version=job._version,
        )

    @classmethod
    def _model_to_entity(cls, model: _JobModel) -> Job:
        """Rebuild a `Job^` from its model, re-importing its subscriber functions.

        Raises:
            InvalidSubscriber: If a serialized subscriber function cannot be loaded.
        """
        task_manager = _TaskManagerFactory._build_manager()
        task_repository = task_manager._repository
        job = Job(
            id=model.id,
            task=task_repository._load(model.task_id),
            submit_id=model.submit_id,
            submit_entity_id=model.submit_entity_id,
            version=model.version,
        )

        # Private state is restored directly since Job exposes no setters for it.
        job._status = model.status  # type: ignore
        job._force = model.force  # type: ignore
        job._creation_date = datetime.fromisoformat(model.creation_date)  # type: ignore
        for it in model.subscribers:
            try:
                fct_module, fct_name = it.get("fct_module"), it.get("fct_name")
                job._subscribers.append(_load_fct(fct_module, fct_name))  # type: ignore
            except AttributeError as e:
                # Chain the original error so the underlying import failure is not lost.
                raise InvalidSubscriber(f"The subscriber function {it.get('fct_name')} cannot be loaded.") from e
        job._stacktrace = model.stacktrace

        return job

    @staticmethod
    def __serialize_subscribers(subscribers: List) -> List:
        return _fcts_to_dict(subscribers)
from ..common._repr_enum import _ReprEnum


# NOTE(review): _ReprEnum presumably provides the repr()/_from_repr round-trip
# used by the job models — confirm against its definition.
class Status(_ReprEnum):
    """Execution status of a `Job^`.

    It is implemented as an enumeration.

    The possible values are:

    - `SUBMITTED`: A `SUBMITTED` job has been submitted for execution but not processed yet by
        the orchestrator.

    - `PENDING`: A `PENDING` job has been enqueued by the orchestrator. It is waiting for an
        executor to be available for its execution.

    - `BLOCKED`: A `BLOCKED` job has been blocked because its input data nodes are not ready yet. It is
        waiting for the completion of another `Job^`

    - `RUNNING`: A `RUNNING` job is currently executed by a dedicated executor.

    - `CANCELED`: A `CANCELED` job has been submitted but its execution has been canceled.

    - `FAILED`: A `FAILED` job raised an exception during its execution.

    - `COMPLETED`: A `COMPLETED` job has successfully been executed.

    - `SKIPPED`: A `SKIPPED` job has not been executed because its outputs were already computed.

    - `ABANDONED`: An `ABANDONED` job has not been executed because it depends on a job that could not
        complete ( cancelled, failed, or abandoned).
    """

    SUBMITTED = 1
    BLOCKED = 2
    PENDING = 3
    RUNNING = 4
    CANCELED = 5
    FAILED = 6
    COMPLETED = 7
    SKIPPED = 8
    ABANDONED = 9
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._job_fs_repository import _JobFSRepository
from ._job_manager import _JobManager
from ._job_sql_repository import _JobSQLRepository


class _JobManagerFactory(_ManagerFactory):
    """Builds the `_JobManager`, swapping in the enterprise implementation when available."""

    __REPOSITORY_MAP = {"default": _JobFSRepository, "sql": _JobSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_JobManager]:  # type: ignore
        """Return the job manager class with its repository attached."""
        if not cls._using_enterprise():
            manager = _JobManager
            repository_factory = cls._build_repository
        else:
            # Enterprise edition: load the drop-in replacement classes dynamically.
            package = cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job"
            manager = _load_fct(package + "._job_manager", "_JobManager")  # type: ignore
            repository_factory = _load_fct(
                package + "._job_manager_factory", "_JobManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository selected by the configured repository type."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from typing import NewType

# Distinct alias over `str` used for static typing of sequence identifiers.
SequenceId = NewType("SequenceId", str)
SequenceId.__doc__ = """Type that holds a `Sequence^` identifier."""
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Dict

from .._repository._abstract_converter import _AbstractConverter
from ..common import _utils
from ..task.task import Task
from .sequence import Sequence


class _SequenceConverter(_AbstractConverter):
    """Converts a `Sequence` entity into its serializable dict model.

    The `_SEQUENCE_MODEL_*_KEY` constants name the keys of the produced dict.
    """

    _SEQUENCE_MODEL_ID_KEY = "id"
    _SEQUENCE_MODEL_OWNER_ID_KEY = "owner_id"
    _SEQUENCE_MODEL_PARENT_IDS_KEY = "parent_ids"
    _SEQUENCE_MODEL_PROPERTIES_KEY = "properties"
    _SEQUENCE_MODEL_TASKS_KEY = "tasks"
    _SEQUENCE_MODEL_SUBSCRIBERS_KEY = "subscribers"
    _SEQUENCE_MODEL_VERSION_KEY = "version"

    @classmethod
    def _entity_to_model(cls, sequence: Sequence) -> Dict:
        """Serialize *sequence* into a plain dict.

        Fix: use the key constants declared above instead of repeating the string
        literals, so the declared key names and the produced keys cannot drift apart.
        The constant values equal the previous literals, so the output is unchanged.
        """
        return {
            cls._SEQUENCE_MODEL_ID_KEY: sequence.id,
            cls._SEQUENCE_MODEL_OWNER_ID_KEY: sequence.owner_id,
            cls._SEQUENCE_MODEL_PARENT_IDS_KEY: list(sequence._parent_ids),
            cls._SEQUENCE_MODEL_PROPERTIES_KEY: sequence._properties.data,
            cls._SEQUENCE_MODEL_TASKS_KEY: cls.__to_task_ids(sequence._tasks),
            cls._SEQUENCE_MODEL_SUBSCRIBERS_KEY: _utils._fcts_to_dict(sequence._subscribers),
            cls._SEQUENCE_MODEL_VERSION_KEY: sequence._version,
        }

    @staticmethod
    def __to_task_ids(tasks):
        """Normalize a mixed list of `Task` objects / task ids into a list of ids."""
        return [t.id if isinstance(t, Task) else t for t in tasks]
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._sequence_manager import _SequenceManager


class _SequenceManagerFactory(_ManagerFactory):
    """Builds the `_SequenceManager` class (no repository is attached in this factory)."""

    @classmethod
    def _build_manager(cls) -> Type[_SequenceManager]:  # type: ignore
        """Return the sequence manager class, preferring the enterprise variant when enabled."""
        if not cls._using_enterprise():
            return _SequenceManager
        # Enterprise edition replaces the community manager with its own class.
        return _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".sequence._sequence_manager", "_SequenceManager")  # type: ignore
from ..exceptions.exceptions import InvalidExposedType


class _AbstractTabularDataNode:
    """Abstract base class for tabular data node implementations (CSVDataNode,
    ParquetDataNode, ExcelDataNode, SQLTableDataNode and SQLDataNode) that are
    tabular representable."""

    @staticmethod
    def _check_exposed_type(exposed_type, valid_string_exposed_types):
        """Raise `InvalidExposedType` when *exposed_type* is a string outside the valid set.

        Non-string exposed types (e.g. callables or classes) pass through unchecked.
        """
        if not isinstance(exposed_type, str):
            return
        if exposed_type in valid_string_exposed_types:
            return
        supported = ", ".join(valid_string_exposed_types)
        raise InvalidExposedType(f"Invalid string exposed type {exposed_type}. Supported values are {supported}")
from enum import Enum


class Operator(Enum):
    """Comparison operators usable when filtering Data Nodes.

    Members: `EQUAL`, `NOT_EQUAL`, `LESS_THAN`, `LESS_OR_EQUAL`,
    `GREATER_THAN`, `GREATER_OR_EQUAL`.
    """

    EQUAL = 1
    NOT_EQUAL = 2
    LESS_THAN = 3
    LESS_OR_EQUAL = 4
    GREATER_THAN = 5
    GREATER_OR_EQUAL = 6


class JoinOperator(Enum):
    """Logical connectors combining several Data Node filters.

    The possible values are `AND` and `OR`.
    """

    AND = 1
    OR = 2
from .csv import CSVDataNode from .data_node import DataNode from .excel import ExcelDataNode from .generic import GenericDataNode from .in_memory import InMemoryDataNode from .json import JSONDataNode from .mongo import MongoCollectionDataNode from .operator import JoinOperator, Operator from .parquet import ParquetDataNode from .pickle import PickleDataNode from .sql import SQLDataNode from .sql_table import SQLTableDataNode
import pathlib


class _AbstractFileDataNode(object):
    """Abstract base class for data node implementations (CSVDataNode, ParquetDataNode,
    ExcelDataNode, PickleDataNode and JSONDataNode) that are file based."""

    # Maps a storage type to the file extension of the data node's backing file.
    __EXTENSION_MAP = {"csv": "csv", "excel": "xlsx", "parquet": "parquet", "pickle": "p", "json": "json"}

    def _build_path(self, storage_type):
        """Return the default backing-file path for this data node, creating its directory.

        The file lives under `<storage_folder>/<storage_type>s/<id>.<ext>`.
        """
        from taipy.config.config import Config

        folder = f"{storage_type}s"
        dir_path = pathlib.Path(Config.core.storage_folder) / folder
        # Fix: `mkdir(exist_ok=True)` already tolerates an existing directory, so the
        # previous `if not dir_path.exists()` pre-check was redundant and introduced a
        # TOCTOU race when several processes create the folder concurrently.
        dir_path.mkdir(parents=True, exist_ok=True)
        return dir_path / f"{self.id}.{self.__EXTENSION_MAP.get(storage_type)}"
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._data_fs_repository import _DataFSRepository
from ._data_manager import _DataManager
from ._data_sql_repository import _DataSQLRepository


class _DataManagerFactory(_ManagerFactory):
    """Builds the `_DataManager`, swapping in the enterprise implementation when available."""

    __REPOSITORY_MAP = {"default": _DataFSRepository, "sql": _DataSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_DataManager]:  # type: ignore
        """Return the data-node manager class with its repository attached."""
        if not cls._using_enterprise():
            manager = _DataManager
            repository_factory = cls._build_repository
        else:
            # Enterprise edition: load the drop-in replacement classes dynamically.
            package = cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data"
            manager = _load_fct(package + "._data_manager", "_DataManager")  # type: ignore
            repository_factory = _load_fct(
                package + "._data_manager_factory", "_DataManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository selected by the configured repository type."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._sql_repository import _SQLRepository from ._data_converter import _DataNodeConverter from ._data_model import _DataNodeModel class _DataSQLRepository(_SQLRepository): def __init__(self): super().__init__(model_type=_DataNodeModel, converter=_DataNodeConverter)
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

from sqlalchemy import JSON, Boolean, Column, Enum, Float, String, Table, UniqueConstraint

from taipy.config.common.scope import Scope

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from .data_node_id import Edit


@mapper_registry.mapped
@dataclass
class _DataNodeModel(_BaseModel):
    """Serializable model of a data node, mapped onto the `data_node` SQL table."""

    __table__ = Table(
        "data_node",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("config_id", String),
        Column("scope", Enum(Scope)),
        Column("storage_type", String),
        Column("owner_id", String),
        Column("parent_ids", JSON),
        Column("last_edit_date", String),  # dates are persisted as ISO-8601 strings
        Column("edits", JSON),
        Column("version", String),
        Column("validity_days", Float),
        Column("validity_seconds", Float),
        Column("edit_in_progress", Boolean),
        Column("editor_id", String),
        Column("editor_expiration_date", String),
        Column("data_node_properties", JSON),
    )
    # NOTE(review): SQLAlchemy ignores `__table_args__` when an explicit `__table__`
    # is supplied — confirm this unique constraint is actually applied to the table.
    __table_args__ = (UniqueConstraint("config_id", "owner_id", name="_config_owner_uc"),)
    id: str
    config_id: str
    scope: Scope
    storage_type: str
    owner_id: Optional[str]
    parent_ids: List[str]
    last_edit_date: Optional[str]
    edits: List[Edit]
    version: str
    validity_days: Optional[float]
    validity_seconds: Optional[float]
    edit_in_progress: bool
    editor_id: Optional[str]
    editor_expiration_date: Optional[str]
    data_node_properties: Dict[str, Any]

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a plain dict, deserializing JSON-encoded attributes."""
        return _DataNodeModel(
            id=data["id"],
            config_id=data["config_id"],
            scope=Scope._from_repr(data["scope"]),
            storage_type=data["storage_type"],
            owner_id=data.get("owner_id"),
            parent_ids=data.get("parent_ids", []),
            last_edit_date=data.get("last_edit_date"),
            edits=_BaseModel._deserialize_attribute(data["edits"]),
            version=data["version"],
            validity_days=data["validity_days"],
            validity_seconds=data["validity_seconds"],
            edit_in_progress=bool(data.get("edit_in_progress", False)),
            editor_id=data.get("editor_id", None),
            editor_expiration_date=data.get("editor_expiration_date"),
            data_node_properties=_BaseModel._deserialize_attribute(data["data_node_properties"]),
        )

    def to_list(self):
        """Return field values in column order, serializing JSON-typed attributes."""
        return [
            self.id,
            self.config_id,
            repr(self.scope),
            self.storage_type,
            self.owner_id,
            _BaseModel._serialize_attribute(self.parent_ids),
            self.last_edit_date,
            _BaseModel._serialize_attribute(self.edits),
            self.version,
            self.validity_days,
            self.validity_seconds,
            self.edit_in_progress,
            self.editor_id,
            self.editor_expiration_date,
            _BaseModel._serialize_attribute(self.data_node_properties),
        ]
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Set

from taipy.config.common.scope import Scope

from .._version._version_manager_factory import _VersionManagerFactory
from .data_node import DataNode
from .data_node_id import DataNodeId, Edit

# Process-wide backing store mapping data node id -> stored value. Shared by all
# InMemoryDataNode instances of this process, hence incompatible with parallel execution.
in_memory_storage: Dict[str, Any] = {}


class InMemoryDataNode(DataNode):
    """Data Node stored in memory.

    Warning:
        This Data Node implementation is not compatible with a parallel execution of taipy tasks,
        but only with a task executor in development mode. The purpose of `InMemoryDataNode` is to be used
        for development or debugging.

    Attributes:
        config_id (str): Identifier of the data node configuration. It must be a valid Python
            identifier.
        scope (Scope^): The scope of this data node.
        id (str): The unique identifier of this data node.
        owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or
            `None`.
        parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
        last_edit_date (datetime): The date and time of the last modification.
        edits (List[Edit^]): The ordered list of edits for that job.
        version (str): The string indicates the application version of the data node to instantiate.
            If not provided, the current version is used.
        validity_period (Optional[timedelta]): The duration implemented as a timedelta since the last edit date for
            which the data node can be considered up-to-date. Once the validity period has passed, the data node is
            considered stale and relevant tasks will run even if they are skippable (see the
            [Task management page](../core/entities/task-mgt.md) for more details).
            If _validity_period_ is set to `None`, the data node is always up-to-date.
        edit_in_progress (bool): True if a task computing the data node has been submitted
            and not completed yet. False otherwise.
        editor_id (Optional[str]): The identifier of the user who is currently editing the data node.
        editor_expiration_date (Optional[datetime]): The expiration date of the editor lock.
        properties (dict[str, Any]): A dictionary of additional properties. When creating an
            _In Memory_ data node, if the _properties_ dictionary contains a _"default_data"_ entry,
            the data node is automatically written with the corresponding _"default_data"_ value.
    """

    __STORAGE_TYPE = "in_memory"
    __DEFAULT_DATA_VALUE = "default_data"  # property key holding the initial value to write
    _REQUIRED_PROPERTIES: List[str] = []

    def __init__(
        self,
        config_id: str,
        scope: Scope,
        id: Optional[DataNodeId] = None,
        owner_id: Optional[str] = None,
        parent_ids: Optional[Set[str]] = None,
        last_edit_date: Optional[datetime] = None,
        edits: Optional[List[Edit]] = None,
        version: Optional[str] = None,
        validity_period: Optional[timedelta] = None,
        edit_in_progress: bool = False,
        editor_id: Optional[str] = None,
        editor_expiration_date: Optional[datetime] = None,
        properties=None,
    ):
        if properties is None:
            properties = {}
        # "default_data" is consumed here and must not reach the generic properties.
        default_value = properties.pop(self.__DEFAULT_DATA_VALUE, None)
        super().__init__(
            config_id,
            scope,
            id,
            owner_id,
            parent_ids,
            last_edit_date,
            edits,
            version or _VersionManagerFactory._build_manager()._get_latest_version(),
            validity_period,
            edit_in_progress,
            editor_id,
            editor_expiration_date,
            **properties
        )
        # Only write the default value the first time this id is seen in the store.
        if default_value is not None and self.id not in in_memory_storage:
            self._write(default_value)
            self._last_edit_date = datetime.now()
            self._edits.append(
                Edit(
                    {
                        "timestamp": self._last_edit_date,
                        "writer_identifier": "TAIPY",
                        "comments": "Default data written.",
                    }
                )
            )

        self._TAIPY_PROPERTIES.update({self.__DEFAULT_DATA_VALUE})

    @classmethod
    def storage_type(cls) -> str:
        """Return the storage type string identifying this data node kind."""
        return cls.__STORAGE_TYPE

    def _read(self):
        # Returns None when nothing has been written for this id yet.
        return in_memory_storage.get(self.id)

    def _write(self, data):
        in_memory_storage[self.id] = data
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._filesystem_repository import _FileSystemRepository from ._data_converter import _DataNodeConverter from ._data_model import _DataNodeModel class _DataFSRepository(_FileSystemRepository): def __init__(self): super().__init__(model_type=_DataNodeModel, converter=_DataNodeConverter, dir_name="data_nodes")
from typing import Any, Dict, NewType

# Distinct alias over `str` used for static typing of data node identifiers.
DataNodeId = NewType("DataNodeId", str)
DataNodeId.__doc__ = """Type that holds a `DataNode^` identifier."""

# A free-form dict describing one edit (e.g. timestamp, writer identifier, comments).
Edit = NewType("Edit", Dict[str, Any])
Edit.__doc__ = """Type that holds a `DataNode^` edit information."""
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._filesystem_repository import _FileSystemRepository from ._submission_converter import _SubmissionConverter from ._submission_model import _SubmissionModel class _SubmissionFSRepository(_FileSystemRepository): def __init__(self): super().__init__(model_type=_SubmissionModel, converter=_SubmissionConverter, dir_name="submission")
from typing import NewType

# Distinct alias over `str` used for static typing of submission identifiers.
SubmissionId = NewType("SubmissionId", str)
SubmissionId.__doc__ = """Type that holds a `Submission^` identifier."""
from ..common._repr_enum import _ReprEnum


class SubmissionStatus(_ReprEnum):
    """Execution status of a `Submission^`.

    It is implemented as an enumeration.

    The possible values are:

    - `SUBMITTED`: A `SUBMITTED` submission has been submitted for execution but not processed yet by the
        orchestrator.

    - `UNDEFINED`: AN `UNDEFINED` submission's jobs have been submitted for execution but got some undefined
        status changes.

    - `PENDING`: A `PENDING` submission has been enqueued by the orchestrator. It is waiting for an executor
        to be available for its execution.

    - `BLOCKED`: A `BLOCKED` submission has been blocked because it has been finished with a job being blocked.

    - `RUNNING`: A `RUNNING` submission has its jobs currently being executed.

    - `CANCELED`: A `CANCELED` submission has been submitted but its execution has been canceled.

    - `FAILED`: A `FAILED` submission has a job failed during its execution.

    - `COMPLETED`: A `COMPLETED` submission has successfully been executed.
    """

    # NOTE(review): `repr()` of these members is persisted by the submission model —
    # keep names/values stable across versions.
    SUBMITTED = 0
    UNDEFINED = 1
    BLOCKED = 2
    PENDING = 3
    RUNNING = 4
    CANCELED = 5
    FAILED = 6
    COMPLETED = 7
from dataclasses import dataclass
from typing import Any, Dict, List, Union

from sqlalchemy import JSON, Column, Enum, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from ..job.job_id import JobId
from .submission_status import SubmissionStatus


@mapper_registry.mapped
@dataclass
class _SubmissionModel(_BaseModel):
    """Serializable model of a submission, mapped onto the `submission` SQL table."""

    __table__ = Table(
        "submission",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("entity_id", String),
        Column("job_ids", JSON),
        Column("creation_date", String),  # persisted as an ISO-8601 string
        Column("submission_status", Enum(SubmissionStatus)),
        Column("version", String),
    )
    id: str
    entity_id: str
    job_ids: Union[List[JobId], List]
    creation_date: str
    submission_status: SubmissionStatus
    version: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a plain dict, deserializing JSON-encoded attributes."""
        return _SubmissionModel(
            id=data["id"],
            entity_id=data["entity_id"],
            job_ids=_BaseModel._deserialize_attribute(data["job_ids"]),
            creation_date=data["creation_date"],
            submission_status=SubmissionStatus._from_repr(data["submission_status"]),
            version=data["version"],
        )

    def to_list(self):
        """Return field values in column order, serializing JSON-typed attributes."""
        return [
            self.id,
            self.entity_id,
            _BaseModel._serialize_attribute(self.job_ids),
            self.creation_date,
            repr(self.submission_status),
            self.version,
        ]
from datetime import datetime

from .._repository._abstract_converter import _AbstractConverter
from ..job.job import Job, JobId
from ..submission._submission_model import _SubmissionModel
from ..submission.submission import Submission
from .submission import SubmissionId


class _SubmissionConverter(_AbstractConverter):
    """Converts between `Submission` entities and their persisted `_SubmissionModel`."""

    @classmethod
    def _entity_to_model(cls, submission: Submission) -> _SubmissionModel:
        """Serialize a `Submission` into a `_SubmissionModel`."""
        # Jobs may be held either as Job objects or as raw ids; only ids are persisted.
        job_ids = [entry.id if isinstance(entry, Job) else JobId(str(entry)) for entry in submission._jobs]
        return _SubmissionModel(
            id=submission.id,
            entity_id=submission._entity_id,
            job_ids=job_ids,
            creation_date=submission._creation_date.isoformat(),  # ISO-8601 string
            submission_status=submission._submission_status,
            version=submission._version,
        )

    @classmethod
    def _model_to_entity(cls, model: _SubmissionModel) -> Submission:
        """Rebuild a `Submission` entity from its model."""
        return Submission(
            entity_id=model.entity_id,
            id=SubmissionId(model.id),
            jobs=model.job_ids,
            creation_date=datetime.fromisoformat(model.creation_date),
            submission_status=model.submission_status,
            version=model.version,
        )
from typing import List, Optional, Union

from .._manager._manager import _Manager
from .._repository._abstract_repository import _AbstractRepository
from .._version._version_mixin import _VersionMixin
from ..notification import EventEntityType, EventOperation, Notifier, _make_event
from ..scenario.scenario import Scenario
from ..sequence.sequence import Sequence
from ..submission.submission import Submission
from ..task.task import Task


class _SubmissionManager(_Manager[Submission], _VersionMixin):
    """Manager responsible for creating and querying `Submission` entities."""

    _ENTITY_NAME = Submission.__name__
    _repository: _AbstractRepository
    _EVENT_ENTITY_TYPE = EventEntityType.SUBMISSION

    @classmethod
    def _get_all(cls, version_number: Optional[str] = None) -> List[Submission]:
        """
        Returns all entities.
        """
        filters = cls._build_filters_with_version(version_number)
        return cls._repository._load_all(filters)

    @classmethod
    def _create(cls, entity_id: str) -> Submission:
        """Create, persist and announce a new submission for *entity_id*."""
        submission = Submission(entity_id=entity_id)
        cls._set(submission)

        Notifier.publish(_make_event(submission, EventOperation.CREATION))

        return submission

    @classmethod
    def _get_latest(cls, entity: Union[Scenario, Sequence, Task]) -> Optional[Submission]:
        """Return the most recent submission of *entity*, or None when it has none."""
        entity_id = entity if isinstance(entity, str) else entity.id
        candidates = [submission for submission in cls._get_all() if submission.entity_id == entity_id]
        if not candidates:
            return None
        # max() relies on Submission's comparison operators to pick the latest
        # (for a single candidate it trivially returns that one).
        return max(candidates)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._submission_fs_repository import _SubmissionFSRepository
from ._submission_manager import _SubmissionManager
from ._submission_sql_repository import _SubmissionSQLRepository


class _SubmissionManagerFactory(_ManagerFactory):
    """Builds the `_SubmissionManager`, swapping in the enterprise implementation when available."""

    __REPOSITORY_MAP = {"default": _SubmissionFSRepository, "sql": _SubmissionSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_SubmissionManager]:  # type: ignore
        """Return the submission manager class with its repository attached."""
        if not cls._using_enterprise():
            manager = _SubmissionManager
            repository_factory = cls._build_repository
        else:
            # Enterprise edition: load the drop-in replacement classes dynamically.
            package = cls._TAIPY_ENTERPRISE_CORE_MODULE + ".submission"
            manager = _load_fct(package + "._submission_manager", "_SubmissionManager")  # type: ignore
            repository_factory = _load_fct(
                package + "._submission_manager_factory",
                "_SubmissionManagerFactory",
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository selected by the configured repository type."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._sql_repository import _SQLRepository from ._submission_converter import _SubmissionConverter from ._submission_model import _SubmissionModel class _SubmissionSQLRepository(_SQLRepository): def __init__(self): super().__init__(model_type=_SubmissionModel, converter=_SubmissionConverter)
""" A single-page Taipy application. Please refer to https://docs.taipy.io/en/latest/manuals/gui/ for more details. """ import webbrowser from taipy.gui import Markdown, notify import taipy as tp value = 0 logo = "images/taipy_logo.jpg" page = Markdown( """ <center> <|navbar|lov={[("page1", "Homepage"), ("https://docs.taipy.io/en/latest/manuals/about/", "Taipy Docs"), ("https://docs.taipy.io/en/latest/getting_started/", "Getting Started")]}|> </center> <| <center> <|{logo}|image|height=200px|width=200px|on_action=image_action|> </center> |> # Taipy Application <|{value}|slider|on_change=on_slider|> <|Push|button|on_action=on_push|> """ ) def image_action(state): webbrowser.open("https://taipy.io") def on_push(state): ... def on_slider(state): if state.value == 100: notify(state, "success", "Taipy is running!") def on_change(state, var_name: str, var_value): ... gui = tp.Gui(page=page) if __name__ == '__main__': # Execute by the _Python_ interpretor, for debug only. tp.run(gui, title="Taipy Application (development)") else: # Execute by _Gunicorn_, for production environment. app = tp.run(gui, title="Taipy Application", run_server=False)
from taipy import Gui # A dark mode is available in Taipy # However, we will use the light mode for the Getting Started Gui(page="# Getting started with *Taipy*").run(dark_mode=False)