text
stringlengths
0
5.92k
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import math
from functools import reduce
from typing import Any, Dict, List, Tuple

import networkx as nx


class _Node:
    """A graph node wrapping an entity together with its grid coordinates."""

    def __init__(self, entity: Any, x, y):
        self.type = entity.__class__.__name__
        self.entity = entity
        self.x = x
        self.y = y


class _Edge:
    """A directed edge between two positioned nodes."""

    def __init__(self, src: _Node, dest: _Node):
        self.src = src
        self.dest = dest


class _DAG:
    """Computes a grid layout (x/y coordinates) for the nodes of a directed acyclic graph."""

    def __init__(self, dag: nx.DiGraph):
        # One list of nodes per topological generation (depth level).
        self._sorted_nodes = [generation for generation in nx.topological_generations(dag)]
        self._length, self._width = self.__compute_size()
        self._grid_length, self._grid_width = self.__compute_grid_size()
        self._nodes = self.__compute_nodes()
        self._edges = self.__compute_edges(dag)

    @property
    def width(self) -> int:
        return self._width

    @property
    def length(self) -> int:
        return self._length

    @property
    def nodes(self) -> Dict[str, _Node]:
        return self._nodes

    @property
    def edges(self) -> List[_Edge]:
        return self._edges

    def __compute_size(self) -> Tuple[int, int]:
        # length = number of levels; width = size of the widest level.
        return len(self._sorted_nodes), max(len(level) for level in self._sorted_nodes)

    def __compute_grid_size(self) -> Tuple[int, int]:
        # The grid width is one more than the lcm of per-level divisors so every
        # level can be spread evenly across the grid.
        if self._width == 1:
            grid_width = 1
        else:
            divisors = [len(level) + 1 if len(level) != self._width else len(level) - 1 for level in self._sorted_nodes]
            grid_width = self.__lcm(*divisors) + 1
        return len(self._sorted_nodes), grid_width

    def __compute_nodes(self) -> Dict[str, _Node]:
        positioned: Dict[str, _Node] = {}
        x = 0
        for level in self._sorted_nodes:
            level_width = len(level)
            # NOTE(review): `is_max` is True when the level is NOT the widest one — confirm naming intent.
            is_max = level_width != self.width
            if self.width != 1:
                span = self._grid_width - 1
                y_incr = span / (level_width + 1) if is_max else span / (level_width - 1)
            else:
                y_incr = 1
            y = 0 if is_max else -y_incr
            for node in level:
                y += y_incr
                positioned[node.id] = _Node(node, x, y)
            x += 1
        return positioned

    def __compute_edges(self, dag) -> List[_Edge]:
        return [_Edge(self.nodes[src.id], self.nodes[dest.id]) for src, dest in dag.edges()]

    @staticmethod
    def __lcm(*integers) -> int:
        # Function math.lcm is only implemented for Python 3.9+.
        # For compatibility with Python 3.8 it has been re-implemented.
        if 0 in integers:
            return 0
        return reduce(lambda a, b: (a * b) // math.gcd(a, b), integers)
import sys
from typing import List

from taipy._cli._base_cli import _CLI
from taipy.logger._taipy_logger import _TaipyLogger

from ._migrate import (
    _migrate_fs_entities,
    _migrate_mongo_entities,
    _migrate_sql_entities,
    _remove_backup_file_entities,
    _remove_backup_mongo_entities,
    _remove_backup_sql_entities,
    _restore_migrate_file_entities,
    _restore_migrate_mongo_entities,
    _restore_migrate_sql_entities,
)


class _MigrateCLI:
    """CLI sub-command that migrates entities created by older taipy versions."""

    __logger = _TaipyLogger._get_logger()

    @classmethod
    def create_parser(cls):
        """Register the ``migrate`` sub-parser and its arguments on the shared CLI."""
        migrate_parser = _CLI._add_subparser(
            "migrate",
            help="Migrate entities created from old taipy versions to be compatible with the current taipy version. "
            " The entity migration should be performed only after updating taipy code to the current version.",
        )
        migrate_parser.add_argument(
            "--repository-type",
            required=True,
            nargs="+",
            help="The type of repository to migrate. If filesystem or sql, a path to the database folder/.sqlite file "
            "should be informed. In case of mongo host, port, user and password must be informed, if left empty it "
            "is assumed default values",
        )
        migrate_parser.add_argument(
            "--skip-backup",
            action="store_true",
            help="Skip the backup of entities before migration.",
        )
        migrate_parser.add_argument(
            "--restore",
            action="store_true",
            help="Restore the migration of entities from backup folder.",
        )
        migrate_parser.add_argument(
            "--remove-backup",
            action="store_true",
            help="Remove the backup of entities. Only use this option if the migration was successful.",
        )

    @classmethod
    def parse_arguments(cls):
        """Dispatch the parsed ``migrate`` arguments to the matching handler."""
        args = _CLI._parse()
        if getattr(args, "which", None) != "migrate":
            return

        repository_type = args.repository_type[0]
        repository_args = args.repository_type[1:] if len(args.repository_type) > 1 else [None]

        # Both handlers below terminate the process themselves via sys.exit.
        if args.restore:
            cls.__handle_restore_backup(repository_type, repository_args)
        if args.remove_backup:
            cls.__handle_remove_backup(repository_type, repository_args)

        cls.__migrate_entities(repository_type, repository_args, not args.skip_backup)
        sys.exit(0)

    @classmethod
    def __handle_remove_backup(cls, repository_type: str, repository_args: List):
        """Remove the pre-migration backup for the given repository type, then exit."""
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _remove_backup_file_entities(path):
                sys.exit(1)
        elif repository_type == "sql":
            if not _remove_backup_sql_entities(repository_args[0]):
                sys.exit(1)
        elif repository_type == "mongo":
            if not _remove_backup_mongo_entities():
                sys.exit(1)
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)
        sys.exit(0)

    @classmethod
    def __handle_restore_backup(cls, repository_type: str, repository_args: List):
        """Restore entities from the backup for the given repository type, then exit."""
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _restore_migrate_file_entities(path):
                sys.exit(1)
        elif repository_type == "sql":
            if not _restore_migrate_sql_entities(repository_args[0]):
                sys.exit(1)
        elif repository_type == "mongo":
            mongo_args = repository_args[1:5] if repository_args[0] else []
            if not _restore_migrate_mongo_entities(*mongo_args):
                sys.exit(1)
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)
        sys.exit(0)

    @classmethod
    def __migrate_entities(cls, repository_type: str, repository_args: List, do_backup: bool):
        """Run the migration for the given repository type, optionally backing up first."""
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _migrate_fs_entities(path, do_backup):
                sys.exit(1)
        elif repository_type == "sql":
            if not _migrate_sql_entities(repository_args[0], do_backup):
                sys.exit(1)
        elif repository_type == "mongo":
            mongo_args = repository_args[1:5] if repository_args[0] else []
            _migrate_mongo_entities(*mongo_args, backup=do_backup)  # type: ignore
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)
import functools

from ..notification import EventOperation, Notifier, _make_event


class _Reloader:
    """The _Reloader singleton class.

    Reloads an entity from its manager on demand. When used as a context
    manager, reloading is suspended for the duration of the context.
    """

    _instance = None
    _no_reload_context = False

    def __new__(class_, *args, **kwargs):
        if not isinstance(class_._instance, class_):
            # Do not forward *args/**kwargs to object.__new__: it raises a
            # TypeError for extra arguments since this class defines no __init__.
            class_._instance = object.__new__(class_)
        return class_._instance

    def _reload(self, manager: str, obj):
        """Return the freshest version of `obj` from its manager.

        Pending in-context property changes and deletions on `obj` are carried
        over to the reloaded entity so they are not lost.
        """
        if self._no_reload_context:
            return obj

        entity = _get_manager(manager)._get(obj, obj)
        if obj._is_in_context and hasattr(entity, "_properties"):
            if obj._properties._pending_changes:
                entity._properties._pending_changes = obj._properties._pending_changes
            if obj._properties._pending_deletions:
                entity._properties._pending_deletions = obj._properties._pending_deletions
            entity._properties._entity_owner = obj
        return entity

    def __enter__(self):
        self._no_reload_context = True
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self._no_reload_context = False


def _self_reload(manager):
    """Decorator: reload `self` from `manager` before running the wrapped method."""

    def __reload(fct):
        @functools.wraps(fct)
        def _do_reload(self, *args, **kwargs):
            self = _Reloader()._reload(manager, self)
            return fct(self, *args, **kwargs)

        return _do_reload

    return __reload


def _self_setter(manager):
    """Decorator for setters: persist the change through `manager` and publish an UPDATE event.

    Inside an entity context, the event is only collected; publication happens
    when the context exits.
    """

    def __set_entity(fct):
        @functools.wraps(fct)
        def _do_set_entity(self, *args, **kwargs):
            fct(self, *args, **kwargs)
            entity_manager = _get_manager(manager)
            # A single positional argument is passed through as-is; several are kept as a tuple.
            value = args[0] if len(args) == 1 else args
            event = _make_event(
                self,
                EventOperation.UPDATE,
                attribute_name=fct.__name__,
                attribute_value=value,
            )
            if not self._is_in_context:
                entity = _Reloader()._reload(manager, self)
                fct(entity, *args, **kwargs)
                entity_manager._set(entity)
                Notifier.publish(event)
            else:
                self._in_context_attributes_changed_collector.append(event)

        return _do_set_entity

    return __set_entity


@functools.lru_cache
def _get_manager(manager: str):
    """Return the built manager instance for the given manager name.

    Imports are local to avoid circular imports at module load time.
    """
    from ..cycle._cycle_manager_factory import _CycleManagerFactory
    from ..data._data_manager_factory import _DataManagerFactory
    from ..job._job_manager_factory import _JobManagerFactory
    from ..scenario._scenario_manager_factory import _ScenarioManagerFactory
    from ..sequence._sequence_manager_factory import _SequenceManagerFactory
    from ..submission._submission_manager_factory import _SubmissionManagerFactory
    from ..task._task_manager_factory import _TaskManagerFactory

    return {
        "scenario": _ScenarioManagerFactory._build_manager(),
        "sequence": _SequenceManagerFactory._build_manager(),
        "data": _DataManagerFactory._build_manager(),
        "cycle": _CycleManagerFactory._build_manager(),
        "job": _JobManagerFactory._build_manager(),
        "task": _TaskManagerFactory._build_manager(),
        "submission": _SubmissionManagerFactory._build_manager(),
    }[manager]
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. import abc from typing import Optional class _Labeled: __LABEL_SEPARATOR = " > " @abc.abstractmethod def get_label(self) -> str: raise NotImplementedError def _get_label(self) -> str: """Returns the entity label made of the simple label prefixed by the owner label. Returns: The label of the entity as a string. """ return self._get_explicit_label() or self._generate_label() @abc.abstractmethod def get_simple_label(self) -> str: raise NotImplementedError def _get_simple_label(self) -> str: """Returns the simple label. Returns: The simple label of the entity as a string. """ return self._get_explicit_label() or self._generate_label(True) def _generate_label(self, simple=False) -> str: ls = [] if not simple: if owner_id := self._get_owner_id(): if getattr(self, "id") != owner_id: from ... 
import core as tp owner = tp.get(owner_id) ls.append(owner.get_label()) ls.append(self._generate_entity_label()) return self.__LABEL_SEPARATOR.join(ls) def _get_explicit_label(self) -> Optional[str]: if hasattr(self, "_properties"): return getattr(self, "_properties").get("label") return None def _get_owner_id(self) -> Optional[str]: if hasattr(self, "owner_id"): return getattr(self, "owner_id") return None def _get_name(self) -> Optional[str]: if hasattr(self, "name"): return getattr(self, "name") if hasattr(self, "_properties"): return getattr(self, "_properties").get("name") return None def _get_config_id(self) -> Optional[str]: if hasattr(self, "config_id"): return getattr(self, "config_id") return None def _generate_entity_label(self) -> str: if name := self._get_name(): return name if config_id := self._get_config_id(): return config_id return getattr(self, "id")
import os
import shutil
from functools import lru_cache
from typing import Dict

import bson
import pymongo

from taipy.logger._taipy_logger import _TaipyLogger

from ._utils import _migrate

__logger = _TaipyLogger._get_logger()

# Collections used by pre-migration taipy versions ("pipeline" no longer exists).
OLD_COLLECTIONS = [
    "cycle",
    "scenario",
    "pipeline",
    "task",
    "data_node",
    "job",
    "version",
]

NEW_COLLECTIONS = [
    "cycle",
    "scenario",
    "task",
    "data_node",
    "job",
    "version",
]

DATABASE_NAME = "taipy"
MONGO_BACKUP_FOLDER = ".mongo_backup"


@lru_cache
def _connect_mongodb(db_host: str, db_port: int, db_username: str, db_password: str) -> pymongo.MongoClient:
    """Return a (cached) MongoClient for the given host, port and credentials."""
    auth_str = ""
    if db_username and db_password:
        auth_str = f"{db_username}:{db_password}@"

    connection_string = f"mongodb://{auth_str}{db_host}:{db_port}"
    return pymongo.MongoClient(connection_string)


def __load_all_entities_from_mongo(
    hostname: str,
    port: int,
    user: str,
    password: str,
):
    """Load every document from the old collections, keyed by its entity id."""
    client = _connect_mongodb(hostname, port, user, password)
    entities = {}
    for collection in OLD_COLLECTIONS:
        db = client[DATABASE_NAME]
        cursor = db[collection].find({})
        for document in cursor:
            entities[document["id"]] = {"data": document}
    return entities


def __write_entities_to_mongo(
    _entities: Dict,
    hostname: str,
    port: int,
    user: str,
    password: str,
):
    """Insert the migrated documents into the new collections.

    Documents are routed to a collection when its name appears in their entity id.
    """
    client = _connect_mongodb(hostname, port, user, password)
    for collection in NEW_COLLECTIONS:
        db = client[DATABASE_NAME]
        documents = [entity["data"] for entity in _entities.values() if collection in entity["data"]["id"]]
        # insert_many raises InvalidOperation on an empty document list,
        # so collections with no matching entities must be skipped.
        if documents:
            db[collection].insert_many(documents)


def _backup_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
) -> bool:
    """Dump every old collection to a BSON file under MONGO_BACKUP_FOLDER.

    Returns:
        bool: True once the backup is written.
    """
    client = _connect_mongodb(hostname, port, user, password)
    db = client[DATABASE_NAME]

    if not os.path.exists(MONGO_BACKUP_FOLDER):
        os.makedirs(MONGO_BACKUP_FOLDER, exist_ok=True)

    for collection in OLD_COLLECTIONS:
        with open(os.path.join(MONGO_BACKUP_FOLDER, f"{collection}.bson"), "wb+") as f:
            for doc in db[collection].find():
                f.write(bson.BSON.encode(doc))

    __logger.info(f"Backed up entities to folder '{MONGO_BACKUP_FOLDER}' before migration.")
    return True


def _restore_migrate_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
) -> bool:
    """Re-insert the BSON backup files into MongoDB and delete the backup folder.

    Returns:
        bool: True on success, False if there is no backup folder.
    """
    client = _connect_mongodb(hostname, port, user, password)
    db = client[DATABASE_NAME]

    if not os.path.isdir(MONGO_BACKUP_FOLDER):
        __logger.info(f"The backup folder '{MONGO_BACKUP_FOLDER}' does not exist.")
        return False

    for collection in os.listdir(MONGO_BACKUP_FOLDER):
        if collection.endswith(".bson"):
            # Read-only access is sufficient here ("rb" rather than "rb+").
            with open(os.path.join(MONGO_BACKUP_FOLDER, collection), "rb") as f:
                if bson_data := bson.decode_all(f.read()):  # type: ignore
                    db[collection.split(".")[0]].insert_many(bson_data)

    shutil.rmtree(MONGO_BACKUP_FOLDER)
    __logger.info(f"Restored entities from the backup folder '{MONGO_BACKUP_FOLDER}'.")
    return True


def _remove_backup_mongo_entities() -> bool:
    """Delete the MongoDB backup folder.

    Returns:
        bool: True on success, False if there is no backup folder.
    """
    if not os.path.isdir(MONGO_BACKUP_FOLDER):
        __logger.info(f"The backup folder '{MONGO_BACKUP_FOLDER}' does not exist.")
        return False

    shutil.rmtree(MONGO_BACKUP_FOLDER)
    __logger.info(f"Removed backup entities from the backup folder '{MONGO_BACKUP_FOLDER}'.")
    return True


def _migrate_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
    backup: bool = True,
) -> bool:
    """Migrate entities from mongodb to the current version.

    Args:
        hostname (str, optional): The hostname of the mongodb. Defaults to "localhost".
        port (int, optional): The port of the mongodb. Defaults to 27017.
        user (str, optional): The username of the mongodb. Defaults to "".
        password (str, optional): The password of the mongodb. Defaults to "".
        backup (bool, optional): Whether to backup the entities before migrating. Defaults to True.

    Returns:
        bool: True if the migration was successful, False otherwise.
    """
    if backup:
        _backup_mongo_entities(hostname=hostname, port=port, user=user, password=password)

    __logger.info(f"Starting entity migration from MongoDB {hostname}:{port}")

    entities = __load_all_entities_from_mongo(hostname, port, user, password)
    entities, _ = _migrate(entities)
    __write_entities_to_mongo(entities, hostname, port, user, password)

    __logger.info("Migration finished")
    return True
from ._migrate_fs import _migrate_fs_entities, _remove_backup_file_entities, _restore_migrate_file_entities from ._migrate_mongo import _migrate_mongo_entities, _remove_backup_mongo_entities, _restore_migrate_mongo_entities from ._migrate_sql import _migrate_sql_entities, _remove_backup_sql_entities, _restore_migrate_sql_entities
import json
import os
import shutil
from typing import Dict

from taipy.logger._taipy_logger import _TaipyLogger

from ._utils import _migrate

__logger = _TaipyLogger._get_logger()


def _load_all_entities_from_fs(root: str) -> Dict:
    """Run through all files in the data folder and load every ``.json`` entity.

    Version entity ids are prefixed with ``VERSION_`` to avoid collisions.
    Returns a dict mapping entity id to its data and file path.
    """
    entities = {}
    # Use a distinct loop variable so the `root` parameter is not shadowed
    # (os.walk(root) evaluates its argument once, so behavior is unchanged).
    for dirpath, _, files in os.walk(root):
        for file in files:
            if file.endswith(".json"):
                with open(os.path.join(dirpath, file)) as f:
                    _id = file.split(".")[0]
                    if "version" in dirpath:
                        _id = f"VERSION_{_id}"
                    entities[_id] = {
                        "data": json.load(f),
                        "path": os.path.join(dirpath, file),
                    }
    return entities


def __write_entities_to_fs(_entities: Dict, root: str):
    """Write migrated entities back to disk, dropping legacy pipeline entities."""
    if not os.path.exists(root):
        os.makedirs(root, exist_ok=True)

    for _id, entity in _entities.items():
        # Do not write pipeline entities
        if "PIPELINE" in _id:
            continue
        with open(entity["path"], "w") as f:
            json.dump(entity["data"], f, indent=0)

    # Remove pipelines folder
    pipelines_path = os.path.join(root, "pipelines")
    if os.path.exists(pipelines_path):
        shutil.rmtree(pipelines_path)


def _restore_migrate_file_entities(path: str) -> bool:
    """Replace `path` with its `{path}_backup` folder.

    Returns:
        bool: True on success, False if the backup folder does not exist.
    """
    backup_path = f"{path}_backup"
    if not os.path.exists(backup_path):
        __logger.error(f"The backup folder '{backup_path}' does not exist.")
        return False

    if os.path.exists(path):
        shutil.rmtree(path)
    else:
        __logger.warning(f"The original entities folder '{path}' does not exist.")

    os.rename(backup_path, path)
    __logger.info(f"Restored entities from the backup folder '{backup_path}' to '{path}'.")
    return True


def _remove_backup_file_entities(path: str) -> bool:
    """Delete the `{path}_backup` folder.

    Returns:
        bool: True on success, False if the backup folder does not exist.
    """
    backup_path = f"{path}_backup"
    if not os.path.exists(backup_path):
        __logger.error(f"The backup folder '{backup_path}' does not exist.")
        return False

    shutil.rmtree(backup_path)
    __logger.info(f"Removed backup entities from the backup folder '{backup_path}'.")
    return True


def _migrate_fs_entities(path: str, backup: bool = True) -> bool:
    """Migrate entities from filesystem to the current version.

    Args:
        path (str): The path to the folder containing the entities.
        backup (bool, optional): Whether to backup the entities before migrating. Defaults to True.

    Returns:
        bool: True if the migration was successful, False otherwise.
    """
    if not os.path.isdir(path):
        __logger.error(f"Folder '{path}' does not exist.")
        return False

    if backup:
        backup_path = f"{path}_backup"
        try:
            shutil.copytree(path, backup_path)
        except FileExistsError:
            __logger.warning(f"The backup folder '{backup_path}' already exists. Migration canceled.")
            return False
        else:
            __logger.info(f"Backed up entities from '{path}' to '{backup_path}' folder before migration.")

    __logger.info(f"Starting entity migration from '{path}' folder.")

    entities = _load_all_entities_from_fs(path)
    entities, _ = _migrate(entities)
    __write_entities_to_fs(entities, path)

    __logger.info("Migration finished")
    return True
from functools import lru_cache

import pymongo


@lru_cache
def _connect_mongodb(
    db_host: str, db_port: int, db_username: str, db_password: str, db_extra_args: frozenset, db_driver: str
) -> pymongo.MongoClient:
    """Create a connection to a Mongo database.

    The `"mongodb_extra_args"` passed by the user is originally a dictionary, but since `@lru_cache`
    wrapper only accepts hashable parameters, the `"mongodb_extra_args"` should be converted into
    a frozenset beforehand.

    Parameters:
        db_host (str): the database host.
        db_port (int): the database port.
        db_username (str): the database username.
        db_password (str): the database password.
        db_extra_args (frozenset): A frozenset converted from a dictionary of additional arguments
            to be passed into database connection string.
        db_driver (str): optional driver suffix appended to the "mongodb" scheme.

    Returns:
        pymongo.MongoClient
    """
    credentials = f"{db_username}:{db_password}@" if db_username and db_password else ""

    extra_args = "&".join(f"{key}={value}" for key, value in db_extra_args)
    if extra_args:
        extra_args = "/?" + extra_args

    scheme = f"mongodb+{db_driver}" if db_driver else "mongodb"
    # When a driver is given, the port must be omitted from the connection string.
    host_part = db_host if db_driver else f"{db_host}:{db_port}"
    return pymongo.MongoClient(f"{scheme}://{credentials}{host_part}{extra_args}")
from taipy.config.common._validate_id import _validate_id


class MongoDefaultDocument:
    """The default class for \"custom_document\" property to configure a `MongoCollectionDataNode^`.

    Attributes:
        **kwargs: Attributes of the MongoDefaultDocument object.

    Example:
        - `document = MongoDefaultDocument(name="example", age=30)` will return a MongoDefaultDocument
          object so that `document.name` returns `"example"`, and `document.age` returns `30`.
        - `document = MongoDefaultDocument(date="12/24/2018", temperature=20)` will return a
          MongoDefaultDocument object so that `document.date` returns `"12/24/2018"`, and
          `document.temperature` returns `20`.
    """

    def __init__(self, **kwargs):
        # Attribute names are validated before being set on the instance.
        for attribute_name, value in kwargs.items():
            setattr(self, _validate_id(attribute_name), value)
from .mongo_default_document import MongoDefaultDocument
from collections import UserList class _ListAttributes(UserList): def __init__(self, parent, *args, **kwargs): super().__init__(*args, **kwargs) self._parent = parent def __add_iterable(self, iterable): for i in iterable: super(_ListAttributes, self).append(i) def __set_self(self): from ... import core as tp if hasattr(self, "_parent"): tp.set(self._parent) def __add__(self, value): if hasattr(value, "__iter__"): self.__add_iterable(value) else: self.append(value) return self def extend(self, value) -> None: super(_ListAttributes, self).extend(value) self.__set_self() def append(self, value) -> None: super(_ListAttributes, self).append(value) self.__set_self() def remove(self, value): super(_ListAttributes, self).remove(value) self.__set_self() def clear(self) -> None: super(_ListAttributes, self).clear() self.__set_self()
import functools import warnings from typing import Optional warnings.simplefilter("once", ResourceWarning) def _warn_deprecated(deprecated: str, suggest: Optional[str] = None, stacklevel: int = 3) -> None: category = DeprecationWarning message = f"{deprecated} is deprecated." if suggest: message += f" Use {suggest} instead." warnings.warn(message=message, category=category, stacklevel=stacklevel) def _warn_no_core_service(stacklevel: int = 3): def inner(f): @functools.wraps(f) def _check_if_core_service_is_running(*args, **kwargs): from .._orchestrator._orchestrator_factory import _OrchestratorFactory if not _OrchestratorFactory._dispatcher: message = "The Core service is NOT running" warnings.warn(message=message, category=ResourceWarning, stacklevel=stacklevel) return f(*args, **kwargs) return _check_if_core_service_is_running return inner
import functools from enum import Enum class _ReprEnum(Enum): @classmethod @functools.lru_cache def _from_repr(cls, repr_: str): return next(filter(lambda e: repr(e) == repr_, cls)) # type: ignore
from typing import Iterable

from taipy.logger._taipy_logger import _TaipyLogger

from ..data import DataNode


def _warn_if_inputs_not_ready(inputs: Iterable[DataNode]):
    """Log a warning for every input data node that has never been written."""
    from ..data import CSVDataNode, ExcelDataNode, JSONDataNode, ParquetDataNode, PickleDataNode
    from ..data._data_manager_factory import _DataManagerFactory

    logger = _TaipyLogger._get_logger()
    data_manager = _DataManagerFactory._build_manager()
    # File-based storage types, for which a wrong path is the likely culprit.
    file_based_storage_types = (
        CSVDataNode.storage_type(),
        ExcelDataNode.storage_type(),
        JSONDataNode.storage_type(),
        PickleDataNode.storage_type(),
        ParquetDataNode.storage_type(),
    )
    for dn in inputs:
        dn = data_manager._get(dn.id)
        if dn.is_ready_for_reading is False and not dn._last_edit_date:
            if dn.storage_type() in file_based_storage_types:
                logger.warning(
                    f"{dn.id} cannot be read because it has never been written. "
                    f"Hint: The data node may refer to a wrong path : {dn.path} "
                )
            else:
                logger.warning(f"{dn.id} cannot be read because it has never been written.")
from typing import TypeVar, Union

from .._repository._abstract_converter import _AbstractConverter
from .._repository._base_taipy_model import _BaseModel

# Type variables shared by the repository/converter layer.
ModelType = TypeVar("ModelType", bound=_BaseModel)  # a persistence model type
Entity = TypeVar("Entity")  # a domain entity type
Converter = TypeVar("Converter", bound=_AbstractConverter)  # a model <-> entity converter type

# Alias for values representable in JSON.
Json = Union[dict, list, str, int, float, bool]
import functools
import time
from collections import namedtuple
from importlib import import_module
from operator import attrgetter
from typing import Callable, Optional, Tuple

from taipy.config import Config


@functools.lru_cache
def _load_fct(module_name: str, fct_name: str) -> Callable:
    """Import `module_name` and return the attribute at dotted path `fct_name`."""
    module = import_module(module_name)
    return attrgetter(fct_name)(module)


def _retry_read_entity(exceptions: Tuple, sleep_time: float = 0.2):
    """
    Retries the wrapped function/method if the exceptions listed in ``exceptions`` are thrown.

    The number of retries is defined by Config.core.read_entity_retry.

    Parameters:
        exceptions (tuple): Tuple of exceptions that trigger a retry attempt.
        sleep_time (float): Time to sleep between retries.
    """

    def decorator(func):
        # functools.wraps preserves the wrapped function's metadata
        # (__name__, __doc__, ...), which the original decorator lost.
        @functools.wraps(func)
        def newfn(*args, **kwargs):
            for _ in range(Config.core.read_entity_retry):
                try:
                    return func(*args, **kwargs)
                except exceptions:
                    time.sleep(sleep_time)
            # Final attempt: let any exception propagate to the caller.
            return func(*args, **kwargs)

        return newfn

    return decorator


@functools.lru_cache
def _get_fct_name(f) -> Optional[str]:
    # Mock function does not have __qualname__ attribute -> return __name__
    # Partial or anonymous function does not have __name__ or __qualname__ attribute -> return None
    name = getattr(f, "__qualname__", getattr(f, "__name__", None))
    return name


def _fct_to_dict(obj):
    """Serialize a callable (or `_Subscriber`) to a dict; None if it has no usable name."""
    params = []
    callback = obj

    if isinstance(obj, _Subscriber):
        callback = obj.callback
        params = obj.params

    fct_name = _get_fct_name(callback)
    if not fct_name:
        return None
    return {
        "fct_name": fct_name,
        "fct_params": params,
        "fct_module": callback.__module__,
    }


def _fcts_to_dict(objs):
    """Serialize a list of callables, silently dropping the unserializable ones."""
    return [d for obj in objs if (d := _fct_to_dict(obj)) is not None]


_Subscriber = namedtuple("_Subscriber", "callback params")
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._scenario_fs_repository import _ScenarioFSRepository
from ._scenario_manager import _ScenarioManager
from ._scenario_sql_repository import _ScenarioSQLRepository


class _ScenarioManagerFactory(_ManagerFactory):
    """Builds the scenario manager, swapping in the enterprise implementation when available."""

    __REPOSITORY_MAP = {"default": _ScenarioFSRepository, "sql": _ScenarioSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_ScenarioManager]:  # type: ignore
        if cls._using_enterprise():
            # Enterprise edition provides its own manager class and repository builder.
            manager = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".scenario._scenario_manager", "_ScenarioManager"
            )  # type: ignore
            build_repository = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".scenario._scenario_manager_factory", "_ScenarioManagerFactory"
            )._build_repository  # type: ignore
        else:
            manager = _ScenarioManager
            build_repository = cls._build_repository
        manager._repository = build_repository()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

from sqlalchemy import JSON, Boolean, Column, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from ..cycle.cycle_id import CycleId
from ..data.data_node_id import DataNodeId
from ..task.task_id import TaskId
from .scenario_id import ScenarioId


@mapper_registry.mapped
@dataclass
class _ScenarioModel(_BaseModel):
    """Persistence model for scenario entities (SQL table "scenario")."""

    __table__ = Table(
        "scenario",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("config_id", String),
        Column("tasks", JSON),
        Column("additional_data_nodes", JSON),
        Column("properties", JSON),
        Column("creation_date", String),
        Column("primary_scenario", Boolean),
        Column("subscribers", JSON),
        Column("tags", JSON),
        Column("version", String),
        Column("sequences", JSON),
        Column("cycle", String),
    )
    id: ScenarioId
    config_id: str
    tasks: List[TaskId]
    additional_data_nodes: List[DataNodeId]
    properties: Dict[str, Any]
    creation_date: str  # ISO-format datetime string
    primary_scenario: bool
    subscribers: List[Dict]
    tags: List[str]
    version: str
    sequences: Optional[Dict[str, Dict]] = None
    cycle: Optional[CycleId] = None

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a raw dict, deserializing the JSON-encoded fields."""
        return _ScenarioModel(
            id=data["id"],
            config_id=data["config_id"],
            tasks=_BaseModel._deserialize_attribute(data["tasks"]),
            additional_data_nodes=_BaseModel._deserialize_attribute(data["additional_data_nodes"]),
            properties=_BaseModel._deserialize_attribute(data["properties"]),
            creation_date=data["creation_date"],
            primary_scenario=data["primary_scenario"],
            subscribers=_BaseModel._deserialize_attribute(data["subscribers"]),
            tags=_BaseModel._deserialize_attribute(data["tags"]),
            version=data["version"],
            sequences=_BaseModel._deserialize_attribute(data["sequences"]),
            cycle=CycleId(data["cycle"]) if "cycle" in data else None,
        )

    def to_list(self):
        """Serialize the model to the column-ordered list expected by the SQL layer."""
        return [
            self.id,
            self.config_id,
            _BaseModel._serialize_attribute(self.tasks),
            _BaseModel._serialize_attribute(self.additional_data_nodes),
            _BaseModel._serialize_attribute(self.properties),
            self.creation_date,
            self.primary_scenario,
            _BaseModel._serialize_attribute(self.subscribers),
            _BaseModel._serialize_attribute(self.tags),
            self.version,
            _BaseModel._serialize_attribute(self.sequences),
            self.cycle,
        ]
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._filesystem_repository import _FileSystemRepository from ._scenario_converter import _ScenarioConverter from ._scenario_model import _ScenarioModel class _ScenarioFSRepository(_FileSystemRepository): def __init__(self): super().__init__(model_type=_ScenarioModel, converter=_ScenarioConverter, dir_name="scenarios")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._sql_repository import _SQLRepository from ._scenario_converter import _ScenarioConverter from ._scenario_model import _ScenarioModel class _ScenarioSQLRepository(_SQLRepository): def __init__(self): super().__init__(model_type=_ScenarioModel, converter=_ScenarioConverter)
from datetime import datetime
from typing import Dict, List, Optional, Set, Union

from .._repository._abstract_converter import _AbstractConverter
from .._version._utils import _migrate_entity
from ..common import _utils
from ..cycle._cycle_manager_factory import _CycleManagerFactory
from ..cycle.cycle import Cycle, CycleId
from ..data.data_node import DataNode, DataNodeId
from ..scenario._scenario_model import _ScenarioModel
from ..scenario.scenario import Scenario
from ..task.task import Task, TaskId


class _ScenarioConverter(_AbstractConverter):
    """Converts between `Scenario^` entities and their `_ScenarioModel` representation."""

    @classmethod
    def _entity_to_model(cls, scenario: Scenario) -> _ScenarioModel:
        """Serialize *scenario* into a `_ScenarioModel`."""
        # Sequences are flattened to plain dictionaries: task ids, raw
        # properties, and subscriber callbacks serialized as module/name pairs.
        sequences: Dict[str, Dict[str, Union[List[TaskId], Dict, List]]] = {
            seq_name: {
                Scenario._SEQUENCE_TASKS_KEY: [
                    task.id if isinstance(task, Task) else task for task in seq_data.get("tasks", [])
                ],
                Scenario._SEQUENCE_PROPERTIES_KEY: seq_data.get("properties", {}),
                Scenario._SEQUENCE_SUBSCRIBERS_KEY: _utils._fcts_to_dict(seq_data.get("subscribers", [])),
            }
            for seq_name, seq_data in scenario._sequences.items()
        }
        task_ids = [t.id if isinstance(t, Task) else TaskId(str(t)) for t in list(scenario._tasks)]
        additional_dn_ids = [
            dn.id if isinstance(dn, DataNode) else DataNodeId(str(dn))
            for dn in list(scenario._additional_data_nodes)
        ]
        return _ScenarioModel(
            id=scenario.id,
            config_id=scenario.config_id,
            tasks=task_ids,
            additional_data_nodes=additional_dn_ids,
            properties=scenario._properties.data,
            creation_date=scenario._creation_date.isoformat(),
            primary_scenario=scenario._primary_scenario,
            subscribers=_utils._fcts_to_dict(scenario._subscribers),
            tags=list(scenario._tags),
            version=scenario._version,
            cycle=scenario._cycle.id if scenario._cycle else None,
            sequences=sequences if sequences else None,
        )

    @classmethod
    def _model_to_entity(cls, model: _ScenarioModel) -> Scenario:
        """Rebuild a `Scenario^` entity from its persisted *model*."""
        tasks: Union[Set[TaskId], Set[Task], Set] = set(model.tasks) if model.tasks else set()
        if model.sequences:
            # Re-hydrate each sequence's subscriber callbacks from their
            # serialized module/name form.
            for seq_name, seq_data in model.sequences.items():
                subscribers = seq_data.get(Scenario._SEQUENCE_SUBSCRIBERS_KEY)
                if subscribers:
                    model.sequences[seq_name][Scenario._SEQUENCE_SUBSCRIBERS_KEY] = [
                        _utils._Subscriber(_utils._load_fct(it["fct_module"], it["fct_name"]), it["fct_params"])
                        for it in subscribers
                    ]
        scenario = Scenario(
            scenario_id=model.id,
            config_id=model.config_id,
            tasks=tasks,
            additional_data_nodes=set(model.additional_data_nodes),
            properties=model.properties,
            creation_date=datetime.fromisoformat(model.creation_date),
            is_primary=model.primary_scenario,
            tags=set(model.tags),
            cycle=cls.__to_cycle(model.cycle),
            subscribers=[
                _utils._Subscriber(_utils._load_fct(it["fct_module"], it["fct_name"]), it["fct_params"])
                for it in model.subscribers
            ],
            version=model.version,
            sequences=model.sequences,
        )
        # Run any registered version migrations before handing the entity back.
        return _migrate_entity(scenario)

    @staticmethod
    def __to_cycle(cycle_id: Optional[CycleId] = None) -> Optional[Cycle]:
        # Cycles live in their own repository; resolve lazily via the manager.
        return _CycleManagerFactory._build_manager()._get(cycle_id) if cycle_id else None
from typing import NewType ScenarioId = NewType("ScenarioId", str) ScenarioId.__doc__ = """Type that holds a `Scenario^` identifier."""
from abc import abstractmethod
from typing import Callable, Iterable, List, Optional, Union

from ..job.job import Job
from ..task.task import Task


class _AbstractOrchestrator:
    """Creates, enqueues, and orchestrates jobs as instances of `Job^` class."""

    @classmethod
    @abstractmethod
    def initialize(cls):
        """Prepare the orchestrator before it accepts any submission."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def submit(
        cls,
        sequence,
        callbacks: Optional[Iterable[Callable]],
        force: bool = False,
        wait: bool = False,
        timeout: Optional[Union[float, int]] = None,
    ) -> List[Job]:
        """Submit every task of *sequence* for execution; return the created jobs."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def submit_task(
        cls,
        task: Task,
        callbacks: Optional[Iterable[Callable]] = None,
        force: bool = False,
        wait: bool = False,
        timeout: Optional[Union[float, int]] = None,
    ) -> Job:
        """Submit a single *task* for execution; return the created job."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def cancel_job(cls, job):
        """Cancel *job* if it has not finished yet."""
        raise NotImplementedError
from importlib import util
from typing import Optional, Type

from taipy.config.config import Config

from ..common._utils import _load_fct
from ..exceptions.exceptions import ModeNotAvailable, OrchestratorNotBuilt
from ._abstract_orchestrator import _AbstractOrchestrator
from ._dispatcher import _DevelopmentJobDispatcher, _JobDispatcher, _StandaloneJobDispatcher
from ._orchestrator import _Orchestrator


class _OrchestratorFactory:
    """Builds and caches the orchestrator and its job dispatcher.

    The enterprise implementations are used whenever the `taipy.enterprise`
    package is importable; otherwise the core ones are used.
    """

    _TAIPY_ENTERPRISE_MODULE = "taipy.enterprise"
    _TAIPY_ENTERPRISE_CORE_ORCHESTRATOR_MODULE = _TAIPY_ENTERPRISE_MODULE + ".core._orchestrator._orchestrator"
    _TAIPY_ENTERPRISE_CORE_DISPATCHER_MODULE = _TAIPY_ENTERPRISE_MODULE + ".core._orchestrator._dispatcher"
    __TAIPY_ENTERPRISE_BUILD_DISPATCHER_METHOD = "_build_dispatcher"

    _orchestrator: Optional[_Orchestrator] = None
    _dispatcher: Optional[_JobDispatcher] = None

    @classmethod
    def _build_orchestrator(cls) -> Type[_AbstractOrchestrator]:
        """Select, initialize, cache, and return the orchestrator class."""
        if util.find_spec(cls._TAIPY_ENTERPRISE_MODULE) is not None:
            cls._orchestrator = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_ORCHESTRATOR_MODULE,
                "Orchestrator",
            )  # type: ignore
        else:
            cls._orchestrator = _Orchestrator  # type: ignore
        cls._orchestrator.initialize()  # type: ignore
        return cls._orchestrator  # type: ignore

    @classmethod
    def _build_dispatcher(cls, force_restart=False) -> Optional[_JobDispatcher]:
        """Build (or rebuild) the dispatcher matching the configured job mode.

        Raises:
            OrchestratorNotBuilt: If `_build_orchestrator` was not called first.
            ModeNotAvailable: If the configured job mode has no dispatcher.
        """
        if not cls._orchestrator:
            raise OrchestratorNotBuilt
        if Config.job_config.is_standalone:
            cls.__build_standalone_job_dispatcher(force_restart=force_restart)
        elif Config.job_config.is_development:
            cls.__build_development_job_dispatcher()
        elif util.find_spec(cls._TAIPY_ENTERPRISE_MODULE):
            cls.__build_enterprise_job_dispatcher(force_restart=force_restart)
        else:
            raise ModeNotAvailable(f"Job mode {Config.job_config.mode} is not available.")
        return cls._dispatcher

    @classmethod
    def _remove_dispatcher(cls) -> Optional[_JobDispatcher]:
        """Stop and drop the current dispatcher.

        The development dispatcher is never stopped (it has no thread to stop).
        """
        if cls._dispatcher is not None and not isinstance(cls._dispatcher, _DevelopmentJobDispatcher):
            cls._dispatcher.stop()
        cls._dispatcher = None
        return cls._dispatcher

    @classmethod
    def __build_standalone_job_dispatcher(cls, force_restart=False):
        """Build the standalone dispatcher, reusing a live one unless forced."""
        if isinstance(cls._dispatcher, _StandaloneJobDispatcher):
            if not force_restart:
                return
            cls._dispatcher.stop()
        if util.find_spec(cls._TAIPY_ENTERPRISE_MODULE) is not None:
            cls._dispatcher = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_DISPATCHER_MODULE, cls.__TAIPY_ENTERPRISE_BUILD_DISPATCHER_METHOD
            )(cls._orchestrator)
        else:
            cls._dispatcher = _StandaloneJobDispatcher(cls._orchestrator)  # type: ignore
        cls._dispatcher.start()  # type: ignore

    @classmethod
    def __build_development_job_dispatcher(cls):
        """Swap in the synchronous development dispatcher."""
        if isinstance(cls._dispatcher, _StandaloneJobDispatcher):
            cls._dispatcher.stop()
        cls._dispatcher = _DevelopmentJobDispatcher(cls._orchestrator)  # type: ignore

    @classmethod
    def __build_enterprise_job_dispatcher(cls, force_restart=False):
        """Delegate dispatcher construction to the enterprise package."""
        cls._dispatcher = _load_fct(
            cls._TAIPY_ENTERPRISE_CORE_DISPATCHER_MODULE, cls.__TAIPY_ENTERPRISE_BUILD_DISPATCHER_METHOD
        )(cls._orchestrator, force_restart)
        if cls._dispatcher:
            cls._dispatcher.start()
        else:
            raise ModeNotAvailable(f"Job mode {Config.job_config.mode} is not available.")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from ._development_job_dispatcher import _DevelopmentJobDispatcher from ._job_dispatcher import _JobDispatcher from ._standalone_job_dispatcher import _StandaloneJobDispatcher
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from typing import Optional

from ...job.job import Job
from .._abstract_orchestrator import _AbstractOrchestrator
from ._job_dispatcher import _JobDispatcher
from ._task_function_wrapper import _TaskFunctionWrapper


class _DevelopmentJobDispatcher(_JobDispatcher):
    """Manages job dispatching (instances of `Job^` class) in a synchronous way."""

    def __init__(self, orchestrator: Optional[_AbstractOrchestrator]):
        super().__init__(orchestrator)

    def start(self):
        # This dispatcher never runs as a thread; starting it is a usage error.
        raise NotImplementedError

    def is_running(self) -> bool:
        # Dispatch happens inline, so the dispatcher is always "running".
        return True

    def stop(self):
        # No thread to stop; see `start`.
        raise NotImplementedError

    def run(self):
        # The thread body is intentionally unusable in development mode.
        raise NotImplementedError

    def _dispatch(self, job: Job):
        """Dispatches the given `Job^` on an available worker for execution.

        Parameters:
            job (Job^): The job to submit on an executor with an available worker.
        """
        outcome = _TaskFunctionWrapper(job.id, job.task).execute()
        self._update_job_status(job, outcome)
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from typing import Optional

from taipy.config._serializer._toml_serializer import _TomlSerializer
from taipy.config.config import Config

from ...job.job import Job
from .._abstract_orchestrator import _AbstractOrchestrator
from ._job_dispatcher import _JobDispatcher
from ._task_function_wrapper import _TaskFunctionWrapper


class _StandaloneJobDispatcher(_JobDispatcher):
    """Manages job dispatching (instances of `Job^` class) in an asynchronous way using a ProcessPoolExecutor."""

    def __init__(self, orchestrator: Optional[_AbstractOrchestrator]):
        super().__init__(orchestrator)
        worker_count = Config.job_config.max_nb_of_workers or 1
        self._executor = ProcessPoolExecutor(worker_count)  # type: ignore
        self._nb_available_workers = self._executor._max_workers  # type: ignore

    def _dispatch(self, job: Job):
        """Dispatches the given `Job^` on an available worker for execution.

        Parameters:
            job (Job^): The job to submit on an executor with an available worker.
        """
        self._nb_available_workers -= 1
        # Worker processes do not inherit the parent's applied Config, so it is
        # shipped to them serialized as TOML.
        serialized_config = _TomlSerializer()._serialize(Config._applied_config)
        future = self._executor.submit(_TaskFunctionWrapper(job.id, job.task), config_as_string=serialized_config)
        self._set_dispatched_processes(job.id, future)  # type: ignore
        future.add_done_callback(self._release_worker)
        future.add_done_callback(partial(self._update_job_status_from_future, job))

    def _release_worker(self, _):
        # Completion callback: free the worker slot regardless of outcome.
        self._nb_available_workers += 1

    def _update_job_status_from_future(self, job: Job, ft):
        # Completion callback: drop the bookkeeping entry and record the result.
        self._pop_dispatched_process(job.id)  # type: ignore
        self._update_job_status(job, ft.result())
import threading
from abc import abstractmethod
from typing import Dict, Optional

from taipy.config.config import Config
from taipy.logger._taipy_logger import _TaipyLogger

from ...data._data_manager_factory import _DataManagerFactory
from ...job._job_manager_factory import _JobManagerFactory
from ...job.job import Job
from ...task.task import Task
from .._abstract_orchestrator import _AbstractOrchestrator


class _JobDispatcher(threading.Thread):
    """Manages job dispatching (instances of `Job^` class) on executors."""

    _STOP_FLAG = False
    _dispatched_processes: Dict = {}  # job id -> in-flight process/future handle
    __logger = _TaipyLogger._get_logger()
    _nb_available_workers: int = 1

    def __init__(self, orchestrator: Optional[_AbstractOrchestrator]):
        threading.Thread.__init__(self, name="Thread-Taipy-JobDispatcher")
        self.daemon = True  # do not keep the interpreter alive at shutdown
        self.orchestrator = orchestrator
        self.lock = self.orchestrator.lock  # type: ignore
        Config.block_update()

    def start(self):
        """Start the dispatcher"""
        threading.Thread.start(self)

    def is_running(self) -> bool:
        """Return True if the dispatcher is running"""
        return self.is_alive()

    def stop(self):
        """Stop the dispatcher"""
        self._STOP_FLAG = True

    def run(self):
        """Thread body: pull jobs from the orchestrator queue and execute them."""
        _TaipyLogger._get_logger().info("Start job dispatcher...")
        while not self._STOP_FLAG:
            try:
                if self._can_execute():
                    with self.lock:
                        # Short timeout keeps the loop responsive to _STOP_FLAG.
                        job = self.orchestrator.jobs_to_run.get(block=True, timeout=0.1)
                    self._execute_job(job)
            except Exception:  # In case the last job of the queue has been removed.
                # Deliberate best-effort: an empty queue (get timeout) just retries.
                pass

    def _can_execute(self) -> bool:
        """Returns True if the dispatcher have resources to execute a new job."""
        return self._nb_available_workers > 0

    def _execute_job(self, job: Job):
        """Run *job*, or skip it when its task does not need to run."""
        if job.force or self._needs_to_run(job.task):
            if job.force:
                self.__logger.info(f"job {job.id} is forced to be executed.")
            job.running()
            self._dispatch(job)
        else:
            job._unlock_edit_on_outputs()
            job.skipped()
            self.__logger.info(f"job {job.id} is skipped.")

    def _execute_jobs_synchronously(self):
        """Drain the jobs-to-run queue in the calling thread (no worker pool)."""
        while not self.orchestrator.jobs_to_run.empty():
            with self.lock:
                try:
                    job = self.orchestrator.jobs_to_run.get()
                except Exception:  # In case the last job of the queue has been removed.
                    # Bug fix: `job` is unbound here on the first iteration (and
                    # stale afterwards), so the old warning message raised
                    # NameError and execution fell through to `_execute_job`
                    # with a stale job. Log without referencing `job` and skip.
                    self.__logger.warning("The job to run is no longer in the list of jobs to run.")
                    continue
            self._execute_job(job)

    @staticmethod
    def _needs_to_run(task: Task) -> bool:
        """
        Returns True if the task has no output or if at least one input was modified since the latest run.

        Parameters:
            task (Task^): The task to run.
        Returns:
            True if the task needs to run. False otherwise.
        """
        if not task.skippable:
            return True
        data_manager = _DataManagerFactory._build_manager()
        if len(task.output) == 0:
            return True
        # Every output must hold valid (cached) data for the task to be skipped.
        are_outputs_in_cache = all(data_manager._get(dn.id).is_valid for dn in task.output.values())
        if not are_outputs_in_cache:
            return True
        if len(task.input) == 0:
            return False
        # Re-run when the newest input is more recent than the oldest output.
        input_last_edit = max(data_manager._get(dn.id).last_edit_date for dn in task.input.values())
        output_last_edit = min(data_manager._get(dn.id).last_edit_date for dn in task.output.values())
        return input_last_edit > output_last_edit

    @abstractmethod
    def _dispatch(self, job: Job):
        """
        Dispatches the given `Job^` on an available worker for execution.

        Parameters:
            job (Job^): The job to submit on an executor with an available worker.
        """
        raise NotImplementedError

    @staticmethod
    def _update_job_status(job: Job, exceptions):
        """Record the execution outcome on *job* and persist it."""
        job.update_status(exceptions)
        _JobManagerFactory._build_manager()._set(job)

    @classmethod
    def _set_dispatched_processes(cls, job_id, process):
        cls._dispatched_processes[job_id] = process

    @classmethod
    def _pop_dispatched_process(cls, job_id, default=None):
        return cls._dispatched_processes.pop(job_id, default)  # type: ignore
from typing import Any, List

from taipy.config._serializer._toml_serializer import _TomlSerializer
from taipy.config.config import Config
from taipy.logger._taipy_logger import _TaipyLogger

from ...data._data_manager_factory import _DataManagerFactory
from ...data.data_node import DataNode
from ...exceptions import DataNodeWritingError
from ...job.job_id import JobId
from ...task.task import Task

logger = _TaipyLogger._get_logger()


class _TaskFunctionWrapper:
    """Wrapper around task function."""

    def __init__(self, job_id: JobId, task: Task):
        self.job_id = job_id
        self.task = task

    def __call__(self, **kwargs):
        """Make this object callable as a function. Actually calls `execute`."""
        return self.execute(**kwargs)

    def execute(self, **kwargs):
        """Execute the wrapped function. If `config_as_string` is given, then it will be reapplied to the config."""
        try:
            config_as_string = kwargs.pop("config_as_string", None)
            if config_as_string:
                # Re-apply the parent's config (we may be in a worker process).
                logger.info("Updating with given config.")
                Config._applied_config._update(_TomlSerializer()._deserialize(config_as_string))
                Config.block_update()
            input_nodes = list(self.task.input.values())
            output_nodes = list(self.task.output.values())
            fct_args = self._read_inputs(input_nodes)
            fct_results = self._execute_fct(fct_args)
            return self._write_data(output_nodes, fct_results, self.job_id)
        except Exception as e:
            # The exception is returned (not raised) so the dispatcher can
            # attach it to the job's status.
            logger.error("Error during task function execution!", exc_info=1)
            return [e]

    def _read_inputs(self, inputs: List[DataNode]) -> List[Any]:
        """Read every input data node; raises if any has no data."""
        manager = _DataManagerFactory._build_manager()
        return [manager._get(dn.id).read_or_raise() for dn in inputs]

    def _write_data(self, outputs: List[DataNode], results, job_id: JobId):
        """Write *results* into *outputs*; return the per-node write errors."""
        manager = _DataManagerFactory._build_manager()
        try:
            if not outputs:
                return None
            exceptions = []
            for res, dn in zip(self._extract_results(outputs, results), outputs):
                try:
                    node = manager._get(dn.id)
                    node.write(res, job_id=job_id)
                    manager._set(node)
                except Exception as e:
                    # One failing write must not prevent the remaining ones.
                    logger.error("Error during write", exc_info=1)
                    exceptions.append(DataNodeWritingError(f"Error writing in datanode id {dn.id}: {e}"))
            return exceptions
        except Exception as e:
            return [e]

    def _execute_fct(self, arguments: List[Any]) -> Any:
        """Call the task's function with the materialized input values."""
        return self.task.function(*arguments)

    def _extract_results(self, outputs: List[DataNode], results: Any) -> List[Any]:
        """Normalize *results* to one value per output; raise on a mismatch."""
        normalized: List[Any] = [results] if len(outputs) == 1 else results
        if len(normalized) != len(outputs):
            raise DataNodeWritingError("Error: wrong number of result or task output")
        return normalized
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._task_fs_repository import _TaskFSRepository
from ._task_manager import _TaskManager
from ._task_sql_repository import _TaskSQLRepository


class _TaskManagerFactory(_ManagerFactory):
    """Builds the task manager wired to the configured repository backend."""

    __REPOSITORY_MAP = {"default": _TaskFSRepository, "sql": _TaskSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_TaskManager]:  # type: ignore
        """Return the task manager class, preferring the enterprise flavor when installed."""
        if cls._using_enterprise():
            manager = _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager", "_TaskManager")  # type: ignore
            repository_factory = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager_factory", "_TaskManagerFactory"
            )._build_repository  # type: ignore
        else:
            manager = _TaskManager
            repository_factory = cls._build_repository
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository selected by the current configuration."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from .._repository._abstract_converter import _AbstractConverter
from .._version._utils import _migrate_entity
from ..common._utils import _load_fct
from ..data._data_manager_factory import _DataManagerFactory
from ..exceptions import NonExistingDataNode
from ..task._task_model import _TaskModel
from ..task.task import Task
from .task import TaskId


class _TaskConverter(_AbstractConverter):
    """Converts between `Task^` entities and their `_TaskModel` representation."""

    @classmethod
    def _entity_to_model(cls, task: Task) -> _TaskModel:
        """Serialize *task* into a `_TaskModel`."""
        return _TaskModel(
            id=task.id,
            owner_id=task.owner_id,
            parent_ids=list(task._parent_ids),
            config_id=task.config_id,
            input_ids=cls.__to_ids(task.input.values()),
            # The function itself is not serialized; its module/name pair is.
            function_name=task._function.__name__,
            function_module=task._function.__module__,
            output_ids=cls.__to_ids(task.output.values()),
            version=task._version,
            skippable=task._skippable,
            properties=task._properties.data.copy(),
        )

    @classmethod
    def _model_to_entity(cls, model: _TaskModel) -> Task:
        """Rebuild a `Task^` entity from its persisted *model*."""
        entity = Task(
            id=TaskId(model.id),
            owner_id=model.owner_id,
            parent_ids=set(model.parent_ids),
            config_id=model.config_id,
            function=_load_fct(model.function_module, model.function_name),
            input=cls.__to_data_nodes(model.input_ids),
            output=cls.__to_data_nodes(model.output_ids),
            version=model.version,
            skippable=model.skippable,
            properties=model.properties,
        )
        # Run any registered version migrations before handing the entity back.
        return _migrate_entity(entity)

    @staticmethod
    def __to_ids(data_nodes):
        # Persist only the identifiers; entities are resolved back on load.
        return [dn.id for dn in data_nodes]

    @staticmethod
    def __to_data_nodes(data_nodes_ids):
        """Resolve ids to data node entities; raise on a missing node."""
        manager = _DataManagerFactory._build_manager()
        resolved = []
        for dn_id in data_nodes_ids:
            data_node = manager._get(dn_id)
            if not data_node:
                raise NonExistingDataNode(dn_id)
            resolved.append(data_node)
        return resolved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._sql_repository import _SQLRepository
from ._task_converter import _TaskConverter
from ._task_model import _TaskModel


class _TaskSQLRepository(_SQLRepository):
    """SQL-backed repository for task entities."""

    def __init__(self):
        # Same converter/model pair as the filesystem repository; only the
        # storage backend differs.
        super().__init__(model_type=_TaskModel, converter=_TaskConverter)
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

from sqlalchemy import JSON, Boolean, Column, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry


@mapper_registry.mapped
@dataclass
class _TaskModel(_BaseModel):
    """Persistence model for `Task^` entities (maps to the "task" table)."""

    __table__ = Table(
        "task",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("owner_id", String),
        Column("parent_ids", JSON),
        Column("config_id", String),
        Column("input_ids", JSON),
        Column("function_name", String),
        Column("function_module", String),
        Column("output_ids", JSON),
        Column("version", String),
        Column("skippable", Boolean),
        Column("properties", JSON),
    )
    id: str
    owner_id: Optional[str]
    parent_ids: List[str]
    config_id: str
    input_ids: List[str]
    function_name: str
    function_module: str
    output_ids: List[str]
    version: str
    skippable: bool
    properties: Dict[str, Any]

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a `_TaskModel` from a plain dictionary (e.g. a decoded JSON file)."""
        return _TaskModel(
            id=data["id"],
            owner_id=data.get("owner_id"),
            parent_ids=_BaseModel._deserialize_attribute(data.get("parent_ids", [])),
            config_id=data["config_id"],
            input_ids=_BaseModel._deserialize_attribute(data["input_ids"]),
            function_name=data["function_name"],
            function_module=data["function_module"],
            output_ids=_BaseModel._deserialize_attribute(data["output_ids"]),
            version=data["version"],
            skippable=data["skippable"],
            properties=_BaseModel._deserialize_attribute(data.get("properties", {})),
        )

    def to_list(self):
        """Return the field values in table-column order."""
        return [
            self.id,
            self.owner_id,
            _BaseModel._serialize_attribute(self.parent_ids),
            self.config_id,
            _BaseModel._serialize_attribute(self.input_ids),
            self.function_name,
            self.function_module,
            _BaseModel._serialize_attribute(self.output_ids),
            self.version,
            self.skippable,
            _BaseModel._serialize_attribute(self.properties),
        ]
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._filesystem_repository import _FileSystemRepository
from ._task_converter import _TaskConverter
from ._task_model import _TaskModel


class _TaskFSRepository(_FileSystemRepository):
    """Filesystem-backed repository for task entities."""

    def __init__(self):
        # Tasks are persisted under the "tasks" directory, serialized through
        # `_TaskConverter` as `_TaskModel` records.
        super().__init__(model_type=_TaskModel, converter=_TaskConverter, dir_name="tasks")
from typing import NewType

# Distinct alias of ``str`` for task identifiers (type-checker only;
# at runtime a TaskId is a plain string).
TaskId = NewType("TaskId", str)
TaskId.__doc__ = """Type that holds a `Task^` identifier."""
from dataclasses import dataclass
from typing import Any, Dict, List

from sqlalchemy import JSON, Boolean, Column, Enum, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from .job_id import JobId
from .status import Status


@mapper_registry.mapped
@dataclass
class _JobModel(_BaseModel):
    """Persistence model for `Job^` entities (maps to the "job" table)."""

    __table__ = Table(
        "job",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("task_id", String),
        Column("status", Enum(Status)),
        Column("force", Boolean),
        Column("submit_id", String),
        Column("submit_entity_id", String),
        Column("creation_date", String),
        Column("subscribers", JSON),
        Column("stacktrace", JSON),
        Column("version", String),
    )
    id: JobId
    task_id: str
    status: Status
    force: bool
    submit_id: str
    submit_entity_id: str
    creation_date: str
    subscribers: List[Dict]
    stacktrace: List[str]
    version: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a `_JobModel` from a plain dictionary (e.g. a decoded JSON file)."""
        return _JobModel(
            id=data["id"],
            task_id=data["task_id"],
            # Status is persisted through its repr; decode it back to the enum.
            status=Status._from_repr(data["status"]),
            force=data["force"],
            submit_id=data["submit_id"],
            submit_entity_id=data["submit_entity_id"],
            creation_date=data["creation_date"],
            subscribers=_BaseModel._deserialize_attribute(data["subscribers"]),
            stacktrace=_BaseModel._deserialize_attribute(data["stacktrace"]),
            version=data["version"],
        )

    def to_list(self):
        """Return the field values in table-column order."""
        return [
            self.id,
            self.task_id,
            repr(self.status),
            self.force,
            self.submit_id,
            self.submit_entity_id,
            self.creation_date,
            _BaseModel._serialize_attribute(self.subscribers),
            _BaseModel._serialize_attribute(self.stacktrace),
            self.version,
        ]
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._filesystem_repository import _FileSystemRepository
from ._job_converter import _JobConverter
from ._job_model import _JobModel


class _JobFSRepository(_FileSystemRepository):
    """Filesystem-backed repository for job entities."""

    def __init__(self):
        # Jobs are persisted under the "jobs" directory, serialized through
        # `_JobConverter` as `_JobModel` records.
        super().__init__(model_type=_JobModel, converter=_JobConverter, dir_name="jobs")
import uuid
from typing import Callable, Iterable, List, Optional, Union

from .._manager._manager import _Manager
from .._repository._abstract_repository import _AbstractRepository
from .._version._version_manager_factory import _VersionManagerFactory
from .._version._version_mixin import _VersionMixin
from ..exceptions.exceptions import JobNotDeletedException
from ..notification import EventEntityType, EventOperation, Notifier, _make_event
from ..task.task import Task
from .job import Job
from .job_id import JobId


class _JobManager(_Manager[Job], _VersionMixin):
    """Manager in charge of the lifecycle of `Job^` entities."""

    _ENTITY_NAME = Job.__name__
    _ID_PREFIX = "JOB_"
    _repository: _AbstractRepository
    _EVENT_ENTITY_TYPE = EventEntityType.JOB

    @classmethod
    def _get_all(cls, version_number: Optional[str] = None) -> List[Job]:
        """
        Returns all entities.
        """
        version_filters = cls._build_filters_with_version(version_number)
        return cls._repository._load_all(version_filters)

    @classmethod
    def _create(
        cls, task: Task, callbacks: Iterable[Callable], submit_id: str, submit_entity_id: str, force=False
    ) -> Job:
        """Create, persist, announce, and return a new `Job^` for *task*."""
        latest_version = _VersionManagerFactory._build_manager()._get_latest_version()
        job = Job(
            id=JobId(f"{Job._ID_PREFIX}_{task.config_id}_{uuid.uuid4()}"),
            task=task,
            submit_id=submit_id,
            submit_entity_id=submit_entity_id,
            force=force,
            version=latest_version,
        )
        cls._set(job)
        Notifier.publish(_make_event(job, EventOperation.CREATION))
        # Status-change callbacks are attached after the job is persisted and
        # its creation event published.
        job._on_status_change(*callbacks)
        return job

    @classmethod
    def _delete(cls, job: Job, force=False):
        """Delete *job* if it is finished (or if *force* is set); raise otherwise.

        Raises:
            JobNotDeletedException: If the job is still running and not forced.
        """
        if not (job.is_finished() or force):
            err = JobNotDeletedException(job.id)
            cls._logger.warning(err)
            raise err
        super()._delete(job.id)
        # Imported lazily to avoid a circular import with the orchestrator.
        from .._orchestrator._dispatcher._job_dispatcher import _JobDispatcher

        _JobDispatcher._pop_dispatched_process(job.id)

    @classmethod
    def _cancel(cls, job: Union[str, Job]):
        """Cancel *job* (given as an entity or an id) through the orchestrator."""
        entity = cls._get(job) if isinstance(job, str) else job
        # Imported lazily to avoid a circular import with the orchestrator.
        from .._orchestrator._orchestrator_factory import _OrchestratorFactory

        _OrchestratorFactory._build_orchestrator().cancel_job(entity)

    @classmethod
    def _get_latest(cls, task: Task) -> Optional[Job]:
        """Return the most recent job that ran *task*, or None when there is none."""
        jobs_of_task = [job for job in cls._get_all() if task in job]
        if not jobs_of_task:
            return None
        return max(jobs_of_task)

    @classmethod
    def _is_deletable(cls, job: Union[Job, JobId]) -> bool:
        """True when *job* (entity or id) is finished and may therefore be deleted."""
        if isinstance(job, str):
            job = cls._get(job)
        return bool(job.is_finished())
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import NewType

# Distinct alias of ``str`` for job identifiers (type-checker only;
# at runtime a JobId is a plain string).
JobId = NewType("JobId", str)
JobId.__doc__ = """Type that holds a `Job^` identifier."""
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._sql_repository import _SQLRepository
from ._job_converter import _JobConverter
from ._job_model import _JobModel


class _JobSQLRepository(_SQLRepository):
    """SQL-backed repository for job entities."""

    def __init__(self):
        # Same converter/model pair as the filesystem repository; only the
        # storage backend differs.
        super().__init__(model_type=_JobModel, converter=_JobConverter)
from datetime import datetime
from typing import List

from .._repository._abstract_converter import _AbstractConverter
from ..common._utils import _fcts_to_dict, _load_fct
from ..exceptions import InvalidSubscriber
from ..job._job_model import _JobModel
from ..job.job import Job
from ..task._task_manager_factory import _TaskManagerFactory


class _JobConverter(_AbstractConverter):
    """Converts between `Job^` entities and their `_JobModel` representation."""

    @classmethod
    def _entity_to_model(cls, job: Job) -> _JobModel:
        """Serialize *job* into a `_JobModel`."""
        return _JobModel(
            job.id,
            job._task.id,
            job._status,
            job._force,
            job.submit_id,
            job.submit_entity_id,
            job._creation_date.isoformat(),
            # Subscriber callbacks are persisted as module/name dictionaries.
            cls.__serialize_subscribers(job._subscribers),
            job._stacktrace,
            version=job._version,
        )

    @classmethod
    def _model_to_entity(cls, model: _JobModel) -> Job:
        """Rebuild a `Job^` entity from its persisted *model*.

        Raises:
            InvalidSubscriber: If a persisted subscriber cannot be re-imported.
        """
        task_repository = _TaskManagerFactory._build_manager()._repository
        job = Job(
            id=model.id,
            task=task_repository._load(model.task_id),
            submit_id=model.submit_id,
            submit_entity_id=model.submit_entity_id,
            version=model.version,
        )
        # Private state is restored directly on the entity.
        job._status = model.status  # type: ignore
        job._force = model.force  # type: ignore
        job._creation_date = datetime.fromisoformat(model.creation_date)  # type: ignore
        for it in model.subscribers:
            try:
                fct_module, fct_name = it.get("fct_module"), it.get("fct_name")
                job._subscribers.append(_load_fct(fct_module, fct_name))  # type: ignore
            except AttributeError:
                raise InvalidSubscriber(f"The subscriber function {it.get('fct_name')} cannot be loaded.")
        job._stacktrace = model.stacktrace
        return job

    @staticmethod
    def __serialize_subscribers(subscribers: List) -> List:
        return _fcts_to_dict(subscribers)
from ..common._repr_enum import _ReprEnum


class Status(_ReprEnum):
    """Execution status of a `Job^`.

    It is implemented as an enumeration. The possible values are:

    - `SUBMITTED`: The job has been submitted for execution but not processed yet by the orchestrator.
    - `BLOCKED`: The job is blocked because its input data nodes are not ready yet. It is waiting for
        the completion of another `Job^`.
    - `PENDING`: The job has been enqueued by the orchestrator. It is waiting for an executor to be
        available for its execution.
    - `RUNNING`: The job is currently executed by a dedicated executor.
    - `CANCELED`: The job has been submitted but its execution has been canceled.
    - `FAILED`: The job raised an exception during its execution.
    - `COMPLETED`: The job has successfully been executed.
    - `SKIPPED`: The job has not been executed because its outputs were already computed.
    - `ABANDONED`: The job has not been executed because it depends on a job that could not complete
        (cancelled, failed, or abandoned).
    """

    # NOTE: the numeric values are persisted; they must never be renumbered.
    SUBMITTED = 1
    BLOCKED = 2
    PENDING = 3
    RUNNING = 4
    CANCELED = 5
    FAILED = 6
    COMPLETED = 7
    SKIPPED = 8
    ABANDONED = 9
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._job_fs_repository import _JobFSRepository
from ._job_manager import _JobManager
from ._job_sql_repository import _JobSQLRepository


class _JobManagerFactory(_ManagerFactory):
    """Factory wiring the job manager class to its repository implementation."""

    __REPOSITORY_MAP = {"default": _JobFSRepository, "sql": _JobSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_JobManager]:  # type: ignore
        """Return the `_JobManager` class to use, with its `_repository` attached.

        When the enterprise edition is installed, its manager and repository
        factory are loaded dynamically; otherwise the community classes are used.
        """
        if not cls._using_enterprise():
            manager, repository_builder = _JobManager, cls._build_repository
        else:
            manager = _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager", "_JobManager")  # type: ignore
            repository_builder = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager_factory", "_JobManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_builder()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the FS or SQL job repository according to configuration."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from typing import NewType

# Distinct alias over `str` so type checkers can tell sequence identifiers
# apart from arbitrary strings; at runtime a SequenceId is a plain str.
SequenceId = NewType("SequenceId", str)
SequenceId.__doc__ = """Type that holds a `Sequence^` identifier."""
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Dict

from .._repository._abstract_converter import _AbstractConverter
from ..common import _utils
from ..task.task import Task
from .sequence import Sequence


class _SequenceConverter(_AbstractConverter):
    """Converts `Sequence` entities to the plain-dict model used for persistence."""

    # Keys of the serialized sequence model. `_entity_to_model` below uses these
    # constants so the schema is defined in a single place.
    _SEQUENCE_MODEL_ID_KEY = "id"
    _SEQUENCE_MODEL_OWNER_ID_KEY = "owner_id"
    _SEQUENCE_MODEL_PARENT_IDS_KEY = "parent_ids"
    _SEQUENCE_MODEL_PROPERTIES_KEY = "properties"
    _SEQUENCE_MODEL_TASKS_KEY = "tasks"
    _SEQUENCE_MODEL_SUBSCRIBERS_KEY = "subscribers"
    _SEQUENCE_MODEL_VERSION_KEY = "version"

    @classmethod
    def _entity_to_model(cls, sequence: Sequence) -> Dict:
        """Serialize a `Sequence` into its model dictionary."""
        return {
            cls._SEQUENCE_MODEL_ID_KEY: sequence.id,
            cls._SEQUENCE_MODEL_OWNER_ID_KEY: sequence.owner_id,
            cls._SEQUENCE_MODEL_PARENT_IDS_KEY: list(sequence._parent_ids),
            cls._SEQUENCE_MODEL_PROPERTIES_KEY: sequence._properties.data,
            cls._SEQUENCE_MODEL_TASKS_KEY: cls.__to_task_ids(sequence._tasks),
            cls._SEQUENCE_MODEL_SUBSCRIBERS_KEY: _utils._fcts_to_dict(sequence._subscribers),
            cls._SEQUENCE_MODEL_VERSION_KEY: sequence._version,
        }

    @staticmethod
    def __to_task_ids(tasks):
        # Accepts a mix of Task entities and raw task ids; entities are reduced to their id.
        return [t.id if isinstance(t, Task) else t for t in tasks]
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._sequence_manager import _SequenceManager


class _SequenceManagerFactory(_ManagerFactory):
    """Factory returning the sequence manager implementation to use."""

    @classmethod
    def _build_manager(cls) -> Type[_SequenceManager]:  # type: ignore
        """Return the enterprise sequence manager when available, else the community one."""
        if not cls._using_enterprise():
            return _SequenceManager  # type: ignore
        # Enterprise edition ships its own manager class; load it dynamically.
        return _load_fct(
            cls._TAIPY_ENTERPRISE_CORE_MODULE + ".sequence._sequence_manager", "_SequenceManager"
        )  # type: ignore
from ..exceptions.exceptions import InvalidExposedType


class _AbstractTabularDataNode(object):
    """Abstract base class for tabular data node implementations (CSVDataNode,
    ParquetDataNode, ExcelDataNode, SQLTableDataNode and SQLDataNode) that are
    tabular representable."""

    @staticmethod
    def _check_exposed_type(exposed_type, valid_string_exposed_types):
        """Raise `InvalidExposedType` if *exposed_type* is a string outside the supported set.

        Non-string exposed types (e.g. a custom class) are accepted unchecked.
        """
        if not isinstance(exposed_type, str):
            return
        if exposed_type in valid_string_exposed_types:
            return
        supported = ", ".join(valid_string_exposed_types)
        raise InvalidExposedType(f"Invalid string exposed type {exposed_type}. Supported values are {supported}")
from enum import Enum


class Operator(Enum):
    """Enumeration of operators for Data Node filtering.

    The possible values are:

    - `EQUAL`
    - `NOT_EQUAL`
    - `LESS_THAN`
    - `LESS_OR_EQUAL`
    - `GREATER_THAN`
    - `GREATER_OR_EQUAL`
    """

    # Numeric values are arbitrary identifiers; only the member names are meaningful.
    EQUAL = 1
    NOT_EQUAL = 2
    LESS_THAN = 3
    LESS_OR_EQUAL = 4
    GREATER_THAN = 5
    GREATER_OR_EQUAL = 6


class JoinOperator(Enum):
    """Enumeration of join operators for Data Node filtering.

    The possible values are `AND` and `OR`, used to combine several filter
    conditions.
    """

    AND = 1
    OR = 2
from .csv import CSVDataNode from .data_node import DataNode from .excel import ExcelDataNode from .generic import GenericDataNode from .in_memory import InMemoryDataNode from .json import JSONDataNode from .mongo import MongoCollectionDataNode from .operator import JoinOperator, Operator from .parquet import ParquetDataNode from .pickle import PickleDataNode from .sql import SQLDataNode from .sql_table import SQLTableDataNode
import pathlib


class _AbstractFileDataNode(object):
    """Abstract base class for data node implementations (CSVDataNode,
    ParquetDataNode, ExcelDataNode, PickleDataNode and JSONDataNode) that are
    file based."""

    # Maps a storage type to the file extension used for its data files.
    __EXTENSION_MAP = {"csv": "csv", "excel": "xlsx", "parquet": "parquet", "pickle": "p", "json": "json"}

    def _build_path(self, storage_type):
        """Return the default on-disk path for this node, creating the storage folder if needed."""
        # Imported locally to avoid a circular import at module load time.
        from taipy.config.config import Config

        folder = f"{storage_type}s"
        dir_path = pathlib.Path(Config.core.storage_folder) / folder
        # mkdir(exist_ok=True) is a no-op when the folder already exists, so the
        # former `exists()` pre-check was redundant (and race-prone).
        dir_path.mkdir(parents=True, exist_ok=True)
        return dir_path / f"{self.id}.{self.__EXTENSION_MAP.get(storage_type)}"
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._data_fs_repository import _DataFSRepository
from ._data_manager import _DataManager
from ._data_sql_repository import _DataSQLRepository


class _DataManagerFactory(_ManagerFactory):
    """Factory wiring the data-node manager class to its repository implementation."""

    __REPOSITORY_MAP = {"default": _DataFSRepository, "sql": _DataSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_DataManager]:  # type: ignore
        """Return the `_DataManager` class to use, with its `_repository` attached."""
        if cls._using_enterprise():
            # Enterprise edition provides both its own manager and repository factory.
            manager = _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager", "_DataManager")  # type: ignore
            enterprise_factory = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager_factory", "_DataManagerFactory"
            )  # type: ignore
            manager._repository = enterprise_factory._build_repository()  # type: ignore
        else:
            manager = _DataManager
            manager._repository = cls._build_repository()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the FS or SQL data-node repository according to configuration."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._sql_repository import _SQLRepository from ._data_converter import _DataNodeConverter from ._data_model import _DataNodeModel class _DataSQLRepository(_SQLRepository): def __init__(self): super().__init__(model_type=_DataNodeModel, converter=_DataNodeConverter)
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

from sqlalchemy import JSON, Boolean, Column, Enum, Float, String, Table, UniqueConstraint

from taipy.config.common.scope import Scope

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from .data_node_id import Edit


@mapper_registry.mapped
@dataclass
class _DataNodeModel(_BaseModel):
    """Persistence model of a data node, mapped to the `data_node` SQL table."""

    __table__ = Table(
        "data_node",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("config_id", String),
        Column("scope", Enum(Scope)),
        Column("storage_type", String),
        Column("owner_id", String),
        Column("parent_ids", JSON),
        Column("last_edit_date", String),
        Column("edits", JSON),
        Column("version", String),
        Column("validity_days", Float),
        Column("validity_seconds", Float),
        Column("edit_in_progress", Boolean),
        Column("editor_id", String),
        Column("editor_expiration_date", String),
        Column("data_node_properties", JSON),
    )
    # A (config_id, owner_id) pair identifies at most one data node.
    __table_args__ = (UniqueConstraint("config_id", "owner_id", name="_config_owner_uc"),)
    id: str
    config_id: str
    scope: Scope
    storage_type: str
    owner_id: Optional[str]
    parent_ids: List[str]
    last_edit_date: Optional[str]  # ISO-formatted datetime string, or None if never edited
    edits: List[Edit]
    version: str
    validity_days: Optional[float]
    validity_seconds: Optional[float]
    edit_in_progress: bool
    editor_id: Optional[str]
    editor_expiration_date: Optional[str]
    data_node_properties: Dict[str, Any]

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a `_DataNodeModel` from a plain dict (e.g. a deserialized JSON record)."""
        return _DataNodeModel(
            id=data["id"],
            config_id=data["config_id"],
            scope=Scope._from_repr(data["scope"]),
            storage_type=data["storage_type"],
            owner_id=data.get("owner_id"),
            parent_ids=data.get("parent_ids", []),
            last_edit_date=data.get("last_edit_date"),
            edits=_BaseModel._deserialize_attribute(data["edits"]),
            version=data["version"],
            validity_days=data["validity_days"],
            validity_seconds=data["validity_seconds"],
            edit_in_progress=bool(data.get("edit_in_progress", False)),
            editor_id=data.get("editor_id", None),
            editor_expiration_date=data.get("editor_expiration_date"),
            data_node_properties=_BaseModel._deserialize_attribute(data["data_node_properties"]),
        )

    def to_list(self):
        """Return the field values as a list, in column order (scope serialized via repr)."""
        return [
            self.id,
            self.config_id,
            repr(self.scope),
            self.storage_type,
            self.owner_id,
            _BaseModel._serialize_attribute(self.parent_ids),
            self.last_edit_date,
            _BaseModel._serialize_attribute(self.edits),
            self.version,
            self.validity_days,
            self.validity_seconds,
            self.edit_in_progress,
            self.editor_id,
            self.editor_expiration_date,
            _BaseModel._serialize_attribute(self.data_node_properties),
        ]
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Set

from taipy.config.common.scope import Scope

from .._version._version_manager_factory import _VersionManagerFactory
from .data_node import DataNode
from .data_node_id import DataNodeId, Edit

# Process-wide storage shared by every InMemoryDataNode instance, keyed by node id.
in_memory_storage: Dict[str, Any] = {}


class InMemoryDataNode(DataNode):
    """Data Node stored in memory.

    Warning:
        This Data Node implementation is not compatible with a parallel execution of taipy tasks,
        but only with a task executor in development mode. The purpose of `InMemoryDataNode` is
        to be used for development or debugging.

    Attributes:
        config_id (str): Identifier of the data node configuration. It must be a valid Python
            identifier.
        scope (Scope^): The scope of this data node.
        id (str): The unique identifier of this data node.
        owner_id (str): The identifier of the owner (sequence_id, scenario_id, cycle_id) or
            `None`.
        parent_ids (Optional[Set[str]]): The identifiers of the parent tasks or `None`.
        last_edit_date (datetime): The date and time of the last modification.
        edits (List[Edit^]): The ordered list of edits for that job.
        version (str): The string indicates the application version of the data node to
            instantiate. If not provided, the current version is used.
        validity_period (Optional[timedelta]): The duration implemented as a timedelta since the
            last edit date for which the data node can be considered up-to-date. Once the
            validity period has passed, the data node is considered stale and relevant tasks
            will run even if they are skippable (see the
            [Task management page](../core/entities/task-mgt.md) for more details).
            If _validity_period_ is set to `None`, the data node is always up-to-date.
        edit_in_progress (bool): True if a task computing the data node has been submitted and
            not completed yet. False otherwise.
        editor_id (Optional[str]): The identifier of the user who is currently editing the data
            node.
        editor_expiration_date (Optional[datetime]): The expiration date of the editor lock.
        properties (dict[str, Any]): A dictionary of additional properties. When creating an
            _In Memory_ data node, if the _properties_ dictionary contains a _"default_data"_
            entry, the data node is automatically written with the corresponding
            _"default_data"_ value.
    """

    __STORAGE_TYPE = "in_memory"
    # Property key whose value, when present, is written to the node at creation time.
    __DEFAULT_DATA_VALUE = "default_data"
    _REQUIRED_PROPERTIES: List[str] = []

    def __init__(
        self,
        config_id: str,
        scope: Scope,
        id: Optional[DataNodeId] = None,
        owner_id: Optional[str] = None,
        parent_ids: Optional[Set[str]] = None,
        last_edit_date: Optional[datetime] = None,
        edits: Optional[List[Edit]] = None,
        version: Optional[str] = None,
        validity_period: Optional[timedelta] = None,
        edit_in_progress: bool = False,
        editor_id: Optional[str] = None,
        editor_expiration_date: Optional[datetime] = None,
        properties=None,
    ):
        if properties is None:
            properties = {}
        # Pop the default data so it is not forwarded to the base class as a regular property.
        default_value = properties.pop(self.__DEFAULT_DATA_VALUE, None)
        super().__init__(
            config_id,
            scope,
            id,
            owner_id,
            parent_ids,
            last_edit_date,
            edits,
            version or _VersionManagerFactory._build_manager()._get_latest_version(),
            validity_period,
            edit_in_progress,
            editor_id,
            editor_expiration_date,
            **properties
        )
        # Only write the default data the first time this node id is seen in the
        # process-wide store, and record the write as an edit.
        if default_value is not None and self.id not in in_memory_storage:
            self._write(default_value)
            self._last_edit_date = datetime.now()
            self._edits.append(
                Edit(
                    {
                        "timestamp": self._last_edit_date,
                        "writer_identifier": "TAIPY",
                        "comments": "Default data written.",
                    }
                )
            )

        self._TAIPY_PROPERTIES.update({self.__DEFAULT_DATA_VALUE})

    @classmethod
    def storage_type(cls) -> str:
        """Return the storage type identifier: "in_memory"."""
        return cls.__STORAGE_TYPE

    def _read(self):
        # Returns None when nothing has been written for this node id yet.
        return in_memory_storage.get(self.id)

    def _write(self, data):
        in_memory_storage[self.id] = data
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._filesystem_repository import _FileSystemRepository from ._data_converter import _DataNodeConverter from ._data_model import _DataNodeModel class _DataFSRepository(_FileSystemRepository): def __init__(self): super().__init__(model_type=_DataNodeModel, converter=_DataNodeConverter, dir_name="data_nodes")
from typing import Any, Dict, NewType

# Distinct alias over `str` for data node identifiers; at runtime a DataNodeId
# is a plain str, the alias only helps static type checking.
DataNodeId = NewType("DataNodeId", str)
DataNodeId.__doc__ = """Type that holds a `DataNode^` identifier."""

# Free-form dict describing one modification of a data node (typical keys
# include "timestamp", "writer_identifier" and "comments").
Edit = NewType("Edit", Dict[str, Any])
Edit.__doc__ = """Type that holds a `DataNode^` edit information."""
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._filesystem_repository import _FileSystemRepository from ._submission_converter import _SubmissionConverter from ._submission_model import _SubmissionModel class _SubmissionFSRepository(_FileSystemRepository): def __init__(self): super().__init__(model_type=_SubmissionModel, converter=_SubmissionConverter, dir_name="submission")
from typing import NewType

# Distinct alias over `str` so type checkers can tell submission identifiers
# apart from arbitrary strings; at runtime a SubmissionId is a plain str.
SubmissionId = NewType("SubmissionId", str)
SubmissionId.__doc__ = """Type that holds a `Submission^` identifier."""
from ..common._repr_enum import _ReprEnum


class SubmissionStatus(_ReprEnum):
    """Execution status of a `Submission^`.

    It is implemented as an enumeration.

    The possible values are:

    - `SUBMITTED`: A `SUBMITTED` submission has been submitted for execution but not processed
        yet by the orchestrator.

    - `UNDEFINED`: An `UNDEFINED` submission's jobs have been submitted for execution but got
        some undefined status changes.

    - `PENDING`: A `PENDING` submission has been enqueued by the orchestrator. It is waiting for
        an executor to be available for its execution.

    - `BLOCKED`: A `BLOCKED` submission has been blocked because it has been finished with a job
        being blocked.

    - `RUNNING`: A `RUNNING` submission has its jobs currently being executed.

    - `CANCELED`: A `CANCELED` submission has been submitted but its execution has been canceled.

    - `FAILED`: A `FAILED` submission has a job failed during its execution.

    - `COMPLETED`: A `COMPLETED` submission has successfully been executed.
    """

    # NOTE(review): unlike job `Status`, values start at 0 here; they are persisted
    # with submissions, so do not renumber.
    SUBMITTED = 0
    UNDEFINED = 1
    BLOCKED = 2
    PENDING = 3
    RUNNING = 4
    CANCELED = 5
    FAILED = 6
    COMPLETED = 7
from dataclasses import dataclass
from typing import Any, Dict, List, Union

from sqlalchemy import JSON, Column, Enum, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from ..job.job_id import JobId
from .submission_status import SubmissionStatus


@mapper_registry.mapped
@dataclass
class _SubmissionModel(_BaseModel):
    """Persistence model of a submission, mapped to the `submission` SQL table."""

    __table__ = Table(
        "submission",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("entity_id", String),
        Column("entity_type", String),
        Column("job_ids", JSON),
        Column("creation_date", String),
        Column("submission_status", Enum(SubmissionStatus)),
        Column("version", String),
    )
    id: str
    entity_id: str
    entity_type: str
    job_ids: Union[List[JobId], List]
    creation_date: str  # ISO-formatted datetime string
    submission_status: SubmissionStatus
    version: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a `_SubmissionModel` from a plain dict (e.g. a deserialized JSON record)."""
        return _SubmissionModel(
            id=data["id"],
            entity_id=data["entity_id"],
            entity_type=data["entity_type"],
            job_ids=_BaseModel._deserialize_attribute(data["job_ids"]),
            creation_date=data["creation_date"],
            submission_status=SubmissionStatus._from_repr(data["submission_status"]),
            version=data["version"],
        )

    def to_list(self):
        """Return the field values as a list, in column order (status serialized via repr)."""
        return [
            self.id,
            self.entity_id,
            self.entity_type,
            _BaseModel._serialize_attribute(self.job_ids),
            self.creation_date,
            repr(self.submission_status),
            self.version,
        ]
from datetime import datetime

from .._repository._abstract_converter import _AbstractConverter
from ..job.job import Job, JobId
from ..submission._submission_model import _SubmissionModel
from ..submission.submission import Submission
from .submission import SubmissionId


class _SubmissionConverter(_AbstractConverter):
    """Converts between `Submission` entities and `_SubmissionModel` records."""

    @classmethod
    def _entity_to_model(cls, submission: Submission) -> _SubmissionModel:
        """Serialize a `Submission` into its persistence model."""
        # Jobs may be held either as Job entities or as raw ids; normalize to JobId.
        job_ids = []
        for job in submission._jobs:
            job_ids.append(job.id if isinstance(job, Job) else JobId(str(job)))
        return _SubmissionModel(
            id=submission.id,
            entity_id=submission._entity_id,
            entity_type=submission.entity_type,
            job_ids=job_ids,
            creation_date=submission._creation_date.isoformat(),
            submission_status=submission._submission_status,
            version=submission._version,
        )

    @classmethod
    def _model_to_entity(cls, model: _SubmissionModel) -> Submission:
        """Rebuild a `Submission` entity from its persistence model."""
        return Submission(
            entity_id=model.entity_id,
            entity_type=model.entity_type,
            id=SubmissionId(model.id),
            jobs=model.job_ids,
            creation_date=datetime.fromisoformat(model.creation_date),
            submission_status=model.submission_status,
            version=model.version,
        )
from typing import List, Optional, Union

from .._manager._manager import _Manager
from .._repository._abstract_repository import _AbstractRepository
from .._version._version_mixin import _VersionMixin
from ..notification import EventEntityType, EventOperation, Notifier, _make_event
from ..scenario.scenario import Scenario
from ..sequence.sequence import Sequence
from ..submission.submission import Submission
from ..task.task import Task


class _SubmissionManager(_Manager[Submission], _VersionMixin):
    """Manager in charge of creating and retrieving `Submission` entities."""

    _ENTITY_NAME = Submission.__name__
    _repository: _AbstractRepository
    _EVENT_ENTITY_TYPE = EventEntityType.SUBMISSION

    @classmethod
    def _get_all(cls, version_number: Optional[str] = None) -> List[Submission]:
        """Return every submission matching the given version filter."""
        return cls._repository._load_all(cls._build_filters_with_version(version_number))

    @classmethod
    def _create(cls, entity_id: str, entity_type: str) -> Submission:
        """Create, persist and publish a new `Submission` for the given entity."""
        submission = Submission(entity_id=entity_id, entity_type=entity_type)
        cls._set(submission)
        Notifier.publish(_make_event(submission, EventOperation.CREATION))
        return submission

    @classmethod
    def _get_latest(cls, entity: Union[Scenario, Sequence, Task]) -> Optional[Submission]:
        """Return the latest submission of *entity* (by Submission ordering), or None."""
        entity_id = entity if isinstance(entity, str) else entity.id
        candidates = [s for s in cls._get_all() if s.entity_id == entity_id]
        # max() relies on Submission's comparison operators to pick the latest one.
        return max(candidates) if candidates else None
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ._submission_fs_repository import _SubmissionFSRepository
from ._submission_manager import _SubmissionManager
from ._submission_sql_repository import _SubmissionSQLRepository


class _SubmissionManagerFactory(_ManagerFactory):
    """Factory wiring the submission manager class to its repository implementation."""

    __REPOSITORY_MAP = {"default": _SubmissionFSRepository, "sql": _SubmissionSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_SubmissionManager]:  # type: ignore
        """Return the `_SubmissionManager` class to use, with its `_repository` attached."""
        if cls._using_enterprise():
            # Enterprise edition provides its own manager and repository factory.
            manager = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".submission._submission_manager", "_SubmissionManager"
            )  # type: ignore
            repository = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".submission._submission_manager_factory",
                "_SubmissionManagerFactory",
            )._build_repository()  # type: ignore
        else:
            manager = _SubmissionManager
            repository = cls._build_repository()
        manager._repository = repository  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the FS or SQL submission repository according to configuration."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._sql_repository import _SQLRepository from ._submission_converter import _SubmissionConverter from ._submission_model import _SubmissionModel class _SubmissionSQLRepository(_SQLRepository): def __init__(self): super().__init__(model_type=_SubmissionModel, converter=_SubmissionConverter)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import logging.config import os import sys class _TaipyLogger: _ENVIRONMENT_VARIABLE_NAME_WITH_LOGGER_CONFIG_PATH = "TAIPY_LOGGER_CONFIG_PATH" __logger = None @classmethod def _get_logger(cls): cls._ENVIRONMENT_VARIABLE_NAME_WITH_LOGGER_CONFIG_PATH = "TAIPY_LOGGER_CONFIG_PATH" if cls.__logger: return cls.__logger if config_filename := os.environ.get(cls._ENVIRONMENT_VARIABLE_NAME_WITH_LOGGER_CONFIG_PATH): logging.config.fileConfig(config_filename) cls.__logger = logging.getLogger("Taipy") else: cls.__logger = logging.getLogger("Taipy") cls.__logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter("[%(asctime)s][%(name)s][%(levelname)s] %(message)s", "%Y-%m-%d %H:%M:%S") ch.setFormatter(formatter) cls.__logger.addHandler(ch) return cls.__logger
import json import os def _get_version(): with open(f"{os.path.dirname(os.path.abspath(__file__))}{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" return version_string
from .common.frequency import Frequency from .common.scope import Scope from .config import Config
"""# Taipy Config The Taipy Config package is a Python library designed to configure a Taipy application. The main entrypoint is the `Config^` singleton class. It exposes some methods to configure the Taipy application and some attributes to retrieve the configuration values. """ from typing import List from ._init import Config from .checker.issue import Issue from .checker.issue_collector import IssueCollector from .global_app.global_app_config import GlobalAppConfig from .section import Section from .unique_section import UniqueSection from .version import _get_version __version__ = _get_version() def _config_doc(func): def func_with_doc(section, attribute_name, default, configuration_methods, add_to_unconflicted_sections=False): import os if os.environ.get("GENERATING_TAIPY_DOC", None) and os.environ["GENERATING_TAIPY_DOC"] == "true": with open("config_doc.txt", "a") as f: from inspect import signature for exposed_configuration_method, configuration_method in configuration_methods: annotation = " @staticmethod\n" sign = " def " + exposed_configuration_method + str(signature(configuration_method)) + ":\n" doc = ' """' + configuration_method.__doc__ + '"""\n' content = " pass\n\n" f.write(annotation + sign + doc + content) return func(section, attribute_name, default, configuration_methods, add_to_unconflicted_sections) return func_with_doc @_config_doc def _inject_section( section_clazz, attribute_name: str, default: Section, configuration_methods: List[tuple], add_to_unconflicted_sections: bool = False, ): Config._register_default(default) if issubclass(section_clazz, UniqueSection): setattr(Config, attribute_name, Config.unique_sections[section_clazz.name]) elif issubclass(section_clazz, Section): setattr(Config, attribute_name, Config.sections[section_clazz.name]) else: raise TypeError if add_to_unconflicted_sections: Config._comparator._add_unconflicted_section(section_clazz.name) # type: ignore for exposed_configuration_method, configuration_method in 
configuration_methods: setattr(Config, exposed_configuration_method, configuration_method)
from abc import abstractmethod
from typing import Any, Dict, Optional

from .common._config_blocker import _ConfigBlocker
from .common._template_handler import _TemplateHandler as _tpl
from .common._validate_id import _validate_id


class Section:
    """A Section as a consistent part of the Config.

    A section is defined by the section name (representing the type of objects that are
    configured) and a section id.
    """

    _DEFAULT_KEY = "default"
    _ID_KEY = "id"

    def __init__(self, id, **properties):
        # `id` must be a valid Python identifier; `_validate_id` enforces this.
        self.id = _validate_id(id)
        self._properties = properties or dict()

    @abstractmethod
    def __copy__(self):
        raise NotImplementedError

    @property
    @abstractmethod
    def name(self):
        # Section type name; subclasses must provide it.
        raise NotImplementedError

    @abstractmethod
    def _clean(self):
        raise NotImplementedError

    @abstractmethod
    def _to_dict(self):
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def _from_dict(cls, config_as_dict: Dict[str, Any], id, config):
        raise NotImplementedError

    @abstractmethod
    def _update(self, config_as_dict, default_section=None):
        raise NotImplementedError

    def __getattr__(self, item: str) -> Optional[Any]:
        # Fallback attribute access: an unknown attribute resolves to the property
        # of the same name (with template placeholders replaced), or None.
        return self._replace_templates(self._properties.get(item, None))

    @property
    def properties(self):
        # Expose the properties with template placeholders resolved by _TemplateHandler.
        return {k: _tpl._replace_templates(v) for k, v in self._properties.items()}

    @properties.setter  # type: ignore
    @_ConfigBlocker._check()
    def properties(self, val):
        # Assignment is guarded by _ConfigBlocker._check().
        self._properties = val

    def _replace_templates(self, value):
        return _tpl._replace_templates(value)
#!/usr/bin/env python """The setup script.""" import json import os from setuptools import find_namespace_packages, find_packages, setup with open("README.md") as readme_file: readme = readme_file.read() with open(f"src{os.sep}taipy{os.sep}config{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" requirements = ["toml>=0.10,<0.11", "deepdiff>=6.2,<6.3"] test_requirements = ["pytest>=3.8"] setup( author="Avaiga", author_email="dev@taipy.io", python_requires=">=3.8", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", ], description="A Taipy package dedicated to easily configure a Taipy application.", install_requires=requirements, long_description=readme, long_description_content_type="text/markdown", include_package_data=True, license="Apache License 2.0", keywords="taipy-config", name="taipy-config", package_dir={"": "src"}, packages=find_namespace_packages(where="src") + find_packages(include=["taipy", "taipy.config", "taipy.config.*", "taipy.logger", "taipy.logger.*"]), test_suite="tests", tests_require=test_requirements, url="https://github.com/avaiga/taipy-config", version=version_string, zip_safe=False, )
from abc import ABC

from .common._validate_id import _validate_id
from .section import Section


class UniqueSection(Section, ABC):
    """A UniqueSection is a configuration `Section^` that can have only one instance.

    A UniqueSection is only defined by the section name: the section id passed to
    the base class is the subclass's `name` itself.
    """

    # NOTE(review): `_validate_id` appears unused here (the base Section already
    # validates the id); confirm before removing the import.

    def __init__(self, **properties):
        super().__init__(self.name, **properties)
from copy import copy
from typing import Dict

from .global_app.global_app_config import GlobalAppConfig
from .section import Section
from .unique_section import UniqueSection


class _Config:
    """In-memory representation of a full configuration.

    Holds the global configuration, the unique sections (a single instance per
    section name), and the non-unique sections (several instances per section
    name, keyed by their section id).
    """

    # Key under which the default (template) section of each section type is stored.
    DEFAULT_KEY = "default"

    def __init__(self):
        # Non-unique sections: {section_name: {section_id: Section}}.
        self._sections: Dict[str, Dict[str, Section]] = {}
        # Unique sections: {section_name: UniqueSection}.
        self._unique_sections: Dict[str, UniqueSection] = {}
        self._global_config: GlobalAppConfig = GlobalAppConfig()

    def _clean(self):
        # Reset the global config and every held section to its pristine state.
        self._global_config._clean()
        for unique_section in self._unique_sections.values():
            unique_section._clean()
        for sections in self._sections.values():
            for section in sections.values():
                section._clean()

    @classmethod
    def _default_config(cls):
        """Build a `_Config` holding only the default global configuration."""
        config = _Config()
        config._global_config = GlobalAppConfig.default_config()
        return config

    def _update(self, other_config):
        """Merge `other_config` into this configuration, in place.

        Sections present on both sides are updated from the other side's dict
        representation; sections only present in `other_config` are copied over.
        """
        self._global_config._update(other_config._global_config._to_dict())
        if other_config._unique_sections:
            for section_name, other_section in other_config._unique_sections.items():
                if section := self._unique_sections.get(section_name, None):
                    section._update(other_section._to_dict())
                else:
                    # Shallow copy so later updates on self do not mutate other_config.
                    self._unique_sections[section_name] = copy(other_config._unique_sections[section_name])
        if other_config._sections:
            for section_name, other_non_unique_sections in other_config._sections.items():
                if non_unique_sections := self._sections.get(section_name, None):
                    self.__update_sections(non_unique_sections, other_non_unique_sections)
                else:
                    self._sections[section_name] = {}
                    self.__add_sections(self._sections[section_name], other_non_unique_sections)

    def __add_sections(self, entity_config, other_entity_configs):
        # Copy each incoming section, then re-point any nested Section
        # references at the instances held by self.
        for cfg_id, sub_config in other_entity_configs.items():
            entity_config[cfg_id] = copy(sub_config)
            self.__point_nested_section_to_self(sub_config)

    def __update_sections(self, entity_config, other_entity_configs):
        # Merge the default section first so it can back the per-id updates below.
        if self.DEFAULT_KEY in other_entity_configs:
            if self.DEFAULT_KEY in entity_config:
                entity_config[self.DEFAULT_KEY]._update(other_entity_configs[self.DEFAULT_KEY]._to_dict())
            else:
                entity_config[self.DEFAULT_KEY] = other_entity_configs[self.DEFAULT_KEY]
        for cfg_id, sub_config in other_entity_configs.items():
            if cfg_id != self.DEFAULT_KEY:
                if cfg_id in entity_config:
                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
                else:
                    entity_config[cfg_id] = copy(sub_config)
                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
                self.__point_nested_section_to_self(sub_config)

    def __point_nested_section_to_self(self, section):
        """Loop through attributes of a Section to find if any attribute has a list of Section as value.
        If there is, update each nested Section by the corresponding instance in self.

        Args:
            section (Section): The Section to search for nested sections.
        """
        for _, attr_value in vars(section).items():
            # ! This will fail if an attribute is a dictionary, or nested list of Sections.
            if not isinstance(attr_value, list):
                continue
            for index, item in enumerate(attr_value):
                if not isinstance(item, Section):
                    continue
                if sub_item := self._sections.get(item.name, {}).get(item.id, None):
                    attr_value[index] = sub_item
import ast
import re
from pathlib import Path
from typing import List


def _get_function_delimiters(initial_line, lines):
    """Return the (begin, end) line-index bounds of the definition around `initial_line`.

    Walks backward to the preceding blank line (so decorators are included),
    then forward across a multi-line signature and an immediately following
    docstring, if any.
    """
    begin = end = initial_line
    # Walk up to the blank line that precedes the definition.
    while True:
        if lines[begin - 1] == "\n":
            break
        begin -= 1
    # A signature opened with "(\n" spans several lines: advance to the ":" line.
    if lines[end].endswith("(\n"):
        while ":\n" not in lines[end]:
            end += 1
    # If the next line starts a docstring, extend to its closing triple quote.
    if '"""' in lines[end + 1]:
        while True:
            if '"""\n' in lines[end]:
                break
            end += 1
    return begin, end + 1


def _get_file_lines(filename: str) -> List[str]:
    """Return all lines of `filename` for later slicing by line number."""
    with open(filename) as f:
        return f.readlines()


def _get_file_ast(filename: str):
    """Read `filename` and return its parsed AST."""
    _config = Path(filename)
    _tree = _config.read_text()
    return ast.parse(_tree)


def _build_base_config_pyi(filename, base_pyi):
    """Append the `Config` class header and its non-dunder function stubs to `base_pyi`."""
    lines = _get_file_lines(filename)
    tree = _get_file_ast(filename)
    class_lineno = [f.lineno for f in ast.walk(tree) if isinstance(f, ast.ClassDef) and f.name == "Config"]
    begin_class, end_class = _get_function_delimiters(class_lineno[0] - 1, lines)
    base_pyi += "".join(lines[begin_class:end_class])
    # Only non-dunder functions are stubbed.
    functions = [f.lineno for f in ast.walk(tree) if isinstance(f, ast.FunctionDef) and not f.name.startswith("__")]
    for ln in functions:
        begin_line, end_line = _get_function_delimiters(ln - 1, lines)
        base_pyi += "".join(lines[begin_line:end_line])
        base_pyi = __add_docstring(base_pyi, lines, end_line)
        base_pyi += "\n"
    return base_pyi


def __add_docstring(base_pyi, lines, end_line):
    """Append an empty docstring placeholder when the copied stub ends without one."""
    if '"""' not in lines[end_line - 1]:
        base_pyi += '\t\t""""""\n'.replace("\t", " ")
    return base_pyi


def _build_entity_config_pyi(base_pyi, filename, entity_map):
    """Append stubs for the entity's `_configure*`, `_set_default*` and `_add*` methods.

    Private implementation names are rewritten to their public alias taken
    from `entity_map` ({private_name: public_name}).
    """
    lines = _get_file_lines(filename)
    tree = _get_file_ast(filename)
    functions = {}
    # Collect the configuration entry points by name pattern.
    for f in ast.walk(tree):
        if isinstance(f, ast.FunctionDef):
            if "_configure" in f.name and not f.name.startswith("__"):
                functions[f.name] = f.lineno
            elif "_set_default" in f.name and not f.name.startswith("__"):
                functions[f.name] = f.lineno
            elif "_add" in f.name and not f.name.startswith("__"):
                functions[f.name] = f.lineno
    for k, v in functions.items():
        begin_line, end_line = _get_function_delimiters(v - 1, lines)
        try:
            func = "".join(lines[begin_line:end_line])
            # Expose private implementations under their public alias.
            func = func if not k.startswith("_") else func.replace(k, entity_map.get(k))
            func = __add_docstring(func, lines, end_line) + "\n"
            base_pyi += func
        except Exception:
            # Surface which method failed before re-raising for debugging.
            print(f"key={k}")
            raise
    return base_pyi


def _generate_entity_and_property_maps(filename):
    """Build the entity and property maps from `_inject_section(...)` calls.

    Returns:
        A pair ({entity: {private_name: public_name}}, {property: entity}).
    """
    entities_map = {}
    property_map = {}
    entity_tree = _get_file_ast(filename)
    functions = [
        f for f in ast.walk(entity_tree) if isinstance(f, ast.Call) and getattr(f.func, "id", "") == "_inject_section"
    ]
    for f in functions:
        entity = ast.unparse(f.args[0])
        entities_map[entity] = {}
        # NOTE(review): eval on unparsed source is acceptable here only because
        # the input is this repository's own code, never external data.
        property_map[eval(ast.unparse(f.args[1]))] = entity
        # Remove class name from function map
        text = ast.unparse(f.args[-1]).replace(f"{entity}.", "")
        matches = re.findall(r"\((.*?)\)", text)
        for m in matches:
            v, k = m.replace("'", "").split(",")
            entities_map[entity][k.strip()] = v
    return entities_map, property_map


def _generate_acessors(base_pyi, property_map):
    """Append a `_Classproperty` accessor stub for each configuration property."""
    for property, cls in property_map.items():
        # job_config is a unique section: it returns a single instance, not a dict.
        return_template = f"Dict[str, {cls}]" if property != "job_config" else f"{cls}"
        template = ("\t@_Classproperty\n" + f'\tdef {property}(cls) -> {return_template}:\n\t\t""""""\n').replace(
            "\t", " "
        )
        base_pyi += template + "\n"
    return base_pyi


def _build_header(filename):
    """Return the stub header file content followed by two blank lines."""
    _file = Path(filename)
    return _file.read_text() + "\n\n"


if __name__ == "__main__":
    # Input locations: stub header, base Config, and each entity config module.
    # NOTE(review): paths assume the taipy-core checkout sits next to this one.
    header_file = "stubs/pyi_header.py"
    config_init = Path("taipy-core/src/taipy/core/config/__init__.py")
    base_config = "src/taipy/config/config.py"
    dn_filename = "taipy-core/src/taipy/core/config/data_node_config.py"
    job_filename = "taipy-core/src/taipy/core/config/job_config.py"
    scenario_filename = "taipy-core/src/taipy/core/config/scenario_config.py"
    task_filename = "taipy-core/src/taipy/core/config/task_config.py"
    migration_filename = "taipy-core/src/taipy/core/config/migration_config.py"
    core_filename = "taipy-core/src/taipy/core/config/core_section.py"

    entities_map, property_map = _generate_entity_and_property_maps(config_init)
    pyi = _build_header(header_file)
    pyi = _build_base_config_pyi(base_config, pyi)
    pyi = _generate_acessors(pyi, property_map)
    pyi = _build_entity_config_pyi(pyi, scenario_filename, entities_map["ScenarioConfig"])
    pyi = _build_entity_config_pyi(pyi, dn_filename, entities_map["DataNodeConfig"])
    pyi = _build_entity_config_pyi(pyi, task_filename, entities_map["TaskConfig"])
    pyi = _build_entity_config_pyi(pyi, job_filename, entities_map["JobConfig"])
    pyi = _build_entity_config_pyi(pyi, migration_filename, entities_map["MigrationConfig"])
    pyi = _build_entity_config_pyi(pyi, core_filename, entities_map["CoreSection"])

    with open("src/taipy/config/config.pyi", "w") as f:
        f.writelines(pyi)
import json from datetime import timedelta from typing import Any, Callable, Dict, List, Optional, Union from taipy.core.config import CoreSection, DataNodeConfig, JobConfig, MigrationConfig, ScenarioConfig, TaskConfig from .checker.issue_collector import IssueCollector from .common._classproperty import _Classproperty from .common._config_blocker import _ConfigBlocker from .common.frequency import Frequency from .common.scope import Scope from .global_app.global_app_config import GlobalAppConfig from .section import Section from .unique_section import UniqueSection
import toml  # type: ignore

from .._config import _Config
from ..exceptions.exceptions import LoadingError
from ._base_serializer import _BaseSerializer


class _TomlSerializer(_BaseSerializer):
    """Convert configuration from TOML representation to Python Dict and reciprocally."""

    @classmethod
    def _write(cls, configuration: _Config, filename: str):
        """Serialize `configuration` and write it as TOML to `filename`."""
        with open(filename, "w") as fd:
            toml.dump(cls._str(configuration), fd)

    @classmethod
    def _read(cls, filename: str) -> _Config:
        """Load a `_Config` from the TOML file at `filename`.

        Raises:
            LoadingError: If the file content is not valid TOML.
        """
        try:
            config_as_dict = cls._pythonify(dict(toml.load(filename)))
            return cls._from_dict(config_as_dict)
        except toml.TomlDecodeError as e:
            error_msg = f"Can not load configuration {e}"
            # Chain the decode error so the root cause is preserved in tracebacks.
            raise LoadingError(error_msg) from e

    @classmethod
    def _serialize(cls, configuration: _Config) -> str:
        """Return the TOML string representation of `configuration`."""
        return toml.dumps(cls._str(configuration))

    @classmethod
    def _deserialize(cls, config_as_string: str) -> _Config:
        """Build a `_Config` from a TOML string."""
        return cls._from_dict(cls._pythonify(dict(toml.loads(config_as_string))))
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import json  # type: ignore

from .._config import _Config
from ..exceptions.exceptions import LoadingError
from ._base_serializer import _BaseSerializer


class _JsonSerializer(_BaseSerializer):
    """Convert configuration from JSON representation to Python Dict and reciprocally."""

    @classmethod
    def _write(cls, configuration: _Config, filename: str):
        """Serialize `configuration` and write it as JSON to `filename`."""
        with open(filename, "w") as fd:
            json.dump(cls._str(configuration), fd, ensure_ascii=False, indent=0, check_circular=False)

    @classmethod
    def _read(cls, filename: str) -> _Config:
        """Load a `_Config` from the JSON file at `filename`.

        Raises:
            LoadingError: If the file content is not valid JSON.
        """
        try:
            with open(filename) as f:
                config_as_dict = cls._pythonify(json.load(f))
            return cls._from_dict(config_as_dict)
        except json.JSONDecodeError as e:
            error_msg = f"Can not load configuration {e}"
            # Chain the decode error so the root cause is preserved in tracebacks.
            raise LoadingError(error_msg) from e

    @classmethod
    def _serialize(cls, configuration: _Config) -> str:
        """Return the JSON string representation of `configuration`."""
        return json.dumps(cls._str(configuration), ensure_ascii=False, indent=0, check_circular=False)

    @classmethod
    def _deserialize(cls, config_as_string: str) -> _Config:
        """Build a `_Config` from a JSON string."""
        return cls._from_dict(cls._pythonify(dict(json.loads(config_as_string))))
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Any, List

from .issue import Issue


class IssueCollector:
    """
    A collection of issues (instances of class `Issue^`).

    Attributes:
        errors (List[Issue^]): List of ERROR issues collected.
        warnings (List[Issue^]): List WARNING issues collected.
        infos (List[Issue^]): List INFO issues collected.
        all (List[Issue^]): List of all issues collected ordered by decreasing level (ERROR, WARNING and INFO).
    """

    _ERROR_LEVEL = "ERROR"
    _WARNING_LEVEL = "WARNING"
    _INFO_LEVEL = "INFO"

    def __init__(self):
        self._errors: List[Issue] = []
        self._warnings: List[Issue] = []
        self._infos: List[Issue] = []

    @property
    def errors(self) -> List[Issue]:
        return self._errors

    @property
    def warnings(self) -> List[Issue]:
        return self._warnings

    @property
    def infos(self) -> List[Issue]:
        return self._infos

    @property
    def all(self) -> List[Issue]:
        # Highest severity first: errors, then warnings, then infos.
        return [*self._errors, *self._warnings, *self._infos]

    def _add_error(self, field: str, value: Any, message: str, checker_name: str):
        self._errors.append(Issue(self._ERROR_LEVEL, field, value, message, checker_name))

    def _add_warning(self, field: str, value: Any, message: str, checker_name: str):
        self._warnings.append(Issue(self._WARNING_LEVEL, field, value, message, checker_name))

    def _add_info(self, field: str, value: Any, message: str, checker_name: str):
        self._infos.append(Issue(self._INFO_LEVEL, field, value, message, checker_name))
from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class Issue:
    """
    An issue detected in the configuration.

    Attributes:
        level (str): Level of the issue among ERROR, WARNING, INFO.
        field (str): Configuration field on which the issue has been detected.
        value (Any): Value of the field on which the issue has been detected.
        message (str): Human readable message to help the user fix the issue.
        tag (Optional[str]): Optional tag to be used to filter issues.
    """

    level: str
    field: str
    value: Any
    message: str
    tag: Optional[str]

    def __str__(self) -> str:
        # A falsy value (None, 0, "", ...) is not echoed back in the message.
        if not self.value:
            return self.message
        if isinstance(self.value, str):
            rendered = f'"{self.value}"'
        else:
            rendered = f"{self.value}"
        return f"{self.message} Current value of property `{self.field}` is {rendered}."
from typing import List

from ._checkers._config_checker import _ConfigChecker
from .issue_collector import IssueCollector


class _Checker:
    """Registry of checkers that are run against the compiled configuration."""

    # Checker classes registered via `add_checker`; instantiated per run.
    _checkers: List[_ConfigChecker] = []

    @classmethod
    def _check(cls, _applied_config):
        """Run every registered checker and return the collected issues."""
        issues = IssueCollector()
        for checker_class in cls._checkers:
            checker_class(_applied_config, issues)._check()
        return issues

    @classmethod
    def add_checker(cls, checker_class: _ConfigChecker):
        """Register an additional checker class to run on future checks."""
        cls._checkers.append(checker_class)
import abc
from typing import Any, List, Optional, Set

from ..._config import _Config
from ..issue_collector import IssueCollector


class _ConfigChecker:
    """Base class for configuration checkers.

    Subclasses implement `_check` and report problems through the shared
    `IssueCollector` via `_error`, `_warning` and `_info`.
    """

    # Property keys reserved for internal use: user configs must not set them.
    _PREDEFINED_PROPERTIES_KEYS = ["_entity_owner"]

    def __init__(self, config: _Config, collector):
        self._collector = collector
        self._config = config

    @abc.abstractmethod
    def _check(self) -> IssueCollector:
        raise NotImplementedError

    def _error(self, field: str, value: Any, message: str):
        self._collector._add_error(field, value, message, self.__class__.__name__)

    def _warning(self, field: str, value: Any, message: str):
        self._collector._add_warning(field, value, message, self.__class__.__name__)

    def _info(self, field: str, value: Any, message: str):
        self._collector._add_info(field, value, message, self.__class__.__name__)

    def _check_children(
        self,
        parent_config_class,
        config_id: str,
        config_key: str,
        config_value,
        child_config_class,
        can_be_empty: Optional[bool] = False,
    ):
        """Verify `config_value` is a list/set populated with `child_config_class` instances.

        An empty value is a warning (unless `can_be_empty`); a wrongly-typed
        value is an error.
        """
        if not config_value and not can_be_empty:
            self._warning(
                config_key,
                config_value,
                f"{config_key} field of {parent_config_class.__name__} `{config_id}` is empty.",
            )
        # Idiom fix: isinstance against concrete builtins rather than the
        # deprecated typing.List / typing.Set aliases; genexpr over map+lambda.
        elif not (
            isinstance(config_value, (list, set))
            and all(isinstance(child, child_config_class) for child in config_value)
        ):
            self._error(
                config_key,
                config_value,
                f"{config_key} field of {parent_config_class.__name__} `{config_id}` must be populated with a list "
                f"of {child_config_class.__name__} objects.",
            )

    def _check_existing_config_id(self, config):
        """Report an error when the config has no id."""
        if not config.id:
            self._error(
                "config_id",
                config.id,
                f"config_id of {config.__class__.__name__} `{config.id}` is empty.",
            )

    def _check_if_entity_property_key_used_is_predefined(self, config):
        """Report an error for each property whose key is reserved for internal use."""
        for key, value in config._properties.items():
            if key in self._PREDEFINED_PROPERTIES_KEYS:
                self._error(
                    key,
                    value,
                    f"Properties of {config.__class__.__name__} `{config.id}` cannot have `{key}` as its property.",
                )
from ..._config import _Config
from ..issue_collector import IssueCollector
from ._config_checker import _ConfigChecker


class _AuthConfigChecker(_ConfigChecker):
    """Check that the authentication configuration carries the properties its protocol requires."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run all authentication checks and return the shared collector."""
        auth_config = self._config._auth_config  # type: ignore
        self._check_predefined_protocol(auth_config)
        return self._collector

    def _check_predefined_protocol(self, auth_config):
        # Dispatch to the checker matching the configured protocol.
        if auth_config.protocol == auth_config._PROTOCOL_LDAP:
            self.__check_ldap(auth_config)
        if auth_config.protocol == auth_config._PROTOCOL_TAIPY:
            self.__check_taipy(auth_config)

    def __check_taipy(self, auth_config):
        # Bug fix: the missing-roles error previously referenced the LDAP
        # server property and the LDAP protocol (copy-paste from __check_ldap).
        if auth_config._TAIPY_ROLES not in auth_config.properties:
            self._error(
                "properties",
                auth_config._TAIPY_ROLES,
                f"`{auth_config._TAIPY_ROLES}` property must be populated when {auth_config._PROTOCOL_TAIPY} is used.",
            )
        if auth_config._TAIPY_PWD not in auth_config.properties:
            self._warning(
                "properties",
                auth_config._TAIPY_PWD,
                f"`In order to protect authentication with passwords using {auth_config._PROTOCOL_TAIPY} protocol,"
                f" {auth_config._TAIPY_PWD}` property can be populated.",
            )

    def __check_ldap(self, auth_config):
        if auth_config._LDAP_SERVER not in auth_config.properties:
            self._error(
                "properties",
                auth_config._LDAP_SERVER,
                f"`{auth_config._LDAP_SERVER}` attribute must be populated when {auth_config._PROTOCOL_LDAP} is used.",
            )
        if auth_config._LDAP_BASE_DN not in auth_config.properties:
            self._error(
                "properties",
                auth_config._LDAP_BASE_DN,
                f"`{auth_config._LDAP_BASE_DN}` field must be populated when {auth_config._PROTOCOL_LDAP} is used.",
            )
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .exceptions import *
class LoadingError(Exception):
    """Raised if an error occurs while loading the configuration file."""


class InconsistentEnvVariableError(Exception):
    """Raised when an inconsistent value is detected in an environment variable referenced by the configuration."""


class MissingEnvVariableError(Exception):
    """Raised when an environment variable referenced in the configuration is missing."""


class InvalidConfigurationId(Exception):
    """Raised when a configuration id is not valid."""


class ConfigurationUpdateBlocked(Exception):
    """Raised when the configuration is blocked from updates by other running Taipy services."""
from ..common._repr_enum import _ReprEnum


class Frequency(_ReprEnum):
    """Frequency of the recurrence of `Cycle^` and `Scenario^` objects.

    The frequency must be provided in the `ScenarioConfig^`.

    Each recurrent scenario is attached to the cycle corresponding to the creation date and the
    frequency. In other words, each cycle represents an iteration and contains the various scenarios
    created during this iteration.

    For instance, when scenarios have a _MONTHLY_ frequency, one cycle will be created for each month
    (January, February, March, etc.). A new scenario created on February 10th, gets attached to the
    _February_ cycle.

    The frequency is implemented as an enumeration with the following possible values:

    - With a _DAILY_ frequency, a new cycle is created for each day.

    - With a _WEEKLY_ frequency, a new cycle is created for each week (from Monday to Sunday).

    - With a _MONTHLY_ frequency, a new cycle is created for each month.

    - With a _QUARTERLY_ frequency, a new cycle is created for each quarter.

    - With a _YEARLY_ frequency, a new cycle is created for each year.
    """

    # Values increase with the length of the recurrence period.
    DAILY = 1
    WEEKLY = 2
    MONTHLY = 3
    QUARTERLY = 4
    YEARLY = 5
class _Classproperty(object): def __init__(self, f): self.f = f def __get__(self, obj, owner): return self.f(owner)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import functools from enum import Enum class _ReprEnum(Enum): @classmethod @functools.lru_cache def _from_repr(cls, repr_: str): return next(filter(lambda e: repr(e) == repr_, cls)) # type: ignore
import keyword

from ..exceptions.exceptions import InvalidConfigurationId

# Reserved Taipy entity terms that must not appear anywhere inside an id.
__INVALID_TAIPY_ID_TERMS = ["CYCLE", "SCENARIO", "SEQUENCE", "TASK", "DATANODE"]


def _validate_id(name: str):
    """Return `name` if it is a valid configuration id.

    Raises:
        InvalidConfigurationId: If `name` contains a reserved Taipy term, is
            not a valid Python identifier, or is a Python keyword.
    """
    restricted = next((term for term in __INVALID_TAIPY_ID_TERMS if term in name), None)
    if restricted is not None:
        raise InvalidConfigurationId(f"{name} is not a valid identifier. {restricted} is restricted.")
    if not name.isidentifier() or keyword.iskeyword(name):
        raise InvalidConfigurationId(f"{name} is not a valid identifier.")
    return name
import functools

from ...logger._taipy_logger import _TaipyLogger
from ..exceptions.exceptions import ConfigurationUpdateBlocked


class _ConfigBlocker:
    """Configuration blocker singleton.

    While blocked (typically while the Core service runs), any decorated
    configuration mutator raises `ConfigurationUpdateBlocked`.
    """

    __logger = _TaipyLogger._get_logger()
    __block_config_update = False

    @classmethod
    def _block(cls):
        cls.__block_config_update = True

    @classmethod
    def _unblock(cls):
        cls.__block_config_update = False

    @classmethod
    def _check(cls):
        """Return a decorator that rejects calls while updates are blocked."""

        def decorator(f):
            @functools.wraps(f)
            def wrapper(*args, **kwargs):
                # Fast path: updates allowed, just delegate.
                if not cls.__block_config_update:
                    return f(*args, **kwargs)
                error_message = (
                    "The Core service should be stopped by running core.stop() before"
                    " modifying the Configuration. For more information, please refer to:"
                    " https://docs.taipy.io/en/latest/manuals/running_services/#running-core."
                )
                cls.__logger.error("ConfigurationUpdateBlocked: " + error_message)
                raise ConfigurationUpdateBlocked(error_message)

            return wrapper

        return decorator
from ..common._repr_enum import _ReprEnum


class _OrderedEnum(_ReprEnum):
    """Enum base whose members compare by their underlying value.

    Comparisons between different enum classes return NotImplemented so that
    Python falls back to the other operand's reflected operation.
    """

    def __ge__(self, other):
        if type(self) is not type(other):
            return NotImplemented
        return self.value >= other.value

    def __gt__(self, other):
        if type(self) is not type(other):
            return NotImplemented
        return self.value > other.value

    def __le__(self, other):
        if type(self) is not type(other):
            return NotImplemented
        return self.value <= other.value

    def __lt__(self, other):
        if type(self) is not type(other):
            return NotImplemented
        return self.value < other.value


class Scope(_OrderedEnum):
    """Scope of a `DataNode^`.

    This enumeration can have the following values:

    - `GLOBAL`
    - `CYCLE`
    - `SCENARIO`
    """

    # Wider scopes carry greater values, so GLOBAL > CYCLE > SCENARIO.
    GLOBAL = 3
    CYCLE = 2
    SCENARIO = 1