text
stringlengths
0
5.92k
"""# Taipy Config The Taipy Config package is a Python library designed to configure a Taipy application. The main entrypoint is the `Config^` singleton class. It exposes some methods to configure the Taipy application and some attributes to retrieve the configuration values. """ from ._init import * from typing import List from .checker.issue import Issue from .checker.issue_collector import IssueCollector from .global_app.global_app_config import GlobalAppConfig from .section import Section from .unique_section import UniqueSection from .version import _get_version __version__ = _get_version() def _config_doc(func): def func_with_doc(section, attribute_name, default, configuration_methods, add_to_unconflicted_sections=False): import os if os.environ.get("GENERATING_TAIPY_DOC", None) and os.environ["GENERATING_TAIPY_DOC"] == "true": with open("config_doc.txt", "a") as f: from inspect import signature for exposed_configuration_method, configuration_method in configuration_methods: annotation = " @staticmethod\n" sign = " def " + exposed_configuration_method + str(signature(configuration_method)) + ":\n" doc = ' """' + configuration_method.__doc__ + '"""\n' content = " pass\n\n" f.write(annotation + sign + doc + content) return func(section, attribute_name, default, configuration_methods, add_to_unconflicted_sections) return func_with_doc @_config_doc def _inject_section( section_clazz, attribute_name: str, default: Section, configuration_methods: List[tuple], add_to_unconflicted_sections: bool = False, ): Config._register_default(default) if issubclass(section_clazz, UniqueSection): setattr(Config, attribute_name, Config.unique_sections[section_clazz.name]) elif issubclass(section_clazz, Section): setattr(Config, attribute_name, Config.sections[section_clazz.name]) else: raise TypeError if add_to_unconflicted_sections: Config._comparator._add_unconflicted_section(section_clazz.name) for exposed_configuration_method, configuration_method in configuration_methods: 
setattr(Config, exposed_configuration_method, configuration_method)
from abc import abstractmethod
from typing import Any, Dict, Optional

from .common._config_blocker import _ConfigBlocker
from .common._template_handler import _TemplateHandler as _tpl
from .common._validate_id import _validate_id


class Section:
    """A Section as a consistent part of the Config.

    A section is defined by the section name (representing the type of objects that are
    configured) and a section id.
    """

    _DEFAULT_KEY = "default"
    _ID_KEY = "id"

    def __init__(self, id, **properties):
        # Validated eagerly: an invalid identifier raises at construction time.
        self.id = _validate_id(id)
        # NOTE: `properties or dict()` also replaces an explicitly-empty mapping with a fresh dict.
        self._properties = properties or dict()

    @abstractmethod
    def __copy__(self):
        raise NotImplementedError

    @property
    @abstractmethod
    def name(self):
        """Name of the section, shared by all sections of the same type."""
        raise NotImplementedError

    @abstractmethod
    def _clean(self):
        """Reset the section content."""
        raise NotImplementedError

    @abstractmethod
    def _to_dict(self):
        """Serialize the section to a plain dictionary."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def _from_dict(cls, config_as_dict: Dict[str, Any], id, config):
        """Build a section from its dictionary representation."""
        raise NotImplementedError

    @abstractmethod
    def _update(self, config_as_dict, default_section=None):
        """Merge *config_as_dict* into this section, using *default_section* as fallback."""
        raise NotImplementedError

    def __getattr__(self, item: str) -> Optional[Any]:
        # Unknown attributes resolve against the property bag; missing keys yield None
        # after "ENV[...]" template substitution.
        return self._replace_templates(self._properties.get(item, None))

    @property
    def properties(self):
        # A copy of the property bag with every "ENV[...]" template resolved.
        return {k: _tpl._replace_templates(v) for k, v in self._properties.items()}

    @properties.setter  # type: ignore
    @_ConfigBlocker._check()
    def properties(self, val):
        # Updates are rejected while a Core service is running (see _ConfigBlocker).
        self._properties = val

    def _replace_templates(self, value):
        # Delegate "ENV[...]" substitution to the shared template handler.
        return _tpl._replace_templates(value)
from abc import ABC

from .common._validate_id import _validate_id
from .section import Section


class UniqueSection(Section, ABC):
    """A configuration `Section^` that exists at most once in the configuration.

    Unlike a regular `Section^`, a unique section is identified by its section name
    alone, so no id has to be supplied at construction time.
    """

    def __init__(self, **props):
        # The section name doubles as the id of the single instance.
        super().__init__(self.name, **props)
from copy import copy
from typing import Dict

from .global_app.global_app_config import GlobalAppConfig
from .section import Section
from .unique_section import UniqueSection


class _Config:
    """Internal container holding the whole configuration: the global config,
    the unique sections, and the groups of non-unique sections."""

    # Key under which the default (template) section of a section group is stored.
    DEFAULT_KEY = "default"

    def __init__(self):
        # Non-unique sections: {section_name: {section_id: Section}}.
        self._sections: Dict[str, Dict[str, Section]] = {}
        # Unique sections: {section_name: UniqueSection} (one instance per name).
        self._unique_sections: Dict[str, UniqueSection] = {}
        self._global_config: GlobalAppConfig = GlobalAppConfig()

    def _clean(self):
        # Reset the global config and every registered section in place.
        self._global_config._clean()
        for unique_section in self._unique_sections.values():
            unique_section._clean()
        for sections in self._sections.values():
            for section in sections.values():
                section._clean()

    @classmethod
    def _default_config(cls):
        """Return a new `_Config` holding only default values."""
        config = _Config()
        config._global_config = GlobalAppConfig.default_config()
        return config

    def _update(self, other_config):
        """Merge *other_config* into this configuration; values from *other_config* win."""
        self._global_config._update(other_config._global_config._to_dict())
        if other_config._unique_sections:
            for section_name, other_section in other_config._unique_sections.items():
                if section := self._unique_sections.get(section_name, None):
                    section._update(other_section._to_dict())
                else:
                    # Copy so later mutations of other_config do not leak into self.
                    self._unique_sections[section_name] = copy(other_config._unique_sections[section_name])
        if other_config._sections:
            for section_name, other_non_unique_sections in other_config._sections.items():
                if non_unique_sections := self._sections.get(section_name, None):
                    self.__update_sections(non_unique_sections, other_non_unique_sections)
                else:
                    self._sections[section_name] = {}
                    self.__add_sections(self._sections[section_name], other_non_unique_sections)

    def __add_sections(self, entity_config, other_entity_configs):
        # Copy every incoming section, then re-point any Sections nested inside it
        # to the instances now owned by self.
        for cfg_id, sub_config in other_entity_configs.items():
            entity_config[cfg_id] = copy(sub_config)
            self.__point_nested_section_to_self(sub_config)

    def __update_sections(self, entity_config, other_entity_configs):
        # Merge the default section first so it can serve as fallback for the others below.
        if self.DEFAULT_KEY in other_entity_configs:
            if self.DEFAULT_KEY in entity_config:
                entity_config[self.DEFAULT_KEY]._update(other_entity_configs[self.DEFAULT_KEY]._to_dict())
            else:
                entity_config[self.DEFAULT_KEY] = other_entity_configs[self.DEFAULT_KEY]
        for cfg_id, sub_config in other_entity_configs.items():
            if cfg_id != self.DEFAULT_KEY:
                if cfg_id in entity_config:
                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
                else:
                    # New section: copy it first, then re-apply the update so the
                    # default section values are taken into account.
                    entity_config[cfg_id] = copy(sub_config)
                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
                self.__point_nested_section_to_self(sub_config)

    def __point_nested_section_to_self(self, section):
        """Loop through attributes of a Section to find if any attribute has a list of Section as value.
        If there is, update each nested Section by the corresponding instance in self.

        Args:
            section (Section): The Section to search for nested sections.
        """
        for _, attr_value in vars(section).items():
            # ! This will fail if an attribute is a dictionary, or nested list of Sections.
            if not isinstance(attr_value, list):
                continue
            for index, item in enumerate(attr_value):
                if not isinstance(item, Section):
                    continue
                # Replace the nested Section with the instance registered in self, if any.
                if sub_item := self._sections.get(item.name, {}).get(item.id, None):
                    attr_value[index] = sub_item
import toml  # type: ignore

from .._config import _Config
from ..exceptions.exceptions import LoadingError
from ._base_serializer import _BaseSerializer


class _TomlSerializer(_BaseSerializer):
    """Convert configuration from TOML representation to Python Dict and reciprocally."""

    @classmethod
    def _write(cls, configuration: _Config, filename: str):
        """Serialize *configuration* and write it to *filename* as TOML."""
        with open(filename, "w") as fd:
            toml.dump(cls._str(configuration), fd)

    @classmethod
    def _read(cls, filename: str) -> _Config:
        """Load a `_Config` from the TOML file *filename*.

        Raises:
            LoadingError: If the file content is not valid TOML.
        """
        try:
            config_as_dict = cls._pythonify(dict(toml.load(filename)))
            return cls._from_dict(config_as_dict)
        except toml.TomlDecodeError as e:
            error_msg = f"Can not load configuration {e}"
            # Chain the decoding error so the root cause is not lost.
            raise LoadingError(error_msg) from e

    @classmethod
    def _serialize(cls, configuration: _Config) -> str:
        """Return *configuration* as a TOML string."""
        return toml.dumps(cls._str(configuration))

    @classmethod
    def _deserialize(cls, config_as_string: str) -> _Config:
        """Build a `_Config` from a TOML string."""
        return cls._from_dict(cls._pythonify(dict(toml.loads(config_as_string))))
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import json

from .._config import _Config
from ..exceptions.exceptions import LoadingError
from ._base_serializer import _BaseSerializer


class _JsonSerializer(_BaseSerializer):
    """Convert configuration from JSON representation to Python Dict and reciprocally."""

    @classmethod
    def _write(cls, configuration: _Config, filename: str):
        """Serialize *configuration* and write it to *filename* as JSON."""
        with open(filename, "w") as fd:
            json.dump(cls._str(configuration), fd, ensure_ascii=False, indent=0, check_circular=False)

    @classmethod
    def _read(cls, filename: str) -> _Config:
        """Load a `_Config` from the JSON file *filename*.

        Raises:
            LoadingError: If the file content is not valid JSON.
        """
        try:
            with open(filename) as f:
                config_as_dict = cls._pythonify(json.load(f))
            return cls._from_dict(config_as_dict)
        except json.JSONDecodeError as e:
            error_msg = f"Can not load configuration {e}"
            # Chain the decoding error so the root cause is not lost.
            raise LoadingError(error_msg) from e

    @classmethod
    def _serialize(cls, configuration: _Config) -> str:
        """Return *configuration* as a JSON string."""
        return json.dumps(cls._str(configuration), ensure_ascii=False, indent=0, check_circular=False)

    @classmethod
    def _deserialize(cls, config_as_string: str) -> _Config:
        """Build a `_Config` from a JSON string."""
        return cls._from_dict(cls._pythonify(dict(json.loads(config_as_string))))
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Any, List

from .issue import Issue


class IssueCollector:
    """A collection of issues (instances of class `Issue^`).

    Attributes:
        errors (List[Issue^]): List of ERROR issues collected.
        warnings (List[Issue^]): List WARNING issues collected.
        infos (List[Issue^]): List INFO issues collected.
        all (List[Issue^]): List of all issues collected ordered by decreasing level
            (ERROR, WARNING and INFO).
    """

    _ERROR_LEVEL = "ERROR"
    _WARNING_LEVEL = "WARNING"
    _INFO_LEVEL = "INFO"

    def __init__(self):
        self._errors: List[Issue] = []
        self._warnings: List[Issue] = []
        self._infos: List[Issue] = []

    @property
    def all(self) -> List[Issue]:
        # Ordered by decreasing severity: errors, then warnings, then infos.
        return [*self._errors, *self._warnings, *self._infos]

    @property
    def infos(self) -> List[Issue]:
        return self._infos

    @property
    def warnings(self) -> List[Issue]:
        return self._warnings

    @property
    def errors(self) -> List[Issue]:
        return self._errors

    def _add_error(self, field: str, value: Any, message: str, checker_name: str):
        self.__collect(self._ERROR_LEVEL, self._errors, field, value, message, checker_name)

    def _add_warning(self, field: str, value: Any, message: str, checker_name: str):
        self.__collect(self._WARNING_LEVEL, self._warnings, field, value, message, checker_name)

    def _add_info(self, field: str, value: Any, message: str, checker_name: str):
        self.__collect(self._INFO_LEVEL, self._infos, field, value, message, checker_name)

    @staticmethod
    def __collect(level, bucket, field, value, message, checker_name):
        # Shared factory: build the Issue and append it to the matching bucket.
        bucket.append(Issue(level, field, value, message, checker_name))
from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class Issue:
    """An issue detected in the configuration.

    Attributes:
        level (str): Level of the issue among ERROR, WARNING, INFO.
        field (str): Configuration field on which the issue has been detected.
        value (Any): Value of the field on which the issue has been detected.
        message (str): Human readable message to help the user fix the issue.
        tag (Optional[str]): Optional tag to be used to filter issues.
    """

    level: str
    field: str
    value: Any
    message: str
    tag: Optional[str]

    def __str__(self) -> str:
        message = self.message
        # `is not None` so falsy-but-meaningful values (0, False, "") are still
        # reported; the previous truthiness test silently dropped them.
        if self.value is not None:
            current_value_str = f'"{self.value}"' if isinstance(self.value, str) else f"{self.value}"
            message += f" Current value of property `{self.field}` is {current_value_str}."
        return message
from typing import List

from ._checkers._config_checker import _ConfigChecker
from .issue_collector import IssueCollector


class _Checker:
    """Holds the various checkers to perform on the config."""

    _checkers: List[_ConfigChecker] = []

    @classmethod
    def _check(cls, _applied_config):
        """Run every registered checker on *_applied_config* and gather the issues found."""
        issues = IssueCollector()
        for checker_class in cls._checkers:
            checker_class(_applied_config, issues)._check()
        return issues

    @classmethod
    def add_checker(cls, checker_class: _ConfigChecker):
        """Register *checker_class* so that it runs on every subsequent check."""
        cls._checkers.append(checker_class)
import abc
from typing import Any, List, Optional, Set

from ..._config import _Config
from ..issue_collector import IssueCollector


class _ConfigChecker:
    """Base class for configuration checkers; reports issues into a shared collector."""

    # Reserved property keys users must not set on sections.
    _PREDEFINED_PROPERTIES_KEYS = ["_entity_owner"]

    def __init__(self, config: _Config, collector):
        self._collector = collector
        self._config = config

    @abc.abstractmethod
    def _check(self) -> IssueCollector:
        """Run the checks; concrete checkers must implement this."""
        raise NotImplementedError

    def _error(self, field: str, value: Any, message: str):
        self._collector._add_error(field, value, message, self.__class__.__name__)

    def _warning(self, field: str, value: Any, message: str):
        self._collector._add_warning(field, value, message, self.__class__.__name__)

    def _info(self, field: str, value: Any, message: str):
        self._collector._add_info(field, value, message, self.__class__.__name__)

    def _check_children(
        self,
        parent_config_class,
        config_id: str,
        config_key: str,
        config_value,
        child_config_class,
        can_be_empty: Optional[bool] = False,
    ):
        """Check that *config_value* is a list/set of *child_config_class* instances.

        An empty value raises a warning unless *can_be_empty* is True.
        """
        if not config_value and not can_be_empty:
            self._warning(
                config_key,
                config_value,
                f"{config_key} field of {parent_config_class.__name__} `{config_id}` is empty.",
            )
        elif not (
            # Builtin types as isinstance targets: typing.List/typing.Set are
            # deprecated for runtime checks.
            isinstance(config_value, (list, set))
            and all(isinstance(child, child_config_class) for child in config_value)
        ):
            self._error(
                config_key,
                config_value,
                f"{config_key} field of {parent_config_class.__name__} `{config_id}` must be populated with a list "
                f"of {child_config_class.__name__} objects.",
            )

    def _check_existing_config_id(self, config):
        """Check that the given section has a non-empty config id."""
        if not config.id:
            self._error(
                "config_id",
                config.id,
                f"config_id of {config.__class__.__name__} `{config.id}` is empty.",
            )

    def _check_if_entity_property_key_used_is_predefined(self, config):
        """Check that no user property collides with a reserved property key."""
        for key, value in config._properties.items():
            if key in self._PREDEFINED_PROPERTIES_KEYS:
                self._error(
                    key,
                    value,
                    f"Properties of {config.__class__.__name__} `{config.id}` cannot have `{key}` as its property.",
                )
from ..._config import _Config
from ..issue_collector import IssueCollector
from ._config_checker import _ConfigChecker


class _AuthConfigChecker(_ConfigChecker):
    """Checks the authentication section of the configuration."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run protocol-specific checks on the auth config and return the collector."""
        auth_config = self._config._auth_config
        self._check_predefined_protocol(auth_config)
        return self._collector

    def _check_predefined_protocol(self, auth_config):
        # Dispatch to the checker matching the configured protocol.
        if auth_config.protocol == auth_config._PROTOCOL_LDAP:
            self.__check_ldap(auth_config)
        if auth_config.protocol == auth_config._PROTOCOL_TAIPY:
            self.__check_taipy(auth_config)

    def __check_taipy(self, auth_config):
        # Bug fix: this error previously reported the LDAP server property and the
        # LDAP protocol (copy-paste from __check_ldap) instead of the taipy roles
        # property and the taipy protocol.
        if auth_config._TAIPY_ROLES not in auth_config.properties:
            self._error(
                "properties",
                auth_config._TAIPY_ROLES,
                f"`{auth_config._TAIPY_ROLES}` property must be populated when {auth_config._PROTOCOL_TAIPY} is used.",
            )
        if auth_config._TAIPY_PWD not in auth_config.properties:
            self._warning(
                "properties",
                auth_config._TAIPY_PWD,
                f"In order to protect authentication with passwords using {auth_config._PROTOCOL_TAIPY} protocol,"
                f" the `{auth_config._TAIPY_PWD}` property can be populated.",
            )

    def __check_ldap(self, auth_config):
        if auth_config._LDAP_SERVER not in auth_config.properties:
            self._error(
                "properties",
                auth_config._LDAP_SERVER,
                f"`{auth_config._LDAP_SERVER}` property must be populated when {auth_config._PROTOCOL_LDAP} is used.",
            )
        if auth_config._LDAP_BASE_DN not in auth_config.properties:
            self._error(
                "properties",
                auth_config._LDAP_BASE_DN,
                f"`{auth_config._LDAP_BASE_DN}` property must be populated when {auth_config._PROTOCOL_LDAP} is used.",
            )
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .exceptions import *
class LoadingError(Exception):
    """Raised when the configuration file cannot be loaded."""


class InconsistentEnvVariableError(Exception):
    """Raised when an environment variable referenced by the configuration holds an inconsistent value."""


class MissingEnvVariableError(Exception):
    """Raised when an environment variable referenced by the configuration is not set."""


class InvalidConfigurationId(Exception):
    """Raised when a configuration id is not a valid identifier."""


class ConfigurationUpdateBlocked(Exception):
    """Raised when a configuration update is blocked by other running Taipy services."""
from ..common._repr_enum import _ReprEnum


class Frequency(_ReprEnum):
    """Frequency of the recurrence of `Cycle^` and `Scenario^` objects.

    The frequency must be provided in the `ScenarioConfig^`.

    Each recurrent scenario is attached to the cycle corresponding to the creation date
    and the frequency. In other words, each cycle represents an iteration and contains
    the various scenarios created during this iteration.

    For instance, when scenarios have a _MONTHLY_ frequency, one cycle will be created
    for each month (January, February, March, etc.). A new scenario created on February
    10th, gets attached to the _February_ cycle.

    The frequency is implemented as an enumeration with the following possible values:

    - With a _DAILY_ frequency, a new cycle is created for each day.
    - With a _WEEKLY_ frequency, a new cycle is created for each week (from Monday to Sunday).
    - With a _MONTHLY_ frequency, a new cycle is created for each month.
    - With a _QUARTERLY_ frequency, a new cycle is created for each quarter.
    - With a _YEARLY_ frequency, a new cycle is created for each year.
    """

    # Values are ordered from the shortest to the longest period.
    DAILY = 1
    WEEKLY = 2
    MONTHLY = 3
    QUARTERLY = 4
    YEARLY = 5
class _Classproperty(object): def __init__(self, f): self.f = f def __get__(self, obj, owner): return self.f(owner)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import functools from enum import Enum class _ReprEnum(Enum): @classmethod @functools.lru_cache def _from_repr(cls, repr_: str): return next(filter(lambda e: repr(e) == repr_, cls)) # type: ignore
import keyword

from ..exceptions.exceptions import InvalidConfigurationId

__INVALID_TAIPY_ID_TERMS = ["CYCLE", "SCENARIO", "SEQUENCE", "TASK", "DATANODE"]


def _validate_id(name: str):
    """Check that *name* is a valid configuration identifier and return it.

    Raises:
        InvalidConfigurationId: If *name* contains a restricted Taipy term, is not
            a valid Python identifier, or is a Python keyword.
    """
    for restricted_term in __INVALID_TAIPY_ID_TERMS:
        if restricted_term in name:
            raise InvalidConfigurationId(f"{name} is not a valid identifier. {restricted_term} is restricted.")
    if not name.isidentifier() or keyword.iskeyword(name):
        raise InvalidConfigurationId(f"{name} is not a valid identifier.")
    return name
import functools

from ...logger._taipy_logger import _TaipyLogger
from ..exceptions.exceptions import ConfigurationUpdateBlocked


class _ConfigBlocker:
    """Configuration blocker singleton."""

    __logger = _TaipyLogger._get_logger()
    __block_config_update = False

    @classmethod
    def _block(cls):
        """Forbid any further configuration update."""
        cls.__block_config_update = True

    @classmethod
    def _unblock(cls):
        """Allow configuration updates again."""
        cls.__block_config_update = False

    @classmethod
    def _check(cls):
        """Build a decorator rejecting calls while configuration updates are blocked."""

        def inner(f):
            @functools.wraps(f)
            def _check_if_is_blocking(*args, **kwargs):
                # Fast path: updates allowed, just delegate.
                if not cls.__block_config_update:
                    return f(*args, **kwargs)
                error_message = (
                    "The Core service should be stopped by running core.stop() before"
                    " modifying the Configuration. For more information, please refer to:"
                    " https://docs.taipy.io/en/latest/manuals/running_services/#running-core."
                )
                cls.__logger.error("ConfigurationUpdateBlocked: " + error_message)
                raise ConfigurationUpdateBlocked(error_message)

            return _check_if_is_blocking

        return inner
from ..common._repr_enum import _ReprEnum


class _OrderedEnum(_ReprEnum):
    """Enum base providing value-based ordering between members of the same class."""

    def __ge__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value >= other.value

    def __gt__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value > other.value

    def __le__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value <= other.value

    def __lt__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value < other.value


class Scope(_OrderedEnum):
    """Scope of a `DataNode^`.

    This enumeration can have the following values:

    - `GLOBAL`
    - `CYCLE`
    - `SCENARIO`
    """

    # Wider scopes compare greater than narrower ones.
    GLOBAL = 3
    CYCLE = 2
    SCENARIO = 1
import os
import re
from collections import UserDict
from datetime import datetime, timedelta
from importlib import import_module
from operator import attrgetter
from pydoc import locate

from ..exceptions.exceptions import InconsistentEnvVariableError, MissingEnvVariableError
from .frequency import Frequency
from .scope import Scope


class _TemplateHandler:
    """Factory to handle actions related to config value templating."""

    # Matches "ENV[VAR_NAME]" with an optional ":bool|str|float|int" dynamic type suffix.
    _PATTERN = r"^ENV\[([a-zA-Z_]\w*)\](:(\bbool\b|\bstr\b|\bfloat\b|\bint\b))?$"

    @classmethod
    def _replace_templates(cls, template, type=str, required=True, default=None):
        """Recursively substitute "ENV[...]" templates in *template*.

        Tuples, lists and (User)dicts are processed element by element; any other
        value is treated as a single scalar template.
        """
        if isinstance(template, tuple):
            return tuple(cls._replace_template(item, type, required, default) for item in template)
        if isinstance(template, list):
            return [cls._replace_template(item, type, required, default) for item in template]
        # Both mapping flavors collapse to a plain dict with stringified keys.
        if isinstance(template, (dict, UserDict)):
            return {str(k): cls._replace_template(v, type, required, default) for k, v in template.items()}
        return cls._replace_template(template, type, required, default)

    @classmethod
    def _replace_template(cls, template, type, required, default):
        """Substitute one scalar "ENV[...]" template with its environment value.

        Returns *template* unchanged when it is not an environment template.

        Raises:
            MissingEnvVariableError: If the variable is unset and *required* is True.
        """
        if "ENV" not in str(template):
            return template
        match = re.fullmatch(cls._PATTERN, str(template))
        if not match:
            return template
        var = match.group(1)
        dynamic_type = match.group(3)
        val = os.environ.get(var)
        if val is None:
            if required:
                raise MissingEnvVariableError(f"Environment variable {var} is not set.")
            return default
        # The static *type* argument takes precedence over the ":type" suffix.
        if type == bool:
            return cls._to_bool(val)
        if type == int:
            return cls._to_int(val)
        if type == float:
            return cls._to_float(val)
        if type == Scope:
            return cls._to_scope(val)
        if type == Frequency:
            return cls._to_frequency(val)
        if dynamic_type == "bool":
            return cls._to_bool(val)
        if dynamic_type == "int":
            return cls._to_int(val)
        if dynamic_type == "float":
            return cls._to_float(val)
        return val

    @staticmethod
    def _to_bool(val: str) -> bool:
        """Convert "true"/"false" (case-insensitive) to a bool."""
        lowered = val.lower()
        if lowered not in ("true", "false"):
            # Bug fix: the original message lacked the f prefix and printed "{val}" literally.
            raise InconsistentEnvVariableError(f"{val} is not a Boolean.")
        return lowered == "true"

    @staticmethod
    def _to_int(val: str) -> int:
        try:
            return int(val)
        except ValueError as e:
            raise InconsistentEnvVariableError(f"{val} is not an integer.") from e

    @staticmethod
    def _to_float(val: str) -> float:
        try:
            return float(val)
        except ValueError as e:
            raise InconsistentEnvVariableError(f"{val} is not a float.") from e

    @staticmethod
    def _to_datetime(val: str) -> datetime:
        try:
            return datetime.fromisoformat(val)
        except ValueError as e:
            raise InconsistentEnvVariableError(f"{val} is not a valid datetime.") from e

    @staticmethod
    def _to_timedelta(val: str) -> timedelta:
        """Parse a duration string (e.g. "2h13m", "1.5d") into a timedelta.

        Raises:
            InconsistentEnvVariableError: If *val* does not match the d/h/m/s pattern.
        """
        regex = re.compile(
            r"^((?P<days>[\.\d]+?)d)? *"
            r"((?P<hours>[\.\d]+?)h)? *"
            r"((?P<minutes>[\.\d]+?)m)? *"
            r"((?P<seconds>[\.\d]+?)s)?$"
        )
        parts = regex.match(val)
        if not parts:
            raise InconsistentEnvVariableError(f"{val} is not a valid timedelta.")
        time_params = {name: float(param) for name, param in parts.groupdict().items() if param}
        return timedelta(**time_params)  # type: ignore

    @staticmethod
    def _to_scope(val: str) -> Scope:
        try:
            return Scope[str.upper(val)]
        except Exception as e:
            raise InconsistentEnvVariableError(f"{val} is not a valid scope.") from e

    @staticmethod
    def _to_frequency(val: str) -> Frequency:
        try:
            return Frequency[str.upper(val)]
        except Exception as e:
            raise InconsistentEnvVariableError(f"{val} is not a valid frequency.") from e

    @staticmethod
    def _to_function(val: str):
        """Resolve the dotted path "module.attr" to the function it names."""
        module_name, fct_name = val.rsplit(".", 1)
        try:
            module = import_module(module_name)
            return attrgetter(fct_name)(module)
        except Exception as e:
            raise InconsistentEnvVariableError(f"{val} is not a valid function.") from e

    @staticmethod
    def _to_class(val: str):
        """Locate the class named by the dotted path *val*.

        NOTE(review): pydoc.locate returns None (without raising) when the path does
        not resolve; that original behavior is preserved.
        """
        try:
            return locate(val)
        except Exception as e:
            raise InconsistentEnvVariableError(f"{val} is not a valid class.") from e
from __future__ import annotations from typing import Any, Dict, Optional, Union from ..common._config_blocker import _ConfigBlocker from ..common._template_handler import _TemplateHandler as _tpl class GlobalAppConfig: """ Configuration fields related to the global application. Attributes: **properties (Dict[str, Any]): A dictionary of additional properties. """ def __init__(self, **properties): self._properties = properties @property def properties(self): return {k: _tpl._replace_templates(v) for k, v in self._properties.items()} @properties.setter # type: ignore @_ConfigBlocker._check() def properties(self, val): self._properties = val def __getattr__(self, item: str) -> Optional[Any]: return _tpl._replace_templates(self._properties.get(item)) @classmethod def default_config(cls) -> GlobalAppConfig: return GlobalAppConfig() def _clean(self): self._properties.clear() def _to_dict(self): as_dict = {} as_dict.update(self._properties) return as_dict @classmethod def _from_dict(cls, config_as_dict: Dict[str, Any]): config = GlobalAppConfig() config._properties = config_as_dict return config def _update(self, config_as_dict): self._properties.update(config_as_dict)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import argparse from typing import Dict class _CLI: """Argument parser for Taipy application.""" # The conflict_handler is set to "resolve" to override conflict arguments _subparser_action = None _parser = argparse.ArgumentParser(conflict_handler="resolve") _sub_taipyparsers: Dict[str, argparse.ArgumentParser] = {} _arg_groups: Dict[str, argparse._ArgumentGroup] = {} @classmethod def _add_subparser(cls, name: str, **kwargs) -> argparse.ArgumentParser: """Create a new subparser and return a argparse handler.""" if subparser := cls._sub_taipyparsers.get(name): return subparser if not cls._subparser_action: cls._subparser_action = cls._parser.add_subparsers() subparser = cls._subparser_action.add_parser( name=name, conflict_handler="resolve", **kwargs, ) cls._sub_taipyparsers[name] = subparser subparser.set_defaults(which=name) return subparser @classmethod def _add_groupparser(cls, title: str, description: str = "") -> argparse._ArgumentGroup: """Create a new group for arguments and return a argparser handler.""" if groupparser := cls._arg_groups.get(title): return groupparser groupparser = cls._parser.add_argument_group(title=title, description=description) cls._arg_groups[title] = groupparser return groupparser @classmethod def _parse(cls): """Parse and return only known arguments.""" args, _ = cls._parser.parse_known_args() return args @classmethod def _remove_argument(cls, arg: str): """Remove an argument from the parser. Note that the `arg` must be without --. Source: https://stackoverflow.com/questions/32807319/disable-remove-argument-in-argparse """ for action in cls._parser._actions: opts = action.option_strings if (opts and opts[0] == arg) or action.dest == arg: cls._parser._remove_action(action) break for argument_group in cls._parser._action_groups: for group_action in argument_group._group_actions: opts = group_action.option_strings if (opts and opts[0] == arg) or group_action.dest == arg: argument_group._group_actions.remove(group_action) return
from ._cli import _CLI
"""The setup script.""" import json import os from setuptools import find_namespace_packages, find_packages, setup with open("README.md", "rb") as readme_file: readme = readme_file.read().decode("UTF-8") with open(f"src{os.sep}taipy{os.sep}templates{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" test_requirements = ["pytest>=3.8"] setup( author="Avaiga", author_email="dev@taipy.io", python_requires=">=3.8", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ], description="An open-source package holding Taipy application templates.", license="Apache License 2.0", long_description=readme, long_description_content_type="text/markdown", keywords="taipy-templates", name="taipy-templates", package_dir={"": "src"}, packages=find_namespace_packages(where="src") + find_packages(include=["taipy"]), include_package_data=True, test_suite="tests", url="https://github.com/avaiga/taipy-templates", version=version_string, zip_safe=False, )
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import subprocess
import sys


def _run_template(main_path, time_out=30):
    """Run the templates on a subprocess and get stdout after timeout"""
    command = [sys.executable, main_path]
    with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
        try:
            stdout, stderr = proc.communicate(timeout=time_out)
        except subprocess.TimeoutExpired:
            # The template app is a long-running server: kill it after the
            # timeout and collect whatever output it produced so far.
            proc.kill()
            stdout, stderr = proc.communicate()

    # Print the error if there is any (for debugging)
    decoded_stderr = str(stderr, "utf-8")
    if decoded_stderr:
        print(decoded_stderr)

    return stdout
import os

from cookiecutter.main import cookiecutter

from .utils import _run_template


def test_scenario_management_with_toml_config(tmpdir):
    """Generate the scenario-management template with TOML config and run it."""
    cookiecutter(
        template="src/taipy/templates/scenario-management",
        output_dir=tmpdir,
        no_input=True,
        extra_context={
            "Application root folder name": "foo_app",
            "Application main Python file": "main.py",
            "Application title": "bar",
            "Does the application use TOML Config?": "yes",
        },
    )

    assert os.listdir(tmpdir) == ["foo_app"]
    # BUG FIX: list.sort() returns None, so the previous
    # `listdir(...).sort() == [...].sort()` comparisons compared None to None
    # and always passed. Compare sorted copies so the assertion is real.
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app"))) == sorted(
        ["requirements.txt", "main.py", "algos", "config", "pages"]
    )

    # Assert post_gen_project hook is successful
    with open(os.path.join(tmpdir, "foo_app", "requirements.txt")) as requirements_file:
        assert "taipy==" in requirements_file.read()

    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app", "config"))) == sorted(
        ["__init__.py", "config.py", "config.toml"]
    )
    with open(os.path.join(tmpdir, "foo_app", "config", "config.py")) as config_file:
        assert 'Config.load("config/config.toml")' in config_file.read()

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "foo_app"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the messages when the application is run successfully are in the stdout
    assert "[Taipy][INFO] Configuration 'config/config.toml' successfully loaded." in str(stdout, "utf-8")
    assert "[Taipy][INFO]  * Server starting on" in str(stdout, "utf-8")


def test_scenario_management_without_toml_config(tmpdir):
    """Generate the scenario-management template without TOML config and run it."""
    cookiecutter(
        template="src/taipy/templates/scenario-management",
        output_dir=tmpdir,
        no_input=True,
        extra_context={
            "Application root folder name": "foo_app",
            "Application main Python file": "main.py",
            "Application title": "bar",
            "Does the application use TOML Config?": "no",
        },
    )

    assert os.listdir(tmpdir) == ["foo_app"]
    # Same fix as above: compare sorted lists, not the None results of .sort().
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app"))) == sorted(
        ["requirements.txt", "main.py", "algos", "config", "pages"]
    )

    # Assert post_gen_project hook is successful
    with open(os.path.join(tmpdir, "foo_app", "requirements.txt")) as requirements_file:
        assert "taipy==" in requirements_file.read()

    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app", "config"))) == sorted(["__init__.py", "config.py"])
    with open(os.path.join(tmpdir, "foo_app", "config", "config.py")) as config_file:
        config_content = config_file.read()
    assert 'Config.load("config/config.toml")' not in config_content
    assert all(x in config_content for x in ["Config.configure_csv_data_node", "Config.configure_task"])

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "foo_app"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the message when the application is run successfully is in the stdout
    assert "[Taipy][INFO]  * Server starting on" in str(stdout, "utf-8")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from config.config import configure
from pages import job_page, scenario_page
from pages.root import content, root, selected_data_node, selected_scenario

import taipy as tp
from taipy import Core, Gui


def on_init(state):
    # Hook called once when a new client connects; intentionally empty here.
    ...


def on_change(state, var, val):
    # When the user picks a data node, delegate to the scenario page so it can
    # rebuild its data-node partial.
    # NOTE(review): state["scenario"] appears to index the state of the
    # "scenario" page registered below — confirm against taipy.gui docs.
    if var == "selected_data_node" and val:
        state["scenario"].manage_data_node_partial(state)


# Route table: URL path -> page content.
pages = {
    "/": root,
    "scenario": scenario_page,
    "jobs": job_page,
}

if __name__ == "__main__":
    # Instantiate, configure and run the Core
    core = Core()
    default_scenario_cfg = configure()
    core.run()

    # ##################################################################################################################
    # PLACEHOLDER: Initialize your data application here                                                               #
    #                                                                                                                  #
    # Example:                                                                                                         #
    if len(tp.get_scenarios()) == 0:
        tp.create_scenario(default_scenario_cfg, name="Default Scenario")
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################

    # Instantiate, configure and run the GUI
    gui = Gui(pages=pages)
    # Empty partial; its content is filled dynamically per selected data node.
    data_node_partial = gui.add_partial("")
    gui.run(title="{{cookiecutter.__application_title}}", margin="0em")
from algos import clean_data

from taipy import Config, Frequency, Scope


def configure():
    """Declare the application's scenario configuration and return it.

    Builds a daily scenario made of one task that fills missing values of a
    CSV dataset with a configurable replacement value.
    """
    # ##################################################################################################################
    # PLACEHOLDER: Add your scenario configurations here                                                               #
    #                                                                                                                  #
    # Example:                                                                                                         #
    # CYCLE scope: the initial dataset is shared by all scenarios of a cycle.
    initial_dataset_cfg = Config.configure_csv_data_node("initial_dataset", scope=Scope.CYCLE)
    replacement_type_cfg = Config.configure_data_node("replacement_type", default_data="NO VALUE")
    cleaned_dataset_cfg = Config.configure_csv_data_node("cleaned_dataset")
    clean_data_cfg = Config.configure_task(
        "clean_data",
        function=clean_data,
        input=[initial_dataset_cfg, replacement_type_cfg],
        output=cleaned_dataset_cfg,
    )
    scenario_cfg = Config.configure_scenario(
        "scenario_configuration", task_configs=[clean_data_cfg], frequency=Frequency.DAILY
    )
    return scenario_cfg
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################
from taipy import Config


def configure():
    """Load the TOML configuration file and return the scenario configuration."""
    Config.load("config/config.toml")
    scenario_cfg = Config.scenarios["scenario_configuration"]
    return scenario_cfg
def clean_data(df, replacement_type):
    """Return a copy of *df* where every missing value is replaced by *replacement_type*."""
    return df.fillna(replacement_type)
from .algos import clean_data
from .scenario_page import scenario_page from .job_page import job_page
from taipy.gui import Markdown

# Shared GUI state: the scenario and data node currently selected in the UI.
selected_scenario = None
selected_data_node = None
# Dynamic content placeholder bound from the root Markdown template.
content = ""

# The root page is rendered from the root.md template file.
root = Markdown("pages/root.md")
from .job_page import job_page
from taipy.gui import Markdown

# Jobs page, rendered from its Markdown template file.
job_page = Markdown("pages/job_page/job_page.md")
from taipy.gui import Markdown, notify

from .data_node_management import manage_partial


def notify_on_submission(state, submitable, details):
    """Show a toast notification reflecting the submission status.

    Args:
        state: The GUI state of the client.
        submitable: The submitted entity (unused here).
        details: Dict carrying a 'submission_status' key.
    """
    if details['submission_status'] == 'COMPLETED':
        # Typo fix: the message previously read "Submision completed!".
        notify(state, "success", "Submission completed!")
    elif details['submission_status'] == 'FAILED':
        notify(state, "error", "Submission failed!")
    else:
        notify(state, "info", "In progress...")


def manage_data_node_partial(state):
    """Rebuild the data-node partial for the currently selected data node."""
    manage_partial(state)


# Scenario page, rendered from its Markdown template file.
scenario_page = Markdown("pages/scenario_page/scenario_page.md")
from .scenario_page import scenario_page
# build partial content for a specific data node
def build_dn_partial(dn, dn_label):
    """Return the Markdown content of the partial displayed for data node *dn*."""
    parts = ["<|part|render={selected_scenario}|\n\n"]

    # ##################################################################################################################
    # PLACEHOLDER: data node specific content before automatic content                                                 #
    #                                                                                                                  #
    # Example:                                                                                                         #
    if dn_label == "replacement_type":
        parts.append("All missing values will be replaced by the data node value.")
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################

    # Automatic data node content
    parts.append(
        "<|{selected_scenario.data_nodes['" + dn.config_id + "']}|data_node|scenario={selected_scenario}|>\n\n"
    )

    # ##################################################################################################################
    # PLACEHOLDER: data node specific content after automatic content                                                  #
    #                                                                                                                  #
    # Example:                                                                                                         #
    if dn_label == "initial_dataset":
        parts.append("Select your CSV file: <|{selected_data_node.path}|file_selector|extensions=.csv|on_action={lambda s: s.refresh('selected_scenario')}|>\n\n")
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################

    parts.append("|>\n\n")
    return "".join(parts)


def manage_partial(state):
    """Rebuild and push the partial content for the data node selected in *state*."""
    dn = state.selected_data_node
    dn_label = dn.get_simple_label()
    partial_content = build_dn_partial(dn, dn_label)
    state.data_node_partial.update_content(state, partial_content)
import os

import taipy

cwd = os.getcwd()

# Pin the generated app to the taipy version used to create it.
with open(os.path.join(cwd, "requirements.txt"), "a") as requirement_file:
    requirement_file.write(f"taipy=={taipy.version._get_version()}\n")

# Keep either the TOML-based config module or the pure-Python one,
# depending on the cookiecutter answer.
use_toml_config = "{{ cookiecutter.__use_toml_config }}".upper()
if use_toml_config in ("YES", "Y"):
    os.remove(os.path.join(cwd, "config", "config.py"))
    os.rename(os.path.join(cwd, "config", "config_with_toml.py"), os.path.join(cwd, "config", "config.py"))
else:
    os.remove(os.path.join(cwd, "config", "config_with_toml.py"))
    os.remove(os.path.join(cwd, "config", "config.toml"))

main_file_name = "{{cookiecutter.__main_file}}.py"

print(
    f"New Taipy application has been created at {os.path.join(os.getcwd())}"
    f"\n\nTo start the application, change directory to the newly created folder:"
    f"\n\tcd {os.path.join(os.getcwd())}"
    f"\nand run the application as follows:"
    f"\n\ttaipy run {main_file_name}"
)
""" Contain the application's configuration including the scenario configurations. The configuration is run by the Core service. """ from algorithms import * from taipy import Config # ############################################################################# # PLACEHOLDER: Put your application's configurations here # # # # Example: # # scenario_config = Config.configure_scenario("placeholder_scenario", []) # # Comment, remove or replace the previous lines with your own use case # # #############################################################################
from .config import *
""" This file is designed to contain the various Python functions used to configure tasks. The functions will be imported by the __init__.py file in this folder. """ # ################################################################################################################## # PLACEHOLDER: Put your Python functions here # # # # Example: # # def place_holder_algorithm(): # # pass # # Comment, remove or replace the previous lines with your own use case # # ##################################################################################################################
from algorithms import *
from .root import root_page
""" The root page of the application. Page content is imported from the root.md file. Please refer to https://docs.taipy.io/en/latest/manuals/gui/pages for more details. """ from taipy.gui import Markdown root_page = Markdown("pages/root.md")
""" A page of the application. Page content is imported from the page_example.md file. Please refer to https://docs.taipy.io/en/latest/manuals/gui/pages for more details. """ from taipy.gui import Markdown page_example = Markdown("pages/page_example/page_example.md")
import sys

# Page names are provided space-separated by the cookiecutter prompt.
raw_names = "{{ cookiecutter.__pages }}".split(" ")

# Remove empty string from pages list
pages = [name for name in raw_names if name != ""]

# Each page becomes a Python module, so its name must be a valid identifier;
# abort the generation otherwise.
for name in pages:
    if not name.isidentifier():
        sys.exit(f'Page name "{name}" is not a valid Python identifier. Please choose another name.')
import os
import threading

from flask import Flask
from pyngrok import ngrok
from hf_hub_ctranslate2 import GeneratorCT2fromHfHub
from flask import request, jsonify

model_name = "taipy5-ct2" # note this is local folder model, the model uploaded to huggingface did not response correctly
#model_name = "michaelfeil/ct2fast-starchat-alpha"
#model_name = "michaelfeil/ct2fast-starchat-beta"

model = GeneratorCT2fromHfHub(
    # load in int8 on CUDA
    model_name_or_path=model_name,
    device="cuda",
    compute_type="int8_float16",
    # tokenizer=AutoTokenizer.from_pretrained("{ORG}/{NAME}")
)


def generate_text_batch(prompt_texts, max_length=64):
    """Generate completions for a batch of prompts.

    The prompt itself is excluded from the returned text.
    """
    outputs = model.generate(prompt_texts, max_length=max_length, include_prompt_in_result=False)
    return outputs


app = Flask(__name__)
port = "5000"

# Open a ngrok tunnel to the HTTP server so the local Flask app is reachable
# from outside (e.g. from a Colab notebook).
public_url = ngrok.connect(port).public_url
print(" * ngrok tunnel \"{}\" -> \"http://127.0.0.1:{}\"".format(public_url, port))

# Update any base URLs to use the public ngrok URL
app.config["BASE_URL"] = public_url
# ... Update inbound traffic via APIs to use the public-facing ngrok URL

# Define Flask routes
@app.route("/")
def index():
    # Simple liveness endpoint.
    return "Hello from Colab!"


@app.route("/api/generate", methods=["POST"])
def generate_code():
    """Generate text from a JSON payload: {"inputs": ..., "parameters": {...}}."""
    try:
        # Get the JSON data from the request body
        data = request.get_json()

        # Extract 'inputs' and 'parameters' from the JSON data
        inputs = data.get('inputs', "")
        parameters = data.get('parameters', {})

        # Extract the 'max_new_tokens' parameter
        max_new_tokens = parameters.get('max_new_tokens', 64)

        # Call the generate_text_batch function with inputs and max_new_tokens
        generated_text = generate_text_batch([inputs], max_new_tokens)[0]

        return jsonify({
            "generated_text": generated_text,
            "status": 200
        })
    except Exception as e:
        # NOTE(review): errors are returned with HTTP 200 and an "error" key;
        # callers must check the body, not the status code.
        return jsonify({"error": str(e)})


# Start the Flask server in a new thread
threading.Thread(target=app.run, kwargs={"use_reloader": False}).start()
import os
import json


def tokenize_code(code, max_characters=256):
    """
    Tokenize code into snippets of specified max_characters, breaking at new lines.
    """
    snippets = []
    buffer = ""
    for line in code.split('\n'):
        # +1 accounts for the newline appended to each kept line.
        if len(buffer) + len(line) + 1 <= max_characters:
            buffer += line + '\n'
        else:
            snippets.append(buffer.strip())
            buffer = line + '\n'
    if buffer:
        snippets.append(buffer.strip())
    return snippets


def process_file(file_path):
    """
    Read a file, tokenize the code, and create snippets.
    """
    with open(file_path, 'r', encoding='utf-8') as file:
        return tokenize_code(file.read())


def escape_string(s):
    """
    Do not escape triple quotes, double quotes, single quotes, and new lines.
    """
    return s


def main(input_folder, output_file):
    """Walk *input_folder*, snippet every .py/.md file, and write a JSONL file."""
    snippet_rows = []
    for root, _dirs, files in os.walk(input_folder):
        for name in files:
            if name.endswith(('.py', '.md')):
                path = os.path.join(root, name)
                for snippet in process_file(path):
                    snippet_rows.append({'text': escape_string(snippet)})

    # One JSON object per line: {"text": "<snippet>"}.
    with open(output_file, 'w', encoding='utf-8') as jsonl_file:
        for row in snippet_rows:
            jsonl_file.write(json.dumps(row) + '\n')


if __name__ == "__main__":
    input_folder = 'taipy'  # replace with your folder path
    output_file = 'snippets.jsonl'  # replace with your desired output file path
    main(input_folder, output_file)
from taipy.gui import Gui
from tensorflow.keras import models
from PIL import Image
import numpy as np

# Pre-trained CIFAR-10 style classifier (10 classes, 32x32 RGB inputs).
model = models.load_model("assets/baseline.keras")

# Class index -> human-readable label.
class_names = {
    0: "airplane",
    1: "automobile",
    2: "bird",
    3: "cat",
    4: "deer",
    5: "dog",
    6: "frog",
    7: "horse",
    8: "ship",
    9: "truck",
}


def predict_image(model, path_to_image):
    """Classify the image at *path_to_image*; return (confidence, label)."""
    img = Image.open(path_to_image)
    img = img.convert("RGB").resize((32, 32))

    # Normalizing image
    data = np.asarray(img)
    print("Before: ", data[0][0])  # Printing color of very first pixel
    data = data / 255
    # Comparing stuff to see if we broke something
    print("After: ", data[0][0])  # Printing color of very first pixel

    # Tricking model into thinking it is looking at an array of sample images and not a single image
    probability = model.predict(np.array([data])[:1])
    probes = probability.max()
    prediction = class_names[np.argmax(probability)]

    return (probes, prediction)


image_path = "assets/placeholder_image.png"
prediction, prob, content = "", "", ""

# Taipy GUI page: logo, file selector, predicted label, image, confidence gauge.
image_control_component = """
<|text-center|
<|{"assets/logo.png"}|image|width=10vw|height=25vh|>

<|{content}|file_selector|extensions=.png|>
Select an image!

<|{prediction}|>

<|{image_path}|image|>

<|{prob}|indicator|value={prob}|min=0|max=100|width=25vw|>
>
"""

index = image_control_component


def on_change(state, variable_name, variable_value):
    # Triggered when the user selects a file: run the model and update the UI.
    if variable_name == "content":
        state.image_path = variable_value
        probes, prediction = predict_image(model, variable_value)
        state.prob = round(probes * 100)  # Converting decimal to percentage
        state.prediction = f"This is a : {prediction}"


app = Gui(page=index)

if __name__ == "__main__":
    app.run(use_reloader=True)
from taipy.gui import Gui
from tensorflow.keras import models
from PIL import Image
import numpy as np

# Variant of the classifier app using a locally stored model file.
model = models.load_model("baseline_mariya.keras")

# Class index -> human-readable label.
class_names = {
    0: "airplane",
    1: "automobile",
    2: "bird",
    3: "cat",
    4: "deer",
    5: "dog",
    6: "frog",
    7: "horse",
    8: "ship",
    9: "truck",
}


def predict_image(model, path_to_image):
    """Classify the image at *path_to_image*; return (confidence, label)."""
    img = Image.open(path_to_image)
    img = img.convert("RGB").resize((32, 32))

    # Normalizing image
    data = np.asarray(img)
    print("Before: ", data[0][0])  # Printing color of very first pixel
    data = data / 255
    # Comparing stuff to see if we broke something
    print("After: ", data[0][0])  # Printing color of very first pixel

    # Tricking model into thinking it is looking at an array of sample images and not a single image
    probability = model.predict(np.array([data])[:1])
    print(probability)
    probes = probability.max()
    prediction = class_names[np.argmax(probability)]

    return (probes, prediction)


image_path = "placeholder_image.png"
prediction, prob, content = "", "", ""

# Taipy GUI page: logo, file selector, predicted label, image, confidence gauge.
image_control_component = """
<|text-center|
<|{"logo.png"}|image|width=25vw|>

<|{content}|file_selector|extensions=.png|>
Select an image!

<|{prediction}|>

<|{image_path}|image|>

<|{prob}|indicator|value={prob}|min=0|max=100|width=25vw|>
>
"""

index = image_control_component


def on_change(state, variable_name, variable_value):
    # Triggered when the user selects a file: run the model and update the UI.
    if variable_name == "content":
        state.image_path = variable_value
        probes, prediction = predict_image(model, variable_value)
        state.prob = round(probes * 100)  # Converting decimal to percentage
        state.prediction = f"This is a : {prediction}"


app = Gui(page=index)

if __name__ == "__main__":
    app.run(use_reloader=True)
print("Hello, World!") print("Hi Taipy!")
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import json
import os

from setuptools import find_namespace_packages, find_packages, setup

# The README is the long description shown on PyPI.
with open("README.md") as readme_file:
    readme = readme_file.read()

# The version is maintained in a version.json file next to the package.
with open(f"src{os.sep}taipy{os.sep}rest{os.sep}version.json") as version_file:
    version = json.load(version_file)
    version_string = ".".join(str(version.get(part, 0)) for part in ("major", "minor", "patch"))
    if vext := version.get("ext"):
        version_string = f"{version_string}.{vext}"

setup(
    author="Avaiga",
    name="taipy-rest",
    keywords="taipy-rest",
    python_requires=">=3.8",
    version=version_string,
    author_email="dev@taipy.io",
    packages=find_namespace_packages(where="src") + find_packages(include=["taipy", "taipy.rest"]),
    package_dir={"": "src"},
    include_package_data=True,
    long_description=readme,
    long_description_content_type="text/markdown",
    description="Library to expose taipy-core REST APIs.",
    license="Apache License 2.0",
    classifiers=[
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
    ],
    install_requires=[
        "flask>=3.0.0,<3.1",
        "flask-restful>=0.3.9,<0.4",
        "passlib>=1.7.4,<1.8",
        "marshmallow>=3.20.1,<3.30",
        "apispec[yaml]>=6.3,<7.0",
        "apispec-webframeworks>=0.5.2,<0.6",
        "taipy-core@git+https://git@github.com/Avaiga/taipy-core.git@develop",
    ],
)
from unittest import mock

import pytest
from flask import url_for

from src.taipy.rest.api.exceptions.exceptions import ScenarioIdMissingException, SequenceNameMissingException
from taipy.core.exceptions.exceptions import NonExistingScenario
from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory


def test_get_sequence(client, default_sequence):
    """GET /sequences/<id>: 404 for an unknown id, 200 when the manager resolves one."""
    # test 404
    user_url = url_for("api.sequence_by_id", sequence_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.sequence._sequence_manager._SequenceManager._get") as manager_mock:
        manager_mock.return_value = default_sequence

        # test get_sequence
        rep = client.get(url_for("api.sequence_by_id", sequence_id="foo"))
        assert rep.status_code == 200


def test_delete_sequence(client):
    """DELETE /sequences/<id>: 404 for unknown id, 200 when deletion is mocked out."""
    # test 404
    user_url = url_for("api.sequence_by_id", sequence_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.sequence._sequence_manager._SequenceManager._delete"), mock.patch(
        "taipy.core.sequence._sequence_manager._SequenceManager._get"
    ):
        # test get_sequence
        rep = client.delete(url_for("api.sequence_by_id", sequence_id="foo"))
        assert rep.status_code == 200


def test_create_sequence(client, default_scenario):
    """POST /sequences: validates scenario id and sequence name, then creates."""
    # Missing scenario id -> 400
    sequences_url = url_for("api.sequences")
    rep = client.post(sequences_url, json={})
    assert rep.status_code == 400
    assert rep.json == {"message": "Scenario id is missing."}

    # Missing sequence name -> 400
    sequences_url = url_for("api.sequences")
    rep = client.post(sequences_url, json={"scenario_id": "SCENARIO_scenario_id"})
    assert rep.status_code == 400
    assert rep.json == {"message": "Sequence name is missing."}

    # Unknown scenario -> 404
    sequences_url = url_for("api.sequences")
    rep = client.post(sequences_url, json={"scenario_id": "SCENARIO_scenario_id", "sequence_name": "sequence"})
    assert rep.status_code == 404

    # Register the scenario so the creation can succeed.
    _ScenarioManagerFactory._build_manager()._set(default_scenario)
    with mock.patch("taipy.core.scenario._scenario_manager._ScenarioManager._get") as config_mock:
        config_mock.return_value = default_scenario
        sequences_url = url_for("api.sequences")
        rep = client.post(
            sequences_url, json={"scenario_id": default_scenario.id, "sequence_name": "sequence", "tasks": []}
        )
        assert rep.status_code == 201


def test_get_all_sequences(client, default_scenario_config_list):
    """GET /sequences returns one sequence per scenario created in the loop."""
    for ds in range(10):
        with mock.patch("src.taipy.rest.api.resources.scenario.ScenarioList.fetch_config") as config_mock:
            config_mock.return_value = default_scenario_config_list[ds]
            scenario_url = url_for("api.scenarios", config_id=config_mock.name)
            client.post(scenario_url)

    sequences_url = url_for("api.sequences")
    rep = client.get(sequences_url)
    assert rep.status_code == 200

    results = rep.get_json()
    assert len(results) == 10


@pytest.mark.xfail()
def test_execute_sequence(client, default_sequence):
    """POST /sequences/submit/<id> — currently expected to fail."""
    # test 404
    user_url = url_for("api.sequence_submit", sequence_id="foo")
    rep = client.post(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.sequence._sequence_manager._SequenceManager._get") as manager_mock:
        manager_mock.return_value = default_sequence

        # test get_sequence
        rep = client.post(url_for("api.sequence_submit", sequence_id="foo"))
        assert rep.status_code == 200
from unittest import mock

from flask import url_for


def test_get_job(client, default_job):
    """GET /jobs/<id>: 404 for unknown id, 200 when the manager returns a job."""
    # test 404
    user_url = url_for("api.job_by_id", job_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.job._job_manager._JobManager._get") as manager_mock:
        manager_mock.return_value = default_job

        # test get_job
        rep = client.get(url_for("api.job_by_id", job_id="foo"))
        assert rep.status_code == 200


def test_delete_job(client):
    """DELETE /jobs/<id>: 404 for unknown id, 200 when deletion is mocked out."""
    # test 404
    user_url = url_for("api.job_by_id", job_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.job._job_manager._JobManager._delete"), mock.patch(
        "taipy.core.job._job_manager._JobManager._get"
    ):
        # test get_job
        rep = client.delete(url_for("api.job_by_id", job_id="foo"))
        assert rep.status_code == 200


def test_create_job(client, default_task_config):
    """POST /jobs: 400 without a task config, 201 once the config is resolved."""
    # without config param
    jobs_url = url_for("api.jobs")
    rep = client.post(jobs_url)
    assert rep.status_code == 400

    with mock.patch("src.taipy.rest.api.resources.job.JobList.fetch_config") as config_mock:
        config_mock.return_value = default_task_config
        jobs_url = url_for("api.jobs", task_id="foo")
        rep = client.post(jobs_url)
        assert rep.status_code == 201


def test_get_all_jobs(client, create_job_list):
    """GET /jobs returns all jobs created by the create_job_list fixture."""
    jobs_url = url_for("api.jobs")
    rep = client.get(jobs_url)
    assert rep.status_code == 200

    results = rep.get_json()
    assert len(results) == 10


def test_cancel_job(client, default_job):
    """POST /jobs/cancel/<id>: 404 for unknown id, 200 for a mocked job."""
    # test 404
    # The orchestrator and dispatcher must exist for cancellation to work.
    from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory

    _OrchestratorFactory._build_orchestrator()
    _OrchestratorFactory._build_dispatcher()

    user_url = url_for("api.job_cancel", job_id="foo")
    rep = client.post(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.job._job_manager._JobManager._get") as manager_mock:
        manager_mock.return_value = default_job

        # test get_job
        rep = client.post(url_for("api.job_cancel", job_id="foo"))
        assert rep.status_code == 200
from unittest import mock

from flask import url_for


def test_get_task(client, default_task):
    """GET /tasks/<id>: 404 for unknown id, 200 when the manager returns a task."""
    # test 404
    user_url = url_for("api.task_by_id", task_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.task._task_manager._TaskManager._get") as manager_mock:
        manager_mock.return_value = default_task

        # test get_task
        rep = client.get(url_for("api.task_by_id", task_id="foo"))
        assert rep.status_code == 200


def test_delete_task(client):
    """DELETE /tasks/<id>: 404 for unknown id, 200 when deletion is mocked out."""
    # test 404
    user_url = url_for("api.task_by_id", task_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.task._task_manager._TaskManager._delete"), mock.patch(
        "taipy.core.task._task_manager._TaskManager._get"
    ):
        # test get_task
        rep = client.delete(url_for("api.task_by_id", task_id="foo"))
        assert rep.status_code == 200


def test_create_task(client, default_task_config):
    """POST /tasks: 400 without config, 404 for unknown config, 201 when resolved."""
    # without config param
    tasks_url = url_for("api.tasks")
    rep = client.post(tasks_url)
    assert rep.status_code == 400

    # config does not exist
    tasks_url = url_for("api.tasks", config_id="foo")
    rep = client.post(tasks_url)
    assert rep.status_code == 404

    with mock.patch("src.taipy.rest.api.resources.task.TaskList.fetch_config") as config_mock:
        config_mock.return_value = default_task_config
        tasks_url = url_for("api.tasks", config_id="bar")
        rep = client.post(tasks_url)
        assert rep.status_code == 201


def test_get_all_tasks(client, task_data, default_task_config_list):
    """GET /tasks returns one task per config created in the loop."""
    for ds in range(10):
        with mock.patch("src.taipy.rest.api.resources.task.TaskList.fetch_config") as config_mock:
            config_mock.return_value = default_task_config_list[ds]
            tasks_url = url_for("api.tasks", config_id=config_mock.name)
            client.post(tasks_url)

    rep = client.get(tasks_url)
    assert rep.status_code == 200

    results = rep.get_json()
    assert len(results) == 10


def test_execute_task(client, default_task):
    """POST /tasks/submit/<id>: 404 for unknown id, 200 for a mocked task."""
    # test 404
    user_url = url_for("api.task_submit", task_id="foo")
    rep = client.post(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.task._task_manager._TaskManager._get") as manager_mock:
        manager_mock.return_value = default_task

        # test get_task
        rep = client.post(url_for("api.task_submit", task_id="foo"))
        assert rep.status_code == 200
from functools import wraps
from unittest.mock import MagicMock, patch

from src.taipy.rest.api.middlewares._middleware import _middleware


def mock_enterprise_middleware(f):
    # Pass-through stand-in for the enterprise middleware decorator.
    @wraps(f)
    def wrapper(*args, **kwargs):
        return f(*args, **kwargs)

    return wrapper


@patch("src.taipy.rest.api.middlewares._middleware._using_enterprise")
@patch("src.taipy.rest.api.middlewares._middleware._enterprise_middleware")
def test_enterprise_middleware_applied_when_enterprise_is_installed(
    enterprise_middleware: MagicMock, using_enterprise: MagicMock
):
    """When the enterprise edition is detected, _middleware wraps with it."""
    enterprise_middleware.return_value = mock_enterprise_middleware
    using_enterprise.return_value = True

    @_middleware
    def f():
        return "f"

    rv = f()

    # The decorated function still runs, and the enterprise wrapper was used.
    assert rv == "f"
    using_enterprise.assert_called_once()
    enterprise_middleware.assert_called_once()


@patch("src.taipy.rest.api.middlewares._middleware._using_enterprise")
@patch("src.taipy.rest.api.middlewares._middleware._enterprise_middleware")
def test_enterprise_middleware_not_applied_when_enterprise_is_not_installed(
    enterprise_middleware: MagicMock, using_enterprise: MagicMock
):
    """Without the enterprise edition, _middleware leaves the function as-is."""
    enterprise_middleware.return_value = mock_enterprise_middleware
    using_enterprise.return_value = False

    @_middleware
    def f():
        return "f"

    rv = f()

    assert rv == "f"
    using_enterprise.assert_called_once()
    enterprise_middleware.assert_not_called()
from unittest import mock

import pytest
from flask import url_for


def test_get_datanode(client, default_datanode):
    """GET /datanodes/<id>: 404 for unknown id, 200 when the manager resolves one."""
    # test 404
    user_url = url_for("api.datanode_by_id", datanode_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.data._data_manager._DataManager._get") as manager_mock:
        manager_mock.return_value = default_datanode

        # test get_datanode
        rep = client.get(url_for("api.datanode_by_id", datanode_id="foo"))
        assert rep.status_code == 200


def test_delete_datanode(client):
    """DELETE /datanodes/<id>: 404 for unknown id, 200 when deletion is mocked out."""
    # test 404
    user_url = url_for("api.datanode_by_id", datanode_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.data._data_manager._DataManager._delete"), mock.patch(
        "taipy.core.data._data_manager._DataManager._get"
    ):
        # test get_datanode
        rep = client.delete(url_for("api.datanode_by_id", datanode_id="foo"))
        assert rep.status_code == 200


def test_create_datanode(client, default_datanode_config):
    """POST /datanodes: 400 without config, 404 for unknown config, 201 when resolved."""
    # without config param
    datanodes_url = url_for("api.datanodes")
    rep = client.post(datanodes_url)
    assert rep.status_code == 400

    # config does not exist
    datanodes_url = url_for("api.datanodes", config_id="foo")
    rep = client.post(datanodes_url)
    assert rep.status_code == 404

    with mock.patch("src.taipy.rest.api.resources.datanode.DataNodeList.fetch_config") as config_mock:
        config_mock.return_value = default_datanode_config
        datanodes_url = url_for("api.datanodes", config_id="bar")
        rep = client.post(datanodes_url)
        assert rep.status_code == 201


def test_get_all_datanodes(client, default_datanode_config_list):
    """GET /datanodes returns one data node per config created in the loop."""
    for ds in range(10):
        with mock.patch("src.taipy.rest.api.resources.datanode.DataNodeList.fetch_config") as config_mock:
            config_mock.return_value = default_datanode_config_list[ds]
            datanodes_url = url_for("api.datanodes", config_id=config_mock.name)
            client.post(datanodes_url)

    rep = client.get(datanodes_url)
    assert rep.status_code == 200

    results = rep.get_json()
    assert len(results) == 10


def test_read_datanode(client, default_df_datanode):
    """GET /datanodes/<id>/read works with and without a JSON operators body."""
    with mock.patch("taipy.core.data._data_manager._DataManager._get") as config_mock:
        config_mock.return_value = default_df_datanode

        # without operators
        datanodes_url = url_for("api.datanode_reader", datanode_id="foo")
        rep = client.get(datanodes_url, json={})
        assert rep.status_code == 200

        # Without operators and body
        rep = client.get(datanodes_url)
        assert rep.status_code == 200

        # TODO: Revisit filter test
        # operators = {"operators": [{"key": "a", "value": 5, "operator": "LESS_THAN"}]}
        # rep = client.get(datanodes_url, json=operators)
        # assert rep.status_code == 200


def test_write_datanode(client, default_datanode):
    """PUT /datanodes/<id>/write replaces the data; a subsequent read returns it."""
    with mock.patch("taipy.core.data._data_manager._DataManager._get") as config_mock:
        config_mock.return_value = default_datanode

        # Get DataNode
        datanodes_read_url = url_for("api.datanode_reader", datanode_id=default_datanode.id)
        rep = client.get(datanodes_read_url, json={})
        assert rep.status_code == 200
        assert rep.json == {"data": [1, 2, 3, 4, 5, 6]}

        datanodes_write_url = url_for("api.datanode_writer", datanode_id=default_datanode.id)
        rep = client.put(datanodes_write_url, json=[1, 2, 3])
        assert rep.status_code == 200

        rep = client.get(datanodes_read_url, json={})
        assert rep.status_code == 200
        assert rep.json == {"data": [1, 2, 3]}
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import json
from typing import Dict

from flask import url_for


def create_and_submit_scenario(config_id: str, client) -> Dict:
    """Create a scenario from `config_id`, check its fields, submit it, and return it."""
    response = client.post(url_for("api.scenarios", config_id=config_id))
    assert response.status_code == 201

    scenario = response.json.get("scenario")
    # The returned scenario must not expose fields absent from the expected fixture.
    assert (set(scenario) - set(json.load(open("tests/json/expected/scenario.json")))) == set()

    response = client.post(url_for("api.scenario_submit", scenario_id=scenario.get("id")))
    assert response.status_code == 200
    return scenario


def get(url, name, client) -> Dict:
    """GET `url`, verify the `name` payload against its expected fixture, and return it."""
    response = client.get(url)
    returned_data = response.json.get(name)
    assert (set(returned_data) - set(json.load(open(f"tests/json/expected/{name}.json")))) == set()
    return returned_data


def get_assert_status(url, client, status_code) -> None:
    """GET `url` and assert the response carries `status_code`."""
    response = client.get(url)
    assert response.status_code == status_code


def get_all(url, expected_quantity, client):
    """GET a list endpoint and assert it returns exactly `expected_quantity` items."""
    response = client.get(url)
    assert len(response.json) == expected_quantity


def delete(url, client):
    """DELETE `url` and assert success."""
    response = client.delete(url)
    assert response.status_code == 200


def test_end_to_end(client, setup_end_to_end):
    """Exercise the full REST surface: create, read, list, delete, and 404 checks."""
    # Create Scenario: Should also create all of its dependencies(sequences, tasks, datanodes, etc)
    scenario = create_and_submit_scenario("scenario", client)

    # Get other models and verify if they return the necessary fields
    cycle = get(url_for("api.cycle_by_id", cycle_id=scenario.get("cycle")), "cycle", client)
    sequence = get(
        url_for("api.sequence_by_id", sequence_id=f"SEQUENCE_sequence_{scenario['id']}"),
        "sequence",
        client,
    )
    task = get(url_for("api.task_by_id", task_id=sequence.get("tasks")[0]), "task", client)
    datanode = get(
        url_for("api.datanode_by_id", datanode_id=task.get("input_ids")[0]),
        "datanode",
        client,
    )

    # Get All
    get_all(url_for("api.scenarios"), 1, client)
    get_all(url_for("api.cycles"), 1, client)
    get_all(url_for("api.sequences"), 1, client)
    get_all(url_for("api.tasks"), 2, client)
    get_all(url_for("api.datanodes"), 5, client)
    get_all(url_for("api.jobs"), 2, client)

    # Delete entities
    delete(url_for("api.cycle_by_id", cycle_id=cycle.get("id")), client)
    delete(url_for("api.sequence_by_id", sequence_id=sequence.get("id")), client)
    delete(url_for("api.task_by_id", task_id=task.get("id")), client)
    delete(url_for("api.datanode_by_id", datanode_id=datanode.get("id")), client)

    # Check status code
    # Non-existing entities should return 404
    get_assert_status(url_for("api.cycle_by_id", cycle_id=9999999), client, 404)
    get_assert_status(url_for("api.scenario_by_id", scenario_id=9999999), client, 404)
    get_assert_status(url_for("api.sequence_by_id", sequence_id=9999999), client, 404)
    get_assert_status(url_for("api.task_by_id", task_id=9999999), client, 404)
    get_assert_status(url_for("api.datanode_by_id", datanode_id=9999999), client, 404)

    # Check URL with and without trailing slashes
    url_with_slash = url_for("api.scenarios")
    url_without_slash = url_for("api.scenarios")[:-1]

    get_all(url_with_slash, 1, client)
    get_all(url_without_slash, 1, client)
from unittest import mock

from flask import url_for


def test_get_cycle(client, default_cycle):
    """GET /cycles/<id>: an unknown id yields 404; a resolvable id yields 200."""
    # The manager cannot resolve "foo" yet, so the read must fail.
    response = client.get(url_for("api.cycle_by_id", cycle_id="foo"))
    assert response.status_code == 404

    # Force the manager to resolve the id, then expect a successful read.
    with mock.patch("taipy.core.cycle._cycle_manager._CycleManager._get") as manager_mock:
        manager_mock.return_value = default_cycle
        response = client.get(url_for("api.cycle_by_id", cycle_id="foo"))
        assert response.status_code == 200


def test_delete_cycle(client):
    """DELETE /cycles/<id>: 404 for an unknown id, 200 when manager calls are stubbed."""
    # Unknown id -> 404.
    response = client.get(url_for("api.cycle_by_id", cycle_id="foo"))
    assert response.status_code == 404

    # With both manager calls stubbed out, the delete goes through.
    with mock.patch("taipy.core.cycle._cycle_manager._CycleManager._delete"), mock.patch(
        "taipy.core.cycle._cycle_manager._CycleManager._get"
    ):
        response = client.delete(url_for("api.cycle_by_id", cycle_id="foo"))
        assert response.status_code == 200


def test_create_cycle(client, cycle_data):
    """POST /cycles: an invalid payload yields 400; a valid payload yields 201."""
    endpoint = url_for("api.cycles")

    # A payload that does not match the schema must be rejected.
    bad_response = client.post(endpoint, json={"bad": "data"})
    assert bad_response.status_code == 400

    # A well-formed payload creates the cycle.
    good_response = client.post(endpoint, json=cycle_data)
    assert good_response.status_code == 201


def test_get_all_cycles(client, create_cycle_list):
    """GET /cycles lists every cycle created by the fixture."""
    response = client.get(url_for("api.cycles"))
    assert response.status_code == 200
    assert len(response.get_json()) == 10
from unittest import mock

import pytest
from flask import url_for


def test_get_scenario(client, default_scenario):
    """GET /scenarios/<id>: 404 for an unknown id, 200 once the manager resolves it."""
    # test 404
    user_url = url_for("api.scenario_by_id", scenario_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.scenario._scenario_manager._ScenarioManager._get") as manager_mock:
        manager_mock.return_value = default_scenario

        # test get_scenario
        rep = client.get(url_for("api.scenario_by_id", scenario_id="foo"))
        assert rep.status_code == 200


def test_delete_scenario(client):
    """DELETE /scenarios/<id>: 404 for an unknown id, 200 when manager calls are stubbed."""
    # test 404
    user_url = url_for("api.scenario_by_id", scenario_id="foo")
    rep = client.get(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.scenario._scenario_manager._ScenarioManager._delete"), mock.patch(
        "taipy.core.scenario._scenario_manager._ScenarioManager._get"
    ):
        # test get_scenario
        rep = client.delete(url_for("api.scenario_by_id", scenario_id="foo"))
        assert rep.status_code == 200


def test_create_scenario(client, default_scenario_config):
    """POST /scenarios: 400 without config_id, 404 for an unknown config, 201 on success."""
    # without config param
    scenarios_url = url_for("api.scenarios")
    rep = client.post(scenarios_url)
    assert rep.status_code == 400

    # config does not exist
    scenarios_url = url_for("api.scenarios", config_id="foo")
    rep = client.post(scenarios_url)
    assert rep.status_code == 404

    with mock.patch("src.taipy.rest.api.resources.scenario.ScenarioList.fetch_config") as config_mock:
        config_mock.return_value = default_scenario_config
        scenarios_url = url_for("api.scenarios", config_id="bar")
        rep = client.post(scenarios_url)
        assert rep.status_code == 201


def test_get_all_scenarios(client, default_sequence, default_scenario_config_list):
    """GET /scenarios lists every scenario created beforehand."""
    for ds in range(10):
        with mock.patch("src.taipy.rest.api.resources.scenario.ScenarioList.fetch_config") as config_mock:
            config_mock.return_value = default_scenario_config_list[ds]
            scenarios_url = url_for("api.scenarios", config_id=config_mock.name)
            client.post(scenarios_url)

    rep = client.get(scenarios_url)
    assert rep.status_code == 200

    results = rep.get_json()
    assert len(results) == 10


@pytest.mark.xfail()
def test_execute_scenario(client, default_scenario):
    """POST /scenarios/submit/<id>: 404 for an unknown id, 200 once resolvable (known flaky)."""
    # test 404
    user_url = url_for("api.scenario_submit", scenario_id="foo")
    rep = client.post(user_url)
    assert rep.status_code == 404

    with mock.patch("taipy.core.scenario._scenario_manager._ScenarioManager._get") as manager_mock:
        manager_mock.return_value = default_scenario

        # test get_scenario
        rep = client.post(url_for("api.scenario_submit", scenario_id="foo"))
        assert rep.status_code == 200
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import pickle
import random
from datetime import datetime, timedelta
from typing import Any, Dict

import pandas as pd

# Number of days to predict ahead of the given date.
n_predictions = 14


def forecast(model, date: datetime) -> pd.DataFrame:
    """Return a DataFrame of `n_predictions` daily forecasts starting at `date`.

    `model` must expose a `forecast(n)` method returning n numeric values.
    A random jitter in [0, 2] is added to each raw forecast value.
    Columns: "Date" (ISO date strings) and "Forecast" (floats).
    """
    dates = [date + timedelta(days=i) for i in range(n_predictions)]
    forecasts = [f + random.uniform(0, 2) for f in model.forecast(len(dates))]
    days = [str(dt.date()) for dt in dates]
    return pd.DataFrame.from_dict({"Date": days, "Forecast": forecasts})


def evaluate(cleaned: pd.DataFrame, forecasts: pd.DataFrame, date: datetime) -> Dict[str, Any]:
    """Compare `forecasts` against the historical values in `cleaned` for matching dates.

    Returns a dict with the evaluation date, the detailed comparison DataFrame
    (including a "Delta" absolute-error column), the mean absolute error, and
    the relative error as a percentage of the mean observed value.
    """
    # Keep only the historical rows that have a matching forecast date.
    cleaned = cleaned[cleaned["Date"].isin(forecasts["Date"].tolist())]
    forecasts_as_series = pd.Series(forecasts["Forecast"].tolist(), name="Forecast")
    res = pd.concat([cleaned.reset_index(), forecasts_as_series], axis=1)
    res["Delta"] = abs(res["Forecast"] - res["Value"])
    return {
        "Date": date,
        "Dataframe": res,
        "Mean_absolute_error": res["Delta"].mean(),
        "Relative_error": (res["Delta"].mean() * 100) / res["Value"].mean(),
    }


if __name__ == "__main__":
    # Use a context manager so the model file handle is closed deterministically
    # (the previous `pickle.load(open(...))` leaked the handle).
    with open("../my_model.p", "rb") as model_file:
        model = pickle.load(model_file)
    day = datetime(2020, 1, 25)

    forecasts = forecast(model, day)

    historical_temperature = pd.read_csv("../historical_temperature.csv")
    evaluation = evaluate(historical_temperature, forecasts, day)

    print(evaluation["Dataframe"])
    print()
    print(f'Mean absolute error : {evaluation["Mean_absolute_error"]}')
    print(f'Relative error in %: {evaluation["Relative_error"]}')
from taipy.core import Config, Frequency

from .algorithms import evaluate, forecast

# Pickled model loaded from disk.
model_cfg = Config.configure_data_node("model", path="my_model.p", storage_type="pickle")

# In-memory input/output nodes of the forecasting task.
day_cfg = Config.configure_data_node(id="day")
forecasts_cfg = Config.configure_data_node(id="forecasts")

# forecast(model, day) -> forecasts
forecast_task_cfg = Config.configure_task(
    id="forecast_task",
    input=[model_cfg, day_cfg],
    function=forecast,
    output=forecasts_cfg,
)

# Historical observations used as ground truth for the evaluation.
historical_temperature_cfg = Config.configure_data_node(
    "historical_temperature",
    storage_type="csv",
    path="historical_temperature.csv",
    has_header=True,
)
evaluation_cfg = Config.configure_data_node("evaluation")

# evaluate(historical_temperature, forecasts, day) -> evaluation
evaluate_task_cfg = Config.configure_task(
    "evaluate_task",
    input=[historical_temperature_cfg, forecasts_cfg, day_cfg],
    function=evaluate,
    output=evaluation_cfg,
)

# Daily scenario chaining both tasks; also exposed as a single "sequence".
scenario_cfg = Config.configure_scenario("scenario", [forecast_task_cfg, evaluate_task_cfg], frequency=Frequency.DAILY)
scenario_cfg.add_sequences({"sequence": [forecast_task_cfg, evaluate_task_cfg]})
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from importlib.util import find_spec

# Re-export the public API of every installed taipy sub-package, so that a
# single `import taipy` exposes config/gui/core/rest/etc. when available.
# Each import is guarded by find_spec to tolerate partial installations.
if find_spec("taipy"):
    if find_spec("taipy.config"):
        from taipy.config._init import *  # type: ignore
    if find_spec("taipy.gui"):
        from taipy.gui._init import *  # type: ignore
    if find_spec("taipy.core"):
        from taipy.core._init import *  # type: ignore
    if find_spec("taipy.rest"):
        from taipy.rest._init import *  # type: ignore
    if find_spec("taipy.gui_core"):
        from taipy.gui_core._init import *  # type: ignore
    if find_spec("taipy.enterprise"):
        from taipy.enterprise._init import *  # type: ignore
    if find_spec("taipy._run"):
        from taipy._run import _run as run  # type: ignore
import json import os def _get_version(): with open(f"{os.path.dirname(os.path.abspath(__file__))}{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" return version_string
from .rest import Rest
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from taipy.config import Config from .app import create_app as _create_app class Rest: """ Runnable Rest application serving REST APIs on top of Taipy Core functionalities. """ def __init__(self): """ Initialize a REST API server. A Flask application is instantiated and configured using three parameters from the global config. - Config.global_config.testing (bool): Run the application on testing mode. - Config.global_config.env (Optional[str]): The application environment. - Config.global_config.secret_key (Optional[str]): Application server secret key. However, editing these parameters is only recommended for advanced users. Indeed, the default behavior of the REST server without any required configuration satisfies all the standard and basic needs. """ self._app = _create_app(Config.global_config.testing or False, Config.global_config.env, Config.global_config.secret_key) def run(self, **kwargs): """ Start a REST API server. This method is blocking. Parameters: **kwargs : Options to provide to the application server. """ self._app.run(**kwargs)
"""# Taipy Rest The Taipy Rest package exposes the Runnable `Rest^` service to provide REST APIs on top of Taipy Core. (more details on Taipy Core functionalities in the [user manual](../../../manuals/core/)). Once the `Rest^` service runs, users can call REST APIs to create, read, update, submit and remove Taipy entities (including cycles, scenarios, sequences, tasks, jobs, and data nodes). It is handy when it comes to integrating a Taipy application in a more complex IT ecosystem. Please refer to [REST API](../../reference_rest/) page to get the exhaustive list of available APIs.""" from ._init import * from .version import _get_version __version__ = _get_version()
"""Extensions registry All extensions here are used as singletons and initialized in application factory """ from .commons.apispec import APISpecExt apispec = APISpecExt()
import os

from flask import Flask

from . import api
from .commons.encoder import _CustomEncoder
from .extensions import apispec


def create_app(testing=False, flask_env=None, secret_key=None):
    """Application factory, used to create application"""
    app = Flask(__name__)
    # Environment variables take precedence over the factory arguments.
    # NOTE(review): os.getenv returns strings, so TESTING set from the
    # environment becomes a (always truthy) string — confirm this is intended.
    app.config.update(
        ENV=os.getenv("FLASK_ENV", flask_env),
        TESTING=os.getenv("TESTING", testing),
        SECRET_KEY=os.getenv("SECRET_KEY", secret_key),
    )
    # Treat /route and /route/ as the same endpoint.
    app.url_map.strict_slashes = False
    # Serialize enums and datetimes through the custom JSON encoder.
    app.config["RESTFUL_JSON"] = {"cls": _CustomEncoder}

    configure_apispec(app)
    register_blueprints(app)

    # register_views needs an application context to resolve the routes.
    with app.app_context():
        api.views.register_views()

    return app


def configure_apispec(app):
    """Configure APISpec for swagger support"""
    apispec.init_app(app)
    apispec.spec.components.schema(
        "PaginatedResult",
        {
            "properties": {
                "total": {"type": "integer"},
                "pages": {"type": "integer"},
                "next": {"type": "string"},
                "prev": {"type": "string"},
            }
        },
    )


def register_blueprints(app):
    """Register all blueprints for application"""
    app.register_blueprint(api.views.blueprint)
from taipy.core.cycle._cycle_converter import _CycleConverter
from taipy.core.data._data_converter import _DataNodeConverter
from taipy.core.scenario._scenario_converter import _ScenarioConverter
from taipy.core.sequence._sequence_converter import _SequenceConverter
from taipy.core.task._task_converter import _TaskConverter

# Maps a repository name to the converter that turns its entity into a model.
entity_to_models = {
    "scenario": _ScenarioConverter._entity_to_model,
    "sequence": _SequenceConverter._entity_to_model,
    "task": _TaskConverter._entity_to_model,
    "data": _DataNodeConverter._entity_to_model,
    "cycle": _CycleConverter._entity_to_model,
}


def _to_model(repository, entity, **kwargs):
    """Convert `entity` into its model via the converter registered for `repository`.

    Extra keyword arguments are accepted for interface compatibility but are
    ignored. Raises KeyError for an unknown repository name.
    """
    return entity_to_models[repository](entity)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from apispec import APISpec
from apispec.exceptions import APISpecError
from apispec.ext.marshmallow import MarshmallowPlugin
from apispec_webframeworks.flask import FlaskPlugin
from flask import Blueprint, jsonify, render_template


class FlaskRestfulPlugin(FlaskPlugin):
    """Small plugin override to handle flask-restful resources"""

    @staticmethod
    def _rule_for_view(view, app=None):
        # flask-restful registers the resource class on `view_class`, so the
        # base FlaskPlugin lookup by function identity would never match here.
        view_funcs = app.view_functions
        endpoint = None

        for ept, view_func in view_funcs.items():
            if hasattr(view_func, "view_class"):
                view_func = view_func.view_class

            if view_func == view:
                endpoint = ept

        if not endpoint:
            raise APISpecError("Could not find endpoint for view {0}".format(view))

        # WARNING: Assume 1 rule per view function for now
        rule = app.url_map._rules_by_endpoint[endpoint][0]
        return rule


class APISpecExt:
    """Very simple and small extension to use apispec with this API as a flask extension"""

    def __init__(self, app=None, **kwargs):
        self.spec = None

        if app is not None:
            self.init_app(app, **kwargs)

    def init_app(self, app, **kwargs):
        """Bind the extension to `app`: build the spec and register the doc routes."""
        # Defaults; any key can be overridden in app.config before init_app runs.
        app.config.setdefault("APISPEC_TITLE", "Taipy Rest")
        app.config.setdefault("APISPEC_VERSION", "1.0.0")
        app.config.setdefault("OPENAPI_VERSION", "3.0.2")
        app.config.setdefault("SWAGGER_JSON_URL", "/swagger.json")
        app.config.setdefault("SWAGGER_UI_URL", "/swagger-ui")
        app.config.setdefault("OPENAPI_YAML_URL", "/openapi.yaml")
        app.config.setdefault("REDOC_UI_URL", "/redoc-ui")
        app.config.setdefault("SWAGGER_URL_PREFIX", None)

        self.spec = APISpec(
            title=app.config["APISPEC_TITLE"],
            version=app.config["APISPEC_VERSION"],
            openapi_version=app.config["OPENAPI_VERSION"],
            plugins=[MarshmallowPlugin(), FlaskRestfulPlugin()],
            **kwargs
        )

        blueprint = Blueprint(
            "swagger",
            __name__,
            template_folder="./templates",
            url_prefix=app.config["SWAGGER_URL_PREFIX"],
        )

        blueprint.add_url_rule(app.config["SWAGGER_JSON_URL"], "swagger_json", self.swagger_json)
        blueprint.add_url_rule(app.config["SWAGGER_UI_URL"], "swagger_ui", self.swagger_ui)
        blueprint.add_url_rule(app.config["OPENAPI_YAML_URL"], "openapi_yaml", self.openapi_yaml)
        blueprint.add_url_rule(app.config["REDOC_UI_URL"], "redoc_ui", self.redoc_ui)

        app.register_blueprint(blueprint)

    def swagger_json(self):
        """Serve the OpenAPI specification as JSON."""
        return jsonify(self.spec.to_dict())

    def swagger_ui(self):
        """Serve the Swagger UI page."""
        return render_template("swagger.j2")

    def openapi_yaml(self):
        """Serve the spec as YAML with ReDoc's Authentication legend injected."""
        # Manually inject ReDoc's Authentication legend, then remove it so the
        # shared spec object is left unchanged for the other endpoints.
        self.spec.tag(
            {
                "name": "authentication",
                "x-displayName": "Authentication",
                "description": "<SecurityDefinitions />",
            }
        )
        redoc_spec = self.spec.to_yaml()
        self.spec._tags.pop(0)
        return redoc_spec

    def redoc_ui(self):
        """Serve the ReDoc UI page."""
        return render_template("redoc.j2")
import json from typing import Any, Union from datetime import datetime from enum import Enum Json = Union[dict, list, str, int, float, bool, None] class _CustomEncoder(json.JSONEncoder): def default(self, o: Any) -> Json: if isinstance(o, Enum): result = o.value elif isinstance(o, datetime): result = {"__type__": "Datetime", "__value__": o.isoformat()} else: result = json.JSONEncoder.default(self, o) return result
"""Simple helper to paginate query """ from flask import request, url_for DEFAULT_PAGE_SIZE = 50 DEFAULT_PAGE_NUMBER = 1 def extract_pagination(page=None, per_page=None, **request_args): page = int(page) if page is not None else DEFAULT_PAGE_NUMBER per_page = int(per_page) if per_page is not None else DEFAULT_PAGE_SIZE return page, per_page, request_args def paginate(query, schema): page, per_page, other_request_args = extract_pagination(**request.args) page_obj = query.paginate(page=page, per_page=per_page) next_ = url_for( request.endpoint, page=page_obj.next_num if page_obj.has_next else page_obj.page, per_page=per_page, **other_request_args, **request.view_args ) prev = url_for( request.endpoint, page=page_obj.prev_num if page_obj.has_prev else page_obj.page, per_page=per_page, **other_request_args, **request.view_args ) return { "total": page_obj.total, "pages": page_obj.pages, "next": next_, "prev": prev, "results": schema.dump(page_obj.items), }
from . import error_handler, views __all__ = ["views", "error_handler"]
from flask import jsonify
from marshmallow import ValidationError
from taipy.core.exceptions.exceptions import (
    NonExistingCycle,
    NonExistingDataNode,
    NonExistingDataNodeConfig,
    NonExistingJob,
    NonExistingScenario,
    NonExistingScenarioConfig,
    NonExistingSequence,
    NonExistingSequenceConfig,
    NonExistingTask,
    NonExistingTaskConfig,
)

from .exceptions.exceptions import ConfigIdMissingException, ScenarioIdMissingException, SequenceNameMissingException
from .views import blueprint


def _create_404(e):
    """Build a JSON 404 response from an exception carrying a `message` attribute."""
    return {"message": e.message}, 404


@blueprint.errorhandler(ValidationError)
def handle_marshmallow_error(e):
    """Return json error for marshmallow validation errors.

    This will avoid having to try/catch ValidationErrors in all endpoints, returning
    correct JSON response with associated HTTP 400 Status
    (https://tools.ietf.org/html/rfc7231#section-6.5.1)
    """
    return jsonify(e.messages), 400


# --- 400: malformed or incomplete request payloads -------------------------


@blueprint.errorhandler(ConfigIdMissingException)
def handle_config_id_missing_exception(e):
    return jsonify({"message": e.message}), 400


@blueprint.errorhandler(ScenarioIdMissingException)
def handle_scenario_id_missing_exception(e):
    return jsonify({"message": e.message}), 400


@blueprint.errorhandler(SequenceNameMissingException)
def handle_sequence_name_missing_exception(e):
    return jsonify({"message": e.message}), 400


# --- 404: requested entity or config does not exist ------------------------


@blueprint.errorhandler(NonExistingDataNode)
def handle_data_node_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingDataNodeConfig)
def handle_data_node_config_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingCycle)
def handle_cycle_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingJob)
def handle_job_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingSequence)
def handle_sequence_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingSequenceConfig)
def handle_sequence_config_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingScenario)
def handle_scenario_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingScenarioConfig)
def handle_scenario_config_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingTask)
def handle_task_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingTaskConfig)
def handle_task_config_not_found(e):
    return _create_404(e)
from flask import Blueprint, current_app
from flask_restful import Api

from taipy.core.common._utils import _load_fct
from taipy.logger._taipy_logger import _TaipyLogger

from ..extensions import apispec
from .middlewares._middleware import _using_enterprise
from .resources import (
    CycleList,
    CycleResource,
    DataNodeList,
    DataNodeReader,
    DataNodeResource,
    DataNodeWriter,
    JobExecutor,
    JobList,
    JobResource,
    ScenarioExecutor,
    ScenarioList,
    ScenarioResource,
    SequenceExecutor,
    SequenceList,
    SequenceResource,
    TaskExecutor,
    TaskList,
    TaskResource,
)
from .schemas import CycleSchema, DataNodeSchema, JobSchema, ScenarioSchema, SequenceSchema, TaskSchema

_logger = _TaipyLogger._get_logger()

# All REST routes live under /api/v1 on the "api" blueprint.
blueprint = Blueprint("api", __name__, url_prefix="/api/v1")
api = Api(blueprint)

# --- Data node routes -------------------------------------------------------
api.add_resource(
    DataNodeResource,
    "/datanodes/<string:datanode_id>/",
    endpoint="datanode_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    DataNodeReader,
    "/datanodes/<string:datanode_id>/read/",
    endpoint="datanode_reader",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    DataNodeWriter,
    "/datanodes/<string:datanode_id>/write/",
    endpoint="datanode_writer",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    DataNodeList,
    "/datanodes/",
    endpoint="datanodes",
    resource_class_kwargs={"logger": _logger},
)

# --- Task routes ------------------------------------------------------------
api.add_resource(
    TaskResource,
    "/tasks/<string:task_id>/",
    endpoint="task_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(TaskList, "/tasks/", endpoint="tasks", resource_class_kwargs={"logger": _logger})
api.add_resource(
    TaskExecutor,
    "/tasks/submit/<string:task_id>/",
    endpoint="task_submit",
    resource_class_kwargs={"logger": _logger},
)

# --- Sequence routes --------------------------------------------------------
api.add_resource(
    SequenceResource,
    "/sequences/<string:sequence_id>/",
    endpoint="sequence_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    SequenceList,
    "/sequences/",
    endpoint="sequences",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    SequenceExecutor,
    "/sequences/submit/<string:sequence_id>/",
    endpoint="sequence_submit",
    resource_class_kwargs={"logger": _logger},
)

# --- Scenario routes --------------------------------------------------------
api.add_resource(
    ScenarioResource,
    "/scenarios/<string:scenario_id>/",
    endpoint="scenario_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    ScenarioList,
    "/scenarios/",
    endpoint="scenarios",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    ScenarioExecutor,
    "/scenarios/submit/<string:scenario_id>/",
    endpoint="scenario_submit",
    resource_class_kwargs={"logger": _logger},
)

# --- Cycle routes -----------------------------------------------------------
api.add_resource(
    CycleResource,
    "/cycles/<string:cycle_id>/",
    endpoint="cycle_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    CycleList,
    "/cycles/",
    endpoint="cycles",
    resource_class_kwargs={"logger": _logger},
)

# --- Job routes -------------------------------------------------------------
api.add_resource(
    JobResource,
    "/jobs/<string:job_id>/",
    endpoint="job_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(JobList, "/jobs/", endpoint="jobs", resource_class_kwargs={"logger": _logger})
api.add_resource(
    JobExecutor,
    "/jobs/cancel/<string:job_id>/",
    endpoint="job_cancel",
    resource_class_kwargs={"logger": _logger},
)


def load_enterprise_resources(api: Api):
    """
    Load enterprise resources.
    """
    # No-op unless the taipy enterprise package is installed.
    if not _using_enterprise():
        return
    load_resources = _load_fct("taipy.enterprise.rest.api.views", "_load_resources")
    load_resources(api)


load_enterprise_resources(api)


def register_views():
    """Register every schema and resource path on the shared apispec spec."""
    apispec.spec.components.schema("DataNodeSchema", schema=DataNodeSchema)
    apispec.spec.path(view=DataNodeResource, app=current_app)
    apispec.spec.path(view=DataNodeList, app=current_app)
    apispec.spec.path(view=DataNodeReader, app=current_app)
    apispec.spec.path(view=DataNodeWriter, app=current_app)

    apispec.spec.components.schema("TaskSchema", schema=TaskSchema)
    apispec.spec.path(view=TaskResource, app=current_app)
    apispec.spec.path(view=TaskList, app=current_app)
    apispec.spec.path(view=TaskExecutor, app=current_app)

    apispec.spec.components.schema("SequenceSchema", schema=SequenceSchema)
    apispec.spec.path(view=SequenceResource, app=current_app)
    apispec.spec.path(view=SequenceList, app=current_app)
    apispec.spec.path(view=SequenceExecutor, app=current_app)

    apispec.spec.components.schema("ScenarioSchema", schema=ScenarioSchema)
    apispec.spec.path(view=ScenarioResource, app=current_app)
    apispec.spec.path(view=ScenarioList, app=current_app)
    apispec.spec.path(view=ScenarioExecutor, app=current_app)

    apispec.spec.components.schema("CycleSchema", schema=CycleSchema)
    apispec.spec.path(view=CycleResource, app=current_app)
    apispec.spec.path(view=CycleList, app=current_app)

    apispec.spec.components.schema("JobSchema", schema=JobSchema)
    apispec.spec.path(view=JobResource, app=current_app)
    apispec.spec.path(view=JobList, app=current_app)
    apispec.spec.path(view=JobExecutor, app=current_app)

    apispec.spec.components.schema(
        "Any",
        {
            "description": "Any value",
            "nullable": True,
        },
    )

    # Let the enterprise edition (when installed) register its extra views.
    if _using_enterprise():
        _register_views = _load_fct("taipy.enterprise.rest.api.views", "_register_views")
        _register_views(apispec)
from .cycle import CycleList, CycleResource from .datanode import DataNodeList, DataNodeReader, DataNodeResource, DataNodeWriter from .job import JobExecutor, JobList, JobResource from .scenario import ScenarioExecutor, ScenarioList, ScenarioResource from .sequence import SequenceExecutor, SequenceList, SequenceResource from .task import TaskExecutor, TaskList, TaskResource __all__ = [ "DataNodeResource", "DataNodeList", "DataNodeReader", "DataNodeWriter", "TaskList", "TaskResource", "TaskExecutor", "SequenceList", "SequenceResource", "SequenceExecutor", "ScenarioList", "ScenarioResource", "ScenarioExecutor", "CycleResource", "CycleList", "JobResource", "JobList", "JobExecutor", ]
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from functools import wraps
from importlib import util

from taipy.core.common._utils import _load_fct


def _middleware(f):
    """Wrap `f` with the enterprise middleware whenever it is installed."""

    @wraps(f)
    def wrapper(*args, **kwargs):
        # Community edition: call the view directly.
        if not _using_enterprise():
            return f(*args, **kwargs)
        # Enterprise edition: delegate through its own middleware decorator.
        return _enterprise_middleware()(f)(*args, **kwargs)

    return wrapper


def _using_enterprise():
    """Tell whether the taipy enterprise package is importable."""
    return util.find_spec("taipy.enterprise") is not None


def _enterprise_middleware():
    """Lazily load the enterprise middleware decorator."""
    return _load_fct("taipy.enterprise.rest.api.middlewares._middleware", "_middleware")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
class ConfigIdMissingException(Exception):
    """Raised when a request that requires a config id does not provide one."""

    def __init__(self):
        self.message = "Config id is missing."
        # Forward the message to Exception so str(e) and e.args are populated
        # (previously str(e) was empty because super().__init__ was never called).
        super().__init__(self.message)


class ScenarioIdMissingException(Exception):
    """Raised when a request that requires a scenario id does not provide one."""

    def __init__(self):
        self.message = "Scenario id is missing."
        super().__init__(self.message)


class SequenceNameMissingException(Exception):
    """Raised when a request that requires a sequence name does not provide one."""

    def __init__(self):
        self.message = "Sequence name is missing."
        super().__init__(self.message)
from marshmallow import Schema, fields


class CycleSchema(Schema):
    """Serialization schema for a Cycle entity (all values dumped as strings/dicts)."""

    name = fields.String()
    frequency = fields.String()
    properties = fields.Dict()
    creation_date = fields.String()
    start_date = fields.String()
    end_date = fields.String()


class CycleResponseSchema(CycleSchema):
    """Cycle schema extended with the server-assigned id, used for responses."""

    id = fields.String()
from marshmallow import Schema, fields


class TaskSchema(Schema):
    """Serialization schema for a Task entity.

    The callable is represented by its name/module pair; inputs, outputs and
    parents are lists of entity ids.
    """

    config_id = fields.String()
    id = fields.String()
    owner_id = fields.String()
    parent_ids = fields.List(fields.String)
    input_ids = fields.List(fields.String)
    function_name = fields.String()
    function_module = fields.String()
    output_ids = fields.List(fields.String)
    version = fields.String()
from marshmallow import Schema, fields


class CallableSchema(Schema):
    """Schema for a callable reference: its function name and defining module."""

    fct_name = fields.String()
    fct_module = fields.String()


class JobSchema(Schema):
    """Serialization schema for a Job entity."""

    id = fields.String()
    task_id = fields.String()
    status = fields.String()
    force = fields.Boolean()
    creation_date = fields.String()
    # NOTE(review): the plural name suggests several subscribers, yet this is a
    # single Nested schema rather than a List(Nested) — confirm intended shape.
    subscribers = fields.Nested(CallableSchema)
    stacktrace = fields.List(fields.String)