text
stringlengths
0
105k
import re
import typing as t
from datetime import datetime

from ..gui_types import PropertyType
from .builder import _Builder

if t.TYPE_CHECKING:
    from ..extension.library import ElementLibrary
    from ..gui import Gui


class _Factory:
    """Registry and dispatcher for visual element builders.

    For every built-in control this class records its default property name
    and a factory lambda that instantiates and configures a `_Builder`.
    Control names that are not built-in are looked up in the registered
    extension libraries (see `set_library()`).
    """

    # Control type used when none is specified in a page fragment.
    DEFAULT_CONTROL = "text"

    # Suffixes identifying the opening/closing parts of block controls
    # (e.g. "layout.start" / "layout.end").
    _START_SUFFIX = ".start"
    _END_SUFFIX = ".end"

    # Optional namespace prefix allowed in front of built-in control names.
    __TAIPY_NAME_SPACE = "taipy."

    # Name of the default property for each built-in control: the property
    # that receives the value written without a property name in pages.
    __CONTROL_DEFAULT_PROP_NAME = {
        "button": "label",
        "chart": "data",
        "content": "value",
        "date": "date",
        "date_range": "dates",
        "dialog": "open",
        "expandable": "title",
        "file_download": "content",
        "file_selector": "content",
        "image": "content",
        "indicator": "display",
        "input": "value",
        "layout": "columns",
        "menu": "lov",
        "navbar": "value",
        "number": "value",
        "pane": "open",
        "part": "class_name",
        "selector": "value",
        "slider": "value",
        "status": "value",
        "table": "data",
        "text": "value",
        "toggle": "value",
        "tree": "value",
    }

    # Attributes accepted by the default ("text") control.
    _TEXT_ATTRIBUTES = ["format", "id", "hover_text", "raw"]

    # Valid values for the slider's text_anchor property.
    __TEXT_ANCHORS = ["bottom", "top", "left", "right"]
    __TEXT_ANCHOR_NONE = "none"

    # Registered extension libraries, keyed by library name.
    __LIBRARIES: t.Dict[str, t.List["ElementLibrary"]] = {}

    # One factory lambda per built-in control:
    #   (gui, control_type, attrs) -> configured _Builder.
    # Each attribute descriptor is a tuple: (name[, type[, default, ...]]).
    # NOTE: bare parenthesized strings such as ("orientation") are NOT
    # tuples in Python; they have been normalized to 1-tuples for
    # consistency with every other descriptor (e.g. ("id",)).
    __CONTROL_BUILDERS = {
        "button": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Button",
            attributes=attrs,
        )
        .set_value_and_default(with_update=False)
        .set_attributes(
            [
                ("id",),
                ("on_action", PropertyType.function),
                ("active", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
            ]
        ),
        "chart": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Chart",
            attributes=attrs,
        )
        .set_value_and_default(with_default=False, var_type=PropertyType.data)
        .set_attributes(
            [
                ("id",),
                ("title",),
                ("width", PropertyType.string_or_number),
                ("height", PropertyType.string_or_number),
                ("layout", PropertyType.dynamic_dict),
                ("plot_config", PropertyType.dict),
                ("on_range_change", PropertyType.function),
                ("active", PropertyType.dynamic_boolean, True),
                ("render", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
                ("on_change", PropertyType.function),
                ("template", PropertyType.dict),
                ("template[dark]", PropertyType.dict, gui._get_config("chart_dark_template", None)),
                ("template[light]", PropertyType.dict),
            ]
        )
        ._get_chart_config("scatter", "lines+markers")
        ._set_propagate(),
        "content": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="PageContent", attributes=attrs
        ),
        "date": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="DateSelector",
            attributes=attrs,
            default_value=datetime.fromtimestamp(0),
        )
        .set_value_and_default(var_type=PropertyType.date)
        .set_attributes(
            [
                ("with_time", PropertyType.boolean),
                ("id",),
                ("active", PropertyType.dynamic_boolean, True),
                ("editable", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
                ("label",),
                ("on_change", PropertyType.function),
                ("format",),
            ]
        )
        ._set_propagate(),
        "date_range": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="DateRange",
            attributes=attrs,
        )
        .set_value_and_default(var_type=PropertyType.date_range)
        .set_attributes(
            [
                ("with_time", PropertyType.boolean),
                ("id",),
                ("active", PropertyType.dynamic_boolean, True),
                ("editable", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
                ("label_start",),
                ("label_end",),
                ("on_change", PropertyType.function),
                ("format",),
            ]
        )
        ._set_propagate(),
        "dialog": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Dialog",
            attributes=attrs,
        )
        .set_value_and_default(var_type=PropertyType.dynamic_boolean)
        ._set_partial()  # partial should be set before page
        .set_attributes(
            [
                ("id",),
                ("page",),
                ("title",),
                ("on_action", PropertyType.function),
                ("close_label", PropertyType.string),
                ("labels", PropertyType.string_list),
                ("active", PropertyType.dynamic_boolean, True),
                ("width", PropertyType.string_or_number),
                ("height", PropertyType.string_or_number),
                ("hover_text", PropertyType.dynamic_string),
            ]
        )
        ._set_propagate(),
        "expandable": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="Expandable", attributes=attrs, default_value=None
        )
        .set_value_and_default()
        ._set_partial()  # partial should be set before page
        .set_attributes(
            [
                ("id",),
                ("page",),
                ("expanded", PropertyType.dynamic_boolean, True, True, False),
                ("hover_text", PropertyType.dynamic_string),
                ("on_change", PropertyType.function),
            ]
        ),
        "file_download": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="FileDownload",
            attributes=attrs,
        )
        .set_value_and_default(var_name="label", with_update=False)
        ._set_content("content", image=False)
        .set_attributes(
            [
                ("id",),
                ("on_action", PropertyType.function),
                ("active", PropertyType.dynamic_boolean, True),
                ("render", PropertyType.dynamic_boolean, True),
                ("auto", PropertyType.boolean, False),
                ("bypass_preview", PropertyType.boolean, True),
                ("name",),
                ("hover_text", PropertyType.dynamic_string),
            ]
        ),
        "file_selector": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="FileSelector",
            attributes=attrs,
        )
        .set_value_and_default(var_name="label", with_update=False)
        ._set_file_content()
        .set_attributes(
            [
                ("id",),
                ("on_action", PropertyType.function),
                ("active", PropertyType.dynamic_boolean, True),
                ("multiple", PropertyType.boolean, False),
                ("extensions",),
                ("drop_message",),
                ("hover_text", PropertyType.dynamic_string),
            ]
        ),
        "image": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Image",
            attributes=attrs,
        )
        .set_value_and_default(var_name="label", with_update=False)
        ._set_content("content")
        .set_attributes(
            [
                ("id",),
                ("on_action", PropertyType.function),
                ("active", PropertyType.dynamic_boolean, True),
                ("width",),
                ("height",),
                ("hover_text", PropertyType.dynamic_string),
            ]
        ),
        "indicator": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Indicator",
            attributes=attrs,
        )
        .set_value_and_default(with_update=False, native_type=True)
        .set_attributes(
            [
                ("id",),
                ("min", PropertyType.number),
                ("max", PropertyType.number),
                ("value", PropertyType.dynamic_number),
                ("format",),
                ("orientation",),  # was a bare string ("orientation"): normalized to a 1-tuple
                ("hover_text", PropertyType.dynamic_string),
                ("width",),
                ("height",),
            ]
        ),
        "input": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Input",
            attributes=attrs,
        )
        ._set_input_type("text", True)
        .set_value_and_default()
        ._set_propagate()
        .set_attributes(
            [
                ("id",),
                ("active", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
                ("on_change", PropertyType.function),
                ("on_action", PropertyType.function),
                ("action_keys",),
                ("label",),
                ("change_delay", PropertyType.number, gui._get_config("change_delay", None)),
                ("multiline", PropertyType.boolean, False),
                ("lines_shown", PropertyType.number, 5),
            ]
        ),
        "layout": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="Layout", attributes=attrs, default_value=None
        )
        .set_value_and_default(with_default=False)
        .set_attributes(
            [
                ("id",),
                ("columns[mobile]",),
                ("gap",),
            ]
        ),
        "menu": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="MenuCtl",
            attributes=attrs,
        )
        ._get_adapter("lov")  # need to be called before set_lov
        ._set_lov()
        .set_attributes(
            [
                ("id",),
                ("active", PropertyType.dynamic_boolean, True),
                ("label",),  # was a bare string ("label"): normalized to a 1-tuple
                ("width",),  # was a bare string ("width"): normalized to a 1-tuple
                ("width[mobile]",),
                ("on_action", PropertyType.function),
                ("inactive_ids", PropertyType.dynamic_list),
                ("hover_text", PropertyType.dynamic_string),
            ]
        )
        ._set_propagate(),
        "navbar": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="NavBar", attributes=attrs, default_value=None
        )
        ._get_adapter("lov", multi_selection=False)  # need to be called before set_lov
        ._set_lov()
        .set_attributes(
            [
                ("id",),
                ("active", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
            ]
        ),
        "number": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Input",
            attributes=attrs,
            default_value=0,
        )
        ._set_input_type("number")
        .set_value_and_default(var_type=PropertyType.dynamic_number)
        ._set_propagate()
        .set_attributes(
            [
                ("id",),
                ("active", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
                ("on_change", PropertyType.function),
                ("on_action", PropertyType.function),
                ("label",),
                ("change_delay", PropertyType.number, gui._get_config("change_delay", None)),
            ]
        ),
        "pane": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="Pane", attributes=attrs, default_value=None
        )
        .set_value_and_default(var_type=PropertyType.dynamic_boolean)
        ._set_partial()  # partial should be set before page
        .set_attributes(
            [
                ("id",),
                ("page",),
                ("anchor", PropertyType.string, "left"),
                ("on_close", PropertyType.function),
                ("persistent", PropertyType.boolean, False),
                ("active", PropertyType.dynamic_boolean, True),
                ("width", PropertyType.string_or_number, "30vw"),
                ("height", PropertyType.string_or_number, "30vh"),
                ("hover_text", PropertyType.dynamic_string),
                ("on_change", PropertyType.function),
            ]
        )
        ._set_propagate(),
        "part": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="Part", attributes=attrs, default_value=None
        )
        ._set_partial()  # partial should be set before page
        .set_attributes(
            [
                ("id",),
                ("page", PropertyType.dynamic_string),
                ("render", PropertyType.dynamic_boolean, True),
                ("height", PropertyType.dynamic_string),
                ("content", PropertyType.toHtmlContent),
            ]
        ),
        "selector": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="Selector", attributes=attrs, default_value=None
        )
        .set_value_and_default(with_default=False, var_type=PropertyType.lov_value)
        ._get_adapter("lov")  # need to be called before set_lov
        ._set_lov()
        .set_attributes(
            [
                ("active", PropertyType.dynamic_boolean, True),
                ("dropdown", PropertyType.boolean, False),
                ("filter", PropertyType.boolean),
                ("height", PropertyType.string_or_number),
                ("hover_text", PropertyType.dynamic_string),
                ("id",),
                ("value_by_id", PropertyType.boolean),
                ("multiple", PropertyType.boolean),
                ("width", PropertyType.string_or_number),
                ("on_change", PropertyType.function),
                ("label",),
            ]
        )
        ._set_propagate(),
        "slider": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Slider",
            attributes=attrs,
            default_value=0,
        )
        .set_value_and_default(native_type=True, var_type=PropertyType.slider_value)
        .set_attributes(
            [
                ("active", PropertyType.dynamic_boolean, True),
                ("height",),  # was a bare string ("height"): normalized to a 1-tuple
                ("hover_text", PropertyType.dynamic_string),
                ("id",),
                ("value_by_id", PropertyType.boolean),
                ("max", PropertyType.number, 100),
                ("min", PropertyType.number, 0),
                ("orientation",),  # was a bare string ("orientation"): normalized to a 1-tuple
                ("width", PropertyType.string, "300px"),
                ("on_change", PropertyType.function),
                ("continuous", PropertyType.boolean, True),
                ("lov", PropertyType.lov),
                ("change_delay", PropertyType.number, gui._get_config("change_delay", None)),
            ]
        )
        ._set_labels()
        ._set_string_with_check("text_anchor", _Factory.__TEXT_ANCHORS + [_Factory.__TEXT_ANCHOR_NONE], "bottom")
        ._set_propagate(),
        "status": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Status",
            attributes=attrs,
        )
        .set_value_and_default(with_update=False)
        .set_attributes(
            [
                ("id",),
                ("without_close", PropertyType.boolean, False),
                ("hover_text", PropertyType.dynamic_string),
            ]
        ),
        "table": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Table",
            attributes=attrs,
        )
        .set_value_and_default(with_default=False, var_type=PropertyType.data)
        ._get_dataframe_attributes()
        .set_attributes(
            [
                # NOTE(review): default is the string "100" for a number-typed
                # property — looks intentional downstream, but worth confirming.
                ("page_size", PropertyType.number, "100"),
                ("allow_all_rows", PropertyType.boolean),
                ("show_all", PropertyType.boolean),
                ("auto_loading", PropertyType.boolean),
                ("width", PropertyType.string_or_number, "100%"),
                ("height", PropertyType.string_or_number, "80vh"),
                ("id",),
                ("active", PropertyType.dynamic_boolean, True),
                ("editable", PropertyType.dynamic_boolean, True),
                ("on_edit", PropertyType.function),
                ("on_delete", PropertyType.function),
                ("on_add", PropertyType.function),
                ("on_action", PropertyType.function),
                ("nan_value",),
                ("filter", PropertyType.boolean),
                ("hover_text", PropertyType.dynamic_string),
                ("size",),
            ]
        )
        ._set_propagate()
        ._get_list_attribute("selected", PropertyType.number)
        ._set_table_pagesize_options(),
        "text": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="Field",
            attributes=attrs,
        )
        .set_value_and_default(with_update=False)
        ._set_dataType()
        .set_attributes(
            [
                ("format",),
                ("id",),
                ("hover_text", PropertyType.dynamic_string),
                ("raw", PropertyType.boolean, False),
                ("mode", PropertyType.string),
            ]
        ),
        "toggle": lambda gui, control_type, attrs: _Builder(
            gui=gui, control_type=control_type, element_name="Toggle", attributes=attrs, default_value=None
        )
        .set_value_and_default(with_default=False, var_type=PropertyType.lov_value)
        ._get_adapter("lov", multi_selection=False)  # need to be called before set_lov
        ._set_lov()
        .set_attributes(
            [
                ("active", PropertyType.dynamic_boolean, True),
                ("hover_text", PropertyType.dynamic_string),
                ("id",),
                ("label",),
                ("value_by_id", PropertyType.boolean),
                ("unselected_value", PropertyType.string, ""),
                ("allow_unselect", PropertyType.boolean),
                ("on_change", PropertyType.function),
            ]
        )
        ._set_kind()
        ._set_propagate(),
        "tree": lambda gui, control_type, attrs: _Builder(
            gui=gui,
            control_type=control_type,
            element_name="TreeView",
            attributes=attrs,
        )
        .set_value_and_default(with_default=False, var_type=PropertyType.lov_value)
        .set_attributes(
            [
                ("active", PropertyType.dynamic_boolean, True),
                ("expanded", PropertyType.boolean_or_list, True),
                ("filter", PropertyType.boolean),
                ("hover_text", PropertyType.dynamic_string),
                ("height", PropertyType.string_or_number),
                ("id",),
                ("value_by_id", PropertyType.boolean),
                ("multiple", PropertyType.boolean),
                ("width", PropertyType.string_or_number),
                ("on_change", PropertyType.function),
                ("select_leafs_only", PropertyType.boolean),
                ("row_height", PropertyType.string),
                ("lov", PropertyType.lov),
            ]
        )
        ._set_propagate(),
    }

    # Matches `name="value"` pairs in markdown/HTML fragments, allowing
    # escaped double quotes inside the value.
    # TODO: process \" in property value
    _PROPERTY_RE = re.compile(r"\s+([a-zA-Z][\.a-zA-Z_$0-9]*(?:\[(?:.*?)\])?)=\"((?:(?:(?<=\\)\")|[^\"])*)\"")

    @staticmethod
    def set_library(library: "ElementLibrary"):
        """Register an extension library after validating all of its elements.

        Arguments:
            library: the `ElementLibrary` instance to register. Invalid
                libraries (wrong type, unnamed, or empty) are silently ignored.
        """
        from ..extension.library import Element, ElementLibrary

        if isinstance(library, ElementLibrary) and isinstance(library.get_name(), str) and library.get_elements():
            elements = library.get_elements()
            for name, element in elements.items():
                if isinstance(element, Element):
                    element.check(name)
            fact_lib = _Factory.__LIBRARIES.get(library.get_name())
            if fact_lib is None:
                _Factory.__LIBRARIES.update({library.get_name(): [library]})
            else:
                # Several libraries may share the same name.
                fact_lib.append(library)

    @staticmethod
    def get_default_property_name(control_name: str) -> t.Optional[str]:
        """Return the default property name for a control, or None if unknown.

        The ".start"/".end" suffixes of block controls and the "taipy."
        namespace prefix are stripped before lookup; extension libraries are
        consulted when the name is not a built-in control.
        """
        name = (
            control_name[: -len(_Factory._START_SUFFIX)]
            if control_name.endswith(_Factory._START_SUFFIX)
            else control_name[: -len(_Factory._END_SUFFIX)]
            if control_name.endswith(_Factory._END_SUFFIX)
            else control_name
        )
        name = name[len(_Factory.__TAIPY_NAME_SPACE) :] if name.startswith(_Factory.__TAIPY_NAME_SPACE) else name
        prop = _Factory.__CONTROL_DEFAULT_PROP_NAME.get(name)
        if prop is None:
            _, _, element = _Factory.__get_library_element(name)
            if element:
                prop = element.default_attribute
        return prop

    @staticmethod
    def __get_library_element(name: str):
        """Resolve `name` to (library, element_name, element).

        A dotted name is interpreted as "<library>.<element>"; an un-dotted
        name is searched across every registered library. Returns a triple of
        Nones when nothing matches.
        """
        parts = name.split(".")
        if len(parts) > 1:
            element_name = ".".join(parts[1:])
            for lib in _Factory.__LIBRARIES.get(parts[0], []):
                elts = lib.get_elements()
                if isinstance(elts, dict):
                    element = elts.get(element_name)
                    if element:
                        return lib, element_name, element
        else:
            element_name = name
            for libs in list(_Factory.__LIBRARIES.values()):
                for lib in libs:
                    elts = lib.get_elements()
                    if isinstance(elts, dict):
                        element = elts.get(element_name)
                        if element:
                            return lib, element_name, element
        return None, None, None

    @staticmethod
    def call_builder(
        gui: "Gui", name: str, all_properties: t.Optional[t.Dict[str, t.Any]] = None, is_html: t.Optional[bool] = False
    ) -> t.Optional[t.Union[t.Any, t.Tuple[str, str]]]:
        """Build the element for control `name` and return its rendering.

        Returns the string form when `is_html` is set, the XML element
        otherwise, or None when the control name cannot be resolved.
        """
        name = name[len(_Factory.__TAIPY_NAME_SPACE) :] if name.startswith(_Factory.__TAIPY_NAME_SPACE) else name
        builder = _Factory.__CONTROL_BUILDERS.get(name)
        built = None
        if builder is None:
            # Not a built-in control: try the registered extension libraries.
            lib, element_name, element = _Factory.__get_library_element(name)
            if lib:
                from ..extension.library import Element

                if isinstance(element, Element):
                    return element._call_builder(element_name, gui, all_properties, lib, is_html)
        else:
            built = builder(gui, name, all_properties)
        if isinstance(built, _Builder):
            return built._build_to_string() if is_html else built.el
        return None
import contextlib
import json
import numbers
import time as _time
import typing as t
import xml.etree.ElementTree as etree
from datetime import date, datetime, time
from inspect import isclass
from urllib.parse import quote

from .._warnings import _warn
from ..gui_types import PropertyType, _get_taipy_type
from ..partial import Partial
from ..utils import (
    _date_to_string,
    _get_broadcast_var_name,
    _get_client_var_name,
    _get_data_type,
    _get_expr_var_name,
    _getscopeattr,
    _getscopeattr_drill,
    _is_boolean,
    _is_boolean_true,
    _MapDict,
    _to_camel_case,
)
from ..utils.chart_config_builder import _CHART_NAMES, _build_chart_config
from ..utils.table_col_builder import _enhance_columns, _get_name_indexed_property
from ..utils.types import _TaipyBase, _TaipyData
from .json import _TaipyJsonEncoder
from .utils import _add_to_dict_and_get, _get_columns_dict, _get_tuple_val

if t.TYPE_CHECKING:
    from ..gui import Gui


class _Builder:
    """
    Constructs an XML node that can be rendered as a React node.

    This class can only be instantiated internally by Taipy.
    """

    # Per-element-name counters used to generate unique React keys.
    __keys: t.Dict[str, int] = {}

    # Controls rendered as block (start/end) elements.
    __BLOCK_CONTROLS = ["dialog", "expandable", "pane", "part"]

    # Table properties whose change requires the columns dict to be rebuilt.
    __TABLE_COLUMNS_DEPS = [
        "data",
        "columns",
        "date_format",
        "number_format",
        "nan_value",
        "width",
        "filter",
        "editable",
        "group_by",
        "apply",
        "style",
        "tooltip",
    ]

    def __init__(
        self,
        gui: "Gui",
        control_type: str,
        element_name: str,
        attributes: t.Optional[t.Dict[str, t.Any]],
        hash_names: t.Dict[str, str] = {},  # NOTE(review): mutable default — never mutated here (only .copy()/.get), but fragile
        default_value="<Empty>",
        lib_name: str = "taipy",
    ):
        # Create the XML element and record the control's context: its type,
        # element name, owning library, raw attributes and pre-computed
        # variable hash names.
        from ..gui import Gui
        from .factory import _Factory

        self.el = etree.Element(element_name)

        self.__control_type = control_type
        self.__element_name = element_name
        self.__lib_name = lib_name
        self.__attributes = attributes or {}
        self.__hashes = hash_names.copy()
        self.__update_vars: t.List[str] = []
        self.__gui: Gui = gui

        # If the default property has no value, give it the control's default.
        self.__default_property_name = _Factory.get_default_property_name(control_type) or ""
        default_property_value = self.__attributes.get(self.__default_property_name, None)
        if default_property_value is None and default_value is not None:
            self.__attributes[self.__default_property_name] = default_value

        # Bind properties dictionary to attributes if condition is matched
        # (function binding is left for the builder to resolve later).
        if "properties" in self.__attributes:
            (prop_dict, prop_hash) = _Builder.__parse_attribute_value(gui, self.__attributes["properties"])
            if prop_hash is None:
                # Not an expression: treat the raw value as a variable name and bind it.
                prop_hash = prop_dict
                prop_hash = self.__gui._bind_var(prop_hash)
            if hasattr(self.__gui._bindings(), prop_hash):
                prop_dict = _getscopeattr(self.__gui, prop_hash)
            if isinstance(prop_dict, (dict, _MapDict)):
                # Iterate through prop_dict and append to self.attributes
                var_name, _ = gui._get_real_var_name(prop_hash)
                for k, v in prop_dict.items():
                    # NOTE(review): `val` is unused; only `key_hash` drives the choice below.
                    (val, key_hash) = _Builder.__parse_attribute_value(gui, v)
                    self.__attributes[k] = (
                        f"{{None if ({var_name}) is None else ({var_name}).get('{k}')}}" if key_hash is None else v
                    )
            else:
                _warn(f"{self.__control_type}.properties ({prop_hash}) must be a dict.")

        # Bind potential function and expressions in self.attributes
        self.__hashes.update(_Builder._get_variable_hash_names(gui, self.__attributes, hash_names))

        # set classname
        self.__set_class_names()
        # define a unique key
        self.set_attribute("key", _Builder._get_key(self.__element_name))

    @staticmethod
    def __parse_attribute_value(gui: "Gui", value) -> t.Tuple:
        """Evaluate `value` if it is an expression.

        Returns (resolved_value, hash_name); hash_name is None when `value`
        was not an expression or could not be evaluated.
        """
        if isinstance(value, str) and gui._is_expression(value):
            hash_value = gui._evaluate_expr(value)
            try:
                # Prefer a user function over a scope attribute of the same name.
                func = gui._get_user_function(hash_value)
                if callable(func):
                    return (func, hash_value)
                return (_getscopeattr_drill(gui, hash_value), hash_value)
            except AttributeError:
                _warn(f"Expression '{value}' cannot be evaluated.")
        return (value, None)

    @staticmethod
    def _get_variable_hash_names(
        gui: "Gui", attributes: t.Dict[str, t.Any], hash_names: t.Dict[str, str] = {}  # NOTE(review): mutable default — only read via .get()
    ) -> t.Dict[str, str]:
        """Compute the hash (variable) name of every bindable attribute.

        Callables are bound to generated names; string attributes are parsed
        as potential expressions. `attributes` is updated in place with the
        resolved values. Returns the {attribute: hash_name} mapping.
        """
        hashes = {}
        # Bind potential function and expressions in self.attributes
        for k, v in attributes.items():
            val = v
            hashname = hash_names.get(k)
            if hashname is None:
                if callable(v):
                    if v.__name__ == "<lambda>":
                        hashname = _get_expr_var_name(v.__code__)
                        gui._bind_var_val(hashname, v)
                    else:
                        hashname = _get_expr_var_name(v.__name__)
                elif isinstance(v, str):
                    # need to unescape the double quotes that were escaped during preprocessing
                    (val, hashname) = _Builder.__parse_attribute_value(gui, v.replace('\\"', '"'))
                if val is not None or hashname:
                    attributes[k] = val
            if hashname:
                hashes[k] = hashname
        return hashes

    @staticmethod
    def __to_string(x: t.Any) -> str:
        """Stringify any value (helper kept for uniform conversion)."""
        return str(x)

    @staticmethod
    def _get_key(name: str) -> str:
        """Return a unique "name.index" key for a React element of `name`."""
        key_index = _Builder.__keys.get(name, 0)
        _Builder.__keys[name] = key_index + 1
        return f"{name}.{key_index}"

    @staticmethod
    def _reset_key() -> None:
        """Reset the per-element key counters (typically between renders)."""
        _Builder.__keys = {}

    def __get_list_of_(self, name: str):
        """Return attribute `name` as a list; a string is split on ';'."""
        lof = self.__attributes.get(name)
        if isinstance(lof, str):
            lof = list(lof.split(";"))
        return lof

    def get_name_indexed_property(self, name: str) -> t.Dict[str, t.Any]:
        """
        TODO-undocumented
        Returns all properties defined as <property name>[<named index>] as a dict.

        Arguments:
            name (str): The property name.
        """
        return _get_name_indexed_property(self.__attributes, name)

    def __get_boolean_attribute(self, name: str, default_value: bool = False) -> bool:
        """Read attribute `name` as a boolean (string values are parsed)."""
        boolattr = self.__attributes.get(name, default_value)
        return _is_boolean_true(boolattr) if isinstance(boolattr, str) else bool(boolattr)

    def set_boolean_attribute(self, name: str, value: bool):
        """
        TODO-undocumented
        Defines a React Boolean attribute (attr={true|false}).

        Arguments:
            name (str): The property name.
            value (bool): the boolean value.
        """
        return self.__set_react_attribute(_to_camel_case(name), value)

    def set_dict_attribute(self, name: str, default_value: t.Optional[t.Dict[str, t.Any]] = None):
        """
        TODO-undocumented
        Defines a React attribute as a stringified json dict.
        The original property can be a dict or a string formed as <key 1>:<value 1>;<key 2>:<value 2>.

        Arguments:
            name (str): The property name.
            default value (dict): used if no value is specified.
        """
        dict_attr = self.__attributes.get(name)
        if dict_attr is None:
            dict_attr = default_value
        if dict_attr is not None:
            if isinstance(dict_attr, str):
                # Parse "k1:v1;k2:v2" into a dict; entries without ':' are dropped.
                vals = [x.strip().split(":") for x in dict_attr.split(";")]
                dict_attr = {val[0].strip(): val[1].strip() for val in vals if len(val) > 1}
            if isinstance(dict_attr, (dict, _MapDict)):
                self.__set_json_attribute(_to_camel_case(name), dict_attr)
            else:
                _warn(f"{self.__element_name}: {name} should be a dict: '{str(dict_attr)}'.")
        return self

    def set_dynamic_dict_attribute(self, name: str, default_value: t.Optional[t.Dict[str, t.Any]] = None):
        """
        TODO-undocumented
        Defines a React attribute as a stringified json dict.
        The original property can be a dict or a string formed as <key 1>:<value 1>;<key 2>:<value 2>.

        Arguments:
            name (str): The property name.
            default value (dict): used if no value is specified.
        """
        # Same parsing as set_dict_attribute, but the static value goes to
        # "default_<name>" and a bound variable (if any) feeds the dynamic prop.
        dict_attr = self.__attributes.get(name)
        if dict_attr is None:
            dict_attr = default_value
        if dict_attr is not None:
            if isinstance(dict_attr, str):
                vals = [x.strip().split(":") for x in dict_attr.split(";")]
                dict_attr = {val[0].strip(): val[1].strip() for val in vals if len(val) > 1}
            if isinstance(dict_attr, (dict, _MapDict)):
                self.__set_json_attribute(_to_camel_case("default_" + name), dict_attr)
            else:
                _warn(f"{self.__element_name}: {name} should be a dict: '{str(dict_attr)}'.")
        if dict_hash := self.__hashes.get(name):
            dict_hash = self.__get_typed_hash_name(dict_hash, PropertyType.dynamic_dict)
            prop_name = _to_camel_case(name)
            self.__update_vars.append(f"{prop_name}={dict_hash}")
            self.__set_react_attribute(prop_name, dict_hash)
        return self

    def __set_json_attribute(self, name, value):
        """Set attribute `name` to the JSON serialization of `value`."""
        return self.set_attribute(name, json.dumps(value, cls=_TaipyJsonEncoder))

    def __set_list_of_(self, name: str):
        """Set attribute `name` from a list-valued property (warn otherwise)."""
        lof = self.__get_list_of_(name)
        if not isinstance(lof, (list, tuple)):
            if lof is not None:
                _warn(f"{self.__element_name}: {name} should be a list.")
            return self
        return self.__set_json_attribute(_to_camel_case(name), lof)

    def set_number_attribute(self, name: str, default_value: t.Optional[str] = None, optional: t.Optional[bool] = True):
        """
        TODO-undocumented
        Defines a React number attribute (attr={<number>}).

        Arguments:
            name (str): The property name.
            default_value (optional(str)): the default value as a string.
            optional (bool): Default to True, the property is required if False.

        Raises:
            ValueError: if the value cannot be interpreted as a number.
        """
        value = self.__attributes.get(name, default_value)
        if value is None:
            if not optional:
                _warn(f"Property {name} is required for control {self.__control_type}.")
            return self
        if isinstance(value, str):
            try:
                val = float(value)
            except ValueError:
                raise ValueError(f"Property {name} expects a number for control {self.__control_type}")
        elif isinstance(value, numbers.Number):
            val = value  # type: ignore
        else:
            raise ValueError(
                f"Property {name} expects a number for control {self.__control_type}, received {type(value)}"
            )
        return self.__set_react_attribute(_to_camel_case(name), val)

    def __set_string_attribute(
        self, name: str, default_value: t.Optional[str] = None, optional: t.Optional[bool] = True
    ):
        """Set a plain string attribute; warn if missing and not optional."""
        strattr = self.__attributes.get(name, default_value)
        if strattr is None:
            if not optional:
                _warn(f"Property {name} is required for control {self.__control_type}.")
            return self
        return self.set_attribute(_to_camel_case(name), str(strattr))

    def __set_dynamic_string_attribute(
        self,
        name: str,
        default_value: t.Optional[str] = None,
        with_update: t.Optional[bool] = False,
        dynamic_property_name: t.Optional[str] = None,
    ):
        """Set a string attribute that may be bound to a variable.

        The static value goes to "default_<name>" (unless a distinct dynamic
        property name is supplied); a bound variable feeds the dynamic prop,
        optionally registered for updates.
        """
        str_val = self.__attributes.get(name, default_value)
        if str_val is not None:
            self.set_attribute(
                _to_camel_case(f"default_{name}" if dynamic_property_name is None else name), str(str_val)
            )
        if hash_name := self.__hashes.get(name):
            prop_name = _to_camel_case(name if dynamic_property_name is None else dynamic_property_name)
            if with_update:
                self.__update_vars.append(f"{prop_name}={hash_name}")
            self.__set_react_attribute(prop_name, hash_name)
        return self

    def __set_function_attribute(
        self, name: str, default_value: t.Optional[str] = None, optional: t.Optional[bool] = True
    ):
        """Set an attribute naming a user callback function.

        Callables are replaced by their hash name; string values are checked
        against the Gui's registered user functions (warn when unknown).
        """
        strattr = self.__attributes.get(name, default_value)
        if strattr is None:
            if not optional:
                _warn(f"Property {name} is required for control {self.__control_type}.")
            return self
        elif callable(strattr):
            strattr = self.__hashes.get(name)
            if strattr is None:
                return self
        elif strattr:
            strattr = str(strattr)
            func = self.__gui._get_user_function(strattr)
            if func == strattr:
                _warn(f"{self.__control_type}.{name}: {strattr} is not a function.")
        return self.set_attribute(_to_camel_case(name), strattr) if strattr else self

    def __set_string_or_number_attribute(self, name: str, default_value: t.Optional[t.Any] = None):
        """Set attribute `name` as a React number when numeric, else as text."""
        attr = self.__attributes.get(name, default_value)
        if attr is None:
            return self
        if isinstance(attr, numbers.Number):
            return self.__set_react_attribute(_to_camel_case(name), attr)
        else:
            return self.set_attribute(_to_camel_case(name), attr)

    def __set_react_attribute(self, name: str, value: t.Any):
        """Set a React expression attribute, wrapped in the "{!...!}" markers."""
        return self.set_attribute(name, "{!" + (str(value).lower() if isinstance(value, bool) else str(value)) + "!}")

    def _get_adapter(self, var_name: str, property_name: t.Optional[str] = None, multi_selection=True):  # noqa: C901
        # Resolve the adapter function and item type for a list-of-values
        # property, registering the type with the Gui when the lov variable
        # is bound.
        property_name = var_name if property_name is None else property_name
        lov = self.__get_list_of_(var_name)
        if isinstance(lov, list):
            adapter = self.__attributes.get("adapter")
            if adapter and isinstance(adapter, str):
                adapter = self.__gui._get_user_function(adapter)
            if adapter and not callable(adapter):
                _warn("'adapter' property value is invalid.")
                adapter = None
            var_type = self.__attributes.get("type")
            if isclass(var_type):
                var_type = var_type.__name__  # type: ignore
            if not isinstance(var_type, str):
                # Infer the item type from the first lov item or the current value.
                elt = None
                if len(lov) == 0:
                    value = self.__attributes.get("value")
                    if isinstance(value, list):
                        if len(value) > 0:
                            elt = value[0]
                    else:
                        elt = value
                else:
                    elt = lov[0]
                var_type = self.__gui._get_unique_type_adapter(type(elt).__name__)
            if adapter is None:
                adapter = self.__gui._get_adapter_for_type(var_type)
            elif var_type == str.__name__ and callable(adapter):
                # Disambiguate the type name with the adapter's identity.
                var_type += (
                    _get_expr_var_name(str(adapter.__code__))
                    if adapter.__name__ == "<lambda>"
                    else _get_expr_var_name(adapter.__name__)
                )
            if lov_name := self.__hashes.get(var_name):
                if adapter is None:
                    adapter = self.__gui._get_adapter_for_type(lov_name)
                else:
                    self.__gui._add_type_for_var(lov_name, var_type)
                # NOTE(review): SOURCE is truncated here, mid-way through
                # _get_adapter — the remainder of this method (and of the
                # class) is not visible in this chunk.
if value_name := self.__hashes.get("value"): if adapter is None: adapter = self.__gui._get_adapter_for_type(value_name) else: self.__gui._add_type_for_var(value_name, var_type) if adapter is not None: self.__gui._add_adapter_for_type(var_type, adapter) # type: ignore ret_list = [] if len(lov) > 0: for elt in lov: ret = self.__gui._run_adapter( t.cast(t.Callable, adapter), elt, adapter.__name__ if callable(adapter) else "adapter" ) # type: ignore if ret is not None: ret_list.append(ret) self.__attributes[f"default_{property_name}"] = ret_list ret_list = [] value = self.__attributes.get("value") val_list = value if isinstance(value, list) else [value] for val in val_list: ret = self.__gui._run_adapter( t.cast(t.Callable, adapter), val, adapter.__name__ if callable(adapter) else "adapter", id_only=True ) # type: ignore if ret is not None: ret_list.append(ret) if multi_selection: self.__set_default_value("value", ret_list) else: ret_val = ret_list[0] if len(ret_list) else "" if ret_val == "-1" and self.__attributes.get("unselected_value") is not None: ret_val = str(self.__attributes.get("unselected_value", "")) self.__set_default_value("value", ret_val) return self def __filter_attribute_names(self, names: t.Iterable[str]): return [k for k in self.__attributes if k in names or any(k.startswith(n + "[") for n in names)] def __get_holded_name(self, key: str): name = self.__hashes.get(key) if name: v = self.__attributes.get(key) if isinstance(v, _TaipyBase): return name[: len(v.get_hash()) + 1] return name def __filter_attributes_hashes(self, keys: t.List[str]): hash_names = [k for k in self.__hashes if k in keys] attr_names = [k for k in keys if k not in hash_names] return ( {k: v for k, v in self.__attributes.items() if k in attr_names}, {k: self.__get_holded_name(k) for k in self.__hashes if k in hash_names}, ) def __build_rebuild_fn(self, fn_name: str, attribute_names: t.Iterable[str]): rebuild = self.__attributes.get("rebuild", False) rebuild_hash = 
self.__hashes.get("rebuild") if rebuild_hash or rebuild: attributes, hashes = self.__filter_attributes_hashes(self.__filter_attribute_names(attribute_names)) rebuild_name = f"bool({self.__gui._get_real_var_name(rebuild_hash)[0]})" if rebuild_hash else "None" try: self.__gui._set_building(True) return self.__gui._evaluate_expr( "{" + f'{fn_name}({rebuild}, {rebuild_name}, "{quote(json.dumps(attributes))}", "{quote(json.dumps(hashes))}", {", ".join([f"{k}={v2}" for k, v2 in {v: self.__gui._get_real_var_name(v)[0] for v in hashes.values()}.items()])})' # noqa: E501 + "}" ) finally: self.__gui._set_building(False) return None def _get_dataframe_attributes(self) -> "_Builder": date_format = _add_to_dict_and_get(self.__attributes, "date_format", "MM/dd/yyyy") data = self.__attributes.get("data") data_hash = self.__hashes.get("data", "") col_types = self.__gui._accessors._get_col_types(data_hash, _TaipyData(data, data_hash)) col_dict = _get_columns_dict( data, self.__attributes.get("columns", {}), col_types, date_format, self.__attributes.get("number_format") ) rebuild_fn_hash = self.__build_rebuild_fn( self.__gui._get_rebuild_fn_name("_tbl_cols"), _Builder.__TABLE_COLUMNS_DEPS ) if rebuild_fn_hash: self.__set_react_attribute("columns", rebuild_fn_hash) if col_dict is not None: _enhance_columns(self.__attributes, self.__hashes, col_dict, self.__element_name) self.__set_json_attribute("defaultColumns", col_dict) if line_style := self.__attributes.get("style"): if callable(line_style): value = self.__hashes.get("style") elif isinstance(line_style, str): value = line_style.strip() else: value = None if value in col_types.keys(): _warn(f"{self.__element_name}: style={value} must not be a column name.") elif value: self.set_attribute("lineStyle", value) if tooltip := self.__attributes.get("tooltip"): if callable(tooltip): value = self.__hashes.get("tooltip") elif isinstance(tooltip, str): value = tooltip.strip() else: value = None if value in col_types.keys(): 
_warn(f"{self.__element_name}: tooltip={value} must not be a column name.") elif value: self.set_attribute("tooltip", value) return self def _get_chart_config(self, default_type: str, default_mode: str): self.__attributes["_default_type"] = default_type self.__attributes["_default_mode"] = default_mode rebuild_fn_hash = self.__build_rebuild_fn( self.__gui._get_rebuild_fn_name("_chart_conf"), _CHART_NAMES + ("_default_type", "_default_mode", "data") ) if rebuild_fn_hash: self.__set_react_attribute("config", rebuild_fn_hash) # read column definitions data = self.__attributes.get("data") data_hash = self.__hashes.get("data", "") col_types = self.__gui._accessors._get_col_types(data_hash, _TaipyData(data, data_hash)) config = _build_chart_config(self.__gui, self.__attributes, col_types) self.__set_json_attribute("defaultConfig", config) self._set_chart_selected(max=len(config.get("traces", ""))) self.__set_refresh_on_update() return self def _set_string_with_check(self, var_name: str, values: t.List[str], default_value: t.Optional[str] = None): value = self.__attributes.get(var_name, default_value) if value is not None: value = str(value).lower() self.__attributes[var_name] = value if value not in values: _warn(f"{self.__element_name}: {var_name}={value} should be in {values}.") else: self.__set_string_attribute(var_name, default_value) return self def __set_list_attribute( self, name: str, hash_name: t.Optional[str], val: t.Any, elt_type: t.Type, dynamic=True ) -> t.List[str]: if not hash_name and isinstance(val, str): val = [elt_type(t.strip()) for t in val.split(";")] if isinstance(val, list): if hash_name and dynamic: self.__set_react_attribute(name, hash_name) return [f"{name}={hash_name}"] else: self.__set_json_attribute(name, val) elif val is not None: _warn(f"{self.__element_name}: {name} should be a list of {elt_type}.") return [] def _set_chart_selected(self, max=0): name = "selected" default_sel = self.__attributes.get(name) idx = 1 name_idx = 
f"{name}[{idx}]" sel = self.__attributes.get(name_idx) while idx <= max: if sel is not None or default_sel is not None: self.__update_vars.extend( self.__set_list_attribute( f"{name}{idx - 1}", self.__hashes.get(name_idx if sel is not None else name), sel if sel is not None else default_sel, int, ) ) idx += 1 name_idx = f"{name}[{idx}]" sel = self.__attributes.get(name_idx) def _get_list_attribute(self, name: str, list_type: PropertyType): varname = self.__hashes.get(name) if varname is None: list_val = self.__attributes.get(name) if isinstance(list_val, str): list_val = list(list_val.split(";")) if isinstance(list_val, list): # TODO catch the cast exception if list_type.value == PropertyType.number.value: list_val = [int(v) for v in list_val] else: list_val = [int(v) for v in list_val] else: if list_val is not None: _warn(f"{self.__element_name}: {name} should be a list.") list_val = [] self.__set_react_attribute(_to_camel_case(name), list_val) else: self.__set_react_attribute(_to_camel_case(name), varname) return self def __set_class_names(self): self.set_attribute("libClassName", self.__lib_name + "-" + self.__control_type.replace("_", "-")) return self.__set_dynamic_string_attribute("class_name", dynamic_property_name="dynamic_class_name") def _set_dataType(self): value = self.__attributes.get("value") return self.set_attribute("dataType", _get_data_type(value)) def _set_file_content(self, var_name: str = "content"): if hash_name := self.__hashes.get(var_name): self.__set_update_var_name(hash_name) else: _warn(f"file_selector: {var_name} should be bound.") return self def _set_content(self, var_name: str = "content", image=True): content = self.__attributes.get(var_name) hash_name = self.__hashes.get(var_name) if content is None and hash_name is None: return self value = self.__gui._get_content(hash_name or var_name, content, image) if hash_name: hash_name = self.__get_typed_hash_name(hash_name, PropertyType.image if image else PropertyType.content) if 
hash_name: self.__set_react_attribute( var_name, _get_client_var_name(hash_name), ) return self.set_attribute(_to_camel_case(f"default_{var_name}"), value) def _set_lov(self, var_name="lov", property_name: t.Optional[str] = None): property_name = var_name if property_name is None else property_name self.__set_list_of_(f"default_{property_name}") if hash_name := self.__hashes.get(var_name): hash_name = self.__get_typed_hash_name(hash_name, PropertyType.lov) self.__update_vars.append(f"{property_name}={hash_name}") self.__set_react_attribute(property_name, hash_name) return self def __set_dynamic_string_list(self, var_name: str, default_value: t.Any): hash_name = self.__hashes.get(var_name) loi = self.__attributes.get(var_name) if loi is None: loi = default_value if isinstance(loi, str): loi = [s.strip() for s in loi.split(";") if s.strip()] if isinstance(loi, list): self.__set_json_attribute(_to_camel_case(f"default_{var_name}"), loi) if hash_name: self.__update_vars.append(f"{var_name}={hash_name}") self.__set_react_attribute(var_name, hash_name) return self def __set_dynamic_number_attribute(self, var_name: str, default_value: t.Any): hash_name = self.__hashes.get(var_name) numVal = self.__attributes.get(var_name) if numVal is None: numVal = default_value if isinstance(numVal, str): try: numVal = float(numVal) except Exception as e: _warn(f"{self.__element_name}: {var_name} cannot be transformed into a number", e) numVal = 0 if isinstance(numVal, numbers.Number): self.__set_react_attribute(_to_camel_case(f"default_{var_name}"), numVal) elif numVal is not None: _warn(f"{self.__element_name}: {var_name} value is not valid ({numVal}).") if hash_name: hash_name = self.__get_typed_hash_name(hash_name, PropertyType.number) self.__update_vars.append(f"{var_name}={hash_name}") self.__set_react_attribute(var_name, hash_name) return self def __set_default_value( self, var_name: str, value: t.Optional[t.Any] = None, native_type: bool = False, var_type: 
t.Optional[PropertyType] = None, ): if value is None: value = self.__attributes.get(var_name) default_var_name = _to_camel_case(f"default_{var_name}") if isinstance(value, (datetime, date, time)): return self.set_attribute(default_var_name, _date_to_string(value)) elif isinstance(value, str): return self.set_attribute(default_var_name, value) elif native_type and isinstance(value, numbers.Number): return self.__set_react_attribute(default_var_name, value) elif value is None: return self.__set_react_attribute(default_var_name, "null") elif var_type == PropertyType.lov_value: # Done by _get_adapter return self elif isclass(var_type) and issubclass(var_type, _TaipyBase): # type: ignore return self.__set_default_value(var_name, t.cast(t.Callable, var_type)(value, "").get()) else: return self.__set_json_attribute(default_var_name, value) def __set_update_var_name(self, hash_name: str): return self.set_attribute("updateVarName", hash_name) def set_value_and_default( self, var_name: t.Optional[str] = None, with_update=True, with_default=True, native_type=False, var_type: t.Optional[PropertyType] = None, default_val: t.Any = None, ): """ TODO-undocumented Sets the value associated with the default property. Arguments: var_name (str): The property name (default to default property name). with_update (optional(bool)): Should the attribute be dynamic (default True). with_default (optional(bool)): Should a default attribute be set (default True). native_type (optional(bool)): If var_type == dynamic_number, parse the value to number. var_type (optional(PropertyType)): the property type (default to string). default_val (optional(Any)): the default value. 
""" var_name = self.__default_property_name if var_name is None else var_name if var_type == PropertyType.slider_value: if self.__attributes.get("lov"): var_type = PropertyType.lov_value native_type = False else: var_type = ( PropertyType.dynamic_lo_numbers if isinstance(self.__attributes.get("value"), list) else PropertyType.dynamic_number ) native_type = True if var_type == PropertyType.dynamic_boolean: return self.set_attributes([(var_name, var_type, bool(default_val), with_update)]) if hash_name := self.__hashes.get(var_name): hash_name = self.__get_typed_hash_name(hash_name, var_type) self.__set_react_attribute( _to_camel_case(var_name), _get_client_var_name(hash_name), ) if with_update: self.__set_update_var_name(hash_name) if with_default: if native_type: val = self.__attributes.get(var_name) if native_type and isinstance(val, str): with contextlib.suppress(Exception): val = float(val) self.__set_default_value(var_name, val, native_type=native_type) else: self.__set_default_value(var_name, var_type=var_type) else: value = self.__attributes.get(var_name) if value is not None: if native_type: if isinstance(value, str): with contextlib.suppress(Exception): value = float(value) if isinstance(value, (int, float)): return self.__set_react_attribute(_to_camel_case(var_name), value) self.set_attribute(_to_camel_case(var_name), value) return self def _set_labels(self, var_name: str = "labels"): if value := self.__attributes.get(var_name): if _is_boolean_true(value): return self.__set_react_attribute(_to_camel_case(var_name), True) elif isinstance(value, (dict, _MapDict)): return self.set_dict_attribute(var_name) return self def _set_partial(self): if self.__control_type not in _Builder.__BLOCK_CONTROLS: return self if partial := self.__attributes.get("partial"): if self.__attributes.get("page"): _warn(f"{self.__element_name} control: page and partial should not be both defined.") if isinstance(partial, Partial): self.__attributes["page"] = partial._route 
self.__set_react_attribute("partial", partial._route) self.__set_react_attribute("defaultPartial", True) return self def _set_propagate(self): val = self.__get_boolean_attribute("propagate", self.__gui._config.config.get("propagate")) return self if val else self.set_boolean_attribute("propagate", False) def __set_refresh_on_update(self): if self.__update_vars: self.set_attribute("updateVars", ";".join(self.__update_vars)) return self def _set_table_pagesize_options(self, default_size=[50, 100, 500]): page_size_options = self.__attributes.get("page_size_options", default_size) if isinstance(page_size_options, str): try: page_size_options = [int(s.strip()) for s in page_size_options.split(";")] except Exception as e: _warn(f"{self.__element_name}: page_size_options value is invalid ({page_size_options})", e) if isinstance(page_size_options, list): self.__set_json_attribute("pageSizeOptions", page_size_options) else: _warn(f"{self.__element_name}: page_size_options should be a list.") return self def _set_input_type(self, type_name: str, allow_password=False): if allow_password and self.__get_boolean_attribute("password", False): return self.set_attribute("type", "password") return self.set_attribute("type", type_name) def _set_kind(self): if self.__attributes.get("theme", False): self.set_attribute("kind", "theme") return self def __get_typed_hash_name(self, hash_name: str, var_type: t.Optional[PropertyType]) -> str: if taipy_type := _get_taipy_type(var_type): expr = self.__gui._get_expr_from_hash(hash_name) hash_name = self.__gui._evaluate_bind_holder(taipy_type, expr) return hash_name def __set_dynamic_bool_attribute(self, name: str, def_val: t.Any, with_update: bool, update_main=True): hash_name = self.__hashes.get(name) val = self.__get_boolean_attribute(name, def_val) default_name = f"default_{name}" if hash_name is not None else name if val != def_val: self.set_boolean_attribute(default_name, val) if hash_name is not None: hash_name = 
self.__get_typed_hash_name(hash_name, PropertyType.dynamic_boolean) self.__set_react_attribute(_to_camel_case(name), _get_client_var_name(hash_name)) if with_update: if update_main: self.__set_update_var_name(hash_name) else: self.__update_vars.append(f"{_to_camel_case(name)}={hash_name}") def __set_dynamic_property_without_default( self, name: str, property_type: PropertyType, optional: t.Optional[bool] = False ): hash_name = self.__hashes.get(name) if hash_name is None: if not optional: _warn(f"{self.__element_name}.{name} should be bound.") else: hash_name = self.__get_typed_hash_name(hash_name, property_type) self.__update_vars.append(f"{_to_camel_case(name)}={hash_name}") self.__set_react_attribute(_to_camel_case(name), _get_client_var_name(hash_name)) return self def __set_html_content(self, name: str, property_name: str, property_type: PropertyType): hash_name = self.__hashes.get(name) if not hash_name: return self front_var = self.__get_typed_hash_name(hash_name, property_type) self.set_attribute( _to_camel_case(f"default_{property_name}"), self.__gui._get_user_content_url( None, { "variable_name": front_var, self.__gui._HTML_CONTENT_KEY: str(_time.time()), }, ), ) return self.__set_react_attribute(_to_camel_case(property_name), _get_client_var_name(front_var)) def set_attributes(self, attributes: t.List[tuple]): # noqa: C901 """ TODO-undocumented Sets the attributes from the property with type and default value. Arguments: attributes (list(tuple)): The list of attributes as (property name, property type, default value). 
""" for attr in attributes: if not isinstance(attr, tuple): attr = (attr,) var_type = _get_tuple_val(attr, 1, PropertyType.string) if var_type == PropertyType.boolean: def_val = _get_tuple_val(attr, 2, False) val = self.__get_boolean_attribute(attr[0], def_val) if val != def_val: self.set_boolean_attribute(attr[0], val) elif var_type == PropertyType.dynamic_boolean: self.__set_dynamic_bool_attribute( attr[0], _get_tuple_val(attr, 2, False), _get_tuple_val(attr, 3, False), _get_tuple_val(attr, 4, True), ) elif var_type == PropertyType.number: self.set_number_attribute(attr[0], _get_tuple_val(attr, 2, None)) elif var_type == PropertyType.dynamic_number: self.__set_dynamic_number_attribute(attr[0], _get_tuple_val(attr, 2, None)) elif var_type == PropertyType.string: self.__set_string_attribute(attr[0], _get_tuple_val(attr, 2, None), _get_tuple_val(attr, 3, True)) elif var_type == PropertyType.dynamic_string: self.__set_dynamic_string_attribute( attr[0], _get_tuple_val(attr, 2, None), _get_tuple_val(attr, 3, False) ) elif var_type == PropertyType.string_list: self.__set_list_attribute( attr[0], self.__hashes.get(attr[0]), self.__attributes.get(attr[0]), str, False ) elif var_type == PropertyType.function: self.__set_function_attribute(attr[0], _get_tuple_val(attr, 2, None), _get_tuple_val(attr, 3, True)) elif var_type == PropertyType.react: prop_name = _to_camel_case(attr[0]) if hash_name := self.__hashes.get(attr[0]): self.__update_vars.append(f"{prop_name}={hash_name}") self.__set_react_attribute(prop_name, hash_name) else: self.__set_react_attribute(prop_name, self.__attributes.get(attr[0], _get_tuple_val(attr, 2, None))) elif var_type == PropertyType.broadcast: self.__set_react_attribute( _to_camel_case(attr[0]), _get_broadcast_var_name(_get_tuple_val(attr, 2, None)) ) elif var_type == PropertyType.string_or_number: self.__set_string_or_number_attribute(attr[0], _get_tuple_val(attr, 2, None)) elif var_type == PropertyType.dict: self.set_dict_attribute(attr[0], 
_get_tuple_val(attr, 2, None)) elif var_type == PropertyType.dynamic_dict: self.set_dynamic_dict_attribute(attr[0], _get_tuple_val(attr, 2, None)) elif var_type == PropertyType.dynamic_list: self.__set_dynamic_string_list(attr[0], _get_tuple_val(attr, 2, None)) elif var_type == PropertyType.boolean_or_list: if _is_boolean(self.__attributes.get(attr[0])): self.__set_dynamic_bool_attribute(attr[0], _get_tuple_val(attr, 2, False), True, update_main=False) else: self.__set_dynamic_string_list(attr[0], _get_tuple_val(attr, 2, None)) elif var_type == PropertyType.data: self.__set_dynamic_property_without_default(attr[0], var_type) elif var_type == PropertyType.lov: self._get_adapter(attr[0]) # need to be called before set_lov self._set_lov(attr[0]) elif var_type == PropertyType.lov_value: self.__set_dynamic_property_without_default( attr[0], var_type, _get_tuple_val(attr, 2, None) == "optional" ) elif var_type == PropertyType.toHtmlContent: self.__set_html_content(attr[0], "page", var_type) elif isclass(var_type) and issubclass(var_type, _TaipyBase): if hash_name := self.__hashes.get(attr[0]): prop_name = _to_camel_case(attr[0]) expr = self.__gui._get_expr_from_hash(hash_name) hash_name = self.__gui._evaluate_bind_holder(var_type, expr) self.__update_vars.append(f"{prop_name}={hash_name}") self.__set_react_attribute(prop_name, hash_name) self.__set_refresh_on_update() return self def set_attribute(self, name: str, value: t.Any): """ TODO-undocumented Sets an attribute. Arguments: name (str): The name of the attribute. value (Any): The value of the attribute (must be json serializable). """ self.el.set(name, value) return self def get_element(self): """ TODO-undocumented Returns the xml.etree.ElementTree.Element """ return self.el def _build_to_string(self): el_str = str(etree.tostring(self.el, encoding="utf8").decode("utf8")) el_str = el_str.replace("<?xml version='1.0' encoding='utf8'?>\n", "") el_str = el_str.replace("/>", ">") return el_str, self.__element_name
import typing as t

from .._warnings import _warn
from ..gui_types import NumberTypes
from ..utils import _RE_PD_TYPE, _get_date_col_str_name, _MapDict


def _add_to_dict_and_get(dico: t.Dict[str, t.Any], key: str, value: t.Any) -> t.Any:
    """Insert `value` under `key` only when absent, then return the stored value."""
    return dico.setdefault(key, value)


def _get_tuple_val(attr: tuple, index: int, default_val: t.Any) -> t.Any:
    """Return attr[index] when it exists, else default_val."""
    return attr[index] if len(attr) > index else default_val


def _get_columns_dict_from_list(
    col_list: t.Union[t.List[str], t.Tuple[str]], col_types_keys: t.List[str], value: t.Any
):
    """Build a column-descriptor dict ({name: {"index": i}}) from a column-name list.

    Names not present in `col_types_keys` are skipped with a warning; empty names
    are silently ignored.
    """
    col_dict = {}
    idx = 0
    for col in col_list:
        if col in col_types_keys:
            col_dict[col] = {"index": idx}
            idx += 1
        elif col:
            _warn(
                f'Error column "{col}" is not present in the Dataframe "{value.head(0) if hasattr(value, "head") else value}".'  # noqa: E501
            )
    return col_dict


def _get_columns_dict(  # noqa: C901
    value: t.Any,
    columns: t.Union[str, t.List[str], t.Tuple[str], t.Dict[str, t.Any], _MapDict],
    col_types: t.Optional[t.Dict[str, str]] = None,
    date_format: t.Optional[str] = None,
    number_format: t.Optional[str] = None,
    opt_columns: t.Optional[t.Set[str]] = None,
):
    """Normalize the `columns` property into a per-column descriptor dict.

    Arguments:
        value: The tabular data (only used for warning messages).
        columns: Column selection as a `;`-separated string, list/tuple, or dict.
        col_types: Mapping of column name to dtype string; None aborts (returns None).
        date_format: Default format applied to datetime columns.
        number_format: Default format applied to numeric columns.
        opt_columns: Extra optional column names to include when `columns` is a dict.

    Returns:
        A dict {column_name: {"index", "type", "dfid", optional "tz"/"format"}},
        or None when col_types is None.
    """
    if col_types is None:
        return None
    # FIX: col_types_keys was referenced below but never defined, raising NameError
    # on every call; keys are stringified to match the str(k) normalization used
    # for user-supplied dict keys further down.
    col_types_keys = [str(c) for c in col_types.keys()]
    col_dict: t.Optional[dict] = None
    if isinstance(columns, str):
        col_dict = _get_columns_dict_from_list([s.strip() for s in columns.split(";")], col_types_keys, value)
    elif isinstance(columns, (list, tuple)):
        col_dict = _get_columns_dict_from_list(columns, col_types_keys, value)
    elif isinstance(columns, _MapDict):
        col_dict = columns._dict.copy()
    elif isinstance(columns, dict):
        col_dict = columns.copy()
    if not isinstance(col_dict, dict):
        _warn("Error: columns attributes should be a string, a list, a tuple or a dict.")
        col_dict = {}
    nb_cols = len(col_dict)
    if nb_cols == 0:
        # No explicit selection: take every column of the data, in dtype order.
        for col in col_types_keys:
            col_dict[col] = {"index": nb_cols}
            nb_cols += 1
    else:
        col_dict = {str(k): v for k, v in col_dict.items()}
        if opt_columns:
            for col in opt_columns:
                if col in col_types_keys and col not in col_dict:
                    col_dict[col] = {"index": nb_cols}
                    nb_cols += 1
    idx = 0
    for col, ctype in col_types.items():
        col = str(col)
        if col in col_dict:
            # _RE_PD_TYPE splits a pandas dtype string (e.g. "datetime64[ns, UTC]")
            # into base type and optional timezone.
            re_type = _RE_PD_TYPE.match(ctype)
            grps = re_type.groups() if re_type else ()
            ctype = grps[0] if grps else ctype
            col_dict[col]["type"] = ctype
            col_dict[col]["dfid"] = col
            if len(grps) > 4 and grps[4]:
                col_dict[col]["tz"] = grps[4]
            idx = _add_to_dict_and_get(col_dict[col], "index", idx) + 1
            if ctype == "datetime":
                if date_format:
                    _add_to_dict_and_get(col_dict[col], "format", date_format)
                # Datetime columns are re-keyed to a string-typed shadow column.
                col_dict[_get_date_col_str_name(col_types.keys(), col)] = col_dict.pop(col)  # type: ignore
            elif number_format and ctype in NumberTypes:
                _add_to_dict_and_get(col_dict[col], "format", number_format)
    return col_dict
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from __future__ import annotations from datetime import date, datetime, time from json import JSONEncoder from pathlib import Path from flask.json.provider import DefaultJSONProvider from .._warnings import _warn from ..icon import Icon from ..utils import _date_to_string, _MapDict, _TaipyBase def _default(o): if isinstance(o, Icon): return o._to_dict() if isinstance(o, _MapDict): return o._dict if isinstance(o, _TaipyBase): return o.get() if isinstance(o, (datetime, date, time)): return _date_to_string(o) if isinstance(o, Path): return str(o) try: raise TypeError(f"Object of type {o.__class__.__name__} is not JSON serializable") except Exception as e: _warn("Exception in JSONEncoder", e) return None class _TaipyJsonEncoder(JSONEncoder): def default(self, o): return _default(o) class _TaipyJsonProvider(DefaultJSONProvider): default = staticmethod(_default) # type: ignore sort_keys = False
from markdown.treeprocessors import Treeprocessor

from ..builder import _Builder


class _Postprocessor(Treeprocessor):
    # Tree processor that rewrites the Markdown element tree for the Taipy front end.

    @staticmethod
    def extend(md, gui, priority):
        """Register this processor on the Markdown instance and attach the Gui."""
        instance = _Postprocessor(md)
        md.treeprocessors.register(instance, "taipy", priority)
        instance._gui = gui

    def run(self, root):
        """Turn <p> elements into keyed <div class="md-para"> elements.

        Every non-root element also receives a unique `key` attribute.
        NOTE(review): the collapsed source makes the nesting of the second `if`
        ambiguous; it is read here as applying to every iterated element, not
        only to former <p> elements — confirm against upstream.
        """
        MD_PARA_CLASSNAME = "md-para"
        for p in root.iter():
            if p.tag == "p":
                classes = p.get("class")
                # prepend the md-para marker class, keeping any existing classes
                classes = f"{MD_PARA_CLASSNAME} {classes}" if classes else MD_PARA_CLASSNAME
                p.set("class", classes)
                p.tag = "div"
            if p != root:
                p.set("key", _Builder._get_key(p.tag))
        return root
from markdown.inlinepatterns import InlineProcessor

from .factory import _MarkdownFactory


class _ControlPattern(InlineProcessor):
    """Inline pattern that turns a Taipy control fragment into its element."""

    # control name followed by its raw property text, between the Taipy markers
    __PATTERN = _MarkdownFactory._TAIPY_START + r"([a-zA-Z][\.a-zA-Z_$0-9]*)(.*?)" + _MarkdownFactory._TAIPY_END

    @staticmethod
    def extend(md, gui, priority):
        """Register this pattern on the Markdown instance and attach the Gui."""
        instance = _ControlPattern(_ControlPattern.__PATTERN, md)
        md.inlinePatterns.register(instance, "taipy", priority)
        instance._gui = gui

    def handleMatch(self, m, data):
        """Build the element for the matched control and report the matched span."""
        control_name = m.group(1)
        raw_properties = m.group(2)
        element = _MarkdownFactory.create_element(self._gui, control_name, raw_properties)
        return element, m.start(0), m.end(0)
import re

from markdown.blockprocessors import BlockProcessor

from .factory import _MarkdownFactory


class _StartBlockProcessor(BlockProcessor):
    # Block processor that matches a Taipy block-control start fence and consumes
    # everything up to its matching end fence, handling nested blocks via a stack.

    __RE_FENCE_START = re.compile(
        _MarkdownFactory._TAIPY_START + r"([a-zA-Z][\.a-zA-Z_$0-9]*)\.start(.*?)" + _MarkdownFactory._TAIPY_END
    )  # start line
    __RE_OTHER_FENCE = re.compile(
        _MarkdownFactory._TAIPY_START + r"([a-zA-Z][\.a-zA-Z_$0-9]*)\.(start|end)(.*?)" + _MarkdownFactory._TAIPY_END
    )  # start or end tag

    @staticmethod
    def extend(md, gui, priority):
        """Register this processor on the Markdown parser and attach the Gui."""
        instance = _StartBlockProcessor(md.parser)
        md.parser.blockprocessors.register(instance, "taipy", priority)
        instance._gui = gui

    def test(self, parent, block):
        # This processor applies only to blocks opening with a start fence.
        return re.match(_StartBlockProcessor.__RE_FENCE_START, block)

    def run(self, parent, blocks):
        """Consume the fenced region and parse its content into a new element.

        Returns True when a matching end fence was found and the blocks were
        consumed; False (after restoring blocks[0]) when the region is unclosed.
        """
        original_block = blocks[0]
        original_match = re.search(_StartBlockProcessor.__RE_FENCE_START, original_block)
        # Strip the start fence; it is restored below if no end fence is found.
        blocks[0] = re.sub(_StartBlockProcessor.__RE_FENCE_START, "", blocks[0], 1)
        tag = original_match.group(1)
        queue = [tag]  # stack of currently-open tags, for proper nesting
        # Find block with ending fence
        for block_num, block in enumerate(blocks):
            matches = re.findall(_StartBlockProcessor.__RE_OTHER_FENCE, block)
            for match in matches:
                if queue[-1] == match[0] and match[1] == "end":
                    queue.pop()
                elif match[1] == "start":
                    queue.append(match[0])
            if not queue:
                # remove end fence
                blocks[block_num] = re.sub(
                    _MarkdownFactory._TAIPY_START + tag + r"\.end(.*?)" + _MarkdownFactory._TAIPY_END,
                    "",
                    block,
                    1,
                )
                # render fenced area inside a new div
                e = _MarkdownFactory.create_element(self._gui, original_match.group(1), original_match.group(2))
                parent.append(e)
                # parse inside blocks
                self.parser.parseBlocks(e, blocks[: block_num + 1])
                # remove used blocks
                del blocks[: block_num + 1]
                return True  # or could have had no return statement
        # No closing marker!  Restore and do nothing
        blocks[0] = original_block
        return False  # equivalent to our test() routine returning False
from typing import Any

from markdown.extensions import Extension

from .blocproc import _StartBlockProcessor
from .control import _ControlPattern
from .postproc import _Postprocessor
from .preproc import _Preprocessor


class _TaipyMarkdownExtension(Extension):
    """Markdown extension bundling every Taipy processor, bound to one Gui."""

    config = {"gui": ["", "Gui object for extension"]}

    def extendMarkdown(self, md):
        """Validate the bound Gui and register all Taipy processors on `md`."""
        # local import to avoid a circular dependency with the gui package
        from ...gui import Gui

        gui = self.config["gui"][0]
        if not isinstance(gui, Gui):
            raise RuntimeError("Gui instance is not bound to Markdown Extension")
        md.registerExtension(self)
        # registration order follows the original fixed priorities
        for processor, priority in (
            (_Preprocessor, 210),
            (_ControlPattern, 205),
            (_StartBlockProcessor, 175),
            (_Postprocessor, 200),
        ):
            processor.extend(md, gui, priority)
import typing as t

from ..factory import _Factory


class _MarkdownFactory(_Factory):
    """Factory specialization that builds controls from Taipy Markdown fragments."""

    # Taipy Markdown tags
    _TAIPY_START = "TaIpY:"
    _TAIPY_END = ":tAiPy"

    _TAIPY_BLOCK_TAGS = ["layout", "part", "expandable", "dialog", "pane"]

    @staticmethod
    def create_element(gui, control_type: str, all_properties: str) -> t.Union[t.Any, str]:
        """Build the element for `control_type` from its raw property string.

        Returns the built element, or an error marker string when the control
        could not be built.
        """
        # Parse the raw property text into a name -> value mapping.
        pairs = _Factory._PROPERTY_RE.findall(all_properties)
        properties = {pair[0]: pair[1] for pair in pairs}
        element = _Factory.call_builder(gui, control_type, properties)
        if element is None:
            return f"<|INVALID SYNTAX - Control is '{control_type}'|>"
        return element
import re
import typing as t
from typing import Any, List, Tuple

from markdown.preprocessors import Preprocessor as MdPreprocessor

from ..._warnings import _warn
from ..builder import _Builder
from .factory import _MarkdownFactory

if t.TYPE_CHECKING:
    from ...gui import Gui


class _Preprocessor(MdPreprocessor):
    # ----------------------------------------------------------------------
    # Finds, in the Markdown text, control declaration constructs:
    #     <|<some value>|>
    # or
    #     <|<some value>|<control_type>|>
    # or
    #     <|<some value>|<control_type>|<prop_name[=propvalue]>>
    # or
    #     <|<control_type>|<prop_name[=propvalue]>>
    #
    # These constructs are converted a fragment that the ControlPattern
    # processes to create the components that get generated.
    #     <control_type> prop_name="prop_value" ...
    # Note that if a value is provided before the control_type, it is set
    # as the default property value for that control type.
    # The default control type is 'text'.
    # ----------------------------------------------------------------------
    # Control in Markdown
    __CONTROL_RE = re.compile(r"<\|(.*?)\|>")
    # Opening tag
    __OPENING_TAG_RE = re.compile(r"<([0-9a-zA-Z\_\.]*)\|((?:(?!\|>).)*)\s*$")
    # Closing tag
    __CLOSING_TAG_RE = re.compile(r"^\s*\|([0-9a-zA-Z\_\.]*)>")
    # Link in Markdown
    __LINK_RE = re.compile(r"(\[[^\]]*?\]\([^\)]*?\))")
    # Split properties and control type
    __SPLIT_RE = re.compile(r"(?<!\\\\)\|")
    # Property syntax: '<prop_name>[=<prop_value>]'
    #   If <prop_value> is omitted:
    #     '<prop_name>' is equivalent to '<prop_name>=true'
    #     'not <prop_name>' is equivalent to '<prop_name>=false'
    #       'not', 'dont', 'don't' are equivalent in this context
    #  Note 1: 'not <prop_name>=<prop_value>' is an invalid syntax
    #  Note 2: Space characters after the equal sign are significative
    __PROPERTY_RE = re.compile(r"((?:don'?t|not)\s+)?([a-zA-Z][\.a-zA-Z_$0-9]*(?:\[(?:.*?)\])?)\s*(?:=(.*))?$")

    # Gui instance bound at registration time by extend().
    _gui: "Gui"

    @staticmethod
    def extend(md, gui, priority):
        """Register an instance of this preprocessor on *md*, bound to *gui*."""
        instance = _Preprocessor(md)
        md.preprocessors.register(instance, "taipy", priority)
        instance._gui = gui

    def _make_prop_pair(self, prop_name: str, prop_value: str) -> Tuple[str, str]:
        # Un-escape pipe character in property value
        return (prop_name, prop_value.replace("\\|", "|"))

    def run(self, lines: List[str]) -> List[str]:
        """Rewrite every Taipy construct in *lines* into TaIpY:…:tAiPy fragments.

        Opening block tags push onto tag_queue; closing tags pop it, so
        nested blocks are matched and mismatches are reported with line
        numbers.  Inline controls and Markdown links are rewritten in place.
        """
        new_lines = []
        tag_queue = []
        for line_count, line in enumerate(lines, start=1):
            new_line = ""
            last_index = 0
            # Opening tags
            m = _Preprocessor.__OPENING_TAG_RE.search(line)
            if m is not None:
                tag = "part"  # the default block tag when none is specified
                properties: List[Tuple[str, str]] = []
                if m.group(2):
                    tag, properties = self._process_control(m.group(2), line_count, tag)
                if tag in _MarkdownFactory._TAIPY_BLOCK_TAGS:
                    # Remember the open tag (and its optional identifier) for
                    # matching against the closing tag.
                    tag_queue.append((tag, line_count, m.group(1) or None))
                    new_line_delimeter = "\n" if line.startswith("<|") else "\n\n"
                    line = (
                        line[: m.start()]
                        + new_line_delimeter
                        + _MarkdownFactory._TAIPY_START
                        + tag
                        + _MarkdownFactory._START_SUFFIX
                    )
                    for property in properties:
                        prop_value = property[1].replace('"', '\\"')
                        line += f' {property[0]}="{prop_value}"'
                    line += _MarkdownFactory._TAIPY_END + new_line_delimeter
                else:
                    _warn(f"Invalid tag name '{tag}' in line {line_count}.")
            # Other controls
            for m in _Preprocessor.__CONTROL_RE.finditer(line):
                control_name, properties = self._process_control(m.group(1), line_count)
                new_line += line[last_index : m.start()]
                control_text = _MarkdownFactory._TAIPY_START + control_name
                for property in properties:
                    prop_value = property[1].replace('"', '\\"')
                    control_text += f' {property[0]}="{prop_value}"'
                control_text += _MarkdownFactory._TAIPY_END
                new_line += control_text
                last_index = m.end()
            new_line = line if last_index == 0 else new_line + line[last_index:]
            # Add key attribute to links
            line = new_line
            new_line = ""
            last_index = 0
            for m in _Preprocessor.__LINK_RE.finditer(line):
                new_line += line[last_index : m.end()]
                new_line += "{: key=" + _Builder._get_key("link") + "}"
                last_index = m.end()
            new_line = line if last_index == 0 else new_line + line[last_index:]
            # Look for a closing tag
            m = _Preprocessor.__CLOSING_TAG_RE.search(new_line)
            if m is not None:
                if len(tag_queue):
                    open_tag, open_tag_line_count, open_tag_identifier = tag_queue.pop()
                    close_tag_identifier = m.group(1)
                    if close_tag_identifier and not open_tag_identifier:
                        _warn(
                            f"Missing opening '{open_tag}' tag identifier '{close_tag_identifier}' in line {open_tag_line_count}."  # noqa: E501
                        )
                    if open_tag_identifier and not close_tag_identifier:
                        _warn(
                            f"Missing closing '{open_tag}' tag identifier '{open_tag_identifier}' in line {line_count}."
                        )
                    if close_tag_identifier and open_tag_identifier and close_tag_identifier != open_tag_identifier:
                        _warn(
                            f"Unmatched '{open_tag}' tag identifier in line {open_tag_line_count} and line {line_count}."  # noqa: E501
                        )
                    new_line = (
                        new_line[: m.start()]
                        + _MarkdownFactory._TAIPY_START
                        + open_tag
                        + _MarkdownFactory._END_SUFFIX
                        + _MarkdownFactory._TAIPY_END
                        + "\n"
                        + new_line[m.end() :]
                    )
                else:
                    new_line = (
                        new_line[: m.start()]
                        + f"<div>No matching opened tag on line {line_count}</div>"
                        + new_line[m.end() :]
                    )
                    _warn(f"Line {line_count} has an unmatched closing tag.")
            # append the new line
            new_lines.append(new_line)
        # Issue #337: add an empty string at the beginning of new_lines list if there is not one
        # so that markdown extension would be able to render properly
        if new_lines and new_lines[0] != "":
            new_lines.insert(0, "")
        # Check for tags left unclosed (but close them anyway)
        for tag, line_no, _ in tag_queue:
            new_lines.append(
                _MarkdownFactory._TAIPY_START + tag + _MarkdownFactory._END_SUFFIX + _MarkdownFactory._TAIPY_END
            )
            _warn(f"Opened tag {tag} in line {line_no} is not closed.")
        return new_lines

    def _process_control(
        self, prop_string: str, line_count: int, default_control_name: str = _MarkdownFactory.DEFAULT_CONTROL
    ) -> Tuple[str, List[Tuple[str, str]]]:
        """Split a "<|…|…|>" interior into (control_name, [(prop, value), …]).

        The first fragment that names a known control becomes the control
        type; an earlier unrecognized fragment becomes the control's default
        property value.
        """
        fragments = [f for f in _Preprocessor.__SPLIT_RE.split(prop_string) if f]
        control_name = None
        default_prop_name = None
        default_prop_value = None
        properties: List[Tuple[str, Any]] = []
        for fragment in fragments:
            if control_name is None and _MarkdownFactory.get_default_property_name(fragment):
                control_name = fragment
            elif control_name is None and default_prop_value is None:
                default_prop_value = fragment
            elif prop_match := _Preprocessor.__PROPERTY_RE.match(fragment):
                not_prefix = prop_match.group(1)
                prop_name = prop_match.group(2)
                val = prop_match.group(3)
                if not_prefix and val:
                    _warn(f"Negated property {prop_name} value ignored at {line_count}.")
                # Bare property -> "True"; negated -> "False"; else its value.
                prop_value = "True"
                if not_prefix:
                    prop_value = "False"
                elif val:
                    prop_value = val
                properties.append(self._make_prop_pair(prop_name, prop_value))
            elif len(fragment) > 1 and fragment[0] == "{" and fragment[-1] == "}":
                # A bare "{expression}" fragment binds a property of that name.
                properties.append(self._make_prop_pair(fragment[1:-1], fragment))
            else:
                _warn(f"Bad Taipy property format at line {line_count}: '{fragment}'.")
        if control_name is None:
            if properties and all(attribute != properties[0][0] for attribute in _MarkdownFactory._TEXT_ATTRIBUTES):
                control_name = properties[0][0]
                properties = properties[1:]
                _warn(f'Unrecognized control {control_name} at line {line_count}: "<|{prop_string}|>".')
            else:
                control_name = default_control_name
        if default_prop_value is not None:
            default_prop_name = _MarkdownFactory.get_default_property_name(control_name)
            # Set property only if it is not already defined
            if default_prop_name and default_prop_name not in [x[0] for x in properties]:
                properties.insert(0, self._make_prop_pair(default_prop_name, default_prop_value))
        return control_name, properties
from .parser import _TaipyHTMLParser
import typing as t

from ..factory import _Factory


class _HtmlFactory(_Factory):
    """Factory turning Taipy HTML tags into elements."""

    @staticmethod
    def create_element(gui, namespace: str, control_type: str, all_properties: t.Dict[str, str]) -> t.Tuple[str, str]:
        """Build the element for "<namespace>:<control_type>"; return (jsx, tag name)."""
        element = _Factory.call_builder(gui, f"{namespace}.{control_type}", all_properties, True)
        if element is not None:
            return element  # type: ignore
        # Unknown control: emit an inline error placeholder wrapped in a div.
        return f"<div>INVALID SYNTAX - Control is '{namespace}:{control_type}'</div>", "div"
import re
import typing as t
from html.parser import HTMLParser

from ..._warnings import _warn
from .factory import _HtmlFactory


class _TaipyHTMLParser(HTMLParser):
    """HTML parser that converts "<ns:control>" Taipy tags into elements.

    Regular body content is passed through unchanged; head tags are
    collected separately; tags matching "<namespace>:<control>" are turned
    into a _TaipyTag and rendered through _HtmlFactory.
    """

    # Recognizes Taipy tags of the form "<namespace>:<control_type>".
    __TAIPY_NAMESPACE_RE = re.compile(r"([a-zA-Z\_]+):([a-zA-Z\_]*)")

    def __init__(self, gui):
        super().__init__()
        self._gui = gui
        self.body = ""  # accumulated JSX output for the page body
        self.head = []  # collected head tag descriptors
        self.taipy_tag = None  # Taipy tag currently being assembled, if any
        self.tag_mapping = {}  # maps "ns:control" to its rendered element name
        self.is_body = True  # True while parsing inside <body>
        self.head_tag = None  # pending head tag waiting for its text content
        self._line_count = 0  # current source line, maintained by feed_data()
        self._tag_queue = []  # stack of (tag, line) for open/close matching

    # @override
    def handle_starttag(self, tag, props) -> None:
        self._tag_queue.append((tag, self._line_count))
        if tag == "html":
            return
        if self.head_tag is not None:
            # A pending head tag is flushed when the next tag starts.
            self.head.append(self.head_tag)
            self.head_tag = None
        if self.taipy_tag is not None:
            # A Taipy tag with no data child is rendered when the next tag starts.
            self.parse_taipy_tag()
        if tag == "head":
            self.is_body = False
        elif tag == "body":
            self.is_body = True
        elif m := self.__TAIPY_NAMESPACE_RE.match(tag):
            self.taipy_tag = _TaipyTag(m.group(1), m.group(2), props)
        elif not self.is_body:
            head_props = {prop[0]: prop[1] for prop in props}
            self.head_tag = {"tag": tag, "props": head_props, "content": ""}
        else:
            # Pass regular body markup through verbatim.
            self.append_data(str(self.get_starttag_text()))

    # @override
    def handle_data(self, data: str) -> None:
        data = data.strip()
        if data and self.taipy_tag is not None and self.taipy_tag.set_value(data):
            # The text child becomes the Taipy tag's default property value.
            self.parse_taipy_tag()
        elif not self.is_body and self.head_tag is not None:
            self.head_tag["content"] = data
        else:
            self.append_data(data)

    # @override
    def handle_endtag(self, tag) -> None:
        if not self._tag_queue:
            _warn(f"Closing '{tag}' at line {self._line_count} is missing an opening tag.")
        else:
            opening_tag, opening_tag_line = self._tag_queue.pop()
            if opening_tag != tag:
                _warn(
                    f"Opening tag '{opening_tag}' at line {opening_tag_line} has no matching closing tag '{tag}' at line {self._line_count}."  # noqa: E501
                )
        if tag in ["head", "body", "html"]:
            return
        if self.taipy_tag is not None:
            self.parse_taipy_tag()
        if not self.is_body:
            self.head.append(self.head_tag)
            self.head_tag = None
        elif tag in self.tag_mapping:
            # Close a previously-rendered Taipy element with its real name.
            self.append_data(f"</{self.tag_mapping[tag]}>")
        else:
            self.append_data(f"</{tag}>")

    def append_data(self, data: str) -> None:
        # Only body content contributes to the JSX output.
        if self.is_body:
            self.body += data

    def parse_taipy_tag(self) -> None:
        """Render the pending Taipy tag and remember its element name."""
        tp_string, tp_element_name = self.taipy_tag.parse(self._gui)
        self.append_data(tp_string)
        self.tag_mapping[f"{self.taipy_tag.namespace}:{self.taipy_tag.control_type}"] = tp_element_name
        self.taipy_tag = None

    def get_jsx(self) -> str:
        return self.body

    def feed_data(self, data: str):
        """Feed *data* line by line so warnings can carry line numbers."""
        data_lines = data.split("\n")
        for line, data_line in enumerate(data_lines):
            self._line_count = line + 1
            self.feed(data_line)
        # Report (and discard) any tag still open at end of input.
        while self._tag_queue:
            opening_tag, opening_tag_line = self._tag_queue.pop()
            _warn(f"Opening tag '{opening_tag}' at line {opening_tag_line} has no matching closing tag.")


class _TaipyTag(object):
    """Holds one "<ns:control>" tag and its properties until rendering."""

    def __init__(self, namespace: str, tag_name: str, properties: t.List[t.Tuple[str, str]]) -> None:
        self.namespace = namespace
        self.control_type = tag_name
        self.properties = {prop[0]: prop[1] for prop in properties}
        self.has_set_value = False  # True once the default property was filled

    def set_value(self, value: str) -> bool:
        """Use *value* as the control's default property; True if consumed."""
        if self.has_set_value:
            return False
        property_name = _HtmlFactory.get_default_property_name(f"{self.namespace}.{self.control_type}")
        # Set property only if it is not already defined
        if property_name and property_name not in self.properties.keys():
            self.properties[property_name] = value
        self.has_set_value = True
        return True

    def parse(self, gui) -> t.Tuple[str, str]:
        """Render this tag through the HTML factory; returns (jsx, element name)."""
        for k, v in self.properties.items():
            # A property with no value (e.g. bare attribute) means "true".
            self.properties[k] = v if v is not None else "true"
        # allow usage of 'class' property in html taipy tag
        if "class" in self.properties and "class_name" not in self.properties:
            self.properties["class_name"] = self.properties["class"]
        return _HtmlFactory.create_element(gui, self.namespace, self.control_type, self.properties)
"""The setup script.""" import json import os from setuptools import find_namespace_packages, find_packages, setup with open("README.md", "rb") as readme_file: readme = readme_file.read().decode("UTF-8") with open(f"src{os.sep}taipy{os.sep}templates{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" test_requirements = ["pytest>=3.8"] setup( author="Avaiga", author_email="dev@taipy.io", python_requires=">=3.8", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ], description="An open-source package holding Taipy application templates.", license="Apache License 2.0", long_description=readme, long_description_content_type="text/markdown", keywords="taipy-templates", name="taipy-templates", package_dir={"": "src"}, packages=find_namespace_packages(where="src") + find_packages(include=["taipy"]), include_package_data=True, test_suite="tests", url="https://github.com/avaiga/taipy-templates", version=version_string, zip_safe=False, )
from config.config import configure
from pages import job_page, scenario_page
from pages.root import content, root, selected_data_node, selected_scenario

import taipy as tp
from taipy import Core, Gui


def on_init(state):
    """Gui on_init callback; no initialization is performed by default."""
    ...


def on_change(state, var, val):
    """Gui on_change callback: rebuild the data node partial on selection.

    Triggered whenever any state variable changes; only reacts to a
    non-empty "selected_data_node" value.
    """
    if var == "selected_data_node" and val:
        # NOTE(review): accesses the "scenario" page's scoped state —
        # confirm manage_data_node_partial is exposed on that page module.
        state["scenario"].manage_data_node_partial(state)


# Route table: URL path -> page object.
pages = {
    "/": root,
    "scenario": scenario_page,
    "jobs": job_page,
}

if __name__ == "__main__":
    # Instantiate, configure and run the Core
    core = Core()
    default_scenario_cfg = configure()
    core.run()

    # ##################################################################################################################
    # PLACEHOLDER: Initialize your data application here                                                               #
    #                                                                                                                  #
    # Example:                                                                                                         #
    if len(tp.get_scenarios()) == 0:
        tp.create_scenario(default_scenario_cfg, name="Default Scenario")
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################

    # Instantiate, configure and run the GUI
    gui = Gui(pages=pages)

    # Empty partial, filled in at runtime by manage_data_node_partial().
    data_node_partial = gui.add_partial("")

    gui.run(title="{{cookiecutter.__application_title}}", margin="0em")
from algos import clean_data

from taipy import Config, Frequency, Scope


def configure():
    """Build and return the example scenario configuration.

    Wires a CSV input node and a default replacement value into the
    ``clean_data`` task, and attaches that task to a daily scenario.
    """
    # ##################################################################################################################
    # PLACEHOLDER: Add your scenario configurations here                                                               #
    #                                                                                                                  #
    # Example:                                                                                                         #
    initial_dataset_cfg = Config.configure_csv_data_node("initial_dataset", scope=Scope.CYCLE)
    replacement_type_cfg = Config.configure_data_node("replacement_type", default_data="NO VALUE")
    cleaned_dataset_cfg = Config.configure_csv_data_node("cleaned_dataset")

    clean_data_cfg = Config.configure_task(
        "clean_data",
        function=clean_data,
        input=[initial_dataset_cfg, replacement_type_cfg],
        output=cleaned_dataset_cfg,
    )

    scenario_cfg = Config.configure_scenario(
        "scenario_configuration", task_configs=[clean_data_cfg], frequency=Frequency.DAILY
    )

    return scenario_cfg
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################
from taipy import Config


def configure():
    """Load the TOML configuration and return the scenario configuration."""
    toml_path = "config/config.toml"
    Config.load(toml_path)
    # The scenario configuration is declared in the TOML file itself.
    return Config.scenarios["scenario_configuration"]
def clean_data(df, replacement_type):
    """Return a copy of *df* with missing values replaced by *replacement_type*."""
    return df.fillna(replacement_type)
from .algos import clean_data
from .job_page import job_page from .scenario_page import scenario_page
from taipy.gui import Markdown

# Module-level variables exposed to the GUI state.
selected_scenario = None  # scenario currently selected
selected_data_node = None  # data node currently selected
content = ""  # content placeholder rendered by the root page

# Root page, rendered from its Markdown template file.
root = Markdown("pages/root.md")
from .job_page import job_page
from taipy.gui import Markdown

# Jobs page, rendered from its Markdown template file.
job_page = Markdown("pages/job_page/job_page.md")
from taipy.gui import Markdown, notify

from .data_node_management import manage_partial


def notify_on_submission(state, submitable, details):
    """Notify the user about a submission status change.

    Maps the "submission_status" entry of *details* to a GUI notification:
    success on COMPLETED, error on FAILED, info otherwise.
    """
    if details["submission_status"] == "COMPLETED":
        # Fixed typo in the user-facing message ("Submision" -> "Submission").
        notify(state, "success", "Submission completed!")
    elif details["submission_status"] == "FAILED":
        notify(state, "error", "Submission failed!")
    else:
        notify(state, "info", "In progress...")


def manage_data_node_partial(state):
    """Rebuild the data node partial for the currently selected data node."""
    manage_partial(state)


# Scenario page, rendered from its Markdown template file.
scenario_page = Markdown("pages/scenario_page/scenario_page.md")
from .scenario_page import scenario_page
# build partial content for a specific data node
def build_dn_partial(dn, dn_label):
    """Assemble the Markdown partial shown for data node *dn* (labelled *dn_label*)."""
    sections = ["<|part|render={selected_scenario}|\n\n"]
    # ##################################################################################################################
    # PLACEHOLDER: data node specific content before automatic content                                                 #
    #                                                                                                                  #
    # Example:                                                                                                         #
    if dn_label == "replacement_type":
        sections.append("All missing values will be replaced by the data node value.")
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################

    # Automatic data node content
    sections.append(
        "<|{selected_scenario.data_nodes['"
        + dn.config_id
        + "']}|data_node|scenario={"
        "selected_scenario}|>\n\n "
    )

    # ##################################################################################################################
    # PLACEHOLDER: data node specific content after automatic content                                                  #
    #                                                                                                                  #
    # Example:                                                                                                         #
    if dn_label == "initial_dataset":
        sections.append(
            "Select your CSV file: <|{selected_data_node.path}|file_selector|extensions=.csv|on_action"
            "={lambda s: s.refresh('selected_scenario')}|>\n\n "
        )
    # Comment, remove or replace the previous lines with your own use case                                             #
    # ##################################################################################################################

    sections.append("|>\n\n")
    return "".join(sections)


def manage_partial(state):
    """Refresh the GUI partial with content built for the selected data node."""
    node = state.selected_data_node
    state.data_node_partial.update_content(state, build_dn_partial(node, node.get_simple_label()))
import os

import taipy

# Add taipy version to requirements.txt
with open(os.path.join(os.getcwd(), "requirements.txt"), "a") as requirement_file:
    requirement_file.write(f"taipy=={taipy.version._get_version()}\n")

# Use TOML config file or not
use_toml_config = "{{ cookiecutter.__use_toml_config }}".upper()
if use_toml_config == "YES" or use_toml_config == "Y":
    # Keep the TOML-based configuration: config_with_toml.py becomes config.py.
    os.remove(os.path.join(os.getcwd(), "config", "config.py"))
    os.rename(
        os.path.join(os.getcwd(), "config", "config_with_toml.py"), os.path.join(os.getcwd(), "config", "config.py")
    )
else:
    # Pure-Python configuration: drop the TOML variant and the TOML file itself.
    os.remove(os.path.join(os.getcwd(), "config", "config_with_toml.py"))
    os.remove(os.path.join(os.getcwd(), "config", "config.toml"))

main_file_name = "{{cookiecutter.__main_file}}.py"

# Final message printed by cookiecutter after generation.
print(
    f"New Taipy application has been created at {os.path.join(os.getcwd())}"
    f"\n\nTo start the application, change directory to the newly created folder:"
    f"\n\tcd {os.path.join(os.getcwd())}"
    f"\nand run the application as follows:"
    f"\n\ttaipy run {main_file_name}"
)
import os

import pytest
from cookiecutter.exceptions import FailedHookException
from cookiecutter.main import cookiecutter

from .utils import _run_template

# NOTE: directory listings are compared with sorted(a) == sorted(b).
# The previous "a.sort() == b.sort()" pattern compared None == None and
# therefore always passed, silently disabling these assertions.


def test_default_answer(tmpdir):
    """The default template generates a runnable single-page application."""
    cookiecutter(
        template="src/taipy/templates/default",
        output_dir=str(tmpdir),
        no_input=True,
    )

    assert os.listdir(tmpdir) == ["taipy_application"]
    assert sorted(os.listdir(os.path.join(tmpdir, "taipy_application"))) == sorted(
        ["requirements.txt", "main.py", "images"]
    )
    with open(os.path.join(tmpdir, "taipy_application", "requirements.txt")) as requirements_file:
        # Assert post_gen_project hook is successful
        assert "taipy==" in requirements_file.read()

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "taipy_application"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the message when the application is run successfully is in the stdout
    assert "[Taipy][INFO] * Server starting on" in str(stdout, "utf-8")


def test_main_file_with_and_without_extension(tmpdir):
    """The main-file answer is accepted both with and without the .py suffix."""
    cookiecutter(
        template="src/taipy/templates/default",
        output_dir=str(tmpdir),
        no_input=True,
        extra_context={
            "Application main Python file": "app.py",
        },
    )
    assert sorted(os.listdir(os.path.join(tmpdir, "taipy_application"))) == sorted(
        ["requirements.txt", "app.py", "images"]
    )

    cookiecutter(
        template="src/taipy/templates/default",
        output_dir=str(tmpdir),
        no_input=True,
        extra_context={
            "Application root folder name": "foo_app",
            "Application main Python file": "app",
        },
    )
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app"))) == sorted(["requirements.txt", "app.py", "images"])


def test_with_core_service(tmpdir):
    """Answering yes to scenario management wires a Core service into main.py."""
    cookiecutter(
        template="src/taipy/templates/default",
        output_dir=str(tmpdir),
        no_input=True,
        extra_context={
            "Does the application use scenario management or version management?": "y",
            "Does the application use Rest API?": "no",
        },
    )

    assert sorted(os.listdir(os.path.join(tmpdir, "taipy_application"))) == sorted(
        ["requirements.txt", "main.py", "images", "configuration", "algorithms"]
    )
    with open(os.path.join(tmpdir, "taipy_application", "main.py")) as main_file:
        assert "core = Core()" in main_file.read()

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "taipy_application"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the message when the application is run successfully is in the stdout
    assert "[Taipy][INFO] * Server starting on" in str(stdout, "utf-8")
    assert "[Taipy][INFO] Development mode: " in str(stdout, "utf-8")


def test_with_rest_service(tmpdir):
    """Answering yes to Rest wires a Rest service into main.py."""
    cookiecutter(
        template="src/taipy/templates/default",
        output_dir=str(tmpdir),
        no_input=True,
        extra_context={
            "Does the application use scenario management or version management?": "n",
            "Does the application use Rest API?": "yes",
        },
    )

    assert sorted(os.listdir(os.path.join(tmpdir, "taipy_application"))) == sorted(
        ["requirements.txt", "main.py", "images"]
    )
    with open(os.path.join(tmpdir, "taipy_application", "main.py")) as main_file:
        assert "rest = Rest()" in main_file.read()

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "taipy_application"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the message when the application is run successfully is in the stdout
    assert "[Taipy][INFO] * Server starting on" in str(stdout, "utf-8")
    assert "[Taipy][INFO] Development mode: " in str(stdout, "utf-8")


def test_with_both_core_rest_services(tmpdir):
    cookiecutter(
        template="src/taipy/templates/default",
        output_dir=str(tmpdir),
        no_input=True,
        extra_context={
            "Does the application use scenario management or version management?": "n",
            "Does the application use Rest API?": "yes",
        },
    )

    assert sorted(os.listdir(os.path.join(tmpdir, "taipy_application"))) == sorted(
        ["requirements.txt", "main.py", "images", "configuration", "algorithms"]
    )
    with open(os.path.join(tmpdir, "taipy_application", "main.py")) as main_file:
        # Read once: a second read() on the same handle returns "", which
        # made the previous "not in" assertion pass vacuously.
        main_content = main_file.read()
    assert "rest = Rest()" in main_content
    assert "core = Core()" not in main_content

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "taipy_application"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the message when the application is run successfully is in the stdout
    assert "[Taipy][INFO] * Server starting on" in str(stdout, "utf-8")
    assert "[Taipy][INFO] Development mode: " in str(stdout, "utf-8")


def test_multipage_gui_template(tmpdir):
    """Page names given at the prompt each produce a page package."""
    cookiecutter(
        template="src/taipy/templates/default",
        output_dir=str(tmpdir),
        no_input=True,
        extra_context={
            "Application root folder name": "foo_app",
            "Page names in multi-page application?": "name_1 name_2 name_3",
        },
    )

    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app"))) == sorted(
        ["requirements.txt", "main.py", "pages", "images"]
    )
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app", "pages"))) == sorted(
        ["name_1", "name_2", "name_3", "root.md", "root.py", "__init__.py"]
    )

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "foo_app"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    assert "[Taipy][INFO] * Server starting on" in str(stdout, "utf-8")


def test_multipage_gui_template_with_invalid_page_name(tmpdir, capfd):
    """An invalid page identifier aborts generation via the pre_gen hook."""
    with pytest.raises(FailedHookException):
        cookiecutter(
            template="src/taipy/templates/default",
            output_dir=str(tmpdir),
            no_input=True,
            extra_context={
                "Application root folder name": "foo_app",
                "Page names in multi-page application?": "valid_var_name 1_invalid_var_name",
            },
        )

    _, stderr = capfd.readouterr()
    assert 'Page name "1_invalid_var_name" is not a valid Python identifier' in stderr

    assert not os.path.exists(os.path.join(tmpdir, "foo_app"))
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import subprocess import sys def _run_template(main_path, time_out=30): """Run the templates on a subprocess and get stdout after timeout""" with subprocess.Popen([sys.executable, main_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: try: stdout, stderr = proc.communicate(timeout=time_out) except subprocess.TimeoutExpired: proc.kill() stdout, stderr = proc.communicate() # Print the error if there is any (for debugging) if stderr := str(stderr, "utf-8"): print(stderr) return stdout
import os

from cookiecutter.main import cookiecutter

from .utils import _run_template

# NOTE: directory listings are compared with sorted(a) == sorted(b).
# The previous "a.sort() == b.sort()" pattern compared None == None and
# therefore always passed, silently disabling these assertions.


def test_scenario_management_with_toml_config(tmpdir):
    """Answering yes to TOML config wires Config.load into config.py."""
    cookiecutter(
        template="src/taipy/templates/scenario-management",
        output_dir=tmpdir,
        no_input=True,
        extra_context={
            "Application root folder name": "foo_app",
            "Application main Python file": "main.py",
            "Application title": "bar",
            "Does the application use TOML Config?": "yes",
        },
    )

    assert os.listdir(tmpdir) == ["foo_app"]
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app"))) == sorted(
        ["requirements.txt", "main.py", "algos", "config", "pages"]
    )

    # Assert post_gen_project hook is successful
    with open(os.path.join(tmpdir, "foo_app", "requirements.txt")) as requirements_file:
        assert "taipy==" in requirements_file.read()
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app", "config"))) == sorted(
        ["__init__.py", "config.py", "config.toml"]
    )
    with open(os.path.join(tmpdir, "foo_app", "config", "config.py")) as config_file:
        assert 'Config.load("config/config.toml")' in config_file.read()

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "foo_app"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the message when the application is run successfully is in the stdout
    assert "[Taipy][INFO] Configuration 'config/config.toml' successfully loaded." in str(stdout, "utf-8")
    assert "[Taipy][INFO] * Server starting on" in str(stdout, "utf-8")


def test_scenario_management_without_toml_config(tmpdir):
    """Answering no to TOML config keeps the pure-Python configuration."""
    cookiecutter(
        template="src/taipy/templates/scenario-management",
        output_dir=tmpdir,
        no_input=True,
        extra_context={
            "Application root folder name": "foo_app",
            "Application main Python file": "main.py",
            "Application title": "bar",
            "Does the application use TOML Config?": "no",
        },
    )

    assert os.listdir(tmpdir) == ["foo_app"]
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app"))) == sorted(
        ["requirements.txt", "main.py", "algos", "config", "pages"]
    )

    # Assert post_gen_project hook is successful
    with open(os.path.join(tmpdir, "foo_app", "requirements.txt")) as requirements_file:
        assert "taipy==" in requirements_file.read()
    assert sorted(os.listdir(os.path.join(tmpdir, "foo_app", "config"))) == sorted(["__init__.py", "config.py"])
    with open(os.path.join(tmpdir, "foo_app", "config", "config.py")) as config_file:
        config_content = config_file.read()
        assert 'Config.load("config/config.toml")' not in config_content
        assert all([x in config_content for x in ["Config.configure_csv_data_node", "Config.configure_task"]])

    oldpwd = os.getcwd()
    os.chdir(os.path.join(tmpdir, "foo_app"))
    stdout = _run_template("main.py")
    os.chdir(oldpwd)

    # Assert the message when the application is run successfully is in the stdout
    assert "[Taipy][INFO] * Server starting on" in str(stdout, "utf-8")
""" Contain the application's configuration including the scenario configurations. The configuration is run by the Core service. """ from algorithms import * from taipy import Config # ############################################################################# # PLACEHOLDER: Put your application's configurations here # # # # Example: # # scenario_config = Config.configure_scenario("placeholder_scenario", []) # # Comment, remove or replace the previous lines with your own use case # # #############################################################################
from .config import *
""" This file is designed to contain the various Python functions used to configure tasks. The functions will be imported by the __init__.py file in this folder. """ # ################################################################################################################## # PLACEHOLDER: Put your Python functions here # # # # Example: # # def place_holder_algorithm(): # # pass # # Comment, remove or replace the previous lines with your own use case # # ##################################################################################################################
from algorithms import *
from .root import root_page
""" The root page of the application. Page content is imported from the root.md file. Please refer to https://docs.taipy.io/en/latest/manuals/gui/pages for more details. """ from taipy.gui import Markdown root_page = Markdown("pages/root.md")
""" A page of the application. Page content is imported from the page_example.md file. Please refer to https://docs.taipy.io/en/latest/manuals/gui/pages for more details. """ from taipy.gui import Markdown page_example = Markdown("pages/page_example/page_example.md")
import sys

# Page names arrive from the cookiecutter prompt as one space-separated
# string; drop the empty fragments produced by extra spaces.
pages = [name for name in "{{ cookiecutter.__pages }}".split(" ") if name != ""]

# Abort template generation on the first page name that cannot be used
# as a Python module name.
for page in pages:
    if not page.isidentifier():
        sys.exit(f'Page name "{page}" is not a valid Python identifier. Please choose another name.')
import os import shutil import taipy def handle_services(use_rest, use_core): if use_core or use_rest: # Write "import taipy as tp" at the third line of the import.txt file with open(os.path.join(os.getcwd(), "sections", "import.txt"), "r") as import_file: import_lines = import_file.readlines() import_lines[0] = "import taipy as tp\n" + import_lines[0] + "\n" with open(os.path.join(os.getcwd(), "sections", "import.txt"), "w") as import_file: import_file.writelines(import_lines) # Import the necessary services if use_core and use_rest: with open(os.path.join(os.getcwd(), "sections", "import.txt"), "a") as import_file: import_file.write("from taipy import Core, Rest\n") elif use_core: with open(os.path.join(os.getcwd(), "sections", "import.txt"), "a") as import_file: import_file.write("from taipy import Core\n") elif use_rest: with open(os.path.join(os.getcwd(), "sections", "import.txt"), "a") as import_file: import_file.write("from taipy import Rest\n") # Start the Rest service if use_rest: with open(os.path.join(os.getcwd(), "sections", "main.txt"), "a") as main_file: main_file.write(" rest = Rest()\n") if use_core: # Create and submit the placeholder scenario with open(os.path.join(os.getcwd(), "sections", "main.txt"), "a") as main_file: main_file.write(" core = Core()\n") main_file.write(" core.run()\n") main_file.write(" # #############################################################################\n") main_file.write(" # PLACEHOLDER: Create and submit your scenario here #\n") main_file.write(" # #\n") main_file.write(" # Example: #\n") main_file.write(" # from configuration import scenario_config #\n") main_file.write(" # scenario = tp.create_scenario(scenario_config) #\n") main_file.write(" # scenario.submit() #\n") main_file.write(" # Comment, remove or replace the previous lines with your own use case #\n") main_file.write(" # #############################################################################\n") else: shutil.rmtree(os.path.join(os.getcwd(), 
"algorithms")) shutil.rmtree(os.path.join(os.getcwd(), "configuration")) def handle_run_service(): with open(os.path.join(os.getcwd(), "sections", "main.txt"), "a+") as main_file: main_file.seek(0) main_content = main_file.read() # Run Rest service along with the GUI service if "rest = Rest()" in main_content: main_file.write(' tp.run(gui, rest, title="{{cookiecutter.__application_title}}")\n') else: main_file.write(' gui.run(title="{{cookiecutter.__application_title}}")\n') def handle_single_page_app(): shutil.rmtree(os.path.join(os.getcwd(), "pages")) with open(os.path.join(os.getcwd(), "sections", "main.txt"), "a") as main_file: main_file.write("\n") main_file.write(" gui = Gui(page=page)\n") handle_run_service() with open(os.path.join(os.getcwd(), "sections", "page_content.txt"), "a") as page_content_file: page_content_file.write( ''' page = """ <center> <|navbar|lov={[("home", "Homepage")]}|> </center> """ ''' ) def handle_multi_page_app(pages): for page_name in pages: os.mkdir(os.path.join(os.getcwd(), "pages", page_name)) with open(os.path.join(os.getcwd(), "pages", "page_example", "page_example.md"), "r") as page_md_file: page_md_content = page_md_file.read() page_md_content = page_md_content.replace("Page example", page_name.replace("_", " ").title()) with open(os.path.join(os.getcwd(), "pages", page_name, page_name + ".md"), "w") as page_md_file: page_md_file.write(page_md_content) with open(os.path.join(os.getcwd(), "pages", "page_example", "page_example.py"), "r") as page_content_file: page_py_content = page_content_file.read() page_py_content = page_py_content.replace("page_example", page_name) with open(os.path.join(os.getcwd(), "pages", page_name, page_name + ".py"), "w") as page_content_file: page_content_file.write(page_py_content) with open(os.path.join(os.getcwd(), "pages", "__init__.py"), "a") as page_init_file: for page_name in pages: page_init_file.write(f"from .{page_name}.{page_name} import {page_name}\n") 
shutil.rmtree(os.path.join(os.getcwd(), "pages", "page_example")) newline = ",\n\t" user_page_dict = newline.join(f'"{page_name}": {page_name}' for page_name in pages) page_dict = """ pages = { "/": root_page, {pages} } """ with open(os.path.join(os.getcwd(), "sections", "page_content.txt"), "a") as page_content_file: page_content_file.write(page_dict.replace("{pages}", user_page_dict)) with open(os.path.join(os.getcwd(), "sections", "import.txt"), "a") as import_file: import_file.write("from pages import *\n") with open(os.path.join(os.getcwd(), "sections", "main.txt"), "a") as main_file: main_file.write("\n") main_file.write(" gui = Gui(pages=pages)\n") handle_run_service() def generate_main_file(): with open(os.path.join(os.getcwd(), "sections", "import.txt"), "r") as import_file: import_lines = import_file.read() with open(os.path.join(os.getcwd(), "sections", "page_content.txt"), "r") as page_content_file: page_content = page_content_file.read() with open(os.path.join(os.getcwd(), "sections", "main.txt"), "r") as main_file: main_lines = main_file.read() with open(os.path.join(os.getcwd(), "{{cookiecutter.__main_file}}.py"), "a") as app_main_file: app_main_file.write(import_lines) app_main_file.write("\n") app_main_file.write(page_content) app_main_file.write("\n\n") app_main_file.write(main_lines) with open(os.path.join(os.getcwd(), "requirements.txt"), "a") as requirement_file: requirement_file.write(f"taipy=={taipy.version._get_version()}\n") use_core = "{{ cookiecutter.__core }}".upper() use_rest = "{{ cookiecutter.__rest }}".upper() handle_services(use_rest in ["YES", "Y"], use_core in ["YES", "Y"]) pages = "{{ cookiecutter.__pages }}".split(" ") # Remove empty string from pages list pages = [page for page in pages if page != ""] if len(pages) == 0: handle_single_page_app() else: handle_multi_page_app(pages) generate_main_file() # Remove the sections folder shutil.rmtree(os.path.join(os.getcwd(), "sections")) main_file_name = 
"{{cookiecutter.__main_file}}.py" print( f"New Taipy application has been created at {os.path.join(os.getcwd())}" f"\n\nTo start the application, change directory to the newly created folder:" f"\n\tcd {os.path.join(os.getcwd())}" f"\nand run the application as follows:" f"\n\ttaipy run {main_file_name}" )
from ._GuiCoreLib import _GuiCore


def _init_gui_core() -> None:
    """Register the Core element library with Taipy GUI.

    The taipy.gui import is local to avoid importing GUI machinery at
    package-import time when it is not needed.
    """
    from taipy.gui import Gui

    Gui.add_library(_GuiCore())


# Registration happens as a side effect of importing this module.
_init_gui_core()
from ._init import *
import typing as t
from datetime import datetime

from taipy.gui import Gui, State
from taipy.gui.extension import Element, ElementLibrary, ElementProperty, PropertyType

from ..version import _get_version
from ._adapters import _GuiCoreDatanodeAdapter, _GuiCoreScenarioAdapter, _GuiCoreScenarioDagAdapter
from ._context import _GuiCoreContext


class _GuiCore(ElementLibrary):
    """Element library exposing the Taipy Core visual elements to Taipy GUI.

    Declares the scenario/data-node/job selector and viewer elements, binds
    their server-side callbacks to a per-Gui `_GuiCoreContext` instance, and
    registers the type adapters used to render Core entities in list-of-value
    controls.
    """

    __LIB_NAME = "taipy_gui_core"
    # Name of the hidden variable holding the _GuiCoreContext instance;
    # the f-strings below generate bindings that call methods on it.
    __CTX_VAR_NAME = f"__{__LIB_NAME}_Ctx"
    # Adapter type names registered in on_init().
    __SCENARIO_ADAPTER = "tgc_scenario"
    __DATANODE_ADAPTER = "tgc_datanode"
    __JOB_ADAPTER = "tgc_job"

    # One Element per visual control. For each Element: first argument is the
    # default property name, second the user-settable properties, and
    # inner_properties are internal bindings wired to the context object.
    __elts = {
        "scenario_selector": Element(
            "value",
            {
                "id": ElementProperty(PropertyType.string),
                "show_add_button": ElementProperty(PropertyType.boolean, True),
                "display_cycles": ElementProperty(PropertyType.boolean, True),
                "show_primary_flag": ElementProperty(PropertyType.boolean, True),
                "value": ElementProperty(PropertyType.lov_value),
                "on_change": ElementProperty(PropertyType.function),
                "height": ElementProperty(PropertyType.string, "50vh"),
                "class_name": ElementProperty(PropertyType.dynamic_string),
                "show_pins": ElementProperty(PropertyType.boolean, False),
                "on_creation": ElementProperty(PropertyType.function),
            },
            inner_properties={
                "scenarios": ElementProperty(PropertyType.lov, f"{{{__CTX_VAR_NAME}.get_scenarios()}}"),
                "on_scenario_crud": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.crud_scenario}}"),
                "configs": ElementProperty(PropertyType.react, f"{{{__CTX_VAR_NAME}.get_scenario_configs()}}"),
                "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
                "error": ElementProperty(PropertyType.react, f"{{{_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR}}}"),
                "type": ElementProperty(PropertyType.inner, __SCENARIO_ADAPTER),
                "scenario_edit": ElementProperty(
                    _GuiCoreScenarioAdapter,
                    f"{{{__CTX_VAR_NAME}.get_scenario_by_id({_GuiCoreContext._SCENARIO_SELECTOR_ID_VAR})}}",
                ),
                "on_scenario_select": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.select_scenario}}"),
            },
        ),
        "scenario": Element(
            "scenario",
            {
                "id": ElementProperty(PropertyType.string),
                "scenario": ElementProperty(_GuiCoreScenarioAdapter),
                "active": ElementProperty(PropertyType.dynamic_boolean, True),
                "expandable": ElementProperty(PropertyType.boolean, True),
                "expanded": ElementProperty(PropertyType.boolean, True),
                "show_submit": ElementProperty(PropertyType.boolean, True),
                "show_delete": ElementProperty(PropertyType.boolean, True),
                "show_config": ElementProperty(PropertyType.boolean, False),
                "show_cycle": ElementProperty(PropertyType.boolean, False),
                "show_tags": ElementProperty(PropertyType.boolean, True),
                "show_properties": ElementProperty(PropertyType.boolean, True),
                "show_sequences": ElementProperty(PropertyType.boolean, True),
                "show_submit_sequences": ElementProperty(PropertyType.boolean, True),
                "class_name": ElementProperty(PropertyType.dynamic_string),
                "on_submission_change": ElementProperty(PropertyType.function),
            },
            inner_properties={
                "on_edit": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.edit_entity}}"),
                "on_submit": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.submit_entity}}"),
                "on_delete": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.crud_scenario}}"),
                "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
                "error": ElementProperty(PropertyType.react, f"{{{_GuiCoreContext._SCENARIO_VIZ_ERROR_VAR}}}"),
            },
        ),
        "scenario_dag": Element(
            "scenario",
            {
                "id": ElementProperty(PropertyType.string),
                "scenario": ElementProperty(_GuiCoreScenarioDagAdapter),
                "render": ElementProperty(PropertyType.dynamic_boolean, True),
                "show_toolbar": ElementProperty(PropertyType.boolean, True),
                "width": ElementProperty(PropertyType.string),
                "height": ElementProperty(PropertyType.string),
                "class_name": ElementProperty(PropertyType.dynamic_string),
            },
            inner_properties={
                "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
            },
        ),
        "data_node_selector": Element(
            "value",
            {
                "id": ElementProperty(PropertyType.string),
                "display_cycles": ElementProperty(PropertyType.boolean, True),
                "show_primary_flag": ElementProperty(PropertyType.boolean, True),
                "value": ElementProperty(PropertyType.lov_value),
                "on_change": ElementProperty(PropertyType.function),
                "height": ElementProperty(PropertyType.string, "50vh"),
                "class_name": ElementProperty(PropertyType.dynamic_string),
                "show_pins": ElementProperty(PropertyType.boolean, True),
            },
            inner_properties={
                "datanodes": ElementProperty(PropertyType.lov, f"{{{__CTX_VAR_NAME}.get_datanodes_tree()}}"),
                "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
                "type": ElementProperty(PropertyType.inner, __DATANODE_ADAPTER),
            },
        ),
        "data_node": Element(
            _GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP,
            {
                "id": ElementProperty(PropertyType.string),
                _GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP: ElementProperty(_GuiCoreDatanodeAdapter),
                "active": ElementProperty(PropertyType.dynamic_boolean, True),
                "expandable": ElementProperty(PropertyType.boolean, True),
                "expanded": ElementProperty(PropertyType.boolean, True),
                "show_config": ElementProperty(PropertyType.boolean, False),
                "show_owner": ElementProperty(PropertyType.boolean, True),
                "show_edit_date": ElementProperty(PropertyType.boolean, False),
                "show_expiration_date": ElementProperty(PropertyType.boolean, False),
                "show_properties": ElementProperty(PropertyType.boolean, True),
                "show_history": ElementProperty(PropertyType.boolean, True),
                "show_data": ElementProperty(PropertyType.boolean, True),
                # NOTE(review): "chart_config" appears both here and in
                # inner_properties below — confirm the intended precedence.
                "chart_config": ElementProperty(PropertyType.dict),
                "class_name": ElementProperty(PropertyType.dynamic_string),
                "scenario": ElementProperty(PropertyType.lov_value, "optional"),
                "width": ElementProperty(PropertyType.string),
            },
            inner_properties={
                "on_edit": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.edit_data_node}}"),
                "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
                "error": ElementProperty(PropertyType.react, f"{{{_GuiCoreContext._DATANODE_VIZ_ERROR_VAR}}}"),
                "scenarios": ElementProperty(
                    PropertyType.lov,
                    f"{{{__CTX_VAR_NAME}.get_scenarios_for_owner({_GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR})}}",
                ),
                "type": ElementProperty(PropertyType.inner, __SCENARIO_ADAPTER),
                "on_id_select": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.select_id}}"),
                # <tp:prop:...> placeholders are substituted with the element's
                # own property value when the binding is resolved.
                "history": ElementProperty(
                    PropertyType.react,
                    f"{{{__CTX_VAR_NAME}.get_data_node_history("
                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
                    + f"{_GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR})}}",
                ),
                "data": ElementProperty(
                    PropertyType.react,
                    f"{{{__CTX_VAR_NAME}.get_data_node_data(<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>,"
                    + f" {_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR})}}",
                ),
                "tabular_data": ElementProperty(
                    PropertyType.data,
                    f"{{{__CTX_VAR_NAME}.get_data_node_tabular_data("
                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR})}}",
                ),
                "tabular_columns": ElementProperty(
                    PropertyType.dynamic_string,
                    f"{{{__CTX_VAR_NAME}.get_data_node_tabular_columns("
                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR})}}",
                ),
                "chart_config": ElementProperty(
                    PropertyType.dynamic_string,
                    f"{{{__CTX_VAR_NAME}.get_data_node_chart_config("
                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR})}}",
                ),
                "on_data_value": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.update_data}}"),
                "on_tabular_data_edit": ElementProperty(
                    PropertyType.function, f"{{{__CTX_VAR_NAME}.tabular_data_edit}}"
                ),
                "on_lock": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.lock_datanode_for_edit}}"),
            },
        ),
        "job_selector": Element(
            "value",
            {
                "id": ElementProperty(PropertyType.string),
                "class_name": ElementProperty(PropertyType.dynamic_string),
                "value": ElementProperty(PropertyType.lov_value),
                "show_id": ElementProperty(PropertyType.boolean, True),
                "show_submitted_label": ElementProperty(PropertyType.boolean, True),
                "show_submitted_id": ElementProperty(PropertyType.boolean, False),
                "show_submission_id": ElementProperty(PropertyType.boolean, False),
                "show_date": ElementProperty(PropertyType.boolean, True),
                "show_cancel": ElementProperty(PropertyType.boolean, True),
                "show_delete": ElementProperty(PropertyType.boolean, True),
                "on_change": ElementProperty(PropertyType.function),
                "height": ElementProperty(PropertyType.string, "50vh"),
            },
            inner_properties={
                "jobs": ElementProperty(PropertyType.lov, f"{{{__CTX_VAR_NAME}.get_jobs_list()}}"),
                "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
                "type": ElementProperty(PropertyType.inner, __JOB_ADAPTER),
                "on_job_action": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.act_on_jobs}}"),
                "error": ElementProperty(PropertyType.dynamic_string, f"{{{_GuiCoreContext._JOB_SELECTOR_ERROR_VAR}}}"),
            },
        ),
    }

    def get_name(self) -> str:
        """Return the library name used to namespace its elements."""
        return _GuiCore.__LIB_NAME

    def get_elements(self) -> t.Dict[str, Element]:
        """Return the element declarations of this library."""
        return _GuiCore.__elts

    def get_scripts(self) -> t.List[str]:
        """Return the front-end bundle(s) backing these elements."""
        return ["lib/taipy-gui-core.js"]

    def on_init(self, gui: Gui) -> t.Optional[t.Tuple[str, t.Any]]:
        """Initialize library state for a Gui instance.

        Seeds the shared default bindings with empty strings, creates the
        per-Gui context, registers the LOV adapters, and returns the
        (variable name, value) pair that exposes the context to bindings.
        """
        gui._get_default_locals_bind().update(
            {
                _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR: "",
                _GuiCoreContext._SCENARIO_SELECTOR_ID_VAR: "",
                _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR: "",
                _GuiCoreContext._JOB_SELECTOR_ERROR_VAR: "",
                _GuiCoreContext._DATANODE_VIZ_ERROR_VAR: "",
                _GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR: "",
                _GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR: "",
                _GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR: "",
                _GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR: "",
            }
        )
        ctx = _GuiCoreContext(gui)
        gui._add_adapter_for_type(_GuiCore.__SCENARIO_ADAPTER, ctx.scenario_adapter)
        gui._add_adapter_for_type(_GuiCore.__DATANODE_ADAPTER, ctx.data_node_adapter)
        gui._add_adapter_for_type(_GuiCore.__JOB_ADAPTER, ctx.job_adapter)
        return _GuiCore.__CTX_VAR_NAME, ctx

    def on_user_init(self, state: State):
        """Give each new user state its own empty copy of the library variables."""
        for var in [
            _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR,
            _GuiCoreContext._SCENARIO_SELECTOR_ID_VAR,
            _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR,
            _GuiCoreContext._JOB_SELECTOR_ERROR_VAR,
            _GuiCoreContext._DATANODE_VIZ_ERROR_VAR,
            _GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR,
            _GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR,
            _GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR,
            _GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR,
        ]:
            state._add_attribute(var, "")

    def get_version(self) -> str:
        """Return the library version string, computed once per instance.

        A timestamp suffix is appended, presumably to defeat browser caching
        of the script bundle — TODO confirm.
        """
        if not hasattr(self, "version"):
            self.version = _get_version() + str(datetime.now().timestamp())
        return self.version
import json import typing as t from collections import defaultdict from enum import Enum from numbers import Number from threading import Lock try: import zoneinfo except ImportError: from backports import zoneinfo # type: ignore[no-redef] import pandas as pd from dateutil import parser from taipy.config import Config from taipy.core import Cycle, DataNode, Job, Scenario, Sequence, cancel_job, create_scenario from taipy.core import delete as core_delete from taipy.core import delete_job from taipy.core import get as core_get from taipy.core import ( get_cycles_scenarios, get_data_nodes, get_jobs, is_deletable, is_editable, is_promotable, is_readable, is_submittable, set_primary, ) from taipy.core import submit as core_submit from taipy.core.data._abstract_tabular import _AbstractTabularDataNode from taipy.core.notification import CoreEventConsumerBase, EventEntityType from taipy.core.notification.event import Event, EventOperation from taipy.core.notification.notifier import Notifier from taipy.gui import Gui, State from taipy.gui._warnings import _warn from taipy.gui.gui import _DoNotUpdate from ._adapters import _EntityType class _SubmissionStatus(Enum): SUBMITTED = 0 COMPLETED = 1 CANCELED = 2 FAILED = 3 BLOCKED = 4 WAITING = 5 RUNNING = 6 UNDEFINED = 7 class _SubmissionDetails: def __init__( self, client_id: str, module_context: str, callback: t.Callable, entity_id: str, status: _SubmissionStatus, jobs: t.List[Job], ) -> None: self.client_id = client_id self.module_context = module_context self.callback = callback self.entity_id = entity_id self.status = status self.jobs = jobs def set_status(self, status: _SubmissionStatus): self.status = status return self class _GuiCoreContext(CoreEventConsumerBase): __PROP_ENTITY_ID = "id" __PROP_ENTITY_COMMENT = "comment" __PROP_CONFIG_ID = "config" __PROP_DATE = "date" __PROP_ENTITY_NAME = "name" __PROP_SCENARIO_PRIMARY = "primary" __PROP_SCENARIO_TAGS = "tags" __ENTITY_PROPS = (__PROP_CONFIG_ID, __PROP_DATE, 
__PROP_ENTITY_NAME) __ACTION = "action" _CORE_CHANGED_NAME = "core_changed" _SCENARIO_SELECTOR_ERROR_VAR = "gui_core_sc_error" _SCENARIO_SELECTOR_ID_VAR = "gui_core_sc_id" _SCENARIO_VIZ_ERROR_VAR = "gui_core_sv_error" _JOB_SELECTOR_ERROR_VAR = "gui_core_js_error" _DATANODE_VIZ_ERROR_VAR = "gui_core_dv_error" _DATANODE_VIZ_OWNER_ID_VAR = "gui_core_dv_owner_id" _DATANODE_VIZ_HISTORY_ID_VAR = "gui_core_dv_history_id" _DATANODE_VIZ_DATA_ID_VAR = "gui_core_dv_data_id" _DATANODE_VIZ_DATA_CHART_ID_VAR = "gui_core_dv_data_chart_id" _DATANODE_VIZ_DATA_NODE_PROP = "data_node" def __init__(self, gui: Gui) -> None: self.gui = gui self.scenario_by_cycle: t.Optional[t.Dict[t.Optional[Cycle], t.List[Scenario]]] = None self.data_nodes_by_owner: t.Optional[t.Dict[t.Optional[str], DataNode]] = None self.scenario_configs: t.Optional[t.List[t.Tuple[str, str]]] = None self.jobs_list: t.Optional[t.List[Job]] = None self.client_jobs_by_submission: t.Dict[str, _SubmissionDetails] = dict() # register to taipy core notification reg_id, reg_queue = Notifier.register() # locks self.lock = Lock() self.submissions_lock = Lock() # super super().__init__(reg_id, reg_queue) self.start() def process_event(self, event: Event): if event.entity_type == EventEntityType.SCENARIO: if event.operation == EventOperation.SUBMISSION: self.scenario_status_callback(event.attribute_name, True) return self.scenario_refresh( event.entity_id if event.operation != EventOperation.DELETION and is_readable(event.entity_id) else None ) elif event.entity_type == EventEntityType.SEQUENCE and event.entity_id: sequence = None try: sequence = ( core_get(event.entity_id) if event.operation != EventOperation.DELETION and is_readable(event.entity_id) else None ) if sequence and hasattr(sequence, "parent_ids") and sequence.parent_ids: self.gui._broadcast( _GuiCoreContext._CORE_CHANGED_NAME, {"scenario": [x for x in sequence.parent_ids]} ) except Exception as e: _warn(f"Access to sequence {event.entity_id} failed", e) elif 
event.entity_type == EventEntityType.JOB: with self.lock: self.jobs_list = None self.scenario_status_callback(event.entity_id) elif event.entity_type == EventEntityType.DATA_NODE: with self.lock: self.data_nodes_by_owner = None self.gui._broadcast( _GuiCoreContext._CORE_CHANGED_NAME, {"datanode": event.entity_id if event.operation != EventOperation.DELETION else True}, ) def scenario_refresh(self, scenario_id: t.Optional[str]): with self.lock: self.scenario_by_cycle = None self.data_nodes_by_owner = None self.gui._broadcast( _GuiCoreContext._CORE_CHANGED_NAME, {"scenario": scenario_id or True}, ) def scenario_status_callback(self, job_id: str, is_submission: t.Optional[bool] = False): if not job_id or not (is_submission or is_readable(job_id)): return try: if is_submission: sub_id = job_id job = None else: job = core_get(job_id) if not job: return sub_id = job.submit_id sub_details = self.client_jobs_by_submission.get(sub_id) if not sub_details: return if not sub_details.client_id or not sub_details.entity_id or not sub_details.jobs: return entity = core_get(sub_details.entity_id) if not entity: return new_status = self._get_submittable_status(sub_details.jobs) if sub_details.status != new_status: # callback self.gui._call_user_callback( sub_details.client_id, sub_details.callback, [entity, {"submission_status": new_status.name, "job": job}], sub_details.module_context, ) with self.submissions_lock: if new_status in ( _SubmissionStatus.COMPLETED, _SubmissionStatus.FAILED, _SubmissionStatus.CANCELED, ): self.client_jobs_by_submission.pop(sub_id, None) else: self.client_jobs_by_submission[sub_id] = sub_details.set_status(new_status) except Exception as e: _warn(f"Job ({job_id}) is not available", e) finally: self.gui._broadcast(_GuiCoreContext._CORE_CHANGED_NAME, {"jobs": True}) def scenario_adapter(self, scenario_or_cycle): try: if ( hasattr(scenario_or_cycle, "id") and is_readable(scenario_or_cycle.id) and core_get(scenario_or_cycle.id) is not None ): if 
self.scenario_by_cycle and isinstance(scenario_or_cycle, Cycle): return ( scenario_or_cycle.id, scenario_or_cycle.get_simple_label(), self.scenario_by_cycle.get(scenario_or_cycle), _EntityType.CYCLE.value, False, ) elif isinstance(scenario_or_cycle, Scenario): return ( scenario_or_cycle.id, scenario_or_cycle.get_simple_label(), None, _EntityType.SCENARIO.value, scenario_or_cycle.is_primary, ) except Exception as e: _warn( f"Access to {type(scenario_or_cycle)} " + f"({scenario_or_cycle.id if hasattr(scenario_or_cycle, 'id') else 'No_id'})" + " failed", e, ) return None def get_scenarios(self): cycles_scenarios = [] with self.lock: if self.scenario_by_cycle is None: self.scenario_by_cycle = get_cycles_scenarios() for cycle, scenarios in self.scenario_by_cycle.items(): if cycle is None: cycles_scenarios.extend(scenarios) else: cycles_scenarios.append(cycle) return cycles_scenarios def select_scenario(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) == 0: return state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ID_VAR, args[0]) def get_scenario_by_id(self, id: str) -> t.Optional[Scenario]: if not id or not is_readable(id): return None try: return core_get(id) except Exception: return None def get_scenario_configs(self): with self.lock: if self.scenario_configs is None: configs = Config.scenarios if isinstance(configs, dict): self.scenario_configs = [(id, f"{c.id}") for id, c in configs.items() if id != "default"] return self.scenario_configs def crud_scenario(self, state: State, id: str, payload: t.Dict[str, str]): # noqa: C901 args = payload.get("args") if ( args is None or not isinstance(args, list) or len(args) < 3 or not isinstance(args[0], bool) or not isinstance(args[1], bool) or not isinstance(args[2], dict) ): return update = args[0] delete = args[1] data = args[2] scenario = None name = data.get(_GuiCoreContext.__PROP_ENTITY_NAME) if update: scenario_id = 
data.get(_GuiCoreContext.__PROP_ENTITY_ID) if delete: if not is_deletable(scenario_id): state.assign( _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Scenario. {scenario_id} is not deletable." ) return try: core_delete(scenario_id) except Exception as e: state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Error deleting Scenario. {e}") else: if not self.__check_readable_editable( state, scenario_id, "Scenario", _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR ): return scenario = core_get(scenario_id) else: config_id = data.get(_GuiCoreContext.__PROP_CONFIG_ID) scenario_config = Config.scenarios.get(config_id) if scenario_config is None: state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Invalid configuration id ({config_id})") return date_str = data.get(_GuiCoreContext.__PROP_DATE) try: date = parser.parse(date_str) if isinstance(date_str, str) else None except Exception as e: state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Invalid date ({date_str}).{e}") return scenario_id = None try: gui: Gui = state._gui on_creation = args[3] if len(args) > 3 and isinstance(args[3], str) else None on_creation_function = gui._get_user_function(on_creation) if on_creation else None if callable(on_creation_function): try: res = gui._call_function_with_state( on_creation_function, [ id, { "action": on_creation, "config": scenario_config, "date": date, "label": name, "properties": { v.get("key"): v.get("value") for v in data.get("properties", dict()) }, }, ], ) if isinstance(res, Scenario): # everything's fine scenario_id = res.id state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, "") return if res: # do not create state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"{res}") return except Exception as e: # pragma: no cover if not gui._call_on_exception(on_creation, e): _warn(f"on_creation(): Exception raised in '{on_creation}()'", e) state.assign( _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Error creating Scenario with 
'{on_creation}()'. {e}", ) return elif on_creation is not None: _warn(f"on_creation(): '{on_creation}' is not a function.") scenario = create_scenario(scenario_config, date, name) scenario_id = scenario.id except Exception as e: state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Error creating Scenario. {e}") finally: self.scenario_refresh(scenario_id) if scenario: if not is_editable(scenario): state.assign( _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Scenario {scenario_id or name} is not editable." ) return with scenario as sc: sc.properties[_GuiCoreContext.__PROP_ENTITY_NAME] = name if props := data.get("properties"): try: new_keys = [prop.get("key") for prop in props] for key in t.cast(dict, sc.properties).keys(): if key and key not in _GuiCoreContext.__ENTITY_PROPS and key not in new_keys: t.cast(dict, sc.properties).pop(key, None) for prop in props: key = prop.get("key") if key and key not in _GuiCoreContext.__ENTITY_PROPS: sc._properties[key] = prop.get("value") state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, "") except Exception as e: state.assign(_GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR, f"Error creating Scenario. {e}") def edit_entity(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict): return data = args[0] entity_id = data.get(_GuiCoreContext.__PROP_ENTITY_ID) if not self.__check_readable_editable( state, entity_id, data.get("type", "Scenario"), _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR ): return entity: t.Union[Scenario, Sequence] = core_get(entity_id) if entity: try: if isinstance(entity, Scenario): primary = data.get(_GuiCoreContext.__PROP_SCENARIO_PRIMARY) if primary is True: if not is_promotable(entity): state.assign( _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR, f"Scenario {entity_id} is not promotable." 
) return set_primary(entity) self.__edit_properties(entity, data) state.assign(_GuiCoreContext._SCENARIO_VIZ_ERROR_VAR, "") except Exception as e: state.assign(_GuiCoreContext._SCENARIO_VIZ_ERROR_VAR, f"Error updating Scenario. {e}") def submit_entity(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict): return data = args[0] entity_id = data.get(_GuiCoreContext.__PROP_ENTITY_ID) if not is_submittable(entity_id): state.assign( _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR, f"{data.get('type', 'Scenario')} {entity_id} is not submittable.", ) return entity = core_get(entity_id) if entity: try: jobs = core_submit(entity) if submission_cb := data.get("on_submission_change"): submission_fn = self.gui._get_user_function(submission_cb) if callable(submission_fn): job_ids = [j.id for j in (jobs if isinstance(jobs, list) else [jobs])] client_id = self.gui._get_client_id() module_context = self.gui._get_locals_context() sub_id = jobs[0].submit_id if isinstance(jobs, list) else jobs.submit_id with self.submissions_lock: self.client_jobs_by_submission[sub_id] = _SubmissionDetails( client_id, module_context, submission_fn, entity_id, _SubmissionStatus.SUBMITTED, job_ids, ) else: _warn(f"on_submission_change(): '{submission_cb}' is not a valid function.") self.scenario_status_callback(jobs[0].id if len(jobs) else "" if isinstance(jobs, list) else jobs.id) state.assign(_GuiCoreContext._SCENARIO_VIZ_ERROR_VAR, "") except Exception as e: state.assign(_GuiCoreContext._SCENARIO_VIZ_ERROR_VAR, f"Error submitting entity. 
{e}") def _get_submittable_status(self, jobs_ids: t.List[str]) -> _SubmissionStatus: abandoned = False canceled = False blocked = False waiting = False running = False completed = False for id in jobs_ids: job = core_get(id) if not job: continue if job.is_failed(): return _SubmissionStatus.FAILED if job.is_canceled(): canceled = True if job.is_blocked(): blocked = True continue if job.is_pending() or job.is_submitted(): waiting = True continue if job.is_running(): running = True continue if job.is_completed() or job.is_skipped(): completed = True continue if job.is_abandoned(): abandoned = True if canceled: return _SubmissionStatus.CANCELED if abandoned: return _SubmissionStatus.UNDEFINED if running: return _SubmissionStatus.RUNNING if waiting: return _SubmissionStatus.WAITING if blocked: return _SubmissionStatus.BLOCKED if completed: return _SubmissionStatus.COMPLETED return _SubmissionStatus.UNDEFINED def __do_datanodes_tree(self): if self.data_nodes_by_owner is None: self.data_nodes_by_owner = defaultdict(list) for dn in get_data_nodes(): self.data_nodes_by_owner[dn.owner_id].append(dn) def get_datanodes_tree(self): with self.lock: self.__do_datanodes_tree() return self.data_nodes_by_owner.get(None, []) + self.get_scenarios() def data_node_adapter(self, data): try: if hasattr(data, "id") and is_readable(data.id) and core_get(data.id) is not None: if isinstance(data, DataNode): return (data.id, data.get_simple_label(), None, _EntityType.DATANODE.value, False) else: with self.lock: self.__do_datanodes_tree() if self.data_nodes_by_owner: if isinstance(data, Cycle): return ( data.id, data.get_simple_label(), self.data_nodes_by_owner[data.id] + self.scenario_by_cycle.get(data, []), _EntityType.CYCLE.value, False, ) elif isinstance(data, Scenario): return ( data.id, data.get_simple_label(), self.data_nodes_by_owner[data.id] + list(data.sequences.values()), _EntityType.SCENARIO.value, data.is_primary, ) elif isinstance(data, Sequence): if datanodes := 
self.data_nodes_by_owner.get(data.id): return ( data.id, data.get_simple_label(), datanodes, _EntityType.SEQUENCE.value, False, ) except Exception as e: _warn( f"Access to {type(data)} ({data.id if hasattr(data, 'id') else 'No_id'}) failed", e, ) return None def get_jobs_list(self): with self.lock: if self.jobs_list is None: self.jobs_list = get_jobs() return self.jobs_list def job_adapter(self, job): try: if hasattr(job, "id") and is_readable(job.id) and core_get(job.id) is not None: if isinstance(job, Job): entity = core_get(job.owner_id) return ( job.id, job.get_simple_label(), [], entity.get_simple_label() if entity else "", entity.id if entity else "", job.submit_id, job.creation_date, job.status.value, is_deletable(job), is_readable(job), is_editable(job), ) except Exception as e: _warn(f"Access to job ({job.id if hasattr(job, 'id') else 'No_id'}) failed", e) return None def act_on_jobs(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict): return data = args[0] job_ids = data.get(_GuiCoreContext.__PROP_ENTITY_ID) job_action = data.get(_GuiCoreContext.__ACTION) if job_action and isinstance(job_ids, list): errs = [] if job_action == "delete": for job_id in job_ids: if not is_readable(job_id): errs.append(f"Job {job_id} is not readable.") continue if not is_deletable(job_id): errs.append(f"Job {job_id} is not deletable.") continue try: delete_job(core_get(job_id)) except Exception as e: errs.append(f"Error deleting job. {e}") elif job_action == "cancel": for job_id in job_ids: if not is_readable(job_id): errs.append(f"Job {job_id} is not readable.") continue if not is_editable(job_id): errs.append(f"Job {job_id} is not cancelable.") continue try: cancel_job(job_id) except Exception as e: errs.append(f"Error canceling job. 
{e}") state.assign(_GuiCoreContext._JOB_SELECTOR_ERROR_VAR, "<br/>".join(errs) if errs else "") def edit_data_node(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict): return data = args[0] entity_id = data.get(_GuiCoreContext.__PROP_ENTITY_ID) if not self.__check_readable_editable(state, entity_id, "DataNode", _GuiCoreContext._DATANODE_VIZ_ERROR_VAR): return entity: DataNode = core_get(entity_id) if isinstance(entity, DataNode): try: self.__edit_properties(entity, data) state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, "") except Exception as e: state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, f"Error updating Datanode. {e}") def lock_datanode_for_edit(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict): return data = args[0] entity_id = data.get(_GuiCoreContext.__PROP_ENTITY_ID) if not self.__check_readable_editable(state, entity_id, "Datanode", _GuiCoreContext._DATANODE_VIZ_ERROR_VAR): return lock = data.get("lock", True) entity: DataNode = core_get(entity_id) if isinstance(entity, DataNode): try: if lock: entity.lock_edit(self.gui._get_client_id()) else: entity.unlock_edit(self.gui._get_client_id()) state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, "") except Exception as e: state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, f"Error locking Datanode. 
{e}") def __edit_properties(self, entity: t.Union[Scenario, Sequence, DataNode], data: t.Dict[str, str]): with entity as ent: if isinstance(ent, Scenario): tags = data.get(_GuiCoreContext.__PROP_SCENARIO_TAGS) if isinstance(tags, (list, tuple)): ent.tags = {t for t in tags} name = data.get(_GuiCoreContext.__PROP_ENTITY_NAME) if isinstance(name, str): ent.properties[_GuiCoreContext.__PROP_ENTITY_NAME] = name props = data.get("properties") if isinstance(props, (list, tuple)): for prop in props: key = prop.get("key") if key and key not in _GuiCoreContext.__ENTITY_PROPS: ent.properties[key] = prop.get("value") deleted_props = data.get("deleted_properties") if isinstance(deleted_props, (list, tuple)): for prop in deleted_props: key = prop.get("key") if key and key not in _GuiCoreContext.__ENTITY_PROPS: ent.properties.pop(key, None) def get_scenarios_for_owner(self, owner_id: str): cycles_scenarios: t.List[t.Union[Scenario, Cycle]] = [] with self.lock: if self.scenario_by_cycle is None: self.scenario_by_cycle = get_cycles_scenarios() if owner_id: if owner_id == "GLOBAL": for cycle, scenarios in self.scenario_by_cycle.items(): if cycle is None: cycles_scenarios.extend(scenarios) else: cycles_scenarios.append(cycle) elif is_readable(owner_id): entity = core_get(owner_id) if entity and (scenarios := self.scenario_by_cycle.get(entity)): cycles_scenarios.extend(scenarios) elif isinstance(entity, Scenario): cycles_scenarios.append(entity) return cycles_scenarios def get_data_node_history(self, datanode: DataNode, id: str): if ( id and isinstance(datanode, DataNode) and id == datanode.id and (dn := core_get(id)) and isinstance(dn, DataNode) ): res = [] for e in dn.edits: job_id = e.get("job_id") job: Job = None if job_id: if not is_readable(job_id): job_id += " not readable" else: job = core_get(job_id) res.append( ( e.get("timestamp"), job_id if job_id else e.get("writer_identifier", ""), f"Execution of task {job.task.get_simple_label()}." 
if job and job.task else e.get("comment", ""), ) ) return list(reversed(sorted(res, key=lambda r: r[0]))) return _DoNotUpdate() def get_data_node_data(self, datanode: DataNode, id: str): if ( id and isinstance(datanode, DataNode) and id == datanode.id and (dn := core_get(id)) and isinstance(dn, DataNode) ): if dn._last_edit_date: if isinstance(dn, _AbstractTabularDataNode): return (None, None, True, None) try: value = dn.read() if isinstance(value, (pd.DataFrame, pd.Series)): return (None, None, True, None) return ( value, "date" if "date" in type(value).__name__ else type(value).__name__ if isinstance(value, Number) else None, None, None, ) except Exception as e: return (None, None, None, f"read data_node: {e}") return (None, None, None, f"Data unavailable for {dn.get_simple_label()}") return _DoNotUpdate() def __check_readable_editable(self, state: State, id: str, type: str, var: str): if not is_readable(id): state.assign(var, f"{type} {id} is not readable.") return False if not is_editable(id): state.assign(var, f"{type} {id} is not editable.") return False return True def update_data(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict): return data = args[0] entity_id = data.get(_GuiCoreContext.__PROP_ENTITY_ID) if not self.__check_readable_editable(state, entity_id, "DataNode", _GuiCoreContext._DATANODE_VIZ_ERROR_VAR): return entity: DataNode = core_get(entity_id) if isinstance(entity, DataNode): try: entity.write( parser.parse(data.get("value")) if data.get("type") == "date" else int(data.get("value")) if data.get("type") == "int" else float(data.get("value")) if data.get("type") == "float" else data.get("value"), comment=data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT), ) entity.unlock_edit(self.gui._get_client_id()) state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, "") except Exception as e: 
state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, f"Error updating Datanode value. {e}") state.assign(_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR, entity_id) # this will update the data value def tabular_data_edit(self, state: State, var_name: str, payload: dict): user_data = payload.get("user_data", {}) dn_id = user_data.get("dn_id") if not self.__check_readable_editable(state, dn_id, "DataNode", _GuiCoreContext._DATANODE_VIZ_ERROR_VAR): return datanode = core_get(dn_id) if dn_id else None if isinstance(datanode, DataNode): try: idx = payload.get("index") col = payload.get("col") tz = payload.get("tz") val = ( parser.parse(str(payload.get("value"))).astimezone(zoneinfo.ZoneInfo(tz)).replace(tzinfo=None) if tz is not None else payload.get("value") ) # user_value = payload.get("user_value") data = self.__read_tabular_data(datanode) if hasattr(data, "at"): data.at[idx, col] = val datanode.write(data, comment=user_data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT)) state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, "") else: state.assign( _GuiCoreContext._DATANODE_VIZ_ERROR_VAR, "Error updating Datanode tabular value: type does not support at[] indexer.", ) except Exception as e: state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, f"Error updating Datanode tabular value. 
{e}") setattr(state, _GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR, dn_id) def __read_tabular_data(self, datanode: DataNode): return datanode.read() def get_data_node_tabular_data(self, datanode: DataNode, id: str): if ( id and isinstance(datanode, DataNode) and id == datanode.id and is_readable(id) and (dn := core_get(id)) and isinstance(dn, DataNode) and dn.is_ready_for_reading ): try: return self.__read_tabular_data(dn) except Exception: return None return None def get_data_node_tabular_columns(self, datanode: DataNode, id: str): if ( id and isinstance(datanode, DataNode) and id == datanode.id and is_readable(id) and (dn := core_get(id)) and isinstance(dn, DataNode) and dn.is_ready_for_reading ): try: return self.gui._tbl_cols( True, True, "{}", json.dumps({"data": "tabular_data"}), tabular_data=self.__read_tabular_data(dn) ) except Exception: return None return None def get_data_node_chart_config(self, datanode: DataNode, id: str): if ( id and isinstance(datanode, DataNode) and id == datanode.id and is_readable(id) and (dn := core_get(id)) and isinstance(dn, DataNode) and dn.is_ready_for_reading ): try: return self.gui._chart_conf( True, True, "{}", json.dumps({"data": "tabular_data"}), tabular_data=self.__read_tabular_data(dn) ) except Exception: return None return None def select_id(self, state: State, id: str, payload: t.Dict[str, str]): args = payload.get("args") if args is None or not isinstance(args, list) or len(args) == 0 and isinstance(args[0], dict): return data = args[0] if owner_id := data.get("owner_id"): state.assign(_GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR, owner_id) elif history_id := data.get("history_id"): state.assign(_GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR, history_id) elif data_id := data.get("data_id"): state.assign(_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR, data_id) elif chart_id := data.get("chart_id"): state.assign(_GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR, chart_id)
import typing as t
from enum import Enum

from taipy.core import Cycle, DataNode, Job, Scenario, Sequence, Task
from taipy.core import get as core_get
from taipy.core import is_deletable, is_editable, is_promotable, is_readable, is_submittable
from taipy.gui._warnings import _warn
from taipy.gui.gui import _DoNotUpdate
from taipy.gui.utils import _TaipyBase


# prevent gui from trying to push scenario instances to the front-end
class _GCDoNotUpdate(_DoNotUpdate):
    # Prefer the entity's label over the default repr when one is available.
    def __repr__(self):
        return self.get_label() if hasattr(self, "get_label") else super().__repr__()


# Graft _GCDoNotUpdate onto every Core entity class so the GUI serializer treats
# raw entity instances as "do not update" values instead of pushing them as-is.
Scenario.__bases__ += (_GCDoNotUpdate,)
Sequence.__bases__ += (_GCDoNotUpdate,)
DataNode.__bases__ += (_GCDoNotUpdate,)
Cycle.__bases__ += (_GCDoNotUpdate,)
Job.__bases__ += (_GCDoNotUpdate,)
Task.__bases__ += (_GCDoNotUpdate,)


class _EntityType(Enum):
    # Numeric discriminator used by the front-end to tell entity kinds apart.
    CYCLE = 0
    SCENARIO = 1
    SEQUENCE = 2
    DATANODE = 3


class _GuiCoreScenarioAdapter(_TaipyBase):
    """Serialize a `Scenario` into the flat list the scenario viewer component consumes."""

    # Properties surfaced through dedicated fields; excluded from the generic properties list.
    __INNER_PROPS = ["name"]

    def get(self):
        """Return the serialized scenario payload, or None when unavailable/on error."""
        data = super().get()
        if isinstance(data, Scenario):
            try:
                # Re-fetch by id so the freshest state is serialized.
                scenario = core_get(data.id)
                if scenario:
                    return [
                        scenario.id,
                        scenario.is_primary,
                        scenario.config_id,
                        scenario.creation_date.isoformat(),
                        scenario.cycle.get_simple_label() if scenario.cycle else "",
                        scenario.get_simple_label(),
                        list(scenario.tags) if scenario.tags else [],
                        [
                            (k, v)
                            for k, v in scenario.properties.items()
                            if k not in _GuiCoreScenarioAdapter.__INNER_PROPS
                        ]
                        if scenario.properties
                        else [],
                        # One tuple per sequence: (id, label, submittable?, editable?).
                        [
                            (p.id, p.get_simple_label(), is_submittable(p), is_editable(p))
                            for p in scenario.sequences.values()
                        ]
                        if hasattr(scenario, "sequences") and scenario.sequences
                        else [],
                        list(scenario.properties.get("authorized_tags", [])) if scenario.properties else [],
                        is_deletable(scenario),
                        is_promotable(scenario),
                        is_submittable(scenario),
                        is_readable(scenario),
                        is_editable(scenario),
                    ]
            except Exception as e:
                _warn(f"Access to scenario ({data.id if hasattr(data, 'id') else 'No_id'}) failed", e)

        return None

    @staticmethod
    def get_hash():
        # Holder name suffix identifying this adapter on the front-end.
        return _TaipyBase._HOLDER_PREFIX + "Sc"


class _GuiCoreScenarioDagAdapter(_TaipyBase):
    """Serialize a `Scenario`'s DAG: nodes grouped by entity type, plus typed edges."""

    @staticmethod
    def get_entity_type(node: t.Any):
        # Data nodes are reported under the DataNode class name; other nodes keep their own type.
        return DataNode.__name__ if isinstance(node.entity, DataNode) else node.type

    def get(self):
        """Return [scenario_id, {type: {node_id: info}}, [(src_type, src_id, dest_type, dest_id), ...]]."""
        data = super().get()
        if isinstance(data, Scenario):
            try:
                scenario = core_get(data.id)
                if scenario:
                    dag = data._get_dag()
                    # nodes: entity type -> {node id -> {name, storage type}}
                    nodes = dict()
                    for id, node in dag.nodes.items():
                        entityType = _GuiCoreScenarioDagAdapter.get_entity_type(node)
                        cat = nodes.get(entityType)
                        if cat is None:
                            cat = dict()
                            nodes[entityType] = cat
                        cat[id] = {
                            "name": node.entity.get_simple_label(),
                            "type": node.entity.storage_type() if hasattr(node.entity, "storage_type") else None,
                        }
                    return [
                        data.id,
                        nodes,
                        [
                            (
                                _GuiCoreScenarioDagAdapter.get_entity_type(e.src),
                                e.src.entity.id,
                                _GuiCoreScenarioDagAdapter.get_entity_type(e.dest),
                                e.dest.entity.id,
                            )
                            for e in dag.edges
                        ],
                    ]
            except Exception as e:
                _warn(f"Access to scenario ({data.id if hasattr(data, 'id') else 'No_id'}) failed", e)

        return None

    @staticmethod
    def get_hash():
        return _TaipyBase._HOLDER_PREFIX + "ScG"


class _GuiCoreDatanodeAdapter(_TaipyBase):
    """Serialize a `DataNode` into the flat list the data node viewer component consumes."""

    # Properties surfaced through dedicated fields; excluded from the user-properties list.
    __INNER_PROPS = ["name"]

    def get(self):
        """Return the serialized data node payload, or None when unavailable/on error."""
        data = super().get()
        if isinstance(data, DataNode):
            try:
                datanode = core_get(data.id)
                if datanode:
                    owner = core_get(datanode.owner_id) if datanode.owner_id else None
                    return [
                        datanode.id,
                        datanode.storage_type() if hasattr(datanode, "storage_type") else "",
                        datanode.config_id,
                        f"{datanode.last_edit_date}" if datanode.last_edit_date else "",
                        # NOTE(review): expiration_date is guarded by last_edit_date (not by
                        # itself) — presumably because it derives from the last edit; confirm.
                        f"{datanode.expiration_date}" if datanode.last_edit_date else "",
                        datanode.get_simple_label(),
                        datanode.owner_id or "",
                        owner.get_simple_label() if owner else "GLOBAL",
                        # Owner kind: cycle, scenario, or -1 when global/unknown.
                        _EntityType.CYCLE.value
                        if isinstance(owner, Cycle)
                        else _EntityType.SCENARIO.value
                        if isinstance(owner, Scenario)
                        else -1,
                        [
                            (k, f"{v}")
                            for k, v in datanode._get_user_properties().items()
                            if k not in _GuiCoreDatanodeAdapter.__INNER_PROPS
                        ],
                        datanode._edit_in_progress,
                        datanode._editor_id,
                        is_readable(datanode),
                        is_editable(datanode),
                    ]
            except Exception as e:
                _warn(f"Access to datanode ({data.id if hasattr(data, 'id') else 'No_id'}) failed", e)

        return None

    @staticmethod
    def get_hash():
        return _TaipyBase._HOLDER_PREFIX + "Dn"
import json
import os


def _get_version():
    """Build the package version string from the bundled ``version.json`` manifest.

    Returns:
        The version as ``"major.minor.patch"``, with ``".<ext>"`` appended when
        the manifest declares a truthy extension tag (e.g. a pre-release suffix).
    """
    # The manifest lives next to this module.
    manifest_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "version.json")
    with open(manifest_path) as manifest:
        info = json.load(manifest)
    # Missing components default to 0, matching a fresh manifest.
    parts = [str(info.get(key, 0)) for key in ("major", "minor", "patch")]
    ext = info.get("ext")
    if ext:
        parts.append(str(ext))
    return ".".join(parts)
from .rest import Rest
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from taipy.config import Config

from .app import create_app as _create_app


class Rest:
    """Runnable REST server exposing Taipy Core functionalities as REST APIs."""

    def __init__(self):
        """Create and configure the underlying Flask application.

        Three values from the global configuration drive the setup:

        - Config.global_config.testing (bool): Run the application in testing mode.
        - Config.global_config.env (Optional[str]): The application environment.
        - Config.global_config.secret_key (Optional[str]): Application server secret key.

        Editing these parameters is only recommended for advanced users; the
        defaults cover standard needs without any required configuration.
        """
        testing_mode = Config.global_config.testing or False
        environment = Config.global_config.env
        secret = Config.global_config.secret_key
        self._app = _create_app(testing_mode, environment, secret)

    def run(self, **kwargs):
        """Start the REST API server. This method is blocking.

        Parameters:
            **kwargs: Options forwarded to the underlying application server.
        """
        self._app.run(**kwargs)
"""# Taipy Rest The Taipy Rest package exposes the Runnable `Rest^` service to provide REST APIs on top of Taipy Core. (more details on Taipy Core functionalities in the [user manual](../../../manuals/core/)). Once the `Rest^` service runs, users can call REST APIs to create, read, update, submit and remove Taipy entities (including cycles, scenarios, sequences, tasks, jobs, and data nodes). It is handy when it comes to integrating a Taipy application in a more complex IT ecosystem. Please refer to [REST API](../../reference_rest/) page to get the exhaustive list of available APIs.""" from ._init import * from .version import _get_version __version__ = _get_version()
"""Extensions registry All extensions here are used as singletons and initialized in application factory """ from .commons.apispec import APISpecExt apispec = APISpecExt()
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import json
import os

from setuptools import find_namespace_packages, find_packages, setup

# The long description shown on PyPI comes straight from the README.
with open("README.md") as readme_file:
    readme = readme_file.read()

# The package version is assembled from the bundled version.json manifest,
# with an optional ".<ext>" suffix (e.g. a pre-release tag).
with open(f"src{os.sep}taipy{os.sep}rest{os.sep}version.json") as version_file:
    version = json.load(version_file)
    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
    if vext := version.get("ext"):
        version_string = f"{version_string}.{vext}"

setup(
    author="Avaiga",
    name="taipy-rest",
    keywords="taipy-rest",
    python_requires=">=3.8",
    version=version_string,
    author_email="dev@taipy.io",
    # Namespace packages under src/ plus the explicit taipy.rest packages.
    packages=find_namespace_packages(where="src") + find_packages(include=["taipy", "taipy.rest"]),
    package_dir={"": "src"},
    include_package_data=True,
    long_description=readme,
    long_description_content_type="text/markdown",
    description="Library to expose taipy-core REST APIs.",
    license="Apache License 2.0",
    classifiers=[
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
    ],
    install_requires=[
        "flask>=3.0.0,<3.1",
        "flask-restful>=0.3.9,<0.4",
        "passlib>=1.7.4,<1.8",
        "marshmallow>=3.20.1,<3.30",
        "apispec[yaml]>=6.3,<7.0",
        "apispec-webframeworks>=0.5.2,<0.6",
        "taipy-core@git+https://git@github.com/Avaiga/taipy-core.git@develop",
    ],
)
import os

from flask import Flask

from . import api
from .commons.encoder import _CustomEncoder
from .extensions import apispec


def create_app(testing=False, flask_env=None, secret_key=None):
    """Application factory, used to create application.

    Parameters:
        testing: Run the app in testing mode (overridden by the TESTING env var).
        flask_env: The Flask environment name (overridden by FLASK_ENV).
        secret_key: The server secret key (overridden by SECRET_KEY).

    Returns:
        The configured Flask application.
    """
    app = Flask(__name__)
    # Environment variables take precedence over the arguments.
    # NOTE(review): os.getenv returns a *string* when the variable is set, so
    # TESTING="False" in the environment is still truthy — confirm this is intended.
    app.config.update(
        ENV=os.getenv("FLASK_ENV", flask_env),
        TESTING=os.getenv("TESTING", testing),
        SECRET_KEY=os.getenv("SECRET_KEY", secret_key),
    )
    # Treat "/path" and "/path/" as the same route.
    app.url_map.strict_slashes = False
    # flask-restful serializes responses with the custom Enum/datetime-aware encoder.
    app.config["RESTFUL_JSON"] = {"cls": _CustomEncoder}

    configure_apispec(app)
    register_blueprints(app)
    # register_views() relies on current_app, so an application context is required.
    with app.app_context():
        api.views.register_views()

    return app


def configure_apispec(app):
    """Configure APISpec for swagger support"""
    apispec.init_app(app)
    # Shared schema for paginated endpoints (see commons.pagination.paginate()).
    apispec.spec.components.schema(
        "PaginatedResult",
        {
            "properties": {
                "total": {"type": "integer"},
                "pages": {"type": "integer"},
                "next": {"type": "string"},
                "prev": {"type": "string"},
            }
        },
    )


def register_blueprints(app):
    """Register all blueprints for application"""
    app.register_blueprint(api.views.blueprint)
from taipy.core.cycle._cycle_converter import _CycleConverter
from taipy.core.data._data_converter import _DataNodeConverter
from taipy.core.scenario._scenario_converter import _ScenarioConverter
from taipy.core.sequence._sequence_converter import _SequenceConverter
from taipy.core.task._task_converter import _TaskConverter

# Maps a repository name to the converter turning the matching entity into its
# storable model representation.
entity_to_models = {
    "scenario": _ScenarioConverter._entity_to_model,
    "sequence": _SequenceConverter._entity_to_model,
    "task": _TaskConverter._entity_to_model,
    "data": _DataNodeConverter._entity_to_model,
    "cycle": _CycleConverter._entity_to_model,
}


def _to_model(repository, entity, **kwargs):
    """Convert *entity* to its model using the converter registered for *repository*.

    Raises KeyError for an unknown repository name. Extra keyword arguments are
    accepted for interface compatibility but ignored.
    """
    return entity_to_models[repository](entity)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from apispec import APISpec
from apispec.exceptions import APISpecError
from apispec.ext.marshmallow import MarshmallowPlugin
from apispec_webframeworks.flask import FlaskPlugin
from flask import Blueprint, jsonify, render_template


class FlaskRestfulPlugin(FlaskPlugin):
    """Small plugin override to handle flask-restful resources"""

    @staticmethod
    def _rule_for_view(view, app=None):
        """Find the URL rule registered for *view*, unwrapping flask-restful view classes."""
        view_funcs = app.view_functions
        endpoint = None
        for ept, view_func in view_funcs.items():
            # flask-restful registers a wrapper; compare against the underlying class.
            if hasattr(view_func, "view_class"):
                view_func = view_func.view_class
            if view_func == view:
                endpoint = ept
        if not endpoint:
            raise APISpecError("Could not find endpoint for view {0}".format(view))

        # WARNING: Assume 1 rule per view function for now
        rule = app.url_map._rules_by_endpoint[endpoint][0]
        return rule


class APISpecExt:
    """Very simple and small extension to use apispec with this API as a flask extension"""

    def __init__(self, app=None, **kwargs):
        self.spec = None

        if app is not None:
            self.init_app(app, **kwargs)

    def init_app(self, app, **kwargs):
        """Build the APISpec and register the swagger/redoc blueprint on *app*."""
        # Defaults can be overridden via the Flask config before init_app() runs.
        app.config.setdefault("APISPEC_TITLE", "Taipy Rest")
        app.config.setdefault("APISPEC_VERSION", "1.0.0")
        app.config.setdefault("OPENAPI_VERSION", "3.0.2")
        app.config.setdefault("SWAGGER_JSON_URL", "/swagger.json")
        app.config.setdefault("SWAGGER_UI_URL", "/swagger-ui")
        app.config.setdefault("OPENAPI_YAML_URL", "/openapi.yaml")
        app.config.setdefault("REDOC_UI_URL", "/redoc-ui")
        app.config.setdefault("SWAGGER_URL_PREFIX", None)

        self.spec = APISpec(
            title=app.config["APISPEC_TITLE"],
            version=app.config["APISPEC_VERSION"],
            openapi_version=app.config["OPENAPI_VERSION"],
            plugins=[MarshmallowPlugin(), FlaskRestfulPlugin()],
            **kwargs
        )

        blueprint = Blueprint(
            "swagger",
            __name__,
            template_folder="./templates",
            url_prefix=app.config["SWAGGER_URL_PREFIX"],
        )

        blueprint.add_url_rule(app.config["SWAGGER_JSON_URL"], "swagger_json", self.swagger_json)
        blueprint.add_url_rule(app.config["SWAGGER_UI_URL"], "swagger_ui", self.swagger_ui)
        blueprint.add_url_rule(app.config["OPENAPI_YAML_URL"], "openapi_yaml", self.openapi_yaml)
        blueprint.add_url_rule(app.config["REDOC_UI_URL"], "redoc_ui", self.redoc_ui)

        app.register_blueprint(blueprint)

    def swagger_json(self):
        # Serve the spec as JSON for Swagger UI.
        return jsonify(self.spec.to_dict())

    def swagger_ui(self):
        return render_template("swagger.j2")

    def openapi_yaml(self):
        # Manually inject ReDoc's Authentication legend, then remove it
        self.spec.tag(
            {
                "name": "authentication",
                "x-displayName": "Authentication",
                "description": "<SecurityDefinitions />",
            }
        )
        redoc_spec = self.spec.to_yaml()
        self.spec._tags.pop(0)
        return redoc_spec

    def redoc_ui(self):
        return render_template("redoc.j2")
import json
from datetime import datetime
from enum import Enum
from typing import Any, Union

# A JSON-representable value, as produced by the encoder below.
Json = Union[dict, list, str, int, float, bool, None]


class _CustomEncoder(json.JSONEncoder):
    """JSON encoder that additionally understands Enum members and datetimes."""

    def default(self, o: Any) -> Json:
        # Enum members serialize as their underlying value.
        if isinstance(o, Enum):
            return o.value
        # Datetimes are wrapped in a tagged dict so they can be revived on decode.
        if isinstance(o, datetime):
            return {"__type__": "Datetime", "__value__": o.isoformat()}
        # Anything else falls through to the base class, which raises TypeError.
        return json.JSONEncoder.default(self, o)
"""Simple helper to paginate query """ from flask import request, url_for DEFAULT_PAGE_SIZE = 50 DEFAULT_PAGE_NUMBER = 1 def extract_pagination(page=None, per_page=None, **request_args): page = int(page) if page is not None else DEFAULT_PAGE_NUMBER per_page = int(per_page) if per_page is not None else DEFAULT_PAGE_SIZE return page, per_page, request_args def paginate(query, schema): page, per_page, other_request_args = extract_pagination(**request.args) page_obj = query.paginate(page=page, per_page=per_page) next_ = url_for( request.endpoint, page=page_obj.next_num if page_obj.has_next else page_obj.page, per_page=per_page, **other_request_args, **request.view_args ) prev = url_for( request.endpoint, page=page_obj.prev_num if page_obj.has_prev else page_obj.page, per_page=per_page, **other_request_args, **request.view_args ) return { "total": page_obj.total, "pages": page_obj.pages, "next": next_, "prev": prev, "results": schema.dump(page_obj.items), }
from . import error_handler, views

# Public submodules of the api package.
__all__ = ["views", "error_handler"]
from flask import jsonify
from marshmallow import ValidationError

from taipy.core.exceptions.exceptions import (
    NonExistingCycle,
    NonExistingDataNode,
    NonExistingDataNodeConfig,
    NonExistingJob,
    NonExistingScenario,
    NonExistingScenarioConfig,
    NonExistingSequence,
    NonExistingSequenceConfig,
    NonExistingTask,
    NonExistingTaskConfig,
)

from .exceptions.exceptions import ConfigIdMissingException, ScenarioIdMissingException, SequenceNameMissingException
from .views import blueprint


def _create_404(e):
    # Shared 404 payload for all "non existing entity" errors.
    return {"message": e.message}, 404


@blueprint.errorhandler(ValidationError)
def handle_marshmallow_error(e):
    """Return json error for marshmallow validation errors.

    This will avoid having to try/catch ValidationErrors in all endpoints, returning
    correct JSON response with associated HTTP 400 Status
    (https://tools.ietf.org/html/rfc7231#section-6.5.1)
    """
    return jsonify(e.messages), 400


# Missing-identifier errors map to HTTP 400 (bad request).
@blueprint.errorhandler(ConfigIdMissingException)
def handle_config_id_missing_exception(e):
    return jsonify({"message": e.message}), 400


@blueprint.errorhandler(ScenarioIdMissingException)
def handle_scenario_id_missing_exception(e):
    return jsonify({"message": e.message}), 400


@blueprint.errorhandler(SequenceNameMissingException)
def handle_sequence_name_missing_exception(e):
    return jsonify({"message": e.message}), 400


# Non-existing entity/config errors map to HTTP 404 (not found).
@blueprint.errorhandler(NonExistingDataNode)
def handle_data_node_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingDataNodeConfig)
def handle_data_node_config_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingCycle)
def handle_cycle_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingJob)
def handle_job_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingSequence)
def handle_sequence_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingSequenceConfig)
def handle_sequence_config_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingScenario)
def handle_scenario_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingScenarioConfig)
def handle_scenario_config_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingTask)
def handle_task_not_found(e):
    return _create_404(e)


@blueprint.errorhandler(NonExistingTaskConfig)
def handle_task_config_not_found(e):
    return _create_404(e)
from flask import Blueprint, current_app
from flask_restful import Api

from taipy.core.common._utils import _load_fct
from taipy.logger._taipy_logger import _TaipyLogger

from ..extensions import apispec
from .middlewares._middleware import _using_enterprise
from .resources import (
    CycleList,
    CycleResource,
    DataNodeList,
    DataNodeReader,
    DataNodeResource,
    DataNodeWriter,
    JobExecutor,
    JobList,
    JobResource,
    ScenarioExecutor,
    ScenarioList,
    ScenarioResource,
    SequenceExecutor,
    SequenceList,
    SequenceResource,
    TaskExecutor,
    TaskList,
    TaskResource,
)
from .schemas import CycleSchema, DataNodeSchema, JobSchema, ScenarioSchema, SequenceSchema, TaskSchema

# Shared logger injected into every resource via resource_class_kwargs.
_logger = _TaipyLogger._get_logger()

# All REST endpoints live under the /api/v1 prefix.
blueprint = Blueprint("api", __name__, url_prefix="/api/v1")
api = Api(blueprint)

# --- Data node endpoints ---
api.add_resource(
    DataNodeResource,
    "/datanodes/<string:datanode_id>/",
    endpoint="datanode_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    DataNodeReader,
    "/datanodes/<string:datanode_id>/read/",
    endpoint="datanode_reader",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    DataNodeWriter,
    "/datanodes/<string:datanode_id>/write/",
    endpoint="datanode_writer",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    DataNodeList,
    "/datanodes/",
    endpoint="datanodes",
    resource_class_kwargs={"logger": _logger},
)

# --- Task endpoints ---
api.add_resource(
    TaskResource,
    "/tasks/<string:task_id>/",
    endpoint="task_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(TaskList, "/tasks/", endpoint="tasks", resource_class_kwargs={"logger": _logger})
api.add_resource(
    TaskExecutor,
    "/tasks/submit/<string:task_id>/",
    endpoint="task_submit",
    resource_class_kwargs={"logger": _logger},
)

# --- Sequence endpoints ---
api.add_resource(
    SequenceResource,
    "/sequences/<string:sequence_id>/",
    endpoint="sequence_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    SequenceList,
    "/sequences/",
    endpoint="sequences",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    SequenceExecutor,
    "/sequences/submit/<string:sequence_id>/",
    endpoint="sequence_submit",
    resource_class_kwargs={"logger": _logger},
)

# --- Scenario endpoints ---
api.add_resource(
    ScenarioResource,
    "/scenarios/<string:scenario_id>/",
    endpoint="scenario_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    ScenarioList,
    "/scenarios/",
    endpoint="scenarios",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    ScenarioExecutor,
    "/scenarios/submit/<string:scenario_id>/",
    endpoint="scenario_submit",
    resource_class_kwargs={"logger": _logger},
)

# --- Cycle endpoints ---
api.add_resource(
    CycleResource,
    "/cycles/<string:cycle_id>/",
    endpoint="cycle_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(
    CycleList,
    "/cycles/",
    endpoint="cycles",
    resource_class_kwargs={"logger": _logger},
)

# --- Job endpoints ---
api.add_resource(
    JobResource,
    "/jobs/<string:job_id>/",
    endpoint="job_by_id",
    resource_class_kwargs={"logger": _logger},
)
api.add_resource(JobList, "/jobs/", endpoint="jobs", resource_class_kwargs={"logger": _logger})
api.add_resource(
    JobExecutor,
    "/jobs/cancel/<string:job_id>/",
    endpoint="job_cancel",
    resource_class_kwargs={"logger": _logger},
)


def load_enterprise_resources(api: Api):
    """
    Load enterprise resources.
    """
    # No-op in the community edition.
    if not _using_enterprise():
        return
    load_resources = _load_fct("taipy.enterprise.rest.api.views", "_load_resources")
    load_resources(api)


load_enterprise_resources(api)


def register_views():
    """Register every schema and resource path on the shared APISpec.

    Must run inside an application context (uses current_app).
    """
    apispec.spec.components.schema("DataNodeSchema", schema=DataNodeSchema)
    apispec.spec.path(view=DataNodeResource, app=current_app)
    apispec.spec.path(view=DataNodeList, app=current_app)
    apispec.spec.path(view=DataNodeReader, app=current_app)
    apispec.spec.path(view=DataNodeWriter, app=current_app)

    apispec.spec.components.schema("TaskSchema", schema=TaskSchema)
    apispec.spec.path(view=TaskResource, app=current_app)
    apispec.spec.path(view=TaskList, app=current_app)
    apispec.spec.path(view=TaskExecutor, app=current_app)

    apispec.spec.components.schema("SequenceSchema", schema=SequenceSchema)
    apispec.spec.path(view=SequenceResource, app=current_app)
    apispec.spec.path(view=SequenceList, app=current_app)
    apispec.spec.path(view=SequenceExecutor, app=current_app)

    apispec.spec.components.schema("ScenarioSchema", schema=ScenarioSchema)
    apispec.spec.path(view=ScenarioResource, app=current_app)
    apispec.spec.path(view=ScenarioList, app=current_app)
    apispec.spec.path(view=ScenarioExecutor, app=current_app)

    apispec.spec.components.schema("CycleSchema", schema=CycleSchema)
    apispec.spec.path(view=CycleResource, app=current_app)
    apispec.spec.path(view=CycleList, app=current_app)

    apispec.spec.components.schema("JobSchema", schema=JobSchema)
    apispec.spec.path(view=JobResource, app=current_app)
    apispec.spec.path(view=JobList, app=current_app)
    apispec.spec.path(view=JobExecutor, app=current_app)

    # Catch-all schema used where endpoints accept or return arbitrary values.
    apispec.spec.components.schema(
        "Any",
        {
            "description": "Any value",
            "nullable": True,
        },
    )

    if _using_enterprise():
        _register_views = _load_fct("taipy.enterprise.rest.api.views", "_register_views")
        _register_views(apispec)
from datetime import datetime

from flask import request
from flask_restful import Resource

from taipy.config.common.frequency import Frequency
from taipy.core import Cycle
from taipy.core.cycle._cycle_manager_factory import _CycleManagerFactory
from taipy.core.exceptions.exceptions import NonExistingCycle

from ...commons.to_from_model import _to_model
from ..middlewares._middleware import _middleware
from ..schemas import CycleResponseSchema, CycleSchema

# Repository name used by _to_model to pick the right converter.
REPOSITORY = "cycle"


def _get_or_raise(cycle_id: str) -> Cycle:
    """Return the `Cycle^` identified by *cycle_id*.

    Raises:
        NonExistingCycle: If no cycle with this identifier exists.
    """
    # Fixed: the original annotated this helper as "-> None" although it
    # always returns the cycle on success.
    manager = _CycleManagerFactory._build_manager()
    cycle = manager._get(cycle_id)
    if not cycle:
        raise NonExistingCycle(cycle_id)
    return cycle


class CycleResource(Resource):
    """Single object resource

    ---
    get:
      tags:
        - api
      description: |
        Returns a `CycleSchema^` representing the unique `Cycle^` identified by the *cycle_id*
        given as parameter. If no cycle corresponds to *cycle_id*, a `404` error is returned.

        !!! Example

            === "Curl"
                ```shell
                curl -X GET http://localhost:5000/api/v1/cycles/CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a
                ```
                In this example the REST API is served on port 5000 on localhost. We are using curl command
                line client.

                `CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a` is the value of the *cycle_id* parameter. It
                represents the identifier of the Cycle we want to retrieve.

                In case of success here is an example of the response:
                ``` JSON
                {"cycle": {
                    "frequency": "Frequency.DAILY",
                    "creation_date": "2022-08-04T17:13:32.797384",
                    "id": "CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a",
                    "start_date": "2022-08-04T00:00:00",
                    "end_date": "2022-08-04T23:59:59.999999",
                    "name": "Frequency.DAILY_2022-08-04T17:13:32.797384"
                ```

                In case of failure here is an example of the response:
                ``` JSON
                {"message": "Cycle CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a not found."}
                ```

            === "Python"
                This Python example requires the 'requests' package to be installed (`pip install requests`).
                ```python
                import requests

                response = requests.get("http://localhost:5000/api/v1/cycles/CYCLE_223894_e019-b50b-4b9f-ac09-527a")
                print(response)
                print(response.json())
                ```
                `CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a` is the value of the *cycle_id* parameter. It
                represents the identifier of the Cycle we want to retrieve.

                In case of success here is an output example:
                ```
                <Response [200]>
                {'cycle': {
                    'frequency': 'Frequency.DAILY',
                    'creation_date': '2022-08-04T17:13:32.797384',
                    'id': 'CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a',
                    'start_date': '2022-08-04T00:00:00',
                    'end_date': '2022-08-04T23:59:59.999999',
                    'name': 'Frequency.DAILY_2022-08-04T17:13:32.797384'
                ```

                In case of failure here is an output example:
                ```
                <Response [404]>
                {'message': 'Cycle CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a not found.'}
                ```

        !!! Note
            When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint
            requires the `TAIPY_READER` role.

      parameters:
        - in: path
          name: cycle_id
          schema:
            type: string
          description: The identifier of the cycle to retrieve.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  cycle: CycleSchema
        404:
          description: No cycle has the *cycle_id* identifier.
    delete:
      tags:
        - api
      description: |
        Deletes the `Cycle^` identified by the *cycle_id* given as parameter. If the cycle does not exist,
        a 404 error is returned.

        !!! Example

            === "Curl"
                ```shell
                curl -X DELETE http://localhost:5000/api/v1/cycles/CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a
                ```
                In this example the REST API is served on port 5000 on localhost. We are using curl command
                line client.

                `CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a` is the value of the *cycle_id* parameter. It
                represents the identifier of the Cycle we want to delete.

                In case of success here is an example of the response:
                ``` JSON
                {"message": "Cycle CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a was deleted."}
                ```

                In case of failure here is an example of the response:
                ``` JSON
                {"message": "Cycle CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a not found."}
                ```

            === "Python"
                This Python example requires the 'requests' package to be installed (`pip install requests`).
                ```python
                import requests

                response = requests.delete("http://localhost:5000/api/v1/cycles/CYCLE_794_ef21-af91-4f41-b6e8-7648eda")
                print(response)
                print(response.json())
                ```
                `CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a` is the value of the *cycle_id* parameter. It
                represents the identifier of the Cycle we want to delete.

                In case of success here is an output example:
                ```
                <Response [200]>
                {"message": "Cycle CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a was deleted."}
                ```

                In case of failure here is an output example:
                ```
                <Response [404]>
                {'message': 'Cycle CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a not found.'}
                ```

        !!! Note
            When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint
            requires the `TAIPY_EDITOR` role.

      parameters:
        - in: path
          name: cycle_id
          schema:
            type: string
          description: The id of the cycle to delete.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
        404:
          description: No cycle has the *cycle_id* identifier.
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def get(self, cycle_id):
        """Return the serialized cycle identified by *cycle_id* (404 if absent)."""
        schema = CycleResponseSchema()
        cycle = _get_or_raise(cycle_id)
        return {"cycle": schema.dump(_to_model(REPOSITORY, cycle))}

    @_middleware
    def delete(self, cycle_id):
        """Delete the cycle identified by *cycle_id* (404 if absent)."""
        manager = _CycleManagerFactory._build_manager()
        # Raise a 404-mapped exception before attempting deletion.
        _get_or_raise(cycle_id)
        manager._delete(cycle_id)
        return {"message": f"Cycle {cycle_id} was deleted."}


class CycleList(Resource):
    """Creation and get_all

    ---
    get:
      tags:
        - api
      description: |
        Returns a `CycleSchema^` list representing all existing Cycles.

        !!! Example

            === "Curl"
                ```shell
                curl -X GET http://localhost:5000/api/v1/cycles
                ```
                In this example the REST API is served on port 5000 on localhost. We are using curl command
                line client.

                Here is an example of the response:
                ``` JSON
                [
                    {
                        "frequency": "Frequency.DAILY",
                        "end_date": "2022-08-06T23:59:59.999999",
                        "creation_date": "2022-08-06T15:45:50.223894",
                        "start_date": "2022-08-06T00:00:00",
                        "id": "CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a",
                        "name": "Frequency.DAILY_2022-08-06T15:45:50.223894"
                    }
                ]
                ```

                If there is no cycle, the response is an empty list as follows:
                ``` JSON
                []
                ```

            === "Python"
                This Python example requires the 'requests' package to be installed (`pip install requests`).
                ```python
                import requests

                response = requests.get("http://localhost:5000/api/v1/cycles")
                print(response)
                print(response.json())
                ```
                In case of success here is an output example:
                ```
                <Response [200]>
                [{
                    "frequency": "Frequency.DAILY",
                    "end_date": "2022-08-06T23:59:59.999999",
                    "creation_date": "2022-08-06T15:45:50.223894",
                    "start_date": "2022-08-06T00:00:00",
                    "id": "CYCLE_223894_e0fab919-b50b-4b9f-ac09-52f77474fa7a",
                    "name": "Frequency.DAILY_2022-08-06T15:45:50.223894"
                    }
                ]
                ```

                If there is no cycle, the response is an empty list as follows:
                ```
                <Response [200]>
                []
                ```

        !!! Note
            When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint
            requires the `TAIPY_READER` role.

      responses:
        200:
          content:
            application/json:
              schema:
                allOf:
                  - type: object
                    properties:
                      results:
                        type: array
                        items:
                          $ref: '#/components/schemas/CycleSchema'
    post:
      tags:
        - api
      description: |
        Creates a new cycle from the `CycleSchema^` given in the request body.

        !!! Example

            === "Curl"
                ```shell
                curl -X POST -H "Content-Type: application/json"\
                -d '{"frequency": "DAILY", "properties": {}, "creation_date": "2020-01-01T00:00:00",\
                "start_date": "2020-01-01T00:00:00", "end_date": "2020-01-01T00:00:00"}'\
                http://localhost:5000/api/v1/cycles
                ```
                In this example the REST API is served on port 5000 on localhost. We are using curl command
                line client.

                In the curl command line, a `CycleSchema^` is provided as JSON dictionary parameter with the
                curl option -d (--data) to specify the various attributes of the `Cycle^` to create:
                ``` JSON
                {
                    "frequency": "DAILY",
                    "properties": {},
                    "creation_date": "2020-01-01T00:00:00",
                    "start_date": "2020-01-01T00:00:00",
                    "end_date": "2020-01-01T00:00:00"
                }
                ```

            === "Python"
                This Python example requires the 'requests' package to be installed (`pip install requests`).
                ```python
                import requests

                cycle_schema = {
                    "frequency": "DAILY",
                    "properties": {},
                    "creation_date": "2020-01-01T00:00:00",
                    "start_date": "2020-01-01T00:00:00",
                    "end_date": "2020-01-01T00:00:00"
                }
                response = requests.post("http://localhost:5000/api/v1/cycles", json=cycle_schema)
                print(response)
                print(response.json())
                ```
                A `CycleSchema^` is provided as a dictionary to specify the various attributes of the
                `Cycle^` to create.

                Here is the output example:
                ```
                <Response [201]>
                {
                    'message': 'Cycle was created.',
                    'cycle': {
                        'frequency': 'Frequency.DAILY',
                        'end_date': '2020-01-01T00:00:00',
                        'creation_date': '2020-01-01T00:00:00',
                        'start_date': '2020-01-01T00:00:00',
                        'id': 'CYCLE_c9cc527f-a8c8-4238-8f31-42166a9817db',
                        'name': 'Frequency.DAILY_2020-01-01T00:00:00',
                        'properties': {}}}
                ```

        !!! Note
            When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint
            requires the `TAIPY_EDITOR` role.

      requestBody:
        required: true
        content:
          application/json:
            schema:
              CycleSchema
      responses:
        201:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
                  cycle: CycleSchema
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def get(self):
        """Return every existing cycle as a serialized list."""
        schema = CycleResponseSchema(many=True)
        manager = _CycleManagerFactory._build_manager()
        cycles = [_to_model(REPOSITORY, cycle) for cycle in manager._get_all()]
        return schema.dump(cycles)

    @_middleware
    def post(self):
        """Create and persist a cycle built from the JSON request body."""
        schema = CycleResponseSchema()
        manager = _CycleManagerFactory._build_manager()
        cycle = self.__create_cycle_from_schema(schema.load(request.json))
        manager._set(cycle)
        return {
            "message": "Cycle was created.",
            "cycle": schema.dump(_to_model(REPOSITORY, cycle)),
        }, 201

    def __create_cycle_from_schema(self, cycle_schema: CycleSchema) -> Cycle:
        """Build a `Cycle^` entity from a loaded schema dictionary.

        Raises AttributeError on an unknown/missing frequency and TypeError on
        missing date fields, exactly as the schema contract implies.
        """
        # getattr already yields the Frequency member; no need to re-wrap it
        # in Frequency(...) as the original code did.
        return Cycle(
            id=cycle_schema.get("id"),
            frequency=getattr(Frequency, cycle_schema.get("frequency", "").upper()),
            properties=cycle_schema.get("properties", {}),
            creation_date=datetime.fromisoformat(cycle_schema.get("creation_date")),
            start_date=datetime.fromisoformat(cycle_schema.get("start_date")),
            end_date=datetime.fromisoformat(cycle_schema.get("end_date")),
        )
from flask import request
from flask_restful import Resource

from taipy.config.config import Config
from taipy.core.exceptions.exceptions import NonExistingTask, NonExistingTaskConfig
from taipy.core.task._task_manager_factory import _TaskManagerFactory

from ...commons.to_from_model import _to_model
from ..exceptions.exceptions import ConfigIdMissingException
from ..middlewares._middleware import _middleware
from ..schemas import TaskSchema


def _get_or_raise(task_id: str):
    """Return the task identified by *task_id* or raise `NonExistingTask`."""
    manager = _TaskManagerFactory._build_manager()
    task = manager._get(task_id)
    if task is None:
        raise NonExistingTask(task_id)
    return task


# Repository name used by _to_model to pick the right converter.
REPOSITORY = "task"


class TaskResource(Resource):
    """Single object resource

    ---
    get:
      tags:
        - api
      summary: Get a task.
      description: |
        Return a single task by *task_id*. If the task does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          this endpoint requires `TAIPY_READER` role.

        Code example:

        ```shell
        curl -X GET http://localhost:5000/api/v1/tasks/TASK_my_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: task_id
          schema:
            type: string
          description: The identifier of the task.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  task: TaskSchema
        404:
          description: No task has the *task_id* identifier.
    delete:
      tags:
        - api
      summary: Delete a task.
      description: |
        Delete a task. If the task does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          this endpoint requires `TAIPY_EDITOR` role.

        Code example:

        ```shell
        curl -X DELETE http://localhost:5000/api/v1/tasks/TASK_my_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: task_id
          schema:
            type: string
          description: The identifier of the task.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
        404:
          description: No task has the *task_id* identifier.
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def get(self, task_id):
        """Return the serialized task identified by *task_id* (404 if absent)."""
        schema = TaskSchema()
        task = _get_or_raise(task_id)
        return {"task": schema.dump(_to_model(REPOSITORY, task))}

    @_middleware
    def delete(self, task_id):
        """Delete the task identified by *task_id* (404 if absent)."""
        manager = _TaskManagerFactory._build_manager()
        # Raise a 404-mapped exception before attempting deletion.
        _get_or_raise(task_id)
        manager._delete(task_id)
        return {"message": f"Task {task_id} was deleted."}


class TaskList(Resource):
    """Creation and get_all

    ---
    get:
      tags:
        - api
      summary: Get all tasks.
      description: |
        Return an array of all tasks.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          this endpoint requires `TAIPY_READER` role.

        Code example:

        ```shell
        curl -X GET http://localhost:5000/api/v1/tasks
        ```

      responses:
        200:
          content:
            application/json:
              schema:
                allOf:
                  - type: object
                    properties:
                      results:
                        type: array
                        items:
                          $ref: '#/components/schemas/TaskSchema'
    post:
      tags:
        - api
      summary: Create a task.
      description: |
        Create a new task from its *config_id*. If the config does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          this endpoint requires `TAIPY_EDITOR` role.

        Code example:

        ```shell
        curl -X POST http://localhost:5000/api/v1/tasks?config_id=my_task_config
        ```

      parameters:
        - in: query
          name: config_id
          schema:
            type: string
          description: The identifier of the task configuration.
      responses:
        201:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
                  task: TaskSchema
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    def fetch_config(self, config_id):
        """Return the task config named *config_id* or raise `NonExistingTaskConfig`."""
        config = Config.tasks.get(config_id)
        if not config:
            raise NonExistingTaskConfig(config_id)
        return config

    @_middleware
    def get(self):
        """Return every existing task as a serialized list."""
        schema = TaskSchema(many=True)
        manager = _TaskManagerFactory._build_manager()
        tasks = [_to_model(REPOSITORY, task) for task in manager._get_all()]
        return schema.dump(tasks)

    @_middleware
    def post(self):
        """Create (or retrieve) the task for the ``config_id`` query parameter."""
        # Validate the query parameter first (consistent with JobList.post),
        # instead of building the schema/manager before the check.
        config_id = request.args.get("config_id")
        if not config_id:
            raise ConfigIdMissingException
        schema = TaskSchema()
        manager = _TaskManagerFactory._build_manager()
        config = self.fetch_config(config_id)
        task = manager._bulk_get_or_create([config])[0]
        return {
            "message": "Task was created.",
            "task": schema.dump(_to_model(REPOSITORY, task)),
        }, 201


class TaskExecutor(Resource):
    """Execute a task

    ---
    post:
      tags:
        - api
      summary: Execute a task.
      description: |
        Execute a task by *task_id*. If the task does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          this endpoint requires `TAIPY_EXECUTOR` role.

        Code example:

        ```shell
        curl -X POST http://localhost:5000/api/v1/tasks/submit/TASK_my_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: task_id
          schema:
            type: string
      responses:
        204:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
                  task: TaskSchema
        404:
          description: No task has the *task_id* identifier.
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def post(self, task_id):
        """Submit the task identified by *task_id* to the orchestrator (404 if absent)."""
        manager = _TaskManagerFactory._build_manager()
        task = _get_or_raise(task_id)
        manager._orchestrator().submit_task(task)
        # NOTE(review): the docstring advertises a 204 response, but Flask-RESTful
        # returns this dict with a 200 status — confirm which is intended.
        return {"message": f"Task {task_id} was submitted."}
import uuid

from flask import request
from flask_restful import Resource

from taipy.config.config import Config
from taipy.core import Job, JobId
from taipy.core.exceptions.exceptions import NonExistingJob, NonExistingTaskConfig
from taipy.core.job._job_manager_factory import _JobManagerFactory
from taipy.core.task._task_manager_factory import _TaskManagerFactory

from ..exceptions.exceptions import ConfigIdMissingException
from ..middlewares._middleware import _middleware
from ..schemas import JobSchema


def _get_or_raise(job_id: str):
    """Return the job identified by *job_id* or raise `NonExistingJob`."""
    manager = _JobManagerFactory._build_manager()
    job = manager._get(job_id)
    if job is None:
        raise NonExistingJob(job_id)
    return job


class JobResource(Resource):
    """Single object resource

    ---
    get:
      tags:
        - api
      summary: Get a job.
      description: |
        Return a single job by *job_id*. If the job does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          the endpoint requires `TAIPY_READER` role.

        Code example:

        ```shell
        curl -X GET http://localhost:5000/api/v1/jobs/JOB_my_task_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: job_id
          schema:
            type: string
          description: The identifier of the job.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  job: JobSchema
        404:
          description: No job has the *job_id* identifier.
    delete:
      tags:
        - api
      summary: Delete a job.
      description: |
        Delete a job. If the job does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          the endpoint requires `TAIPY_EDITOR` role.

        Code example:

        ```shell
        curl -X DELETE http://localhost:5000/api/v1/jobs/JOB_my_task_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: job_id
          schema:
            type: string
          description: The identifier of the job.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
        404:
          description: No job has the *job_id* identifier.
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def get(self, job_id):
        """Return the serialized job identified by *job_id* (404 if absent)."""
        schema = JobSchema()
        job = _get_or_raise(job_id)
        return {"job": schema.dump(job)}

    @_middleware
    def delete(self, job_id):
        """Delete the job identified by *job_id* (404 if absent)."""
        manager = _JobManagerFactory._build_manager()
        job = _get_or_raise(job_id)
        # Unlike the other resources, the job manager deletes by entity.
        manager._delete(job)
        return {"message": f"Job {job_id} was deleted."}


class JobList(Resource):
    """Creation and get_all

    ---
    get:
      tags:
        - api
      summary: Get all jobs.
      description: |
        Return an array of all jobs.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          the endpoint requires `TAIPY_READER` role.

        Code example:

        ```shell
        curl -X GET http://localhost:5000/api/v1/jobs
        ```

      responses:
        200:
          content:
            application/json:
              schema:
                allOf:
                  - type: object
                    properties:
                      results:
                        type: array
                        items:
                          $ref: '#/components/schemas/JobSchema'
    post:
      tags:
        - api
      summary: Create a job.
      description: |
        Create a job from a task *config_id*. If the config does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          the endpoint requires `TAIPY_EDITOR` role.

        Code example:

        ```shell
        curl -X POST http://localhost:5000/api/v1/jobs?task_id=TASK_my_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: query
          name: task_id
          schema:
            type: string
          description: The identifier of the task configuration.
      responses:
        201:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
                  job: JobSchema
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    def fetch_config(self, config_id):
        """Return the task config named *config_id* or raise `NonExistingTaskConfig`."""
        config = Config.tasks.get(config_id)
        if not config:
            raise NonExistingTaskConfig(config_id)
        return config

    @_middleware
    def get(self):
        """Return every existing job as a serialized list."""
        schema = JobSchema(many=True)
        manager = _JobManagerFactory._build_manager()
        jobs = manager._get_all()
        return schema.dump(jobs)

    @_middleware
    def post(self):
        """Create a job for the task config named by the ``task_id`` query parameter."""
        task_config_id = request.args.get("task_id")
        if not task_config_id:
            raise ConfigIdMissingException
        manager = _JobManagerFactory._build_manager()
        schema = JobSchema()
        job = self.__create_job_from_schema(task_config_id)
        manager._set(job)
        return {
            "message": "Job was created.",
            "job": schema.dump(job),
        }, 201

    def __create_job_from_schema(self, task_config_id: str) -> Job:
        """Build a `Job^` wrapping the task created from *task_config_id*.

        Fixed: the original annotated the return as Optional[Job] although the
        method can never return None (fetch_config raises on a missing config).
        """
        task_manager = _TaskManagerFactory._build_manager()
        task = task_manager._bulk_get_or_create([self.fetch_config(task_config_id)])[0]
        return Job(
            id=JobId(f"JOB_{uuid.uuid4()}"), task=task, submit_id=f"SUBMISSION_{uuid.uuid4()}", submit_entity_id=task.id
        )


class JobExecutor(Resource):
    """Cancel a job

    ---
    post:
      tags:
        - api
      summary: Cancel a job.
      description: |
        Cancel a job by *job_id*. If the job does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only),
          the endpoint requires `TAIPY_EXECUTOR` role.

        Code example:

        ```shell
        curl -X POST http://localhost:5000/api/v1/jobs/cancel/JOB_my_task_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: job_id
          schema:
            type: string
      responses:
        204:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
                  job: JobSchema
        404:
          description: No job has the *job_id* identifier.
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def post(self, job_id):
        """Cancel the job identified by *job_id* (404 if absent)."""
        manager = _JobManagerFactory._build_manager()
        job = _get_or_raise(job_id)
        manager._cancel(job)
        return {"message": f"Job {job_id} was cancelled."}
from flask import request
from flask_restful import Resource

from taipy.core.exceptions.exceptions import NonExistingScenario, NonExistingSequence
from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory
from taipy.core.sequence._sequence_manager_factory import _SequenceManagerFactory

from ...commons.to_from_model import _to_model
from ..exceptions.exceptions import ScenarioIdMissingException, SequenceNameMissingException
from ..middlewares._middleware import _middleware
from ..schemas import SequenceResponseSchema


def _get_or_raise(sequence_id: str):
    """Return the sequence identified by *sequence_id* or raise `NonExistingSequence`."""
    manager = _SequenceManagerFactory._build_manager()
    sequence = manager._get(sequence_id)
    if sequence is None:
        raise NonExistingSequence(sequence_id)
    return sequence


# Repository name used by _to_model to pick the right converter.
REPOSITORY = "sequence"


class SequenceResource(Resource):
    """Single object resource

    ---
    get:
      tags:
        - api
      summary: Get a sequence.
      description: |
        Return a single sequence by sequence_id. If the sequence does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only), this endpoint
          requires _TAIPY_READER_ role.

        Code example:

        ```shell
        curl -X GET http://localhost:5000/api/v1/sequences/SEQUENCE_my_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: sequence_id
          schema:
            type: string
          description: The identifier of the sequence.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  sequence: SequenceSchema
        404:
          description: No sequence has the *sequence_id* identifier.
    delete:
      tags:
        - api
      summary: Delete a sequence.
      description: |
        Delete a sequence. If the sequence does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only), this endpoint
          requires _TAIPY_EDITOR_ role.

        Code example:

        ```shell
        curl -X DELETE http://localhost:5000/api/v1/sequences/SEQUENCE_my_config_75750ed8-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: sequence_id
          schema:
            type: string
          description: The identifier of the sequence.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
        404:
          description: No sequence has the *sequence_id* identifier.
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def get(self, sequence_id):
        """Return the serialized sequence identified by *sequence_id* (404 if absent)."""
        schema = SequenceResponseSchema()
        sequence = _get_or_raise(sequence_id)
        return {"sequence": schema.dump(_to_model(REPOSITORY, sequence))}

    @_middleware
    def delete(self, sequence_id):
        """Delete the sequence identified by *sequence_id* (404 if absent)."""
        manager = _SequenceManagerFactory._build_manager()
        # Raise a 404-mapped exception before attempting deletion.
        _get_or_raise(sequence_id)
        manager._delete(sequence_id)
        return {"message": f"Sequence {sequence_id} was deleted."}


class SequenceList(Resource):
    """Creation and get_all

    ---
    get:
      tags:
        - api
      summary: Get all sequences.
      description: |
        Return an array of all sequences.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only), this endpoint
          requires _TAIPY_READER_ role.

        Code example:

        ```shell
        curl -X GET http://localhost:5000/api/v1/sequences
        ```

      responses:
        200:
          content:
            application/json:
              schema:
                allOf:
                  - type: object
                    properties:
                      results:
                        type: array
                        items:
                          $ref: '#/components/schemas/SequenceSchema'
    post:
      tags:
        - api
      summary: Create a sequence.
      description: |
        Create a sequence from scenario_id, sequence_name and task_ids. If the scenario_id does not exist
        or sequence_name is not provided, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only), this endpoint
          requires _TAIPY_EDITOR_ role.

        Code example:

        ```shell
        curl -X POST --data '{"scenario_id": "SCENARIO_scenario_id", "sequence_name": "sequence", "task_ids": []}' \\
        http://localhost:5000/api/v1/sequences
        ```

      parameters:
        - in: query
          name: scenario_id
          schema:
            type: string
          description: The Scenario the Sequence belongs to.
        - in: query
          name: sequence_name
          schema:
            type: string
          description: The name of the Sequence.
        - in: query
          name: task_ids
          schema:
            type: list[string]
          description: A list of task id of the Sequence.
      responses:
        201:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
                  sequence: SequenceSchema
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def get(self):
        """Return every existing sequence as a serialized list."""
        schema = SequenceResponseSchema(many=True)
        manager = _SequenceManagerFactory._build_manager()
        sequences = [_to_model(REPOSITORY, sequence) for sequence in manager._get_all()]
        return schema.dump(sequences)

    @_middleware
    def post(self):
        """Create a sequence on an existing scenario from the JSON request body.

        The body must provide "scenario_id", "sequence_name" and, optionally,
        "task_ids" (the former docstring example wrongly used the key "tasks").
        """
        sequence_data = request.json
        scenario_id = sequence_data.get("scenario_id")
        sequence_name = sequence_data.get("sequence_name")
        sequence_task_ids = sequence_data.get("task_ids", [])
        response_schema = SequenceResponseSchema()

        if not scenario_id:
            raise ScenarioIdMissingException
        if not sequence_name:
            raise SequenceNameMissingException

        scenario = _ScenarioManagerFactory._build_manager()._get(scenario_id)
        if not scenario:
            raise NonExistingScenario(scenario_id=scenario_id)

        # The scenario owns its sequences; adding one also persists it.
        scenario.add_sequence(sequence_name, sequence_task_ids)
        sequence = scenario.sequences[sequence_name]

        return {
            "message": "Sequence was created.",
            "sequence": response_schema.dump(_to_model(REPOSITORY, sequence)),
        }, 201


class SequenceExecutor(Resource):
    """Execute a sequence

    ---
    post:
      tags:
        - api
      summary: Execute a sequence.
      description: |
        Execute a sequence from sequence_id. If the sequence does not exist, a 404 error is returned.

        !!! Note
          When the authorization feature is activated (available in the **Enterprise** edition only), This endpoint
          requires _TAIPY_EXECUTOR_ role.

        Code example:

        ```shell
        curl -X POST http://localhost:5000/api/v1/sequences/submit/SEQUENCE_my_config_7575-4e09-4e00-958d-e352ee426cc9
        ```

      parameters:
        - in: path
          name: sequence_id
          schema:
            type: string
      responses:
        204:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
                  sequence: SequenceSchema
        404:
          description: No sequence has the *sequence_id* identifier.
    """

    def __init__(self, **kwargs):
        # An optional logger may be injected by Api.add_resource(resource_class_kwargs=...).
        self.logger = kwargs.get("logger")

    @_middleware
    def post(self, sequence_id):
        """Submit the sequence identified by *sequence_id* for execution (404 if absent)."""
        # Validate existence first so a missing id maps to a 404 before submission.
        _get_or_raise(sequence_id)
        manager = _SequenceManagerFactory._build_manager()
        manager._submit(sequence_id)
        return {"message": f"Sequence {sequence_id} was submitted."}
# Public re-exports of all REST resource classes for the views package.
# __all__ mirrors exactly the names imported below; keep the two in sync.
from .cycle import CycleList, CycleResource
from .datanode import DataNodeList, DataNodeReader, DataNodeResource, DataNodeWriter
from .job import JobExecutor, JobList, JobResource
from .scenario import ScenarioExecutor, ScenarioList, ScenarioResource
from .sequence import SequenceExecutor, SequenceList, SequenceResource
from .task import TaskExecutor, TaskList, TaskResource

__all__ = [
    "DataNodeResource",
    "DataNodeList",
    "DataNodeReader",
    "DataNodeWriter",
    "TaskList",
    "TaskResource",
    "TaskExecutor",
    "SequenceList",
    "SequenceResource",
    "SequenceExecutor",
    "ScenarioList",
    "ScenarioResource",
    "ScenarioExecutor",
    "CycleResource",
    "CycleList",
    "JobResource",
    "JobList",
    "JobExecutor",
]
from typing import List import numpy as np import pandas as pd from flask import request from flask_restful import Resource from taipy.config.config import Config from taipy.core.data._data_manager_factory import _DataManagerFactory from taipy.core.data.operator import Operator from taipy.core.exceptions.exceptions import NonExistingDataNode, NonExistingDataNodeConfig from ...commons.to_from_model import _to_model from ..exceptions.exceptions import ConfigIdMissingException from ..middlewares._middleware import _middleware from ..schemas import ( CSVDataNodeConfigSchema, DataNodeFilterSchema, DataNodeSchema, ExcelDataNodeConfigSchema, GenericDataNodeConfigSchema, InMemoryDataNodeConfigSchema, JSONDataNodeConfigSchema, MongoCollectionDataNodeConfigSchema, PickleDataNodeConfigSchema, SQLDataNodeConfigSchema, SQLTableDataNodeConfigSchema, ) ds_schema_map = { "csv": CSVDataNodeConfigSchema, "pickle": PickleDataNodeConfigSchema, "in_memory": InMemoryDataNodeConfigSchema, "sql_table": SQLTableDataNodeConfigSchema, "sql": SQLDataNodeConfigSchema, "mongo_collection": MongoCollectionDataNodeConfigSchema, "excel": ExcelDataNodeConfigSchema, "generic": GenericDataNodeConfigSchema, "json": JSONDataNodeConfigSchema, } REPOSITORY = "data" def _get_or_raise(data_node_id: str) -> None: manager = _DataManagerFactory._build_manager() data_node = manager._get(data_node_id) if not data_node: raise NonExistingDataNode(data_node_id) return data_node class DataNodeResource(Resource): """Single object resource --- get: tags: - api description: | Returns a `DataNodeSchema^` representing the unique `DataNode^` identified by the *datanode_id* given as parameter. If no data node corresponds to *datanode_id*, a `404` error is returned. !!! Example === "Curl" ```shell curl -X GET http://localhost:5000/api/v1/datanodes/DATANODE_hist_cfg_75750ed8-4e09-4e00-958d -e352ee426cc9 ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. 
`DATANODE_hist_cfg_75750ed8-4e09-4e00-958d-e352ee426cc9` is the value of the *datanode_id* parameter. It represents the identifier of the data node we want to retrieve. In case of success here is an example of the response: ``` JSON {"datanode": { "id": "DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d", "config_id": "historical_data_set", "scope": "<Scope.SCENARIO: 2>", "storage_type": "csv", "name": "Name of my historical data node", "owner_id": "SCENARIO_my_awesome_scenario_97f3fd67-8556-4c62-9b3b-ef189a599a38", "last_edit_date": "2022-08-10T16:03:40.855082", "job_ids": [], "version": "latest", "cacheable": false, "validity_days": null, "validity_seconds": null, "edit_in_progress": false, "data_node_properties": { "path": "daily-min-temperatures.csv", "has_header": true} }} ``` In case of failure here is an example of the response: ``` JSON {"message":"DataNode DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d not found"} ``` === "Python" This Python example requires the 'requests' package to be installed (`pip install requests`). ```python import requests response = requests.get( "http://localhost:5000/api/v1/datanodes/DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d") print(response) print(response.json()) ``` `DATANODE_hist_cfg_75750ed8-4e09-4e00-958d-e352ee426cc9` is the value of the *datanode_id* parameter. It represents the identifier of the data node we want to retrieve. 
In case of success here is an output example: ``` <Response [200]> {"datanode": { "id": "DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d", "config_id": "historical_data_set", "scope": "<Scope.SCENARIO: 2>", "storage_type": "csv", "name": "Name of my historical data node", "owner_id": "SCENARIO_my_awesome_scenario_97f3fd67-8556-4c62-9b3b-ef189a599a38", "last_edit_date": "2022-08-10T16:03:40.855082", "job_ids": [], "version": "latest", "cacheable": false, "validity_days": null, "validity_seconds": null, "edit_in_progress": false, "data_node_properties": { "path": "daily-min-temperatures.csv", "has_header": true} }} ``` In case of failure here is an output example: ``` <Response [404]> {"message":"DataNode DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d not found"} ``` !!! Note When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint requires the `TAIPY_READER` role. parameters: - in: path name: datanode_id schema: type: string description: The identifier of the data node to retrieve. responses: 200: content: application/json: schema: type: object properties: datanode: DataNodeSchema 404: description: No data node has the *datanode_id* identifier. delete: tags: - api summary: Delete a data node. description: | Deletes the `DataNode^` identified by the *datanode_id* given as parameter. If the data node does not exist, a 404 error is returned. !!! Example === "Curl" ```shell curl -X DELETE \ http://localhost:5000/api/v1/datanodes/DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. `DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d` is the value of the *datanode_id* parameter. It represents the identifier of the data node we want to delete. 
            In case of success here is an example of the response:
            ``` JSON
            {"msg": "datanode DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d deleted"}
            ```

            In case of failure here is an example of the response:
            ``` JSON
            {"message": "Data node DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d not found."}
            ```

        === "Python"
            This Python example requires the 'requests' package to be installed (`pip install requests`).

            ```python
            import requests
            response = requests.delete(
                "http://localhost:5000/api/v1/datanodes/DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d")
            print(response)
            print(response.json())
            ```
            `DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d` is the value of the
            *datanode_id* parameter. It represents the identifier of the data node we want to delete.

            In case of success here is an output example:
            ```
            <Response [200]>
            {"msg": "Data node DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d deleted."}
            ```

            In case of failure here is an output example:
            ```
            <Response [404]>
            {'message': 'Data node DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d not found.'}
            ```

        !!! Note
            When the authorization feature is activated (available in Taipy Enterprise edition only), this
            endpoint requires the `TAIPY_EDITOR` role.

      parameters:
        - in: path
          name: datanode_id
          schema:
            type: string
          description: The identifier of the data node to delete.
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
                    description: Status message.
        404:
          description: No data node has the *datanode_id* identifier.
""" def __init__(self, **kwargs): self.logger = kwargs.get("logger") @_middleware def get(self, datanode_id): schema = DataNodeSchema() datanode = _get_or_raise(datanode_id) return {"datanode": schema.dump(_to_model(REPOSITORY, datanode))} @_middleware def delete(self, datanode_id): _get_or_raise(datanode_id) manager = _DataManagerFactory._build_manager() manager._delete(datanode_id) return {"message": f"Data node {datanode_id} was deleted."} class DataNodeList(Resource): """Creation and get_all --- get: tags: - api description: | Returns a `DataNodeSchema^` list representing all existing data nodes. !!! Example === "Curl" ```shell curl -X GET http://localhost:5000/api/v1/datanodes ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. Here is an example of the response: ``` JSON [ {"datanode": { "id": "DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d", "config_id": "historical_data_set", "scope": "<Scope.SCENARIO: 2>", "storage_type": "csv", "name": "Name of my historical data node", "owner_id": "SCENARIO_my_awesome_scenario_97f3fd67-8556-4c62-9b3b-ef189a599a38", "last_edit_date": "2022-08-10T16:03:40.855082", "job_ids": [], "version": "latest", "cacheable": false, "validity_days": null, "validity_seconds": null, "edit_in_progress": false, "data_node_properties": { "path": "daily-min-temperatures.csv", "has_header": true} }} ] ``` If there is no data node, the response is an empty list as follows: ``` JSON [] ``` === "Python" This Python example requires the 'requests' package to be installed (`pip install requests`). 
```python import requests response = requests.get("http://localhost:5000/api/v1/datanodes") print(response) print(response.json()) ``` In case of success here is an output example: ``` <Response [200]> [ {"datanode": { "id": "DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d", "config_id": "historical_data_set", "scope": "<Scope.SCENARIO: 2>", "storage_type": "csv", "name": "Name of my historical data node", "owner_id": "SCENARIO_my_awesome_scenario_97f3fd67-8556-4c62-9b3b-ef189a599a38", "last_edit_date": "2022-08-10T16:03:40.855082", "job_ids": [], "version": "latest", "cacheable": false, "validity_days": null, "validity_seconds": null, "edit_in_progress": false, "data_node_properties": { "path": "daily-min-temperatures.csv", "has_header": true} }} ] ``` If there is no data node, the response is an empty list as follows: ``` <Response [200]> [] ``` !!! Note When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint requires the `TAIPY_READER` role. responses: 200: content: application/json: schema: allOf: - type: object properties: results: type: array items: $ref: '#/components/schemas/DataNodeSchema' post: tags: - api description: | Creates a new data node from the *config_id* given as parameter. !!! Example === "Curl" ```shell curl -X POST http://localhost:5000/api/v1/datanodes?config_id=historical_data_set ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. In this example the *config_id* value ("historical_data_set") is given as parameter directly in the url. A corresponding `DataNodeConfig^` must exist and must have been configured before. 
Here is the output message example: ``` {"msg": "datanode created", "datanode": { "default_path": null, "path": "daily-min-temperatures.csv", "name": null, "storage_type": "csv", "scope": 2, "has_header": true} } ``` === "Python" This Python example requires the 'requests' package to be installed (`pip install requests`). ```python import requests response = requests.post("http://localhost:5000/api/v1/datanodes?config_id=historical_data_set") print(response) print(response.json()) ``` In this example the *config_id* value ("historical_data_set") is given as parameter directly in the url. A corresponding `DataNodeConfig^` must exist and must have been configured before. Here is the output example: ``` <Response [201]> {'msg': 'datanode created', 'datanode': { 'name': None, 'scope': 2, 'path': 'daily-min-temperatures.csv', 'storage_type': 'csv', 'default_path': None, 'has_header': True}} ``` !!! Note When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint requires the `TAIPY_EDITOR` role. parameters: - in: query name: config_id schema: type: string description: The identifier of the data node configuration. responses: 201: content: application/json: schema: type: object properties: message: type: string description: Status message. 
datanode: DataNodeSchema """ def __init__(self, **kwargs): self.logger = kwargs.get("logger") def fetch_config(self, config_id): config = Config.data_nodes.get(config_id) if not config: raise NonExistingDataNodeConfig(config_id) return config @_middleware def get(self): schema = DataNodeSchema(many=True) manager = _DataManagerFactory._build_manager() datanodes = [_to_model(REPOSITORY, datanode) for datanode in manager._get_all()] return schema.dump(datanodes) @_middleware def post(self): args = request.args config_id = args.get("config_id") if not config_id: raise ConfigIdMissingException config = self.fetch_config(config_id) schema = ds_schema_map.get(config.storage_type)() manager = _DataManagerFactory._build_manager() manager._bulk_get_or_create({config}) return { "message": "Data node was created.", "datanode": schema.dump(config), }, 201 class DataNodeReader(Resource): """Single object resource --- get: tags: - api description: | Returns the data read from the data node identified by *datanode_id*. If the data node does not exist, a 404 error is returned. !!! Example === "Curl" ```shell curl -X GET \ http://localhost:5000/api/v1/datanodes/DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d/read ``` `DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d` is the *datanode_id* parameter. It represents the identifier of the data node to read. Here is an output example. In this case, the storage type of the data node to read is `csv`, and no exposed type is specified. The data is exposed as a list of dictionaries, each dictionary representing a raw of the csv file. 
            ```
            {"data": [
                {"Date": "1981-01-01", "Temp": 20.7},
                {"Date": "1981-01-02", "Temp": 17.9},
                {"Date": "1981-01-03", "Temp": 18.8},
                {"Date": "1981-01-04", "Temp": 14.6},
                {"Date": "1981-01-05", "Temp": 15.8},
                {"Date": "1981-01-06", "Temp": 15.8},
                {"Date": "1981-01-07", "Temp": 15.8}
            ]}
            ```

        === "Python"
            This Python example requires the 'requests' package to be installed (`pip install requests`).

            ```python
            import requests
            response = requests.get(
                "http://localhost:5000/api/v1/datanodes/DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d/read")
            print(response)
            print(response.json())
            ```
            `DATANODE_historical_data_set_9db1b542-2e45-44e7-8a85-03ef9ead173d` is the *datanode_id*
            parameter. It represents the identifier of the data node to read.

            Here is an output example. In this case, the storage type of the data node to read is `csv`,
            and no exposed type is specified. The data is exposed as a list of dictionaries, each
            dictionary representing a row of the csv file.
            ```
            {"data": [
                {"Date": "1981-01-01", "Temp": 20.7},
                {"Date": "1981-01-02", "Temp": 17.9},
                {"Date": "1981-01-03", "Temp": 18.8},
                {"Date": "1981-01-04", "Temp": 14.6},
                {"Date": "1981-01-05", "Temp": 15.8},
                {"Date": "1981-01-06", "Temp": 15.8},
                {"Date": "1981-01-07", "Temp": 15.8}
            ]}
            ```

        !!! Note
            When the authorization feature is activated (available in Taipy Enterprise edition only), this
            endpoint requires the `TAIPY_READER` role.

      parameters:
        - in: path
          name: datanode_id
          schema:
            type: string
          description: The id of the data node to read.
      requestBody:
        content:
          application/json:
            schema: DataNodeFilterSchema
      responses:
        200:
          content:
            application/json:
              schema:
                type: object
                properties:
                  data:
                    type: Any
                    description: The data read from the data node.
        404:
          description: No data node has the *datanode_id* identifier.
""" def __init__(self, **kwargs): self.logger = kwargs.get("logger") def __make_operators(self, schema: DataNodeFilterSchema) -> List: return [ ( x.get("key"), x.get("value"), Operator(getattr(Operator, x.get("operator", "").upper())), ) for x in schema.get("operators") ] @_middleware def get(self, datanode_id): schema = DataNodeFilterSchema() data = request.get_json(silent=True) data_node = _get_or_raise(datanode_id) operators = self.__make_operators(schema.load(data)) if data else [] data = data_node.filter(operators) if isinstance(data, pd.DataFrame): data = data.to_dict(orient="records") elif isinstance(data, np.ndarray): data = list(data) return {"data": data} class DataNodeWriter(Resource): """Single object resource --- put: tags: - api summary: Write into a data node. description: | Write data from request body into a data node by *datanode_id*. If the data node does not exist, a 404 error is returned. !!! Note When the authorization feature is activated (available in the **Enterprise** edition only), this endpoint requires `TAIPY_EDITOR` role. Code example: ```shell curl -X PUT -d '[{"path": "/abc", "type": 1}, {"path": "/def", "type": 2}]' \\ -H 'Content-Type: application/json' \\ http://localhost:5000/api/v1/datanodes/DATANODE_my_config_75750ed8-4e09-4e00-958d-e352ee426cc9/write ``` parameters: - in: path name: datanode_id schema: type: string requestBody: content: application/json: schema: Any responses: 200: content: application/json: schema: type: object properties: message: type: string description: Status message. 404: description: No data node has the *datanode_id* identifier. """ def __init__(self, **kwargs): self.logger = kwargs.get("logger") @_middleware def put(self, datanode_id): data = request.json data_node = _get_or_raise(datanode_id) data_node.write(data) return {"message": f"Data node {datanode_id} was successfully written."}
from flask import request from flask_restful import Resource from taipy.config.config import Config from taipy.core.exceptions.exceptions import NonExistingScenario, NonExistingScenarioConfig from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory from ...commons.to_from_model import _to_model from ..exceptions.exceptions import ConfigIdMissingException from ..middlewares._middleware import _middleware from ..schemas import ScenarioResponseSchema def _get_or_raise(scenario_id: str): manager = _ScenarioManagerFactory._build_manager() scenario = manager._get(scenario_id) if scenario is None: raise NonExistingScenario(scenario_id) return scenario REPOSITORY = "scenario" class ScenarioResource(Resource): """Single object resource --- get: tags: - api description: | Returns a `ScenarioSchema^` representing the unique scenario identified by *scenario_id*. If no scenario corresponds to *scenario_id*, a `404` error is returned. !!! Example === "Curl" ```shell curl -X GET http://localhost:5000/api/v1/scenarios/SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. `SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c` is the value of the *scenario_id* parameter. It represents the identifier of the Scenario we want to retrieve. 
            In case of success here is an example of the response:
            ``` JSON
            {"scenario": {
                "cycle": "CYCLE_863418_fdd1499a-8925-4540-93fd-9dbfb4f0846d",
                "id": "SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c",
                "properties": {},
                "tags": [],
                "version": "latest",
                "sequences": [
                    "SEQUENCE_mean_baseline_5af317c9-34df-48b4-8a8a-bf4007e1de99",
                    "SEQUENCE_arima_90aef6b9-8922-4a0c-b625-b2c6f3d19fa4"],
                "subscribers": [],
                "creation_date": "2022-08-15T19:21:01.871587",
                "primary_scenario": true}}
            ```

            In case of failure here is an example of the response:
            ``` JSON
            {"message": "SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c not found."}
            ```

        === "Python"
            This Python example requires the 'requests' package to be installed (`pip install requests`).

            ```python
            import requests
            response = requests.get(
                "http://localhost:5000/api/v1/scenarios/SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c")
            print(response)
            print(response.json())
            ```
            `SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c` is the value of the *scenario_id* parameter. It
            represents the identifier of the scenario we want to retrieve.

            In case of success here is an output example:
            ```
            <Response [200]>
            {"scenario": {
                "cycle": "CYCLE_863418_fdd1499a-8925-4540-93fd-9dbfb4f0846d",
                "id": "SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c",
                "properties": {},
                "tags": [],
                "version": "latest",
                "sequences": [
                    "SEQUENCE_mean_baseline_5af317c9-34df-48b4-8a8a-bf4007e1de99",
                    "SEQUENCE_arima_90aef6b9-8922-4a0c-b625-b2c6f3d19fa4"],
                "subscribers": [],
                "creation_date": "2022-08-15T19:21:01.871587",
                "primary_scenario": true}}
            ```

            In case of failure here is an output example:
            ```
            <Response [404]>
            {'message': 'Scenario SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c not found.'}
            ```

        !!! Note
            When the authorization feature is activated (available in Taipy Enterprise edition only), this
            endpoint requires the `TAIPY_READER` role.

      parameters:
        - in: path
          name: scenario_id
          schema:
            type: string
          description: The identifier of the scenario to retrieve.
responses: 200: content: application/json: schema: type: object properties: scenario: ScenarioSchema 404: description: No scenario has the *scenario_id* identifier. delete: tags: - api description: | Delete the `Scenario^` scenario identified by the *scenario_id* given as parameter. If the scenario does not exist, a 404 error is returned. !!! Example === "Curl" ```shell curl -X DELETE http://localhost:5000/api/v1/scenarios/SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. `SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c` is the value of the *scenario_id* parameter. It represents the identifier of the scenario we want to delete. In case of success here is an example of the response: ``` JSON {"msg": "Scenario SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c deleted."} ``` In case of failure here is an example of the response: ``` JSON {"message": "Scenario SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c not found."} ``` === "Python" This Python example requires the 'requests' package to be installed (`pip install requests`). ```python import requests response = requests.delete( "http://localhost:5000/api/v1/scenarios/SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c") print(response) print(response.json()) ``` `SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c` is the value of the *scenario_id* parameter. It represents the identifier of the Scenario we want to delete. In case of success here is an output example: ``` <Response [200]> {"msg": "Scenario SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c deleted."} ``` In case of failure here is an output example: ``` <Response [404]> {'message': 'Scenario SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c not found.'} ``` !!! Note When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint requires the `TAIPY_EDITOR` role. 
parameters: - in: path name: scenario_id schema: type: string description: The identifier of the scenario to delete. responses: 200: content: application/json: schema: type: object properties: message: type: string description: Status message. 404: description: No scenario has the *scenario_id* identifier. """ def __init__(self, **kwargs): self.logger = kwargs.get("logger") @_middleware def get(self, scenario_id): schema = ScenarioResponseSchema() scenario = _get_or_raise(scenario_id) return {"scenario": schema.dump(_to_model(REPOSITORY, scenario))} @_middleware def delete(self, scenario_id): manager = _ScenarioManagerFactory._build_manager() _get_or_raise(scenario_id) manager._delete(scenario_id) return {"message": f"Scenario {scenario_id} was deleted."} class ScenarioList(Resource): """Creation and get_all --- get: tags: - api summary: Get all scenarios. description: | Returns a `ScenarioSchema^` list representing all existing Scenarios. !!! Example === "Curl" ```shell curl -X GET http://localhost:5000/api/v1/scenarios ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. Here is an example of the response: ``` JSON [{ "cycle": "CYCLE_863418_fdd1499a-8925-4540-93fd-9dbfb4f0846d", "id": "SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c", "properties": {}, "tags": [], "version": "latest", "sequences": [ "SEQUENCE_mean_baseline_5af317c9-34df-48b4-8a8a-bf4007e1de99", "SEQUENCE_arima_90aef6b9-8922-4a0c-b625-b2c6f3d19fa4"], "subscribers": [], "creation_date": "2022-08-15T19:21:01.871587", "primary_scenario": true } ] ``` If there is no scenario, the response is an empty list as follows: ``` JSON [] ``` === "Python" This Python example requires the 'requests' package to be installed (`pip install requests`). 
```python import requests response = requests.get("http://localhost:5000/api/v1/scenarios") print(response) print(response.json()) ``` In case of success here is an output example: ``` <Response [200]> [{ "cycle": "CYCLE_863418_fdd1499a-8925-4540-93fd-9dbfb4f0846d", "id": "SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c", "properties": {}, "tags": [], "version": "latest", "sequences": [ "SEQUENCE_mean_baseline_5af317c9-34df-48b4-8a8a-bf4007e1de99", "SEQUENCE_arima_90aef6b9-8922-4a0c-b625-b2c6f3d19fa4"], "subscribers": [], "creation_date": "2022-08-15T19:21:01.871587", "primary_scenario": true } ] ``` If there is no scenario, the response is an empty list as follows: ``` <Response [200]> [] ``` !!! Note When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint requires the `TAIPY_READER` role. responses: 200: content: application/json: schema: allOf: - type: object properties: results: type: array items: $ref: '#/components/schemas/ScenarioSchema' post: tags: - api description: | Creates a new scenario from the *config_id*. If the config does not exist, a 404 error is returned. !!! Example === "Curl" ```shell curl -X POST http://localhost:5000/api/v1/scenarios?config_id=my_scenario_config ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. In this example the *config_id* value ("my_scenario_config") is given as parameter directly in the url. A corresponding `ScenarioConfig^` must exist and must have been configured before. 
Here is the output message example: ``` {"msg": "scenario created.", "scenario": { "cycle": "CYCLE_863418_fdd1499a-8925-4540-93fd-9dbfb4f0846d", "id": "SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c", "properties": {}, "tags": [], "version": "latest", "sequences": [ "SEQUENCE_mean_baseline_5af317c9-34df-48b4-8a8a-bf4007e1de99", "SEQUENCE_arima_90aef6b9-8922-4a0c-b625-b2c6f3d19fa4"], "subscribers": [], "creation_date": "2022-08-15T19:21:01.871587", "primary_scenario": true} } ``` === "Python" This Python example requires the 'requests' package to be installed (`pip install requests`). ```python import requests response = requests.post("http://localhost:5000/api/v1/scenarios?config_id=my_scenario_config") print(response) print(response.json()) ``` In this example the *config_id* value ("my_scenario_config") is given as parameter directly in the url. A corresponding `ScenarioConfig^` must exist and must have been configured before. Here is the output example: ``` <Response [201]> {"msg": "scenario created.", "scenario": { "cycle": "CYCLE_863418_fdd1499a-8925-4540-93fd-9dbfb4f0846d", "id": "SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c", "properties": {}, "tags": [], "version": "latest", "sequences": [ "SEQUENCE_mean_baseline_5af317c9-34df-48b4-8a8a-bf4007e1de99", "SEQUENCE_arima_90aef6b9-8922-4a0c-b625-b2c6f3d19fa4"], "subscribers": [], "creation_date": "2022-08-15T19:21:01.871587", "primary_scenario": true} } ``` !!! Note When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint requires the `TAIPY_EDITOR` role. parameters: - in: query name: config_id schema: type: string description: The identifier of the scenario configuration. responses: 201: content: application/json: schema: type: object properties: message: type: string description: Status message. 
scenario: ScenarioSchema """ def __init__(self, **kwargs): self.logger = kwargs.get("logger") def fetch_config(self, config_id): config = Config.scenarios.get(config_id) if not config: raise NonExistingScenarioConfig(config_id) return config @_middleware def get(self): schema = ScenarioResponseSchema(many=True) manager = _ScenarioManagerFactory._build_manager() scenarios = [_to_model(REPOSITORY, scenario) for scenario in manager._get_all()] return schema.dump(scenarios) @_middleware def post(self): args = request.args config_id = args.get("config_id") response_schema = ScenarioResponseSchema() manager = _ScenarioManagerFactory._build_manager() if not config_id: raise ConfigIdMissingException config = self.fetch_config(config_id) scenario = manager._create(config) return { "message": "Scenario was created.", "scenario": response_schema.dump(_to_model(REPOSITORY, scenario)), }, 201 class ScenarioExecutor(Resource): """Execute a scenario --- post: tags: - api description: | Executes a scenario by *scenario_id*. If the scenario does not exist, a 404 error is returned. !!! Example === "Curl" ```shell curl -X POST http://localhost:5000/api/v1/scenarios/submit/SCENARIO_658d-5834-4d73-84e4-a6343df5e08c ``` In this example the REST API is served on port 5000 on localhost. We are using curl command line client. `SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c` is the value of the *scenario_id* parameter. It represents the identifier of the Scenario we want to submit. Here is the output message example: ``` {"message": "Executed scenario SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c."} ``` === "Python" This Python example requires the 'requests' package to be installed (`pip install requests`). ```python import requests response = requests.post( "http://localhost:5000/api/v1/scenarios/submit/SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c") print(response) print(response.json()) ``` `SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c` is the value of the *scenario_id* parameter. 
It represents the identifier of the Scenario we want to submit. Here is the output example: ``` <Response [202]> {"message": "Executed scenario SCENARIO_63cb358d-5834-4d73-84e4-a6343df5e08c."} ``` !!! Note When the authorization feature is activated (available in Taipy Enterprise edition only), this endpoint requires the `TAIPY_EXECUTOR` role. parameters: - in: path name: scenario_id schema: type: string description: The identifier of the scenario to submit. responses: 202: content: application/json: schema: type: object properties: message: type: string description: Status message. scenario: ScenarioSchema 404: description: No scenario has the *scenario_id* identifier. """ def __init__(self, **kwargs): self.logger = kwargs.get("logger") @_middleware def post(self, scenario_id): _get_or_raise(scenario_id) manager = _ScenarioManagerFactory._build_manager() manager._submit(scenario_id) return {"message": f"Scenario {scenario_id} was submitted."}
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from functools import wraps from importlib import util from taipy.core.common._utils import _load_fct def _middleware(f): @wraps(f) def wrapper(*args, **kwargs): if _using_enterprise(): return _enterprise_middleware()(f)(*args, **kwargs) else: return f(*args, **kwargs) return wrapper def _using_enterprise(): return util.find_spec("taipy.enterprise") is not None def _enterprise_middleware(): return _load_fct("taipy.enterprise.rest.api.middlewares._middleware", "_middleware")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
class ConfigIdMissingException(Exception): def __init__(self): self.message = "Config id is missing." class ScenarioIdMissingException(Exception): def __init__(self): self.message = "Scenario id is missing." class SequenceNameMissingException(Exception): def __init__(self): self.message = "Sequence name is missing."
from marshmallow import Schema, fields


class CycleSchema(Schema):
    """Serialization schema for a Taipy Cycle."""

    name = fields.String()
    frequency = fields.String()
    properties = fields.Dict()
    # Dates are exposed as plain strings (presumably ISO-8601 — TODO confirm).
    creation_date = fields.String()
    start_date = fields.String()
    end_date = fields.String()


class CycleResponseSchema(CycleSchema):
    """CycleSchema extended with the server-generated identifier, for responses."""

    id = fields.String()
from marshmallow import Schema, fields


class TaskSchema(Schema):
    """Serialization schema for a Taipy Task."""

    config_id = fields.String()
    id = fields.String()
    owner_id = fields.String()
    parent_ids = fields.List(fields.String)
    input_ids = fields.List(fields.String)
    # The task's function is serialized by name and module, not by value.
    function_name = fields.String()
    function_module = fields.String()
    output_ids = fields.List(fields.String)
    version = fields.String()
from marshmallow import Schema, fields


class CallableSchema(Schema):
    """Serializes a callable as its function name and defining module."""

    fct_name = fields.String()
    fct_module = fields.String()


class JobSchema(Schema):
    """Serialization schema for a Taipy Job."""

    id = fields.String()
    task_id = fields.String()
    status = fields.String()
    force = fields.Boolean()
    creation_date = fields.String()
    # NOTE(review): sibling schemas declare `subscribers` as a List; a single
    # Nested here may be intentional — confirm a job carries one subscriber
    # or whether this should be fields.List(fields.Nested(CallableSchema)).
    subscribers = fields.Nested(CallableSchema)
    stacktrace = fields.List(fields.String)
from marshmallow import Schema, fields


class SequenceSchema(Schema):
    """Serialization schema for a Taipy Sequence."""

    owner_id = fields.String()
    parent_ids = fields.List(fields.String)
    tasks = fields.List(fields.String)
    version = fields.String()
    properties = fields.Dict()


class SequenceResponseSchema(SequenceSchema):
    """SequenceSchema extended with response-only fields."""

    id = fields.String()
    subscribers = fields.List(fields.Dict)
"""Public re-exports of the REST API serialization schemas."""
from .cycle import CycleResponseSchema, CycleSchema
from .datanode import (
    CSVDataNodeConfigSchema,
    DataNodeConfigSchema,
    DataNodeFilterSchema,
    DataNodeSchema,
    ExcelDataNodeConfigSchema,
    GenericDataNodeConfigSchema,
    InMemoryDataNodeConfigSchema,
    JSONDataNodeConfigSchema,
    MongoCollectionDataNodeConfigSchema,
    PickleDataNodeConfigSchema,
    SQLDataNodeConfigSchema,
    SQLTableDataNodeConfigSchema,
)
from .job import JobSchema
from .scenario import ScenarioResponseSchema, ScenarioSchema
from .sequence import SequenceResponseSchema, SequenceSchema
from .task import TaskSchema

# Fix: the original __all__ omitted the data-node config schemas even though
# they are imported above, leaving them flagged as unused imports and absent
# from `from ...schemas import *`. Every imported schema is public API.
__all__ = [
    "CSVDataNodeConfigSchema",
    "CycleResponseSchema",
    "CycleSchema",
    "DataNodeConfigSchema",
    "DataNodeFilterSchema",
    "DataNodeSchema",
    "ExcelDataNodeConfigSchema",
    "GenericDataNodeConfigSchema",
    "InMemoryDataNodeConfigSchema",
    "JSONDataNodeConfigSchema",
    "JobSchema",
    "MongoCollectionDataNodeConfigSchema",
    "PickleDataNodeConfigSchema",
    "SQLDataNodeConfigSchema",
    "SQLTableDataNodeConfigSchema",
    "ScenarioResponseSchema",
    "ScenarioSchema",
    "SequenceResponseSchema",
    "SequenceSchema",
    "TaskSchema",
]
from marshmallow import Schema, fields, pre_dump


class DataNodeSchema(Schema):
    """Serialization schema for a Taipy DataNode entity."""

    config_id = fields.String()
    scope = fields.String()
    id = fields.String()
    storage_type = fields.String()
    name = fields.String()
    owner_id = fields.String()
    parent_ids = fields.List(fields.String)
    last_edit_date = fields.String()
    job_ids = fields.List(fields.String)
    version = fields.String()
    cacheable = fields.Boolean()
    validity_days = fields.Float()
    validity_seconds = fields.Float()
    edit_in_progress = fields.Boolean()
    properties = fields.Dict()


class DataNodeConfigSchema(Schema):
    """Base schema for DataNodeConfig; subclasses add storage-specific fields."""

    name = fields.String()
    storage_type = fields.String()
    scope = fields.Integer()
    cacheable = fields.Boolean()

    @pre_dump
    def serialize_scope(self, obj, **kwargs):
        # Scope is an enum; expose its integer value.
        # NOTE(review): this mutates the dumped object in place — confirm
        # callers do not rely on `obj.scope` still being the enum afterwards.
        obj.scope = obj.scope.value
        return obj


class CSVDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for CSV-backed data nodes."""

    path = fields.String()
    default_path = fields.String()
    has_header = fields.Boolean()


class InMemoryDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for in-memory data nodes; default data may be any type."""

    default_data = fields.Inferred()


class PickleDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for pickle-backed data nodes."""

    path = fields.String()
    default_path = fields.String()
    default_data = fields.Inferred()


class SQLTableDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for SQL-table data nodes."""

    db_name = fields.String()
    table_name = fields.String()


class SQLDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for generic SQL data nodes (custom read/write queries)."""

    db_name = fields.String()
    read_query = fields.String()
    write_query = fields.List(fields.String())


class MongoCollectionDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for MongoDB-collection data nodes."""

    db_name = fields.String()
    collection_name = fields.String()


class ExcelDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for Excel-backed data nodes."""

    path = fields.String()
    default_path = fields.String()
    has_header = fields.Boolean()
    sheet_name = fields.String()


class GenericDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for generic data nodes (no extra fields beyond the base)."""

    pass


class JSONDataNodeConfigSchema(DataNodeConfigSchema):
    """Config schema for JSON-backed data nodes."""

    path = fields.String()
    default_path = fields.String()


class OperatorSchema(Schema):
    """One filter predicate: key <operator> value."""

    key = fields.String()
    value = fields.Inferred()
    operator = fields.String()


class DataNodeFilterSchema(DataNodeConfigSchema):
    """Schema for a data-node filter request (operator list + join operator)."""

    operators = fields.List(fields.Nested(OperatorSchema))
    # NOTE(review): `default=` is deprecated in marshmallow 3 in favor of
    # `dump_default=` — confirm against the pinned marshmallow version.
    join_operator = fields.String(default="AND")
from marshmallow import Schema, fields


class ScenarioSchema(Schema):
    """Serialization schema for a Taipy Scenario."""

    sequences = fields.Dict()
    properties = fields.Dict()
    # NOTE(review): `default=` is deprecated in marshmallow 3 (use
    # `dump_default=`) — confirm the pinned marshmallow version.
    primary_scenario = fields.Boolean(default=False)
    tags = fields.List(fields.String)
    version = fields.String()


class ScenarioResponseSchema(ScenarioSchema):
    """ScenarioSchema extended with response-only fields."""

    id = fields.String()
    subscribers = fields.List(fields.Dict)
    cycle = fields.String()
    creation_date = fields.String()
from taipy.gui import Gui
from pages.examples import page_examples

# Root page markup (French copy kept verbatim); shares a navbar with all pages.
ROOT_PAGE = """
Cette page est générée avec Taipy.

Voir taipy.io pour plus d'informations
<|navbar|>"""

# Route table: root layout plus the "Examples" page.
pages = {
    "/": ROOT_PAGE,
    "Examples": page_examples,
}

if __name__ == "__main__":
    app = Gui(pages=pages)
    app.run(port=5006)
import tensorflow as tf
import tensorflow.keras.preprocessing.image as imgtf
import numpy as np
from io import BytesIO


def load_model(local_model_path):
    """Load a saved Keras model, registering the custom iou_coef metric."""
    model = tf.keras.models.load_model(local_model_path,
                                       custom_objects={"iou_coef": iou_coef})
    return model


def convert_mask_to_color(masks):
    """Convert a mask array (height, width, 8) into an RGB array (height, width, 3).

    Each of the 8 class channels is mapped to a fixed display color; the class
    with the highest score per pixel wins.
    """
    # RGB color per class id, indexed by class position.
    palette = np.array([
        [76, 0, 153],     # 0: flat
        [0, 0, 204],      # 1: vehicle
        [96, 96, 96],     # 2: construction
        [224, 224, 224],  # 3: object
        [0, 204, 0],      # 4: nature
        [255, 0, 0],      # 5: human
        [153, 255, 255],  # 6: sky
        [0, 0, 0],        # 7: void
    ])
    id_mask = np.argmax(masks, axis=-1)
    # Fancy indexing replaces the three np.vectorize passes of the original:
    # one O(h*w) table lookup yields the full RGB image at once.
    return palette[id_mask]


def make_predict(X, model):
    """Run the model on batch X and return the colorized mask as PNG bytes.

    X is expected to be a (1, H, W, 3) image batch matching the model input
    (assumed — TODO confirm against the caller).
    """
    y_pred = model.predict(X)
    masks = y_pred[0,]
    image_masks = convert_mask_to_color(masks)
    image_to_save = imgtf.array_to_img(image_masks)
    buf = BytesIO()
    tf.keras.utils.save_img(buf, image_to_save,
                            data_format="channels_last",
                            file_format="PNG")
    return buf.getvalue()


def iou_coef(y_true, y_pred, smooth=1e-1):
    """Intersection-over-Union metric; `smooth` avoids division by zero."""
    y_pred = tf.convert_to_tensor(y_pred)
    y_pred = tf.cast(y_pred, tf.float32)
    y_true = tf.cast(y_true, tf.float32)
    smooth = tf.cast(smooth, tf.float32)
    # Bug fix: the original called K.flatten/K.sum but never imported
    # tensorflow.keras.backend as K in this module, raising NameError the
    # first time the metric ran. Equivalent tf ops are used instead.
    y_true_f = tf.reshape(y_true, [-1])
    y_pred_f = tf.reshape(y_pred, [-1])
    intersection = tf.reduce_sum(y_true_f * y_pred_f)
    union = tf.reduce_sum(y_true_f) + tf.reduce_sum(y_pred_f) - intersection
    iou = (intersection + smooth) / (union + smooth)
    return iou
"""Taipy "Examples" page: runs semantic segmentation on sample city images."""
from taipy.gui import Markdown
import time
from prediction_function.predmodel import *
import tensorflow as tf
import tensorflow.keras.backend as K
from io import BytesIO

# GUI-bound state variables (read/written through `state` in the callback).
image_originale = None  # path of the selected source image
image_traitee = None    # PNG bytes produced by the model
image_solution = None   # path of the ground-truth mask image
ville = None            # selected (path_prefix, city_name) entry
inference_time = 0.

# Loaded once at import time so every prediction reuses the same model.
model = load_model("./model/unet_vgg16_v3_256_2Aug")


def load_image(path_img):
    """Load an image file as a (1, 256, 256, 3) uint8 batch for the model."""
    dimension = (256, 256)
    X = np.empty((1, *dimension, 3), dtype="uint8")
    image = imgtf.load_img(path_img,
                           color_mode="rgb",
                           target_size=dimension,  # depends on the trained model's input size
                           interpolation="nearest",
                           )
    X[0,] = image
    return X


# (path prefix, display name) pairs for the example selector.
liste_ville = [
    ("./static/examples/frankfurt_000000_001016", "Frankfurt"),
    ("./static/examples/lindau_000000_000019", "Lindau"),
    ("./static/examples/munster_000046_000019", "Munster")
]


def on_change_selector(state):
    """Selector callback: run the model on the chosen city and refresh the page."""
    # The selected lov entry is a (path_prefix, name) tuple; [0] is the prefix.
    path_img = state.ville[0] + "_leftImg8bit.png"
    X = load_image(path_img)
    t0 = time.time()
    byte_im = make_predict(X, model)
    inference_time = round(time.time() - t0, 2)
    print("inference")
    state.image_originale = path_img
    state.image_traitee = byte_im
    state.image_solution = state.ville[0] + "_mask_colors.png"
    state.inference_time = inference_time
    print("finish")


# Page markup (French copy is user-facing text and is kept verbatim).
page_examples = Markdown("""
# Exemple d'application de segmentation sémantique

Cette démo utilise un modèle Unet (avec encodeur VGG16) pour de la segmentation sémantique

# Sélection de l'image example

<|{ville}|selector|lov={liste_ville}|on_change=on_change_selector|>

# Affichage du résultat

<|{image_originale}|image|label=L'image originale|>
<|{image_traitee}|image|label=La segmentation issue du modèle|>
<|{image_solution}|image|label=Le résultat cible|>

Le tout a pris <|{inference_time}|>s pour s'executer.
""")
# Bind the greeting to a name, then emit it on stdout.
msg = "Hello world"
print(msg)
"""Taipy demo: a slider-controlled decaying cosine rendered in a chart."""
from taipy.gui import Gui
from math import cos, exp

# Page markup: the slider is bound to `decay`, the chart to `data`.
page = """
#This is *Taipy* GUI

A value: <|{decay}|>.

A slider: <br/>
<|{decay}|slider|>

My chart:
<|{data}|chart|>
"""


def compute_data(decay):
    """Return 720 samples of cos(i/16) damped by exp(-i*decay/6000)."""
    return [cos(i / 16) * exp(-i * decay / 6000) for i in range(720)]


def on_change(state, var_name, var_value):
    """Recompute the chart whenever the slider changes the decay value."""
    if var_name == 'decay':
        state.data = compute_data(var_value)


decay = 10
data = compute_data(decay)

# Fix: guard the server start so importing this module (e.g. from tests or
# another page) no longer launches the GUI — consistent with the other
# entry-point scripts in this project.
if __name__ == "__main__":
    Gui(page=page).run(title='Taipy Demo GUI 1', dark_mode=False)
"""Minimal GPT-4 chat front end built with Taipy."""
import json
import time

import requests
import pandas as pd
from taipy.gui import Gui, notify

API_KEY = "ADD YOUR OPENAI API KEY HERE"
INITIAL_PROMPT = "I am a helpful assistant."
MAX_TOKENS = 150  # cap on tokens generated per reply
API_ENDPOINT = "https://api.openai.com/v1/chat/completions"


def generate_completion(messages, model="gpt-4", temperature=1):
    """Call the OpenAI chat-completion endpoint and return the reply text.

    Falls back to a canned apology on any non-200 response.
    """
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {API_KEY}",
    }
    data = {
        "model": model,
        "messages": messages,
        "temperature": temperature,
        "max_tokens": MAX_TOKENS,
    }
    response = requests.post(API_ENDPOINT, headers=headers, data=json.dumps(data))
    if response.status_code == 200:
        return response.json()["choices"][0]["message"]["content"]
    return "I'm sorry, GPT-4 is not available right now."


# Conversation history sent to the API; starts with the system prompt.
saved_messages = [
    {"role": "system", "content": INITIAL_PROMPT},
]
user_message = ""


def messages_to_data(messages):
    """Convert the chat history into a DataFrame for the table control."""
    rows = []
    for message in messages:
        # Everything that is not the user (system prompt, assistant replies)
        # is displayed under the "GPT-4" label.
        role = "You" if message["role"] == "user" else "GPT-4"
        rows.append({"Role": role, "Message": message["content"]})
    return pd.DataFrame(rows)


def on_send_click(state):
    """Send the user's message to GPT-4 and append the reply to the history."""
    notify(state, "info", "Generating response...")
    message = state.user_message
    state.saved_messages.append({"role": "user", "content": message})
    # Reassign so Taipy notices the in-place mutation and refreshes the table.
    state.saved_messages = state.saved_messages
    state.user_message = ""
    time.sleep(0.1)
    response = generate_completion(state.saved_messages)
    # Bug fix: model replies must carry the "assistant" role. The original
    # tagged them "system", so every follow-up request presented prior replies
    # to the API as system instructions, corrupting the conversation.
    state.saved_messages.append({"role": "assistant", "content": response})
    state.saved_messages = state.saved_messages
    notify(state, "success", "GPT-4 generated a response!")


page = """
# Chat with **GPT-4**{: .color-primary}

<|{messages_to_data(saved_messages)}|table|show_all|width=100%|>

<br/>

<|{user_message}|input|multiline=True|lines_shown=2|label=Your Message|on_action=on_send_click|class_name=fullwidth|>
<|Send|button|on_action=on_send_click|>
"""

Gui(page).run()
""" Module Name: Frontend App with Taipy GUI Author: Kenneth Leung Last Modified: 25 Mar 2023 """ # Launch app with the command in CLI: python main.py from src.config import * from src.functions import * from pages.analysis_md import * from pages.data_viewer_md import * import taipy as tp from taipy.gui import Gui, Icon, navigate with open('config.yml') as f: cfg = yaml.safe_load(f) # ======================= # Setup menu # ======================= menu = [("Analysis", Icon('assets/histogram_menu_2.png', 'Analysis')), ('Data', Icon('assets/data_menu.png', 'Data'))] page_markdown = """ <|toggle|theme|> <|menu|label=Menu|lov={menu}|on_action=menu_function|> """ pages = {"/":page_markdown, "Analysis":analysis_page, "Data":data_page} def menu_function(state, var_name: str, fct: str, var_value: list): # Change the value of the state.page variable in order to render the correct page navigate(state, var_value["args"][0]) # Run application if __name__ == "__main__": tp.Core().run() # Create and execute scenario scenario_selector = [(s.id, s.name) for s in tp.get_scenarios()] scenario = tp.create_scenario(scenario_cfg, name="Default Scenario") selected_scenario = scenario.id tp.submit(scenario) df = scenario.pipeline_keyword_analysis.data_keywords_df.read() df_keywords_count = scenario.pipeline_keyword_analysis.data_keywords_count.read() Gui(pages=pages).run(title="Keyword Extraction and Analysis with KeyBERT and Taipy", dark_mode=False, port=8020, use_reloader=True)
""" Module Name: Page Markdown Template (Analysis Page) Author: Kenneth Leung Last Modified: 8 Apr 2023 """ import taipy as tp from taipy.gui import notify from src.config import scenario_cfg import yaml with open('config.yml') as f: cfg = yaml.safe_load(f) query = cfg['QUERY'] ngram_min = cfg['NGRAM_MIN'] ngram_max = cfg['NGRAM_MAX'] diversity_algo = cfg['DIVERSITY_ALGO'] diversity_algo_options = ['mmr', 'maxsum'] diversity = cfg['DIVERSITY'] top_n = cfg['TOP_N'] nr_candidates = cfg['NR_CANDIDATES'] # Input section of dashboard input_page = """ # Keyword Extraction and Analysis with KeyBERT and Taipy <br/> <|layout|columns=1 1 1 1 1 1 1| <|{query}|input|label=Query Topic|> <|{ngram_min}|number|label=Min N-gram|> <|{ngram_max}|number|label=Max N-gram|> <|{top_n}|number|label=Top n results|> <|{diversity}|number|label=Diversity (for MMR)|> <|{nr_candidates}|number|label=No. of Candidates (for MaxSum)|> <|{diversity_algo}|selector|lov={diversity_algo_options}|dropdown|label=Diversity Algorithm|> <|Update Analysis|button|on_action=submit_scenario|> |> <br/> <br/> <|{selected_scenario}|selector|lov={scenario_selector}|dropdown|label=Scenario|on_change=synchronize_gui_core|value_by_id|> <|Save Scenario|button|on_action=create_scenario|> <br/> """ # Chart types: https://docs.taipy.io/en/latest/manuals/gui/viselements/chart/ chart_properties = {"type":"bar", "y":"keyword", "x":"count", "orientation": "h", "layout": { "xaxis": {"title": "Frequency Count"}, "yaxis": {"title": None}, "showlegend": False, # Hide the legend "title": 'Keyword Frequency Bar Plot', "margin": {'pad': 0} } } # Output section of dashboard output_page = """ <|layout|columns=1 1|gap=10px| <|{df_keywords_count}|table|width=30|page_size=10|height=20|> <|{df_keywords_count}|chart|properties={chart_properties}|height=20|> |> """ # Combine layout segments analysis_page = input_page + output_page # ======= Scenario Setup ========= def create_scenario(state): print("Creating scenario...") scenario = 
tp.create_scenario(scenario_cfg, name=f"Scenario N {len(state.scenario_selector)} {state.query}") state.scenario_selector += [(scenario.id, scenario.name)] state.selected_scenario = scenario.id notify(state, 'success', 'Scenario created!') submit_scenario(state) def update_chart(state): # Select the right scenario and pipeline scenario = tp.get(state.selected_scenario) # Update the chart based on this pipeline state.df = scenario.pipeline_keyword_analysis.data_keywords_df.read() state.df_keywords_count = scenario.pipeline_keyword_analysis.data_keywords_count.read() def submit_scenario(state): print("Submitting scenario...") notify(state, 'info', 'Submitting scenario...') # Get the selected scenario: in this current step a single scenario is created then modified here. scenario = tp.get(state.selected_scenario) # Change the default parameters by writing in the datanodes scenario.query.write(str(state.query)) scenario.ngram_max.write(int(state.ngram_max)) scenario.diversity_algo.write(str(state.diversity_algo)) scenario.diversity.write(float(state.diversity)) scenario.top_n.write(int(state.top_n)) scenario.nr_candidates.write(int(state.nr_candidates)) # Execute the pipelines/code tp.submit(scenario) notify(state, 'success', 'Execution finished!') # Update the chart when we change the scenario update_chart(state) def synchronize_gui_core(state): scenario = tp.get(state.selected_scenario) # get the information of the selected scenario and display it on the GUI state.query = scenario.query.read() state.ngram_max = scenario.ngram_max.read() state.diversity_algo = scenario.diversity_algo.read() state.diversity = scenario.diversity.read() state.top_n = scenario.top_n.read() state.nr_candidates = scenario.nr_candidates.read() update_chart(state)
""" Module Name: Page Markdown Template Author: Kenneth Leung Last Modified: 19 Mar 2023 """ data_page = """ # Extracted arXiv Abstracts <|{df}|table|> """
""" Module Name: Core Functions (Backend) Author: Kenneth Leung Last Modified: 19 Mar 2023 """ import arxiv import pandas as pd import yaml from keybert import KeyBERT with open('config.yml') as f: cfg = yaml.safe_load(f) def extract_arxiv(query: str): search = arxiv.Search( query=query, max_results=cfg['MAX_ABSTRACTS'], # Limit number of abstracts sort_by=arxiv.SortCriterion.SubmittedDate, sort_order=arxiv.SortOrder.Descending ) # Returns arxiv object return search def save_in_dataframe(search): df = pd.DataFrame([{'uid': result.entry_id.split('.')[-1], 'title': result.title, 'date_published': result.published, 'abstract': result.summary} for result in search.results()]) return df def preprocess_data(df: pd.DataFrame): df['date_published'] = pd.to_datetime(df['date_published']) # Create empty column to store keyword extraction output df['keywords_and_scores'] = '' # Create empty column to store top keywords df['keywords'] = '' return df def run_keybert(df: pd.DataFrame, ngram_min: int, ngram_max: int, diversity_algo: str, top_n: int, diversity: float, nr_candidates: int): kw_model = KeyBERT(model='all-MiniLM-L6-v2') use_mmr = diversity_algo.lower() == 'mmr' use_maxsum = diversity_algo.lower() == 'maxsum' for i, row in df.iterrows(): abstract_text = row['abstract'] kw_output = kw_model.extract_keywords(abstract_text, keyphrase_ngram_range=(ngram_min, ngram_max), stop_words='english', use_mmr=use_mmr, use_maxsum=use_maxsum, top_n=top_n, diversity=diversity, nr_candidates=nr_candidates) df.at[i, 'keywords_and_scores'] = kw_output df.at[i, 'keywords'] = [pair[0] for pair in kw_output] return df def get_keyword_value_counts(df): keywords_count = pd.Series(df['keywords'].explode()).value_counts().reset_index() keywords_count.columns = ['keyword', 'count'] return keywords_count
""" Module Name: Taipy Configurations Author: Kenneth Leung Last Modified: 19 Mar 2023 """ import yaml from taipy import Config, Scope from src.functions import * with open('config.yml') as f: cfg = yaml.safe_load(f) # ==================== # Input Data Nodes # ==================== # To place input values as DataNodeConfig objects data_query_cfg = Config.configure_data_node(id='query', default_data=cfg['QUERY']) data_ngram_min_cfg = Config.configure_data_node(id='ngram_min', default_data=cfg['NGRAM_MIN']) data_ngram_max_cfg = Config.configure_data_node(id='ngram_max', default_data=cfg['NGRAM_MAX']) data_diversity_algo_cfg = Config.configure_data_node(id='diversity_algo', default_data=cfg['DIVERSITY_ALGO']) data_top_n_cfg = Config.configure_data_node(id='top_n', default_data=cfg['TOP_N']) data_diversity_cfg = Config.configure_data_node(id='diversity', default_data=cfg['DIVERSITY']) data_nr_candidates_cfg = Config.configure_data_node(id='nr_candidates', default_data=cfg['NR_CANDIDATES']) # =================== # Key Data Nodes # =================== data_arxiv_search_cfg = Config.configure_data_node(id='data_arxiv_search') data_raw_df_cfg = Config.configure_data_node(id='data_raw_df') data_processed_df_cfg = Config.configure_data_node(id='data_processed_df') data_keywords_df_cfg = Config.configure_data_node(id='data_keywords_df') data_keywords_count_cfg = Config.configure_data_node(id='data_keywords_count') # ================= # Tasks # ================= task_arxiv_extraction_cfg = Config.configure_task(id="task_arxiv_extraction", function=extract_arxiv, input=data_query_cfg, output=data_arxiv_search_cfg, skippable=True) task_save_in_df_cfg = Config.configure_task(id="task_save_in_df", function=save_in_dataframe, input=data_arxiv_search_cfg, output=data_raw_df_cfg, skippable=True) task_process_data_cfg = Config.configure_task(id='task_process_data', function=preprocess_data, input=data_raw_df_cfg, output=data_processed_df_cfg, skippable=True) task_extract_keywords_cfg = 
Config.configure_task(id='task_extract_keywords', function=run_keybert, input=[data_processed_df_cfg, data_ngram_min_cfg, data_ngram_max_cfg, data_diversity_algo_cfg, data_top_n_cfg, data_diversity_cfg, data_nr_candidates_cfg], output=data_keywords_df_cfg) task_get_kw_count_cfg = Config.configure_task(id='task_count_keywords', function=get_keyword_value_counts, input=data_keywords_df_cfg, output=data_keywords_count_cfg) # ================= # Pipelines # ================= pipeline_data_prep_cfg = Config.configure_pipeline(id='pipeline_data_prep', task_configs=[task_arxiv_extraction_cfg, task_save_in_df_cfg, task_process_data_cfg, ]) pipeline_keyword_analysis_cfg = Config.configure_pipeline(id='pipeline_keyword_analysis', task_configs=[task_extract_keywords_cfg, task_get_kw_count_cfg ]) # ============= # Scenario # ============= scenario_cfg = Config.configure_scenario(id="scenario", pipeline_configs=[pipeline_data_prep_cfg, pipeline_keyword_analysis_cfg ])
from taipy.gui import Gui
from pages import *

# Route table. A distinct name is used for the dict so it does not shadow the
# `pages` Markdown object pulled in by the wildcard import above.
routes = {
    "/": root_page,
    "pages": pages,
}

if __name__ == "__main__":
    gui = Gui(pages=routes)
    gui.run(title="ui")
from .root import root_page from .pages.pages import pages
""" """ from taipy.gui import Markdown root_page = Markdown("pages/root.md")
""" <|{value}|text|format=%.2f|> """ from taipy.gui import Markdown pages = Markdown("pages/pages/pages.md")
"""Generate a simulated monthly electricity-usage dataset and save it as CSV."""
import csv
import random


def generate_monthly_usage(years, low=1000, high=5000):
    """Return ("YYYY/M", usage) rows, one per month of each year in *years*.

    Each usage value is a uniform random integer in [low, high], matching the
    original simulation range.
    """
    return [
        (f'{year}/{month}', random.randint(low, high))
        for year in years
        for month in range(1, 13)
    ]


def write_usage_csv(path='electricity_usage.csv', years=range(2001, 2011)):
    """Write the simulated rows to *path* with a YearMonth/Usage header."""
    with open(path, 'w', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(['YearMonth', 'Usage'])
        writer.writerows(generate_monthly_usage(years))


# Kept as a module-level side effect, as in the original script.
# Note: the original also built an `electricity_usage` dict for years 1-5 that
# was never read anywhere — that dead code (and its unrelated random draws)
# has been removed.
write_usage_csv()
from taipy.gui import Gui
import taipy as tp
import pandas as pd
import os

# Markdown section: a single chart of Usage per YearMonth.
# NOTE(review): this reads "data.csv" while the generator script writes
# "electricity_usage.csv" — confirm which file name is expected.
section_1 = """
## Power Consumption Dashboard

<|{dataset}|chart|x=YearMonth|y[1]=Usage|color=blue|>
"""

# Loaded once at startup; the chart above is bound to this DataFrame.
dataset = pd.read_csv("data.csv")

rest = tp.Rest()
gui = Gui(page=section_1)

tp.run(
    rest=rest,
    gui=gui,
    title="Taipy Dashboard",
    host='0.0.0.0',
    # NOTE(review): the env value is a string — confirm tp.run accepts a
    # string port or whether it should be wrapped in int(...).
    port=os.environ.get('PORT', '5000'),
    debug=False,
)
""" A multi-page Taipy application, which includes 3 pages: - A rootpage which is shared by other pages. - Two pages named page_1 and page_2. Please refer to https://docs.taipy.io/en/latest/manuals/gui/pages for more details. """ from pages import data_viz, scenario_page, performance from pages.root import * from config.config import * from taipy.gui import Gui import taipy as tp def on_change(state, var_name: str, var_value): state['scenario'].on_change(state, var_name, var_value) pages = { "/": root_page, "data_viz": data_viz, "scenario": scenario_page, "performance": performance } if __name__ == "__main__": tp.Core().run() gui = Gui(pages=pages) gui.run(title="Taipy Application")
"""Taipy Core configuration: data nodes, tasks and the weekly forecast scenario."""
import datetime as dt

import pandas as pd
from taipy import Config, Scope, Frequency

from algos.algos import *

# Source dataset shared by every scenario (GLOBAL scope).
path_to_csv = "data/dataset.csv"

# Datanodes (3.1)
## Input Data Nodes
initial_dataset_cfg = Config.configure_data_node(id="initial_dataset",
                                                 storage_type="csv",
                                                 path=path_to_csv,
                                                 scope=Scope.GLOBAL)

# We assume the current day is the 26th of July 2021.
# This day can be changed to simulate multiple executions of scenarios on different days
day_cfg = Config.configure_data_node(id="day", default_data=dt.datetime(2021, 7, 26))

# Number of future points each model predicts.
n_predictions_cfg = Config.configure_data_node(id="n_predictions", default_data=40)

# Upper bound applied to the predictions.
max_capacity_cfg = Config.configure_data_node(id="max_capacity", default_data=200)

## Remaining Data Nodes
cleaned_dataset_cfg = Config.configure_data_node(id="cleaned_dataset",
                                                 scope=Scope.GLOBAL)
predictions_baseline_cfg = Config.configure_data_node(id="predictions_baseline")
predictions_ml_cfg = Config.configure_data_node(id="predictions_ml")
full_predictions_cfg = Config.configure_data_node(id="full_predictions")
metrics_baseline_cfg = Config.configure_data_node(id="metrics_baseline")
metrics_ml_cfg = Config.configure_data_node(id="metrics_ml")

# Functions (3.2)
# (clean_data, predict_baseline, predict_ml, compute_metrics and
# create_predictions_dataset come from the algos.algos wildcard import above.)

# Tasks (3.3)
# initial dataset -> cleaned dataset (skippable: reuse output when unchanged).
clean_data_task_cfg = Config.configure_task(id="task_clean_data",
                                            function=clean_data,
                                            input=initial_dataset_cfg,
                                            output=cleaned_dataset_cfg,
                                            skippable=True)

# Baseline forecast from the cleaned data and simulation parameters.
predict_baseline_task_cfg = Config.configure_task(id="predict_baseline",
                                                  function=predict_baseline,
                                                  input=[cleaned_dataset_cfg, n_predictions_cfg, day_cfg, max_capacity_cfg],
                                                  output=predictions_baseline_cfg)

# Create the task configuration of the predict_ml function.
## We use the same input and output as the previous predict_baseline task but we change the function
predict_ml_task_cfg = Config.configure_task(id="task_predict_ml",
                                            function=predict_ml,
                                            input=[cleaned_dataset_cfg, n_predictions_cfg, day_cfg, max_capacity_cfg],
                                            output=predictions_ml_cfg)

# Score each forecast against the cleaned dataset.
metrics_baseline_task_cfg = Config.configure_task(id="task_metrics_baseline",
                                                  function=compute_metrics,
                                                  input=[cleaned_dataset_cfg, predictions_baseline_cfg],
                                                  output=metrics_baseline_cfg)

metrics_ml_task_cfg = Config.configure_task(id="task_metrics_ml",
                                            function=compute_metrics,
                                            input=[cleaned_dataset_cfg, predictions_ml_cfg],
                                            output=metrics_ml_cfg)

# Merge both forecasts with the historical data for display.
full_predictions_task_cfg = Config.configure_task(id="task_full_predictions",
                                                  function=create_predictions_dataset,
                                                  input=[predictions_baseline_cfg,
                                                         predictions_ml_cfg,
                                                         day_cfg,
                                                         n_predictions_cfg,
                                                         cleaned_dataset_cfg],
                                                  output=full_predictions_cfg)

# Configure our scenario which is our business problem.
scenario_cfg = Config.configure_scenario_from_tasks(id="scenario",
                                                    task_configs=[clean_data_task_cfg,
                                                                  predict_baseline_task_cfg,
                                                                  predict_ml_task_cfg,
                                                                  metrics_baseline_task_cfg,
                                                                  metrics_ml_task_cfg,
                                                                  full_predictions_task_cfg],
                                                    frequency=Frequency.WEEKLY)

# Persist the configuration so it can be inspected or reloaded.
Config.export('config/config.toml')