text
stringlengths 0
5.92k
|
|---|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Two separate data frames: the temperatures (5 cities x 4 seasons) and the
# season names (only 4 entries), hence the "unbalanced" data set.
data = [
    {
        "Temperatures": [
            [17.2, 27.4, 28.6, 21.5],
            [5.6, 15.1, 20.2, 8.1],
            [26.6, 22.8, 21.8, 24.0],
            [22.3, 15.5, 13.4, 19.6],
            [3.9, 18.9, 25.7, 9.8],
        ],
        "Cities": ["Hanoi", "Paris", "Rio", "Sydney", "Washington"],
    },
    {"Seasons": ["Winter", "Spring", "Summer", "Autumn"]},
]

# "0/..." and "1/..." address columns of the first and second data frame
page = """
# Heatmap - Unbalanced

<|{data}|chart|type=heatmap|z=0/Temperatures|x=1/Seasons|y=0/Cities|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Sample small plot definition
trace = {
    "r": [1, 2, 3, 4, 1],
    "theta": [0, 40, 80, 120, 160],
}

# The same data is used in both traces
data = [trace, trace]

# Naming the subplot is mandatory to get them both in
# the same chart
options = [
    {
        "subplot": "polar",
    },
    {"subplot": "polar2"},
]

layout = {
    # Hide the legend
    "showlegend": False,
    # Restrict the angular values for second trace
    "polar2": {"sector": [30, 130]},
}

md = """
# Polar - Sectors

<|{data}|chart|type=scatterpolar|layout={layout}|options={options}|>
"""

Gui(md).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
# NOTE: the original header mentioned the yfinance package; this script does not use it.
from taipy.gui import Gui

# One marker per data point: color, size and opacity are read from the
# corresponding columns of the data set.
data = {
    "x": [1, 2, 3],
    "y": [1, 2, 3],
    "Colors": ["blue", "green", "red"],
    "Sizes": [20, 40, 30],
    "Opacities": [1, 0.4, 1],
}

marker = {"color": "Colors", "size": "Sizes", "opacity": "Opacities"}

page = """
# Bubble - Simple

<|{data}|chart|mode=markers|x=x|y=y|marker={marker}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Source: www.statista.com (Most used programming languages in 2022)
data = {
    # List of programming languages
    "Language": ["JavaScript", "HTML/CSS", "SQL", "Python", "Typescript", "Java", "Bash/Shell"],
    # Percentage of usage, per language
    "%": [65.36, 55.08, 49.43, 48.07, 34.83, 33.27, 29.07],
}

# Close the shape for a nice-looking stroke
# If the first point is *not* appended to the end of the list,
# then the shape does not look as it is closed.
data["%"].append(data["%"][0])
data["Language"].append(data["Language"][0])

layout = {
    "polar": {
        "radialaxis": {
            # Force the radial range to 0-100
            "range": [0, 100],
        }
    },
    # Hide legend
    "showlegend": False,
}

options = {
    # Fill the trace
    "fill": "toself"
}

md = """
# Radar - Simple

<|{data}|chart|type=scatterpolar|r=%|theta=Language|options={options}|layout={layout}|>
"""

Gui(md).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Initial data set. y = count_of(x)
samples = {"x": ["Apples", "Apples", "Apples", "Oranges", "Bananas", "Oranges"], "y": [5, 10, 3, 8, 5, 2]}

# Create a data set array to allow for two traces
data = [samples, samples]

# Gather those settings in a single dictionary
properties = {
    # 'x' of the first trace is the 'x' data from the first element of data
    "x[1]": "0/x",
    # 'y' of the first trace is the 'y' data from the first element of data
    "y[1]": "0/y",
    # 'x' of the second trace is the 'x' data from the second element of data
    "x[2]": "1/x",
    # 'y' of the second trace is the 'y' data from the second element of data
    "y[2]": "1/y",
    # Data set colors
    "color": ["#cd5c5c", "#505070"],
    # Data set names (for the legend)
    "name": ["Count", "Sum"],
    # Configure the binning functions
    "options": [
        # First trace: count the bins
        {"histfunc": "count"},
        # Second trace: sum the bin occurences
        {"histfunc": "sum"},
    ],
    # Set x axis name
    "layout": {"xaxis": {"title": "Fruit"}},
}

page = """
# Histogram - Binning function

<|{data}|chart|type=histogram|properties={properties}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Skill categories
skills = ["HTML", "CSS", "Java", "Python", "PHP", "JavaScript", "Photoshop"]

data = [
    # Proportion of skills used for Backend development
    {"Backend": [10, 10, 80, 70, 90, 30, 0], "Skills": skills},
    # Proportion of skills used for Frontend development
    {"Frontend": [90, 90, 0, 10, 20, 80, 60], "Skills": skills},
]

# Append first elements to all arrays for a nice stroke
# (both data entries share the same 'skills' list, so one append is enough)
skills.append(skills[0])
data[0]["Backend"].append(data[0]["Backend"][0])
data[1]["Frontend"].append(data[1]["Frontend"][0])

layout = {
    # Force the radial axis displayed range
    "polar": {"radialaxis": {"range": [0, 100]}}
}

# Fill the trace
options = {"fill": "toself"}

# Reflected in the legend
names = ["Backend", "Frontend"]

# To shorten the chart control definition
r = ["0/Backend", "1/Frontend"]
theta = ["0/Skills", "1/Skills"]

page = """
# Radar - Multiple

<|{data}|chart|type=scatterpolar|name={names}|r={r}|theta={theta}|options={options}|layout={layout}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# x values are [-10..10]
x_range = range(-10, 11)

# The data set holds the _x_ series and two distinct series for _y_
data = {
    "x": x_range,
    # y1 = x*x
    "y1": [x * x for x in x_range],
    # y2 = 2-x*x/50
    "y2": [(100 - x * x) / 50 for x in x_range],
}

layout = {
    "yaxis2": {
        # Second axis overlays with the first y axis
        "overlaying": "y",
        # Place the second axis on the right
        "side": "right",
        # and give it a title
        "title": "Second y axis",
    },
    "legend": {
        # Place the legend above chart
        "yanchor": "bottom"
    },
}

page = """
# Basics - Multiple axis

Shared axis:
<|{data}|chart|x=x|y[1]=y1|y[2]=y2|height=300px|>

With two axis:
<|{data}|chart|x=x|y[1]=y1|y[2]=y2|yaxis[2]=y2|layout={layout}|height=300px|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Source https://www.fao.org/faostat/en/#data/SDGB
data = {
    "Country": [
        "Rest of the world",
        "Russian Federation",
        "Brazil",
        "Canada",
        "United States of America",
        "China",
        "Australia",
        "Democratic Republic of the Congo",
        "Indonesia",
        "Peru",
    ],
    "Area": [1445674.66, 815312, 496620, 346928, 309795, 219978, 134005, 126155, 92133.2, 72330.4],
}

page = """
# Pie - Simple

<|{data}|chart|type=pie|values=Area|label=Country|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# You may need to install the scikit-learn package as well.
# -----------------------------------------------------------------------------------------
from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression

from taipy.gui import Gui

# Let scikit-learn generate a random regression problem
n_samples = 300
X, y, coef = make_regression(n_samples=n_samples, n_features=1, n_informative=1, n_targets=1, noise=25, coef=True)
model = LinearRegression().fit(X, y)

x_data = X.flatten()
y_data = y.flatten()
# Predicted values, used as the regression line
predict = model.predict(X)

data = {"x": x_data, "y": y_data, "Regression": predict}

# Plotly's scatter mode for a line trace is "lines" (not "line")
page = """
# Scatter - Regression

<|{data}|chart|x=x|y[1]=y|mode[1]=markers|y[2]=Regression|mode[2]=lines|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# "Texts" holds the hover text for each point ("<br>" produces a line break)
data = {
    "x": [1, 2, 3],
    "y": [1, 2, 3],
    "Texts": ["Blue<br>Small", "Green<br>Medium", "Red<br>Large"],
    "Sizes": [60, 80, 100],
    "Colors": [
        "rgb(93, 164, 214)",
        "rgb(44, 160, 101)",
        "rgb(255, 65, 54)",
    ],
}

marker = {"size": "Sizes", "color": "Colors"}

page = """
# Bubble - Hover text

<|{data}|chart|mode=markers|x=x|y=y|marker={marker}|text=Texts|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

data = {
    "Temperatures": [
        [17.2, 27.4, 28.6, 21.5],
        [5.6, 15.1, 20.2, 8.1],
        [26.6, 22.8, 21.8, 24.0],
        [22.3, 15.5, 13.4, 19.6],
    ],
    "Cities": ["Hanoi", "Paris", "Rio", "Sydney"],
    "Seasons": ["Winter", "Spring", "Summer", "Autumn"],
}

layout = {
    # This array contains the information we want to display in the cells
    # These are filled later
    "annotations": [],
    # No ticks on the x axis, show labels on top the of the chart
    "xaxis": {"ticks": "", "side": "top"},
    # No ticks on the y axis
    # Add a space character for a small margin with the text
    "yaxis": {"ticks": "", "ticksuffix": " "},
}

# Iterate over all (city, season) pairs and create one annotation per cell
for city_idx, city in enumerate(data["Cities"]):
    for season_idx, season in enumerate(data["Seasons"]):
        temperature = data["Temperatures"][city_idx][season_idx]
        # Create the annotation
        annotation = {
            # The name of the season
            "x": season,
            # The name of the city
            "y": city,
            # The temperature, as a formatted string
            "text": f"{temperature}\N{DEGREE SIGN}C",
            # Change the text color depending on the temperature
            # so it results in a better contrast
            "font": {"color": "white" if temperature < 14 else "black"},
            # Remove the annotation arrow
            "showarrow": False,
        }
        # Add the annotation to the layout's annotations array
        layout["annotations"].append(annotation)

page = """
## Heatmap - Annotated

<|{data}|chart|type=heatmap|z=Temperatures|x=Seasons|y=Cities|layout={layout}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui


# Function to plot: x^3/3 - x
def f(x):
    return x * x * x / 3 - x


# x values: [-2.2, ..., 2.2]
# (the comprehension variable is named 'i' so it does not shadow the list 'x')
x = [(i - 10) / 4.5 for i in range(0, 21)]

data = {
    "x": x,
    # y: [f(-2.2), ..., f(2.2)]
    "y": [f(v) for v in x],
}

layout = {
    # Chart title
    "title": "Local extrema",
    "annotations": [
        # Annotation for local maximum (x = -1)
        {"text": "Local <b>max</b>", "font": {"size": 20}, "x": -1, "y": f(-1)},
        # Annotation for local minimum (x = 1)
        {"text": "Local <b>min</b>", "font": {"size": 20}, "x": 1, "y": f(1), "xanchor": "left"},
    ],
}

page = """
# Advanced - Annotations

<|{data}|chart|layout={layout}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

data = {
    "Types": ["Website visit", "Downloads", "Prospects", "Invoice sent", "Closed"],
    "Visits_us": [13873, 10533, 5443, 2703, 908],
    "Visits_eu": [7063, 4533, 3443, 1003, 1208],
    "Visits_ap": [6873, 2533, 3443, 1703, 508],
}

# Columns for each trace
x = ["Visits_us", "Visits_eu", "Visits_ap"]

# Legend text for each trace
names = ["US", "EU", "AP"]

page = """
# Funnel - Multiple traces

<|{data}|chart|type=funnel|x={x}|y=Types|name={names}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# One temperature row per city, one column per season
data = {
    "Temperatures": [
        [17.2, 27.4, 28.6, 21.5],
        [5.6, 15.1, 20.2, 8.1],
        [26.6, 22.8, 21.8, 24.0],
        [22.3, 15.5, 13.4, 19.6],
    ],
    "Cities": ["Hanoi", "Paris", "Rio", "Sydney"],
    "Seasons": ["Winter", "Spring", "Summer", "Autumn"],
}

page = """
# Heatmap - Basic

<|{data}|chart|type=heatmap|z=Temperatures|x=Seasons|y=Cities|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
# Face-to-face bar charts example
import numpy

from taipy.gui import Gui

n_years = 10

proportions_female = numpy.zeros(n_years)
proportions_male = numpy.zeros(n_years)

# Prepare the data set with random variations
proportions_female[0] = 0.4
proportions_male[0] = proportions_female[0] * (1 + numpy.random.normal(0, 0.1))
for i in range(1, n_years):
    mean_i = (0.5 - proportions_female[i - 1]) / 5
    new_value = proportions_female[i - 1] + numpy.random.normal(mean_i, 0.1)
    new_value = min(max(0, new_value), 1)
    proportions_female[i] = new_value
    proportions_male[i] = proportions_female[i] * (1 + numpy.random.normal(0, 0.1))

data = {
    "Hobbies": [
        "Archery",
        "Tennis",
        "Football",
        "Basket",
        "Volley",
        "Golf",
        "Video-Games",
        "Reading",
        "Singing",
        "Music",
    ],
    "Female": proportions_female,
    # Negate these values so they appear to the left side
    "Male": -proportions_male,
}

properties = {
    # Shared y values
    "y": "Hobbies",
    # Bars for the female data set
    "x[1]": "Female",
    "color[1]": "#c26391",
    # Bars for the male data set
    "x[2]": "Male",
    "color[2]": "#5c91de",
    # Both data sets are represented with an horizontal orientation
    "orientation": "h",
    #
    "layout": {
        # This makes left and right bars aligned on the same y value
        "barmode": "overlay",
        # Set a relevant title for the x axis
        "xaxis": {"title": "Gender"},
        # Hide the legend
        "showlegend": False,
        "margin": {"l": 100},
    },
}

page = """
# Bar - Facing

<|{data}|chart|type=bar|properties={properties}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import random

from taipy.gui import Gui

# Random data set
data = [random.random() for _ in range(100)]

# Normalize to show bin probabilities
options = {"histnorm": "probability"}

page = """
# Histogram - Normalized

<|{data}|chart|type=histogram|options={options}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# You may need to install the scikit-learn package as well.
# -----------------------------------------------------------------------------------------
import numpy
import pandas
from sklearn.datasets import make_classification

from taipy.gui import Gui

# Let scikit-learn generate a random 2-class classification problem
features, label = make_classification(n_samples=1000, n_features=2, n_informative=2, n_redundant=0)
random_data = pandas.DataFrame({"x": features[:, 0], "y": features[:, 1], "label": label})

# Split the y values into two series, one per class, padding the other
# class' slots with NaN so each trace only shows its own points.
class_A = numpy.where(random_data["label"] == 0, random_data["y"], numpy.nan)
class_B = numpy.where(random_data["label"] == 1, random_data["y"], numpy.nan)

data = {"x": random_data["x"], "Class A": class_A, "Class B": class_B}

page = """
# Scatter - Classification

<|{data}|chart|mode=markers|x=x|y[1]=Class A|y[2]=Class B|width=60%|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from itertools import accumulate

import numpy as np

from taipy.gui import Gui

grid_size = 10

data = [
    {
        # z is set to:
        # - 0 if row+col is a multiple of 4
        # - 1 if row+col is a multiple of 2
        # - 0.5 otherwise
        "z": [
            [0.0 if (row + col) % 4 == 0 else 1 if (row + col) % 2 == 0 else 0.5 for col in range(grid_size)]
            for row in range(grid_size)
        ]
    },
    {
        # A series of coordinates, growing exponentially
        "x": [0] + list(accumulate(np.logspace(0, 1, grid_size))),
        # A series of coordinates, shrinking exponentially
        "y": [0] + list(accumulate(np.logspace(1, 0, grid_size))),
    },
]

# Axis template used in the layout object
axis_template = {
    # Don't show any line or tick or label
    "showgrid": False,
    "zeroline": False,
    "ticks": "",
    "showticklabels": False,
    "visible": False,
}

layout = {"xaxis": axis_template, "yaxis": axis_template}

options = {
    # Remove the color scale display
    "showscale": False
}

page = """
## Heatmap - Unequal block sizes

<|{data}|chart|type=heatmap|z=0/z|x=1/x|y=1/y|layout={layout}|options={options}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# 9-hole course
n_holes = 9

# Data set
# Each entry holds an array of values. One for each hole, plus one for the total score.
data = {
    # ["Hole1", "Hole2", ..., "Hole9"]
    "Hole": [f"Hole{h}" for h in range(1, n_holes + 1)] + ["Score"],
    # Par for each hole
    "Par": [3, 4, 4, 5, 3, 5, 4, 5, 3] + [None],
    # Score for each hole
    "Score": [4, 4, 5, 4, 4, 5, 4, 5, 4] + [None],
    # Represented as relative values except for the last one
    "M": n_holes * ["relative"] + ["total"],
}

# Compute difference (Score-Par)
data["Diff"] = [data["Score"][i] - data["Par"][i] for i in range(0, n_holes)] + [None]

# Show positive values in red, and negative values in green
options = {"decreasing": {"marker": {"color": "green"}}, "increasing": {"marker": {"color": "red"}}}

page = """
# Waterfall - Styling

<|{data}|chart|type=waterfall|x=Hole|y=Diff|measure=M|options={options}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import random

from taipy.gui import Gui

# Common axis for all data: [1..10]
x = list(range(1, 11))

# Sample data
samples = [5, 7, 8, 4, 5, 9, 8, 8, 6, 5]

# Generate error data
# Error that adds to the input data
error_plus = [3 * random.random() + 0.5 for _ in x]
# Error substracted from to the input data
error_minus = [3 * random.random() + 0.5 for _ in x]

# Upper bound (y + error_plus)
error_upper = [y + e for (y, e) in zip(samples, error_plus)]
# Lower bound (y - error_minus)
error_lower = [y - e for (y, e) in zip(samples, error_minus)]

data = [
    # Trace for samples
    {"x": x, "y": samples},
    # Trace for error range
    {
        # Roundtrip around the error bounds: onward then return
        "x": x + list(reversed(x)),
        # The two error bounds, with lower bound reversed
        "y": error_upper + list(reversed(error_lower)),
    },
]

properties = {
    # Error data
    "x[1]": "1/x",
    "y[1]": "1/y",
    "options[1]": {
        # Shows as filled area
        "fill": "toself",
        "fillcolor": "rgba(70,70,240,0.6)",
        "showlegend": False,
    },
    # Don't show surrounding stroke
    "color[1]": "transparent",
    # Raw data (displayed on top of the error band)
    "x[2]": "0/x",
    "y[2]": "0/y",
    "color[2]": "rgb(140,50,50)",
    # Shown in the legend
    "name[2]": "Input",
}

page = """
# Continuous Error - Simple

<|{data}|chart|properties={properties}|>
"""

Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # ----------------------------------------------------------------------------------------- from taipy.gui import Gui data = { "Products": [ "Nail polish", "Eyebrow pencil", "Rouge", "Lipstick", "Eyeshadows", "Eyeliner", "Foundation", "Lip gloss", "Mascara", ], "USA": [12814, 13012, 11624, 8814, 12998, 12321, 10342, 22998, 11261], "China": [3054, 5067, 7004, 9054, 12043, 15067, 10119, 12043, 10419], "EU": [4376, 3987, 3574, 4376, 4572, 3417, 5231, 4572, 6134], "Africa": [4229, 3932, 5221, 9256, 3308, 5432, 13701, 4008, 18712], } # Order the different traces ys = ["USA", "China", "EU", "Africa"] options = [ # For the USA {"stackgroup": "one", "groupnorm": "percent"}, # For China {"stackgroup": "one"}, # For the EU {"stackgroup": "one"}, # For Africa {"stackgroup": "one"}, ] layout = { # Show all values when hovering on a data point "hovermode": "x unified" } page = """ # Filled Area - Stacked Normalized <|{data}|chart|mode=none|x=Products|y={ys}|options={options}|layout={layout}|> """ Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import math

from taipy.gui import Gui

# One data point for each degree
theta = range(0, 360)


def create_rose(n_petals):
    """Return one radius value per degree, tracing a rose curve with n_petals petals."""
    radii = []
    for angle in theta:
        radii.append(math.cos(math.radians(n_petals * angle)))
    return radii


# Three rose curves sharing the same angular axis
data = dict(theta=theta, r1=create_rose(2), r2=create_rose(3), r3=create_rose(4))

# We want three traces in the same chart
r = ["r1", "r2", "r3"]

layout = {
    # Hide the legend
    "showlegend": False,
    "polar": {
        # Hide the angular axis
        "angularaxis": {"visible": False},
        # Hide the radial axis
        "radialaxis": {"visible": False},
    },
}

page = """
# Polar - Multiple

<|{data}|chart|type=scatterpolar|mode=lines|r={r}|theta=theta|layout={layout}|>
"""

Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # You may need to install the yfinance package as well. # ----------------------------------------------------------------------------------------- from taipy.gui import Gui data = { "x": [1, 2, 3, 4, 5], "y": [10, 7, 4, 1, 5], "Sizes": [20, 30, 40, 50, 30], "Symbols": ["circle-open", "triangle-up", "hexagram", "star-diamond", "circle-cross"], } marker = { "color": "#77A", "size": "Sizes", "symbol": "Symbols", } page = """ # Bubble - Symbols <|{data}|chart|mode=markers|x=x|y=y|marker={marker}|> """ Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # You may need to install the yfinance package as well. # ----------------------------------------------------------------------------------------- import yfinance from taipy.gui import Gui # Extraction of a few days of stock historical data for AAPL using # the yfinance package (see https://pypi.org/project/yfinance/). # The returned value is a Pandas DataFrame. ticker = yfinance.Ticker("AAPL") stock = ticker.history(interval="1d", start="2018-08-18", end="2018-09-10") # Copy the DataFrame index to a new column stock["Date"] = stock.index options = { # Candlesticks that show decreasing values are orange "decreasing": {"line": {"color": "orange"}}, # Candlesticks that show decreasing values are blue "increasing": {"line": {"color": "blue"}}, } layout = { "xaxis": { # Hide the range slider "rangeslider": {"visible": False} } } page = """ # Candlestick - Styling <|{stock}|chart|type=candlestick|x=Date|open=Open|close=Close|low=Low|high=High|options={options}|layout={layout}|> """ Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Conversion funnel: number of visits remaining at each stage
data = {
    "Types": ["Website visit", "Downloads", "Prospects", "Invoice sent", "Closed"],
    "Visits": [13873, 10533, 5443, 2703, 908],
}

marker = {
    # Boxes are filled with a blue gradient color
    "color": ["hsl(210,50%,50%)", "hsl(210,60%,60%)", "hsl(210,70%,70%)", "hsl(210,80%,80%)", "hsl(210,90%,90%)"],
    # Lines get thicker, with an orange-to-green gradient color.
    # Hex colors must carry the leading "#" to be valid CSS colors.
    "line": {"width": [1, 1, 2, 3, 4], "color": ["#f5720a", "#f39c1d", "#f0cc3d", "#aadb12", "#8cb709"]},
}

options = {
    # Lines connecting boxes are thick, dotted and green
    "connector": {"line": {"color": "green", "dash": "dot", "width": 4}}
}

page = """
# Funnel Chart - Custom markers

<|{data}|chart|type=funnel|x=Visits|y=Types|marker={marker}|options={options}|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import random

from taipy.gui import Gui

# Random data set: 500 uniform samples in [0, 1)
data = [random.random() for _ in range(500)]

options = {
    # Enable the cumulative histogram
    "cumulative": {"enabled": True}
}

page = """
# Histogram - Cumulative

<|{data}|chart|type=histogram|options={options}|>
"""

Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # ----------------------------------------------------------------------------------------- from taipy.gui import Gui # Create a star shape data = {"r": [3, 1] * 5 + [3], "theta": list(range(0, 360, 36)) + [0]} options = [ # First plot is filled with a yellow-ish color {"subplot": "polar", "fill": "toself", "fillcolor": "#E4FF87"}, # Second plot is filled with a blue-ish color {"fill": "toself", "subplot": "polar2", "fillcolor": "#709BFF"}, ] layout = { "polar": { # This actually is the default value "angularaxis": { "direction": "counterclockwise", }, }, "polar2": { "angularaxis": { # Rotate the axis 180° (0 is on the left) "rotation": 180, # Orient the axis clockwise "direction": "clockwise", # Show the angles as radians "thetaunit": "radians", }, }, # Hide the legend "showlegend": False, } page = """ # Polar Charts - Direction <|{data}|chart|type=scatterpolar|layout={layout}|options={options}|> """ Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # ----------------------------------------------------------------------------------------- from taipy.gui import Gui value = "XS" page = """ # Slider - List of values <|{value}|slider|lov=XXS;XS;S;M;L;XL;XXL|> """ Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import io
from decimal import Decimal, getcontext

from taipy.gui import Gui, download

# Initial precision
precision = 10


def pi(precision: int) -> list[int]:
    """Compute Pi to the required precision.

    Adapted from https://docs.python.org/3/library/decimal.html
    """
    saved_precision = getcontext().prec  # Save precision
    getcontext().prec = precision
    three = Decimal(3)  # substitute "three=3.0" for regular floats
    lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24
    # Series converges once two successive partial sums are equal at this precision
    while s != lasts:
        lasts = s
        n, na = n + na, na + 8
        d, da = d + da, da + 32
        t = (t * n) / d
        s += t
    # Peel the decimal digits off one at a time (most significant first)
    digits = []
    while s != 0:
        integral = int(s)
        digits.append(integral)
        s = (s - integral) * 10
    getcontext().prec = saved_precision  # Restore the caller's precision
    return digits


# Generate the digits, save them in a CSV file content, and trigger a download action
# so the user can retrieve them
def download_pi(state):
    digits = pi(state.precision)
    # Build the CSV content in memory (no temporary file needed)
    buffer = io.StringIO()
    buffer.write("index,digit\n")
    for i, d in enumerate(digits):
        buffer.write(f"{i},{d}\n")
    download(state, content=bytes(buffer.getvalue(), "UTF-8"), name="pi.csv")


page = """
# File Download - Dynamic content

Precision: <|{precision}|slider|min=2|max=10000|>

<|{None}|file_download|on_action=download_pi|label=Download Pi digits|>
"""

Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # ----------------------------------------------------------------------------------------- from taipy.gui import Gui value = 9 page = """ # Slider - Custom range <|{value}|slider|min=1|max=10|> Value: <|{value}|> """ Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from datetime import date, timedelta

from taipy.gui import Gui

# Create the list of dates (all year 2000)
all_dates = {}      # maps "YYYY/MM/DD" strings to date objects
all_dates_str = []  # ordered list of "YYYY/MM/DD" strings (slider lov)
start_date = date(2000, 1, 1)
end_date = date(2001, 1, 1)
a_date = start_date
while a_date < end_date:
    date_str = a_date.strftime("%Y/%m/%d")
    all_dates_str.append(date_str)
    all_dates[date_str] = a_date
    a_date += timedelta(days=1)

# Initial selection: first and last day
# (index 0, not 1 — index 1 would be the second day of the year)
dates = [all_dates_str[0], all_dates_str[-1]]
# These two variables are used in text controls
start_sel = all_dates[dates[0]]
end_sel = all_dates[dates[1]]


def on_change(state, _, var_value):
    """Keep the two text controls in sync with the slider selection."""
    # Update the text controls
    state.start_sel = all_dates[var_value[0]]
    state.end_sel = all_dates[var_value[1]]


page = """
# Slider - Date range

<|{dates}|slider|lov={all_dates_str}|>

Start: <|{start_sel}|text|format=d MMM|>

End: <|{end_sel}|text|format=d MMM|>
"""

Gui(page).run()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import os
from decimal import Decimal, getcontext
from tempfile import NamedTemporaryFile

from taipy.gui import Gui, download

# Initial precision
precision = 10
# Stores the path to the temporary file
temp_path = None


def pi(precision: int) -> list[int]:
    """Compute Pi to the required precision.

    Adapted from https://docs.python.org/3/library/decimal.html
    """
    saved_precision = getcontext().prec  # Save precision
    getcontext().prec = precision
    three = Decimal(3)  # substitute "three=3.0" for regular floats
    lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24
    # Series converges once two successive partial sums are equal at this precision
    while s != lasts:
        lasts = s
        n, na = n + na, na + 8
        d, da = d + da, da + 32
        t = (t * n) / d
        s += t
    # Peel the decimal digits off one at a time (most significant first)
    digits = []
    while s != 0:
        integral = int(s)
        digits.append(integral)
        s = (s - integral) * 10
    getcontext().prec = saved_precision  # Restore the caller's precision
    return digits


# Remove the temporary file
def clean_up(state):
    os.remove(state.temp_path)


# Generate the digits, save them in a CSV temporary file, then trigger a download action
# for that file.
def download_pi(state):
    digits = pi(state.precision)
    # delete=False so the file survives this block; clean_up removes it after the download
    with NamedTemporaryFile("r+t", suffix=".csv", delete=False) as temp_file:
        state.temp_path = temp_file.name
        temp_file.write("index,digit\n")
        for i, d in enumerate(digits):
            temp_file.write(f"{i},{d}\n")
    download(state, content=temp_file.name, name="pi.csv", on_action=clean_up)


page = """
# File Download - Dynamic content

Precision: <|{precision}|slider|min=2|max=10000|>

<|{None}|file_download|on_action=download_pi|label=Download Pi digits|>
"""

Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # ----------------------------------------------------------------------------------------- from taipy.gui import Gui value = 50 page = """ # Slider - Simple <|{value}|slider|> Value: <|{value}|> """ Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # ----------------------------------------------------------------------------------------- from taipy.gui import Gui value = 40 page = """ # Slider - Vertical <|{value}|slider|orientation=v|> Value: <|{value}|> """ Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # ----------------------------------------------------------------------------------------- # To execute this script, make sure that the taipy-gui package is installed in your # Python environment and run: # python <script> # ----------------------------------------------------------------------------------------- from taipy.gui import Gui # Initial values values = [20, 40, 80] page = """ # Slider - Range <|{values}|slider|> Selection: <|{values}|> """ Gui(page).run()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
import os
import sys

from taipy._cli._base_cli import _CLI
from taipy.core._core_cli import _CoreCLI
from taipy.core._entity._migrate_cli import _MigrateCLI
from taipy.core._version._cli._version_cli import _VersionCLI
from taipy.gui._gui_cli import _GuiCLI

from ._cli._help_cli import _HelpCLI
from ._cli._run_cli import _RunCLI
from ._cli._scaffold_cli import _ScaffoldCLI
from .version import _get_version


def _entrypoint():
    """Entry point of the `taipy` command-line interface.

    Registers every sub-command parser, parses the command line, then delegates
    to the sub-command that matched. If no sub-command handled the invocation,
    falls through to printing the general help.
    """
    # Add the current working directory to path to execute version command on FS repo
    sys.path.append(os.path.normpath(os.getcwd()))

    _CLI._parser.add_argument("-v", "--version", action="store_true", help="Print the current Taipy version and exit.")

    # Register every sub-command parser before parsing
    _RunCLI.create_parser()
    _GuiCLI.create_run_parser()
    _CoreCLI.create_run_parser()
    _VersionCLI.create_parser()
    _ScaffoldCLI.create_parser()
    _MigrateCLI.create_parser()
    _HelpCLI.create_parser()

    args = _CLI._parse()

    # --version short-circuits everything else
    if args.version:
        print(f"Taipy {_get_version()}")
        sys.exit(0)

    # Each parse_arguments() call handles (and may exit on) its own sub-command
    _RunCLI.parse_arguments()
    _HelpCLI.parse_arguments()
    _VersionCLI.parse_arguments()
    _MigrateCLI.parse_arguments()
    _ScaffoldCLI.parse_arguments()

    # No sub-command matched: show the general help (without the internal
    # "help" argument, which _HelpCLI already exposes)
    _CLI._remove_argument("help")
    _CLI._parser.print_help()
|
import json import os def _get_version(): with open(f"{os.path.dirname(os.path.abspath(__file__))}{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" return version_string
|
from importlib.util import find_spec

# Re-export the public API of every installed Taipy sub-package at the
# `taipy` top level. find_spec() makes each import conditional on the
# corresponding optional package actually being installed; the order
# matters (config first, then gui/core, then the packages built on them).
if find_spec("taipy"):
    if find_spec("taipy.config"):
        from taipy.config._init import *  # type: ignore

    if find_spec("taipy.gui"):
        from taipy.gui._init import *  # type: ignore

    if find_spec("taipy.core"):
        from taipy.core._init import *  # type: ignore

    if find_spec("taipy.rest"):
        from taipy.rest._init import *  # type: ignore

    if find_spec("taipy.gui_core"):
        from taipy.gui_core._init import *  # type: ignore

    if find_spec("taipy.enterprise"):
        from taipy.enterprise._init import *  # type: ignore

    if find_spec("taipy._run"):
        from taipy._run import _run as run  # type: ignore
|
import sys
import typing as t

from flask import Flask

from taipy.core import Core
from taipy.gui import Gui
from taipy.rest import Rest

if sys.version_info >= (3, 10):
    from typing import TypeGuard

# Any single Taipy service instance
_AppType = t.Union[Gui, Rest, Core]
_AppTypeT = t.TypeVar("_AppTypeT", Gui, Rest, Core)


def _run(*services: _AppType, **kwargs) -> t.Optional[Flask]:
    """Run one or multiple Taipy services.

    A Taipy service is an instance of a class that runs code as a Web application.

    Parameters:
        *services (Union[`Gui^`, `Rest^`, `Core^`]): Services to run, as separate arguments.<br/>
            If several services are provided, all the services run simultaneously.<br/>
            If this is empty or set to None, this method does nothing.
        **kwargs: Other parameters to provide to the services.
    """
    # Pick out at most one instance of each service kind
    gui = __get_app(services, Gui)
    rest = __get_app(services, Rest)
    core = __get_app(services, Core)

    # When GUI and Core run together, both CLIs must be registered up front
    if gui and core:
        from taipy.core._core_cli import _CoreCLI
        from taipy.gui._gui_cli import _GuiCLI

        _CoreCLI.create_parser()
        _GuiCLI.create_parser()

    # REST requires a Core service; create one implicitly if missing
    if rest or core:
        if not core:
            core = Core()
        core.run()

    # Core alone has no web app to return
    if not rest and not gui:
        return None

    if gui and rest:
        # Share REST's Flask app with the GUI so both run in one server
        gui._set_flask(rest._app)  # type: ignore
        return gui.run(**kwargs)
    else:
        app = rest or gui
        assert app is not None  # Avoid pyright typing error
        return app.run(**kwargs)


# Avoid black adding too many empty lines
# fmt: off
if sys.version_info >= (3, 10):
    # TypeGuard-based variant: lets type checkers narrow the filtered element type
    def __get_app(apps: t.Tuple[_AppType, ...], type_: t.Type[_AppTypeT]) -> t.Optional[_AppType]:
        def filter_isinstance(tl: _AppType) -> TypeGuard[_AppTypeT]:
            return isinstance(tl, type_)

        return next(filter(filter_isinstance, apps), None)
else:
    # Pre-3.10 fallback without TypeGuard
    def __get_app(apps: t.Tuple[_AppType, ...], type_: t.Type[_AppTypeT]) -> t.Optional[_AppType]:
        return next(filter(lambda a: isinstance(a, type_), apps), None)
# fmt: on
|
from ._core import Core from ._entity.submittable import Submittable from .cycle.cycle import Cycle from .cycle.cycle_id import CycleId from .data.data_node import DataNode from .data.data_node_id import DataNodeId from .job.job import Job from .job.job_id import JobId from .job.status import Status from .scenario.scenario import Scenario from .scenario.scenario_id import ScenarioId from .sequence.sequence import Sequence from .sequence.sequence_id import SequenceId from .taipy import ( cancel_job, clean_all_entities_by_version, compare_scenarios, create_global_data_node, create_scenario, delete, delete_job, delete_jobs, exists, export_scenario, get, get_cycles, get_cycles_scenarios, get_data_nodes, get_entities_by_config_id, get_jobs, get_latest_job, get_parents, get_primary, get_primary_scenarios, get_scenarios, get_sequences, get_tasks, is_deletable, is_editable, is_promotable, is_readable, is_submittable, set, set_primary, submit, subscribe_scenario, subscribe_sequence, tag, unsubscribe_scenario, unsubscribe_sequence, untag, ) from .task.task import Task from .task.task_id import TaskId
|
"""# Taipy Core The Taipy Core package is a Python library designed to build powerful, customized, data-driven back-end applications. It provides the tools to help Python developers transform their algorithms into a complete back-end application. More details on the [Taipy Core](../../core/index.md) functionalities are available in the user manual. To build a Taipy Core application, the first step consists of setting up the Taipy configuration to design your application's characteristics and behaviors. Import `Config^` from the `taipy.config^` module, then use the various methods of the `Config^` singleton class to configure your core application. In particular, configure the [data nodes](../../core/config/data-node-config.md), [tasks](../../core/config/task-config.md), and [scenarios](../../core/config/scenario-config.md). Please refer to the [Core configuration user manual](../../core/config/index.md) for more information and detailed examples. Once your application is configured, import module `import taipy as tp` so you can use any function described in the following section on [Functionc](#functions). In particular, the most used functions are `tp.create_scenario()`, `tp.get_scenarios()`, `tp.get_data_nodes()`, `tp.submit()`, used to get, create, and submit entities. !!! Note Taipy Core provides a runnable service, `Core^` that runs as a service in a dedicated thread. The purpose is to have a dedicated thread responsible for dispatching the submitted jobs to an available executor for their execution. In particular, this `Core^` service is automatically run when Core is used with Taipy REST or Taipy GUI. See the [running services](../../run-deploy/run/running_services.md) page of the user manual for more details. """ from ._init import * from ._init_version import _read_version from .common.mongo_default_document import MongoDefaultDocument from .data.data_node_id import Edit from .exceptions import exceptions __version__ = _read_version()
|
import json import os from pathlib import Path def _read_version(): with open(f"{Path(os.path.abspath(__file__)).parent}{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" return version_string
|
from multiprocessing import Lock
from typing import Optional

from taipy.config import Config
from taipy.logger._taipy_logger import _TaipyLogger

from ._backup._backup import _init_backup_file_with_storage_folder
from ._core_cli import _CoreCLI
from ._orchestrator._dispatcher._job_dispatcher import _JobDispatcher
from ._orchestrator._orchestrator import _Orchestrator
from ._orchestrator._orchestrator_factory import _OrchestratorFactory
from ._version._version_manager_factory import _VersionManagerFactory
from .config import CoreSection
from .exceptions.exceptions import CoreServiceIsAlreadyRunning


class Core:
    """
    Core service.

    Runnable service that configures the application, manages its version,
    and starts a job dispatcher. At most one Core service may run per process.
    """

    # Class-level flag: shared by all instances so a second `run()` anywhere
    # in the process is rejected.
    _is_running = False
    # Guards transitions of `_is_running`.
    __lock_is_running = Lock()
    __logger = _TaipyLogger._get_logger()

    # Lazily built on first `run()`; shared across instances.
    _orchestrator: Optional[_Orchestrator] = None
    _dispatcher: Optional[_JobDispatcher] = None

    def __init__(self):
        """
        Initialize a Core service.
        """
        pass

    def run(self, force_restart=False):
        """
        Start a Core service.

        This function checks the configuration, manages application's version,
        and starts a dispatcher and lock the Config.

        Raises:
            CoreServiceIsAlreadyRunning: If a Core service is already running in this process.
        """
        if self.__class__._is_running:
            raise CoreServiceIsAlreadyRunning

        with self.__class__.__lock_is_running:
            self.__class__._is_running = True

        self.__update_core_section()
        self.__manage_version()
        self.__check_and_block_config()

        if self._orchestrator is None:
            self._orchestrator = _OrchestratorFactory._build_orchestrator()

        self.__start_dispatcher(force_restart)

    def stop(self):
        """
        Stop the Core service.
        This function stops the dispatcher and unblock the Config for update.
        """
        Config.unblock_update()

        if self._dispatcher:
            # Keep whatever the factory returns after removal (presumably
            # None once the dispatcher is torn down — TODO confirm).
            self._dispatcher = _OrchestratorFactory._remove_dispatcher()
            self.__logger.info("Core service has been stopped.")

        with self.__class__.__lock_is_running:
            self.__class__._is_running = False

    @staticmethod
    def __update_core_section():
        # Apply CLI arguments (mode, version number, force) on top of the
        # Core section of the applied configuration.
        _CoreCLI.create_parser()
        Config._applied_config._unique_sections[CoreSection.name]._update(_CoreCLI.parse_arguments())

    @staticmethod
    def __manage_version():
        # Resolve the application version, then record the resulting version
        # number back into the Core config section.
        _VersionManagerFactory._build_manager()._manage_version()
        Config._applied_config._unique_sections[CoreSection.name]._update(
            {"version_number": _VersionManagerFactory._build_manager()._get_latest_version()}
        )

    @staticmethod
    def __check_and_block_config():
        # Validate the configuration, then freeze it for the lifetime of the
        # service, and record the storage folder in the backup file.
        Config.check()
        Config.block_update()
        _init_backup_file_with_storage_folder()

    def __start_dispatcher(self, force_restart):
        if dispatcher := _OrchestratorFactory._build_dispatcher(force_restart=force_restart):
            self._dispatcher = dispatcher

        if Config.job_config.is_development:
            # In development mode, pending jobs are executed immediately.
            _Orchestrator._check_and_execute_jobs_if_development_mode()
|
#!/usr/bin/env python """The setup script.""" import json import os from setuptools import find_namespace_packages, find_packages, setup with open("README.md") as readme_file: readme = readme_file.read() with open(f"src{os.sep}taipy{os.sep}core{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" requirements = [ "pyarrow>=10.0.1,<11.0", "networkx>=2.6,<3.0", "openpyxl>=3.1.2,<3.2", "modin[dask]>=0.23.0,<1.0", "pymongo[srv]>=4.2.0,<5.0", "sqlalchemy>=2.0.16,<2.1", "toml>=0.10,<0.11", "taipy-config@git+https://git@github.com/Avaiga/taipy-config.git@develop", ] test_requirements = ["pytest>=3.8"] extras_require = { "fastparquet": ["fastparquet==2022.11.0"], "mssql": ["pyodbc>=4,<4.1"], "mysql": ["pymysql>1,<1.1"], "postgresql": ["psycopg2>2.9,<2.10"], } setup( author="Avaiga", author_email="dev@taipy.io", python_requires=">=3.8", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ], description="A Python library to build powerful and customized data-driven back-end applications.", install_requires=requirements, long_description=readme, long_description_content_type="text/markdown", license="Apache License 2.0", keywords="taipy-core", name="taipy-core", package_dir={"": "src"}, packages=find_namespace_packages(where="src") + find_packages(include=["taipy", "taipy.core", "taipy.core.*"]), include_package_data=True, test_suite="tests", tests_require=test_requirements, url="https://github.com/avaiga/taipy-core", version=version_string, zip_safe=False, extras_require=extras_require, )
|
from typing import Dict

from taipy._cli._base_cli import _CLI

from .config import CoreSection


class _CoreCLI:
    """Command-line interface for Taipy Core application."""

    # argparse specs for the mutually exclusive operating-mode flags.
    __MODE_ARGS: Dict[str, Dict] = {
        "--development": {
            "action": "store_true",
            "dest": "taipy_development",
            "help": """
        When execute Taipy application in `development` mode, all entities from the previous development version
        will be deleted before running new Taipy application.
""",
        },
        "--experiment": {
            "dest": "taipy_experiment",
            "nargs": "?",
            "const": "",
            "metavar": "VERSION",
            "help": """
        When execute Taipy application in `experiment` mode, the current Taipy application is saved to a new version.
        If version name already exists, check for compatibility with current Python Config and run the application.
        Without being specified, the version number will be a random string.
""",
        },
        "--production": {
            "dest": "taipy_production",
            "nargs": "?",
            "const": "",
            "metavar": "VERSION",
            "help": """
        When execute in `production` mode, the current version is used in production. All production versions should have
        the same configuration and share all entities. Without being specified, the latest version is used.
""",
        },
    }

    # argparse specs for the mutually exclusive force/no-force flags.
    __FORCE_ARGS: Dict[str, Dict] = {
        "--force": {
            "dest": "taipy_force",
            "action": "store_true",
            "help": """
        Force override the configuration of the version if existed and run the application. Default to False.
""",
        },
        "--no-force": {
            "dest": "no_taipy_force",
            "action": "store_true",
            "help": "Stop the application if any Config conflict exists.",
        },
    }

    @classmethod
    def create_parser(cls):
        """Register the Core argument group on the shared Taipy CLI parser.

        Mode flags get both plain and `--taipy-` prefixed aliases; force flags
        are registered under the prefixed alias only.
        """
        core_parser = _CLI._add_groupparser("Taipy Core", "Optional arguments for Taipy Core service")

        mode_group = core_parser.add_mutually_exclusive_group()
        for mode_arg, mode_arg_dict in cls.__MODE_ARGS.items():
            mode_group.add_argument(mode_arg, cls.__add_taipy_prefix(mode_arg), **mode_arg_dict)

        force_group = core_parser.add_mutually_exclusive_group()
        for force_arg, force_arg_dict in cls.__FORCE_ARGS.items():
            force_group.add_argument(cls.__add_taipy_prefix(force_arg), **force_arg_dict)

    @classmethod
    def create_run_parser(cls):
        """Register the same flags (unprefixed) on the `taipy run` subcommand."""
        run_parser = _CLI._add_subparser("run", help="Run a Taipy application.")

        mode_group = run_parser.add_mutually_exclusive_group()
        for mode_arg, mode_arg_dict in cls.__MODE_ARGS.items():
            mode_group.add_argument(mode_arg, **mode_arg_dict)

        force_group = run_parser.add_mutually_exclusive_group()
        for force_arg, force_arg_dict in cls.__FORCE_ARGS.items():
            force_group.add_argument(force_arg, **force_arg_dict)

    @classmethod
    def parse_arguments(cls):
        """Parse CLI arguments into a CoreSection-compatible update dict.

        Returns only the keys explicitly set on the command line, so that the
        returned dict can be merged over the existing configuration.
        """
        args = _CLI._parse()
        as_dict = {}

        if args.taipy_development:
            as_dict[CoreSection._MODE_KEY] = CoreSection._DEVELOPMENT_MODE
        elif args.taipy_experiment is not None:
            as_dict[CoreSection._MODE_KEY] = CoreSection._EXPERIMENT_MODE
            # Empty string (flag given without VERSION) means "generate a version".
            as_dict[CoreSection._VERSION_NUMBER_KEY] = args.taipy_experiment
        elif args.taipy_production is not None:
            as_dict[CoreSection._MODE_KEY] = CoreSection._PRODUCTION_MODE
            as_dict[CoreSection._VERSION_NUMBER_KEY] = args.taipy_production

        if args.taipy_force:
            as_dict[CoreSection._FORCE_KEY] = True
        elif args.no_taipy_force:
            as_dict[CoreSection._FORCE_KEY] = False

        return as_dict

    @classmethod
    def __add_taipy_prefix(cls, key: str):
        # "--no-force" -> "--no-taipy-force"; "--force" -> "--taipy-force".
        if key.startswith("--no-"):
            return key[:5] + "taipy-" + key[5:]

        return key[:2] + "taipy-" + key[2:]
|
import json
import re
from datetime import datetime, timedelta


class _Decoder(json.JSONDecoder):
    """JSON decoder that revives `datetime` and `timedelta` objects.

    Counterpart of the matching encoder: dictionaries tagged with a
    "__type__" key of "Datetime" or "Timedelta" are converted back to the
    corresponding Python objects; any other dictionary is returned untouched.
    """

    def __init__(self, *args, **kwargs):
        # Route every decoded JSON object through our hook.
        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)

    def _str_to_timedelta(self, timedelta_str: str) -> timedelta:
        """
        Parse a duration string such as "2h13m" into a `timedelta`.

        Recognized units are days (d), hours (h), minutes (m) and seconds (s);
        each component is optional and may be fractional.

        :param timedelta_str: A string identifying a duration. (eg. 2h13m)
        :return datetime.timedelta: A datetime.timedelta object
        """
        pattern = re.compile(
            r"^((?P<days>[\.\d]+?)d)? *"
            r"((?P<hours>[\.\d]+?)h)? *"
            r"((?P<minutes>[\.\d]+?)m)? *"
            r"((?P<seconds>[\.\d]+?)s)?$"
        )
        match = pattern.match(timedelta_str)
        if match is None:
            raise TypeError("Can not deserialize string into timedelta")
        duration_kwargs = {unit: float(amount) for unit, amount in match.groupdict().items() if amount}
        # mypy has an issue with dynamic keyword parameters, hence the type ignore below.
        return timedelta(**duration_kwargs)  # type: ignore

    def object_hook(self, source):
        """Revive tagged dictionaries; pass everything else through unchanged."""
        tag = source.get("__type__")
        if tag == "Datetime":
            return datetime.fromisoformat(source.get("__value__"))
        if tag == "Timedelta":
            return self._str_to_timedelta(source.get("__value__"))
        return source


def loads(d):
    """Deserialize a JSON string using `_Decoder`."""
    return json.loads(d, cls=_Decoder)
|
import pathlib
from abc import abstractmethod
from typing import Any, Dict, Generic, Iterable, List, Optional, TypeVar, Union

# Persistence-model type (storage representation of an entity).
ModelType = TypeVar("ModelType")
# Domain-entity type handled by the repository.
Entity = TypeVar("Entity")


class _AbstractRepository(Generic[ModelType, Entity]):
    """Interface every repository back end (filesystem, SQL, ...) must implement."""

    @abstractmethod
    def _save(self, entity: Entity):
        """
        Save an entity in the repository.

        Parameters:
            entity: The data from an object.
        """
        raise NotImplementedError

    @abstractmethod
    def _exists(self, entity_id: str) -> bool:
        """
        Check if an entity with id entity_id exists in the repository.

        Parameters:
            entity_id: The entity id, i.e., its primary key.

        Returns:
            True if the entity id exists.
        """
        raise NotImplementedError

    @abstractmethod
    def _load(self, entity_id: str) -> Entity:
        """
        Retrieve the entity data from the repository.

        Parameters:
            entity_id: The entity id, i.e., its primary key.

        Returns:
            An entity.
        """
        raise NotImplementedError

    @abstractmethod
    def _load_all(self, filters: Optional[List[Dict]] = None) -> List[Entity]:
        """
        Retrieve all the entities' data from the repository taking any passed filter into account.

        Parameters:
            filters: An optional list of attribute/value dictionaries to narrow the result.

        Returns:
            A list of entities.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete(self, entity_id: str):
        """
        Delete an entity in the repository.

        Parameters:
            entity_id: The id of the entity to be deleted.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete_all(self):
        """
        Delete all entities from the repository.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete_many(self, ids: Iterable[str]):
        """
        Delete all entities from the list of ids from the repository.

        Parameters:
            ids: List of ids to be deleted.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete_by(self, attribute: str, value: str):
        """
        Delete all entities whose `attribute` equals `value`.

        Parameters:
            attribute: The entity property that is the key to the search.
            value: The value of the attribute that are being searched.
        """
        raise NotImplementedError

    @abstractmethod
    def _search(self, attribute: str, value: Any, filters: Optional[List[Dict]] = None) -> List[Entity]:
        """
        Search for all entities whose `attribute` equals `value`, optionally narrowed by filters.

        Parameters:
            attribute: The entity property that is the key to the search.
            value: The value of the attribute that are being searched.
            filters: An optional list of attribute/value dictionaries to narrow the result.

        Returns:
            A list of entities that match the search criteria.
        """
        raise NotImplementedError

    @abstractmethod
    def _export(self, entity_id: str, folder_path: Union[str, pathlib.Path]):
        """
        Export an entity from the repository.

        Parameters:
            entity_id (str): The id of the entity to be exported.
            folder_path (Union[str, pathlib.Path]): The folder path to export the entity to.
        """
        raise NotImplementedError
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
import json
from datetime import datetime, timedelta
from enum import Enum
from typing import Any


class _Encoder(json.JSONEncoder):
    """JSON encoder with support for `Enum`, `datetime` and `timedelta` values.

    `datetime` and `timedelta` instances are wrapped in a tagged dictionary
    ({"__type__": ..., "__value__": ...}) so a matching decoder can revive
    them; enums are replaced by their `.value`.
    """

    def _timedelta_to_str(self, obj: timedelta) -> str:
        """Format a `timedelta` as "<D>d<H>h<M>m<S>s" using whole units."""
        seconds = obj.total_seconds()
        days, remainder = divmod(seconds, 86400)
        hours, remainder = divmod(remainder, 3600)
        minutes, secs = divmod(remainder, 60)
        return f"{int(days)}d{int(hours)}h{int(minutes)}m{int(secs)}s"

    def default(self, o: Any):
        """Fallback serializer for objects `json` cannot handle natively."""
        if isinstance(o, Enum):
            return o.value
        if isinstance(o, datetime):
            return {"__type__": "Datetime", "__value__": o.isoformat()}
        if isinstance(o, timedelta):
            return {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
        return json.JSONEncoder.default(self, o)


def dumps(d):
    """Serialize an object to JSON using `_Encoder`."""
    return json.dumps(d, cls=_Encoder)
|
from abc import ABC, abstractmethod


class _AbstractConverter(ABC):
    """Interface converting between domain entities and their persistence models."""

    @classmethod
    @abstractmethod
    def _entity_to_model(cls, entity):
        """Convert a domain entity into its persistence model."""
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def _model_to_entity(cls, model):
        """Convert a persistence model back into its domain entity."""
        raise NotImplementedError
|
import dataclasses
import enum
import json
from typing import Any, Dict

from sqlalchemy import Table

from ._decoder import _Decoder
from ._encoder import _Encoder


class _BaseModel:
    """Base class for persistence models backed by a SQLAlchemy table."""

    # Each concrete model sets this to its SQLAlchemy table definition.
    __table__: Table

    def __iter__(self):
        # Allow dict(model): yield (attribute, value) pairs from the instance dict.
        for attr, value in self.__dict__.items():
            yield attr, value

    def to_dict(self) -> Dict[str, Any]:
        """Return the model as a plain dictionary; enum fields are rendered via repr().

        NOTE(review): dataclasses.asdict assumes every concrete model is a
        dataclass — confirm for all subclasses.
        """
        model_dict = {**dataclasses.asdict(self)}
        for k, v in model_dict.items():
            if isinstance(v, enum.Enum):
                model_dict[k] = repr(v)
        return model_dict

    @staticmethod
    def _serialize_attribute(value):
        # JSON-encode with the custom encoder (datetime/timedelta/enum aware).
        return json.dumps(value, ensure_ascii=False, cls=_Encoder)

    @staticmethod
    def _deserialize_attribute(value):
        # HACK: single quotes are swapped for double quotes so repr-like
        # stored strings parse as JSON — assumes the payload contains no
        # legitimate single quotes. TODO confirm.
        if isinstance(value, str):
            return json.loads(value.replace("'", '"'), cls=_Decoder)
        return value

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        # Placeholder: concrete models override to build an instance from a dict.
        pass

    def to_list(self):
        # Placeholder: concrete models override to produce a row-like list.
        pass
|
import sqlite3
from functools import lru_cache
from sqlite3 import Connection

from sqlalchemy.dialects import sqlite
from sqlalchemy.schema import CreateTable

from taipy.config.config import Config

from ...exceptions import MissingRequiredProperty


def dict_factory(cursor, row):
    """sqlite3 row factory that returns each row as a {column_name: value} dict."""
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d


class _SQLConnection:
    """Process-wide holder of the single SQLite connection used by SQL repositories."""

    _connection = None

    @classmethod
    def init_db(cls):
        """Open the SQLite connection once and create all model tables.

        Returns:
            The cached `sqlite3.Connection` (created on first call).
        """
        if cls._connection:
            return cls._connection

        cls._connection = _build_connection()
        cls._connection.row_factory = dict_factory

        # Imported here (not at module top) to avoid circular imports between
        # the repository layer and the model modules.
        from ..._version._version_model import _VersionModel
        from ...cycle._cycle_model import _CycleModel
        from ...data._data_model import _DataNodeModel
        from ...job._job_model import _JobModel
        from ...scenario._scenario_model import _ScenarioModel
        from ...submission._submission_model import _SubmissionModel
        from ...task._task_model import _TaskModel

        # Emit CREATE TABLE IF NOT EXISTS DDL through the SQLAlchemy sqlite
        # dialect so the schema matches each model's __table__ definition.
        cls._connection.execute(
            str(CreateTable(_CycleModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_DataNodeModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_JobModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_ScenarioModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_TaskModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_VersionModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_SubmissionModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )

        return cls._connection


def _build_connection() -> Connection:
    """Build the SQLite connection from the configured repository properties.

    Raises:
        MissingRequiredProperty: If "db_location" is absent from the configuration.
    """
    # Set SQLite threading mode to Serialized, means that threads may share the module, connections and cursors
    # NOTE(review): in CPython, `sqlite3.threadsafety` reports (rather than
    # controls) the thread-safety level — confirm this assignment has effect.
    sqlite3.threadsafety = 3
    properties = Config.core.repository_properties
    try:
        db_location = properties["db_location"]
    except KeyError:
        raise MissingRequiredProperty("Missing property db_location.")

    return __build_connection(db_location)


@lru_cache
def __build_connection(db_location: str):
    # One shared connection per db_location; check_same_thread=False permits
    # use from multiple threads.
    return sqlite3.connect(db_location, check_same_thread=False)
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
from sqlalchemy.orm import declarative_base, registry

# Declarative base class shared by the SQL persistence models.
_SQLBaseModel = declarative_base()
# Mapper registry — presumably used to map model classes to tables imperatively; verify against model modules.
mapper_registry = registry()
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
import os

from taipy.config import Config

__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME = "TAIPY_BACKUP_FILE_PATH"


def _init_backup_file_with_storage_folder():
    """Record the configured storage folder in the backup file.

    The backup file path comes from the TAIPY_BACKUP_FILE_PATH environment
    variable; when it is unset, all backup bookkeeping is a no-op.
    """
    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME):
        with open(preserve_file_path, "a") as f:
            f.write(f"{Config.core.storage_folder}\n")


def _append_to_backup_file(new_file_path: str):
    """Append `new_file_path` to the backup file.

    Paths inside the storage folder are skipped: the whole folder is already
    recorded by `_init_backup_file_with_storage_folder`.
    """
    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME):
        storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
        if not os.path.abspath(new_file_path).startswith(storage_folder):
            with open(preserve_file_path, "a") as f:
                f.write(f"{new_file_path}\n")


def _remove_from_backup_file(to_remove_file_path: str):
    """Remove one occurrence of `to_remove_file_path` from the backup file.

    Best-effort: any I/O failure is silently ignored so backup bookkeeping
    never breaks the caller's delete operation.
    """
    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME, None):
        storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
        if not os.path.abspath(to_remove_file_path).startswith(storage_folder):
            try:
                with open(preserve_file_path, "r+") as f:
                    old_backup = f.read()
                    to_remove_file_path = to_remove_file_path + "\n"

                    # To avoid removing the file path of different data nodes that are pointing
                    # to the same file. We will only replace the file path only once.
                    if old_backup.startswith(to_remove_file_path):
                        new_backup = old_backup.replace(to_remove_file_path, "", 1)
                    else:
                        new_backup = old_backup.replace("\n" + to_remove_file_path, "\n", 1)

                    # Compare by value: the previous `is not` check relied on
                    # CPython returning the same object when str.replace()
                    # changes nothing, which is an implementation detail.
                    if new_backup != old_backup:
                        f.seek(0)
                        f.write(new_backup)
                        f.truncate()
            except Exception:
                pass


def _replace_in_backup_file(old_file_path: str, new_file_path: str):
    """Replace `old_file_path` with `new_file_path` in the backup file."""
    _remove_from_backup_file(old_file_path)
    _append_to_backup_file(new_file_path)
|
from taipy.config import _inject_section
from taipy.config.checker._checker import _Checker
from taipy.config.common.frequency import Frequency  # type: ignore
from taipy.config.common.scope import Scope  # type: ignore
from taipy.config.config import Config  # type: ignore
from taipy.config.global_app.global_app_config import GlobalAppConfig  # type: ignore

from .checkers._config_id_checker import _ConfigIdChecker
from .checkers._core_section_checker import _CoreSectionChecker
from .checkers._data_node_config_checker import _DataNodeConfigChecker
from .checkers._job_config_checker import _JobConfigChecker
from .checkers._scenario_config_checker import _ScenarioConfigChecker
from .checkers._task_config_checker import _TaskConfigChecker
from .core_section import CoreSection
from .data_node_config import DataNodeConfig
from .job_config import JobConfig
from .migration_config import MigrationConfig
from .scenario_config import ScenarioConfig
from .task_config import TaskConfig

# Register each Core-specific section on the Config singleton. Each
# (public_name, implementation) pair below is exposed as a Config method.

_inject_section(
    JobConfig,
    "job_config",
    JobConfig.default_config(),
    [("configure_job_executions", JobConfig._configure)],
    add_to_unconflicted_sections=True,
)
_inject_section(
    DataNodeConfig,
    "data_nodes",
    DataNodeConfig.default_config(),
    [
        ("configure_data_node", DataNodeConfig._configure),
        ("configure_data_node_from", DataNodeConfig._configure_from),
        ("set_default_data_node_configuration", DataNodeConfig._set_default_configuration),
        ("configure_csv_data_node", DataNodeConfig._configure_csv),
        ("configure_json_data_node", DataNodeConfig._configure_json),
        ("configure_parquet_data_node", DataNodeConfig._configure_parquet),
        ("configure_sql_table_data_node", DataNodeConfig._configure_sql_table),
        ("configure_sql_data_node", DataNodeConfig._configure_sql),
        ("configure_mongo_collection_data_node", DataNodeConfig._configure_mongo_collection),
        ("configure_in_memory_data_node", DataNodeConfig._configure_in_memory),
        ("configure_pickle_data_node", DataNodeConfig._configure_pickle),
        ("configure_excel_data_node", DataNodeConfig._configure_excel),
        ("configure_generic_data_node", DataNodeConfig._configure_generic),
    ],
)
_inject_section(
    TaskConfig,
    "tasks",
    TaskConfig.default_config(),
    [
        ("configure_task", TaskConfig._configure),
        ("set_default_task_configuration", TaskConfig._set_default_configuration),
    ],
)
_inject_section(
    ScenarioConfig,
    "scenarios",
    ScenarioConfig.default_config(),
    [
        ("configure_scenario", ScenarioConfig._configure),
        ("set_default_scenario_configuration", ScenarioConfig._set_default_configuration),
    ],
)
_inject_section(
    MigrationConfig,
    "migration_functions",
    MigrationConfig.default_config(),
    [("add_migration_function", MigrationConfig._add_migration_function)],
    add_to_unconflicted_sections=True,
)
_inject_section(
    CoreSection,
    "core",
    CoreSection.default_config(),
    [("configure_core", CoreSection._configure)],
    add_to_unconflicted_sections=True,
)

# Register the checkers run by Config.check().
_Checker.add_checker(_ConfigIdChecker)
_Checker.add_checker(_CoreSectionChecker)
_Checker.add_checker(_DataNodeConfigChecker)
_Checker.add_checker(_JobConfigChecker)
# We don't need to add _MigrationConfigChecker because it is run only when the Core service is run.
_Checker.add_checker(_TaskConfigChecker)
_Checker.add_checker(_ScenarioConfigChecker)
|
import collections.abc
from copy import deepcopy
from typing import Any, Callable, Dict, Optional, Union

from taipy.config._config import _Config
from taipy.config.common._template_handler import _TemplateHandler as _tpl
from taipy.config.config import Config
from taipy.config.section import Section
from taipy.config.unique_section import UniqueSection


class MigrationConfig(UniqueSection):
    """
    Configuration fields needed to register migration functions from an old version to newer one.

    Attributes:
        migration_fcts (Dict[str, Dict[str, Callable]]): A dictionary that maps the version
            that entities are migrated from to the migration functions.
        **properties (dict[str, Any]): A dictionary of additional properties.
    """

    name = "VERSION_MIGRATION"

    _MIGRATION_FCTS_KEY = "migration_fcts"

    def __init__(
        self,
        migration_fcts: Dict[str, Dict[str, Callable]],
        **properties,
    ):
        # Shape: {target_version: {config_id: migration_callable}}.
        self.migration_fcts = migration_fcts
        super().__init__(**properties)

    def __copy__(self):
        # Deep-copy both the function mapping and the properties so the copy
        # can be mutated independently of the original.
        return MigrationConfig(
            deepcopy(self.migration_fcts),
            **deepcopy(self._properties),
        )

    def _clean(self):
        # Reset the section to an empty state.
        self.migration_fcts.clear()
        self._properties.clear()

    def __getattr__(self, item: str) -> Optional[Any]:
        # Unknown attributes fall back to the properties dict with
        # environment-variable templates resolved; missing keys yield None.
        return _tpl._replace_templates(self._properties.get(item))  # type: ignore

    @classmethod
    def default_config(cls):
        """Return a default, empty migration configuration."""
        return MigrationConfig({})

    def _to_dict(self):
        return {
            self._MIGRATION_FCTS_KEY: self.migration_fcts,
            **self._properties,
        }

    @classmethod
    def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config]):
        return MigrationConfig(**as_dict)

    def _update(self, as_dict, default_section=None):
        # Merge nested mappings recursively rather than overwriting whole
        # sub-dictionaries, so updates from several calls accumulate.
        def deep_update(d, u):
            for k, v in u.items():
                if isinstance(v, collections.abc.Mapping):
                    d[k] = deep_update(d.get(k, {}), v)
                else:
                    d[k] = v
            return d

        # NOTE(review): pop() without a default raises KeyError when
        # "migration_fcts" is absent from as_dict — confirm callers always
        # provide it.
        migration_fcts = as_dict.pop(self._MIGRATION_FCTS_KEY)
        deep_update(self.migration_fcts, migration_fcts)
        self._properties.update(as_dict)

    @staticmethod
    def _add_migration_function(
        target_version: str,
        config: Union[Section, str],
        migration_fct: Callable,
        **properties,
    ):
        """Add a migration function for a Configuration to migrate entities to the target version.

        Parameters:
            target_version (str): The production version that entities are migrated to.
            config (Union[Section, str]): The configuration or the `id` of the config that needs to migrate.
            migration_fct (Callable): Migration function that takes an entity as input and returns
                a new entity that is compatible with the target production version.
            **properties (Dict[str, Any]): A keyworded variable length list of additional arguments.

        Returns:
            `MigrationConfig^`: The Migration configuration.
        """
        config_id = config if isinstance(config, str) else config.id
        migration_fcts = {target_version: {config_id: migration_fct}}

        section = MigrationConfig(
            migration_fcts,
            **properties,
        )
        # Registering merges this section into the global Config singleton.
        Config._register(section)
        return Config.unique_sections[MigrationConfig.name]
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from copy import copy
from typing import Any, Dict, Optional, Union

from taipy.config import Config
from taipy.config._config import _Config
from taipy.config.common._template_handler import _TemplateHandler as _tpl
from taipy.config.unique_section import UniqueSection

from ..exceptions.exceptions import ModeNotAvailable


class JobConfig(UniqueSection):
    """
    Configuration fields related to the jobs' executions.

    Parameters:
        mode (str): The Taipy operating mode. By default, the "development" mode is set for testing and debugging the
            executions of jobs. A "standalone" mode is also available.
        **properties (dict[str, any]): A dictionary of additional properties.
    """

    name = "JOB"

    _MODE_KEY = "mode"
    _STANDALONE_MODE = "standalone"
    _DEVELOPMENT_MODE = "development"
    _DEFAULT_MODE = _DEVELOPMENT_MODE
    _MODES = [_STANDALONE_MODE, _DEVELOPMENT_MODE]

    def __init__(self, mode: Optional[str] = None, **properties):
        self.mode = mode or self._DEFAULT_MODE
        # Mode-specific defaults overlaid with the caller-supplied properties.
        self._config = self._create_config(self.mode, **properties)
        super().__init__(**properties)

    def __copy__(self):
        return JobConfig(self.mode, **copy(self._properties))

    def __getattr__(self, key: str) -> Optional[Any]:
        # Unknown attributes resolve against the mode-specific config dict.
        return self._config.get(key, None)

    @classmethod
    def default_config(cls):
        """Return the default job configuration (development mode)."""
        return JobConfig(cls._DEFAULT_MODE)

    def _clean(self):
        # Reset to the default mode and its default config.
        self.mode = self._DEFAULT_MODE
        self._config = self._create_config(self.mode)

    def _to_dict(self):
        as_dict = {}
        if self.mode is not None:
            as_dict[self._MODE_KEY] = self.mode
        as_dict.update(self._config)
        return as_dict

    @classmethod
    def _from_dict(cls, config_as_dict: Dict[str, Any], id=None, config: Optional[_Config] = None):
        mode = config_as_dict.pop(cls._MODE_KEY, None)
        job_config = JobConfig(mode, **config_as_dict)
        return job_config

    def _update(self, as_dict: Dict[str, Any], default_section=None):
        mode = _tpl._replace_templates(as_dict.pop(self._MODE_KEY, self.mode))
        if self.mode != mode:
            # Mode changed: rebuild the config from that mode's defaults.
            self.mode = mode
            self._config = self._create_config(self.mode, **as_dict)
        if self._config is not None:
            self._update_config(as_dict)

    @staticmethod
    def _configure(
        mode: Optional[str] = None, max_nb_of_workers: Optional[Union[int, str]] = None, **properties
    ) -> "JobConfig":
        """Configure job execution.

        Parameters:
            mode (Optional[str]): The job execution mode.
                Possible values are: *"standalone"* (the default value) or *"development"*.
            max_nb_of_workers (Optional[int, str]): Parameter used only in default *"standalone"* mode.
                This indicates the maximum number of jobs able to run in parallel.<br/>
                The default value is 1.<br/>
                A string can be provided to dynamically set the value using an environment
                variable. The string must follow the pattern: `ENV[&lt;env_var&gt;]` where
                `&lt;env_var&gt;` is the name of an environment variable.
            **properties (dict[str, any]): A keyworded variable length list of additional arguments.

        Returns:
            The new job execution configuration.
        """
        section = JobConfig(mode, max_nb_of_workers=max_nb_of_workers, **properties)
        Config._register(section)
        return Config.unique_sections[JobConfig.name]

    def _update_config(self, config_as_dict: Dict[str, Any]):
        for k, v in config_as_dict.items():
            # Convert each value to the type of its mode-default counterpart
            # (falling back to str when there is no default).
            type_to_convert = type(self.get_default_config(self.mode).get(k, None)) or str
            value = _tpl._replace_templates(v, type_to_convert)
            if value is not None:
                self._config[k] = value

    @property
    def is_standalone(self) -> bool:
        """True if the config is set to standalone mode"""
        return self.mode == self._STANDALONE_MODE

    @property
    def is_development(self) -> bool:
        """True if the config is set to development mode"""
        return self.mode == self._DEVELOPMENT_MODE

    @classmethod
    def get_default_config(cls, mode: str) -> Dict[str, Any]:
        """Return the default config dict for the given mode.

        Raises:
            ModeNotAvailable: If `mode` is neither "standalone" nor "development".
        """
        # Bug fix: the previous implementation tested `cls.is_standalone`,
        # which retrieves the *property object* on the class (always truthy),
        # so every mode received the standalone defaults and ModeNotAvailable
        # was unreachable. Dispatch on the `mode` argument instead.
        if mode == cls._STANDALONE_MODE:
            return {"max_nb_of_workers": 1}
        if mode == cls._DEVELOPMENT_MODE:
            return {}
        raise ModeNotAvailable(mode)

    @classmethod
    def _create_config(cls, mode, **properties):
        # Mode defaults first; explicit properties win.
        return {**cls.get_default_config(mode), **properties}
|
from typing import Set

from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..core_section import CoreSection


class _CoreSectionChecker(_ConfigChecker):
    """Checker validating the unique Core section of the configuration."""

    # Repository back ends the Core section knows how to build.
    _ACCEPTED_REPOSITORY_TYPES: Set[str] = {"filesystem", "sql"}

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run the Core-section checks and return the populated collector."""
        core_section = self._config._unique_sections.get(CoreSection.name)
        if core_section:
            self._check_repository_type(core_section)
        return self._collector

    def _check_repository_type(self, core_section: CoreSection):
        """Emit a warning when the configured repository type is unsupported."""
        repository_type = core_section.repository_type
        if repository_type in self._ACCEPTED_REPOSITORY_TYPES:
            return
        self._warning(
            core_section._REPOSITORY_TYPE_KEY,
            repository_type,
            f'Value "{repository_type}" for field {core_section._REPOSITORY_TYPE_KEY} of the CoreSection is not '
            f'supported. Default value "filesystem" is applied.',
        )
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..._version._version_manager_factory import _VersionManagerFactory
from ..migration_config import MigrationConfig


class _MigrationConfigChecker(_ConfigChecker):
    """Checker validating the migration-functions section of the configuration."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run every migration-config check and return the issue collector."""
        if migration_config := self._config._unique_sections.get(MigrationConfig.name):
            self._check_if_entity_property_key_used_is_predefined(migration_config)

            migration_fcts = migration_config.migration_fcts
            # Consistently reuse the local binding instead of re-reading the attribute.
            for target_version, migration_functions in migration_fcts.items():
                for config_id, migration_function in migration_functions.items():
                    self._check_callable(target_version, config_id, migration_function)

            self._check_valid_production_version(migration_fcts)
            self._check_migration_from_productions_to_productions_exist(migration_fcts)
        return self._collector

    def _check_callable(self, target_version, config_id, migration_function):
        """Report an error when a registered migration function is not callable."""
        if not callable(migration_function):
            self._error(
                MigrationConfig._MIGRATION_FCTS_KEY,
                migration_function,
                f"The migration function of config `{config_id}` from version {target_version}"
                f" must be populated with Callable value.",
            )

    def _check_valid_production_version(self, migration_fcts):
        """Every migration target version must be a registered production version."""
        # Hoisted out of the loop: the original rebuilt the version manager and
        # reloaded the production versions once per target version.
        production_versions = _VersionManagerFactory._build_manager()._get_production_versions()
        for target_version in migration_fcts.keys():
            if target_version not in production_versions:
                self._error(
                    MigrationConfig._MIGRATION_FCTS_KEY,
                    target_version,
                    "The target version for a migration function must be a production version.",
                )

    def _check_migration_from_productions_to_productions_exist(self, migration_fcts):
        """Emit an info issue for each consecutive production-version pair with no migration."""
        production_versions = _VersionManagerFactory._build_manager()._get_production_versions()
        for source_version, target_version in zip(production_versions[:-1], production_versions[1:]):
            if not migration_fcts.get(target_version):
                self._info(
                    "target_version",
                    None,
                    f'There is no migration function from production version "{source_version}"'
                    f' to version "{target_version}".',
                )
|
from typing import Dict

from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..data_node_config import DataNodeConfig
from ..job_config import JobConfig


class _JobConfigChecker(_ConfigChecker):
    """Checker validating the job execution section of the configuration."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run the job-config checks and return the issue collector."""
        job_config = self._config._unique_sections.get(JobConfig.name)
        if job_config:
            data_node_configs = self._config._sections[DataNodeConfig.name]
            self._check_multiprocess_mode(job_config, data_node_configs)
        return self._collector

    def _check_multiprocess_mode(self, job_config: JobConfig, data_node_configs: Dict[str, DataNodeConfig]):
        """Report an error for each in-memory data node when the job mode is standalone."""
        if not job_config.is_standalone:
            return
        for cfg_id, data_node_config in data_node_configs.items():
            if data_node_config.storage_type != DataNodeConfig._STORAGE_TYPE_VALUE_IN_MEMORY:
                continue
            self._error(
                DataNodeConfig._STORAGE_TYPE_KEY,
                data_node_config.storage_type,
                f"DataNode `{cfg_id}`: In-memory storage type can ONLY be used in "
                f"{JobConfig._DEVELOPMENT_MODE} mode.",
            )
|
from collections import defaultdict
from typing import Dict, List

from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector


class _ConfigIdChecker(_ConfigChecker):
    """Checker ensuring each config_id is used by at most one configuration type."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Group entity types by config_id and report ids used by more than one type.

        The "default" config_id is legitimately shared by every section type and is
        therefore exempt from the duplication check.
        """
        # defaultdict removes the manual "first occurrence" branch of the
        # original implementation.
        existing_config_ids: Dict[str, List[str]] = defaultdict(list)
        for entity_type, section_dictionary in self._config._sections.items():
            for config_id in section_dictionary.keys():
                existing_config_ids[config_id].append(entity_type)

        for config_id, entity_types in existing_config_ids.items():
            if config_id != "default" and len(entity_types) > 1:
                self._error(
                    "config_id",
                    config_id,
                    f"`{config_id}` is used as the config_id of multiple configurations {str(entity_types)}",
                )
        return self._collector
|
from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..data_node_config import DataNodeConfig
from ..task_config import TaskConfig


class _TaskConfigChecker(_ConfigChecker):
    """Checker validating every task configuration section."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run all checks on each non-default task configuration."""
        for task_config_id, task_config in self._config._sections[TaskConfig.name].items():
            if task_config_id == _Config.DEFAULT_KEY:
                continue
            self._check_existing_config_id(task_config)
            self._check_if_entity_property_key_used_is_predefined(task_config)
            self._check_existing_function(task_config_id, task_config)
            self._check_inputs(task_config_id, task_config)
            self._check_outputs(task_config_id, task_config)
        return self._collector

    def _check_inputs(self, task_config_id: str, task_config: TaskConfig):
        """Validate that every declared input is a data node configuration."""
        self._check_children(
            TaskConfig, task_config_id, task_config._INPUT_KEY, task_config.input_configs, DataNodeConfig
        )

    def _check_outputs(self, task_config_id: str, task_config: TaskConfig):
        """Validate that every declared output is a data node configuration."""
        self._check_children(
            TaskConfig, task_config_id, task_config._OUTPUT_KEY, task_config.output_configs, DataNodeConfig
        )

    def _check_existing_function(self, task_config_id: str, task_config: TaskConfig):
        """The task function must be set, and when set, must be callable."""
        function = task_config.function
        if not function:
            self._error(
                task_config._FUNCTION,
                function,
                f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` is empty.",
            )
        elif not callable(function):
            self._error(
                task_config._FUNCTION,
                function,
                f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` must be"
                f" populated with Callable value.",
            )
|
from dataclasses import dataclass, field
from datetime import datetime
from functools import singledispatch
from typing import Any, Optional

from ..common._repr_enum import _ReprEnum
from ..exceptions.exceptions import InvalidEventAttributeName, InvalidEventOperation


class EventOperation(_ReprEnum):
    """Enum representing a type of operation performed on a Core entity.

    `EventOperation` is used as an attribute of the `Event^` object to describe the
    operation performed on an entity.<br>
    The possible operations are `CREATION`, `UPDATE`, `DELETION`, or `SUBMISSION`.
    """

    CREATION = 1
    UPDATE = 2
    DELETION = 3
    SUBMISSION = 4


class EventEntityType(_ReprEnum):
    """Enum representing an entity type.

    `EventEntityType` is used as an attribute of the `Event^` object to describe
    an entity that was changed.<br>
    The possible types are `CYCLE`, `SCENARIO`, `SEQUENCE`, `TASK`, `DATA_NODE`, `JOB`
    or `SUBMISSION`.
    """

    CYCLE = 1
    SCENARIO = 2
    SEQUENCE = 3
    TASK = 4
    DATA_NODE = 5
    JOB = 6
    SUBMISSION = 7


# Operations for which an Event must NOT carry an attribute name (enforced in
# Event.__post_init__).
_NO_ATTRIBUTE_NAME_OPERATIONS = set([EventOperation.CREATION, EventOperation.DELETION, EventOperation.SUBMISSION])
# Entity types that cannot be the subject of a SUBMISSION operation (enforced in
# Event.__post_init__).
_UNSUBMITTABLE_ENTITY_TYPES = (EventEntityType.CYCLE, EventEntityType.DATA_NODE, EventEntityType.JOB)
# Maps a lowercase entity keyword (presumably the entity-id prefix — confirm with
# callers) to its EventEntityType.
_ENTITY_TO_EVENT_ENTITY_TYPE = {
    "scenario": EventEntityType.SCENARIO,
    "sequence": EventEntityType.SEQUENCE,
    "task": EventEntityType.TASK,
    "data": EventEntityType.DATA_NODE,
    "job": EventEntityType.JOB,
    "cycle": EventEntityType.CYCLE,
    "submission": EventEntityType.SUBMISSION,
}


@dataclass(frozen=True)
class Event:
    """Event object used to notify any change in the Core service.

    An event holds the necessary attributes to identify the change.

    Attributes:
        entity_type (EventEntityType^): Type of the entity that was changed (`DataNode^`,
            `Scenario^`, `Cycle^`, etc. ).
        entity_id (Optional[str]): Unique identifier of the entity that was changed.
        operation (EventOperation^): Enum describing the operation (among `CREATION`, `UPDATE`,
            `DELETION`, and `SUBMISSION`) that was performed on the entity.
        attribute_name (Optional[str]): Name of the entity's attribute changed. Only relevant for
            `UPDATE` operations
        attribute_value (Optional[Any]): Value of the entity's attribute changed. Only relevant for
            `UPDATE` operations
        metadata (dict): A dict of additional metadata about the source of this event
        creation_date (datetime): Date and time of the event creation.
    """

    entity_type: EventEntityType
    operation: EventOperation
    entity_id: Optional[str] = None

    attribute_name: Optional[str] = None
    attribute_value: Optional[Any] = None

    metadata: dict = field(default_factory=dict)
    # Set in __post_init__ (the dataclass is frozen, so init=False + __setattr__).
    creation_date: datetime = field(init=False)

    def __post_init__(self):
        # Creation date
        super().__setattr__("creation_date", datetime.now())

        # Check operation:
        if self.entity_type in _UNSUBMITTABLE_ENTITY_TYPES and self.operation == EventOperation.SUBMISSION:
            raise InvalidEventOperation

        # Check attribute name:
        if self.operation in _NO_ATTRIBUTE_NAME_OPERATIONS and self.attribute_name is not None:
            raise InvalidEventAttributeName


@singledispatch
def _make_event(
    entity: Any,
    operation: EventOperation,
    /,
    attribute_name: Optional[str] = None,
    attribute_value: Optional[Any] = None,
    **kwargs,
) -> Event:
    """Helper function to make an event for this entity with the given `EventOperation^` type.

    In case of `EventOperation.UPDATE^` events, an attribute name and value must be given.

    Parameters:
        entity (Any): The entity object to generate an event for.
        operation (EventOperation^): The operation of the event. The possible values are:
            <ul>
            <li>CREATION</li>
            <li>UPDATE</li>
            <li>DELETION</li>
            <li>SUBMISSION</li>
            </ul>
        attribute_name (Optional[str]): The name of the updated attribute for a `EventOperation.UPDATE`.
            This argument is always given in case of an UPDATE.
        attribute_value (Optional[Any]): The value of the updated attribute for a `EventOperation.UPDATE`.
            This argument is always given in case of an UPDATE.
        **kwargs (dict[str, any]): Any extra information that would be passed to the metadata event.
            Note: you should pass only simple types: str, float, double as values."""
    # Fallback when no @_make_event.register overload matches the entity type.
    raise Exception(f"Unexpected entity type: {type(entity)}")
|
""" Package for notifications about changes on `Core^` service entities. The Core service generates `Event^` objects to track changes on entities. These events are then relayed to a `Notifier^`, which handles the dispatch to consumers interested in specific event topics. To subscribe, a consumer needs to invoke the `Notifier.register()^` method. This call will yield a `RegistrationId^` and a dedicated event queue for receiving notifications. To handle notifications, an event consumer (e.g., the `CoreEventConsumerBase^` object) must be instantiated with an associated event queue. """ from ._registration import _Registration from ._topic import _Topic from .core_event_consumer import CoreEventConsumerBase from .event import _ENTITY_TO_EVENT_ENTITY_TYPE, Event, EventEntityType, EventOperation, _make_event from .notifier import Notifier, _publish_event from .registration_id import RegistrationId
|
from typing import NewType

# Distinct string type for the identifiers returned when registering an event consumer.
RegistrationId = NewType("RegistrationId", str)
RegistrationId.__doc__ = """Registration identifier.

It can be used to instantiate a `CoreEventConsumerBase^`."""
|
from queue import SimpleQueue
from typing import Optional
from uuid import uuid4

from ._topic import _Topic
from .event import EventEntityType, EventOperation
from .registration_id import RegistrationId


class _Registration:
    """A consumer registration: a unique id, a topic of interest, and an event queue."""

    _ID_PREFIX = "REGISTRATION"
    __SEPARATOR = "_"

    def __init__(
        self,
        entity_type: Optional[EventEntityType] = None,
        entity_id: Optional[str] = None,
        operation: Optional[EventOperation] = None,
        attribute_name: Optional[str] = None,
    ):
        # The topic narrows which events are delivered to this registration's queue.
        self.registration_id: str = self._new_id()
        self.topic: _Topic = _Topic(entity_type, entity_id, operation, attribute_name)
        self.queue: SimpleQueue = SimpleQueue()

    @staticmethod
    def _new_id() -> RegistrationId:
        """Generate a unique registration identifier."""
        parts = [_Registration._ID_PREFIX, str(uuid4())]
        return RegistrationId(_Registration.__SEPARATOR.join(parts))

    def __hash__(self) -> int:
        # Registrations hash by their unique id so they can be stored in sets/dicts.
        return hash(self.registration_id)
|
from typing import Optional

from ..exceptions.exceptions import InvalidEventOperation
from .event import _UNSUBMITTABLE_ENTITY_TYPES, EventEntityType, EventOperation


class _Topic:
    """Describes the set of events a registration is interested in.

    An attribute left to None acts as a wildcard when topics are compared.
    """

    def __init__(
        self,
        entity_type: Optional[EventEntityType] = None,
        entity_id: Optional[str] = None,
        operation: Optional[EventOperation] = None,
        attribute_name: Optional[str] = None,
    ):
        self.entity_type = entity_type
        self.entity_id = entity_id
        self.operation = self.__preprocess_operation(operation, self.entity_type)
        self.attribute_name = self.__preprocess_attribute_name(attribute_name, self.operation)

    @classmethod
    def __preprocess_attribute_name(
        cls, attribute_name: Optional[str] = None, operation: Optional[EventOperation] = None
    ) -> Optional[str]:
        # The stricter Event validation is deliberately left disabled here (kept from
        # the original source): a topic may carry an attribute name with any operation.
        # if operation in _NO_ATTRIBUTE_NAME_OPERATIONS and attribute_name is not None:
        #     raise InvalidEventAttributeName
        return attribute_name

    @classmethod
    def __preprocess_operation(
        cls, operation: Optional[EventOperation] = None, entity_type: Optional[EventEntityType] = None
    ) -> Optional[EventOperation]:
        """Raise InvalidEventOperation when SUBMISSION is requested for an unsubmittable entity type."""
        if (
            entity_type
            and operation
            and entity_type in _UNSUBMITTABLE_ENTITY_TYPES
            and operation == EventOperation.SUBMISSION
        ):
            raise InvalidEventOperation
        return operation

    def __hash__(self):
        return hash((self.entity_type, self.entity_id, self.operation, self.attribute_name))

    def __eq__(self, __value):
        # Fix: comparing against a non-_Topic object used to raise AttributeError.
        # Returning NotImplemented lets Python fall back to its default comparison.
        if not isinstance(__value, _Topic):
            return NotImplemented
        return (
            self.entity_type == __value.entity_type
            and self.entity_id == __value.entity_id
            and self.operation == __value.operation
            and self.attribute_name == __value.attribute_name
        )
|
import abc
import threading
from queue import Empty, SimpleQueue

from .event import Event


class CoreEventConsumerBase(threading.Thread):
    """Abstract base class for implementing a Core event consumer.

    This class provides a framework for consuming events from a queue in a separate thread.
    It should be subclassed, and the `process_event` method should be implemented to define
    the custom logic for handling incoming events.

    Example usage:

    ```python
    class MyEventConsumer(CoreEventConsumerBase):
        def process_event(self, event: Event):
            # Custom event processing logic here
            print(f"Received event created at : {event.creation_date}")
            pass

    consumer = MyEventConsumer("consumer_1", event_queue)
    consumer.start()
    # ...
    consumer.stop()
    ```

    Attributes:
        queue (SimpleQueue): The queue from which events will be consumed.
    """

    def __init__(self, registration_id: str, queue: SimpleQueue):
        """Initialize a CoreEventConsumerBase instance.

        Parameters:
            registration_id (str): A unique identifier of the registration. You can get a
                registration id invoking `Notifier.register()^` method.
            queue (SimpleQueue): The queue from which events will be consumed. You can get a
                queue invoking `Notifier.register()^` method.
        """
        super().__init__(name=f"Thread-Taipy-Core-Consumer-{registration_id}")
        self.daemon = True
        self.queue = queue
        self.__stop_requested = False
        self._TIMEOUT = 0.1

    def start(self):
        """Start the event consumer thread."""
        self.__stop_requested = False
        super().start()

    def stop(self):
        """Request the event consumer thread to stop after its current poll."""
        self.__stop_requested = True

    def run(self):
        # Poll with a short timeout so a stop() request is honored promptly.
        while not self.__stop_requested:
            try:
                event: Event = self.queue.get(block=True, timeout=self._TIMEOUT)
            except Empty:
                continue
            self.process_event(event)

    @abc.abstractmethod
    def process_event(self, event: Event):
        """This method should be overridden in subclasses to define how events are processed."""
        raise NotImplementedError
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._filesystem_repository import _FileSystemRepository from ._cycle_converter import _CycleConverter from ._cycle_model import _CycleModel class _CycleFSRepository(_FileSystemRepository): def __init__(self): super().__init__(model_type=_CycleModel, converter=_CycleConverter, dir_name="cycles")
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from .._repository._sql_repository import _SQLRepository from ._cycle_converter import _CycleConverter from ._cycle_model import _CycleModel class _CycleSQLRepository(_SQLRepository): def __init__(self): super().__init__(model_type=_CycleModel, converter=_CycleConverter)
|
from typing import NewType

# Distinct string type for cycle identifiers.
CycleId = NewType("CycleId", str)
CycleId.__doc__ = """Type that holds a `Cycle^` identifier."""
|
from dataclasses import dataclass
from typing import Any, Dict

from sqlalchemy import JSON, Column, Enum, String, Table

from taipy.config.common.frequency import Frequency

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from .cycle_id import CycleId


@mapper_registry.mapped
@dataclass
class _CycleModel(_BaseModel):
    """Persistence model of a cycle, mapped to the "cycle" SQL table."""

    __table__ = Table(
        "cycle",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("name", String),
        Column("frequency", Enum(Frequency)),
        Column("properties", JSON),
        # Dates are persisted as strings (ISO-8601, per _CycleConverter in this package).
        Column("creation_date", String),
        Column("start_date", String),
        Column("end_date", String),
    )
    id: CycleId
    name: str
    frequency: Frequency
    properties: Dict[str, Any]
    creation_date: str
    start_date: str
    end_date: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a raw dict of serialized field values."""
        return _CycleModel(
            id=data["id"],
            name=data["name"],
            # Frequency is stored via repr(); rebuild the enum member from it.
            frequency=Frequency._from_repr(data["frequency"]),
            properties=_BaseModel._deserialize_attribute(data["properties"]),
            creation_date=data["creation_date"],
            start_date=data["start_date"],
            end_date=data["end_date"],
        )

    def to_list(self):
        """Return the serialized field values in table-column order (inverse of from_dict)."""
        return [
            self.id,
            self.name,
            repr(self.frequency),
            _BaseModel._serialize_attribute(self.properties),
            self.creation_date,
            self.start_date,
            self.end_date,
        ]
|
from datetime import datetime

from .._repository._abstract_converter import _AbstractConverter
from ..cycle._cycle_model import _CycleModel
from ..cycle.cycle import Cycle


class _CycleConverter(_AbstractConverter):
    """Converts between `Cycle` entities and their persistence model."""

    @classmethod
    def _entity_to_model(cls, cycle: Cycle) -> _CycleModel:
        """Serialize a cycle; datetime attributes become ISO-8601 strings."""
        return _CycleModel(
            id=cycle.id,
            name=cycle._name,
            frequency=cycle._frequency,
            properties=cycle._properties.data,
            creation_date=cycle._creation_date.isoformat(),
            start_date=cycle._start_date.isoformat(),
            end_date=cycle._end_date.isoformat(),
        )

    @classmethod
    def _model_to_entity(cls, model: _CycleModel) -> Cycle:
        """Rebuild a cycle entity, parsing ISO-8601 date strings back to datetimes."""
        parse = datetime.fromisoformat
        return Cycle(
            id=model.id,
            name=model.name,
            frequency=model.frequency,
            properties=model.properties,
            creation_date=parse(model.creation_date),
            start_date=parse(model.start_date),
            end_date=parse(model.end_date),
        )
|
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ..cycle._cycle_manager import _CycleManager
from ._cycle_fs_repository import _CycleFSRepository
from ._cycle_sql_repository import _CycleSQLRepository


class _CycleManagerFactory(_ManagerFactory):
    """Builds the cycle manager wired to the configured repository backend."""

    __REPOSITORY_MAP = {"default": _CycleFSRepository, "sql": _CycleSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_CycleManager]:  # type: ignore
        """Return the cycle manager class, preferring the enterprise implementation when installed."""
        if not cls._using_enterprise():
            manager = _CycleManager
            repository_factory = cls._build_repository
        else:
            manager = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager", "_CycleManager"
            )  # type: ignore
            repository_factory = _load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager_factory", "_CycleManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository class matching the configured repository type."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
|
from abc import abstractmethod
from importlib import util
from typing import Type

from taipy.config import Config

from ._manager import _Manager


class _ManagerFactory:
    """Base factory for entity managers; detects the enterprise edition and maps
    the configured repository type to a repository class."""

    _TAIPY_ENTERPRISE_MODULE = "taipy.enterprise"
    _TAIPY_ENTERPRISE_CORE_MODULE = _TAIPY_ENTERPRISE_MODULE + ".core"

    @classmethod
    @abstractmethod
    def _build_manager(cls) -> Type[_Manager]:  # type: ignore
        """Build and return the manager class. Must be overridden by subclasses."""
        raise NotImplementedError

    @classmethod
    def _build_repository(cls):
        """Build the repository instance. Must be overridden by subclasses."""
        raise NotImplementedError

    @classmethod
    def _using_enterprise(cls) -> bool:
        """Return True when the taipy.enterprise package is importable."""
        return util.find_spec(cls._TAIPY_ENTERPRISE_MODULE) is not None

    @staticmethod
    def _get_repository_with_repo_map(repository_map: dict):
        """Pick the repository class for Config.core.repository_type, falling back to "default"."""
        repository_type = Config.core.repository_type
        return repository_map.get(repository_type, repository_map.get("default"))
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
import pathlib
# NOTE(review): `metadata` appears unused in this module — confirm before removing.
from importlib import metadata
from typing import Dict, Generic, Iterable, List, Optional, TypeVar, Union

from taipy.logger._taipy_logger import _TaipyLogger

from .._entity._entity_ids import _EntityIds
from .._repository._abstract_repository import _AbstractRepository
from ..exceptions.exceptions import ModelNotFound
from ..notification import Event, EventOperation, Notifier

# Type of the entity handled by a concrete manager subclass.
EntityType = TypeVar("EntityType")


class _Manager(Generic[EntityType]):
    """Generic base class for entity managers: CRUD over a repository plus event publication.

    Subclasses bind `_repository`; when a subclass also defines `_EVENT_ENTITY_TYPE`,
    deletion operations publish `DELETION` events through the `Notifier`.
    """

    _repository: _AbstractRepository
    _logger = _TaipyLogger._get_logger()
    _ENTITY_NAME: str = "Entity"

    @classmethod
    def _delete_all(cls):
        """
        Deletes all entities.
        """
        cls._repository._delete_all()
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,
                    EventOperation.DELETION,
                    metadata={"delete_all": True},
                )
            )

    @classmethod
    def _delete_many(cls, ids: Iterable):
        """
        Deletes entities by a list of ids.
        """
        cls._repository._delete_many(ids)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            # One DELETION event is published per deleted id.
            for entity_id in ids:
                # NOTE(review): the metadata key "delete_all" looks copy-pasted from
                # `_delete_all`; a per-id deletion is not a delete-all. Confirm with
                # event consumers before changing the key.
                Notifier.publish(
                    Event(
                        cls._EVENT_ENTITY_TYPE,  # type: ignore
                        EventOperation.DELETION,
                        entity_id=entity_id,
                        metadata={"delete_all": True},
                    )
                )

    @classmethod
    def _delete_by_version(cls, version_number: str):
        """
        Deletes entities by version number.
        """
        cls._repository._delete_by(attribute="version", value=version_number)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,  # type: ignore
                    EventOperation.DELETION,
                    metadata={"delete_by_version": version_number},
                )
            )

    @classmethod
    def _delete(cls, id):
        """
        Deletes an entity by id.
        """
        cls._repository._delete(id)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,
                    EventOperation.DELETION,
                    entity_id=id,
                )
            )

    @classmethod
    def _set(cls, entity: EntityType):
        """
        Save or update an entity.
        """
        cls._repository._save(entity)

    @classmethod
    def _get_all(cls, version_number: Optional[str] = "all") -> List[EntityType]:
        """
        Returns all entities.
        """
        # NOTE(review): `version_number` is accepted but not used here — no version
        # filter is applied. Presumably version-aware subclasses build their own
        # filters (see _VersionMixin) — confirm before relying on this parameter.
        filters: List[Dict] = []
        return cls._repository._load_all(filters)

    @classmethod
    def _get_all_by(cls, filters: Optional[List[Dict]] = None) -> List[EntityType]:
        """
        Returns all entities based on a criteria.
        """
        if not filters:
            filters = []
        return cls._repository._load_all(filters)

    @classmethod
    def _get(cls, entity: Union[str, EntityType], default=None) -> EntityType:
        """
        Returns an entity by id or reference.

        Returns `default` (and logs an error) when the entity cannot be found.
        """
        entity_id = entity if isinstance(entity, str) else entity.id  # type: ignore
        try:
            return cls._repository._load(entity_id)
        except ModelNotFound:
            cls._logger.error(f"{cls._ENTITY_NAME} not found: {entity_id}")
            return default

    @classmethod
    def _exists(cls, entity_id: str) -> bool:
        """
        Returns True if the entity id exists.
        """
        return cls._repository._exists(entity_id)

    @classmethod
    def _delete_entities_of_multiple_types(cls, _entity_ids: _EntityIds):
        """
        Deletes entities of multiple types.
        """
        # Imported locally to avoid circular imports between the managers.
        from ..cycle._cycle_manager_factory import _CycleManagerFactory
        from ..data._data_manager_factory import _DataManagerFactory
        from ..job._job_manager_factory import _JobManagerFactory
        from ..scenario._scenario_manager_factory import _ScenarioManagerFactory
        from ..sequence._sequence_manager_factory import _SequenceManagerFactory
        from ..submission._submission_manager_factory import _SubmissionManagerFactory
        from ..task._task_manager_factory import _TaskManagerFactory

        _CycleManagerFactory._build_manager()._delete_many(_entity_ids.cycle_ids)
        _SequenceManagerFactory._build_manager()._delete_many(_entity_ids.sequence_ids)
        _ScenarioManagerFactory._build_manager()._delete_many(_entity_ids.scenario_ids)
        _TaskManagerFactory._build_manager()._delete_many(_entity_ids.task_ids)
        _JobManagerFactory._build_manager()._delete_many(_entity_ids.job_ids)
        _DataManagerFactory._build_manager()._delete_many(_entity_ids.data_node_ids)
        _SubmissionManagerFactory._build_manager()._delete_many(_entity_ids.submission_ids)

    @classmethod
    def _export(cls, id: str, folder_path: Union[str, pathlib.Path]):
        """
        Export an entity.
        """
        return cls._repository._export(id, folder_path)

    @classmethod
    def _is_editable(cls, entity: Union[EntityType, _EntityIds]) -> bool:
        # Base implementation: everything is editable; subclasses may restrict.
        return True

    @classmethod
    def _is_readable(cls, entity: Union[EntityType, _EntityIds]) -> bool:
        # Base implementation: everything is readable; subclasses may restrict.
        return True
|
from datetime import datetime
from typing import Any

from taipy.config import Config
from taipy.config._config import _Config

from .._entity._entity import _Entity


class _Version(_Entity):
    """A snapshot of the application configuration, identified by a version id."""

    def __init__(self, id: str, config: Any) -> None:
        self.id: str = id
        self.config: _Config = config
        # Timestamp of when this version object was created.
        self.creation_date: datetime = datetime.now()

    def __eq__(self, other):
        """Two versions are equal when their ids and serialized configurations match.

        Note: as before, defining __eq__ without __hash__ leaves _Version unhashable.
        """
        # Fix: comparing against a non-_Version object used to raise AttributeError;
        # returning NotImplemented lets Python fall back to its default comparison.
        if not isinstance(other, _Version):
            return NotImplemented
        return self.id == other.id and self.__is_config_eq(other)

    def __is_config_eq(self, other):
        # Compare configurations through their canonical serialized form.
        return Config._serializer._str(self.config) == Config._serializer._str(other.config)
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
from typing import Dict, List

from .._version._version_manager_factory import _VersionManagerFactory


class _VersionMixin:
    """Mixin adding version-aware filter helpers to entity managers."""

    _version_manager = _VersionManagerFactory._build_manager()

    @classmethod
    def __fetch_version_number(cls, version_number) -> List:
        """Resolve `version_number` to a (possibly empty) list of concrete version numbers."""
        # Fix: reuse the manager already built at class level instead of rebuilding
        # it through the factory on every call.
        version_number = cls._version_manager._replace_version_number(version_number)
        # Fix: isinstance against the concrete `list` type instead of typing.List.
        if not isinstance(version_number, list):
            version_number = [version_number] if version_number else []
        return version_number

    @classmethod
    def _build_filters_with_version(cls, version_number) -> List[Dict]:
        """Build repository filters restricting results to the resolved version(s)."""
        filters = []
        if versions := cls.__fetch_version_number(version_number):
            filters = [{"version": version} for version in versions]
        return filters

    @classmethod
    def _get_latest_version(cls):
        """Return the latest known version number."""
        return cls._version_manager._get_latest_version()
|
from .._manager._manager_factory import _ManagerFactory
from ..common import _utils
from ._version_fs_repository import _VersionFSRepository
from ._version_manager import _VersionManager
from ._version_sql_repository import _VersionSQLRepository


class _VersionManagerFactory(_ManagerFactory):
    """Builds the version manager wired to the configured repository backend."""

    __REPOSITORY_MAP = {"default": _VersionFSRepository, "sql": _VersionSQLRepository}

    @classmethod
    def _build_manager(cls) -> _VersionManager:  # type: ignore
        """Return the version manager class, preferring the enterprise implementation when installed."""
        if not cls._using_enterprise():
            manager = _VersionManager
            repository_factory = cls._build_repository
        else:
            manager = _utils._load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version._version_manager", "_VersionManager"
            )  # type: ignore
            repository_factory = _utils._load_fct(
                cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version._version_manager_factory", "_VersionManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        """Instantiate the repository class matching the configured repository type."""
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
|
from abc import ABC, abstractmethod


class _VersionRepositoryInterface(ABC):
    """Interface for repositories tracking latest, development, and production versions."""

    # Keys under which implementations store each category of version number.
    _LATEST_VERSION_KEY = "latest_version"
    _DEVELOPMENT_VERSION_KEY = "development_version"
    _PRODUCTION_VERSION_KEY = "production_version"

    @abstractmethod
    def _set_latest_version(self, version_number):
        """Record `version_number` as the latest version."""
        raise NotImplementedError

    @abstractmethod
    def _get_latest_version(self):
        """Return the latest version number."""
        raise NotImplementedError

    @abstractmethod
    def _set_development_version(self, version_number):
        """Record `version_number` as the development version."""
        raise NotImplementedError

    @abstractmethod
    def _get_development_version(self):
        """Return the development version number."""
        raise NotImplementedError

    @abstractmethod
    def _set_production_version(self, version_number):
        """Add `version_number` to the production versions."""
        raise NotImplementedError

    @abstractmethod
    def _get_production_versions(self):
        """Return the list of production version numbers."""
        raise NotImplementedError

    @abstractmethod
    def _delete_production_version(self, version_number):
        """Remove `version_number` from the production versions."""
        raise NotImplementedError
|
import json
from typing import List

from taipy.logger._taipy_logger import _TaipyLogger

from .._repository._filesystem_repository import _FileSystemRepository
from ..exceptions.exceptions import VersionIsNotProductionVersion
from ._version_converter import _VersionConverter
from ._version_model import _VersionModel
from ._version_repository_interface import _VersionRepositoryInterface


class _VersionFSRepository(_FileSystemRepository, _VersionRepositoryInterface):
    # Filesystem implementation of the version repository: version entities are
    # stored by the base repository under the "version" directory, while the
    # latest/development/production pointers live in a single "version.json" file.

    def __init__(self):
        super().__init__(model_type=_VersionModel, converter=_VersionConverter, dir_name="version")

    @property
    def _version_file_path(self):
        # Path of the JSON file holding the latest/development/production pointers.
        return super()._storage_folder / "version.json"

    def _delete_all(self):
        # Remove all stored version entities, then drop the pointer file as well.
        super()._delete_all()
        if self._version_file_path.exists():
            self._version_file_path.unlink()

    def _set_latest_version(self, version_number):
        # Update the "latest" pointer; create version.json with empty
        # development/production entries when it does not exist yet.
        if self._version_file_path.exists():
            with open(self._version_file_path, "r") as f:
                file_content = json.load(f)
            file_content[self._LATEST_VERSION_KEY] = version_number
        else:
            self.dir_path.mkdir(parents=True, exist_ok=True)
            file_content = {
                self._LATEST_VERSION_KEY: version_number,
                self._DEVELOPMENT_VERSION_KEY: "",
                self._PRODUCTION_VERSION_KEY: [],
            }

        self._version_file_path.write_text(
            json.dumps(
                file_content,
                ensure_ascii=False,
                indent=0,
            )
        )

    def _get_latest_version(self) -> str:
        with open(self._version_file_path, "r") as f:
            file_content = json.load(f)
        return file_content[self._LATEST_VERSION_KEY]

    def _set_development_version(self, version_number):
        # The development version is always also promoted to latest.
        if self._version_file_path.exists():
            with open(self._version_file_path, "r") as f:
                file_content = json.load(f)
            file_content[self._DEVELOPMENT_VERSION_KEY] = version_number
            file_content[self._LATEST_VERSION_KEY] = version_number
        else:
            self.dir_path.mkdir(parents=True, exist_ok=True)
            file_content = {
                self._LATEST_VERSION_KEY: version_number,
                self._DEVELOPMENT_VERSION_KEY: version_number,
                self._PRODUCTION_VERSION_KEY: [],
            }

        self._version_file_path.write_text(
            json.dumps(
                file_content,
                ensure_ascii=False,
                indent=0,
            )
        )

    def _get_development_version(self) -> str:
        with open(self._version_file_path, "r") as f:
            file_content = json.load(f)
        return file_content[self._DEVELOPMENT_VERSION_KEY]

    def _set_production_version(self, version_number):
        # Promoting to production also makes the version the latest one.
        # Re-promoting an existing production version only logs an info message.
        if self._version_file_path.exists():
            with open(self._version_file_path, "r") as f:
                file_content = json.load(f)
            file_content[self._LATEST_VERSION_KEY] = version_number
            if version_number not in file_content[self._PRODUCTION_VERSION_KEY]:
                file_content[self._PRODUCTION_VERSION_KEY].append(version_number)
            else:
                _TaipyLogger._get_logger().info(f"Version {version_number} is already a production version.")
        else:
            self.dir_path.mkdir(parents=True, exist_ok=True)
            file_content = {
                self._LATEST_VERSION_KEY: version_number,
                self._DEVELOPMENT_VERSION_KEY: "",
                self._PRODUCTION_VERSION_KEY: [version_number],
            }

        self._version_file_path.write_text(
            json.dumps(
                file_content,
                ensure_ascii=False,
                indent=0,
            )
        )

    def _get_production_versions(self) -> List[str]:
        with open(self._version_file_path, "r") as f:
            file_content = json.load(f)
        return file_content[self._PRODUCTION_VERSION_KEY]

    def _delete_production_version(self, version_number):
        # A missing pointer file is reported the same way as an unknown version.
        try:
            with open(self._version_file_path, "r") as f:
                file_content = json.load(f)
            if version_number not in file_content[self._PRODUCTION_VERSION_KEY]:
                raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
            file_content[self._PRODUCTION_VERSION_KEY].remove(version_number)
            self._version_file_path.write_text(
                json.dumps(
                    file_content,
                    ensure_ascii=False,
                    indent=0,
                )
            )
        except FileNotFoundError:
            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
|
from sqlalchemy.dialects import sqlite

from .._repository._sql_repository import _SQLRepository
from ..exceptions.exceptions import ModelNotFound, VersionIsNotProductionVersion
from ._version_converter import _VersionConverter
from ._version_model import _VersionModel
from ._version_repository_interface import _VersionRepositoryInterface


class _VersionSQLRepository(_SQLRepository, _VersionRepositoryInterface):
    # SQL implementation of the version repository. The latest/development/production
    # pointers are stored as boolean flags on each version row.

    def __init__(self):
        super().__init__(model_type=_VersionModel, converter=_VersionConverter)

    def _set_latest_version(self, version_number):
        # Demote the current "latest" row (if any) before flagging the new one.
        # NOTE(review): this select is rendered without the sqlite dialect, unlike
        # the getters below — confirm both render compatible SQL for the backend.
        if old_latest := self.db.execute(str(self.table.select().filter_by(is_latest=True))).fetchone():
            old_latest = self.model_type.from_dict(old_latest)
            old_latest.is_latest = False
            self._update_entry(old_latest)

        version = self.__get_by_id(version_number)
        version.is_latest = True
        self._update_entry(version)

    def _get_latest_version(self):
        if latest := self.db.execute(
            str(self.table.select().filter_by(is_latest=True).compile(dialect=sqlite.dialect()))
        ).fetchone():
            return latest["id"]
        raise ModelNotFound(self.model_type, "")

    def _set_development_version(self, version_number):
        # Demote the current development row, flag the new one, and promote it to latest.
        if old_development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
            old_development = self.model_type.from_dict(old_development)
            old_development.is_development = False
            self._update_entry(old_development)

        version = self.__get_by_id(version_number)
        version.is_development = True
        self._update_entry(version)

        self._set_latest_version(version_number)

    def _get_development_version(self):
        if development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
            return development["id"]
        raise ModelNotFound(self.model_type, "")

    def _set_production_version(self, version_number):
        # A production version is also promoted to latest.
        version = self.__get_by_id(version_number)
        version.is_production = True
        self._update_entry(version)

        self._set_latest_version(version_number)

    def _get_production_versions(self):
        if productions := self.db.execute(
            str(self.table.select().filter_by(is_production=True).compile(dialect=sqlite.dialect())),
        ).fetchall():
            return [p["id"] for p in productions]
        return []

    def _delete_production_version(self, version_number):
        # Raises when the version is unknown or not currently in production.
        version = self.__get_by_id(version_number)
        if not version or not version.is_production:
            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
        version.is_production = False
        self._update_entry(version)

    def __get_by_id(self, version_id):
        # Render the query with the sqlite dialect and execute it with the id
        # supplied as a bound parameter; returns None when no row matches.
        query = str(self.table.select().filter_by(id=version_id).compile(dialect=sqlite.dialect()))
        entry = self.db.execute(query, [version_id]).fetchone()
        return self.model_type.from_dict(entry) if entry else None
|
from datetime import datetime

from taipy.config import Config

from .._repository._abstract_converter import _AbstractConverter
from .._version._version import _Version
from .._version._version_model import _VersionModel


class _VersionConverter(_AbstractConverter):
    """Translate between ``_Version`` entities and ``_VersionModel`` records."""

    @classmethod
    def _entity_to_model(cls, version: _Version) -> _VersionModel:
        # Configuration is serialized to JSON and the creation date to ISO-8601.
        serialized_config = Config._to_json(version.config)
        return _VersionModel(
            id=version.id,
            config=serialized_config,
            creation_date=version.creation_date.isoformat(),
        )

    @classmethod
    def _model_to_entity(cls, model: _VersionModel) -> _Version:
        entity = _Version(id=model.id, config=Config._from_json(model.config))
        entity.creation_date = datetime.fromisoformat(model.creation_date)
        return entity
|
from dataclasses import dataclass
from typing import Any, Dict

from sqlalchemy import Boolean, Column, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry


@mapper_registry.mapped
@dataclass
class _VersionModel(_BaseModel):
    # SQLAlchemy-mapped record for a version entity. The latest/development/
    # production pointers are stored as boolean flags on each row.

    __table__ = Table(
        "version",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("config", String),  # config is stored as a JSON string
        Column("creation_date", String),
        Column("is_production", Boolean),
        Column("is_development", Boolean),
        Column("is_latest", Boolean),
    )
    id: str
    config: Dict[str, Any]
    creation_date: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        # Build a model from a row/dict; the flag columns may be absent and
        # then default to None.
        model = _VersionModel(
            id=data["id"],
            config=data["config"],
            creation_date=data["creation_date"],
        )
        model.is_production = data.get("is_production")  # type: ignore
        model.is_development = data.get("is_development")  # type: ignore
        model.is_latest = data.get("is_latest")  # type: ignore
        return model

    def to_list(self):
        # Column order must match the ``__table__`` definition above.
        return [
            self.id,
            self.config,
            self.creation_date,
            self.is_production,
            self.is_development,
            self.is_latest,
        ]
|
from typing import Callable, List

from taipy.config.config import Config

from .._entity._reload import _Reloader
from ..config import MigrationConfig
from ._version_manager_factory import _VersionManagerFactory


def _migrate_entity(entity):
    """Apply the registered migration functions to bring *entity* up to the latest version.

    Migrations only run when the latest version is a production version; the
    entity is returned unchanged otherwise.
    """
    latest_version = _VersionManagerFactory._build_manager()._get_latest_version()
    if latest_version not in _VersionManagerFactory._build_manager()._get_production_versions():
        return entity

    migration_fcts = __get_migration_fcts_to_latest(entity._version, entity.config_id)
    if not migration_fcts:
        return entity

    # Run the migration chain without triggering entity reloads mid-migration.
    with _Reloader():
        for fct in migration_fcts:
            entity = fct(entity)

    entity._version = latest_version
    return entity


def __get_migration_fcts_to_latest(source_version: str, config_id: str) -> List[Callable]:
    """Collect the migration functions registered between *source_version* and the latest one."""
    production_versions = _VersionManagerFactory._build_manager()._get_production_versions()
    if source_version not in production_versions:
        # Unknown source version: nothing to migrate.
        return []

    migration_fcts_to_latest: List[Callable] = []
    for version in production_versions[production_versions.index(source_version) + 1 :]:
        fct = Config.unique_sections[MigrationConfig.name].migration_fcts.get(version, {}).get(config_id)
        if fct:
            migration_fcts_to_latest.append(fct)
    return migration_fcts_to_latest
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
import os import sys def _vt_codes_enabled_in_windows_registry(): """ Check the Windows Registry to see if VT code handling has been enabled by default, see https://superuser.com/a/1300251/447564. """ try: # winreg is only available on Windows. import winreg except ImportError: return False else: try: reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Console") reg_key_value, _ = winreg.QueryValueEx(reg_key, "VirtualTerminalLevel") except FileNotFoundError: return False else: return reg_key_value == 1 def _is_color_supported(): """ Return True if the running system's terminal supports color, and False otherwise. """ is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() return is_a_tty and ( sys.platform != "win32" or "ANSICON" in os.environ or "WT_SESSION" in os.environ # Windows Terminal supports VT codes. or os.environ.get("TERM_PROGRAM") == "vscode" # VSCode's built-in terminal supports colors. or _vt_codes_enabled_in_windows_registry() ) class _Bcolors: PURPLE = "\033[95m" if _is_color_supported() else "" BLUE = "\033[94m" if _is_color_supported() else "" CYAN = "\033[96m" if _is_color_supported() else "" GREEN = "\033[92m" if _is_color_supported() else "" BOLD = "\033[1m" if _is_color_supported() else "" UNDERLINE = "\033[4m" if _is_color_supported() else "" END = "\033[0m" if _is_color_supported() else ""
|
from .exceptions import *
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. from __future__ import annotations import abc from typing import Any, Callable, List, Optional, Set, Union import networkx as nx from ..common._listattributes import _ListAttributes from ..common._utils import _Subscriber from ..data.data_node import DataNode from ..job.job import Job from ..task.task import Task from ._dag import _DAG class Submittable: """Instance of an entity that can be submitted for execution. A submittable holds functions that can be used to build the execution directed acyclic graph. Attributes: subscribers (List[Callable]): The list of callbacks to be called on `Job^`'s status change. """ def __init__(self, subscribers: Optional[List[_Subscriber]] = None): self._subscribers = _ListAttributes(self, subscribers or list()) @abc.abstractmethod def submit( self, callbacks: Optional[List[Callable]] = None, force: bool = False, wait: bool = False, timeout: Optional[Union[float, int]] = None, ): raise NotImplementedError def get_inputs(self) -> Set[DataNode]: """Return the set of input data nodes of the submittable entity. Returns: The set of input data nodes. """ dag = self._build_dag() return self.__get_inputs(dag) def __get_inputs(self, dag: nx.DiGraph) -> Set[DataNode]: return {node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)} def get_outputs(self) -> Set[DataNode]: """Return the set of output data nodes of the submittable entity. Returns: The set of output data nodes. 
""" dag = self._build_dag() return self.__get_outputs(dag) def __get_outputs(self, dag: nx.DiGraph) -> set[DataNode]: return {node for node, degree in dict(dag.out_degree).items() if degree == 0 and isinstance(node, DataNode)} def get_intermediate(self) -> Set[DataNode]: """Return the set of intermediate data nodes of the submittable entity. Returns: The set of intermediate data nodes. """ dag = self._build_dag() all_data_nodes_in_dag = {node for node in dag.nodes if isinstance(node, DataNode)} return all_data_nodes_in_dag - self.__get_inputs(dag) - self.__get_outputs(dag) def is_ready_to_run(self) -> bool: """Indicate if the entity is ready to be run. Returns: True if the given entity is ready to be run. False otherwise. """ return all(dn.is_ready_for_reading for dn in self.get_inputs()) def data_nodes_being_edited(self) -> Set[DataNode]: """Return the set of data nodes of the submittable entity that are being edited. Returns: The set of data nodes that are being edited. """ dag = self._build_dag() return {node for node in dag.nodes if isinstance(node, DataNode) and node.edit_in_progress} @abc.abstractmethod def subscribe(self, callback: Callable[[Submittable, Job], None], params: Optional[List[Any]] = None): raise NotImplementedError @abc.abstractmethod def unsubscribe(self, callback: Callable[[Submittable, Job], None], params: Optional[List[Any]] = None): raise NotImplementedError @abc.abstractmethod def _get_set_of_tasks(self) -> Set[Task]: raise NotImplementedError def _get_dag(self) -> _DAG: return _DAG(self._build_dag()) def _build_dag(self) -> nx.DiGraph: graph = nx.DiGraph() tasks = self._get_set_of_tasks() for task in tasks: if has_input := task.input: for predecessor in task.input.values(): graph.add_edges_from([(predecessor, task)]) if has_output := task.output: for successor in task.output.values(): graph.add_edges_from([(task, successor)]) if not has_input and not has_output: graph.add_node(task) return graph def _get_sorted_tasks(self) -> 
List[List[Task]]: dag = self._build_dag() remove = [node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)] dag.remove_nodes_from(remove) return list(nodes for nodes in nx.topological_generations(dag) if (Task in (type(node) for node in nodes))) def _add_subscriber(self, callback: Callable, params: Optional[List[Any]] = None): params = [] if params is None else params self._subscribers.append(_Subscriber(callback=callback, params=params)) def _remove_subscriber(self, callback: Callable, params: Optional[List[Any]] = None): if params is not None: self._subscribers.remove(_Subscriber(callback, params)) else: elem = [x for x in self._subscribers if x.callback == callback] if not elem: raise ValueError self._subscribers.remove(elem[0])
|
from typing import List

from .._entity._reload import _get_manager
from ..notification import Notifier


class _Entity:
    # Base class adding context-manager semantics to entities: while inside a
    # ``with`` block, property changes and notification events are buffered and
    # only persisted/published on exit.

    _MANAGER_NAME: str  # set by subclasses; selects the manager used to persist the entity
    _is_in_context = False
    _in_context_attributes_changed_collector: List

    def __enter__(self):
        self._is_in_context = True
        self._in_context_attributes_changed_collector = list()
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # If multiple entities is in context, the last to enter will be the first to exit
        self._is_in_context = False
        if hasattr(self, "_properties"):
            # Apply buffered property deletions and updates, then persist them.
            for to_delete_key in self._properties._pending_deletions:
                self._properties.data.pop(to_delete_key, None)
            self._properties.data.update(self._properties._pending_changes)
            _get_manager(self._MANAGER_NAME)._set(self)
        # Publish the events collected while inside the context.
        for event in self._in_context_attributes_changed_collector:
            Notifier.publish(event)
        # NOTE(review): the entity is persisted again here even when it was already
        # persisted above after applying property changes — looks redundant; confirm intent.
        _get_manager(self._MANAGER_NAME)._set(self)
|
from collections import UserDict

from ..notification import _ENTITY_TO_EVENT_ENTITY_TYPE, EventOperation, Notifier, _make_event


class _Properties(UserDict):
    # Dict of user properties attached to an entity owner. Writes and deletions
    # trigger UPDATE events; when the owner is inside a ``with`` context, the
    # changes are buffered on the owner instead of being applied immediately.
    # NOTE(review): _ENTITY_TO_EVENT_ENTITY_TYPE is imported but not used here —
    # possibly kept for re-export; confirm before removing.

    __PROPERTIES_ATTRIBUTE_NAME = "properties"

    def __init__(self, entity_owner, **kwargs):
        super().__init__(**kwargs)
        self._entity_owner = entity_owner
        # Buffers used while the owner entity is inside a ``with`` context.
        self._pending_changes = {}
        self._pending_deletions = set()

    def __setitem__(self, key, value):
        super(_Properties, self).__setitem__(key, value)

        # Imported lazily to avoid a circular import at module load time.
        from ... import core as tp

        if hasattr(self, "_entity_owner"):
            event = _make_event(
                self._entity_owner,
                EventOperation.UPDATE,
                attribute_name=self.__PROPERTIES_ATTRIBUTE_NAME,
                attribute_value=value,
            )
            if not self._entity_owner._is_in_context:
                # Outside a context: persist the owner and publish immediately.
                tp.set(self._entity_owner)
                Notifier.publish(event)
            else:
                # Inside a context: a set cancels any pending deletion of the key
                # and is buffered until the context exits.
                if key in self._pending_deletions:
                    self._pending_deletions.remove(key)
                self._pending_changes[key] = value
                self._entity_owner._in_context_attributes_changed_collector.append(event)

    def __getitem__(self, key):
        from taipy.config.common._template_handler import _TemplateHandler as _tpl

        # Property values may contain config templates; resolve them on read.
        return _tpl._replace_templates(super(_Properties, self).__getitem__(key))

    def __delitem__(self, key):
        super(_Properties, self).__delitem__(key)

        from ... import core as tp

        if hasattr(self, "_entity_owner"):
            event = _make_event(
                self._entity_owner,
                EventOperation.UPDATE,
                attribute_name=self.__PROPERTIES_ATTRIBUTE_NAME,
                attribute_value=None,
            )
            if not self._entity_owner._is_in_context:
                # Outside a context: persist the owner and publish immediately.
                tp.set(self._entity_owner)
                Notifier.publish(event)
            else:
                # Inside a context: a deletion cancels any pending change and is
                # buffered until the context exits.
                self._pending_changes.pop(key, None)
                self._pending_deletions.add(key)
                self._entity_owner._in_context_attributes_changed_collector.append(event)
|
from __future__ import annotations class _EntityIds: def __init__(self): self.data_node_ids = set() self.task_ids = set() self.scenario_ids = set() self.sequence_ids = set() self.job_ids = set() self.cycle_ids = set() self.submission_ids = set() def __add__(self, other: _EntityIds): self.data_node_ids.update(other.data_node_ids) self.task_ids.update(other.task_ids) self.scenario_ids.update(other.scenario_ids) self.sequence_ids.update(other.sequence_ids) self.job_ids.update(other.job_ids) self.cycle_ids.update(other.cycle_ids) self.submission_ids.update(other.submission_ids) return self def __iadd__(self, other: _EntityIds): self.__add__(other) return self
|
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.