text
stringlengths
0
105k
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import random

from taipy.gui import Gui

# 500 uniformly distributed samples in [0, 1) used as the histogram input.
data = [random.random() for _ in range(500)]

# Plotly trace options: accumulate bin counts from left to right so each bar
# shows the count of all samples up to that bin.
options = {
    # Enable the cumulative histogram
    "cumulative": {"enabled": True}
}

page = """
# Histogram - Cumulative

<|{data}|chart|type=histogram|options={options}|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import numpy
import pandas

from taipy.gui import Gui

# One calendar day per sample, for a full year.
dates = pandas.date_range("2023-01-01", periods=365, freq="D")

# Daily mean temperatures (°C), one value per day of the year.
temp = [
    -11.33333333, -6, -0.111111111, 1.444444444, 2.388888889, 4.555555556, 4.333333333, 0.666666667, 9, 9.611111111, -0.555555556, 1.833333333, -0.444444444, 2.166666667, -4, -12.05555556, -2.722222222, 5, 9.888888889, 6.611111111, -2.833333333, -3.277777778, -1.611111111, -1.388888889, 5.777777778, 2.166666667, -1.055555556, 1.777777778, 1.5, 8.444444444, 6.222222222, -2.5, -0.388888889, 6.111111111, -1.5, 2.666666667, -2.5, 0.611111111, 8.222222222, 2.333333333, -9.333333333, -7.666666667, -6.277777778, -0.611111111, 7.722222222, 6.111111111, -4, 3.388888889, 9.333333333, -6.333333333, -15, -12.94444444, -8.722222222, -6.222222222, -2.833333333, -2.5, 1.5, 3.444444444, 2.666666667, 0.888888889, 7.555555556, 12.66666667, 12.83333333, 1.777777778, -0.111111111, -1.055555556, 4.611111111, 11.16666667, 8.5, 0.5, 2.111111111, 4.722222222, 8.277777778, 10.66666667, 5.833333333, 5.555555556, 6.944444444, 1.722222222, 2.444444444, 6.111111111, 12.11111111, 15.55555556, 9.944444444, 10.27777778, 5.888888889,
    1.388888889, 3.555555556, 1.222222222, 4.055555556, 7.833333333, 0.666666667, 10.05555556, 6.444444444, 4.555555556, 11, 3.555555556, -0.555555556, 11.83333333, 7.222222222, 10.16666667, 17.5, 14.55555556, 6.777777778, 3.611111111, 5.888888889, 10.05555556, 16.61111111, 5.5, 7.055555556, 10.5, 1.555555556, 6.166666667, 11.05555556, 5.111111111, 6.055555556, 11, 11.05555556, 14.72222222, 19.16666667, 16.5, 12.61111111, 8.277777778, 6.611111111, 10.38888889, 15.38888889, 17.22222222, 18.27777778, 18.72222222, 17.05555556, 19.72222222, 16.83333333, 12.66666667, 11.66666667, 12.88888889, 14.77777778, 18, 19.44444444, 16.5, 9.722222222, 7.888888889, 13.72222222, 17.55555556, 18.27777778, 20.11111111, 21.66666667, 23.38888889, 23.5, 16.94444444, 16.27777778, 18.61111111, 20.83333333, 24.61111111, 18.27777778, 17.88888889, 22.27777778, 25.94444444, 25.27777778, 24.72222222, 25.61111111, 23.94444444, 26.33333333, 22.05555556, 20.83333333, 24.5, 27.83333333, 25.61111111, 23.11111111, 19.27777778, 16.44444444, 19.44444444, 17.22222222, 19.44444444, 22.16666667, 21.77777778, 17.38888889, 17.22222222, 23.88888889, 28.44444444, 29.44444444, 29.61111111, 21.05555556, 18.55555556, 25.27777778, 26.55555556, 24.55555556, 23.38888889, 22.55555556, 27.05555556, 27.66666667, 26.66666667, 27.61111111, 26.66666667, 24.77777778, 23, 26.5, 23.11111111, 19.83333333, 22.27777778, 24.61111111, 27.05555556, 27.05555556, 27.94444444, 27.33333333, 22.05555556, 21.5, 22, 19.72222222, 20.27777778, 17.88888889, 18.55555556, 18.94444444, 20, 22.05555556, 23.22222222, 24.38888889, 24.5, 24.5, 21.22222222, 20.83333333, 20.61111111, 22.05555556, 23.77777778, 24.16666667, 24.22222222, 21.83333333, 21.33333333, 21.88888889, 22.44444444, 23.11111111, 20.44444444, 16.88888889, 15.77777778, 17.44444444, 17.72222222, 23.11111111, 24.55555556, 24.88888889, 25.11111111, 25.27777778, 19.5, 19.55555556, 24.05555556, 24.27777778, 21.05555556, 19.88888889, 20.66666667, 20.27777778, 17.66666667, 16.44444444,
    15.88888889, 18.44444444, 22.44444444, 23, 24.72222222, 24.16666667, 25.94444444, 24.44444444, 23.33333333, 25.22222222, 25, 23.88888889, 23.72222222, 18.94444444, 16.22222222, 19.5, 21.22222222, 19.72222222, 13.22222222, 11.88888889, 16.55555556, 10.05555556, 12.16666667, 11.5, 10.22222222, 17.27777778, 21.72222222, 13.83333333, 13, 6.944444444, 6.388888889, 4.222222222, 2.5, 1.111111111, 3.055555556, 6.388888889, 10.44444444, -2, -2.222222222, 4.388888889, 8.333333333, 11.11111111, 12.66666667, 10.88888889, 12.83333333, 14.16666667, 12.55555556, 12.05555556, 11.22222222, 12.44444444, 14.38888889, 12, 15.83333333, 6.722222222, 2.5, 4.833333333, 7.5, 8.888888889, 4, 7.388888889, 3.888888889, 1.611111111, -0.333333333, -2, 4.833333333, -1.055555556, -5.611111111, -2.388888889, 5.722222222, 8.444444444, 5.277777778, 0.5, -2.5, 1.111111111, 2.111111111, 5.777777778, 7.555555556, 7.555555556, 4.111111111, -0.388888889, -1, 4.944444444, 9.444444444, 4.722222222, -0.166666667, 0.5, -2.444444444, -2.722222222, -2.888888889, -1.111111111, -4.944444444, -3.111111111, -1.444444444, -0.833333333, 2.333333333, 6.833333333, 4.722222222, 0.888888889, 0.666666667, 4.611111111, 4.666666667, 4.444444444, 6.777777778, 5.833333333, 0.5, 4.888888889, 1.444444444, -2.111111111, 2.444444444, -0.111111111, -2.555555556, -4.611111111, -8.666666667, -8.055555556, 1.555555556, -4.777777778,
]

# Daily minimum temperatures (°C).
# Renamed from `min`: the original name shadowed the `min` builtin (PEP 8).
# NOTE(review): this series is defined but never plotted below — kept for parity
# with the data set.
min_temp = [
    -14.33333333, -12.9, -3.311111111, -4.955555556, -3.611111111, 0.555555556, 1.133333333, -5.133333333, 2.3, 3.911111111, -7.055555556, -1.366666667, -4.844444444, -3.333333333, -6.1, -17.15555556, -4.822222222, 0.4, 3.488888889, 4.211111111, -6.433333333, -7.577777778, -7.111111111, -7.088888889, 1.577777778, -3.433333333, -4.355555556, -0.722222222, -2.1, 2.044444444, 2.222222222, -4.7, -2.388888889, 4.111111111, -5, -0.133333333, -5.3, -2.288888889, 6.022222222, -1.766666667, -15.53333333, -13.46666667, -9.277777778, -3.211111111, 3.122222222, 1.411111111, -6.8, 1.388888889, 5.333333333,
    -9.833333333, -22, -19.74444444, -14.62222222, -9.622222222, -8.433333333, -8.5, -2.8, 0.144444444, -3.233333333, -3.411111111, 5.355555556, 8.366666667, 7.333333333, -0.322222222, -6.911111111, -4.955555556, -1.588888889, 4.966666667, 2.5, -4.3, -1.888888889, -1.777777778, 2.477777778, 3.766666667, 0.533333333, 1.755555556, 2.944444444, -4.977777778, -4.055555556, 1.711111111, 6.011111111, 13.15555556, 5.044444444, 6.577777778, 3.388888889, -1.011111111, -0.244444444, -2.477777778, -1.444444444, 2.533333333, -6.333333333, 4.255555556, 1.944444444, 0.855555556, 5.4, -1.244444444, -2.855555556, 4.833333333, 2.722222222, 6.466666667, 14.5, 9.855555556, 2.277777778, -3.188888889, 0.788888889, 4.155555556, 13.41111111, 2.3, 0.855555556, 8.4, -0.444444444, 1.166666667, 7.755555556, -0.288888889, -0.244444444, 8.7, 5.555555556, 8.222222222, 16.26666667, 14.4, 5.711111111, 5.177777778, 4.511111111, 5.988888889, 10.08888889, 10.52222222, 15.37777778, 12.42222222, 14.95555556, 15.22222222, 11.93333333, 6.866666667, 6.866666667, 9.688888889, 11.57777778, 12, 13.34444444, 11.3, 6.222222222, 2.088888889, 8.322222222, 14.05555556, 13.77777778, 16.91111111, 16.86666667, 16.68888889, 18.5, 12.54444444, 12.27777778, 15.91111111, 15.03333333, 22.11111111, 15.77777778, 13.68888889, 17.87777778, 19.94444444, 18.57777778, 18.62222222, 20.11111111, 17.14444444, 20.43333333, 15.75555556, 17.33333333, 20, 23.03333333, 19.61111111, 18.51111111, 15.27777778, 11.44444444, 13.64444444, 11.42222222, 16.14444444, 19.76666667, 18.77777778, 11.88888889, 12.32222222, 20.78888889, 25.04444444, 25.34444444, 23.81111111, 18.35555556, 11.85555556, 18.37777778, 23.15555556, 21.55555556, 17.48888889, 19.05555556, 20.25555556, 23.86666667, 23.86666667, 21.41111111, 21.16666667, 18.67777778, 18.1, 24.4, 19.01111111, 17.13333333, 18.27777778, 21.71111111, 22.85555556, 22.65555556, 25.14444444, 24.13333333, 17.95555556, 14.7, 15.1, 16.02222222, 14.27777778, 11.18888889, 13.65555556, 16.74444444, 16.7,
    17.65555556, 16.62222222, 21.68888889, 19.6, 18.6, 15.52222222, 18.53333333, 17.01111111, 17.75555556, 20.47777778, 17.76666667, 22.22222222, 18.23333333, 17.83333333, 15.38888889, 19.64444444, 17.81111111, 15.44444444, 14.88888889, 13.07777778, 15.24444444, 11.82222222, 20.81111111, 21.45555556, 18.98888889, 19.71111111, 19.27777778, 12.7, 15.05555556, 19.15555556, 20.77777778, 15.35555556, 17.68888889, 18.26666667, 15.47777778, 12.76666667, 10.54444444, 13.38888889, 12.54444444, 19.84444444, 19.5, 21.92222222, 17.86666667, 22.44444444, 19.64444444, 20.73333333, 22.02222222, 19, 20.48888889, 19.02222222, 16.44444444, 14.22222222, 16.3, 16.42222222, 17.22222222, 8.322222222, 8.288888889, 13.95555556, 5.555555556, 5.666666667, 7.7, 4.022222222, 11.77777778, 16.42222222, 11.83333333, 9.7, 0.044444444, 3.688888889, -2.077777778, 0.1, -5.388888889, -3.244444444, 0.688888889, 5.744444444, -7.7, -7.022222222, -0.211111111, 4.833333333, 8.111111111, 5.766666667, 7.888888889, 10.43333333, 11.56666667, 10.15555556, 7.155555556, 4.522222222, 7.144444444, 10.88888889, 9.5, 12.13333333, 4.022222222, -3.9, 1.433333333, 0.7, 3.188888889, -1.7, 3.588888889, -0.111111111, -2.788888889, -7.133333333, -5, 0.733333333, -7.555555556, -12.51111111, -8.188888889, 3.122222222, 2.944444444, 0.477777778, -3.2, -9.2, -4.788888889, -0.288888889, 1.077777778, 4.755555556, 5.455555556, 0.511111111, -3.888888889, -7.4, -1.355555556, 5.144444444, 0.122222222, -5.166666667, -5, -5.144444444, -8.822222222, -6.388888889, -6.811111111, -8.944444444, -10.11111111, -7.144444444, -5.133333333, -1.166666667, 1.833333333, -1.477777778, -1.811111111, -2.433333333, -1.188888889, -2.333333333, 0.744444444, 1.877777778, 1.333333333, -1.7, 0.888888889, -3.855555556, -8.211111111, -1.055555556, -4.211111111, -7.355555556, -8.111111111, -10.96666667, -13.05555556, -4.644444444, -7.577777778,
]

# Daily maximum temperatures (°C).
# Renamed from `max`: the original name shadowed the `max` builtin (PEP 8) and
# was then used in an expression below, hiding the builtin for the whole module.
max_temp = [
    -7.233333333, -1.6, 5.488888889, 7.744444444, 6.188888889, 6.555555556, 10.53333333, 6.766666667, 14.1,
    14.11111111, 2.044444444, 4.633333333, 2.055555556, 8.666666667, -1.4, -5.555555556, 4.177777778, 11.8, 15.58888889, 12.31111111, 3.666666667, -0.977777778, 1.288888889, 4.211111111, 9.377777778, 5.266666667, 2.144444444, 3.977777778, 7.2, 11.94444444, 11.32222222, 4, 6.611111111, 8.211111111, 3.5, 8.866666667, 3.6, 3.711111111, 13.12222222, 7.833333333, -3.333333333, -2.166666667, -2.877777778, 5.188888889, 13.12222222, 12.11111111, -0.7, 6.688888889, 14.03333333, -2.433333333, -8.6, -8.244444444, -2.122222222, -2.722222222, 1.266666667, 2.8, 5.7, 6.944444444, 5.066666667, 5.688888889, 13.35555556, 16.66666667, 17.33333333, 7.277777778, 6.388888889, 1.344444444, 9.111111111, 17.96666667, 12.8, 5.8, 6.911111111, 6.822222222, 11.87777778, 13.16666667, 9.233333333, 8.655555556, 10.04444444, 7.022222222, 7.644444444, 8.311111111, 16.71111111, 18.85555556, 12.14444444, 13.27777778, 11.18888889, 7.088888889, 8.255555556, 7.522222222, 9.955555556, 9.933333333, 4.866666667, 15.25555556, 9.244444444, 9.755555556, 14, 8.955555556, 2.344444444, 17.43333333, 12.12222222, 13.46666667, 23, 18.45555556, 12.77777778, 7.211111111, 8.588888889, 14.35555556, 19.01111111, 12.4, 9.155555556, 15.6, 4.955555556, 8.966666667, 16.95555556, 9.511111111, 10.15555556, 16, 14.45555556, 21.02222222, 25.76666667, 20.5, 15.71111111, 11.67777778, 12.81111111, 12.88888889, 17.58888889, 23.12222222, 21.77777778, 24.42222222, 20.05555556, 24.32222222, 18.83333333, 19.56666667, 14.96666667, 19.68888889, 18.57777778, 23, 23.34444444, 20.7, 11.82222222, 11.48888889, 17.52222222, 22.55555556, 20.47777778, 23.01111111, 27.86666667, 30.28888889, 30.3, 22.94444444, 18.57777778, 25.51111111, 24.13333333, 30.01111111, 24.77777778, 20.28888889, 28.67777778, 32.74444444, 31.37777778, 28.52222222, 31.81111111, 27.24444444, 32.53333333, 26.15555556, 24.63333333, 28.3, 31.23333333, 32.21111111, 28.21111111, 23.07777778, 21.64444444, 24.34444444, 19.62222222, 25.14444444, 24.46666667, 23.87777778, 21.28888889,
    20.22222222, 29.98888889, 32.04444444, 36.44444444, 36.01111111, 24.85555556, 23.45555556, 29.17777778, 32.25555556, 28.75555556, 30.28888889, 28.85555556, 30.45555556, 31.26666667, 28.86666667, 33.31111111, 30.66666667, 28.67777778, 27.4, 32.2, 25.41111111, 22.23333333, 26.67777778, 30.21111111, 29.15555556, 29.65555556, 31.94444444, 31.43333333, 28.35555556, 24.8, 25.5, 25.42222222, 24.17777778, 20.88888889, 24.35555556, 25.54444444, 22, 27.95555556, 29.42222222, 28.88888889, 26.8, 28.2, 26.92222222, 24.13333333, 22.61111111, 26.15555556, 30.57777778, 30.86666667, 29.92222222, 27.33333333, 23.43333333, 24.68888889, 26.94444444, 28.81111111, 25.54444444, 22.48888889, 21.67777778, 19.74444444, 23.82222222, 25.91111111, 30.85555556, 28.48888889, 29.21111111, 28.37777778, 22.4, 25.55555556, 27.35555556, 30.67777778, 27.95555556, 25.98888889, 23.46666667, 25.37777778, 20.46666667, 22.54444444, 20.18888889, 22.24444444, 26.84444444, 25.8, 29.62222222, 26.36666667, 32.24444444, 29.84444444, 28.33333333, 31.22222222, 29.9, 29.98888889, 27.42222222, 25.54444444, 20.22222222, 24, 24.52222222, 25.02222222, 16.12222222, 17.58888889, 23.25555556, 15.75555556, 18.66666667, 18.4, 12.52222222, 20.07777778, 28.62222222, 17.23333333, 16.6, 13.34444444, 10.98888889, 9.522222222, 5.8, 6.811111111, 6.555555556, 12.18888889, 12.64444444, 4.2, 3.577777778, 8.888888889, 15.23333333, 16.11111111, 18.36666667, 16.98888889, 15.63333333, 16.46666667, 15.55555556, 15.65555556, 17.42222222, 18.74444444, 19.48888889, 15.9, 19.73333333, 13.02222222, 8.1, 8.933333333, 11.3, 12.38888889, 8.3, 12.38888889, 6.388888889, 4.211111111, 4.666666667, 0.7, 7.133333333, 2.344444444, 1.088888889, 0.111111111, 11.62222222, 10.84444444, 8.777777778, 3.5, 3.4, 7.211111111, 5.711111111, 9.677777778, 12.25555556, 10.15555556, 6.511111111, 4.911111111, 1.5, 11.44444444, 15.54444444, 8.122222222, 6.233333333, 7, 4.355555556, 0.277777778, 3.711111111, 2.888888889, 1.555555556, 3.888888889, 4.555555556,
    5.666666667, 7.833333333, 9.833333333, 10.02222222, 6.288888889, 5.366666667, 11.41111111, 9.566666667, 9.744444444, 13.57777778, 9.433333333, 3.1, 11.08888889, 3.844444444, 2.488888889, 7.544444444, 4.488888889, -0.455555556, -2.111111111, -3.566666667, -1.955555556, 3.955555556, 1.222222222,
]

# One label ("W0", "W1", ...) at the start of each week, None elsewhere so no
# text is drawn on the other days.
week_number = [f"W{i//7}" if i % 7 == 0 else None for i in range(0, 365)]

# Window of days to plot.
# NOTE(review): `size` is used as the slice *end* index (rows 50..99), not a
# length — confirm intent before renaming.
start = 50
size = 100

data = {
    "Date": dates[start:size],
    "Temp°C": temp[start:size],
    # Second trace, offset 5°C above the daily maximum so the labels clear the line.
    "Week": numpy.array(max_temp[start:size]) + 5,
    "WeekN": week_number[start:size],
}

page = """
# Line - Texts

<|{data}|chart|x=Date|y[1]=Temp°C|y[2]=Week|mode[2]=text|text[2]=WeekN|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# A five-pointed star: radii alternate 3 (tip) and 1 (notch) every 36 degrees,
# and the first point is repeated at the end to close the outline.
radii = [3, 1] * 5
angles = [*range(0, 360, 36)]
data = {"r": radii + [radii[0]], "theta": angles + [0]}

options = [
    # First plot is filled with a yellow-ish color
    {"subplot": "polar", "fill": "toself", "fillcolor": "#E4FF87"},
    # Second plot is filled with a blue-ish color
    {"fill": "toself", "subplot": "polar2", "fillcolor": "#709BFF"},
]

layout = {
    "polar": {
        # This actually is the default value
        "angularaxis": {
            "direction": "counterclockwise",
        },
    },
    "polar2": {
        "angularaxis": {
            # Rotate the axis 180° (0 is on the left)
            "rotation": 180,
            # Orient the axis clockwise
            "direction": "clockwise",
            # Show the angles as radians
            "thetaunit": "radians",
        },
    },
    # Hide the legend
    "showlegend": False,
}

page = """
# Polar Charts - Direction

<|{data}|chart|type=scatterpolar|layout={layout}|options={options}|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Bound value: must be one of the slider's list of values below.
value = "XS"

page = """
# Slider - List of values

<|{value}|slider|lov=XXS;XS;S;M;L;XL;XXL|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import io
from decimal import Decimal, getcontext

from taipy.gui import Gui, download

# Initial precision
precision = 10


def pi(precision: int) -> list[int]:
    """Compute Pi to the required precision.

    Adapted from https://docs.python.org/3/library/decimal.html
    """
    saved_precision = getcontext().prec  # Save precision
    getcontext().prec = precision
    three = Decimal(3)  # substitute "three=3.0" for regular floats
    lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24
    # Iterate the series until the running sum stops changing at this precision.
    while s != lasts:
        lasts = s
        n, na = n + na, na + 8
        d, da = d + da, da + 32
        t = (t * n) / d
        s += t
    # Peel off the digits one by one: integer part first, then shift left.
    digits = []
    while s != 0:
        whole = int(s)
        digits.append(whole)
        s = (s - whole) * 10
    getcontext().prec = saved_precision
    return digits


def download_pi(state):
    """Build a CSV of Pi digits in memory and trigger a browser download."""
    digits = pi(state.precision)
    csv_buffer = io.StringIO()
    csv_buffer.write("index,digit\n")
    csv_buffer.writelines(f"{i},{d}\n" for i, d in enumerate(digits))
    download(state, content=csv_buffer.getvalue().encode("UTF-8"), name="pi.csv")


page = """
# File Download - Dynamic content

Precision:

<|{precision}|slider|min=2|max=10000|>

<|{None}|file_download|on_action=download_pi|label=Download Pi digits|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Bound value, inside the custom [1, 10] range declared on the control.
value = 9

page = """
# Slider - Custom range

<|{value}|slider|min=1|max=10|>

Value: <|{value}|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from datetime import date, timedelta

from taipy.gui import Gui

# Create the list of dates (all year 2000).
# all_dates_str is the slider's list of values; all_dates maps each string
# back to its date object for display.
all_dates = {}
all_dates_str = []
start_date = date(2000, 1, 1)
end_date = date(2001, 1, 1)
a_date = start_date
while a_date < end_date:
    date_str = a_date.strftime("%Y/%m/%d")
    all_dates_str.append(date_str)
    all_dates[date_str] = a_date
    a_date += timedelta(days=1)

# Initial selection: first and last day
# (fixed: was all_dates_str[1], which selected January 2nd, not the first day)
dates = [all_dates_str[0], all_dates_str[-1]]

# These two variables are used in text controls
start_sel = all_dates[dates[0]]
end_sel = all_dates[dates[1]]


def on_change(state, _, var_value):
    """Reflect the new slider selection in the two text controls.

    Arguments:
        state: the Taipy state for the client session.
        _: the changed variable name (unused).
        var_value: the new [start, end] pair of date strings.
    """
    state.start_sel = all_dates[var_value[0]]
    state.end_sel = all_dates[var_value[1]]


page = """
# Slider - Date range

<|{dates}|slider|lov={all_dates_str}|>

Start: <|{start_sel}|text|format=d MMM|>

End: <|{end_sel}|text|format=d MMM|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
import os
from decimal import Decimal, getcontext
from tempfile import NamedTemporaryFile

from taipy.gui import Gui, download

# Initial precision
precision = 10

# Stores the path to the temporary file
temp_path = None


def pi(precision: int) -> list[int]:
    """Compute Pi to the required precision.

    Adapted from https://docs.python.org/3/library/decimal.html
    """
    saved_precision = getcontext().prec  # Save precision
    getcontext().prec = precision
    three = Decimal(3)  # substitute "three=3.0" for regular floats
    lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24
    # Iterate the series until the running sum stops changing at this precision.
    while s != lasts:
        lasts = s
        n, na = n + na, na + 8
        d, da = d + da, da + 32
        t = (t * n) / d
        s += t
    # Peel off the digits one by one: integer part first, then shift left.
    digits = []
    while s != 0:
        whole = int(s)
        digits.append(whole)
        s = (s - whole) * 10
    getcontext().prec = saved_precision
    return digits


def clean_up(state):
    """Remove the temporary file once the download has completed."""
    os.remove(state.temp_path)


def download_pi(state):
    """Write the Pi digits to a CSV temporary file, then trigger a download.

    The file is kept on disk (delete=False) until clean_up runs after the
    download action completes.
    """
    digits = pi(state.precision)
    with NamedTemporaryFile("r+t", suffix=".csv", delete=False) as temp_file:
        state.temp_path = temp_file.name
        temp_file.write("index,digit\n")
        temp_file.writelines(f"{i},{d}\n" for i, d in enumerate(digits))
    download(state, content=temp_file.name, name="pi.csv", on_action=clean_up)


page = """
# File Download - Dynamic content

Precision:

<|{precision}|slider|min=2|max=10000|>

<|{None}|file_download|on_action=download_pi|label=Download Pi digits|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Bound value, within the slider's default [0, 100] range.
value = 50

page = """
# Slider - Simple

<|{value}|slider|>

Value: <|{value}|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Bound value; orientation=v renders the control vertically.
value = 40

page = """
# Slider - Vertical

<|{value}|slider|orientation=v|>

Value: <|{value}|>
"""

Gui(page).run()
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# -----------------------------------------------------------------------------------------
# To execute this script, make sure that the taipy-gui package is installed in your
# Python environment and run:
#     python <script>
# -----------------------------------------------------------------------------------------
from taipy.gui import Gui

# Initial values: binding a list makes the slider show one knob per element.
values = [20, 40, 80]

page = """
# Slider - Range

<|{values}|slider|>

Selection: <|{values}|>
"""

Gui(page).run()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import os
import sys

from taipy._cli._base_cli import _CLI
from taipy.core._core_cli import _CoreCLI
from taipy.core._entity._migrate_cli import _MigrateCLI
from taipy.core._version._cli._version_cli import _VersionCLI
from taipy.gui._gui_cli import _GuiCLI

from ._cli._help_cli import _HelpCLI
from ._cli._run_cli import _RunCLI
from ._cli._scaffold_cli import _ScaffoldCLI
from .version import _get_version


def _entrypoint():
    """Entry point of the `taipy` command line: register every sub-command
    parser, parse the arguments, and dispatch to the matching handler.

    Falls through to printing the general help when no sub-command handled
    the arguments.
    """
    # Add the current working directory to path to execute version command on FS repo
    sys.path.append(os.path.normpath(os.getcwd()))
    # Global -v/--version flag, handled here rather than by a sub-command.
    _CLI._parser.add_argument("-v", "--version", action="store_true", help="Print the current Taipy version and exit.")
    # Register every sub-command parser on the shared _CLI parser.
    _RunCLI.create_parser()
    _GuiCLI.create_run_parser()
    _CoreCLI.create_run_parser()
    _VersionCLI.create_parser()
    _ScaffoldCLI.create_parser()
    _MigrateCLI.create_parser()
    _HelpCLI.create_parser()
    args = _CLI._parse()
    # --version short-circuits everything else.
    if args.version:
        print(f"Taipy {_get_version()}")
        sys.exit(0)
    # Each parse_arguments() call exits the process if its sub-command matched;
    # the order below therefore defines the dispatch priority.
    _RunCLI.parse_arguments()
    _HelpCLI.parse_arguments()
    _VersionCLI.parse_arguments()
    _MigrateCLI.parse_arguments()
    _ScaffoldCLI.parse_arguments()
    # No sub-command matched: show the general usage (without the internal
    # "help" argument) and return.
    _CLI._remove_argument("help")
    _CLI._parser.print_help()
import json
import os


def _get_version() -> str:
    """Read the Taipy version from the version.json file next to this module.

    Returns:
        The version as "major.minor.patch", with ".<ext>" appended when the
        JSON declares an "ext" field (e.g. a pre-release tag). Missing
        components default to 0.
    """
    # Build the sibling-file path portably instead of splicing os.sep into an
    # f-string by hand.
    version_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "version.json")
    with open(version_path) as version_file:
        version = json.load(version_file)

        version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
        if vext := version.get("ext"):
            version_string = f"{version_string}.{vext}"
    return version_string
# Lazily assemble the top-level `taipy` namespace from whichever optional
# sub-packages are installed: each guard probes for the distribution with
# find_spec (no import side effects) before star-importing its public API.
from importlib.util import find_spec

if find_spec("taipy"):
    if find_spec("taipy.config"):
        from taipy.config._init import *  # type: ignore
    if find_spec("taipy.gui"):
        from taipy.gui._init import *  # type: ignore
    if find_spec("taipy.core"):
        from taipy.core._init import *  # type: ignore
    if find_spec("taipy.rest"):
        from taipy.rest._init import *  # type: ignore
    if find_spec("taipy.gui_core"):
        from taipy.gui_core._init import *  # type: ignore
    if find_spec("taipy.enterprise"):
        from taipy.enterprise._init import *  # type: ignore
    # `taipy.run` is only exposed when the multi-service runner module exists
    # (it needs gui/rest/core to be importable).
    if find_spec("taipy._run"):
        from taipy._run import _run as run  # type: ignore
import sys
import typing as t

from flask import Flask

from taipy.core import Core
from taipy.gui import Gui
from taipy.rest import Rest

# TypeGuard only exists in typing from Python 3.10; the 3.9- fallback below
# avoids importing it at all.
if sys.version_info >= (3, 10):
    from typing import TypeGuard

# Any single Taipy service instance / its generic counterpart.
_AppType = t.Union[Gui, Rest, Core]
_AppTypeT = t.TypeVar("_AppTypeT", Gui, Rest, Core)


def _run(*services: _AppType, **kwargs) -> t.Optional[Flask]:
    """Run one or multiple Taipy services.

    A Taipy service is an instance of a class that runs code as a Web application.

    Parameters:
        *services (Union[`Gui^`, `Rest^`, `Core^`]): Services to run, as separate arguments.<br/>
            If several services are provided, all the services run simultaneously.<br/>
            If this is empty or set to None, this method does nothing.
        **kwargs: Other parameters to provide to the services.
    """
    # Pick out at most one instance of each service type from the arguments.
    gui = __get_app(services, Gui)
    rest = __get_app(services, Rest)
    core = __get_app(services, Core)

    # When Gui and Core run together, both register their CLI options.
    if gui and core:
        from taipy.core._core_cli import _CoreCLI
        from taipy.gui._gui_cli import _GuiCLI

        _CoreCLI.create_parser()
        _GuiCLI.create_parser()

    # Rest requires a Core service; start one implicitly if none was given.
    if rest or core:
        if not core:
            core = Core()
        core.run()

    # Core alone has no web app to return.
    if not rest and not gui:
        return None

    if gui and rest:
        # Share a single Flask app between Gui and Rest.
        gui._set_flask(rest._app)  # type: ignore
        return gui.run(**kwargs)
    else:
        app = rest or gui
        assert app is not None  # Avoid pyright typing error
        return app.run(**kwargs)


# Avoid black adding too many empty lines
# fmt: off
# Two equivalent implementations of __get_app: the 3.10+ variant uses a
# TypeGuard so type checkers can narrow the filtered element type.
if sys.version_info >= (3, 10):
    def __get_app(apps: t.Tuple[_AppType, ...], type_: t.Type[_AppTypeT]) -> t.Optional[_AppType]:
        # Return the first service of the requested type, or None.
        def filter_isinstance(tl: _AppType) -> TypeGuard[_AppTypeT]:
            return isinstance(tl, type_)

        return next(filter(filter_isinstance, apps), None)
else:
    def __get_app(apps: t.Tuple[_AppType, ...], type_: t.Type[_AppTypeT]) -> t.Optional[_AppType]:
        # Return the first service of the requested type, or None.
        return next(filter(lambda a: isinstance(a, type_), apps), None)
# fmt: on
from ._core import Core from ._entity.submittable import Submittable from .cycle.cycle import Cycle from .cycle.cycle_id import CycleId from .data.data_node import DataNode from .data.data_node_id import DataNodeId from .job.job import Job from .job.job_id import JobId from .job.status import Status from .scenario.scenario import Scenario from .scenario.scenario_id import ScenarioId from .sequence.sequence import Sequence from .sequence.sequence_id import SequenceId from .taipy import ( cancel_job, clean_all_entities_by_version, compare_scenarios, create_global_data_node, create_scenario, delete, delete_job, delete_jobs, exists, export_scenario, get, get_cycles, get_cycles_scenarios, get_data_nodes, get_entities_by_config_id, get_jobs, get_latest_job, get_parents, get_primary, get_primary_scenarios, get_scenarios, get_sequences, get_tasks, is_deletable, is_editable, is_promotable, is_readable, is_submittable, set, set_primary, submit, subscribe_scenario, subscribe_sequence, tag, unsubscribe_scenario, unsubscribe_sequence, untag, ) from .task.task import Task from .task.task_id import TaskId
"""# Taipy Core The Taipy Core package is a Python library designed to build powerful, customized, data-driven back-end applications. It provides the tools to help Python developers transform their algorithms into a complete back-end application. More details on the [Taipy Core](../../core/index.md) functionalities are available in the user manual. To build a Taipy Core application, the first step consists of setting up the Taipy configuration to design your application's characteristics and behaviors. Import `Config^` from the `taipy.config^` module, then use the various methods of the `Config^` singleton class to configure your core application. In particular, configure the [data nodes](../../core/config/data-node-config.md), [tasks](../../core/config/task-config.md), and [scenarios](../../core/config/scenario-config.md). Please refer to the [Core configuration user manual](../../core/config/index.md) for more information and detailed examples. Once your application is configured, import module `import taipy as tp` so you can use any function described in the following section on [Functionc](#functions). In particular, the most used functions are `tp.create_scenario()`, `tp.get_scenarios()`, `tp.get_data_nodes()`, `tp.submit()`, used to get, create, and submit entities. !!! Note Taipy Core provides a runnable service, `Core^` that runs as a service in a dedicated thread. The purpose is to have a dedicated thread responsible for dispatching the submitted jobs to an available executor for their execution. In particular, this `Core^` service is automatically run when Core is used with Taipy REST or Taipy GUI. See the [running services](../../run-deploy/run/running_services.md) page of the user manual for more details. """ from ._init import * from ._init_version import _read_version from .common.mongo_default_document import MongoDefaultDocument from .data.data_node_id import Edit from .exceptions import exceptions __version__ = _read_version()
import json import os from pathlib import Path def _read_version(): with open(f"{Path(os.path.abspath(__file__)).parent}{os.sep}version.json") as version_file: version = json.load(version_file) version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}' if vext := version.get("ext"): version_string = f"{version_string}.{vext}" return version_string
import pathlib import shutil from datetime import datetime from typing import Any, Callable, Dict, List, Optional, Set, Union, overload from taipy.config.common.scope import Scope from taipy.logger._taipy_logger import _TaipyLogger from ._entity._entity import _Entity from ._version._version_manager_factory import _VersionManagerFactory from .common._warnings import _warn_no_core_service from .config.data_node_config import DataNodeConfig from .config.scenario_config import ScenarioConfig from .cycle._cycle_manager_factory import _CycleManagerFactory from .cycle.cycle import Cycle from .cycle.cycle_id import CycleId from .data._data_manager_factory import _DataManagerFactory from .data.data_node import DataNode from .data.data_node_id import DataNodeId from .exceptions.exceptions import ( DataNodeConfigIsNotGlobal, ModelNotFound, NonExistingVersion, VersionIsNotProductionVersion, ) from .job._job_manager_factory import _JobManagerFactory from .job.job import Job from .job.job_id import JobId from .scenario._scenario_manager_factory import _ScenarioManagerFactory from .scenario.scenario import Scenario from .scenario.scenario_id import ScenarioId from .sequence._sequence_manager_factory import _SequenceManagerFactory from .sequence.sequence import Sequence from .sequence.sequence_id import SequenceId from .submission._submission_manager_factory import _SubmissionManagerFactory from .submission.submission import Submission from .task._task_manager_factory import _TaskManagerFactory from .task.task import Task from .task.task_id import TaskId __logger = _TaipyLogger._get_logger() def set(entity: Union[DataNode, Task, Sequence, Scenario, Cycle]): """Save or update an entity. This function allows you to save or update an entity in Taipy. Parameters: entity (Union[DataNode^, Task^, Sequence^, Scenario^, Cycle^]): The entity to save or update. 
""" if isinstance(entity, Cycle): return _CycleManagerFactory._build_manager()._set(entity) if isinstance(entity, Scenario): return _ScenarioManagerFactory._build_manager()._set(entity) if isinstance(entity, Sequence): return _SequenceManagerFactory._build_manager()._set(entity) if isinstance(entity, Task): return _TaskManagerFactory._build_manager()._set(entity) if isinstance(entity, DataNode): return _DataManagerFactory._build_manager()._set(entity) def is_submittable(entity: Union[Scenario, ScenarioId, Sequence, SequenceId, Task, TaskId]) -> bool: """Indicate if an entity can be submitted. This function checks if the given entity can be submitted for execution. Returns: True if the given entity can be submitted. False otherwise. """ if isinstance(entity, Scenario) or (isinstance(entity, str) and entity.startswith(Scenario._ID_PREFIX)): return _ScenarioManagerFactory._build_manager()._is_submittable(entity) # type: ignore if isinstance(entity, Sequence) or (isinstance(entity, str) and entity.startswith(Sequence._ID_PREFIX)): return _SequenceManagerFactory._build_manager()._is_submittable(entity) # type: ignore if isinstance(entity, Task) or (isinstance(entity, str) and entity.startswith(Task._ID_PREFIX)): return _TaskManagerFactory._build_manager()._is_submittable(entity) # type: ignore return False def is_editable( entity: Union[ DataNode, Task, Job, Sequence, Scenario, Cycle, DataNodeId, TaskId, JobId, SequenceId, ScenarioId, CycleId ] ) -> bool: """Indicate if an entity can be edited. This function checks if the given entity can be edited. Returns: True if the given entity can be edited. False otherwise. 
""" if isinstance(entity, Cycle) or (isinstance(entity, str) and entity.startswith(Cycle._ID_PREFIX)): return _CycleManagerFactory._build_manager()._is_editable(entity) # type: ignore if isinstance(entity, Scenario) or (isinstance(entity, str) and entity.startswith(Scenario._ID_PREFIX)): return _ScenarioManagerFactory._build_manager()._is_editable(entity) # type: ignore if isinstance(entity, Sequence) or (isinstance(entity, str) and entity.startswith(Sequence._ID_PREFIX)): return _SequenceManagerFactory._build_manager()._is_editable(entity) # type: ignore if isinstance(entity, Task) or (isinstance(entity, str) and entity.startswith(Task._ID_PREFIX)): return _TaskManagerFactory._build_manager()._is_editable(entity) # type: ignore if isinstance(entity, Job) or (isinstance(entity, str) and entity.startswith(Job._ID_PREFIX)): return _JobManagerFactory._build_manager()._is_editable(entity) # type: ignore if isinstance(entity, DataNode) or (isinstance(entity, str) and entity.startswith(DataNode._ID_PREFIX)): return _DataManagerFactory._build_manager()._is_editable(entity) # type: ignore return False def is_readable( entity: Union[ DataNode, Task, Job, Sequence, Scenario, Cycle, DataNodeId, TaskId, JobId, SequenceId, ScenarioId, CycleId ] ) -> bool: """Indicate if an entity can be read. This function checks if the given entity can be read. Returns: True if the given entity can be read. False otherwise. 
""" if isinstance(entity, Cycle) or (isinstance(entity, str) and entity.startswith(Cycle._ID_PREFIX)): return _CycleManagerFactory._build_manager()._is_readable(entity) # type: ignore if isinstance(entity, Scenario) or (isinstance(entity, str) and entity.startswith(Scenario._ID_PREFIX)): return _ScenarioManagerFactory._build_manager()._is_readable(entity) # type: ignore if isinstance(entity, Sequence) or (isinstance(entity, str) and entity.startswith(Sequence._ID_PREFIX)): return _SequenceManagerFactory._build_manager()._is_readable(entity) # type: ignore if isinstance(entity, Task) or (isinstance(entity, str) and entity.startswith(Task._ID_PREFIX)): return _TaskManagerFactory._build_manager()._is_readable(entity) # type: ignore if isinstance(entity, Job) or (isinstance(entity, str) and entity.startswith(Job._ID_PREFIX)): return _JobManagerFactory._build_manager()._is_readable(entity) # type: ignore if isinstance(entity, DataNode) or (isinstance(entity, str) and entity.startswith(DataNode._ID_PREFIX)): return _DataManagerFactory._build_manager()._is_readable(entity) # type: ignore return False @_warn_no_core_service() def submit( entity: Union[Scenario, Sequence, Task], force: bool = False, wait: bool = False, timeout: Optional[Union[float, int]] = None, ) -> Union[Job, List[Job]]: """Submit a scenario, sequence or task entity for execution. This function submits the given entity for execution and returns the created job(s). If the entity is a sequence or a scenario, all the tasks of the entity are submitted for execution. Parameters: entity (Union[Scenario^, Sequence^, Task^]): The scenario, sequence or task to submit. force (bool): If True, the execution is forced even if for skippable tasks. wait (bool): Wait for the orchestrated jobs created from the submission to be finished in asynchronous mode. timeout (Union[float, int]): The optional maximum number of seconds to wait for the jobs to be finished before returning. 
Returns: The created `Job^` or a collection of the created `Job^` depends on the submitted entity. - If a `Scenario^` or a `Sequence^` is provided, it will return a list of `Job^`. - If a `Task^` is provided, it will return the created `Job^`. """ if isinstance(entity, Scenario): return _ScenarioManagerFactory._build_manager()._submit(entity, force=force, wait=wait, timeout=timeout) if isinstance(entity, Sequence): return _SequenceManagerFactory._build_manager()._submit(entity, force=force, wait=wait, timeout=timeout) if isinstance(entity, Task): return _TaskManagerFactory._build_manager()._submit(entity, force=force, wait=wait, timeout=timeout) @overload def exists(entity_id: TaskId) -> bool: ... @overload def exists(entity_id: DataNodeId) -> bool: ... @overload def exists(entity_id: SequenceId) -> bool: ... @overload def exists(entity_id: ScenarioId) -> bool: ... @overload def exists(entity_id: CycleId) -> bool: ... @overload def exists(entity_id: JobId) -> bool: ... @overload def exists(entity_id: str) -> bool: ... def exists(entity_id: Union[TaskId, DataNodeId, SequenceId, ScenarioId, JobId, CycleId, str]) -> bool: """Check if an entity with the specified identifier exists. This function checks if an entity with the given identifier exists. It supports various types of entity identifiers, including `TaskId^`, `DataNodeId^`, `SequenceId^`, `ScenarioId^`, `JobId^`, `CycleId^`, and string representations. Parameters: entity_id (Union[DataNodeId^, TaskId^, SequenceId^, ScenarioId^, JobId^, CycleId^]): The identifier of the entity to check for existence. Returns: True if the given entity exists. False otherwise. Raises: ModelNotFound: If the entity's type cannot be determined. Note: The function performs checks for various entity types (`Job^`, `Cycle^`, `Scenario^`, `Sequence^`, `Task^`, `DataNode^`) based on their respective identifier prefixes. 
""" if entity_id.startswith(Job._ID_PREFIX): return _JobManagerFactory._build_manager()._exists(JobId(entity_id)) if entity_id.startswith(Cycle._ID_PREFIX): return _CycleManagerFactory._build_manager()._exists(CycleId(entity_id)) if entity_id.startswith(Scenario._ID_PREFIX): return _ScenarioManagerFactory._build_manager()._exists(ScenarioId(entity_id)) if entity_id.startswith(Sequence._ID_PREFIX): return _SequenceManagerFactory._build_manager()._exists(SequenceId(entity_id)) if entity_id.startswith(Task._ID_PREFIX): return _TaskManagerFactory._build_manager()._exists(TaskId(entity_id)) if entity_id.startswith(DataNode._ID_PREFIX): return _DataManagerFactory._build_manager()._exists(DataNodeId(entity_id)) raise ModelNotFound("NOT_DETERMINED", entity_id) @overload def get(entity_id: TaskId) -> Task: ... @overload def get(entity_id: DataNodeId) -> DataNode: ... @overload def get(entity_id: SequenceId) -> Sequence: ... @overload def get(entity_id: ScenarioId) -> Scenario: ... @overload def get(entity_id: CycleId) -> Cycle: ... @overload def get(entity_id: JobId) -> Job: ... @overload def get(entity_id: str) -> Union[Task, DataNode, Sequence, Scenario, Job, Cycle]: ... def get( entity_id: Union[TaskId, DataNodeId, SequenceId, ScenarioId, JobId, CycleId, str] ) -> Union[Task, DataNode, Sequence, Scenario, Job, Cycle]: """Retrieve an entity by its unique identifier. This function allows you to retrieve an entity by specifying its identifier. The identifier must match the pattern of one of the supported entity types: Task^, DataNode^, Sequence^, Job^, Cycle^, or Scenario^. Parameters: entity_id (Union[TaskId, DataNodeId, SequenceId, ScenarioId, JobId, CycleId, str]): The identifier of the entity to retrieve.<br/> It should conform to the identifier pattern of one of the entities (`Task^`, `DataNode^`, `Sequence^`, `Job^`, `Cycle^` or `Scenario^`). Returns: The entity that corresponds to the provided identifier. Returns None if no matching entity is found. 
Raises: ModelNotFound^: If the provided *entity_id* does not match any known entity pattern. """ if entity_id.startswith(Job._ID_PREFIX): return _JobManagerFactory._build_manager()._get(JobId(entity_id)) if entity_id.startswith(Cycle._ID_PREFIX): return _CycleManagerFactory._build_manager()._get(CycleId(entity_id)) if entity_id.startswith(Scenario._ID_PREFIX): return _ScenarioManagerFactory._build_manager()._get(ScenarioId(entity_id)) if entity_id.startswith(Sequence._ID_PREFIX): return _SequenceManagerFactory._build_manager()._get(SequenceId(entity_id)) if entity_id.startswith(Task._ID_PREFIX): return _TaskManagerFactory._build_manager()._get(TaskId(entity_id)) if entity_id.startswith(DataNode._ID_PREFIX): return _DataManagerFactory._build_manager()._get(DataNodeId(entity_id)) raise ModelNotFound("NOT_DETERMINED", entity_id) def get_tasks() -> List[Task]: """Retrieve a list of all existing tasks. This function returns a list of all tasks that currently exist. Returns: A list containing all the tasks. """ return _TaskManagerFactory._build_manager()._get_all() def is_deletable(entity: Union[Scenario, Job, ScenarioId, JobId]) -> bool: """Check if a `Scenario^` or a `Job^` can be deleted. This function determines whether a scenario or a job can be safely deleted without causing conflicts or issues. Parameters: entity (Union[Scenario, Job, ScenarioId, JobId]): The scenario or job to check. Returns: True if the given scenario or job can be deleted. False otherwise. """ if isinstance(entity, str) and entity.startswith(Job._ID_PREFIX) or isinstance(entity, Job): return _JobManagerFactory._build_manager()._is_deletable(entity) # type: ignore if isinstance(entity, str) and entity.startswith(Scenario._ID_PREFIX) or isinstance(entity, Scenario): return _ScenarioManagerFactory._build_manager()._is_deletable(entity) # type: ignore return True def delete(entity_id: Union[TaskId, DataNodeId, SequenceId, ScenarioId, JobId, CycleId]): """Delete an entity and its nested entities. 
This function deletes the specified entity and recursively deletes all its nested entities. The behavior varies depending on the type of entity provided: - If a `CycleId` is provided, the nested scenarios, tasks, data nodes, and jobs are deleted. - If a `ScenarioId` is provided, the nested tasks, data nodes, and jobs are deleted. If the scenario is primary, it can only be deleted if it is the only scenario in the cycle. In that case, its cycle is also deleted. Use the `is_deletable()^` function to check if the scenario can be deleted. - If a `SequenceId` is provided, the related jobs are deleted. - If a `TaskId` is provided, the related data nodes, and jobs are deleted. Parameters: entity_id (Union[TaskId, DataNodeId, SequenceId, ScenarioId, JobId, CycleId]): The identifier of the entity to delete. Raises: ModelNotFound: No entity corresponds to the specified *entity_id*. """ if entity_id.startswith(Job._ID_PREFIX): job_manager = _JobManagerFactory._build_manager() return job_manager._delete(job_manager._get(JobId(entity_id))) # type: ignore if entity_id.startswith(Cycle._ID_PREFIX): return _CycleManagerFactory._build_manager()._hard_delete(CycleId(entity_id)) if entity_id.startswith(Scenario._ID_PREFIX): return _ScenarioManagerFactory._build_manager()._hard_delete(ScenarioId(entity_id)) if entity_id.startswith(Sequence._ID_PREFIX): return _SequenceManagerFactory._build_manager()._hard_delete(SequenceId(entity_id)) if entity_id.startswith(Task._ID_PREFIX): return _TaskManagerFactory._build_manager()._hard_delete(TaskId(entity_id)) if entity_id.startswith(DataNode._ID_PREFIX): return _DataManagerFactory._build_manager()._delete(DataNodeId(entity_id)) raise ModelNotFound("NOT_DETERMINED", entity_id) def get_scenarios(cycle: Optional[Cycle] = None, tag: Optional[str] = None) -> List[Scenario]: """Retrieve a list of existing scenarios filtered by cycle or tag. This function allows you to retrieve a list of scenarios based on optional filtering criteria. 
If both a _cycle_ and a _tag_ are provided, the returned list contains scenarios that belong to the specified _cycle_ **and** also have the specified _tag_. Parameters: cycle (Optional[Cycle^]): The optional `Cycle^` to filter scenarios by. tag (Optional[str]): The optional tag to filter scenarios by. Returns: The list of scenarios filtered by cycle or tag. If no filtering criteria are provided, this method returns all existing scenarios. """ if not cycle and not tag: return _ScenarioManagerFactory._build_manager()._get_all() if cycle and not tag: return _ScenarioManagerFactory._build_manager()._get_all_by_cycle(cycle) if not cycle and tag: return _ScenarioManagerFactory._build_manager()._get_all_by_tag(tag) if cycle and tag: cycles_scenarios = _ScenarioManagerFactory._build_manager()()._get_all_by_cycle(cycle) return [scenario for scenario in cycles_scenarios if scenario.has_tag(tag)] return [] def get_primary(cycle: Cycle) -> Optional[Scenario]: """Retrieve the primary scenario associated with a cycle. Parameters: cycle (Cycle^): The cycle for which to retrieve the primary scenario. Returns: The primary scenario of the given _cycle_. If the cycle has no primary scenario, this method returns None. """ return _ScenarioManagerFactory._build_manager()._get_primary(cycle) def get_primary_scenarios() -> List[Scenario]: """Retrieve a list of all primary scenarios. Returns: A list containing all primary scenarios. """ return _ScenarioManagerFactory._build_manager()._get_primary_scenarios() def is_promotable(scenario: Union[Scenario, ScenarioId]) -> bool: """Determine if a scenario can be promoted to become a primary scenario. This function checks whether the given scenario is eligible to be promoted as a primary scenario. Parameters: scenario (Union[Scenario, ScenarioId]): The scenario to be evaluated for promotability. Returns: True if the given scenario can be promoted to be a primary scenario. False otherwise. 
""" return _ScenarioManagerFactory._build_manager()._is_promotable_to_primary(scenario) def set_primary(scenario: Scenario): """Promote a scenario as the primary scenario of its cycle. This function promotes the given scenario as the primary scenario of its associated cycle. If the cycle already has a primary scenario, that scenario is demoted and is no longer considered the primary scenario for its cycle. Parameters: scenario (Scenario^): The scenario to promote as the new _primary_ scenario. """ return _ScenarioManagerFactory._build_manager()._set_primary(scenario) def tag(scenario: Scenario, tag: str): """Add a tag to a scenario. This function adds a user-defined tag to the specified scenario. If another scenario within the same cycle already has the same tag applied, the previous scenario is untagged. Parameters: scenario (Scenario^): The scenario to which the tag will be added. tag (str): The tag to apply to the scenario. """ return _ScenarioManagerFactory._build_manager()._tag(scenario, tag) def untag(scenario: Scenario, tag: str): """Remove a tag from a scenario. This function removes a specified tag from the given scenario. If the scenario does not have the specified tag, it has no effect. Parameters: scenario (Scenario^): The scenario from which the tag will be removed. tag (str): The tag to remove from the scenario. """ return _ScenarioManagerFactory._build_manager()._untag(scenario, tag) def compare_scenarios(*scenarios: Scenario, data_node_config_id: Optional[str] = None) -> Dict[str, Any]: """Compare the data nodes of several scenarios. You can specify which data node config identifier should the comparison be performed on. Parameters: *scenarios (*Scenario^): The list of the scenarios to compare. data_node_config_id (Optional[str]): The config identifier of the DataNode to perform the comparison on. <br/> If _data_node_config_id_ is not provided, the scenarios are compared on all defined comparators.<br/> Returns: The comparison results. 
The key is the data node config identifier used for comparison. Raises: InsufficientScenarioToCompare^: Raised when only one or no scenario for comparison is provided. NonExistingComparator^: Raised when the scenario comparator does not exist. DifferentScenarioConfigs^: Raised when the provided scenarios do not share the same scenario config. NonExistingScenarioConfig^: Raised when the scenario config of the provided scenarios could not be found. """ return _ScenarioManagerFactory._build_manager()._compare(*scenarios, data_node_config_id=data_node_config_id) def subscribe_scenario( callback: Callable[[Scenario, Job], None], params: Optional[List[Any]] = None, scenario: Optional[Scenario] = None, ): """Subscribe a function to be called on job status change. The subscription is applied to all jobs created for the execution of _scenario_. If no scenario is provided, the subscription applies to all scenarios. Parameters: callback (Callable[[Scenario^, Job^], None]): The function to be called on status change. params (Optional[List[Any]]): The parameters to be passed to the _callback_. scenario (Optional[Scenario^]): The scenario to which the callback is applied. If None, the subscription is registered for all scenarios. Note: Notifications are applied only for jobs created **after** this subscription. """ params = [] if params is None else params return _ScenarioManagerFactory._build_manager()._subscribe(callback, params, scenario) def unsubscribe_scenario( callback: Callable[[Scenario, Job], None], params: Optional[List[Any]] = None, scenario: Optional[Scenario] = None ): """Unsubscribe a function that is called when the status of a `Job^` changes. If no scenario is provided, the subscription is removed for all scenarios. Parameters: callback (Callable[[Scenario^, Job^], None]): The function to unsubscribe from. params (Optional[List[Any]]): The parameters to be passed to the callback. scenario (Optional[Scenario]): The scenario to unsubscribe from. 
If None, it applies to all scenarios. Note: The callback function will continue to be called for ongoing jobs. """ return _ScenarioManagerFactory._build_manager()._unsubscribe(callback, params, scenario) def subscribe_sequence( callback: Callable[[Sequence, Job], None], params: Optional[List[Any]] = None, sequence: Optional[Sequence] = None ): """Subscribe a function to be called on job status change. The subscription is applied to all jobs created for the execution of _sequence_. Parameters: callback (Callable[[Sequence^, Job^], None]): The callable function to be called on status change. params (Optional[List[Any]]): The parameters to be passed to the _callback_. sequence (Optional[Sequence^]): The sequence to subscribe on. If None, the subscription is applied to all sequences. Note: Notifications are applied only for jobs created **after** this subscription. """ return _SequenceManagerFactory._build_manager()._subscribe(callback, params, sequence) def unsubscribe_sequence( callback: Callable[[Sequence, Job], None], params: Optional[List[Any]] = None, sequence: Optional[Sequence] = None ): """Unsubscribe a function that is called when the status of a Job changes. Parameters: callback (Callable[[Sequence^, Job^], None]): The callable function to be called on status change. params (Optional[List[Any]]): The parameters to be passed to the _callback_. sequence (Optional[Sequence^]): The sequence to unsubscribe to. If None, it applies to all sequences. Note: The function will continue to be called for ongoing jobs. """ return _SequenceManagerFactory._build_manager()._unsubscribe(callback, params, sequence) def get_sequences() -> List[Sequence]: """Return all existing sequences. Returns: The list of all sequences. """ return _SequenceManagerFactory._build_manager()._get_all() def get_jobs() -> List[Job]: """Return all the existing jobs. Returns: The list of all jobs. 
""" return _JobManagerFactory._build_manager()._get_all() def delete_job(job: Job, force=False): """Delete a job. This function deletes the specified job. If the job is not completed and *force* is not set to True, a `JobNotDeletedException^` may be raised. Parameters: job (Job^): The job to delete. force (Optional[bool]): If True, forces the deletion of _job_, even if it is not completed yet. Raises: JobNotDeletedException^: If the job is not finished. """ return _JobManagerFactory._build_manager()._delete(job, force) def delete_jobs(): """Delete all jobs.""" return _JobManagerFactory._build_manager()._delete_all() def cancel_job(job: Union[str, Job]): """Cancel a job and set the status of the subsequent jobs to ABANDONED. This function cancels the specified job and sets the status of any subsequent jobs to ABANDONED. Parameters: job (Job^): The job to cancel. """ _JobManagerFactory._build_manager()._cancel(job) def get_latest_job(task: Task) -> Optional[Job]: """Return the latest job of a task. This function retrieves the latest job associated with a task. Parameters: task (Task^): The task to retrieve the latest job from. Returns: The latest job created from _task_, or None if no job has been created from _task_. """ return _JobManagerFactory._build_manager()._get_latest(task) def get_latest_submission(entity: Union[Scenario, Sequence, Task]) -> Optional[Submission]: """Return the latest submission of a scenario, sequence or task. This function retrieves the latest submission associated with a scenario, sequence or task. Parameters: entity (Union[Scenario^, Sequence^, Task^]): The scenario, sequence or task to retrieve the latest submission from. Returns: The latest submission created from _scenario_, _sequence_ and _task_, or None if no submission has been created from _scenario_, _sequence_ and _task_. """ return _SubmissionManagerFactory._build_manager()._get_latest(entity) def get_data_nodes() -> List[DataNode]: """Return all the existing data nodes. 
Returns: The list of all data nodes. """ return _DataManagerFactory._build_manager()._get_all() def get_cycles() -> List[Cycle]: """Return the list of all existing cycles. Returns: The list of all cycles. """ return _CycleManagerFactory._build_manager()._get_all() def create_scenario( config: ScenarioConfig, creation_date: Optional[datetime] = None, name: Optional[str] = None, ) -> Scenario: """Create and return a new scenario based on a scenario configuration. If the scenario belongs to a cycle, a cycle (corresponding to the _creation_date_ and the configuration frequency attribute) is created if it does not exist yet. Parameters: config (ScenarioConfig^): The scenario configuration used to create a new scenario. creation_date (Optional[datetime.datetime]): The creation date of the scenario. If None, the current date time is used. name (Optional[str]): The displayable name of the scenario. Returns: The new scenario. """ return _ScenarioManagerFactory._build_manager()._create(config, creation_date, name) def create_global_data_node(config: DataNodeConfig) -> DataNode: """Create and return a new GLOBAL data node from a data node configuration. Parameters: config (DataNodeConfig^): The data node configuration. It must have a `GLOBAL` scope. Returns: The new global data node. Raises: DataNodeConfigIsNotGlobal^: If the data node configuration does not have GLOBAL scope. """ # Check if the data node config has GLOBAL scope if config.scope is not Scope.GLOBAL: raise DataNodeConfigIsNotGlobal(config.id) # type: ignore if dns := _DataManagerFactory._build_manager()._get_by_config_id(config.id): # type: ignore return dns[0] return _DataManagerFactory._build_manager()._create_and_set(config, None, None) def clean_all_entities_by_version(version_number=None) -> bool: """Delete all entities of a specific version. This function deletes all entities associated with the specified version. 
Parameters: version_number (optional[str]): The version number of the entities to be deleted. If None, the default behavior may apply. Returns: True if the operation succeeded, False otherwise. Notes: - If the specified version does not exist, the operation will be aborted, and False will be returned. - This function cleans all entities, including jobs, scenarios, sequences, tasks, and data nodes. - The production version of the specified version is also deleted if it exists. """ version_manager = _VersionManagerFactory._build_manager() try: version_number = version_manager._replace_version_number(version_number) except NonExistingVersion as e: __logger.warning(f"{e.message} Abort cleaning the entities of version '{version_number}'.") return False _JobManagerFactory._build_manager()._delete_by_version(version_number) _ScenarioManagerFactory._build_manager()._delete_by_version(version_number) _SequenceManagerFactory._build_manager()._delete_by_version(version_number) _TaskManagerFactory._build_manager()._delete_by_version(version_number) _DataManagerFactory._build_manager()._delete_by_version(version_number) version_manager._delete(version_number) try: version_manager._delete_production_version(version_number) except VersionIsNotProductionVersion: pass return True def export_scenario( scenario_id: ScenarioId, folder_path: Union[str, pathlib.Path], ): """Export all related entities of a scenario to a folder. This function exports all related entities of the specified scenario to the specified folder. Parameters: scenario_id (ScenarioId): The ID of the scenario to export. folder_path (Union[str, pathlib.Path]): The folder path to export the scenario to. 
""" manager = _ScenarioManagerFactory._build_manager() scenario = manager._get(scenario_id) entity_ids = manager._get_children_entity_ids(scenario) # type: ignore entity_ids.scenario_ids = {scenario_id} entity_ids.cycle_ids = {scenario.cycle.id} shutil.rmtree(folder_path, ignore_errors=True) for data_node_id in entity_ids.data_node_ids: _DataManagerFactory._build_manager()._export(data_node_id, folder_path) for task_id in entity_ids.task_ids: _TaskManagerFactory._build_manager()._export(task_id, folder_path) for sequence_id in entity_ids.sequence_ids: _SequenceManagerFactory._build_manager()._export(sequence_id, folder_path) for cycle_id in entity_ids.cycle_ids: _CycleManagerFactory._build_manager()._export(cycle_id, folder_path) for scenario_id in entity_ids.scenario_ids: _ScenarioManagerFactory._build_manager()._export(scenario_id, folder_path) for job_id in entity_ids.job_ids: _JobManagerFactory._build_manager()._export(job_id, folder_path) def get_parents( entity: Union[TaskId, DataNodeId, SequenceId, Task, DataNode, Sequence], parent_dict=None ) -> Dict[str, Set[_Entity]]: """Get the parents of an entity from itself or its identifier. Parameters: entity (Union[TaskId, DataNodeId, SequenceId, Task, DataNode, Sequence]): The entity or its identifier to get the parents. Returns: The dictionary of all parent entities. They are grouped by their type (Scenario^, Sequences^, or tasks^) so each key corresponds to a level of the parents and the value is a set of the parent entities. An empty dictionary is returned if the entity does not have parents.<br/> Example: The following instruction returns all the scenarios that include the datanode identified by "my_datanode_id". `taipy.get_parents("id_of_my_datanode")["scenario"]` Raises: ModelNotFound^: If _entity_ does not match a correct entity pattern. 
""" def update_parent_dict(parents_set, parent_dict): for k, value in parents_set.items(): if k in parent_dict.keys(): parent_dict[k].update(value) else: parent_dict[k] = value if isinstance(entity, str): entity = get(entity) # type: ignore parent_dict = parent_dict or dict() if isinstance(entity, (Scenario, Cycle)): return parent_dict current_parent_dict: Dict[str, Set] = {} for parent in entity.parent_ids: parent_entity = get(parent) if parent_entity._MANAGER_NAME in current_parent_dict.keys(): current_parent_dict[parent_entity._MANAGER_NAME].add(parent_entity) else: current_parent_dict[parent_entity._MANAGER_NAME] = {parent_entity} if isinstance(entity, Sequence): update_parent_dict(current_parent_dict, parent_dict) if isinstance(entity, Task): parent_entity_key_to_search_next = "scenario" update_parent_dict(current_parent_dict, parent_dict) for parent in parent_dict.get(parent_entity_key_to_search_next, []): get_parents(parent, parent_dict) if isinstance(entity, DataNode): parent_entity_key_to_search_next = "task" update_parent_dict(current_parent_dict, parent_dict) for parent in parent_dict.get(parent_entity_key_to_search_next, []): get_parents(parent, parent_dict) return parent_dict def get_cycles_scenarios() -> Dict[Optional[Cycle], List[Scenario]]: """Get the scenarios grouped by cycles. Returns: The dictionary of all cycles and their corresponding scenarios. """ cycles_scenarios: Dict[Optional[Cycle], List[Scenario]] = {} for scenario in get_scenarios(): if scenario.cycle in cycles_scenarios.keys(): cycles_scenarios[scenario.cycle].append(scenario) else: cycles_scenarios[scenario.cycle] = [scenario] return cycles_scenarios def get_entities_by_config_id( config_id: str, ) -> Union[List, List[Task], List[DataNode], List[Sequence], List[Scenario]]: """Get the entities by its config id. Parameters: config_id (str): The config id of the entities Returns: The list of all entities by the config id. 
""" entities: List = [] if entities := _ScenarioManagerFactory._build_manager()._get_by_config_id(config_id): return entities if entities := _TaskManagerFactory._build_manager()._get_by_config_id(config_id): return entities if entities := _DataManagerFactory._build_manager()._get_by_config_id(config_id): return entities return entities
from multiprocessing import Lock
from typing import Optional

from taipy.config import Config
from taipy.logger._taipy_logger import _TaipyLogger

from ._backup._backup import _init_backup_file_with_storage_folder
from ._core_cli import _CoreCLI
from ._orchestrator._dispatcher._job_dispatcher import _JobDispatcher
from ._orchestrator._orchestrator import _Orchestrator
from ._orchestrator._orchestrator_factory import _OrchestratorFactory
from ._version._version_manager_factory import _VersionManagerFactory
from .config import CoreSection
from .exceptions.exceptions import CoreServiceIsAlreadyRunning


class Core:
    """
    Core service.

    At most one Core service may run per process; `run()` raises
    `CoreServiceIsAlreadyRunning^` on a second concurrent start.
    """

    # Class-level running flag, guarded by __lock_is_running.
    _is_running = False
    __lock_is_running = Lock()
    __logger = _TaipyLogger._get_logger()

    _orchestrator: Optional[_Orchestrator] = None
    _dispatcher: Optional[_JobDispatcher] = None

    def __init__(self):
        """
        Initialize a Core service.
        """
        pass

    def run(self, force_restart=False):
        """
        Start a Core service.

        This function checks the configuration, manages application's version,
        and starts a dispatcher and lock the Config.

        Raises:
            CoreServiceIsAlreadyRunning: If a Core service is already running.
        """
        # Bug fix: check AND set the flag inside the lock. Previously the check
        # happened before acquiring the lock, so two concurrent run() calls could
        # both pass the check and both start the service.
        with self.__class__.__lock_is_running:
            if self.__class__._is_running:
                raise CoreServiceIsAlreadyRunning
            self.__class__._is_running = True

        self.__update_core_section()
        self.__manage_version()
        self.__check_and_block_config()

        if self._orchestrator is None:
            self._orchestrator = _OrchestratorFactory._build_orchestrator()

        self.__start_dispatcher(force_restart)

    def stop(self):
        """
        Stop the Core service.

        This function stops the dispatcher and unblock the Config for update.
        """
        Config.unblock_update()

        if self._dispatcher:
            # Detach the dispatcher through the factory; keep whatever it returns
            # as the current reference.
            self._dispatcher = _OrchestratorFactory._remove_dispatcher()
            self.__logger.info("Core service has been stopped.")

        with self.__class__.__lock_is_running:
            self.__class__._is_running = False

    @staticmethod
    def __update_core_section():
        # Fold CLI arguments (mode, version number, force) into the CORE config section.
        _CoreCLI.create_parser()
        Config._applied_config._unique_sections[CoreSection.name]._update(_CoreCLI.parse_arguments())

    @staticmethod
    def __manage_version():
        # Resolve the application version, then record it in the CORE config section.
        _VersionManagerFactory._build_manager()._manage_version()
        Config._applied_config._unique_sections[CoreSection.name]._update(
            {"version_number": _VersionManagerFactory._build_manager()._get_latest_version()}
        )

    @staticmethod
    def __check_and_block_config():
        # Validate the configuration, then freeze it for the lifetime of the service.
        Config.check()
        Config.block_update()
        _init_backup_file_with_storage_folder()

    def __start_dispatcher(self, force_restart):
        if dispatcher := _OrchestratorFactory._build_dispatcher(force_restart=force_restart):
            self._dispatcher = dispatcher

        if Config.job_config.is_development:
            _Orchestrator._check_and_execute_jobs_if_development_mode()
#!/usr/bin/env python

"""The setup script."""
import json
import os

from setuptools import find_namespace_packages, find_packages, setup


def _read_readme() -> str:
    # Long description shown on PyPI.
    with open("README.md") as readme_file:
        return readme_file.read()


def _build_version_string() -> str:
    # version.json holds {"major": x, "minor": y, "patch": z} plus an optional "ext".
    with open(os.path.join("src", "taipy", "core", "version.json")) as version_file:
        version = json.load(version_file)
    base = ".".join(str(version.get(part, 0)) for part in ("major", "minor", "patch"))
    ext = version.get("ext")
    return f"{base}.{ext}" if ext else base


readme = _read_readme()
version_string = _build_version_string()

requirements = [
    "pyarrow>=10.0.1,<11.0",
    "networkx>=2.6,<3.0",
    "openpyxl>=3.1.2,<3.2",
    "modin[dask]>=0.23.0,<1.0",
    "pymongo[srv]>=4.2.0,<5.0",
    "sqlalchemy>=2.0.16,<2.1",
    "toml>=0.10,<0.11",
    "taipy-config@git+https://git@github.com/Avaiga/taipy-config.git@develop",
]

test_requirements = ["pytest>=3.8"]

extras_require = {
    "fastparquet": ["fastparquet==2022.11.0"],
    "mssql": ["pyodbc>=4,<4.1"],
    "mysql": ["pymysql>1,<1.1"],
    "postgresql": ["psycopg2>2.9,<2.10"],
}

setup(
    author="Avaiga",
    author_email="dev@taipy.io",
    python_requires=">=3.8",
    classifiers=[
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
    ],
    description="A Python library to build powerful and customized data-driven back-end applications.",
    install_requires=requirements,
    long_description=readme,
    long_description_content_type="text/markdown",
    license="Apache License 2.0",
    keywords="taipy-core",
    name="taipy-core",
    package_dir={"": "src"},
    packages=find_namespace_packages(where="src") + find_packages(include=["taipy", "taipy.core", "taipy.core.*"]),
    include_package_data=True,
    test_suite="tests",
    tests_require=test_requirements,
    url="https://github.com/avaiga/taipy-core",
    version=version_string,
    zip_safe=False,
    extras_require=extras_require,
)
from typing import Dict

from taipy._cli._base_cli import _CLI

from .config import CoreSection


class _CoreCLI:
    """Command-line interface for Taipy Core application."""

    # Version-management mode flags, mutually exclusive at parse time.
    # Maps each canonical CLI flag to its argparse ``add_argument`` kwargs.
    __MODE_ARGS: Dict[str, Dict] = {
        "--development": {
            "action": "store_true",
            "dest": "taipy_development",
            "help": """
            When execute Taipy application in `development` mode, all entities from the previous development version
            will be deleted before running new Taipy application.
        """,
        },
        "--experiment": {
            "dest": "taipy_experiment",
            "nargs": "?",
            "const": "",
            "metavar": "VERSION",
            "help": """
            When execute Taipy application in `experiment` mode, the current Taipy application is saved to a new
            version. If version name already exists, check for compatibility with current Python Config and run the
            application. Without being specified, the version number will be a random string.
        """,
        },
        "--production": {
            "dest": "taipy_production",
            "nargs": "?",
            "const": "",
            "metavar": "VERSION",
            "help": """
            When execute in `production` mode, the current version is used in production. All production versions
            should have the same configuration and share all entities. Without being specified, the latest version
            is used.
        """,
        },
    }

    # Flags deciding whether a Config conflict aborts the run; mutually exclusive.
    __FORCE_ARGS: Dict[str, Dict] = {
        "--force": {
            "dest": "taipy_force",
            "action": "store_true",
            "help": """
            Force override the configuration of the version if existed and run the application.
            Default to False.
        """,
        },
        "--no-force": {
            "dest": "no_taipy_force",
            "action": "store_true",
            "help": "Stop the application if any Config conflict exists.",
        },
    }

    @classmethod
    def create_parser(cls):
        # Register the Core argument group on the global CLI parser. Mode flags get
        # both their plain name and a "--taipy-" prefixed alias; force flags get
        # only the prefixed alias.
        core_parser = _CLI._add_groupparser("Taipy Core", "Optional arguments for Taipy Core service")

        mode_group = core_parser.add_mutually_exclusive_group()
        for mode_arg, mode_arg_dict in cls.__MODE_ARGS.items():
            mode_group.add_argument(mode_arg, cls.__add_taipy_prefix(mode_arg), **mode_arg_dict)

        force_group = core_parser.add_mutually_exclusive_group()
        for force_arg, force_arg_dict in cls.__FORCE_ARGS.items():
            force_group.add_argument(cls.__add_taipy_prefix(force_arg), **force_arg_dict)

    @classmethod
    def create_run_parser(cls):
        # Register the "run" sub-command with the same mode/force flags, unprefixed.
        run_parser = _CLI._add_subparser("run", help="Run a Taipy application.")

        mode_group = run_parser.add_mutually_exclusive_group()
        for mode_arg, mode_arg_dict in cls.__MODE_ARGS.items():
            mode_group.add_argument(mode_arg, **mode_arg_dict)

        force_group = run_parser.add_mutually_exclusive_group()
        for force_arg, force_arg_dict in cls.__FORCE_ARGS.items():
            force_group.add_argument(force_arg, **force_arg_dict)

    @classmethod
    def parse_arguments(cls):
        """Parse the CLI arguments into a dict of CoreSection configuration updates."""
        args = _CLI._parse()
        as_dict = {}

        if args.taipy_development:
            as_dict[CoreSection._MODE_KEY] = CoreSection._DEVELOPMENT_MODE
        elif args.taipy_experiment is not None:
            # nargs="?" with const="" means the flag may carry an optional version
            # number; "" requests a generated one.
            as_dict[CoreSection._MODE_KEY] = CoreSection._EXPERIMENT_MODE
            as_dict[CoreSection._VERSION_NUMBER_KEY] = args.taipy_experiment
        elif args.taipy_production is not None:
            as_dict[CoreSection._MODE_KEY] = CoreSection._PRODUCTION_MODE
            as_dict[CoreSection._VERSION_NUMBER_KEY] = args.taipy_production

        if args.taipy_force:
            as_dict[CoreSection._FORCE_KEY] = True
        elif args.no_taipy_force:
            as_dict[CoreSection._FORCE_KEY] = False

        return as_dict

    @classmethod
    def __add_taipy_prefix(cls, key: str):
        # "--no-force" -> "--no-taipy-force"; otherwise "--force" -> "--taipy-force".
        if key.startswith("--no-"):
            return key[:5] + "taipy-" + key[5:]

        return key[:2] + "taipy-" + key[2:]
import copy
import json
import pathlib
import shutil
from typing import Any, Dict, Iterable, Iterator, List, Optional, Type, Union

from taipy.config.config import Config

from ..common._utils import _retry_read_entity
from ..common.typing import Converter, Entity, Json, ModelType
from ..exceptions import FileCannotBeRead, InvalidExportPath, ModelNotFound
from ._abstract_repository import _AbstractRepository
from ._decoder import _Decoder
from ._encoder import _Encoder


class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
    """
    Holds common methods to be used and extended when the need for saving
    dataclasses as JSON files in local storage emerges.

    Some lines have type: ignore because MyPy won't recognize some generic attributes. This
    should be revised in the future.

    Attributes:
        model_type (ModelType): Generic dataclass.
        converter: A class that handles conversion to and from a database backend
        dir_name (str): Folder that will hold the files for this dataclass model.
    """

    # Reads raising these exceptions are retried by the @_retry_read_entity decorator.
    __EXCEPTIONS_TO_RETRY = (FileCannotBeRead,)

    def __init__(self, model_type: Type[ModelType], converter: Type[Converter], dir_name: str):
        self.model_type = model_type
        self.converter = converter
        self._dir_name = dir_name

    @property
    def dir_path(self):
        # Folder holding the JSON files for this model type: <storage_folder>/<dir_name>.
        return self._storage_folder / self._dir_name

    @property
    def _storage_folder(self) -> pathlib.Path:
        # Read from Config on every access so configuration changes are honored.
        return pathlib.Path(Config.core.storage_folder)

    ###############################
    # ##   Inherited methods   ## #
    ###############################
    def _save(self, entity: Entity):
        # Serialize the entity and write it to <dir_path>/<id>.json, creating the
        # directory on first use.
        self.__create_directory_if_not_exists()

        model = self.converter._entity_to_model(entity)  # type: ignore
        self.__get_path(model.id).write_text(
            json.dumps(model.to_dict(), ensure_ascii=False, indent=0, cls=_Encoder, check_circular=False),
            encoding="UTF-8",
        )

    def _exists(self, entity_id: str) -> bool:
        return self.__get_path(entity_id).exists()

    def _load(self, entity_id: str) -> Entity:
        # Raises ModelNotFound if the file is missing or cannot be read after retries.
        path = pathlib.Path(self.__get_path(entity_id))

        try:
            file_content = self.__read_file(path)
        except (FileNotFoundError, FileCannotBeRead):
            raise ModelNotFound(str(self.dir_path), entity_id)

        return self.__file_content_to_entity(file_content)

    def _load_all(self, filters: Optional[List[Dict]] = None) -> List[Entity]:
        # Load every entity in the directory that matches at least one filter;
        # a missing directory simply yields an empty list.
        entities = []
        try:
            for f in self.dir_path.iterdir():
                if data := self.__filter_by(f, filters):
                    entities.append(self.__file_content_to_entity(data))
        except FileNotFoundError:
            pass
        return entities

    def _delete(self, entity_id: str):
        try:
            self.__get_path(entity_id).unlink()
        except FileNotFoundError:
            raise ModelNotFound(str(self.dir_path), entity_id)

    def _delete_all(self):
        shutil.rmtree(self.dir_path, ignore_errors=True)

    def _delete_many(self, ids: Iterable[str]):
        for model_id in ids:
            self._delete(model_id)

    def _delete_by(self, attribute: str, value: str):
        # Delete every file whose content matches {attribute: value}.
        filters: List[Dict] = [{}]
        for fil in filters:
            fil.update({attribute: value})
        try:
            for f in self.dir_path.iterdir():
                if self.__filter_by(f, filters):
                    f.unlink()
        except FileNotFoundError:
            pass

    def _search(self, attribute: str, value: Any, filters: Optional[List[Dict]] = None) -> List[Entity]:
        return list(self.__search(attribute, value, filters))

    def _export(self, entity_id: str, folder_path: Union[str, pathlib.Path]):
        # Copy the entity's JSON file under <folder_path>/<dir_name>/, refusing to
        # export into the storage folder itself.
        if isinstance(folder_path, str):
            folder: pathlib.Path = pathlib.Path(folder_path)
        else:
            folder = folder_path

        if folder.resolve() == self._storage_folder.resolve():
            raise InvalidExportPath("The export folder must not be the storage folder.")

        export_dir = folder / self._dir_name
        if not export_dir.exists():
            export_dir.mkdir(parents=True)

        export_path = export_dir / f"{entity_id}.json"
        # Delete if exists.
        if export_path.exists():
            export_path.unlink()

        shutil.copy2(self.__get_path(entity_id), export_path)

    ###########################################
    # ##   Specific or optimized methods   ## #
    ###########################################
    def _get_by_configs_and_owner_ids(self, configs_and_owner_ids, filters: Optional[List[Dict]] = None):
        # Design in order to optimize performance on Entity creation.
        # Maintainability and readability were impacted.
        if not filters:
            filters = [{}]
        res = {}
        configs_and_owner_ids = set(configs_and_owner_ids)

        try:
            for f in self.dir_path.iterdir():
                config_id, owner_id, entity = self.__match_file_and_get_entity(
                    f, configs_and_owner_ids, copy.deepcopy(filters)
                )

                if entity:
                    key = config_id, owner_id
                    res[key] = entity
                    configs_and_owner_ids.remove(key)

                    # Stop scanning the directory as soon as every pair is resolved.
                    if len(configs_and_owner_ids) == 0:
                        return res
        except FileNotFoundError:
            # Folder with data was not created yet.
            return {}

        return res

    def _get_by_config_and_owner_id(
        self, config_id: str, owner_id: Optional[str], filters: Optional[List[Dict]] = None
    ) -> Optional[Entity]:
        if not filters:
            filters = [{}]
        else:
            # Deep-copy so the owner_id constraint added below never leaks to the caller.
            filters = copy.deepcopy(filters)

        if owner_id is not None:
            for fil in filters:
                fil.update({"owner_id": owner_id})

        return self.__filter_files_by_config_and_owner_id(config_id, owner_id, filters)

    #############################
    # ##   Private methods   ## #
    #############################
    def __filter_files_by_config_and_owner_id(
        self, config_id: str, owner_id: Optional[str], filters: Optional[List[Dict]] = None
    ):
        # Cheap pre-selection on the file name, then full content filtering; returns
        # the first entity whose config_id and owner_id both match, else None.
        try:
            files = filter(lambda f: config_id in f.name, self.dir_path.iterdir())
            entities = map(
                lambda f: self.__file_content_to_entity(self.__filter_by(f, filters)),
                files,
            )
            corresponding_entities = filter(
                lambda e: e is not None and e.config_id == config_id and e.owner_id == owner_id,  # type: ignore
                entities,
            )
            return next(corresponding_entities, None)  # type: ignore
        except FileNotFoundError:
            pass
        return None

    def __match_file_and_get_entity(self, filepath, config_and_owner_ids, filters):
        # Try every (config, owner_id) pair whose config id appears in the file name;
        # return the first pair whose constraints match the file content.
        if match := [(c, p) for c, p in config_and_owner_ids if c.id in filepath.name]:
            for config, owner_id in match:
                for fil in filters:
                    fil.update({"config_id": config.id, "owner_id": owner_id})

                if data := self.__filter_by(filepath, filters):
                    return config, owner_id, self.__file_content_to_entity(data)
        return None, None, None

    def __create_directory_if_not_exists(self):
        self.dir_path.mkdir(parents=True, exist_ok=True)

    def __search(self, attribute: str, value: str, filters: Optional[List[Dict]] = None) -> Iterator[Entity]:
        return filter(lambda e: getattr(e, attribute, None) == value, self._load_all(filters))

    def __get_path(self, model_id) -> pathlib.Path:
        return self.dir_path / f"{model_id}.json"

    def __file_content_to_entity(self, file_content):
        # Accepts either a raw JSON string or an already-decoded dict; returns None
        # for empty content.
        if not file_content:
            return None
        if isinstance(file_content, str):
            file_content = json.loads(file_content, cls=_Decoder)
        model = self.model_type.from_dict(file_content)
        entity = self.converter._model_to_entity(model)
        return entity

    def __filter_by(self, filepath: pathlib.Path, filters: Optional[List[Dict]]) -> Optional[Json]:
        # Textual pre-filter: build '"key": "value"' fragments and check for their
        # presence in the raw file before paying for a full JSON parse. A filter
        # matches only if all of its conditions appear in the content.
        if not filters:
            filters = [{}]

        try:
            file_content = self.__read_file(filepath)
        except (FileNotFoundError, FileCannotBeRead):
            return None

        for _filter in filters:
            conditions = [
                f'"{key}": "{value}"' if value is not None else f'"{key}": null' for key, value in _filter.items()
            ]

            if all(condition in file_content for condition in conditions):
                return json.loads(file_content, cls=_Decoder)
        return None

    @_retry_read_entity(__EXCEPTIONS_TO_RETRY)
    def __read_file(self, filepath: pathlib.Path) -> str:
        # Any read failure other than a missing file is normalized to FileCannotBeRead
        # so the retry decorator can handle it.
        if not filepath.is_file():
            raise FileNotFoundError

        try:
            with filepath.open("r", encoding="UTF-8") as f:
                file_content = f.read()
            return file_content
        except Exception:
            raise FileCannotBeRead(str(filepath))
import json
import re
from datetime import datetime, timedelta

# Duration strings look like "1d2h3m4s"; every unit is optional and may be fractional.
_TIMEDELTA_PATTERN = re.compile(
    r"^((?P<days>[\.\d]+?)d)? *"
    r"((?P<hours>[\.\d]+?)h)? *"
    r"((?P<minutes>[\.\d]+?)m)? *"
    r"((?P<seconds>[\.\d]+?)s)?$"
)


class _Decoder(json.JSONDecoder):
    """JSON decoder restoring `datetime` and `timedelta` values tagged with "__type__"."""

    def __init__(self, *args, **kwargs):
        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)

    def _str_to_timedelta(self, timedelta_str: str) -> timedelta:
        """
        Parse a time string e.g. (2h13m) into a timedelta object.

        :param timedelta_str: A string identifying a duration. (eg. 2h13m)
        :return datetime.timedelta: A datetime.timedelta object
        """
        match = _TIMEDELTA_PATTERN.match(timedelta_str)
        if match is None:
            raise TypeError("Can not deserialize string into timedelta")

        duration = {unit: float(amount) for unit, amount in match.groupdict().items() if amount}
        # mypy has an issue with dynamic keyword parameters, hence the type ignore below.
        return timedelta(**duration)  # type: ignore

    def object_hook(self, source):
        """Rebuild tagged objects; dicts without a recognized "__type__" pass through unchanged."""
        tag = source.get("__type__")
        if tag == "Datetime":
            return datetime.fromisoformat(source.get("__value__"))
        if tag == "Timedelta":
            return self._str_to_timedelta(source.get("__value__"))
        return source


def loads(d):
    """Deserialize `d` using the taipy-aware decoder."""
    return json.loads(d, cls=_Decoder)
import pathlib
from abc import abstractmethod
from typing import Any, Dict, Generic, Iterable, List, Optional, TypeVar, Union

ModelType = TypeVar("ModelType")
Entity = TypeVar("Entity")


class _AbstractRepository(Generic[ModelType, Entity]):
    """Interface of the persistence layer: CRUD, search and export over entities.

    NOTE(review): the class does not inherit abc.ABC, so @abstractmethod is not
    enforced at instantiation time — confirm this is intentional.
    """

    @abstractmethod
    def _save(self, entity: Entity):
        """
        Save an entity in the repository.

        Parameters:
            entity: The data from an object.
        """
        raise NotImplementedError

    @abstractmethod
    def _exists(self, entity_id: str) -> bool:
        """
        Check if an entity with id entity_id exists in the repository.

        Parameters:
            entity_id: The entity id, i.e., its primary key.

        Returns:
            True if the entity id exists.
        """
        raise NotImplementedError

    @abstractmethod
    def _load(self, entity_id: str) -> Entity:
        """
        Retrieve the entity data from the repository.

        Parameters:
            entity_id: The entity id, i.e., its primary key.

        Returns:
            An entity.
        """
        raise NotImplementedError

    @abstractmethod
    def _load_all(self, filters: Optional[List[Dict]] = None) -> List[Entity]:
        """
        Retrieve all the entities' data from the repository taking any passed filter into account.

        Parameters:
            filters: An optional list of attribute/value dictionaries to filter on.

        Returns:
            A list of entities.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete(self, entity_id: str):
        """
        Delete an entity in the repository.

        Parameters:
            entity_id: The id of the entity to be deleted.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete_all(self):
        """
        Delete all entities from the repository.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete_many(self, ids: Iterable[str]):
        """
        Delete all entities from the list of ids from the repository.

        Parameters:
            ids: List of ids to be deleted.
        """
        raise NotImplementedError

    @abstractmethod
    def _delete_by(self, attribute: str, value: str):
        """
        Delete all entities whose attribute equals the given value.

        Parameters:
            attribute: The entity property that is the key to the search.
            value: The value of the attribute that are being searched.
        """
        raise NotImplementedError

    @abstractmethod
    def _search(self, attribute: str, value: Any, filters: Optional[List[Dict]] = None) -> List[Entity]:
        """
        Retrieve all entities whose attribute equals the given value, taking any
        passed filter into account.

        Parameters:
            attribute: The entity property that is the key to the search.
            value: The value of the attribute that are being searched.
            filters: An optional list of attribute/value dictionaries to filter on.

        Returns:
            A list of entities that match the search criteria.
        """
        raise NotImplementedError

    @abstractmethod
    def _export(self, entity_id: str, folder_path: Union[str, pathlib.Path]):
        """
        Export an entity from the repository.

        Parameters:
            entity_id (str): The id of the entity to be exported.
            folder_path (Union[str, pathlib.Path]): The folder path to export the entity to.
        """
        raise NotImplementedError
import json
import pathlib
from typing import Any, Dict, Iterable, List, Optional, Type, Union

from sqlalchemy.dialects import sqlite
from sqlalchemy.exc import NoResultFound

from .._repository._abstract_repository import _AbstractRepository
from ..common.typing import Converter, Entity, ModelType
from ..exceptions import ModelNotFound
from .db._sql_connection import _SQLConnection


class _SQLRepository(_AbstractRepository[ModelType, Entity]):
    def __init__(self, model_type: Type[ModelType], converter: Type[Converter]):
        """
        Holds common methods to be used and extended when the need for saving
        dataclasses in a SqlLite database.

        Some lines have type: ignore because MyPy won't recognize some generic attributes. This
        should be revised in the future.

        Attributes:
            model_type: Generic dataclass.
            converter: A class that handles conversion to and from a database backend
            db: An sqlite3 session object
        """
        self.db = _SQLConnection.init_db()
        self.model_type = model_type
        self.converter = converter
        self.table = self.model_type.__table__

    ###############################
    # ##   Inherited methods   ## #
    ###############################
    def _save(self, entity: Entity):
        """Insert the entity, or update the existing row when the id is already stored."""
        obj = self.converter._entity_to_model(entity)
        if self._exists(entity.id):  # type: ignore
            self._update_entry(obj)
            return
        self.__insert_model(obj)

    def _exists(self, entity_id: str):
        """Return True if a row with `entity_id` exists in the table."""
        query = self.table.select().filter_by(id=entity_id)
        # Bug fix: compile with the sqlite dialect — as every other query in this
        # class does — so the statement uses "?" positional placeholders. The previous
        # str(query) emitted named ":id_1" placeholders, which do not accept the
        # positional parameter list passed to execute().
        return bool(self.db.execute(str(query.compile(dialect=sqlite.dialect())), [entity_id]).fetchone())

    def _load(self, entity_id: str) -> Entity:
        """Load one entity by id; raises ModelNotFound when no row matches."""
        query = self.table.select().filter_by(id=entity_id)

        if entry := self.db.execute(str(query.compile(dialect=sqlite.dialect())), [entity_id]).fetchone():
            entry = self.model_type.from_dict(entry)
            return self.converter._model_to_entity(entry)
        raise ModelNotFound(str(self.model_type.__name__), entity_id)

    def _load_all(self, filters: Optional[List[Dict]] = None) -> List[Entity]:
        """Load every entity matching at least one of the given filters."""
        query = self.table.select()
        entities: List[Entity] = []

        for f in filters or [{}]:
            filtered_query = query.filter_by(**f)
            try:
                entries = self.db.execute(
                    str(filtered_query.compile(dialect=sqlite.dialect())),
                    [self.__serialize_filter_values(val) for val in list(f.values())],
                ).fetchall()

                entities.extend([self.converter._model_to_entity(self.model_type.from_dict(m)) for m in entries])
            except NoResultFound:
                continue
        return entities

    def _delete(self, entity_id: str):
        """Delete the row with `entity_id`; raises ModelNotFound when nothing was deleted."""
        delete_query = self.table.delete().filter_by(id=entity_id)
        cursor = self.db.execute(str(delete_query.compile(dialect=sqlite.dialect())), [entity_id])

        if cursor.rowcount == 0:
            raise ModelNotFound(str(self.model_type.__name__), entity_id)

        self.db.commit()

    def _delete_all(self):
        self.db.execute(str(self.table.delete().compile(dialect=sqlite.dialect())))
        self.db.commit()

    def _delete_many(self, ids: Iterable[str]):
        for entity_id in ids:
            self._delete(entity_id)

    def _delete_by(self, attribute: str, value: str):
        """Delete every row whose `attribute` column equals `value`."""
        delete_by_query = self.table.delete().filter_by(**{attribute: value})
        self.db.execute(str(delete_by_query.compile(dialect=sqlite.dialect())), [value])
        self.db.commit()

    def _search(self, attribute: str, value: Any, filters: Optional[List[Dict]] = None) -> List[Entity]:
        """Load every entity whose `attribute` equals `value`, restricted by the filters."""
        query = self.table.select().filter_by(**{attribute: value})

        entities: List[Entity] = []
        for f in filters or [{}]:
            entries = self.db.execute(
                str(query.filter_by(**f).compile(dialect=sqlite.dialect())),
                [value] + [self.__serialize_filter_values(val) for val in list(f.values())],
            ).fetchall()
            entities.extend([self.converter._model_to_entity(self.model_type.from_dict(m)) for m in entries])

        return entities

    def _export(self, entity_id: str, folder_path: Union[str, pathlib.Path]):
        """Dump the raw row of `entity_id` as JSON under <folder_path>/<table name>/."""
        if isinstance(folder_path, str):
            folder: pathlib.Path = pathlib.Path(folder_path)
        else:
            folder = folder_path

        export_dir = folder / self.table.name
        if not export_dir.exists():
            export_dir.mkdir(parents=True)

        export_path = export_dir / f"{entity_id}.json"

        query = self.table.select().filter_by(id=entity_id)

        if entry := self.db.execute(str(query.compile(dialect=sqlite.dialect())), [entity_id]).fetchone():
            with open(export_path, "w", encoding="utf-8") as export_file:
                export_file.write(json.dumps(entry))
        else:
            # Consistency fix: raise with the model name string, matching _load/_delete.
            raise ModelNotFound(str(self.model_type.__name__), entity_id)

    ###########################################
    # ##   Specific or optimized methods   ## #
    ###########################################
    def _get_multi(self, *, skip: int = 0, limit: int = 100) -> List[ModelType]:
        """Return up to `limit` raw rows, skipping the first `skip`."""
        query = self.table.select().offset(skip).limit(limit)
        return self.db.execute(str(query.compile(dialect=sqlite.dialect()))).fetchall()

    def _get_by_config(self, config_id: Any) -> Optional[ModelType]:
        # NOTE: returns fetchall() (a list of raw rows) despite the Optional[ModelType]
        # annotation — kept as-is for backward compatibility with existing callers.
        query = self.table.select().filter_by(config_id=config_id)
        return self.db.execute(str(query.compile(dialect=sqlite.dialect())), [config_id]).fetchall()

    def _get_by_config_and_owner_id(
        self, config_id: str, owner_id: Optional[str], filters: Optional[List[Dict]] = None
    ) -> Optional[Entity]:
        """Return the entity matching config_id/owner_id (and filters), or None."""
        if not filters:
            filters = [{}]

        if entry := self.__get_entities_by_config_and_owner(config_id, owner_id, filters):
            return self.converter._model_to_entity(entry)
        return None

    def _get_by_configs_and_owner_ids(self, configs_and_owner_ids, filters: Optional[List[Dict]] = None):
        # Design in order to optimize performance on Entity creation.
        # Maintainability and readability were impacted.
        if not filters:
            filters = [{}]
        res = {}
        configs_and_owner_ids = set(configs_and_owner_ids)

        for config, owner in configs_and_owner_ids:
            entry = self.__get_entities_by_config_and_owner(config.id, owner, filters)
            if entry:
                entity = self.converter._model_to_entity(entry)
                key = config, owner
                res[key] = entity

        return res

    def __get_entities_by_config_and_owner(
        self, config_id: str, owner_id: Optional[str] = None, filters: Optional[List[Dict]] = None
    ) -> Optional[ModelType]:
        # Build the statement incrementally: base config_id predicate, optional owner_id
        # predicate, then a raw "version IN (...)" clause appended as text because the
        # versions come from the filters rather than the table columns.
        if not filters:
            filters = []
        versions = [item.get("version") for item in filters if item.get("version")]

        query = self.table.select().filter_by(config_id=config_id)
        parameters: List = [config_id]

        if owner_id:
            parameters.append(owner_id)
        query = query.filter_by(owner_id=owner_id)
        query = str(query.compile(dialect=sqlite.dialect()))

        if versions:
            table_name = self.table.name
            query = query + f" AND {table_name}.version IN ({','.join(['?']*len(versions))})"
            parameters.extend(versions)

        if entry := self.db.execute(query, parameters).fetchone():
            return self.model_type.from_dict(entry)
        return None

    #############################
    # ##   Private methods   ## #
    #############################
    def __insert_model(self, model: ModelType):
        query = self.table.insert()
        self.db.execute(str(query.compile(dialect=sqlite.dialect())), model.to_list())
        self.db.commit()

    def _update_entry(self, model):
        query = self.table.update().filter_by(id=model.id)
        self.db.execute(str(query.compile(dialect=sqlite.dialect())), model.to_list() + [model.id])
        self.db.commit()

    @staticmethod
    def __serialize_filter_values(value):
        # Containers are stored as JSON text with single quotes, matching how
        # _BaseModel serializes attributes; scalars pass through unchanged.
        if isinstance(value, (dict, list)):
            return json.dumps(value).replace('"', "'")
        return value
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import json
from datetime import datetime, timedelta
from enum import Enum
from typing import Any


class _Encoder(json.JSONEncoder):
    """JSON encoder emitting tagged dicts for `datetime`/`timedelta` and raw values for `Enum`."""

    def _timedelta_to_str(self, obj: timedelta) -> str:
        """Render a timedelta as "<D>d<H>h<M>m<S>s" using floor arithmetic."""
        seconds = obj.total_seconds()
        days, remainder = divmod(seconds, 86400)
        hours, remainder = divmod(remainder, 3600)
        minutes, secs = divmod(remainder, 60)
        return f"{int(days)}d{int(hours)}h{int(minutes)}m{int(secs)}s"

    def default(self, o: Any):
        """Handle the taipy-specific types; everything else defers to the base encoder."""
        if isinstance(o, Enum):
            return o.value
        if isinstance(o, datetime):
            return {"__type__": "Datetime", "__value__": o.isoformat()}
        if isinstance(o, timedelta):
            return {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
        return json.JSONEncoder.default(self, o)


def dumps(d):
    """Serialize `d` using the taipy-aware encoder."""
    return json.dumps(d, cls=_Encoder)
from abc import ABC, abstractmethod


class _AbstractConverter(ABC):
    """Interface for converting between storable model objects and core entities."""

    @classmethod
    @abstractmethod
    def _entity_to_model(cls, entity):
        # Convert a core entity into its storable model counterpart.
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def _model_to_entity(cls, model):
        # Convert a stored model back into a core entity.
        raise NotImplementedError
import dataclasses
import enum
import json
from typing import Any, Dict

from sqlalchemy import Table

from ._decoder import _Decoder
from ._encoder import _Encoder


class _BaseModel:
    """Base class of storable models, providing (de)serialization helpers."""

    # SQLAlchemy table the concrete model maps to; set by subclasses.
    __table__: Table

    def __iter__(self):
        # Yield (attribute, value) pairs so a model can be turned into a dict.
        for attr, value in self.__dict__.items():
            yield attr, value

    def to_dict(self) -> Dict[str, Any]:
        """Return the dataclass fields as a dict, rendering Enum members via repr()."""
        model_dict = {**dataclasses.asdict(self)}
        for k, v in model_dict.items():
            if isinstance(v, enum.Enum):
                model_dict[k] = repr(v)
        return model_dict

    @staticmethod
    def _serialize_attribute(value):
        # JSON-encode a single attribute with the datetime/timedelta/Enum-aware encoder.
        return json.dumps(value, ensure_ascii=False, cls=_Encoder)

    @staticmethod
    def _deserialize_attribute(value):
        # NOTE(review): the quote swap assumes serialized payloads contain no literal
        # quote characters inside values — confirm against the data actually stored.
        if isinstance(value, str):
            return json.loads(value.replace("'", '"'), cls=_Decoder)
        return value

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        # No-op here; concrete models override it (callers rely on model_type.from_dict).
        pass

    def to_list(self):
        # No-op here; concrete models override it (callers rely on model.to_list()).
        pass
import sqlite3
from functools import lru_cache
from sqlite3 import Connection

from sqlalchemy.dialects import sqlite
from sqlalchemy.schema import CreateTable

from taipy.config.config import Config

from ...exceptions import MissingRequiredProperty


def dict_factory(cursor, row):
    # sqlite3 row factory exposing each row as a {column_name: value} dict.
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d


class _SQLConnection:
    # Process-wide singleton connection, lazily created by init_db().
    _connection = None

    @classmethod
    def init_db(cls):
        """Create (once) and return the shared SQLite connection with all tables ensured."""
        if cls._connection:
            return cls._connection

        cls._connection = _build_connection()
        cls._connection.row_factory = dict_factory

        # Imported here rather than at module top to avoid circular imports between
        # the connection module and the model modules.
        from ..._version._version_model import _VersionModel
        from ...cycle._cycle_model import _CycleModel
        from ...data._data_model import _DataNodeModel
        from ...job._job_model import _JobModel
        from ...scenario._scenario_model import _ScenarioModel
        from ...submission._submission_model import _SubmissionModel
        from ...task._task_model import _TaskModel

        # Emit CREATE TABLE IF NOT EXISTS for every model so a fresh database
        # is usable immediately.
        cls._connection.execute(
            str(CreateTable(_CycleModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_DataNodeModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_JobModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_ScenarioModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_TaskModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_VersionModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )
        cls._connection.execute(
            str(CreateTable(_SubmissionModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
        )

        return cls._connection


def _build_connection() -> Connection:
    """Build the SQLite connection from the configured repository properties.

    Raises:
        MissingRequiredProperty: If "db_location" is absent from the repository properties.
    """
    # Set SQLite threading mode to Serialized, means that threads may share the module,
    # connections and cursors.
    # NOTE(review): in CPython this assignment only rebinds the module attribute —
    # confirm it actually changes the underlying threading mode.
    sqlite3.threadsafety = 3

    properties = Config.core.repository_properties
    try:
        db_location = properties["db_location"]
    except KeyError:
        raise MissingRequiredProperty("Missing property db_location.")

    return __build_connection(db_location)


@lru_cache
def __build_connection(db_location: str):
    # One cached connection per distinct db_location; check_same_thread=False allows
    # the connection to be used from multiple threads.
    return sqlite3.connect(db_location, check_same_thread=False)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from sqlalchemy.orm import declarative_base, registry

# Declarative base class shared by the SQL models of the repository layer.
_SQLBaseModel = declarative_base()

# SQLAlchemy mapper registry available to the model modules.
mapper_registry = registry()
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import os

from taipy.config import Config

# Backup bookkeeping is enabled by setting this environment variable to a file path;
# when it is unset, every function in this module is a no-op.
__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME = "TAIPY_BACKUP_FILE_PATH"


def _init_backup_file_with_storage_folder():
    """Record the configured storage folder in the backup file, if backup is enabled."""
    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME):
        with open(preserve_file_path, "a") as f:
            f.write(f"{Config.core.storage_folder}\n")


def _append_to_backup_file(new_file_path: str):
    """Append `new_file_path` to the backup file unless it lives inside the storage folder.

    Paths inside the storage folder are already covered by the storage-folder entry
    written by `_init_backup_file_with_storage_folder`.
    """
    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME):
        storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
        if not os.path.abspath(new_file_path).startswith(storage_folder):
            with open(preserve_file_path, "a") as f:
                f.write(f"{new_file_path}\n")


def _remove_from_backup_file(to_remove_file_path: str):
    """Remove the first occurrence of `to_remove_file_path` from the backup file.

    Best-effort: any I/O error is swallowed so backup bookkeeping never breaks the caller.
    """
    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME, None):
        storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
        if not os.path.abspath(to_remove_file_path).startswith(storage_folder):
            try:
                with open(preserve_file_path, "r+") as f:
                    old_backup = f.read()
                    to_remove_file_path = to_remove_file_path + "\n"

                    # To avoid removing the file path of different data nodes that are pointing
                    # to the same file. We will only replace the file path only once.
                    if old_backup.startswith(to_remove_file_path):
                        new_backup = old_backup.replace(to_remove_file_path, "", 1)
                    else:
                        new_backup = old_backup.replace("\n" + to_remove_file_path, "\n", 1)

                    # Bug fix: compare by value, not identity. The previous
                    # `new_backup is not old_backup` only worked because CPython's
                    # str.replace happens to return the same object when nothing was
                    # replaced — an implementation detail, not guaranteed behavior.
                    if new_backup != old_backup:
                        f.seek(0)
                        f.write(new_backup)
                        f.truncate()
            except Exception:
                # Deliberate best-effort: never fail the caller over backup bookkeeping.
                pass


def _replace_in_backup_file(old_file_path: str, new_file_path: str):
    """Replace the backup entry of `old_file_path` with `new_file_path`."""
    _remove_from_backup_file(old_file_path)
    _append_to_backup_file(new_file_path)
import re
from copy import copy
from typing import Any, Dict, Optional, Union

from taipy.config import Config, UniqueSection
from taipy.config._config import _Config
from taipy.config.common._config_blocker import _ConfigBlocker
from taipy.config.common._template_handler import _TemplateHandler as _tpl

from .._init_version import _read_version
from ..exceptions.exceptions import ConfigCoreVersionMismatched


class CoreSection(UniqueSection):
    """
    Configuration parameters for running the `Core^` service.

    Attributes:
        root_folder (str): Path of the base folder for the taipy application. The default value is "./taipy/"
        storage_folder (str): Folder name used to store Taipy data. The default value is ".data/". It is used in
            conjunction with the *root_folder* attribute. That means the storage path is
            <root_folder><storage_folder> (The default path is "./taipy/.data/").
        repository_type (str): Type of the repository to be used to store Taipy data. The default value is
            "filesystem".
        repository_properties (Dict[str, Union[str, int]]): A dictionary of additional properties to be used by the
            repository.
        read_entity_retry (int): Number of retries to read an entity from the repository before return failure.
            The default value is 1.
        mode (str): The Taipy operating mode. By default, the `Core^` service runs in "development" mode.
            An "experiment" and a "production" mode are also available. Please refer to the
            [Versioning management](../core/versioning/index.md) documentation page for more details.
        version_number (str): The identifier of the user application version. Please refer to the
            [Versioning management](../core/versioning/index.md) documentation page for more details.
        force (bool): If True, force the application run even if there are some conflicts in the configuration.
        core_version (str): The Taipy Core package version.
        **properties (dict[str, any]): A dictionary of additional properties.
    """

    name = "CORE"

    _ROOT_FOLDER_KEY = "root_folder"
    _DEFAULT_ROOT_FOLDER = "./taipy/"

    _STORAGE_FOLDER_KEY = "storage_folder"
    _DEFAULT_STORAGE_FOLDER = ".data/"

    _REPOSITORY_TYPE_KEY = "repository_type"
    _DEFAULT_REPOSITORY_TYPE = "filesystem"

    _REPOSITORY_PROPERTIES_KEY = "repository_properties"
    _DEFAULT_REPOSITORY_PROPERTIES: Dict = dict()

    _READ_ENTITY_RETRY_KEY = "read_entity_retry"
    _DEFAULT_READ_ENTITY_RETRY = 1

    _MODE_KEY = "mode"
    _DEVELOPMENT_MODE = "development"
    _EXPERIMENT_MODE = "experiment"
    _PRODUCTION_MODE = "production"
    _DEFAULT_MODE = _DEVELOPMENT_MODE

    _VERSION_NUMBER_KEY = "version_number"
    _DEFAULT_VERSION_NUMBER = ""

    _FORCE_KEY = "force"
    _DEFAULT_FORCE = False

    _CORE_VERSION_KEY = "core_version"
    _CURRENT_CORE_VERSION = _read_version()

    def __init__(
        self,
        root_folder: Optional[str] = None,
        storage_folder: Optional[str] = None,
        repository_type: Optional[str] = None,
        repository_properties: Optional[Dict[str, Union[str, int]]] = None,
        read_entity_retry: Optional[int] = None,
        mode: Optional[str] = None,
        version_number: Optional[str] = None,
        force: Optional[bool] = None,
        core_version: Optional[str] = None,
        **properties,
    ):
        self._root_folder = root_folder
        self._storage_folder = storage_folder
        self._repository_type = repository_type
        self._repository_properties = repository_properties or {}
        self._read_entity_retry = (
            read_entity_retry if read_entity_retry is not None else self._DEFAULT_READ_ENTITY_RETRY
        )
        self.mode = mode or self._DEFAULT_MODE
        self.version_number = version_number or self._DEFAULT_VERSION_NUMBER
        self.force = force or self._DEFAULT_FORCE
        # Fail fast if the configuration was produced by an incompatible Core version.
        self._check_compatibility(core_version)
        self._core_version = core_version
        super().__init__(**properties)

    def __copy__(self):
        return CoreSection(
            self.root_folder,
            self.storage_folder,
            self.repository_type,
            self.repository_properties,
            self.read_entity_retry,
            self.mode,
            self.version_number,
            self.force,
            self._core_version,
            **copy(self._properties),
        )

    @property
    def storage_folder(self):
        return _tpl._replace_templates(self._storage_folder)

    @storage_folder.setter  # type: ignore
    @_ConfigBlocker._check()
    def storage_folder(self, val):
        self._storage_folder = val

    @property
    def root_folder(self):
        return _tpl._replace_templates(self._root_folder)

    @root_folder.setter  # type: ignore
    @_ConfigBlocker._check()
    def root_folder(self, val):
        self._root_folder = val

    @property
    def repository_type(self):
        return _tpl._replace_templates(self._repository_type)

    @repository_type.setter  # type: ignore
    @_ConfigBlocker._check()
    def repository_type(self, val):
        self._repository_type = val

    @property
    def repository_properties(self):
        return (
            {k: _tpl._replace_templates(v) for k, v in self._repository_properties.items()}
            if self._repository_properties
            else self._DEFAULT_REPOSITORY_PROPERTIES.copy()
        )

    @repository_properties.setter  # type: ignore
    @_ConfigBlocker._check()
    def repository_properties(self, val):
        self._repository_properties = val

    @property
    def read_entity_retry(self):
        return _tpl._replace_templates(self._read_entity_retry)

    @read_entity_retry.setter  # type: ignore
    @_ConfigBlocker._check()
    def read_entity_retry(self, val):
        self._read_entity_retry = val

    @classmethod
    def default_config(cls):
        """Build a `CoreSection` carrying every default value."""
        return CoreSection(
            cls._DEFAULT_ROOT_FOLDER,
            cls._DEFAULT_STORAGE_FOLDER,
            cls._DEFAULT_REPOSITORY_TYPE,
            cls._DEFAULT_REPOSITORY_PROPERTIES,
            cls._DEFAULT_READ_ENTITY_RETRY,
            cls._DEFAULT_MODE,
            cls._DEFAULT_VERSION_NUMBER,
            cls._DEFAULT_FORCE,
            cls._CURRENT_CORE_VERSION,
        )

    def _clean(self):
        """Reset every field to its default value."""
        self._root_folder = self._DEFAULT_ROOT_FOLDER
        self._storage_folder = self._DEFAULT_STORAGE_FOLDER
        self._repository_type = self._DEFAULT_REPOSITORY_TYPE
        self._repository_properties = self._DEFAULT_REPOSITORY_PROPERTIES.copy()
        self._read_entity_retry = self._DEFAULT_READ_ENTITY_RETRY
        self.mode = self._DEFAULT_MODE
        self.version_number = self._DEFAULT_VERSION_NUMBER
        self.force = self._DEFAULT_FORCE
        self._core_version = self._CURRENT_CORE_VERSION
        self._properties.clear()

    def _to_dict(self):
        """Serialize the section to a plain dict, skipping unset fields."""
        as_dict = {}
        if self._root_folder:
            as_dict[self._ROOT_FOLDER_KEY] = self._root_folder
        if self._storage_folder:
            as_dict[self._STORAGE_FOLDER_KEY] = self._storage_folder
        if self._repository_type:
            as_dict[self._REPOSITORY_TYPE_KEY] = self._repository_type
        if self._repository_properties:
            as_dict[self._REPOSITORY_PROPERTIES_KEY] = self._repository_properties
        if self._read_entity_retry is not None:
            as_dict[self._READ_ENTITY_RETRY_KEY] = self._read_entity_retry
        if self.mode is not None:
            as_dict[self._MODE_KEY] = self.mode
        if self.version_number is not None:
            as_dict[self._VERSION_NUMBER_KEY] = self.version_number
        if self.force is not None:
            as_dict[self._FORCE_KEY] = self.force
        if self._core_version is not None:
            as_dict[self._CORE_VERSION_KEY] = self._core_version
        as_dict.update(self._properties)
        return as_dict

    @classmethod
    def _from_dict(cls, as_dict: Dict[str, Any], id=None, config: Optional[_Config] = None):
        """Build a `CoreSection` from a dict; unknown keys become extra properties."""
        root_folder = as_dict.pop(cls._ROOT_FOLDER_KEY, None)
        storage_folder = as_dict.pop(cls._STORAGE_FOLDER_KEY, None)
        repository_type = as_dict.pop(cls._REPOSITORY_TYPE_KEY, None)
        repository_properties = as_dict.pop(cls._REPOSITORY_PROPERTIES_KEY, None)
        read_entity_retry = as_dict.pop(cls._READ_ENTITY_RETRY_KEY, None)
        mode = as_dict.pop(cls._MODE_KEY, None)
        version_nb = as_dict.pop(cls._VERSION_NUMBER_KEY, None)
        force = as_dict.pop(cls._FORCE_KEY, None)
        core_version = as_dict.pop(cls._CORE_VERSION_KEY, None)
        return CoreSection(
            root_folder,
            storage_folder,
            repository_type,
            repository_properties,
            read_entity_retry,
            mode,
            version_nb,
            force,
            core_version,
            **as_dict,
        )

    def _update(self, as_dict: Dict[str, Any]):
        """Merge *as_dict* into this section, resolving ENV-variable templates."""
        root_folder = _tpl._replace_templates(as_dict.pop(self._ROOT_FOLDER_KEY, self._root_folder))
        if self._root_folder != root_folder:
            self._root_folder = root_folder
        storage_folder = _tpl._replace_templates(as_dict.pop(self._STORAGE_FOLDER_KEY, self._storage_folder))
        if self._storage_folder != storage_folder:
            self._storage_folder = storage_folder
        repository_type = _tpl._replace_templates(as_dict.pop(self._REPOSITORY_TYPE_KEY, self._repository_type))
        if self._repository_type != repository_type:
            self._repository_type = repository_type
        repository_properties = _tpl._replace_templates(
            as_dict.pop(self._REPOSITORY_PROPERTIES_KEY, self._repository_properties)
        )
        self._repository_properties.update(repository_properties)
        read_entity_retry = _tpl._replace_templates(as_dict.pop(self._READ_ENTITY_RETRY_KEY, self._read_entity_retry))
        if self._read_entity_retry != read_entity_retry:
            self._read_entity_retry = read_entity_retry
        mode = _tpl._replace_templates(as_dict.pop(self._MODE_KEY, self.mode))
        if self.mode != mode:
            self.mode = mode
        version_number = _tpl._replace_templates(as_dict.pop(self._VERSION_NUMBER_KEY, self.version_number))
        if self.version_number != version_number:
            self.version_number = version_number
        force = _tpl._replace_templates(as_dict.pop(self._FORCE_KEY, self.force))
        if self.force != force:
            self.force = force
        core_version = as_dict.pop(self._CORE_VERSION_KEY, None)
        self._check_compatibility(core_version)
        self._properties.update(as_dict)

    @classmethod
    def _check_compatibility(cls, core_version):
        """Raise `ConfigCoreVersionMismatched` if *core_version* differs from the
        installed Core version in its major or minor component.

        A falsy *core_version* (e.g. legacy configuration) is accepted as-is.
        """
        if not core_version:
            return
        version_pattern = r"^(\d+)\.(\d+)\.(\d+)$"
        # BUGFIX: the separator before the dev suffix must be a literal dot.
        # The previous pattern used an unescaped `.`, which matched any character
        # and accepted malformed versions such as "1.2.3x4".
        dev_version_pattern = r"^(\d+)\.(\d+)\.(\d+)\.(\w*)$"

        installed_match = re.match(version_pattern, cls._CURRENT_CORE_VERSION) or re.match(
            dev_version_pattern, cls._CURRENT_CORE_VERSION
        )
        required_match = re.match(version_pattern, core_version) or re.match(dev_version_pattern, core_version)
        if required_match and installed_match:
            installed_group = installed_match.groups()
            required_group = required_match.groups()
            installed_major, installed_minor = installed_group[0], installed_group[1]
            required_major, required_minor = required_group[0], required_group[1]
            # Patch-level differences are considered compatible.
            if required_major != installed_major or required_minor != installed_minor:
                raise ConfigCoreVersionMismatched(core_version, cls._CURRENT_CORE_VERSION)

    @staticmethod
    def _configure(
        root_folder: Optional[str] = None,
        storage_folder: Optional[str] = None,
        repository_type: Optional[str] = None,
        repository_properties: Optional[Dict[str, Union[str, int]]] = None,
        read_entity_retry: Optional[int] = None,
        mode: Optional[str] = None,
        version_number: Optional[str] = None,
        force: Optional[bool] = None,
        **properties,
    ) -> "CoreSection":
        """Configure the Core service.

        Parameters:
            root_folder (Optional[str]): Path of the base folder for the taipy application.
                The default value is "./taipy/"
            storage_folder (Optional[str]): Folder name used to store Taipy data. The default value is ".data/".
                It is used in conjunction with the `root_folder` field. That means the storage path is
                <root_folder><storage_folder> (The default path is "./taipy/.data/").
            repository_type (Optional[str]): The type of the repository to be used to store Taipy data.
                The default value is "filesystem".
            repository_properties (Optional[Dict[str, Union[str, int]]]): A dictionary of additional properties
                to be used by the repository.
            read_entity_retry (Optional[int]): Number of retries to read an entity from the repository
                before return failure. The default value is 1.
            mode (Optional[str]): Indicates the mode of the version management system.
                Possible values are *"development"*, *"experiment"*, or *"production"*.
            version_number (Optional[str]): The string identifier of the version.
                 In development mode, the version number is ignored.
            force (Optional[bool]): If True, Taipy will override a version even if the configuration
                has changed and run the application.
            **properties (Dict[str, Any]): A keyworded variable length list of additional arguments configure the
                behavior of the `Core^` service.
        Returns:
            The Core configuration.
        """
        section = CoreSection(
            root_folder=root_folder,
            storage_folder=storage_folder,
            repository_type=repository_type,
            repository_properties=repository_properties,
            read_entity_retry=read_entity_retry,
            mode=mode,
            version_number=version_number,
            force=force,
            core_version=_read_version(),
            **properties,
        )
        Config._register(section)
        return Config.unique_sections[CoreSection.name]
from collections import defaultdict
from copy import copy
from typing import Any, Callable, Dict, List, Optional, Union

from taipy.config._config import _Config
from taipy.config.common._template_handler import _TemplateHandler as _tpl
from taipy.config.common._validate_id import _validate_id
from taipy.config.common.frequency import Frequency
from taipy.config.config import Config
from taipy.config.section import Section

from .data_node_config import DataNodeConfig
from .task_config import TaskConfig


class ScenarioConfig(Section):
    """
    Configuration fields needed to instantiate an actual `Scenario^`.

    Attributes:
        id (str): Identifier of the scenario config. It must be a valid Python variable name.
        tasks (Optional[Union[TaskConfig, List[TaskConfig]]]): List of task configs.<br/>
            The default value is None.
        additional_data_nodes (Optional[Union[DataNodeConfig, List[DataNodeConfig]]]): <br/>
            List of additional data node configs. The default value is None.
        frequency (Optional[Frequency]): The frequency of the scenario's cycle. The default value is None.
        comparators (Optional[Dict[str, Union[List[Callable], Callable]]]): Dictionary of the data node <br/>
            config id as key and a list of Callable used to compare the data nodes as value.
        sequences (Optional[Dict[str, List[TaskConfig]]]): Dictionary of sequence descriptions.
            The default value is None.
        **properties (dict[str, any]): A dictionary of additional properties.
    """

    name = "SCENARIO"

    # NOTE: the duplicate `_SEQUENCES_KEY` definition was removed; it was assigned
    # twice with the same value.
    _SEQUENCES_KEY = "sequences"
    _TASKS_KEY = "tasks"
    _ADDITIONAL_DATA_NODES_KEY = "additional_data_nodes"
    _FREQUENCY_KEY = "frequency"
    _COMPARATOR_KEY = "comparators"

    def __init__(
        self,
        id: str,
        tasks: Optional[Union[TaskConfig, List[TaskConfig]]] = None,
        additional_data_nodes: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
        frequency: Optional[Frequency] = None,
        comparators: Optional[Dict[str, Union[List[Callable], Callable]]] = None,
        sequences: Optional[Dict[str, List[TaskConfig]]] = None,
        **properties,
    ):
        # BUGFIX: a single config must be wrapped in a one-element list.
        # The previous `list(tasks)` attempted to *iterate* the TaskConfig object.
        if tasks:
            self._tasks = [tasks] if isinstance(tasks, TaskConfig) else copy(tasks)
        else:
            self._tasks = []
        if additional_data_nodes:
            self._additional_data_nodes = (
                [additional_data_nodes]
                if isinstance(additional_data_nodes, DataNodeConfig)
                else copy(additional_data_nodes)
            )
        else:
            self._additional_data_nodes = []
        self.sequences = sequences if sequences else {}
        self.frequency = frequency
        # Normalize comparators so every value is a list of callables, keyed by a
        # validated data node config id.
        self.comparators = defaultdict(list)
        if comparators:
            for k, v in comparators.items():
                if isinstance(v, list):
                    self.comparators[_validate_id(k)].extend(v)
                else:
                    self.comparators[_validate_id(k)].append(v)
        super().__init__(id, **properties)

    def __copy__(self):
        comp = None if self.comparators is None else self.comparators
        scenario_config = ScenarioConfig(
            self.id,
            copy(self._tasks),
            copy(self._additional_data_nodes),
            self.frequency,
            copy(comp),
            copy(self.sequences),
            **copy(self._properties),
        )
        return scenario_config

    def __getattr__(self, item: str) -> Optional[Any]:
        # Unknown attributes fall back to the extra-properties dict (None if absent).
        return _tpl._replace_templates(self._properties.get(item))

    @property
    def task_configs(self) -> List[TaskConfig]:
        return self._tasks

    @property
    def tasks(self) -> List[TaskConfig]:
        return self._tasks

    @property
    def additional_data_node_configs(self) -> List[DataNodeConfig]:
        return self._additional_data_nodes

    @property
    def additional_data_nodes(self) -> List[DataNodeConfig]:
        return self._additional_data_nodes

    @property
    def data_node_configs(self) -> List[DataNodeConfig]:
        return self.__get_all_unique_data_nodes()

    @property
    def data_nodes(self) -> List[DataNodeConfig]:
        return self.__get_all_unique_data_nodes()

    def __get_all_unique_data_nodes(self) -> List[DataNodeConfig]:
        """Collect the additional data nodes plus every task input/output, deduplicated."""
        data_node_configs = set(self._additional_data_nodes)
        for task in self._tasks:
            data_node_configs.update(task.inputs)
            data_node_configs.update(task.outputs)
        return list(data_node_configs)

    @classmethod
    def default_config(cls):
        """Build a `ScenarioConfig` carrying every default value."""
        return ScenarioConfig(cls._DEFAULT_KEY, list(), list(), None, dict())

    def _clean(self):
        """Reset every field to its default value."""
        self._tasks = list()
        self._additional_data_nodes = list()
        self.frequency = None
        self.comparators = dict()
        self.sequences = dict()
        self._properties = dict()

    def _to_dict(self) -> Dict[str, Any]:
        """Serialize the section to a plain dict."""
        return {
            self._COMPARATOR_KEY: self.comparators,
            self._TASKS_KEY: self._tasks,
            self._ADDITIONAL_DATA_NODES_KEY: self._additional_data_nodes,
            self._FREQUENCY_KEY: self.frequency,
            self._SEQUENCES_KEY: self.sequences,
            **self._properties,
        }

    @classmethod
    def _from_dict(
        cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config] = None
    ) -> "ScenarioConfig":  # type: ignore
        """Build a `ScenarioConfig` from a dict, resolving task/data-node config ids
        against the already-loaded *config* sections."""
        as_dict.pop(cls._ID_KEY, id)
        tasks = cls.__get_task_configs(as_dict.pop(cls._TASKS_KEY, list()), config)
        additional_data_node_ids = as_dict.pop(cls._ADDITIONAL_DATA_NODES_KEY, list())
        additional_data_nodes = cls.__get_additional_data_node_configs(additional_data_node_ids, config)
        frequency = as_dict.pop(cls._FREQUENCY_KEY, None)
        comparators = as_dict.pop(cls._COMPARATOR_KEY, dict())
        sequences = as_dict.pop(cls._SEQUENCES_KEY, {})

        # Resolve each sequence's task ids into TaskConfig objects in place.
        for sequence_name, sequence_tasks in sequences.items():
            sequences[sequence_name] = cls.__get_task_configs(sequence_tasks, config)

        scenario_config = ScenarioConfig(
            id=id,
            tasks=tasks,
            additional_data_nodes=additional_data_nodes,
            frequency=frequency,
            comparators=comparators,
            sequences=sequences,
            **as_dict,
        )
        return scenario_config

    @staticmethod
    def __get_task_configs(task_config_ids: List[str], config: Optional[_Config]):
        """Map task config ids to `TaskConfig` objects; unknown ids are dropped."""
        task_configs = set()
        if config:
            if task_config_section := config._sections.get(TaskConfig.name):
                for task_config_id in task_config_ids:
                    if task_config := task_config_section.get(task_config_id, None):
                        task_configs.add(task_config)
        return list(task_configs)

    @staticmethod
    def __get_additional_data_node_configs(additional_data_node_ids: List[str], config: Optional[_Config]):
        """Map data node config ids to `DataNodeConfig` objects; unknown ids are dropped."""
        additional_data_node_configs = set()
        if config:
            if data_node_config_section := config._sections.get(DataNodeConfig.name):
                for additional_data_node_id in additional_data_node_ids:
                    if additional_data_node_config := data_node_config_section.get(additional_data_node_id):
                        additional_data_node_configs.add(additional_data_node_config)
        return list(additional_data_node_configs)

    def _update(self, as_dict: Dict[str, Any], default_section=None):
        """Merge *as_dict* into this section, falling back to *default_section* for
        any field left unset."""
        self._tasks = as_dict.pop(self._TASKS_KEY, self._tasks)
        if self._tasks is None and default_section:
            self._tasks = default_section._tasks
        self._additional_data_nodes = as_dict.pop(self._ADDITIONAL_DATA_NODES_KEY, self._additional_data_nodes)
        if self._additional_data_nodes is None and default_section:
            self._additional_data_nodes = default_section._additional_data_nodes
        self.frequency = as_dict.pop(self._FREQUENCY_KEY, self.frequency)
        if self.frequency is None and default_section:
            self.frequency = default_section.frequency
        self.comparators = as_dict.pop(self._COMPARATOR_KEY, self.comparators)
        if self.comparators is None and default_section:
            self.comparators = default_section.comparators
        self.sequences = as_dict.pop(self._SEQUENCES_KEY, self.sequences)
        if self.sequences is None and default_section:
            self.sequences = default_section.sequences
        self._properties.update(as_dict)
        if default_section:
            # Explicitly-set properties win over the defaults.
            self._properties = {**default_section.properties, **self._properties}

    def add_comparator(self, dn_config_id: str, comparator: Callable):
        self.comparators[dn_config_id].append(comparator)

    def delete_comparator(self, dn_config_id: str):
        if dn_config_id in self.comparators:
            del self.comparators[dn_config_id]

    @staticmethod
    def _configure(
        id: str,
        task_configs: Optional[List[TaskConfig]] = None,
        additional_data_node_configs: Optional[List[DataNodeConfig]] = None,
        frequency: Optional[Frequency] = None,
        comparators: Optional[Dict[str, Union[List[Callable], Callable]]] = None,
        sequences: Optional[Dict[str, List[TaskConfig]]] = None,
        **properties,
    ) -> "ScenarioConfig":
        """Configure a new scenario configuration.

        Parameters:
            id (str): The unique identifier of the new scenario configuration.
            task_configs (Optional[List[TaskConfig^]]): The list of task configurations used by this
                scenario configuration. The default value is None.
            additional_data_node_configs (Optional[List[DataNodeConfig^]]): The list of additional data nodes
                related to this scenario configuration. The default value is None.
            frequency (Optional[Frequency^]): The scenario frequency.<br/>
                It corresponds to the recurrence of the scenarios instantiated from this configuration. Based on
                this frequency each scenario will be attached to the relevant cycle.
            comparators (Optional[Dict[str, Union[List[Callable], Callable]]]): The list of
                functions used to compare scenarios. A comparator function is attached to a
                scenario's data node configuration. The key of the dictionary parameter corresponds
                to the data node configuration id. During the scenarios' comparison, each comparator
                is applied to all the data nodes instantiated from the data node configuration
                attached to the comparator. See `(taipy.)compare_scenarios()^` for more details.
            sequences (Optional[Dict[str, List[TaskConfig]]]): Dictionary of sequence descriptions.
                The default value is None.
            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
        Returns:
            The new scenario configuration.
        """
        section = ScenarioConfig(
            id,
            task_configs,
            additional_data_node_configs,
            frequency=frequency,
            comparators=comparators,
            sequences=sequences,
            **properties,
        )
        Config._register(section)
        return Config.sections[ScenarioConfig.name][id]

    @staticmethod
    def _set_default_configuration(
        task_configs: Optional[List[TaskConfig]] = None,
        additional_data_node_configs: Optional[List[DataNodeConfig]] = None,
        frequency: Optional[Frequency] = None,
        comparators: Optional[Dict[str, Union[List[Callable], Callable]]] = None,
        sequences: Optional[Dict[str, List[TaskConfig]]] = None,
        **properties,
    ) -> "ScenarioConfig":
        """Set the default values for scenario configurations.

        This function creates the *default scenario configuration* object,
        where all scenario configuration objects will find their default
        values when needed.

        Parameters:
            task_configs (Optional[List[TaskConfig^]]): The list of task configurations used by this
                scenario configuration.
            additional_data_node_configs (Optional[List[DataNodeConfig^]]): The list of additional data nodes
                related to this scenario configuration.
            frequency (Optional[Frequency^]): The scenario frequency.
                It corresponds to the recurrence of the scenarios instantiated from this configuration. Based on
                this frequency each scenario will be attached to the relevant cycle.
            comparators (Optional[Dict[str, Union[List[Callable], Callable]]]): The list of
                functions used to compare scenarios. A comparator function is attached to a
                scenario's data node configuration. The key of the dictionary parameter corresponds
                to the data node configuration id. During the scenarios' comparison, each comparator
                is applied to all the data nodes instantiated from the data node configuration
                attached to the comparator. See `taipy.compare_scenarios()^` for more details.
            sequences (Optional[Dict[str, List[TaskConfig]]]): Dictionary of sequences. The default value is None.
            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
        Returns:
            The new default scenario configuration.
        """
        section = ScenarioConfig(
            _Config.DEFAULT_KEY,
            task_configs,
            additional_data_node_configs,
            frequency=frequency,
            comparators=comparators,
            sequences=sequences,
            **properties,
        )
        Config._register(section)
        return Config.sections[ScenarioConfig.name][_Config.DEFAULT_KEY]

    def add_sequences(self, sequences: Dict[str, List[TaskConfig]]):
        self.sequences.update(sequences)

    def remove_sequences(self, sequence_names: Union[str, List[str]]):
        # Accept either a single sequence name or a list of names.
        if isinstance(sequence_names, list):
            for sequence_name in sequence_names:
                self.sequences.pop(sequence_name)
        else:
            self.sequences.pop(sequence_names)
import json from copy import copy from datetime import timedelta from typing import Any, Callable, Dict, List, Optional, Union from taipy.config._config import _Config from taipy.config.common._config_blocker import _ConfigBlocker from taipy.config.common._template_handler import _TemplateHandler as _tpl from taipy.config.common.scope import Scope from taipy.config.config import Config from taipy.config.section import Section from ..common._warnings import _warn_deprecated from ..common.mongo_default_document import MongoDefaultDocument class DataNodeConfig(Section): """ Configuration fields needed to instantiate a `DataNode^`. A Data Node config is made to be used as a generator for actual data nodes. It holds configuration information needed to create an actual data node. Attributes: id (str): Unique identifier of the data node config. It must be a valid Python variable name. storage_type (str): Storage type of the data nodes created from the data node config. The possible values are : "csv", "excel", "pickle", "sql_table", "sql", "mongo_collection", "generic", "json", "parquet" and "in_memory". The default value is "pickle". Note that the "in_memory" value can only be used when `JobConfig^`.mode is "standalone". scope (Optional[Scope^]): The optional `Scope^` of the data nodes instantiated from the data node config. The default value is SCENARIO. **properties (dict[str, any]): A dictionary of additional properties. 
""" name = "DATA_NODE" _STORAGE_TYPE_KEY = "storage_type" _STORAGE_TYPE_VALUE_PICKLE = "pickle" _STORAGE_TYPE_VALUE_SQL_TABLE = "sql_table" _STORAGE_TYPE_VALUE_SQL = "sql" _STORAGE_TYPE_VALUE_MONGO_COLLECTION = "mongo_collection" _STORAGE_TYPE_VALUE_CSV = "csv" _STORAGE_TYPE_VALUE_EXCEL = "excel" _STORAGE_TYPE_VALUE_IN_MEMORY = "in_memory" _STORAGE_TYPE_VALUE_GENERIC = "generic" _STORAGE_TYPE_VALUE_JSON = "json" _STORAGE_TYPE_VALUE_PARQUET = "parquet" _DEFAULT_STORAGE_TYPE = _STORAGE_TYPE_VALUE_PICKLE _ALL_STORAGE_TYPES = [ _STORAGE_TYPE_VALUE_PICKLE, _STORAGE_TYPE_VALUE_SQL_TABLE, _STORAGE_TYPE_VALUE_SQL, _STORAGE_TYPE_VALUE_MONGO_COLLECTION, _STORAGE_TYPE_VALUE_CSV, _STORAGE_TYPE_VALUE_EXCEL, _STORAGE_TYPE_VALUE_IN_MEMORY, _STORAGE_TYPE_VALUE_GENERIC, _STORAGE_TYPE_VALUE_JSON, _STORAGE_TYPE_VALUE_PARQUET, ] _EXPOSED_TYPE_KEY = "exposed_type" _EXPOSED_TYPE_PANDAS = "pandas" _EXPOSED_TYPE_MODIN = "modin" _EXPOSED_TYPE_NUMPY = "numpy" _DEFAULT_EXPOSED_TYPE = _EXPOSED_TYPE_PANDAS _ALL_EXPOSED_TYPES = [ _EXPOSED_TYPE_PANDAS, _EXPOSED_TYPE_MODIN, _EXPOSED_TYPE_NUMPY, ] _OPTIONAL_ENCODING_PROPERTY = "encoding" _DEFAULT_ENCODING_VALUE = "utf-8" # Generic _OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY = "read_fct" _OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY = "read_fct_args" _OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY = "write_fct" _OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY = "write_fct_args" # CSV _OPTIONAL_EXPOSED_TYPE_CSV_PROPERTY = "exposed_type" _OPTIONAL_DEFAULT_PATH_CSV_PROPERTY = "default_path" _OPTIONAL_HAS_HEADER_CSV_PROPERTY = "has_header" # Excel _OPTIONAL_EXPOSED_TYPE_EXCEL_PROPERTY = "exposed_type" _OPTIONAL_DEFAULT_PATH_EXCEL_PROPERTY = "default_path" _OPTIONAL_HAS_HEADER_EXCEL_PROPERTY = "has_header" _OPTIONAL_SHEET_NAME_EXCEL_PROPERTY = "sheet_name" # In memory _OPTIONAL_DEFAULT_DATA_IN_MEMORY_PROPERTY = "default_data" # SQL _REQUIRED_DB_NAME_SQL_PROPERTY = "db_name" _REQUIRED_DB_ENGINE_SQL_PROPERTY = "db_engine" _DB_ENGINE_SQLITE = "sqlite" 
_OPTIONAL_FOLDER_PATH_SQLITE_PROPERTY = "sqlite_folder_path" _OPTIONAL_FILE_EXTENSION_SQLITE_PROPERTY = "sqlite_file_extension" _OPTIONAL_DB_PASSWORD_SQL_PROPERTY = "db_password" _OPTIONAL_DB_USERNAME_SQL_PROPERTY = "db_username" _OPTIONAL_PORT_SQL_PROPERTY = "db_port" _OPTIONAL_HOST_SQL_PROPERTY = "db_host" _OPTIONAL_DRIVER_SQL_PROPERTY = "db_driver" _OPTIONAL_DB_EXTRA_ARGS_SQL_PROPERTY = "db_extra_args" _OPTIONAL_EXPOSED_TYPE_SQL_PROPERTY = "exposed_type" # SQL_TABLE _REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY = "table_name" # SQL _REQUIRED_READ_QUERY_SQL_PROPERTY = "read_query" _REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY = "write_query_builder" _OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY = "append_query_builder" # MONGO _REQUIRED_DB_NAME_MONGO_PROPERTY = "db_name" _REQUIRED_COLLECTION_NAME_MONGO_PROPERTY = "collection_name" _OPTIONAL_CUSTOM_DOCUMENT_MONGO_PROPERTY = "custom_document" _OPTIONAL_USERNAME_MONGO_PROPERTY = "db_username" _OPTIONAL_PASSWORD_MONGO_PROPERTY = "db_password" _OPTIONAL_HOST_MONGO_PROPERTY = "db_host" _OPTIONAL_PORT_MONGO_PROPERTY = "db_port" _OPTIONAL_DRIVER_MONGO_PROPERTY = "db_driver" _OPTIONAL_DB_EXTRA_ARGS_MONGO_PROPERTY = "db_extra_args" # Pickle _OPTIONAL_DEFAULT_PATH_PICKLE_PROPERTY = "default_path" _OPTIONAL_DEFAULT_DATA_PICKLE_PROPERTY = "default_data" # JSON _OPTIONAL_ENCODER_JSON_PROPERTY = "encoder" _OPTIONAL_DECODER_JSON_PROPERTY = "decoder" _OPTIONAL_DEFAULT_PATH_JSON_PROPERTY = "default_path" # Parquet _OPTIONAL_EXPOSED_TYPE_PARQUET_PROPERTY = "exposed_type" _OPTIONAL_DEFAULT_PATH_PARQUET_PROPERTY = "default_path" _OPTIONAL_ENGINE_PARQUET_PROPERTY = "engine" _OPTIONAL_COMPRESSION_PARQUET_PROPERTY = "compression" _OPTIONAL_READ_KWARGS_PARQUET_PROPERTY = "read_kwargs" _OPTIONAL_WRITE_KWARGS_PARQUET_PROPERTY = "write_kwargs" _REQUIRED_PROPERTIES: Dict[str, List] = { _STORAGE_TYPE_VALUE_PICKLE: [], _STORAGE_TYPE_VALUE_SQL_TABLE: [ _REQUIRED_DB_NAME_SQL_PROPERTY, _REQUIRED_DB_ENGINE_SQL_PROPERTY, 
_REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY, ], _STORAGE_TYPE_VALUE_SQL: [ _REQUIRED_DB_NAME_SQL_PROPERTY, _REQUIRED_DB_ENGINE_SQL_PROPERTY, _REQUIRED_READ_QUERY_SQL_PROPERTY, _REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY, ], _STORAGE_TYPE_VALUE_MONGO_COLLECTION: [ _REQUIRED_DB_NAME_MONGO_PROPERTY, _REQUIRED_COLLECTION_NAME_MONGO_PROPERTY, ], _STORAGE_TYPE_VALUE_CSV: [], _STORAGE_TYPE_VALUE_EXCEL: [], _STORAGE_TYPE_VALUE_IN_MEMORY: [], _STORAGE_TYPE_VALUE_GENERIC: [], _STORAGE_TYPE_VALUE_JSON: [], _STORAGE_TYPE_VALUE_PARQUET: [], } _OPTIONAL_PROPERTIES = { _STORAGE_TYPE_VALUE_GENERIC: { _OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY: None, _OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY: None, _OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY: None, _OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY: None, }, _STORAGE_TYPE_VALUE_CSV: { _OPTIONAL_DEFAULT_PATH_CSV_PROPERTY: None, _OPTIONAL_ENCODING_PROPERTY: _DEFAULT_ENCODING_VALUE, _OPTIONAL_HAS_HEADER_CSV_PROPERTY: True, _OPTIONAL_EXPOSED_TYPE_CSV_PROPERTY: _DEFAULT_EXPOSED_TYPE, }, _STORAGE_TYPE_VALUE_EXCEL: { _OPTIONAL_DEFAULT_PATH_EXCEL_PROPERTY: None, _OPTIONAL_HAS_HEADER_EXCEL_PROPERTY: True, _OPTIONAL_SHEET_NAME_EXCEL_PROPERTY: None, _OPTIONAL_EXPOSED_TYPE_EXCEL_PROPERTY: _DEFAULT_EXPOSED_TYPE, }, _STORAGE_TYPE_VALUE_IN_MEMORY: {_OPTIONAL_DEFAULT_DATA_IN_MEMORY_PROPERTY: None}, _STORAGE_TYPE_VALUE_SQL_TABLE: { _OPTIONAL_DB_USERNAME_SQL_PROPERTY: None, _OPTIONAL_DB_PASSWORD_SQL_PROPERTY: None, _OPTIONAL_HOST_SQL_PROPERTY: "localhost", _OPTIONAL_PORT_SQL_PROPERTY: 1433, _OPTIONAL_DRIVER_SQL_PROPERTY: "", _OPTIONAL_FOLDER_PATH_SQLITE_PROPERTY: None, _OPTIONAL_FILE_EXTENSION_SQLITE_PROPERTY: ".db", _OPTIONAL_DB_EXTRA_ARGS_SQL_PROPERTY: None, _OPTIONAL_EXPOSED_TYPE_SQL_PROPERTY: _DEFAULT_EXPOSED_TYPE, }, _STORAGE_TYPE_VALUE_SQL: { _OPTIONAL_DB_USERNAME_SQL_PROPERTY: None, _OPTIONAL_DB_PASSWORD_SQL_PROPERTY: None, _OPTIONAL_HOST_SQL_PROPERTY: "localhost", _OPTIONAL_PORT_SQL_PROPERTY: 1433, _OPTIONAL_DRIVER_SQL_PROPERTY: "", 
_OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY: None, _OPTIONAL_FOLDER_PATH_SQLITE_PROPERTY: None, _OPTIONAL_FILE_EXTENSION_SQLITE_PROPERTY: ".db", _OPTIONAL_DB_EXTRA_ARGS_SQL_PROPERTY: None, _OPTIONAL_EXPOSED_TYPE_SQL_PROPERTY: _DEFAULT_EXPOSED_TYPE, }, _STORAGE_TYPE_VALUE_MONGO_COLLECTION: { _OPTIONAL_CUSTOM_DOCUMENT_MONGO_PROPERTY: MongoDefaultDocument, _OPTIONAL_USERNAME_MONGO_PROPERTY: "", _OPTIONAL_PASSWORD_MONGO_PROPERTY: "", _OPTIONAL_HOST_MONGO_PROPERTY: "localhost", _OPTIONAL_PORT_MONGO_PROPERTY: 27017, _OPTIONAL_DRIVER_MONGO_PROPERTY: "", _OPTIONAL_DB_EXTRA_ARGS_MONGO_PROPERTY: None, }, _STORAGE_TYPE_VALUE_PICKLE: { _OPTIONAL_DEFAULT_PATH_PICKLE_PROPERTY: None, _OPTIONAL_DEFAULT_DATA_PICKLE_PROPERTY: None, }, _STORAGE_TYPE_VALUE_JSON: { _OPTIONAL_DEFAULT_PATH_PICKLE_PROPERTY: None, _OPTIONAL_ENCODING_PROPERTY: _DEFAULT_ENCODING_VALUE, _OPTIONAL_ENCODER_JSON_PROPERTY: None, _OPTIONAL_DECODER_JSON_PROPERTY: None, }, _STORAGE_TYPE_VALUE_PARQUET: { _OPTIONAL_DEFAULT_PATH_PARQUET_PROPERTY: None, _OPTIONAL_ENGINE_PARQUET_PROPERTY: "pyarrow", _OPTIONAL_COMPRESSION_PARQUET_PROPERTY: "snappy", _OPTIONAL_READ_KWARGS_PARQUET_PROPERTY: None, _OPTIONAL_WRITE_KWARGS_PARQUET_PROPERTY: None, _OPTIONAL_EXPOSED_TYPE_PARQUET_PROPERTY: _DEFAULT_EXPOSED_TYPE, }, } _SCOPE_KEY = "scope" _DEFAULT_SCOPE = Scope.SCENARIO _VALIDITY_PERIOD_KEY = "validity_period" _DEFAULT_VALIDITY_PERIOD = None def __init__( self, id: str, storage_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ): self._storage_type = storage_type self._scope = scope self._validity_period = validity_period super().__init__(id, **properties) def __copy__(self): return DataNodeConfig(self.id, self._storage_type, self._scope, self._validity_period, **copy(self._properties)) def __getattr__(self, item: str) -> Optional[Any]: return _tpl._replace_templates(self._properties.get(item)) @property def storage_type(self): return 
_tpl._replace_templates(self._storage_type) @storage_type.setter # type: ignore @_ConfigBlocker._check() def storage_type(self, val): self._storage_type = val @property def scope(self): return _tpl._replace_templates(self._scope) @scope.setter # type: ignore @_ConfigBlocker._check() def scope(self, val): self._scope = val @property def validity_period(self): return _tpl._replace_templates(self._validity_period) @validity_period.setter # type: ignore @_ConfigBlocker._check() def validity_period(self, val): self._validity_period = val @property def cacheable(self): _warn_deprecated("cacheable", suggest="the skippable feature") cacheable = self._properties.get("cacheable") if cacheable is not None: return _tpl._replace_templates(cacheable) else: return False @cacheable.setter # type: ignore @_ConfigBlocker._check() def cacheable(self, val): _warn_deprecated("cacheable", suggest="the skippable feature") self._properties["cacheable"] = val @classmethod def default_config(cls): return DataNodeConfig( cls._DEFAULT_KEY, cls._DEFAULT_STORAGE_TYPE, cls._DEFAULT_SCOPE, cls._DEFAULT_VALIDITY_PERIOD ) def _clean(self): self._storage_type = self._DEFAULT_STORAGE_TYPE self._scope = self._DEFAULT_SCOPE self._validity_period = self._DEFAULT_VALIDITY_PERIOD self._properties.clear() def _to_dict(self): as_dict = {} if self._storage_type is not None: as_dict[self._STORAGE_TYPE_KEY] = self._storage_type if self._scope is not None: as_dict[self._SCOPE_KEY] = self._scope if self._validity_period is not None: as_dict[self._VALIDITY_PERIOD_KEY] = self._validity_period as_dict.update(self._properties) return as_dict @classmethod def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config] = None): as_dict.pop(cls._ID_KEY, id) storage_type = as_dict.pop(cls._STORAGE_TYPE_KEY, None) scope = as_dict.pop(cls._SCOPE_KEY, None) validity_perid = as_dict.pop(cls._VALIDITY_PERIOD_KEY, None) return DataNodeConfig(id=id, storage_type=storage_type, scope=scope, 
validity_period=validity_perid, **as_dict) def _update(self, as_dict, default_section=None): self._storage_type = as_dict.pop(self._STORAGE_TYPE_KEY, self._storage_type) if self._storage_type is None and default_section: self._storage_type = default_section.storage_type self._scope = as_dict.pop(self._SCOPE_KEY, self._scope) if self._scope is None and default_section: if default_section.scope and self._storage_type == default_section.storage_type: self._scope = default_section.scope else: self._scope = self._DEFAULT_SCOPE self._validity_period = as_dict.pop(self._VALIDITY_PERIOD_KEY, self._validity_period) if self._validity_period is None and default_section: self._validity_period = default_section.validity_period self._properties.update(as_dict) if default_section and self._storage_type == default_section.storage_type: self._properties = {**default_section.properties, **self._properties} # Assign default value to optional properties if not defined by user if self._OPTIONAL_PROPERTIES.get(self._storage_type): for optional_property, default_value in self._OPTIONAL_PROPERTIES[self._storage_type].items(): if default_value is not None and self._properties.get(optional_property) is None: self._properties[optional_property] = default_value @staticmethod def _set_default_configuration( storage_type: str, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties ) -> "DataNodeConfig": """Set the default values for data node configurations. This function creates the _default data node configuration_ object, where all data node configuration objects will find their default values when needed. Parameters: storage_type (str): The default storage type for all data node configurations. The possible values are *"pickle"* (the default value), *"csv"*, *"excel"*, *"sql"*, *"mongo_collection"*, *"in_memory"*, *"json"*, *"parquet"* or *"generic"*. 
scope (Optional[Scope^]): The default scope for all data node configurations.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The default data node configuration. """ section = DataNodeConfig(_Config.DEFAULT_KEY, storage_type, scope, validity_period, **properties) Config._register_default(section) return Config.sections[DataNodeConfig.name][_Config.DEFAULT_KEY] @classmethod def _configure_from( cls, source_configuration: "DataNodeConfig", id: str, **properties, ) -> "DataNodeConfig": """Configure a new data node configuration from an existing one. Parameters: source_configuration (DataNodeConfig): The source data node configuration. id (str): The unique identifier of the new data node configuration. **properties (dict[str, any]): A keyworded variable length list of additional arguments.<br/> The default properties are the properties of the source data node configuration. Returns: The new data node configuration. 
""" scope = properties.pop("scope", None) or source_configuration.scope validity_period = properties.pop("validity_period", None) or source_configuration.validity_period properties = {**source_configuration.properties, **properties} # type: ignore return cls.__configure(id, source_configuration.storage_type, scope, validity_period, **properties) @classmethod def _configure( cls, id: str, storage_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new data node configuration. Parameters: id (str): The unique identifier of the new data node configuration. storage_type (Optional[str]): The data node configuration storage type. The possible values are None (which is the default value of *"pickle"*, unless it has been overloaded by the *storage_type* value set in the default data node configuration (see `(Config.)set_default_data_node_configuration()^`)), *"pickle"*, *"csv"*, *"excel"*, *"sql_table"*, *"sql"*, *"json"*, *"parquet"*, *"mongo_collection"*, *"in_memory"*, or *"generic"*. scope (Optional[Scope^]): The scope of the data node configuration.<br/> The default value is `Scope.SCENARIO` (or the one specified in `(Config.)set_default_data_node_configuration()^`). validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new data node configuration. 
""" configuration_map: Dict[str, Callable] = { cls._STORAGE_TYPE_VALUE_PICKLE: cls._configure_pickle, cls._STORAGE_TYPE_VALUE_SQL_TABLE: cls._configure_sql_table, cls._STORAGE_TYPE_VALUE_SQL: cls._configure_sql, cls._STORAGE_TYPE_VALUE_MONGO_COLLECTION: cls._configure_mongo_collection, cls._STORAGE_TYPE_VALUE_CSV: cls._configure_csv, cls._STORAGE_TYPE_VALUE_EXCEL: cls._configure_excel, cls._STORAGE_TYPE_VALUE_IN_MEMORY: cls._configure_in_memory, cls._STORAGE_TYPE_VALUE_GENERIC: cls._configure_generic, cls._STORAGE_TYPE_VALUE_JSON: cls._configure_json, cls._STORAGE_TYPE_VALUE_PARQUET: cls._configure_parquet, } if storage_type in cls._ALL_STORAGE_TYPES: return configuration_map[storage_type](id=id, scope=scope, validity_period=validity_period, **properties) return cls.__configure(id, storage_type, scope, validity_period, **properties) @classmethod def _configure_csv( cls, id: str, default_path: Optional[str] = None, encoding: Optional[str] = None, has_header: Optional[bool] = None, exposed_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new CSV data node configuration. Parameters: id (str): The unique identifier of the new CSV data node configuration. default_path (Optional[str]): The default path of the CSV file. encoding (Optional[str]): The encoding of the CSV file. has_header (Optional[bool]): If True, indicates that the CSV file has a header. exposed_type (Optional[str]): The exposed type of the data read from CSV file.<br/> The default value is `pandas`. scope (Optional[Scope^]): The scope of the CSV data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. 
Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new CSV data node configuration. """ if default_path is not None: properties[cls._OPTIONAL_DEFAULT_PATH_CSV_PROPERTY] = default_path if encoding is not None: properties[cls._OPTIONAL_ENCODING_PROPERTY] = encoding if has_header is not None: properties[cls._OPTIONAL_HAS_HEADER_CSV_PROPERTY] = has_header if exposed_type is not None: properties[cls._OPTIONAL_EXPOSED_TYPE_CSV_PROPERTY] = exposed_type return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_CSV, scope, validity_period, **properties) @classmethod def _configure_json( cls, id: str, default_path: Optional[str] = None, encoding: Optional[str] = None, encoder: Optional[json.JSONEncoder] = None, decoder: Optional[json.JSONDecoder] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new JSON data node configuration. Parameters: id (str): The unique identifier of the new JSON data node configuration. default_path (Optional[str]): The default path of the JSON file. encoding (Optional[str]): The encoding of the JSON file. encoder (Optional[json.JSONEncoder]): The JSON encoder used to write data into the JSON file. decoder (Optional[json.JSONDecoder]): The JSON decoder used to read data from the JSON file. scope (Optional[Scope^]): The scope of the JSON data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. 
Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new JSON data node configuration. """ if default_path is not None: properties[cls._OPTIONAL_DEFAULT_PATH_JSON_PROPERTY] = default_path if encoding is not None: properties[cls._OPTIONAL_ENCODING_PROPERTY] = encoding if encoder is not None: properties[cls._OPTIONAL_ENCODER_JSON_PROPERTY] = encoder if decoder is not None: properties[cls._OPTIONAL_DECODER_JSON_PROPERTY] = decoder return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_JSON, scope, validity_period, **properties) @classmethod def _configure_parquet( cls, id: str, default_path: Optional[str] = None, engine: Optional[str] = None, compression: Optional[str] = None, read_kwargs: Optional[Dict] = None, write_kwargs: Optional[Dict] = None, exposed_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new Parquet data node configuration. Parameters: id (str): The unique identifier of the new Parquet data node configuration. default_path (Optional[str]): The default path of the Parquet file. engine (Optional[str]): Parquet library to use. Possible values are *"fastparquet"* or *"pyarrow"*.<br/> The default value is *"pyarrow"*. compression (Optional[str]): Name of the compression to use. Possible values are *"snappy"*, *"gzip"*, *"brotli"*, or *"none"* (no compression). The default value is *"snappy"*. read_kwargs (Optional[dict]): Additional parameters passed to the `pandas.read_parquet()` function. 
write_kwargs (Optional[dict]): Additional parameters passed to the `pandas.DataFrame.write_parquet()` function.<br/> The parameters in *read_kwargs* and *write_kwargs* have a **higher precedence** than the top-level parameters which are also passed to Pandas. exposed_type (Optional[str]): The exposed type of the data read from Parquet file.<br/> The default value is `pandas`. scope (Optional[Scope^]): The scope of the Parquet data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new Parquet data node configuration. 
""" if default_path is not None: properties[cls._OPTIONAL_DEFAULT_PATH_PARQUET_PROPERTY] = default_path if engine is not None: properties[cls._OPTIONAL_ENGINE_PARQUET_PROPERTY] = engine if compression is not None: properties[cls._OPTIONAL_COMPRESSION_PARQUET_PROPERTY] = compression if read_kwargs is not None: properties[cls._OPTIONAL_READ_KWARGS_PARQUET_PROPERTY] = read_kwargs if write_kwargs is not None: properties[cls._OPTIONAL_WRITE_KWARGS_PARQUET_PROPERTY] = write_kwargs if exposed_type is not None: properties[cls._OPTIONAL_EXPOSED_TYPE_PARQUET_PROPERTY] = exposed_type return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_PARQUET, scope, validity_period, **properties) @classmethod def _configure_excel( cls, id: str, default_path: Optional[str] = None, has_header: Optional[bool] = None, sheet_name: Optional[Union[List[str], str]] = None, exposed_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new Excel data node configuration. Parameters: id (str): The unique identifier of the new Excel data node configuration. default_path (Optional[str]): The path of the Excel file. has_header (Optional[bool]): If True, indicates that the Excel file has a header. sheet_name (Optional[Union[List[str], str]]): The list of sheet names to be used. This can be a unique name. exposed_type (Optional[str]): The exposed type of the data read from Excel file.<br/> The default value is `pandas`. scope (Optional[Scope^]): The scope of the Excel data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). 
If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new Excel data node configuration. """ if default_path is not None: properties[cls._OPTIONAL_DEFAULT_PATH_EXCEL_PROPERTY] = default_path if has_header is not None: properties[cls._OPTIONAL_HAS_HEADER_EXCEL_PROPERTY] = has_header if sheet_name is not None: properties[cls._OPTIONAL_SHEET_NAME_EXCEL_PROPERTY] = sheet_name if exposed_type is not None: properties[cls._OPTIONAL_EXPOSED_TYPE_EXCEL_PROPERTY] = exposed_type return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_EXCEL, scope, validity_period, **properties) @classmethod def _configure_generic( cls, id: str, read_fct: Optional[Callable] = None, write_fct: Optional[Callable] = None, read_fct_args: Optional[List] = None, write_fct_args: Optional[List] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new generic data node configuration. Parameters: id (str): The unique identifier of the new generic data node configuration. read_fct (Optional[Callable]): The Python function called to read the data. write_fct (Optional[Callable]): The Python function called to write the data. The provided function must have at least one parameter that receives the data to be written. read_fct_args (Optional[List]): The list of arguments that are passed to the function *read_fct* to read data. write_fct_args (Optional[List]): The list of arguments that are passed to the function *write_fct* to write the data. scope (Optional[Scope^]): The scope of the Generic data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. 
Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new Generic data node configuration. """ if read_fct is not None: properties[cls._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY] = read_fct if write_fct is not None: properties[cls._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY] = write_fct if read_fct_args is not None: properties[cls._OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY] = read_fct_args if write_fct_args is not None: properties[cls._OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY] = write_fct_args return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC, scope, validity_period, **properties) @classmethod def _configure_in_memory( cls, id: str, default_data: Optional[Any] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new *in-memory* data node configuration. Parameters: id (str): The unique identifier of the new in_memory data node configuration. default_data (Optional[any]): The default data of the data nodes instantiated from this in_memory data node configuration. scope (Optional[Scope^]): The scope of the in_memory data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. 
**properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new *in-memory* data node configuration. """ if default_data is not None: properties[cls._OPTIONAL_DEFAULT_DATA_IN_MEMORY_PROPERTY] = default_data return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_IN_MEMORY, scope, validity_period, **properties) @classmethod def _configure_pickle( cls, id: str, default_path: Optional[str] = None, default_data: Optional[Any] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new pickle data node configuration. Parameters: id (str): The unique identifier of the new pickle data node configuration. default_path (Optional[str]): The path of the pickle file. default_data (Optional[any]): The default data of the data nodes instantiated from this pickle data node configuration. scope (Optional[Scope^]): The scope of the pickle data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new pickle data node configuration. 
""" if default_path is not None: properties[cls._OPTIONAL_DEFAULT_PATH_PICKLE_PROPERTY] = default_path if default_data is not None: properties[cls._OPTIONAL_DEFAULT_DATA_PICKLE_PROPERTY] = default_data return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_PICKLE, scope, validity_period, **properties) @classmethod def _configure_sql_table( cls, id: str, db_name: str, db_engine: str, table_name: str, db_username: Optional[str] = None, db_password: Optional[str] = None, db_host: Optional[str] = None, db_port: Optional[int] = None, db_driver: Optional[str] = None, sqlite_folder_path: Optional[str] = None, sqlite_file_extension: Optional[str] = None, db_extra_args: Optional[Dict[str, Any]] = None, exposed_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new SQL table data node configuration. Parameters: id (str): The unique identifier of the new SQL data node configuration. db_name (str): The database name, or the name of the SQLite database file. db_engine (str): The database engine. Possible values are *"sqlite"*, *"mssql"*, *"mysql"*, or *"postgresql"*. table_name (str): The name of the SQL table. db_username (Optional[str]): The database username. Required by the *"mssql"*, *"mysql"*, and *"postgresql"* engines. db_password (Optional[str]): The database password. Required by the *"mssql"*, *"mysql"*, and *"postgresql"* engines. db_host (Optional[str]): The database host.<br/> The default value is "localhost". db_port (Optional[int]): The database port.<br/> The default value is 1433. db_driver (Optional[str]): The database driver. sqlite_folder_path (Optional[str]): The path to the folder that contains SQLite file.<br/> The default value is the current working folder. sqlite_file_extension (Optional[str]): The file extension of the SQLite file.<br/> The default value is ".db". 
db_extra_args (Optional[dict[str, any]]): A dictionary of additional arguments to be passed into database connection string. exposed_type (Optional[str]): The exposed type of the data read from SQL table.<br/> The default value is "pandas". scope (Optional[Scope^]): The scope of the SQL data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new SQL data node configuration. """ properties.update( { cls._REQUIRED_DB_NAME_SQL_PROPERTY: db_name, cls._REQUIRED_DB_ENGINE_SQL_PROPERTY: db_engine, cls._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY: table_name, } ) if db_username is not None: properties[cls._OPTIONAL_DB_USERNAME_SQL_PROPERTY] = db_username if db_password is not None: properties[cls._OPTIONAL_DB_PASSWORD_SQL_PROPERTY] = db_password if db_host is not None: properties[cls._OPTIONAL_HOST_SQL_PROPERTY] = db_host if db_port is not None: properties[cls._OPTIONAL_PORT_SQL_PROPERTY] = db_port if db_driver is not None: properties[cls._OPTIONAL_DRIVER_SQL_PROPERTY] = db_driver if sqlite_folder_path is not None: properties[cls._OPTIONAL_FOLDER_PATH_SQLITE_PROPERTY] = sqlite_folder_path if sqlite_file_extension is not None: properties[cls._OPTIONAL_FILE_EXTENSION_SQLITE_PROPERTY] = sqlite_file_extension if db_extra_args is not None: properties[cls._OPTIONAL_DB_EXTRA_ARGS_SQL_PROPERTY] = db_extra_args if exposed_type is not None: properties[cls._OPTIONAL_EXPOSED_TYPE_SQL_PROPERTY] = exposed_type return cls.__configure(id, 
DataNodeConfig._STORAGE_TYPE_VALUE_SQL_TABLE, scope, validity_period, **properties) @classmethod def _configure_sql( cls, id: str, db_name: str, db_engine: str, read_query: str, write_query_builder: Callable, append_query_builder: Optional[Callable] = None, db_username: Optional[str] = None, db_password: Optional[str] = None, db_host: Optional[str] = None, db_port: Optional[int] = None, db_driver: Optional[str] = None, sqlite_folder_path: Optional[str] = None, sqlite_file_extension: Optional[str] = None, db_extra_args: Optional[Dict[str, Any]] = None, exposed_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new SQL data node configuration. Parameters: id (str): The unique identifier of the new SQL data node configuration. db_name (str): The database name, or the name of the SQLite database file. db_engine (str): The database engine. Possible values are *"sqlite"*, *"mssql"*, *"mysql"*, or *"postgresql"*. read_query (str): The SQL query string used to read the data from the database. write_query_builder (Callable): A callback function that takes the data as an input parameter and returns a list of SQL queries to be executed when writing data to the data node. append_query_builder (Optional[Callable]): A callback function that takes the data as an input parameter and returns a list of SQL queries to be executed when appending data to the data node. db_username (Optional[str]): The database username. Required by the *"mssql"*, *"mysql"*, and *"postgresql"* engines. db_password (Optional[str]): The database password. Required by the *"mssql"*, *"mysql"*, and *"postgresql"* engines. db_host (Optional[str]): The database host.<br/> The default value is "localhost". db_port (Optional[int]): The database port.<br/> The default value is 1433. db_driver (Optional[str]): The database driver. 
sqlite_folder_path (Optional[str]): The path to the folder that contains SQLite file.<br/> The default value is the current working folder. sqlite_file_extension (Optional[str]): The file extension of the SQLite file.<br/> The default value is ".db". db_extra_args (Optional[dict[str, any]]): A dictionary of additional arguments to be passed into database connection string. exposed_type (Optional[str]): The exposed type of the data read from SQL query.<br/> The default value is "pandas". scope (Optional[Scope^]): The scope of the SQL data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new SQL data node configuration. 
""" properties.update( { cls._REQUIRED_DB_NAME_SQL_PROPERTY: db_name, cls._REQUIRED_DB_ENGINE_SQL_PROPERTY: db_engine, cls._REQUIRED_READ_QUERY_SQL_PROPERTY: read_query, cls._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY: write_query_builder, } ) if append_query_builder is not None: properties[cls._OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY] = append_query_builder if db_username is not None: properties[cls._OPTIONAL_DB_USERNAME_SQL_PROPERTY] = db_username if db_password is not None: properties[cls._OPTIONAL_DB_PASSWORD_SQL_PROPERTY] = db_password if db_host is not None: properties[cls._OPTIONAL_HOST_SQL_PROPERTY] = db_host if db_port is not None: properties[cls._OPTIONAL_PORT_SQL_PROPERTY] = db_port if db_driver is not None: properties[cls._OPTIONAL_DRIVER_SQL_PROPERTY] = db_driver if sqlite_folder_path is not None: properties[cls._OPTIONAL_FOLDER_PATH_SQLITE_PROPERTY] = sqlite_folder_path if sqlite_file_extension is not None: properties[cls._OPTIONAL_FILE_EXTENSION_SQLITE_PROPERTY] = sqlite_file_extension if db_extra_args is not None: properties[cls._OPTIONAL_DB_EXTRA_ARGS_SQL_PROPERTY] = db_extra_args if exposed_type is not None: properties[cls._OPTIONAL_EXPOSED_TYPE_SQL_PROPERTY] = exposed_type return cls.__configure(id, DataNodeConfig._STORAGE_TYPE_VALUE_SQL, scope, validity_period, **properties) @classmethod def _configure_mongo_collection( cls, id: str, db_name: str, collection_name: str, custom_document: Optional[Any] = None, db_username: Optional[str] = None, db_password: Optional[str] = None, db_host: Optional[str] = None, db_port: Optional[int] = None, db_driver: Optional[str] = None, db_extra_args: Optional[Dict[str, Any]] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ) -> "DataNodeConfig": """Configure a new Mongo collection data node configuration. Parameters: id (str): The unique identifier of the new Mongo collection data node configuration. db_name (str): The database name. 
collection_name (str): The collection in the database to read from and to write the data to. custom_document (Optional[any]): The custom document class to store, encode, and decode data when reading and writing to a Mongo collection. The custom_document can have an optional *decode()* method to decode data in the Mongo collection to a custom object, and an optional *encode()*) method to encode the object's properties to the Mongo collection when writing. db_username (Optional[str]): The database username. db_password (Optional[str]): The database password. db_host (Optional[str]): The database host.<br/> The default value is "localhost". db_port (Optional[int]): The database port.<br/> The default value is 27017. db_driver (Optional[str]): The database driver. db_extra_args (Optional[dict[str, any]]): A dictionary of additional arguments to be passed into database connection string. scope (Optional[Scope^]): The scope of the Mongo collection data node configuration.<br/> The default value is `Scope.SCENARIO`. validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be considered up-to-date. Once the validity period has passed, the data node is considered stale and relevant tasks will run even if they are skippable (see the [Task configs page](../core/config/task-config.md) for more details). If *validity_period* is set to None, the data node is always up-to-date. **properties (dict[str, any]): A keyworded variable length list of additional arguments. Returns: The new Mongo collection data node configuration. 
""" properties.update( { cls._REQUIRED_DB_NAME_MONGO_PROPERTY: db_name, cls._REQUIRED_COLLECTION_NAME_MONGO_PROPERTY: collection_name, } ) if custom_document is not None: properties[cls._OPTIONAL_CUSTOM_DOCUMENT_MONGO_PROPERTY] = custom_document if db_username is not None: properties[cls._OPTIONAL_USERNAME_MONGO_PROPERTY] = db_username if db_password is not None: properties[cls._OPTIONAL_PASSWORD_MONGO_PROPERTY] = db_password if db_host is not None: properties[cls._OPTIONAL_HOST_MONGO_PROPERTY] = db_host if db_port is not None: properties[cls._OPTIONAL_PORT_MONGO_PROPERTY] = db_port if db_driver is not None: properties[cls._OPTIONAL_DRIVER_MONGO_PROPERTY] = db_driver if db_extra_args is not None: properties[cls._OPTIONAL_DB_EXTRA_ARGS_MONGO_PROPERTY] = db_extra_args return cls.__configure( id, DataNodeConfig._STORAGE_TYPE_VALUE_MONGO_COLLECTION, scope, validity_period, **properties ) @staticmethod def __configure( id: str, storage_type: Optional[str] = None, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties, ): section = DataNodeConfig(id, storage_type, scope, validity_period, **properties) Config._register(section) return Config.sections[DataNodeConfig.name][id]
from copy import copy
from typing import Any, Dict, List, Optional, Union

from taipy.config._config import _Config
from taipy.config.common._template_handler import _TemplateHandler as _tpl
from taipy.config.config import Config
from taipy.config.section import Section

from ..common._warnings import _warn_deprecated
from .data_node_config import DataNodeConfig


class TaskConfig(Section):
    """
    Configuration fields needed to instantiate an actual `Task^`.

    Attributes:
        id (str): Identifier of the task config. Must be a valid Python variable name.
        inputs (Union[DataNodeConfig^, List[DataNodeConfig^]]): The optional list of `DataNodeConfig^` inputs.<br/>
            The default value is [].
        outputs (Union[DataNodeConfig^, List[DataNodeConfig^]]): The optional list of `DataNodeConfig^` outputs.<br/>
            The default value is [].
        skippable (bool): If True, indicates that the task can be skipped if no change has been made on inputs.<br/>
            The default value is False.
        function (Callable): User function taking as inputs some parameters compatible with the exposed types
            (*exposed_type* field) of the input data nodes and returning results compatible with the exposed types
            (*exposed_type* field) of the outputs list.<br/>
            The default value is None.
        **properties (dict[str, any]): A dictionary of additional properties.
    """

    # Name of this section in the configuration (used as the registry/TOML key).
    name = "TASK"

    # Dictionary keys used by _to_dict/_from_dict/_update for (de)serialization.
    _INPUT_KEY = "inputs"
    _FUNCTION = "function"
    _OUTPUT_KEY = "outputs"
    _IS_SKIPPABLE_KEY = "skippable"

    def __init__(
        self,
        id: str,
        function,
        inputs: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
        outputs: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
        skippable: Optional[bool] = False,
        **properties,
    ):
        if inputs:
            # Normalize a single DataNodeConfig into a one-element list, and shallow-copy
            # caller-provided lists so later external mutations do not leak into this config.
            self._inputs = [inputs] if isinstance(inputs, DataNodeConfig) else copy(inputs)
        else:
            self._inputs = []
        if outputs:
            self._outputs = [outputs] if isinstance(outputs, DataNodeConfig) else copy(outputs)
            # Backward compatibility for the deprecated "cacheable" data node property:
            # if every output data node is cacheable, the task is implicitly skippable.
            outputs_all_cacheable = all(output.cacheable for output in self._outputs)
            if not skippable and outputs_all_cacheable:
                _warn_deprecated("cacheable", suggest="the skippable feature")
                skippable = True
        else:
            self._outputs = []
        self._skippable = skippable
        self.function = function
        super().__init__(id, **properties)

    def __copy__(self):
        # Shallow copy: the input/output lists and the properties dict are copied,
        # but the function and the DataNodeConfig objects themselves are shared.
        return TaskConfig(
            self.id, self.function, copy(self._inputs), copy(self._outputs), self.skippable, **copy(self._properties)
        )

    def __getattr__(self, item: str) -> Optional[Any]:
        # Fall back to the additional properties, resolving ENV[...] templates.
        # NOTE: returns None (rather than raising AttributeError) for unknown names.
        return _tpl._replace_templates(self._properties.get(item))

    @property
    def input_configs(self) -> List[DataNodeConfig]:
        """Return a copy of the list of input data node configurations."""
        return list(self._inputs)

    @property
    def inputs(self) -> List[DataNodeConfig]:
        """Alias of `input_configs`."""
        return list(self._inputs)

    @property
    def output_configs(self) -> List[DataNodeConfig]:
        """Return a copy of the list of output data node configurations."""
        return list(self._outputs)

    @property
    def outputs(self) -> List[DataNodeConfig]:
        """Alias of `output_configs`."""
        return list(self._outputs)

    @property
    def skippable(self):
        # Resolve a possible ENV[...] template before returning the flag.
        return _tpl._replace_templates(self._skippable)

    @classmethod
    def default_config(cls):
        """Return the default task configuration (no function, no inputs, no outputs)."""
        return TaskConfig(cls._DEFAULT_KEY, None, [], [], False)

    def _clean(self):
        # Reset every field to its default value.
        self.function = None
        self._inputs = []
        self._outputs = []
        self._skippable = False
        self._properties.clear()

    def _to_dict(self):
        # Serialize to a plain dictionary; additional properties are flattened in.
        return {
            self._FUNCTION: self.function,
            self._INPUT_KEY: self._inputs,
            self._OUTPUT_KEY: self._outputs,
            self._IS_SKIPPABLE_KEY: self._skippable,
            **self._properties,
        }

    @classmethod
    def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config]):
        # The authoritative id is the `id` argument; drop any id entry in the dictionary.
        as_dict.pop(cls._ID_KEY, id)
        funct = as_dict.pop(cls._FUNCTION, None)
        dn_configs = config._sections.get(DataNodeConfig.name, None) or []  # type: ignore
        inputs = []
        # Input/output entries are stored as config ids; resolve them against the
        # already-loaded data node configurations, silently skipping unknown ids.
        if inputs_as_str := as_dict.pop(cls._INPUT_KEY, None):
            inputs = [dn_configs[dn_id] for dn_id in inputs_as_str if dn_id in dn_configs]
        outputs = []
        if outputs_as_str := as_dict.pop(cls._OUTPUT_KEY, None):
            outputs = [dn_configs[ds_id] for ds_id in outputs_as_str if ds_id in dn_configs]
        skippable = as_dict.pop(cls._IS_SKIPPABLE_KEY, False)
        return TaskConfig(id=id, function=funct, inputs=inputs, outputs=outputs, skippable=skippable, **as_dict)

    def _update(self, as_dict, default_section=None):
        function = as_dict.pop(self._FUNCTION, None)
        # A string here is an unresolved function reference; keep the current function.
        if function is not None and type(function) is not str:
            self.function = function
        self._inputs = as_dict.pop(self._INPUT_KEY, self._inputs)
        if self._inputs is None and default_section:
            self._inputs = default_section._inputs
        self._outputs = as_dict.pop(self._OUTPUT_KEY, self._outputs)
        if self._outputs is None and default_section:
            self._outputs = default_section._outputs
        self._skippable = as_dict.pop(self._IS_SKIPPABLE_KEY, self._skippable)
        self._properties.update(as_dict)
        if default_section:
            # Explicitly-set properties take precedence over the default section's.
            self._properties = {**default_section.properties, **self._properties}

    @staticmethod
    def _configure(
        id: str,
        function,
        input: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
        output: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
        skippable: Optional[bool] = False,
        **properties,
    ) -> "TaskConfig":
        """Configure a new task configuration.

        Parameters:
            id (str): The unique identifier of this task configuration.
            function (Callable): The python function called by Taipy to run the task.
            input (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
                function input data node configurations. This can be a unique data node
                configuration if there is a single input data node, or None if there are none.
            output (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
                function output data node configurations. This can be a unique data node
                configuration if there is a single output data node, or None if there are none.
            skippable (bool): If True, indicates that the task can be skipped if no change has
                been made on inputs.<br/>
                The default value is False.
            **properties (dict[str, any]): A keyworded variable length list of additional arguments.

        Returns:
            The new task configuration.
        """
        section = TaskConfig(id, function, input, output, skippable, **properties)
        Config._register(section)
        return Config.sections[TaskConfig.name][id]

    @staticmethod
    def _set_default_configuration(
        function,
        input: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
        output: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
        skippable: Optional[bool] = False,
        **properties,
    ) -> "TaskConfig":
        """Set the default values for task configurations.

        This function creates the *default task configuration* object,
        where all task configuration objects will find their default
        values when needed.

        Parameters:
            function (Callable): The python function called by Taipy to run the task.
            input (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
                input data node configurations. This can be a unique data node
                configuration if there is a single input data node, or None if there are none.
            output (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
                output data node configurations. This can be a unique data node
                configuration if there is a single output data node, or None if there are none.
            skippable (bool): If True, indicates that the task can be skipped if no change has
                been made on inputs.<br/>
                The default value is False.
            **properties (dict[str, any]): A keyworded variable length list of additional arguments.

        Returns:
            The default task configuration.
        """
        section = TaskConfig(_Config.DEFAULT_KEY, function, input, output, skippable, **properties)
        Config._register(section)
        return Config.sections[TaskConfig.name][_Config.DEFAULT_KEY]
from taipy.config import _inject_section
from taipy.config.checker._checker import _Checker
from taipy.config.common.frequency import Frequency  # type: ignore
from taipy.config.common.scope import Scope  # type: ignore
from taipy.config.config import Config  # type: ignore
from taipy.config.global_app.global_app_config import GlobalAppConfig  # type: ignore

from .checkers._config_id_checker import _ConfigIdChecker
from .checkers._core_section_checker import _CoreSectionChecker
from .checkers._data_node_config_checker import _DataNodeConfigChecker
from .checkers._job_config_checker import _JobConfigChecker
from .checkers._scenario_config_checker import _ScenarioConfigChecker
from .checkers._task_config_checker import _TaskConfigChecker
from .core_section import CoreSection
from .data_node_config import DataNodeConfig
from .job_config import JobConfig
from .migration_config import MigrationConfig
from .scenario_config import ScenarioConfig
from .task_config import TaskConfig

# Register each Core configuration section with the Config machinery. Each call
# exposes the listed (method name, callable) pairs as public `Config.<method>` APIs.

# Job execution settings (unique section: at most one instance in a configuration).
_inject_section(
    JobConfig,
    "job_config",
    JobConfig.default_config(),
    [("configure_job_executions", JobConfig._configure)],
    add_to_unconflicted_sections=True,
)

# Data node configurations, with one public configure method per storage type.
_inject_section(
    DataNodeConfig,
    "data_nodes",
    DataNodeConfig.default_config(),
    [
        ("configure_data_node", DataNodeConfig._configure),
        ("configure_data_node_from", DataNodeConfig._configure_from),
        ("set_default_data_node_configuration", DataNodeConfig._set_default_configuration),
        ("configure_csv_data_node", DataNodeConfig._configure_csv),
        ("configure_json_data_node", DataNodeConfig._configure_json),
        ("configure_parquet_data_node", DataNodeConfig._configure_parquet),
        ("configure_sql_table_data_node", DataNodeConfig._configure_sql_table),
        ("configure_sql_data_node", DataNodeConfig._configure_sql),
        ("configure_mongo_collection_data_node", DataNodeConfig._configure_mongo_collection),
        ("configure_in_memory_data_node", DataNodeConfig._configure_in_memory),
        ("configure_pickle_data_node", DataNodeConfig._configure_pickle),
        ("configure_excel_data_node", DataNodeConfig._configure_excel),
        ("configure_generic_data_node", DataNodeConfig._configure_generic),
    ],
)

# Task configurations.
_inject_section(
    TaskConfig,
    "tasks",
    TaskConfig.default_config(),
    [
        ("configure_task", TaskConfig._configure),
        ("set_default_task_configuration", TaskConfig._set_default_configuration),
    ],
)

# Scenario configurations.
_inject_section(
    ScenarioConfig,
    "scenarios",
    ScenarioConfig.default_config(),
    [
        ("configure_scenario", ScenarioConfig._configure),
        ("set_default_scenario_configuration", ScenarioConfig._set_default_configuration),
    ],
)

# Version-migration function registry (unique section).
_inject_section(
    MigrationConfig,
    "migration_functions",
    MigrationConfig.default_config(),
    [("add_migration_function", MigrationConfig._add_migration_function)],
    add_to_unconflicted_sections=True,
)

# Core service settings (unique section).
_inject_section(
    CoreSection,
    "core",
    CoreSection.default_config(),
    [("configure_core", CoreSection._configure)],
    add_to_unconflicted_sections=True,
)

# Register the checkers that `Config.check()` runs over the sections above.
_Checker.add_checker(_ConfigIdChecker)
_Checker.add_checker(_CoreSectionChecker)
_Checker.add_checker(_DataNodeConfigChecker)
_Checker.add_checker(_JobConfigChecker)
# We don't need to add _MigrationConfigChecker because it is run only when the Core service is run.
_Checker.add_checker(_TaskConfigChecker)
_Checker.add_checker(_ScenarioConfigChecker)
import collections.abc
from copy import deepcopy
from typing import Any, Callable, Dict, Optional, Union

from taipy.config._config import _Config
from taipy.config.common._template_handler import _TemplateHandler as _tpl
from taipy.config.config import Config
from taipy.config.section import Section
from taipy.config.unique_section import UniqueSection


class MigrationConfig(UniqueSection):
    """
    Configuration fields needed to register migration functions from an old version to newer one.

    Attributes:
        migration_fcts (Dict[str, Dict[str, Callable]]): A dictionary that maps the version that entities are
            migrated from to the migration functions.
        **properties (dict[str, Any]): A dictionary of additional properties.
    """

    # Name of this unique section in the configuration.
    name = "VERSION_MIGRATION"

    # Dictionary key used for (de)serializing the migration functions.
    _MIGRATION_FCTS_KEY = "migration_fcts"

    def __init__(
        self,
        migration_fcts: Dict[str, Dict[str, Callable]],
        **properties,
    ):
        self.migration_fcts = migration_fcts
        super().__init__(**properties)

    def __copy__(self):
        # Deep-copy both the nested migration-function mapping and the properties
        # so the copy is fully independent of the original.
        return MigrationConfig(
            deepcopy(self.migration_fcts),
            **deepcopy(self._properties),
        )

    def _clean(self):
        # Reset the section to an empty state.
        self.migration_fcts.clear()
        self._properties.clear()

    def __getattr__(self, item: str) -> Optional[Any]:
        # Fall back to the additional properties, resolving ENV[...] templates.
        return _tpl._replace_templates(self._properties.get(item))  # type: ignore

    @classmethod
    def default_config(cls):
        """Return the default migration configuration (no migration functions)."""
        return MigrationConfig({})

    def _to_dict(self):
        # Serialize to a plain dictionary; additional properties are flattened in.
        return {
            self._MIGRATION_FCTS_KEY: self.migration_fcts,
            **self._properties,
        }

    @classmethod
    def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config]):
        return MigrationConfig(**as_dict)

    def _update(self, as_dict, default_section=None):
        def deep_update(d, u):
            # Recursively merge mapping `u` into `d`, preserving entries of `d`
            # that `u` does not override.
            for k, v in u.items():
                if isinstance(v, collections.abc.Mapping):
                    d[k] = deep_update(d.get(k, {}), v)
                else:
                    d[k] = v
            return d

        # Fix: use a default instead of raising KeyError when the migration
        # functions entry is absent, consistent with the other sections' _update.
        migration_fcts = as_dict.pop(self._MIGRATION_FCTS_KEY, {})
        deep_update(self.migration_fcts, migration_fcts)
        self._properties.update(as_dict)

    @staticmethod
    def _add_migration_function(
        target_version: str,
        config: Union[Section, str],
        migration_fct: Callable,
        **properties,
    ):
        """Add a migration function for a Configuration to migrate entities to the target version.

        Parameters:
            target_version (str): The production version that entities are migrated to.
            config (Union[Section, str]): The configuration or the `id` of the config that needs to migrate.
            migration_fct (Callable): Migration function that takes an entity as input and returns a new entity
                that is compatible with the target production version.
            **properties (Dict[str, Any]): A keyworded variable length list of additional arguments.

        Returns:
            `MigrationConfig^`: The Migration configuration.
        """
        # Accept either a Section instance or a raw config id string.
        config_id = config if isinstance(config, str) else config.id
        migration_fcts = {target_version: {config_id: migration_fct}}

        section = MigrationConfig(
            migration_fcts,
            **properties,
        )
        Config._register(section)
        return Config.unique_sections[MigrationConfig.name]
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from copy import copy
from typing import Any, Dict, Optional, Union

from taipy.config import Config
from taipy.config._config import _Config
from taipy.config.common._template_handler import _TemplateHandler as _tpl
from taipy.config.unique_section import UniqueSection

from ..exceptions.exceptions import ModeNotAvailable


class JobConfig(UniqueSection):
    """
    Configuration fields related to the jobs' executions.

    Parameters:
        mode (str): The Taipy operating mode. By default, the "development" mode is set for testing and debugging the
            executions of jobs. A "standalone" mode is also available.
        **properties (dict[str, any]): A dictionary of additional properties.
    """

    # Name of this unique section in the configuration.
    name = "JOB"

    _MODE_KEY = "mode"
    _STANDALONE_MODE = "standalone"
    _DEVELOPMENT_MODE = "development"
    _DEFAULT_MODE = _DEVELOPMENT_MODE
    _MODES = [_STANDALONE_MODE, _DEVELOPMENT_MODE]

    def __init__(self, mode: Optional[str] = None, **properties):
        self.mode = mode or self._DEFAULT_MODE
        # Mode-specific settings (e.g. max_nb_of_workers), merged with user properties.
        self._config = self._create_config(self.mode, **properties)
        super().__init__(**properties)

    def __copy__(self):
        return JobConfig(self.mode, **copy(self._properties))

    def __getattr__(self, key: str) -> Optional[Any]:
        # Unknown attributes are looked up in the mode-specific config dictionary;
        # None is returned for unknown keys instead of raising AttributeError.
        return self._config.get(key, None)

    @classmethod
    def default_config(cls):
        """Return the default job configuration (development mode)."""
        return JobConfig(cls._DEFAULT_MODE)

    def _clean(self):
        # Reset to the default mode and its default settings.
        self.mode = self._DEFAULT_MODE
        self._config = self._create_config(self.mode)

    def _to_dict(self):
        # Serialize the mode and the mode-specific settings into one flat dict.
        as_dict = {}
        if self.mode is not None:
            as_dict[self._MODE_KEY] = self.mode
        as_dict.update(self._config)
        return as_dict

    @classmethod
    def _from_dict(cls, config_as_dict: Dict[str, Any], id=None, config: Optional[_Config] = None):
        mode = config_as_dict.pop(cls._MODE_KEY, None)
        job_config = JobConfig(mode, **config_as_dict)
        return job_config

    def _update(self, as_dict: Dict[str, Any], default_section=None):
        mode = _tpl._replace_templates(as_dict.pop(self._MODE_KEY, self.mode))
        if self.mode != mode:
            # A mode change invalidates the previous settings; rebuild them.
            self.mode = mode
            self._config = self._create_config(self.mode, **as_dict)
        if self._config is not None:
            self._update_config(as_dict)

    @staticmethod
    def _configure(
        mode: Optional[str] = None, max_nb_of_workers: Optional[Union[int, str]] = None, **properties
    ) -> "JobConfig":
        """Configure job execution.

        Parameters:
            mode (Optional[str]): The job execution mode.
                Possible values are: *"standalone"* or *"development"* (the default value).
            max_nb_of_workers (Optional[int, str]): Parameter used only in *"standalone"* mode.
                This indicates the maximum number of jobs able to run in parallel.<br/>
                The default value is 1.<br/>
                A string can be provided to dynamically set the value using an environment
                variable. The string must follow the pattern: `ENV[&lt;env_var&gt;]` where
                `&lt;env_var&gt;` is the name of an environment variable.
            **properties (dict[str, any]): A keyworded variable length list of additional arguments.

        Returns:
            The new job execution configuration.
        """
        section = JobConfig(mode, max_nb_of_workers=max_nb_of_workers, **properties)
        Config._register(section)
        return Config.unique_sections[JobConfig.name]

    def _update_config(self, config_as_dict: Dict[str, Any]):
        for k, v in config_as_dict.items():
            # Convert the incoming value to the type of the mode's default value
            # (falling back to str), resolving ENV[...] templates along the way.
            type_to_convert = type(self.get_default_config(self.mode).get(k, None)) or str
            value = _tpl._replace_templates(v, type_to_convert)
            if value is not None:
                self._config[k] = value

    @property
    def is_standalone(self) -> bool:
        """True if the config is set to standalone mode"""
        return self.mode == self._STANDALONE_MODE

    @property
    def is_development(self) -> bool:
        """True if the config is set to development mode"""
        return self.mode == self._DEVELOPMENT_MODE

    @classmethod
    def get_default_config(cls, mode: str) -> Dict[str, Any]:
        """Return the default settings for the given mode.

        Fix: the previous implementation tested `cls.is_standalone` / `cls.is_development`,
        but on the class those names evaluate to the `property` objects themselves, which
        are always truthy — so standalone defaults were returned for every mode and
        `ModeNotAvailable` was unreachable. Compare the `mode` argument directly instead.
        """
        if mode == cls._STANDALONE_MODE:
            return {"max_nb_of_workers": 1}
        if mode == cls._DEVELOPMENT_MODE:
            return {}
        raise ModeNotAvailable(mode)

    @classmethod
    def _create_config(cls, mode, **properties):
        # Mode defaults first, then user-provided properties override them.
        return {**cls.get_default_config(mode), **properties}
from typing import Set

from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..core_section import CoreSection


class _CoreSectionChecker(_ConfigChecker):
    """Checker validating the unique Core section of the configuration."""

    # Repository back-ends the Core service knows how to instantiate.
    _ACCEPTED_REPOSITORY_TYPES: Set[str] = {"filesystem", "sql"}

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run the Core-section checks (if the section exists) and return the collector."""
        core_section = self._config._unique_sections.get(CoreSection.name)
        if core_section:
            self._check_repository_type(core_section)
        return self._collector

    def _check_repository_type(self, core_section: CoreSection):
        # Emit a warning (not an error) for unknown repository types: the Core
        # service falls back to the "filesystem" repository in that case.
        repository_type = core_section.repository_type
        if repository_type in self._ACCEPTED_REPOSITORY_TYPES:
            return
        self._warning(
            core_section._REPOSITORY_TYPE_KEY,
            repository_type,
            f'Value "{repository_type}" for field {core_section._REPOSITORY_TYPE_KEY} of the CoreSection is not '
            f'supported. Default value "filesystem" is applied.',
        )
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..._version._version_manager_factory import _VersionManagerFactory
from ..migration_config import MigrationConfig


class _MigrationConfigChecker(_ConfigChecker):
    """Checker validating the registered version-migration functions."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run all migration-config checks (if the section exists) and return the collector."""
        migration_config = self._config._unique_sections.get(MigrationConfig.name)
        if migration_config:
            self._check_if_entity_property_key_used_is_predefined(migration_config)

            migration_fcts = migration_config.migration_fcts
            for target_version, migration_functions in migration_fcts.items():
                for config_id, migration_function in migration_functions.items():
                    self._check_callable(target_version, config_id, migration_function)

            self._check_valid_production_version(migration_fcts)
            self._check_migration_from_productions_to_productions_exist(migration_fcts)
        return self._collector

    def _check_callable(self, target_version, config_id, migration_function):
        # Every registered migration entry must be a callable.
        if callable(migration_function):
            return
        self._error(
            MigrationConfig._MIGRATION_FCTS_KEY,
            migration_function,
            f"The migration function of config `{config_id}` from version {target_version}"
            f" must be populated with Callable value.",
        )

    def _check_valid_production_version(self, migration_fcts):
        # Migration functions may only target known production versions.
        for target_version in migration_fcts.keys():
            if target_version not in _VersionManagerFactory._build_manager()._get_production_versions():
                self._error(
                    MigrationConfig._MIGRATION_FCTS_KEY,
                    target_version,
                    "The target version for a migration function must be a production version.",
                )

    def _check_migration_from_productions_to_productions_exist(self, migration_fcts):
        # Walk consecutive production-version pairs and report gaps in the migration chain.
        production_versions = _VersionManagerFactory._build_manager()._get_production_versions()
        for source_version, target_version in zip(production_versions[:-1], production_versions[1:]):
            if migration_fcts.get(target_version):
                continue
            self._info(
                "target_version",
                None,
                f'There is no migration function from production version "{source_version}"'
                f' to version "{target_version}".',
            )
from typing import Dict

from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..data_node_config import DataNodeConfig
from ..job_config import JobConfig


class _JobConfigChecker(_ConfigChecker):
    """Checker validating the job configuration against the data node configurations."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run the job-config checks (if the section exists) and return the collector."""
        job_config = self._config._unique_sections.get(JobConfig.name)
        if job_config:
            data_node_configs = self._config._sections[DataNodeConfig.name]
            self._check_multiprocess_mode(job_config, data_node_configs)
        return self._collector

    def _check_multiprocess_mode(self, job_config: JobConfig, data_node_configs: Dict[str, DataNodeConfig]):
        # In-memory data nodes live in the current process only, so they cannot be
        # shared with the worker processes spawned in standalone mode.
        if not job_config.is_standalone:
            return
        for cfg_id, data_node_config in data_node_configs.items():
            if data_node_config.storage_type != DataNodeConfig._STORAGE_TYPE_VALUE_IN_MEMORY:
                continue
            self._error(
                DataNodeConfig._STORAGE_TYPE_KEY,
                data_node_config.storage_type,
                f"DataNode `{cfg_id}`: In-memory storage type can ONLY be used in "
                f"{JobConfig._DEVELOPMENT_MODE} mode.",
            )
from typing import Dict, List

from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector


class _ConfigIdChecker(_ConfigChecker):
    """Checker ensuring a config_id is not reused across different configuration types."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Collect every config_id per entity type and report cross-type duplicates."""
        # Map each config_id to the list of entity types (sections) that declare it.
        existing_config_ids: Dict[str, List[str]] = {}
        for entity_type, section_dictionary in self._config._sections.items():
            for config_id in section_dictionary.keys():
                # Idiom: setdefault replaces the manual membership test and branch.
                existing_config_ids.setdefault(config_id, []).append(entity_type)

        for config_id, entity_types in existing_config_ids.items():
            # The "default" placeholder legitimately exists in every section.
            if config_id != "default" and len(entity_types) > 1:
                self._error(
                    "config_id",
                    config_id,
                    f"`{config_id}` is used as the config_id of multiple configurations {str(entity_types)}",
                )
        return self._collector
from taipy.config import Config
from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector
from taipy.config.common.frequency import Frequency

from ..data_node_config import DataNodeConfig
from ..scenario_config import ScenarioConfig
from ..task_config import TaskConfig


class _ScenarioConfigChecker(_ConfigChecker):
    """Checker validating every `ScenarioConfig` registered in the configuration."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run all scenario checks and return the populated issue collector."""
        scenario_configs = self._config._sections[ScenarioConfig.name]
        for scenario_config_id, scenario_config in scenario_configs.items():
            # The default section only holds fallback values; it is not checked.
            if scenario_config_id != _Config.DEFAULT_KEY:
                self._check_if_entity_property_key_used_is_predefined(scenario_config)
                self._check_existing_config_id(scenario_config)
                self._check_frequency(scenario_config_id, scenario_config)
                self._check_task_configs(scenario_config_id, scenario_config)
                self._check_addition_data_node_configs(scenario_config_id, scenario_config)
                self._check_additional_dns_not_overlapping_tasks_dns(scenario_config_id, scenario_config)
                self._check_tasks_in_sequences_exist_in_scenario_tasks(scenario_config_id, scenario_config)
                self._check_comparators(scenario_config_id, scenario_config)
        return self._collector

    def _check_task_configs(self, scenario_config_id: str, scenario_config: ScenarioConfig):
        # Each entry of the `tasks` field must be a TaskConfig.
        self._check_children(
            ScenarioConfig,
            scenario_config_id,
            scenario_config._TASKS_KEY,
            scenario_config.tasks,
            TaskConfig,
        )

    def _check_addition_data_node_configs(self, scenario_config_id: str, scenario_config: ScenarioConfig):
        # Each entry of the `additional_data_nodes` field must be a DataNodeConfig.
        self._check_children(
            ScenarioConfig,
            scenario_config_id,
            scenario_config._ADDITIONAL_DATA_NODES_KEY,
            scenario_config.additional_data_nodes,
            DataNodeConfig,
            can_be_empty=True,
        )

    def _check_frequency(self, scenario_config_id: str, scenario_config: ScenarioConfig):
        # `frequency`, when set, must be a Frequency enum member.
        if scenario_config.frequency and not isinstance(scenario_config.frequency, Frequency):
            self._error(
                scenario_config._FREQUENCY_KEY,
                scenario_config.frequency,
                f"{scenario_config._FREQUENCY_KEY} field of ScenarioConfig `{scenario_config_id}` must be"
                f" populated with a Frequency value.",
            )

    def _check_comparators(self, scenario_config_id: str, scenario_config: ScenarioConfig):
        # `comparators` maps a data node config id to a callable (or list of callables).
        if scenario_config.comparators is not None and not isinstance(scenario_config.comparators, dict):
            self._error(
                ScenarioConfig._COMPARATOR_KEY,
                scenario_config.comparators,
                f"{ScenarioConfig._COMPARATOR_KEY} field of ScenarioConfig"
                f" `{scenario_config_id}` must be populated with a dictionary value.",
            )
        elif scenario_config.comparators:
            # Fix: the previous `else` branch ran even when `comparators` was None
            # (the first condition explicitly admits None), crashing on `.items()`.
            # Only iterate when comparators is a non-empty dict.
            for data_node_id, comparator in scenario_config.comparators.items():
                if data_node_id not in Config.data_nodes:
                    self._error(
                        ScenarioConfig._COMPARATOR_KEY,
                        scenario_config.comparators,
                        f"The key `{data_node_id}` in {ScenarioConfig._COMPARATOR_KEY} field of ScenarioConfig"
                        f" `{scenario_config_id}` must be populated with a valid data node configuration id.",
                    )
                if not callable(comparator):
                    if not isinstance(comparator, list) or not all(callable(comp) for comp in comparator):
                        self._error(
                            ScenarioConfig._COMPARATOR_KEY,
                            scenario_config.comparators,
                            f"The value of `{data_node_id}` in {ScenarioConfig._COMPARATOR_KEY} field of"
                            f" ScenarioConfig `{scenario_config_id}` must be populated with a list of Callable"
                            f" values.",
                        )

    def _check_additional_dns_not_overlapping_tasks_dns(self, scenario_config_id: str, scenario_config: ScenarioConfig):
        # Collect every data node already referenced as a task input or output ...
        data_node_configs = set()
        for task_config in scenario_config.task_configs:
            if isinstance(task_config, TaskConfig):
                input_dn_configs = task_config.input_configs if task_config.input_configs else []
                output_dn_configs = task_config.output_configs if task_config.output_configs else []
                data_node_configs.update({*input_dn_configs, *output_dn_configs})

        # ... and warn when an "additional" data node duplicates one of them.
        for additional_data_node_config in scenario_config.additional_data_node_configs:
            if additional_data_node_config in data_node_configs:
                self._warning(
                    ScenarioConfig._ADDITIONAL_DATA_NODES_KEY,
                    scenario_config.additional_data_node_configs,
                    f"The additional data node `{additional_data_node_config.id}` in"
                    f" {ScenarioConfig._ADDITIONAL_DATA_NODES_KEY} field of ScenarioConfig"
                    f" `{scenario_config_id}` has already existed as an input or output data node of"
                    f" ScenarioConfig `{scenario_config_id}` tasks.",
                )

    def _check_tasks_in_sequences_exist_in_scenario_tasks(
        self, scenario_config_id: str, scenario_config: ScenarioConfig
    ):
        # Every task referenced by a sequence must also belong to the scenario's tasks.
        scenario_task_ids = set()
        for task_config in scenario_config.tasks:
            if isinstance(task_config, TaskConfig):
                scenario_task_ids.add(task_config.id)

        for sequence_tasks in scenario_config.sequences.values():
            self._check_children(
                ScenarioConfig,
                scenario_config_id,
                scenario_config._SEQUENCES_KEY,
                sequence_tasks,
                TaskConfig,
                can_be_empty=True,
            )
            for task in sequence_tasks:
                if isinstance(task, TaskConfig) and task.id not in scenario_task_ids:
                    self._error(
                        ScenarioConfig._SEQUENCES_KEY,
                        scenario_config.sequences,
                        f"The task `{task.id}` in {ScenarioConfig._SEQUENCES_KEY} field of ScenarioConfig"
                        f" `{scenario_config_id}` must exist in {ScenarioConfig._TASKS_KEY} field of ScenarioConfig"
                        f" `{scenario_config_id}`.",
                    )
from datetime import timedelta
from typing import Dict

from taipy.config._config import _Config
from taipy.config.checker._checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector
from taipy.config.common.scope import Scope

from ..data_node_config import DataNodeConfig


class _DataNodeConfigChecker(_ConfigChecker):
    """Checker validating every `DataNodeConfig` section of the configuration.

    Each `_check_*` method appends errors or warnings to the shared
    `IssueCollector` through the `_error`/`_warning` helpers of the base class.
    """

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run every data node check on every section and return the collector."""
        data_node_configs: Dict[str, DataNodeConfig] = self._config._sections[DataNodeConfig.name]
        for data_node_config_id, data_node_config in data_node_configs.items():
            self._check_existing_config_id(data_node_config)
            self._check_if_entity_property_key_used_is_predefined(data_node_config)
            self._check_storage_type(data_node_config_id, data_node_config)
            self._check_scope(data_node_config_id, data_node_config)
            self._check_validity_period(data_node_config_id, data_node_config)
            self._check_required_properties(data_node_config_id, data_node_config)
            self._check_callable(data_node_config_id, data_node_config)
            self._check_generic_read_write_fct_and_args(data_node_config_id, data_node_config)
            self._check_exposed_type(data_node_config_id, data_node_config)
        return self._collector

    def _check_storage_type(self, data_node_config_id: str, data_node_config: DataNodeConfig):
        """Check that the storage type is one of the supported values."""
        if data_node_config.storage_type not in DataNodeConfig._ALL_STORAGE_TYPES:
            self._error(
                data_node_config._STORAGE_TYPE_KEY,
                data_node_config.storage_type,
                f"`{data_node_config._STORAGE_TYPE_KEY}` field of DataNodeConfig `{data_node_config_id}` must be"
                f" either csv, sql_table, sql, mongo_collection, pickle, excel, generic, json, parquet, or in_memory.",
            )

    def _check_scope(self, data_node_config_id: str, data_node_config: DataNodeConfig):
        """Check that the scope is a `Scope` enum value."""
        if not isinstance(data_node_config.scope, Scope):
            self._error(
                data_node_config._SCOPE_KEY,
                data_node_config.scope,
                f"`{data_node_config._SCOPE_KEY}` field of DataNodeConfig `{data_node_config_id}` must be"
                f" populated with a Scope value.",
            )

    def _check_validity_period(self, data_node_config_id: str, data_node_config: DataNodeConfig):
        """Check that the validity period, when set, is a `timedelta`."""
        if data_node_config.validity_period and not isinstance(data_node_config.validity_period, timedelta):
            self._error(
                data_node_config._VALIDITY_PERIOD_KEY,
                data_node_config.validity_period,
                f"`{data_node_config._VALIDITY_PERIOD_KEY}` field of DataNodeConfig `{data_node_config_id}` must be"
                f" None or populated with a timedelta value.",
            )

    def _check_required_properties(self, data_node_config_id: str, data_node_config: DataNodeConfig):
        """Check that every property required by the storage type is present.

        For sql/sql_table storage types, the required set depends on the
        configured db engine (sqlite needs no username/password).
        A missing property is a warning on the default section (it is only a
        template) and an error on a real section.
        """
        if storage_type := data_node_config.storage_type:
            if storage_type in DataNodeConfig._REQUIRED_PROPERTIES:
                required_properties = DataNodeConfig._REQUIRED_PROPERTIES[storage_type]
                if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL:
                    if data_node_config.properties:
                        if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
                            if engine == DataNodeConfig._DB_ENGINE_SQLITE:
                                # sqlite is file-based: no credentials required.
                                required_properties = [
                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
                                ]
                            else:
                                # Server-based engines also need username/password.
                                required_properties = [
                                    DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
                                    DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
                                ]
                if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL_TABLE:
                    if data_node_config.properties:
                        if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
                            if engine == DataNodeConfig._DB_ENGINE_SQLITE:
                                # sqlite is file-based: no credentials required.
                                required_properties = [
                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
                                ]
                            else:
                                # Server-based engines also need username/password.
                                required_properties = [
                                    DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
                                    DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
                                    DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
                                ]
                for required_property in required_properties:
                    if not data_node_config.properties or required_property not in data_node_config.properties:
                        if data_node_config_id == DataNodeConfig._DEFAULT_KEY:
                            # The default section is a template: only warn.
                            self._warning(
                                required_property,
                                None,
                                f"DataNodeConfig `{data_node_config_id}` is missing the required "
                                f"property `{required_property}` for type `{storage_type}`.",
                            )
                        else:
                            self._error(
                                required_property,
                                None,
                                f"DataNodeConfig `{data_node_config_id}` is missing the required "
                                f"property `{required_property}` for type `{storage_type}`.",
                            )

    def _check_generic_read_write_fct_and_args(self, data_node_config_id: str, data_node_config: DataNodeConfig):
        """Check the generic storage type's function-argument and function properties.

        Function-args properties must be lists, and a non-default generic data
        node must define at least one of the read/write functions.
        """
        if data_node_config.storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC:
            properties_to_check = [
                DataNodeConfig._OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY,
                DataNodeConfig._OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY,
            ]
            for prop_key in properties_to_check:
                if data_node_config.properties and prop_key in data_node_config.properties:
                    prop_value = data_node_config.properties[prop_key]
                    if not isinstance(prop_value, list):
                        self._error(
                            prop_key,
                            prop_value,
                            f"`{prop_key}` field of DataNodeConfig"
                            f" `{data_node_config_id}` must be populated with a List value.",
                        )
            if data_node_config_id != DataNodeConfig._DEFAULT_KEY:
                properties_to_check_at_least_one = [
                    DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
                    DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
                ]
                has_at_least_one = False
                for prop_key in properties_to_check_at_least_one:
                    if data_node_config.properties and prop_key in data_node_config.properties:
                        has_at_least_one = True
                if not has_at_least_one:
                    self._error(
                        ", ".join(properties_to_check_at_least_one),
                        None,
                        f"Either `{DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY}` field or "
                        f"`{DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY}` field of "
                        f"DataNodeConfig `{data_node_config_id}` must be populated with a Callable function.",
                    )

    def _check_callable(self, data_node_config_id: str, data_node_config: DataNodeConfig):
        """Check that function-valued properties (generic/sql) are callable when set."""
        properties_to_check = {
            DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC: [
                DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
                DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
            ],
            DataNodeConfig._STORAGE_TYPE_VALUE_SQL: [
                DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
                DataNodeConfig._OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY,
            ],
        }
        if data_node_config.storage_type in properties_to_check.keys():
            for prop_key in properties_to_check[data_node_config.storage_type]:
                prop_value = data_node_config.properties.get(prop_key) if data_node_config.properties else None
                if prop_value and not callable(prop_value):
                    self._error(
                        prop_key,
                        prop_value,
                        f"`{prop_key}` of DataNodeConfig `{data_node_config_id}` must be"
                        f" populated with a Callable function.",
                    )

    def _check_exposed_type(self, data_node_config_id: str, data_node_config: DataNodeConfig):
        """Check the exposed type; non-string values are custom types and accepted as-is."""
        if not isinstance(data_node_config.exposed_type, str):
            return
        if data_node_config.exposed_type not in DataNodeConfig._ALL_EXPOSED_TYPES:
            self._error(
                data_node_config._EXPOSED_TYPE_KEY,
                data_node_config.exposed_type,
                f"The `{data_node_config._EXPOSED_TYPE_KEY}` of DataNodeConfig `{data_node_config_id}` "
                f'must be either "pandas", "modin", "numpy", or a custom type.',
            )
from taipy.config._config import _Config
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector

from ..data_node_config import DataNodeConfig
from ..task_config import TaskConfig


class _TaskConfigChecker(_ConfigChecker):
    """Checker validating every `TaskConfig` section of the configuration."""

    def __init__(self, config: _Config, collector: IssueCollector):
        super().__init__(config, collector)

    def _check(self) -> IssueCollector:
        """Run all task checks and return the populated issue collector."""
        task_configs = self._config._sections[TaskConfig.name]
        for task_config_id, task_config in task_configs.items():
            # The default section is only a template, not a real task.
            if task_config_id == _Config.DEFAULT_KEY:
                continue
            self._check_existing_config_id(task_config)
            self._check_if_entity_property_key_used_is_predefined(task_config)
            self._check_existing_function(task_config_id, task_config)
            self._check_inputs(task_config_id, task_config)
            self._check_outputs(task_config_id, task_config)
        return self._collector

    def _check_inputs(self, task_config_id: str, task_config: TaskConfig):
        """Check that every input is a valid `DataNodeConfig`."""
        self._check_children(
            TaskConfig, task_config_id, task_config._INPUT_KEY, task_config.input_configs, DataNodeConfig
        )

    def _check_outputs(self, task_config_id: str, task_config: TaskConfig):
        """Check that every output is a valid `DataNodeConfig`."""
        self._check_children(
            TaskConfig, task_config_id, task_config._OUTPUT_KEY, task_config.output_configs, DataNodeConfig
        )

    def _check_existing_function(self, task_config_id: str, task_config: TaskConfig):
        """Check that the task function is present and callable."""
        function = task_config.function
        if not function:
            self._error(
                task_config._FUNCTION,
                function,
                f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` is empty.",
            )
            return
        if callable(function):
            return
        self._error(
            task_config._FUNCTION,
            function,
            f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` must be"
            f" populated with Callable value.",
        )
from dataclasses import dataclass, field
from datetime import datetime
from functools import singledispatch
from typing import Any, Optional

from ..common._repr_enum import _ReprEnum
from ..exceptions.exceptions import InvalidEventAttributeName, InvalidEventOperation


class EventOperation(_ReprEnum):
    """Enum representing a type of operation performed on a Core entity.

    `EventOperation` is used as an attribute of the `Event^` object to describe the
    operation performed on an entity.<br>
    The possible operations are `CREATION`, `UPDATE`, `DELETION`, or `SUBMISSION`.
    """

    CREATION = 1
    UPDATE = 2
    DELETION = 3
    SUBMISSION = 4


class EventEntityType(_ReprEnum):
    """Enum representing an entity type.

    `EventEntityType` is used as an attribute of the `Event^` object to describe
    an entity that was changed.<br>
    The possible entity types are `CYCLE`, `SCENARIO`, `SEQUENCE`, `TASK`, `DATA_NODE`,
    `JOB` or `SUBMISSION`.
    """

    CYCLE = 1
    SCENARIO = 2
    SEQUENCE = 3
    TASK = 4
    DATA_NODE = 5
    JOB = 6
    SUBMISSION = 7


# Operations for which an event must NOT carry an attribute name.
_NO_ATTRIBUTE_NAME_OPERATIONS = {EventOperation.CREATION, EventOperation.DELETION, EventOperation.SUBMISSION}
# Entity types that can never be the subject of a SUBMISSION event.
_UNSUBMITTABLE_ENTITY_TYPES = (EventEntityType.CYCLE, EventEntityType.DATA_NODE, EventEntityType.JOB)
# Maps an entity id prefix to its event entity type.
_ENTITY_TO_EVENT_ENTITY_TYPE = {
    "scenario": EventEntityType.SCENARIO,
    "sequence": EventEntityType.SEQUENCE,
    "task": EventEntityType.TASK,
    "data": EventEntityType.DATA_NODE,
    "job": EventEntityType.JOB,
    "cycle": EventEntityType.CYCLE,
    "submission": EventEntityType.SUBMISSION,
}


@dataclass(frozen=True)
class Event:
    """Event object used to notify any change in the Core service.

    An event holds the necessary attributes to identify the change.

    Attributes:
        entity_type (EventEntityType^): Type of the entity that was changed
            (`DataNode^`, `Scenario^`, `Cycle^`, etc. ).
        entity_id (Optional[str]): Unique identifier of the entity that was changed.
        operation (EventOperation^): Enum describing the operation (among `CREATION`,
            `UPDATE`, `DELETION`, and `SUBMISSION`) that was performed on the entity.
        attribute_name (Optional[str]): Name of the entity's changed attribute.
            Only relevant for `UPDATE` operations.
        attribute_value (Optional[Any]): Value of the entity's changed attribute.
            Only relevant for `UPDATE` operations.
        metadata (dict): A dict of additional metadata about the source of this event.
        creation_date (datetime): Date and time of the event creation.
    """

    entity_type: EventEntityType
    operation: EventOperation
    entity_id: Optional[str] = None
    attribute_name: Optional[str] = None
    attribute_value: Optional[Any] = None
    metadata: dict = field(default_factory=dict)
    creation_date: datetime = field(init=False)

    def __post_init__(self):
        # Stamp the creation date. object.__setattr__ (via super()) is required
        # because the dataclass is frozen.
        super().__setattr__("creation_date", datetime.now())

        # Reject SUBMISSION events on entity types that cannot be submitted.
        if self.entity_type in _UNSUBMITTABLE_ENTITY_TYPES and self.operation == EventOperation.SUBMISSION:
            raise InvalidEventOperation

        # CREATION/DELETION/SUBMISSION events must not carry an attribute name.
        if self.operation in _NO_ATTRIBUTE_NAME_OPERATIONS and self.attribute_name is not None:
            raise InvalidEventAttributeName


@singledispatch
def _make_event(
    entity: Any,
    operation: EventOperation,
    /,
    attribute_name: Optional[str] = None,
    attribute_value: Optional[Any] = None,
    **kwargs,
) -> Event:
    """Helper function to make an event for this entity with the given `EventOperation^` type.

    In case of `EventOperation.UPDATE^` events, an attribute name and value must be given.

    Parameters:
        entity (Any): The entity object to generate an event for.
        operation (EventOperation^): The operation of the event. The possible values are:
            <ul>
            <li>CREATION</li>
            <li>UPDATE</li>
            <li>DELETION</li>
            <li>SUBMISSION</li>
            </ul>
        attribute_name (Optional[str]): The name of the updated attribute for a
            `EventOperation.UPDATE`. This argument is always given in case of an UPDATE.
        attribute_value (Optional[Any]): The value of the updated attribute for a
            `EventOperation.UPDATE`. This argument is always given in case of an UPDATE.
        **kwargs (dict[str, any]): Any extra information that would be passed to the metadata event.
            Note: you should pass only simple types: str, float, double as values."""
    # Fallback when no entity-specific overload is registered.
    raise Exception(f"Unexpected entity type: {type(entity)}")
from queue import SimpleQueue
from typing import Any, Dict, Optional, Set, Tuple

from ._registration import _Registration
from ._topic import _Topic
from .event import Event, EventEntityType, EventOperation


def _publish_event(
    entity_type: EventEntityType,
    operation: EventOperation,
    /,
    entity_id: Optional[str] = None,
    attribute_name: Optional[str] = None,
    attribute_value: Optional[Any] = None,
    **kwargs,
):
    """Internal helper function to send events.

    It creates an event corresponding to the given arguments and sends it
    using `Notifier.publish(event)`.

    Parameters:
        entity_type (EventEntityType^): Type of the changed entity.
        operation (EventOperation^): Operation performed on the entity.
        entity_id (Optional[str]): Identifier of the changed entity.
        attribute_name (Optional[str]): Updated attribute name, for UPDATE events.
        attribute_value (Optional[Any]): Updated attribute value, for UPDATE events.
        **kwargs: Extra information stored in the event metadata.
    """
    event = Event(
        entity_id=entity_id,
        entity_type=entity_type,
        operation=operation,
        attribute_name=attribute_name,
        attribute_value=attribute_value,
        metadata=kwargs,
    )
    Notifier.publish(event)


class Notifier:
    """A class for managing event registrations and publishing `Core^` service events."""

    # Maps each topic to the set of registrations listening to it.
    _topics_registrations_list: Dict[_Topic, Set[_Registration]] = {}

    @classmethod
    def register(
        cls,
        entity_type: Optional[EventEntityType] = None,
        entity_id: Optional[str] = None,
        operation: Optional[EventOperation] = None,
        attribute_name: Optional[str] = None,
    ) -> Tuple[str, SimpleQueue]:
        """Register a listener for a specific event topic.

        The topic is defined by the combination of the entity type, the entity id,
        the operation and the attribute name.

        Parameters:
            entity_type (Optional[EventEntityType^]): If provided, the listener will
                be notified for all events related to this entity type. Otherwise,
                the listener will be notified for events related to all entity types.
                <br>
                The possible entity type values are defined in the `EventEntityType^` enum.
            entity_id (Optional[str]): If provided, the listener will be notified
                for all events related to this entity. Otherwise, the listener
                will be notified for events related to all entities.
            operation (Optional[EventOperation^]): If provided, the listener will
                be notified for all events related to this operation. Otherwise,
                the listener will be notified for events related to all operations.
                <br>
                The possible operation values are defined in the `EventOperation^` enum.
            attribute_name (Optional[str]): If provided, the listener will be notified
                for all events related to this entity's attribute. Otherwise, the
                listener will be notified for events related to all attributes.

        Returns:
            A tuple containing the registration id and the event queue.
        """
        registration = _Registration(entity_type, entity_id, operation, attribute_name)

        if registrations := cls._topics_registrations_list.get(registration.topic, None):
            registrations.add(registration)
        else:
            cls._topics_registrations_list[registration.topic] = {registration}

        return registration.registration_id, registration.queue

    @classmethod
    def unregister(cls, registration_id: str):
        """Unregister a listener.

        Parameters:
            registration_id (RegistrationId^): The registration id returned by the `register` method.
        """
        to_remove_registration: Optional[_Registration] = None

        # Iterate values only (keys are unused) and stop the search as soon as
        # the registration is found, instead of scanning every remaining topic.
        for registrations in cls._topics_registrations_list.values():
            for registration in registrations:
                if registration.registration_id == registration_id:
                    to_remove_registration = registration
                    break
            if to_remove_registration:
                break

        if to_remove_registration:
            registrations = cls._topics_registrations_list[to_remove_registration.topic]
            registrations.remove(to_remove_registration)
            # Drop the topic entirely once its last listener is gone.
            if not registrations:
                del cls._topics_registrations_list[to_remove_registration.topic]

    @classmethod
    def publish(cls, event):
        """Publish a `Core^` service event to all registered listeners whose topic matches the event.

        Parameters:
            event (Event^): The event to publish.
        """
        for topic, registrations in cls._topics_registrations_list.items():
            if Notifier._is_matching(event, topic):
                for registration in registrations:
                    registration.queue.put(event)

    @staticmethod
    def _is_matching(event: Event, topic: _Topic) -> bool:
        """Check if an event matches a topic (a None topic field matches anything)."""
        if topic.entity_type is not None and event.entity_type != topic.entity_type:
            return False
        if topic.entity_id is not None and event.entity_id != topic.entity_id:
            return False
        if topic.operation is not None and event.operation != topic.operation:
            return False
        # NOTE(review): an event without an attribute name matches any
        # attribute-specific topic — presumably intentional; confirm.
        if topic.attribute_name is not None and event.attribute_name and event.attribute_name != topic.attribute_name:
            return False
        return True
""" Package for notifications about changes on `Core^` service entities. The Core service generates `Event^` objects to track changes on entities. These events are then relayed to a `Notifier^`, which handles the dispatch to consumers interested in specific event topics. To subscribe, a consumer needs to invoke the `Notifier.register()^` method. This call will yield a `RegistrationId^` and a dedicated event queue for receiving notifications. To handle notifications, an event consumer (e.g., the `CoreEventConsumerBase^` object) must be instantiated with an associated event queue. """ from ._registration import _Registration from ._topic import _Topic from .core_event_consumer import CoreEventConsumerBase from .event import _ENTITY_TO_EVENT_ENTITY_TYPE, Event, EventEntityType, EventOperation, _make_event from .notifier import Notifier, _publish_event from .registration_id import RegistrationId
from typing import NewType

# Distinct type alias over str so registration ids are not confused with
# arbitrary strings in signatures; at runtime RegistrationId("x") is just "x".
RegistrationId = NewType("RegistrationId", str)
# NewType objects cannot carry a docstring in their definition, so it is
# attached after the fact.
RegistrationId.__doc__ = """Registration identifier. It can be used to instantiate a `CoreEventConsumerBase^`."""
from queue import SimpleQueue
from typing import Optional
from uuid import uuid4

from ._topic import _Topic
from .event import EventEntityType, EventOperation
from .registration_id import RegistrationId


class _Registration:
    """Pairs a notification `_Topic` with the queue delivering matching events."""

    _ID_PREFIX = "REGISTRATION"
    __SEPARATOR = "_"

    def __init__(
        self,
        entity_type: Optional[EventEntityType] = None,
        entity_id: Optional[str] = None,
        operation: Optional[EventOperation] = None,
        attribute_name: Optional[str] = None,
    ):
        # Each registration gets its own queue; the Notifier pushes every
        # event whose topic matches into it.
        self.queue: SimpleQueue = SimpleQueue()
        self.topic: _Topic = _Topic(entity_type, entity_id, operation, attribute_name)
        self.registration_id: str = self._new_id()

    @staticmethod
    def _new_id() -> RegistrationId:
        """Generate a unique registration identifier."""
        return RegistrationId(f"{_Registration._ID_PREFIX}{_Registration.__SEPARATOR}{uuid4()}")

    def __hash__(self) -> int:
        # Registrations live in sets keyed by their unique id.
        return hash(self.registration_id)
from typing import Optional

from ..exceptions.exceptions import InvalidEventOperation
from .event import _UNSUBMITTABLE_ENTITY_TYPES, EventEntityType, EventOperation


class _Topic:
    """Describes what a registration listens to.

    A None field acts as a wildcard: the registration is notified regardless of
    that field's value in incoming events.
    """

    def __init__(
        self,
        entity_type: Optional[EventEntityType] = None,
        entity_id: Optional[str] = None,
        operation: Optional[EventOperation] = None,
        attribute_name: Optional[str] = None,
    ):
        self.entity_type = entity_type
        self.entity_id = entity_id
        self.operation = self.__preprocess_operation(operation, self.entity_type)
        self.attribute_name = self.__preprocess_attribute_name(attribute_name, self.operation)

    @classmethod
    def __preprocess_attribute_name(
        cls, attribute_name: Optional[str] = None, operation: Optional[EventOperation] = None
    ) -> Optional[str]:
        # Intentionally permissive pass-through: rejecting an attribute name
        # combined with a no-attribute operation (InvalidEventAttributeName)
        # is currently disabled.
        return attribute_name

    @classmethod
    def __preprocess_operation(
        cls, operation: Optional[EventOperation] = None, entity_type: Optional[EventEntityType] = None
    ) -> Optional[EventOperation]:
        """Reject SUBMISSION topics on entity types that cannot be submitted.

        Raises:
            InvalidEventOperation: If the entity type is unsubmittable and the
                operation is SUBMISSION.
        """
        if (
            entity_type
            and operation
            and entity_type in _UNSUBMITTABLE_ENTITY_TYPES
            and operation == EventOperation.SUBMISSION
        ):
            raise InvalidEventOperation
        return operation

    def __hash__(self):
        return hash((self.entity_type, self.entity_id, self.operation, self.attribute_name))

    def __eq__(self, other) -> bool:
        # Fix: comparing against a non-_Topic previously raised AttributeError.
        # Returning NotImplemented lets Python fall back to the other operand.
        if not isinstance(other, _Topic):
            return NotImplemented
        return (
            self.entity_type == other.entity_type
            and self.entity_id == other.entity_id
            and self.operation == other.operation
            and self.attribute_name == other.attribute_name
        )
import abc
import threading
from queue import Empty, SimpleQueue

from .event import Event


class CoreEventConsumerBase(threading.Thread):
    """Abstract base class for implementing a Core event consumer.

    This class provides a framework for consuming events from a queue in a separate thread.
    It should be subclassed, and the `process_event` method should be implemented to define
    the custom logic for handling incoming events.

    Example usage:

    ```python
    class MyEventConsumer(CoreEventConsumerBase):
        def process_event(self, event: Event):
            # Custom event processing logic here
            print(f"Received event created at : {event.creation_date}")
            pass

    consumer = MyEventConsumer("consumer_1", event_queue)
    consumer.start()
    # ...
    consumer.stop()
    ```

    Subclasses should implement the `process_event` method to define their
    specific event handling behavior.

    Attributes:
        queue (SimpleQueue): The queue from which events will be consumed.
    """

    def __init__(self, registration_id: str, queue: SimpleQueue):
        """Initialize a CoreEventConsumerBase instance.

        Parameters:
            registration_id (str): A unique identifier of the registration. You can get a
                registration id invoking `Notifier.register()^` method.
            queue (SimpleQueue): The queue from which events will be consumed. You can get a
                queue invoking `Notifier.register()^` method.
        """
        threading.Thread.__init__(self, name=f"Thread-Taipy-Core-Consumer-{registration_id}")
        # Daemon thread: it must not keep the interpreter alive on exit.
        self.daemon = True
        self.queue = queue
        # Cooperative stop flag checked on every loop iteration of run().
        self.__STOP_FLAG = False
        # Blocking-get timeout (seconds); keeps the loop responsive to stop().
        self._TIMEOUT = 0.1

    def start(self):
        """Start the event consumer thread."""
        self.__STOP_FLAG = False
        threading.Thread.start(self)

    def stop(self):
        """Stop the event consumer thread."""
        # Only sets the flag; the thread exits within ~_TIMEOUT seconds.
        self.__STOP_FLAG = True

    def run(self):
        # Poll the queue with a timeout so the stop flag is re-checked
        # periodically even when no events arrive.
        while not self.__STOP_FLAG:
            try:
                event: Event = self.queue.get(block=True, timeout=self._TIMEOUT)
                self.process_event(event)
            except Empty:
                # Timeout expired with no event: loop to re-check the stop flag.
                pass

    @abc.abstractmethod
    def process_event(self, event: Event):
        """This method should be overridden in subclasses to define how events are processed."""
        raise NotImplementedError
import re
import uuid
from datetime import datetime
from typing import Any, Dict, Optional

from taipy.config.common.frequency import Frequency

from .._entity._entity import _Entity
from .._entity._labeled import _Labeled
from .._entity._properties import _Properties
from .._entity._reload import _Reloader, _self_reload, _self_setter
from ..exceptions.exceptions import _SuspiciousFileOperation
from ..notification.event import Event, EventEntityType, EventOperation, _make_event
from .cycle_id import CycleId


class Cycle(_Entity, _Labeled):
    """An iteration of a recurrent work pattern.

    Attributes:
        id (str): The unique identifier of the cycle.
        frequency (Frequency^): The frequency of this cycle.
        creation_date (datetime): The date and time of the creation of this cycle.
        start_date (datetime): The date and time of the start of this cycle.
        end_date (datetime): The date and time of the end of this cycle.
        name (str): The name of this cycle.
        properties (dict[str, Any]): A dictionary of additional properties.
    """

    _ID_PREFIX = "CYCLE"
    __SEPARATOR = "_"
    _MANAGER_NAME = "cycle"

    def __init__(
        self,
        frequency: Frequency,
        properties: Dict[str, Any],
        creation_date: datetime,
        start_date: datetime,
        end_date: datetime,
        name: Optional[str] = None,
        id: Optional[CycleId] = None,
    ):
        self._frequency = frequency
        self._creation_date = creation_date
        self._start_date = start_date
        self._end_date = end_date
        self._name = self._new_name(name)
        self.id = id or self._new_id(self._name)
        self._properties = _Properties(self, **properties)

    def _new_name(self, name: Optional[str] = None) -> str:
        """Return *name* if given; otherwise derive a readable name from the frequency."""
        if name:
            return name
        if self._frequency == Frequency.DAILY:
            # Example "Monday, 2. January 2023"
            return self._start_date.strftime("%A, %d. %B %Y")
        if self._frequency == Frequency.WEEKLY:
            # Example "Week 01 2023, from 2. January"
            return self._start_date.strftime("Week %W %Y, from %d. %B")
        if self._frequency == Frequency.MONTHLY:
            # Example "January 2023"
            return self._start_date.strftime("%B %Y")
        if self._frequency == Frequency.QUARTERLY:
            # Example "2023 Q1"
            return f"{self._start_date.strftime('%Y')} Q{(self._start_date.month-1)//3+1}"
        if self._frequency == Frequency.YEARLY:
            # Example "2023"
            return self._start_date.strftime("%Y")
        # Unknown frequency: fall back to a machine-derived name.
        return Cycle.__SEPARATOR.join([str(self._frequency.value), self._start_date.ctime()])

    @property  # type: ignore
    @_self_reload(_MANAGER_NAME)
    def frequency(self):
        return self._frequency

    @frequency.setter  # type: ignore
    @_self_setter(_MANAGER_NAME)
    def frequency(self, val):
        self._frequency = val

    @property  # type: ignore
    @_self_reload(_MANAGER_NAME)
    def creation_date(self):
        return self._creation_date

    @creation_date.setter  # type: ignore
    @_self_setter(_MANAGER_NAME)
    def creation_date(self, val):
        self._creation_date = val

    @property  # type: ignore
    @_self_reload(_MANAGER_NAME)
    def start_date(self):
        return self._start_date

    @start_date.setter  # type: ignore
    @_self_setter(_MANAGER_NAME)
    def start_date(self, val):
        self._start_date = val

    @property  # type: ignore
    @_self_reload(_MANAGER_NAME)
    def end_date(self):
        return self._end_date

    @end_date.setter  # type: ignore
    @_self_setter(_MANAGER_NAME)
    def end_date(self, val):
        self._end_date = val

    @property  # type: ignore
    @_self_reload(_MANAGER_NAME)
    def name(self):
        return self._name

    @name.setter  # type: ignore
    @_self_setter(_MANAGER_NAME)
    def name(self, val):
        self._name = val

    @property
    def properties(self):
        # Always reload so mutations made elsewhere are visible.
        self._properties = _Reloader()._reload(self._MANAGER_NAME, self)._properties
        return self._properties

    @staticmethod
    def _new_id(name: str) -> CycleId:
        """Generate a unique cycle identifier derived from *name*."""

        def _get_valid_filename(name: str) -> str:
            """
            Source: https://github.com/django/django/blob/main/django/utils/text.py
            """
            # Single sanitizing pass. The previous implementation repeated the
            # strip/replace/re.sub sequence a second time after the validity
            # check, which is redundant because the transformation is idempotent.
            s = str(name).strip().replace(" ", "_")
            s = re.sub(r"(?u)[^-\w.]", "", s)
            if s in {"", ".", ".."}:
                raise _SuspiciousFileOperation("Could not derive file name from '%s'" % name)
            return s

        return CycleId(_get_valid_filename(Cycle.__SEPARATOR.join([Cycle._ID_PREFIX, name, str(uuid.uuid4())])))

    def __getattr__(self, attribute_name):
        # Called only when normal attribute lookup fails: fall back to the
        # user-defined properties dictionary.
        if attribute_name in self._properties:
            return self._properties[attribute_name]
        raise AttributeError(f"{attribute_name} is not an attribute of cycle {self.id}")

    def __eq__(self, other):
        # Fix: comparing against a non-Cycle previously raised AttributeError.
        # Returning NotImplemented lets Python fall back to the other operand.
        if not isinstance(other, Cycle):
            return NotImplemented
        return self.id == other.id

    def __hash__(self):
        return hash(self.id)

    def get_label(self) -> str:
        """Returns the cycle label.

        Returns:
            The label of the cycle as a string.
        """
        return self._get_label()

    def get_simple_label(self) -> str:
        """Returns the cycle simple label.

        Returns:
            The simple label of the cycle as a string.
        """
        return self._get_simple_label()


@_make_event.register(Cycle)
def _make_event_for_cycle(
    cycle: Cycle,
    operation: EventOperation,
    /,
    attribute_name: Optional[str] = None,
    attribute_value: Optional[Any] = None,
    **kwargs,
) -> Event:
    """Build a CYCLE event for *cycle*; extra keyword arguments become metadata."""
    metadata = {**kwargs}
    return Event(
        entity_type=EventEntityType.CYCLE,
        entity_id=cycle.id,
        operation=operation,
        attribute_name=attribute_name,
        attribute_value=attribute_value,
        metadata=metadata,
    )
import calendar
from datetime import datetime, time, timedelta
from typing import Callable, Dict, List, Optional

from taipy.config.common.frequency import Frequency

from .._entity._entity_ids import _EntityIds
from .._manager._manager import _Manager
from .._repository._abstract_repository import _AbstractRepository
from ..job._job_manager_factory import _JobManagerFactory
from ..notification import EventEntityType, EventOperation, _publish_event
from ..submission._submission_manager_factory import _SubmissionManagerFactory
from .cycle import Cycle
from .cycle_id import CycleId


class _CycleManager(_Manager[Cycle]):
    """Manager in charge of the lifecycle of `Cycle` entities.

    Adds cycle-specific logic (period boundary computation, get-or-create by
    frequency/start date, cascading hard delete) on top of the generic
    `_Manager` CRUD behavior.
    """

    _ENTITY_NAME = Cycle.__name__
    _repository: _AbstractRepository
    _EVENT_ENTITY_TYPE = EventEntityType.CYCLE

    @classmethod
    def _create(
        cls, frequency: Frequency, name: Optional[str] = None, creation_date: Optional[datetime] = None, **properties
    ):
        # Create a new cycle covering the period that contains `creation_date`
        # (now, by default), persist it, and publish a CREATION event.
        creation_date = creation_date if creation_date else datetime.now()
        start_date = _CycleManager._get_start_date_of_cycle(frequency, creation_date)
        end_date = _CycleManager._get_end_date_of_cycle(frequency, start_date)
        cycle = Cycle(
            frequency, properties, creation_date=creation_date, start_date=start_date, end_date=end_date, name=name
        )
        cls._set(cycle)
        _publish_event(
            cls._EVENT_ENTITY_TYPE,
            EventOperation.CREATION,
            entity_id=cycle.id,
        )
        return cycle

    @classmethod
    def _get_or_create(
        cls, frequency: Frequency, creation_date: Optional[datetime] = None, name: Optional[str] = None
    ) -> Cycle:
        # Return the existing cycle matching (frequency, computed start date),
        # or create one. Note: when an existing cycle is found, `name` is ignored.
        creation_date = creation_date if creation_date else datetime.now()
        start_date = _CycleManager._get_start_date_of_cycle(frequency, creation_date)
        cycles = cls._get_cycles_by_frequency_and_start_date(
            frequency=frequency, start_date=start_date, cycles=cls._get_all()
        )
        if len(cycles) > 0:
            return cycles[0]
        else:
            return cls._create(frequency=frequency, creation_date=creation_date, name=name)

    @staticmethod
    def _get_start_date_of_cycle(frequency: Frequency, creation_date: datetime):
        # Normalize `creation_date` down to the first instant (midnight) of the
        # period it belongs to: same day / Monday / 1st of month / Jan 1st.
        start_date = creation_date.date()
        start_time = time()  # midnight
        if frequency == Frequency.DAILY:
            start_date = start_date
        if frequency == Frequency.WEEKLY:
            start_date = start_date - timedelta(days=start_date.weekday())
        if frequency == Frequency.MONTHLY:
            start_date = start_date.replace(day=1)
        if frequency == Frequency.YEARLY:
            start_date = start_date.replace(day=1, month=1)
        return datetime.combine(start_date, start_time)

    @staticmethod
    def _get_end_date_of_cycle(frequency: Frequency, start_date: datetime):
        # End of period = first instant of the next period minus 1 microsecond,
        # so start/end bounds are inclusive and periods do not overlap.
        end_date = start_date
        if frequency == Frequency.DAILY:
            end_date = end_date + timedelta(days=1)
        if frequency == Frequency.WEEKLY:
            end_date = end_date + timedelta(7 - end_date.weekday())
        if frequency == Frequency.MONTHLY:
            last_day_of_month = calendar.monthrange(start_date.year, start_date.month)[1]
            end_date = end_date.replace(day=last_day_of_month) + timedelta(days=1)
        if frequency == Frequency.YEARLY:
            end_date = end_date.replace(month=12, day=31) + timedelta(days=1)
        return end_date - timedelta(microseconds=1)

    @classmethod
    def _hard_delete(cls, cycle_id: CycleId):
        # Delete the cycle together with every entity it (transitively) owns.
        cycle = cls._get(cycle_id)
        entity_ids_to_delete = cls._get_children_entity_ids(cycle)
        entity_ids_to_delete.cycle_ids.add(cycle.id)
        cls._delete_entities_of_multiple_types(entity_ids_to_delete)

    @classmethod
    def _get_children_entity_ids(cls, cycle: Cycle) -> _EntityIds:
        # Collect the ids of all scenarios attached to the cycle, plus the
        # sequences/tasks/data nodes those scenarios (or the cycle) own, and
        # the jobs/submissions referencing any of them.
        from ..scenario._scenario_manager_factory import _ScenarioManagerFactory

        entity_ids = _EntityIds()
        scenarios = _ScenarioManagerFactory._build_manager()._get_all_by_cycle(cycle)
        for scenario in scenarios:
            entity_ids.scenario_ids.add(scenario.id)
            owner_ids = {scenario.id, cycle.id}
            for sequence in scenario.sequences.values():
                if sequence.owner_id in owner_ids:
                    entity_ids.sequence_ids.add(sequence.id)
            for task in scenario.tasks.values():
                if task.owner_id in owner_ids:
                    entity_ids.task_ids.add(task.id)
            for data_node in scenario.data_nodes.values():
                if data_node.owner_id in owner_ids:
                    entity_ids.data_node_ids.add(data_node.id)

        jobs = _JobManagerFactory._build_manager()._get_all()
        for job in jobs:
            if job.task.id in entity_ids.task_ids:
                entity_ids.job_ids.add(job.id)

        submissions = _SubmissionManagerFactory._build_manager()._get_all()
        submitted_entity_ids = list(entity_ids.scenario_ids.union(entity_ids.sequence_ids, entity_ids.task_ids))
        for submission in submissions:
            if submission.entity_id in submitted_entity_ids:
                entity_ids.submission_ids.add(submission.id)

        return entity_ids

    @classmethod
    def _get_cycles_by_frequency_and_start_date(
        cls, frequency: Frequency, start_date: datetime, cycles: List[Cycle]
    ) -> List[Cycle]:
        # Exact match on both the frequency and the period start.
        return cls._get_cycles_cdt(
            lambda cycle: cycle.frequency == frequency and cycle.start_date == start_date, cycles
        )

    @classmethod
    def _get_cycles_by_frequency_and_overlapping_date(
        cls, frequency: Frequency, date: datetime, cycles: List[Cycle]
    ) -> List[Cycle]:
        # Cycles of the given frequency whose [start, end] period contains `date`.
        return cls._get_cycles_cdt(
            lambda cycle: cycle.frequency == frequency and cycle.start_date <= date <= cycle.end_date, cycles
        )

    @classmethod
    def _get_cycles_cdt(cls, cdt: Callable[[Cycle], bool], cycles: List[Cycle]) -> List[Cycle]:
        # Generic filter: keep the cycles satisfying the `cdt` predicate.
        return [cycle for cycle in cycles if cdt(cycle)]
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._filesystem_repository import _FileSystemRepository
from ._cycle_converter import _CycleConverter
from ._cycle_model import _CycleModel


class _CycleFSRepository(_FileSystemRepository):
    """Filesystem-backed repository persisting `Cycle` entities under the "cycles" directory."""

    def __init__(self):
        # Wire the generic filesystem repository to the cycle model/converter pair.
        super().__init__(
            dir_name="cycles",
            model_type=_CycleModel,
            converter=_CycleConverter,
        )
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from .._repository._sql_repository import _SQLRepository
from ._cycle_converter import _CycleConverter
from ._cycle_model import _CycleModel


class _CycleSQLRepository(_SQLRepository):
    """SQL-backed repository persisting `Cycle` entities."""

    def __init__(self):
        # Wire the generic SQL repository to the cycle model/converter pair.
        super().__init__(
            model_type=_CycleModel,
            converter=_CycleConverter,
        )
from typing import NewType

# Distinct string subtype used to tag cycle identifiers for static type checking;
# at runtime a CycleId is a plain str.
CycleId = NewType("CycleId", str)
CycleId.__doc__ = """Type that holds a `Cycle^` identifier."""
from dataclasses import dataclass
from typing import Any, Dict

from sqlalchemy import JSON, Column, Enum, String, Table

from taipy.config.common.frequency import Frequency

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry
from .cycle_id import CycleId


@mapper_registry.mapped
@dataclass
class _CycleModel(_BaseModel):
    """Persistence model for `Cycle` entities.

    Mapped to the "cycle" table; dates are stored as (ISO) strings and the
    properties dict as a serialized JSON column.
    """

    __table__ = Table(
        "cycle",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("name", String),
        Column("frequency", Enum(Frequency)),
        Column("properties", JSON),
        Column("creation_date", String),
        Column("start_date", String),
        Column("end_date", String),
    )
    id: CycleId
    name: str
    frequency: Frequency
    properties: Dict[str, Any]
    creation_date: str
    start_date: str
    end_date: str

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        # Rebuild a model from a raw row/dict; the frequency is stored in its
        # repr form and the properties as a serialized string.
        return _CycleModel(
            id=data["id"],
            name=data["name"],
            frequency=Frequency._from_repr(data["frequency"]),
            properties=_BaseModel._deserialize_attribute(data["properties"]),
            creation_date=data["creation_date"],
            start_date=data["start_date"],
            end_date=data["end_date"],
        )

    def to_list(self):
        # Column-ordered list used when writing the model out; mirrors from_dict.
        return [
            self.id,
            self.name,
            repr(self.frequency),
            _BaseModel._serialize_attribute(self.properties),
            self.creation_date,
            self.start_date,
            self.end_date,
        ]
from datetime import datetime

from .._repository._abstract_converter import _AbstractConverter
from ..cycle._cycle_model import _CycleModel
from ..cycle.cycle import Cycle


class _CycleConverter(_AbstractConverter):
    """Converts between `Cycle` entities and their `_CycleModel` persistence records."""

    @classmethod
    def _entity_to_model(cls, cycle: Cycle) -> _CycleModel:
        # Datetimes are persisted as ISO-8601 strings.
        return _CycleModel(
            id=cycle.id,
            name=cycle._name,
            frequency=cycle._frequency,
            properties=cycle._properties.data,
            creation_date=cycle._creation_date.isoformat(),
            start_date=cycle._start_date.isoformat(),
            end_date=cycle._end_date.isoformat(),
        )

    @classmethod
    def _model_to_entity(cls, model: _CycleModel) -> Cycle:
        # Inverse of _entity_to_model: parse the ISO strings back to datetimes.
        parse = datetime.fromisoformat
        return Cycle(
            id=model.id,
            name=model.name,
            frequency=model.frequency,
            properties=model.properties,
            creation_date=parse(model.creation_date),
            start_date=parse(model.start_date),
            end_date=parse(model.end_date),
        )
from typing import Type

from .._manager._manager_factory import _ManagerFactory
from ..common._utils import _load_fct
from ..cycle._cycle_manager import _CycleManager
from ._cycle_fs_repository import _CycleFSRepository
from ._cycle_sql_repository import _CycleSQLRepository


class _CycleManagerFactory(_ManagerFactory):
    """Builds the cycle manager, swapping in the enterprise implementation when available."""

    __REPOSITORY_MAP = {"default": _CycleFSRepository, "sql": _CycleSQLRepository}

    @classmethod
    def _build_manager(cls) -> Type[_CycleManager]:  # type: ignore
        # Pick the community or enterprise manager/repository pair, then wire
        # the repository instance onto the manager class.
        if not cls._using_enterprise():
            manager = _CycleManager
            repository_factory = cls._build_repository
        else:
            enterprise_pkg = cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle"
            manager = _load_fct(enterprise_pkg + "._cycle_manager", "_CycleManager")  # type: ignore
            repository_factory = _load_fct(
                enterprise_pkg + "._cycle_manager_factory", "_CycleManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        # Instantiate the repository class selected by the configured repository type.
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from abc import abstractmethod
from importlib import util
from typing import Type

from taipy.config import Config

from ._manager import _Manager


class _ManagerFactory:
    """Base class for manager factories: builds a manager class bound to a repository."""

    _TAIPY_ENTERPRISE_MODULE = "taipy.enterprise"
    _TAIPY_ENTERPRISE_CORE_MODULE = _TAIPY_ENTERPRISE_MODULE + ".core"

    @classmethod
    @abstractmethod
    def _build_manager(cls) -> Type[_Manager]:  # type: ignore
        raise NotImplementedError

    @classmethod
    def _build_repository(cls):
        raise NotImplementedError

    @classmethod
    def _using_enterprise(cls) -> bool:
        # The enterprise edition is considered present when its package is importable.
        spec = util.find_spec(cls._TAIPY_ENTERPRISE_MODULE)
        return spec is not None

    @staticmethod
    def _get_repository_with_repo_map(repository_map: dict):
        # Resolve the repository class for the configured repository type,
        # falling back to the "default" entry when the type is unknown.
        repository_type = Config.core.repository_type
        if repository_type in repository_map:
            return repository_map[repository_type]
        return repository_map.get("default")
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import pathlib
from importlib import metadata
from typing import Dict, Generic, Iterable, List, Optional, TypeVar, Union

from taipy.logger._taipy_logger import _TaipyLogger

from .._entity._entity_ids import _EntityIds
from .._repository._abstract_repository import _AbstractRepository
from ..exceptions.exceptions import ModelNotFound
from ..notification import Event, EventOperation, Notifier

EntityType = TypeVar("EntityType")


class _Manager(Generic[EntityType]):
    """Generic CRUD manager backed by an `_AbstractRepository`.

    Subclasses set `_repository`, `_ENTITY_NAME` and (optionally)
    `_EVENT_ENTITY_TYPE`; when the latter is present, deletions are announced
    through the `Notifier`.
    """

    _repository: _AbstractRepository
    _logger = _TaipyLogger._get_logger()
    _ENTITY_NAME: str = "Entity"

    @classmethod
    def _delete_all(cls):
        """
        Deletes all entities.
        """
        cls._repository._delete_all()
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            # Single event with delete_all=True rather than one event per entity.
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,
                    EventOperation.DELETION,
                    metadata={"delete_all": True},
                )
            )

    @classmethod
    def _delete_many(cls, ids: Iterable):
        """
        Deletes entities by a list of ids.
        """
        cls._repository._delete_many(ids)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            for entity_id in ids:
                # NOTE(review): each per-entity event carries
                # metadata={"delete_all": True}, same flag as _delete_all --
                # looks copy-pasted; confirm consumers rely on it before changing.
                Notifier.publish(
                    Event(
                        cls._EVENT_ENTITY_TYPE,  # type: ignore
                        EventOperation.DELETION,
                        entity_id=entity_id,
                        metadata={"delete_all": True},
                    )
                )

    @classmethod
    def _delete_by_version(cls, version_number: str):
        """
        Deletes entities by version number.
        """
        cls._repository._delete_by(attribute="version", value=version_number)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,  # type: ignore
                    EventOperation.DELETION,
                    metadata={"delete_by_version": version_number},
                )
            )

    @classmethod
    def _delete(cls, id):
        """
        Deletes an entity by id.
        """
        cls._repository._delete(id)
        if hasattr(cls, "_EVENT_ENTITY_TYPE"):
            Notifier.publish(
                Event(
                    cls._EVENT_ENTITY_TYPE,
                    EventOperation.DELETION,
                    entity_id=id,
                )
            )

    @classmethod
    def _set(cls, entity: EntityType):
        """
        Save or update an entity.
        """
        cls._repository._save(entity)

    @classmethod
    def _get_all(cls, version_number: Optional[str] = "all") -> List[EntityType]:
        """
        Returns all entities.
        """
        # NOTE(review): `version_number` is accepted for signature compatibility
        # with subclasses but is not used here -- no version filter is applied.
        filters: List[Dict] = []
        return cls._repository._load_all(filters)

    @classmethod
    def _get_all_by(cls, filters: Optional[List[Dict]] = None) -> List[EntityType]:
        """
        Returns all entities based on a criteria.
        """
        if not filters:
            filters = []
        return cls._repository._load_all(filters)

    @classmethod
    def _get(cls, entity: Union[str, EntityType], default=None) -> EntityType:
        """
        Returns an entity by id or reference.
        """
        entity_id = entity if isinstance(entity, str) else entity.id  # type: ignore
        try:
            return cls._repository._load(entity_id)
        except ModelNotFound:
            # Missing entities are logged and swallowed; the caller gets `default`.
            cls._logger.error(f"{cls._ENTITY_NAME} not found: {entity_id}")
            return default

    @classmethod
    def _exists(cls, entity_id: str) -> bool:
        """
        Returns True if the entity id exists.
        """
        return cls._repository._exists(entity_id)

    @classmethod
    def _delete_entities_of_multiple_types(cls, _entity_ids: _EntityIds):
        """
        Deletes entities of multiple types.
        """
        # Imports are local to avoid circular imports between managers.
        from ..cycle._cycle_manager_factory import _CycleManagerFactory
        from ..data._data_manager_factory import _DataManagerFactory
        from ..job._job_manager_factory import _JobManagerFactory
        from ..scenario._scenario_manager_factory import _ScenarioManagerFactory
        from ..sequence._sequence_manager_factory import _SequenceManagerFactory
        from ..submission._submission_manager_factory import _SubmissionManagerFactory
        from ..task._task_manager_factory import _TaskManagerFactory

        _CycleManagerFactory._build_manager()._delete_many(_entity_ids.cycle_ids)
        _SequenceManagerFactory._build_manager()._delete_many(_entity_ids.sequence_ids)
        _ScenarioManagerFactory._build_manager()._delete_many(_entity_ids.scenario_ids)
        _TaskManagerFactory._build_manager()._delete_many(_entity_ids.task_ids)
        _JobManagerFactory._build_manager()._delete_many(_entity_ids.job_ids)
        _DataManagerFactory._build_manager()._delete_many(_entity_ids.data_node_ids)
        _SubmissionManagerFactory._build_manager()._delete_many(_entity_ids.submission_ids)

    @classmethod
    def _export(cls, id: str, folder_path: Union[str, pathlib.Path]):
        """
        Export an entity.
        """
        return cls._repository._export(id, folder_path)

    @classmethod
    def _is_editable(cls, entity: Union[EntityType, _EntityIds]) -> bool:
        # Community edition: everything is editable; enterprise may override.
        return True

    @classmethod
    def _is_readable(cls, entity: Union[EntityType, _EntityIds]) -> bool:
        # Community edition: everything is readable; enterprise may override.
        return True
import uuid
from typing import List, Optional, Union

from taipy.config import Config
from taipy.config._config_comparator._comparator_result import _ComparatorResult
from taipy.config.checker.issue_collector import IssueCollector
from taipy.config.exceptions.exceptions import InconsistentEnvVariableError
from taipy.logger._taipy_logger import _TaipyLogger

from .._manager._manager import _Manager
from ..exceptions.exceptions import ConflictedConfigurationError, ModelNotFound, NonExistingVersion
from ._version import _Version
from ._version_fs_repository import _VersionFSRepository


class _VersionManager(_Manager[_Version]):
    """Manager of `_Version` entities.

    Tracks the latest, development and production versions and resolves the
    special version aliases ("latest", "development"/"dev", "production",
    "all"/"") to concrete version ids.
    """

    _ENTITY_NAME = _Version.__name__

    __logger = _TaipyLogger._get_logger()

    __DEVELOPMENT_VERSION = ["development", "dev"]
    __LATEST_VERSION = "latest"
    __PRODUCTION_VERSION = "production"
    __ALL_VERSION = ["all", ""]

    _DEFAULT_VERSION = __LATEST_VERSION

    _repository: _VersionFSRepository

    @classmethod
    def _get(cls, entity: Union[str, _Version], default=None) -> _Version:
        """
        Returns the version entity by id or reference.
        """
        # Unlike the base _Manager._get, a miss is silent (no error log).
        entity_id = entity if isinstance(entity, str) else entity.id
        try:
            return cls._repository._load(entity_id)
        except ModelNotFound:
            return default

    @classmethod
    def _get_or_create(cls, id: str, force: bool) -> _Version:
        # Reuse the stored version when its config matches the currently
        # applied one; on conflict, overwrite only when `force` is set.
        if version := cls._get(id):
            comparator_result = Config._comparator._find_conflict_config(version.config, Config._applied_config, id)
            if comparator_result.get(_ComparatorResult.CONFLICTED_SECTION_KEY):
                if force:
                    cls.__logger.warning(
                        f"Option --force is detected, overriding the configuration of version {id} ..."
                    )
                    version.config = Config._applied_config
                else:
                    raise ConflictedConfigurationError()
        else:
            version = _Version(id=id, config=Config._applied_config)

        cls._set(version)
        return version

    @classmethod
    def _get_all(cls, version_number: Optional[Union[str, List]] = "all") -> List[_Version]:
        """
        Returns all entities.
        """
        version_number = cls._replace_version_number(version_number)  # type: ignore
        if not isinstance(version_number, List):
            version_number = [version_number] if version_number else []
        filters = [{"version": version} for version in version_number]
        return cls._repository._load_all(filters)

    @classmethod
    def _set_development_version(cls, version_number: str) -> str:
        # Registering a development version also makes it the latest one
        # (done inside the repository call).
        cls._get_or_create(version_number, force=True)
        cls._repository._set_development_version(version_number)
        return version_number

    @classmethod
    def _get_development_version(cls) -> str:
        try:
            return cls._repository._get_development_version()
        except (FileNotFoundError, ModelNotFound):
            # No version file yet: bootstrap a fresh development version.
            return cls._set_development_version(str(uuid.uuid4()))

    @classmethod
    def _set_experiment_version(cls, version_number: str, force: bool = False) -> str:
        # An experiment version must not collide with the development version
        # nor with any production version.
        if version_number == cls._get_development_version():
            raise SystemExit(
                f"Version number {version_number} is the development version. Please choose a different name"
                f" for this experiment."
            )

        if version_number in cls._get_production_versions():
            raise SystemExit(
                f"Version number {version_number} is already a production version. Please choose a different name"
                f" for this experiment."
            )

        try:
            cls._get_or_create(version_number, force)
        except ConflictedConfigurationError:
            raise SystemExit(
                f"Please add a new experiment version or run your application with --force option to"
                f" override the Config of experiment {version_number}."
            )

        cls._repository._set_latest_version(version_number)
        return version_number

    @classmethod
    def _get_latest_version(cls) -> str:
        try:
            return cls._repository._get_latest_version()
        except (FileNotFoundError, ModelNotFound):
            # If there is no version in the system yet, create a new version as development version
            # This set the default versioning behavior on Jupyter notebook to Development mode
            return cls._set_development_version(str(uuid.uuid4()))

    @classmethod
    def _set_production_version(cls, version_number: str, force: bool = False) -> str:
        # Promoting the current development version to production spawns a new
        # development version so development work can continue.
        if version_number == cls._get_development_version():
            cls._set_development_version(str(uuid.uuid4()))

        try:
            cls._get_or_create(version_number, force)
        except ConflictedConfigurationError:
            raise SystemExit(
                f"Please add a new production version with migration functions.\n"
                f"If old entities remain compatible with the new configuration, you can also run your application with"
                f" --force option to override the production configuration of version {version_number}."
            )

        cls._repository._set_production_version(version_number)
        return version_number

    @classmethod
    def _get_production_versions(cls) -> List[str]:
        try:
            return cls._repository._get_production_versions()
        except (FileNotFoundError, ModelNotFound):
            return []

    @classmethod
    def _delete_production_version(cls, version_number) -> str:
        return cls._repository._delete_production_version(version_number)

    @classmethod
    def _replace_version_number(cls, version_number: Optional[str] = None):
        # Resolve a version alias (or None) to a concrete version id, or to a
        # list of ids for "production", or "" for "all".
        if version_number is None:
            # Default resolution; if the default resolves to a production
            # version, return the whole production list.
            version_number = cls._replace_version_number(cls._DEFAULT_VERSION)
            production_versions = cls._get_production_versions()
            if version_number in production_versions:
                return production_versions
            return version_number

        if version_number == cls.__LATEST_VERSION:
            return cls._get_latest_version()
        if version_number in cls.__DEVELOPMENT_VERSION:
            return cls._get_development_version()
        if version_number == cls.__PRODUCTION_VERSION:
            return cls._get_production_versions()
        if version_number in cls.__ALL_VERSION:
            return ""

        try:
            if version := cls._get(version_number):
                return version.id
        except InconsistentEnvVariableError:  # The version exist but the Config is alternated
            return version_number

        raise NonExistingVersion(version_number)

    @classmethod
    def _manage_version(cls):
        # Entry point called at application start-up: applies the versioning
        # behavior configured by Config.core.mode.
        from ..taipy import clean_all_entities_by_version

        if Config.core.mode == "development":
            # Development mode always starts from a clean slate.
            current_version_number = cls._get_development_version()
            cls.__logger.info(f"Development mode: Clean all entities of version {current_version_number}")
            clean_all_entities_by_version(current_version_number)
            cls._set_development_version(current_version_number)
        elif Config.core.mode in ["experiment", "production"]:
            default_version_number = {
                "experiment": str(uuid.uuid4()),
                "production": cls._get_latest_version(),
            }
            version_setter = {
                "experiment": cls._set_experiment_version,
                "production": cls._set_production_version,
            }
            if Config.core.version_number:
                current_version_number = Config.core.version_number
            else:
                current_version_number = default_version_number[Config.core.mode]
            version_setter[Config.core.mode](current_version_number, Config.core.force)
            if Config.core.mode == "production":
                cls.__check_production_migration_config()
        else:
            raise SystemExit(f"Undefined execution mode: {Config.core.mode}.")

    @classmethod
    def __check_production_migration_config(self):
        # NOTE(review): this is a classmethod whose first parameter is named
        # `self` -- it actually receives the class; consider renaming to `cls`.
        from ..config.checkers._migration_config_checker import _MigrationConfigChecker

        collector = _MigrationConfigChecker(Config._applied_config, IssueCollector())._check()
        for issue in collector._warnings:
            self.__logger.warning(str(issue))
        for issue in collector._infos:
            self.__logger.info(str(issue))
        for issue in collector._errors:
            self.__logger.error(str(issue))
        if len(collector._errors) != 0:
            raise SystemExit("Configuration errors found. Please check the error log for more information.")

    @classmethod
    def _delete_entities_of_multiple_types(cls, _entity_ids):
        # Not applicable to versions; explicitly unsupported.
        raise NotImplementedError
from datetime import datetime
from typing import Any

from taipy.config import Config
from taipy.config._config import _Config

from .._entity._entity import _Entity


class _Version(_Entity):
    """A snapshot of the application configuration identified by a version id."""

    def __init__(self, id: str, config: Any) -> None:
        self.id: str = id
        self.config: _Config = config
        # Timestamp of when this version entity was instantiated.
        self.creation_date: datetime = datetime.now()

    def __eq__(self, other):
        # Two versions are equal when both the id and the serialized config match.
        same_id = self.id == other.id
        return same_id and self.__is_config_eq(other)

    def __is_config_eq(self, other):
        # Compare configs through their serialized string form.
        serialize = Config._serializer._str
        return serialize(self.config) == serialize(other.config)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
from typing import Dict, List

from .._version._version_manager_factory import _VersionManagerFactory


class _VersionMixin:
    """Mixin adding version-aware filtering helpers to entity managers."""

    _version_manager = _VersionManagerFactory._build_manager()

    @classmethod
    def __fetch_version_number(cls, version_number):
        # Resolve aliases ("latest", "dev", ...) to concrete ids and normalize
        # the result to a list (empty when nothing resolves).
        resolved = _VersionManagerFactory._build_manager()._replace_version_number(version_number)
        if isinstance(resolved, List):
            return resolved
        return [resolved] if resolved else []

    @classmethod
    def _build_filters_with_version(cls, version_number) -> List[Dict]:
        # One {"version": ...} filter per resolved version id.
        versions = cls.__fetch_version_number(version_number)
        if not versions:
            return []
        return [{"version": version} for version in versions]

    @classmethod
    def _get_latest_version(cls):
        return cls._version_manager._get_latest_version()
from .._manager._manager_factory import _ManagerFactory
from ..common import _utils
from ._version_fs_repository import _VersionFSRepository
from ._version_manager import _VersionManager
from ._version_sql_repository import _VersionSQLRepository


class _VersionManagerFactory(_ManagerFactory):
    """Builds the version manager, swapping in the enterprise implementation when available."""

    __REPOSITORY_MAP = {"default": _VersionFSRepository, "sql": _VersionSQLRepository}

    @classmethod
    def _build_manager(cls) -> _VersionManager:  # type: ignore
        # Pick the community or enterprise manager/repository pair, then wire
        # the repository instance onto the manager class.
        if not cls._using_enterprise():
            manager = _VersionManager
            repository_factory = cls._build_repository
        else:
            enterprise_pkg = cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version"
            manager = _utils._load_fct(enterprise_pkg + "._version_manager", "_VersionManager")  # type: ignore
            repository_factory = _utils._load_fct(
                enterprise_pkg + "._version_manager_factory", "_VersionManagerFactory"
            )._build_repository  # type: ignore
        manager._repository = repository_factory()  # type: ignore
        return manager  # type: ignore

    @classmethod
    def _build_repository(cls):
        # Instantiate the repository class selected by the configured repository type.
        return cls._get_repository_with_repo_map(cls.__REPOSITORY_MAP)()
from abc import ABC, abstractmethod


class _VersionRepositoryInterface(ABC):
    """Contract for repositories tracking the latest/development/production versions.

    The three keys name the fields of the persisted version bookkeeping record.
    """

    _LATEST_VERSION_KEY = "latest_version"
    _DEVELOPMENT_VERSION_KEY = "development_version"
    _PRODUCTION_VERSION_KEY = "production_version"

    @abstractmethod
    def _set_latest_version(self, version_number):
        raise NotImplementedError

    @abstractmethod
    def _get_latest_version(self):
        raise NotImplementedError

    @abstractmethod
    def _set_development_version(self, version_number):
        raise NotImplementedError

    @abstractmethod
    def _get_development_version(self):
        raise NotImplementedError

    @abstractmethod
    def _set_production_version(self, version_number):
        raise NotImplementedError

    @abstractmethod
    def _get_production_versions(self):
        raise NotImplementedError

    @abstractmethod
    def _delete_production_version(self, version_number):
        raise NotImplementedError
import json
from typing import List

from taipy.logger._taipy_logger import _TaipyLogger

from .._repository._filesystem_repository import _FileSystemRepository
from ..exceptions.exceptions import VersionIsNotProductionVersion
from ._version_converter import _VersionConverter
from ._version_model import _VersionModel
from ._version_repository_interface import _VersionRepositoryInterface


class _VersionFSRepository(_FileSystemRepository, _VersionRepositoryInterface):
    """Filesystem repository for `_Version` entities plus the version.json bookkeeping file.

    version.json holds three fields: the latest version id, the development
    version id and the list of production version ids.

    Refactor note: the original duplicated the read-file / build-default-content /
    serialize-and-write logic in every setter; it is factored into the private
    helpers below with identical on-disk behavior.
    """

    def __init__(self):
        super().__init__(model_type=_VersionModel, converter=_VersionConverter, dir_name="version")

    @property
    def _version_file_path(self):
        # Bookkeeping file living next to the version entities.
        return super()._storage_folder / "version.json"

    def __read_version_file(self) -> dict:
        """Load and return the content of version.json (raises FileNotFoundError if absent)."""
        with open(self._version_file_path, "r") as f:
            return json.load(f)

    def __new_version_file_content(self, latest="", development="", production=None) -> dict:
        """Create the parent directory and return a fresh bookkeeping record."""
        self.dir_path.mkdir(parents=True, exist_ok=True)
        return {
            self._LATEST_VERSION_KEY: latest,
            self._DEVELOPMENT_VERSION_KEY: development,
            self._PRODUCTION_VERSION_KEY: production if production is not None else [],
        }

    def __write_version_file(self, file_content) -> None:
        """Serialize `file_content` to version.json (same json options as before)."""
        self._version_file_path.write_text(
            json.dumps(
                file_content,
                ensure_ascii=False,
                indent=0,
            )
        )

    def _delete_all(self):
        # Also drop the bookkeeping file when wiping all version entities.
        super()._delete_all()
        if self._version_file_path.exists():
            self._version_file_path.unlink()

    def _set_latest_version(self, version_number):
        if self._version_file_path.exists():
            file_content = self.__read_version_file()
            file_content[self._LATEST_VERSION_KEY] = version_number
        else:
            file_content = self.__new_version_file_content(latest=version_number)
        self.__write_version_file(file_content)

    def _get_latest_version(self) -> str:
        return self.__read_version_file()[self._LATEST_VERSION_KEY]

    def _set_development_version(self, version_number):
        # The development version is always also the latest version.
        if self._version_file_path.exists():
            file_content = self.__read_version_file()
            file_content[self._DEVELOPMENT_VERSION_KEY] = version_number
            file_content[self._LATEST_VERSION_KEY] = version_number
        else:
            file_content = self.__new_version_file_content(latest=version_number, development=version_number)
        self.__write_version_file(file_content)

    def _get_development_version(self) -> str:
        return self.__read_version_file()[self._DEVELOPMENT_VERSION_KEY]

    def _set_production_version(self, version_number):
        # Promoting to production also makes the version the latest one.
        if self._version_file_path.exists():
            file_content = self.__read_version_file()
            file_content[self._LATEST_VERSION_KEY] = version_number
            if version_number not in file_content[self._PRODUCTION_VERSION_KEY]:
                file_content[self._PRODUCTION_VERSION_KEY].append(version_number)
            else:
                _TaipyLogger._get_logger().info(f"Version {version_number} is already a production version.")
        else:
            file_content = self.__new_version_file_content(latest=version_number, production=[version_number])
        self.__write_version_file(file_content)

    def _get_production_versions(self) -> List[str]:
        return self.__read_version_file()[self._PRODUCTION_VERSION_KEY]

    def _delete_production_version(self, version_number):
        # A missing file or an unknown version both surface as
        # VersionIsNotProductionVersion with the same message.
        try:
            file_content = self.__read_version_file()
            if version_number not in file_content[self._PRODUCTION_VERSION_KEY]:
                raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
            file_content[self._PRODUCTION_VERSION_KEY].remove(version_number)
            self.__write_version_file(file_content)
        except FileNotFoundError:
            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
from sqlalchemy.dialects import sqlite

from .._repository._sql_repository import _SQLRepository
from ..exceptions.exceptions import ModelNotFound, VersionIsNotProductionVersion
from ._version_converter import _VersionConverter
from ._version_model import _VersionModel
from ._version_repository_interface import _VersionRepositoryInterface


class _VersionSQLRepository(_SQLRepository, _VersionRepositoryInterface):
    """SQL implementation of the version repository.

    The latest/development/production states are stored as boolean columns
    (``is_latest``, ``is_development``, ``is_production``) on the version
    table; promoting a version therefore means flipping flags on rows.
    """

    def __init__(self):
        super().__init__(model_type=_VersionModel, converter=_VersionConverter)

    def _set_latest_version(self, version_number):
        # Demote the current latest row (if any) before promoting the new one.
        # NOTE(review): unlike the getters, this select is not compiled with the
        # sqlite dialect — confirm str(select) renders valid SQL for the target DB.
        if old_latest := self.db.execute(str(self.table.select().filter_by(is_latest=True))).fetchone():
            old_latest = self.model_type.from_dict(old_latest)
            old_latest.is_latest = False
            self._update_entry(old_latest)

        version = self.__get_by_id(version_number)
        version.is_latest = True
        self._update_entry(version)

    def _get_latest_version(self):
        # Returns the id of the row flagged is_latest; ModelNotFound if none.
        if latest := self.db.execute(
            str(self.table.select().filter_by(is_latest=True).compile(dialect=sqlite.dialect()))
        ).fetchone():
            return latest["id"]
        raise ModelNotFound(self.model_type, "")

    def _set_development_version(self, version_number):
        # Demote the current development row (if any) before promoting the new one.
        # NOTE(review): same missing dialect compile as _set_latest_version — confirm.
        if old_development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
            old_development = self.model_type.from_dict(old_development)
            old_development.is_development = False
            self._update_entry(old_development)

        version = self.__get_by_id(version_number)
        version.is_development = True
        self._update_entry(version)

        # The development version is always the latest version as well.
        self._set_latest_version(version_number)

    def _get_development_version(self):
        # Returns the id of the row flagged is_development; ModelNotFound if none.
        if development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
            return development["id"]
        raise ModelNotFound(self.model_type, "")

    def _set_production_version(self, version_number):
        # Production is additive: multiple rows may carry is_production=True.
        version = self.__get_by_id(version_number)
        version.is_production = True
        self._update_entry(version)

        # Promoting to production also makes the version the latest one.
        self._set_latest_version(version_number)

    def _get_production_versions(self):
        # All ids flagged is_production; empty list when there are none.
        if productions := self.db.execute(
            str(self.table.select().filter_by(is_production=True).compile(dialect=sqlite.dialect())),
        ).fetchall():
            return [p["id"] for p in productions]
        return []

    def _delete_production_version(self, version_number):
        # Raises VersionIsNotProductionVersion when the version is unknown or
        # not flagged as production; otherwise clears the flag.
        version = self.__get_by_id(version_number)

        if not version or not version.is_production:
            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
        version.is_production = False
        self._update_entry(version)

    def __get_by_id(self, version_id):
        # Fetch one row by primary key; returns None when the id is unknown.
        query = str(self.table.select().filter_by(id=version_id).compile(dialect=sqlite.dialect()))
        entry = self.db.execute(query, [version_id]).fetchone()
        return self.model_type.from_dict(entry) if entry else None
from datetime import datetime

from taipy.config import Config

from .._repository._abstract_converter import _AbstractConverter
from .._version._version import _Version
from .._version._version_model import _VersionModel


class _VersionConverter(_AbstractConverter):
    """Translates `_Version` entities to/from their `_VersionModel` storage form."""

    @classmethod
    def _entity_to_model(cls, version: _Version) -> _VersionModel:
        # The configuration is serialized to JSON and the timestamp to ISO-8601.
        serialized_config = Config._to_json(version.config)
        return _VersionModel(
            id=version.id,
            config=serialized_config,
            creation_date=version.creation_date.isoformat(),
        )

    @classmethod
    def _model_to_entity(cls, model: _VersionModel) -> _Version:
        # Rebuild the entity, then restore its original creation timestamp.
        entity = _Version(id=model.id, config=Config._from_json(model.config))
        entity.creation_date = datetime.fromisoformat(model.creation_date)
        return entity
from dataclasses import dataclass
from typing import Any, Dict

from sqlalchemy import Boolean, Column, String, Table

from .._repository._base_taipy_model import _BaseModel
from .._repository.db._sql_base_model import mapper_registry


@mapper_registry.mapped
@dataclass
class _VersionModel(_BaseModel):
    """Storage model for a version entity, mapped onto the "version" table."""

    __table__ = Table(
        "version",
        mapper_registry.metadata,
        Column("id", String, primary_key=True),
        Column("config", String),  # config is stored as a JSON string
        Column("creation_date", String),
        Column("is_production", Boolean),
        Column("is_development", Boolean),
        Column("is_latest", Boolean),
    )
    id: str
    config: Dict[str, Any]
    creation_date: str

    # The three status flags set by from_dict(); absent keys yield None.
    _FLAG_FIELDS = ("is_production", "is_development", "is_latest")

    @staticmethod
    def from_dict(data: Dict[str, Any]):
        """Build a model from a row/dict; status flags default to None when missing."""
        model = _VersionModel(
            id=data["id"],
            config=data["config"],
            creation_date=data["creation_date"],
        )
        for flag in _VersionModel._FLAG_FIELDS:
            setattr(model, flag, data.get(flag))
        return model

    def to_list(self):
        """Return the column values in table order."""
        return [
            self.id,
            self.config,
            self.creation_date,
            self.is_production,
            self.is_development,
            self.is_latest,
        ]
from typing import Callable, List

from taipy.config.config import Config

from .._entity._reload import _Reloader
from ..config import MigrationConfig
from ._version_manager_factory import _VersionManagerFactory


def _migrate_entity(entity):
    """Run the registered production migration functions on *entity*.

    Migrations only apply when the latest version is a production version;
    the entity is then chained through every migration function between its
    own version and the latest one, and stamped with the latest version.
    """
    manager = _VersionManagerFactory._build_manager()
    latest_version = manager._get_latest_version()
    if latest_version in manager._get_production_versions():
        fcts = __get_migration_fcts_to_latest(entity._version, entity.config_id)
        if fcts:
            # _Reloader suppresses reload-on-attribute-access while mutating.
            with _Reloader():
                for fct in fcts:
                    entity = fct(entity)
            entity._version = latest_version
    return entity


def __get_migration_fcts_to_latest(source_version: str, config_id: str) -> List[Callable]:
    """Collect the migration functions for *config_id* registered on every
    production version strictly after *source_version* (in order)."""
    collected: List[Callable] = []
    production_versions = _VersionManagerFactory._build_manager()._get_production_versions()
    try:
        start_index = production_versions.index(source_version) + 1
    except ValueError:
        # Source version is not a production version: nothing to migrate.
        return collected

    migration_fcts = Config.unique_sections[MigrationConfig.name].migration_fcts
    for version in production_versions[start_index:]:
        fct = migration_fcts.get(version, {}).get(config_id)
        if fct:
            collected.append(fct)
    return collected
import sys

from taipy._cli._base_cli import _CLI
from taipy.config import Config
from taipy.config.exceptions.exceptions import InconsistentEnvVariableError
from taipy.logger._taipy_logger import _TaipyLogger

from ...data._data_manager_factory import _DataManagerFactory
from ...exceptions.exceptions import VersionIsNotProductionVersion
from ...job._job_manager_factory import _JobManagerFactory
from ...scenario._scenario_manager_factory import _ScenarioManagerFactory
from ...sequence._sequence_manager_factory import _SequenceManagerFactory
from ...taipy import clean_all_entities_by_version
from ...task._task_manager_factory import _TaskManagerFactory
from .._version_manager_factory import _VersionManagerFactory
from ._bcolor import _Bcolors


class _VersionCLI:
    """Command-line interface of the versioning system."""

    __logger = _TaipyLogger._get_logger()

    @classmethod
    def create_parser(cls):
        # Register the "manage-versions" sub-command and its options on the
        # application-wide CLI parser.
        version_parser = _CLI._add_subparser("manage-versions", help="Taipy version control system.")

        version_parser.add_argument(
            "-l", "--list", action="store_true", help="List all existing versions of the Taipy application."
        )

        version_parser.add_argument(
            "--rename", nargs=2, metavar=("OLD_VERSION", "NEW_VERSION"), help="Rename a Taipy version."
        )

        version_parser.add_argument(
            "--compare-config",
            nargs=2,
            metavar=("VERSION_1", "VERSION_2"),
            help="Compare the Configuration of 2 Taipy versions.",
        )

        version_parser.add_argument(
            "-d", "--delete", metavar="VERSION", help="Delete a Taipy version by version number."
        )

        version_parser.add_argument(
            "-dp",
            "--delete-production",
            metavar="VERSION",
            help="Delete a Taipy version from production by version number. The version is still kept as an experiment "
            "version.",
        )

    @classmethod
    def parse_arguments(cls):
        # Dispatch the parsed CLI arguments; each action exits the process.
        args = _CLI._parse()

        if getattr(args, "which", None) != "manage-versions":
            return

        if args.list:
            print(cls.__list_versions())
            sys.exit(0)

        if args.rename:
            try:
                cls.__rename_version(args.rename[0], args.rename[1])
            except InconsistentEnvVariableError as error:
                cls.__logger.error(
                    f"Fail to rename version {args.rename[0]} to {args.rename[1]} due to outdated Configuration."
                    f"Detail: {str(error)}"
                )
                sys.exit(1)

            cls.__logger.info(f"Successfully renamed version '{args.rename[0]}' to '{args.rename[1]}'.")
            sys.exit(0)

        if args.compare_config:
            cls.__compare_version_config(args.compare_config[0], args.compare_config[1])
            sys.exit(0)

        if args.delete_production:
            try:
                _VersionManagerFactory._build_manager()._delete_production_version(args.delete_production)
                cls.__logger.info(
                    f"Successfully delete version {args.delete_production} from the production version list."
                )
                sys.exit(0)
            except VersionIsNotProductionVersion as e:
                raise SystemExit(e)

        if args.delete:
            if clean_all_entities_by_version(args.delete):
                cls.__logger.info(f"Successfully delete version {args.delete}.")
            else:
                sys.exit(1)
            sys.exit(0)

    @classmethod
    def __list_versions(cls):
        # Build a colorized, newest-first table of all known versions.
        # Color legend: green = development, purple = production, blue = experiment;
        # the latest version is additionally rendered in bold.
        list_version_message = f"\n{'Version number':<36} {'Mode':<20} {'Creation date':<20}\n"

        latest_version_number = _VersionManagerFactory._build_manager()._get_latest_version()
        development_version_number = _VersionManagerFactory._build_manager()._get_development_version()
        production_version_numbers = _VersionManagerFactory._build_manager()._get_production_versions()

        versions = _VersionManagerFactory._build_manager()._get_all()
        versions.sort(key=lambda x: x.creation_date, reverse=True)

        bold = False
        for version in versions:
            if version.id == development_version_number:
                list_version_message += _Bcolors.GREEN
                mode = "Development"
            elif version.id in production_version_numbers:
                list_version_message += _Bcolors.PURPLE
                mode = "Production"
            else:
                list_version_message += _Bcolors.BLUE
                mode = "Experiment"

            if version.id == latest_version_number:
                list_version_message += _Bcolors.BOLD
                bold = True
                mode += " (latest)"

            list_version_message += (
                f"{(version.id):<36} {mode:<20} {version.creation_date.strftime('%Y-%m-%d %H:%M:%S'):<20}"
            )
            list_version_message += _Bcolors.END
            # NOTE(review): `bold` is never reset inside the loop, so every row
            # after the latest one appends an extra END code — confirm intent.
            if bold:
                list_version_message += _Bcolors.END
            list_version_message += "\n"
        return list_version_message

    @classmethod
    def __rename_version(cls, old_version: str, new_version: str):
        _version_manager = _VersionManagerFactory._build_manager()

        # Check if the new version already exists, return an error
        if _version_manager._get(new_version):
            cls.__logger.error(f"Version name '{new_version}' is already used.")
            sys.exit(1)

        # Make sure that all entities of the old version are exists and loadable
        version_entity = _version_manager._get(old_version)
        if version_entity is None:
            cls.__logger.error(f"Version '{old_version}' does not exist.")
            sys.exit(1)

        jobs = _JobManagerFactory._build_manager()._get_all(version_number=old_version)
        scenarios = _ScenarioManagerFactory._build_manager()._get_all(version_number=old_version)
        sequences = _SequenceManagerFactory._build_manager()._get_all(version_number=old_version)
        tasks = _TaskManagerFactory._build_manager()._get_all(version_number=old_version)
        datanodes = _DataManagerFactory._build_manager()._get_all(version_number=old_version)

        # Update the version of all entities
        for job in jobs:
            job._version = new_version
            _JobManagerFactory._build_manager()._set(job)
        for scenario in scenarios:
            scenario._version = new_version
            _ScenarioManagerFactory._build_manager()._set(scenario)
        for sequence in sequences:
            sequence._version = new_version
            _SequenceManagerFactory._build_manager()._set(sequence)
        for task in tasks:
            task._version = new_version
            _TaskManagerFactory._build_manager()._set(task)
        for datanode in datanodes:
            datanode._version = new_version
            _DataManagerFactory._build_manager()._set(datanode)

        # Update the version entity: carry the old version's statuses over to
        # the new name before deleting the old entry.
        if old_version in _version_manager._get_production_versions():
            _version_manager._set_production_version(new_version)
        # NOTE(review): renaming the latest version registers the new name as an
        # *experiment* version — confirm this is the intended promotion path.
        if old_version == _version_manager._get_latest_version():
            _version_manager._set_experiment_version(new_version)
        if old_version == _version_manager._get_development_version():
            _version_manager._set_development_version(new_version)
        _version_manager._delete(old_version)
        try:
            _version_manager._delete_production_version(old_version)
        except VersionIsNotProductionVersion:
            pass

        # If no status promotion re-created the entity under the new name,
        # persist the renamed entity explicitly.
        if not _version_manager._get(new_version):
            version_entity.id = new_version
            _version_manager._set(version_entity)

    @classmethod
    def __compare_version_config(cls, version_1: str, version_2: str):
        # Load both versions (exiting with an error if either is unknown) and
        # delegate the diff to the Config comparator.
        version_entity_1 = _VersionManagerFactory._build_manager()._get(version_1)
        if version_entity_1 is None:
            cls.__logger.error(f"Version '{version_1}' does not exist.")
            sys.exit(1)

        version_entity_2 = _VersionManagerFactory._build_manager()._get(version_2)
        if version_entity_2 is None:
            cls.__logger.error(f"Version '{version_2}' does not exist.")
            sys.exit(1)

        Config._comparator._compare(
            version_entity_1.config,
            version_entity_2.config,
            version_1,
            version_2,
        )
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
import os import sys def _vt_codes_enabled_in_windows_registry(): """ Check the Windows Registry to see if VT code handling has been enabled by default, see https://superuser.com/a/1300251/447564. """ try: # winreg is only available on Windows. import winreg except ImportError: return False else: try: reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Console") reg_key_value, _ = winreg.QueryValueEx(reg_key, "VirtualTerminalLevel") except FileNotFoundError: return False else: return reg_key_value == 1 def _is_color_supported(): """ Return True if the running system's terminal supports color, and False otherwise. """ is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() return is_a_tty and ( sys.platform != "win32" or "ANSICON" in os.environ or "WT_SESSION" in os.environ # Windows Terminal supports VT codes. or os.environ.get("TERM_PROGRAM") == "vscode" # VSCode's built-in terminal supports colors. or _vt_codes_enabled_in_windows_registry() ) class _Bcolors: PURPLE = "\033[95m" if _is_color_supported() else "" BLUE = "\033[94m" if _is_color_supported() else "" CYAN = "\033[96m" if _is_color_supported() else "" GREEN = "\033[92m" if _is_color_supported() else "" BOLD = "\033[1m" if _is_color_supported() else "" UNDERLINE = "\033[4m" if _is_color_supported() else "" END = "\033[0m" if _is_color_supported() else ""
from .exceptions import *
from typing import List, Optional

# Fix applied throughout this module: exception classes that build a
# `self.message` now also pass it to `super().__init__()`, so `str(exc)`
# and tracebacks carry the message (previously str() returned "").


class ConfigCoreVersionMismatched(Exception):
    """Raised if core version in Config does not match with the version of Taipy Core."""

    def __init__(self, config_core_version: str, core_version: str) -> None:
        self.message = (
            f"Core version {config_core_version} in Config does not match with version of Taipy Core {core_version}."
        )
        super().__init__(self.message)


class CoreServiceIsAlreadyRunning(Exception):
    """Raised if the Core service is already running."""


class CycleAlreadyExists(Exception):
    """Raised if it is trying to create a Cycle that has already exists."""


class NonExistingCycle(Exception):
    """Raised if a requested cycle is not known by the Cycle manager."""

    def __init__(self, cycle_id: str):
        self.message = f"Cycle: {cycle_id} does not exist."
        super().__init__(self.message)


class MissingRequiredProperty(Exception):
    """Raised if a required property is missing when creating a Data Node."""


class InvalidDataNodeType(Exception):
    """Raised if a data node storage type does not exist."""


class MultipleDataNodeFromSameConfigWithSameOwner(Exception):
    """
    Raised if there are multiple data nodes from the same data node configuration and the same owner identifier.
    """


class NoData(Exception):
    """Raised if a data node is read before it has been written.

    This exception can be raised by `DataNode.read_or_raise()^`.
    """


class UnknownDatabaseEngine(Exception):
    """Raised if the database engine is not known when creating a connection with a SQLDataNode."""


class MissingAppendQueryBuilder(Exception):
    """Raised if no append query builder is provided when appending data to a SQLDataNode."""


class UnknownParquetEngine(Exception):
    """Raised if the parquet engine is not known or not supported when create a ParquetDataNode."""


class UnknownCompressionAlgorithm(Exception):
    """Raised if the compression algorithm is not supported by ParquetDataNode."""


class NonExistingDataNode(Exception):
    """Raised if a requested DataNode is not known by the DataNode Manager."""

    def __init__(self, data_node_id: str):
        self.message = f"DataNode: {data_node_id} does not exist."
        super().__init__(self.message)


class DataNodeIsBeingEdited(Exception):
    """Raised if a DataNode is being edited."""

    def __init__(self, data_node_id: str, editor_id: Optional[str] = None):
        self.message = f"DataNode {data_node_id} is being edited{ ' by ' + editor_id if editor_id else ''}."
        super().__init__(self.message)


class NonExistingDataNodeConfig(Exception):
    """Raised if a requested DataNodeConfig is not known by the DataNode Manager."""

    def __init__(self, data_node_config_id: str):
        self.message = f"Data node config: {data_node_config_id} does not exist."
        super().__init__(self.message)


class NonExistingExcelSheet(Exception):
    """Raised if a requested Sheet name does not exist in the provided Excel file."""

    def __init__(self, sheet_name: str, excel_file_name: str):
        self.message = f"{sheet_name} does not exist in {excel_file_name}."
        super().__init__(self.message)


class ExposedTypeLengthMismatch(Exception):
    """Raised if length of exposed type list does not match with number of sheets in the provided Excel file."""


class SheetNameLengthMismatch(Exception):
    """Raised if length of sheet_name list does not match with number of sheets
    in the data to be written to Excel file."""


class InvalidExposedType(Exception):
    """Raised if an invalid exposed type is provided."""


class InvalidCustomDocument(Exception):
    """Raised if an invalid custom document class is provided to a `MongoCollectionDataNode`."""


class DataNodeConfigIsNotGlobal(Exception):
    """Raised if a DataNode is not global."""

    def __init__(self, data_node_config_id: str):
        self.message = f"Data node config `{data_node_config_id}` does not have GLOBAL scope."
        super().__init__(self.message)


class MissingReadFunction(Exception):
    """Raised if no read function is provided for the GenericDataNode."""


class MissingWriteFunction(Exception):
    """Raised if no write function is provided for the GenericDataNode."""


class JobNotDeletedException(RuntimeError):
    """Raised if there is an attempt to delete a job that cannot be deleted.

    This exception can be raised by `taipy.delete_job()^`.
    """

    def __init__(self, job_id: str):
        self.message = f"Job: {job_id} cannot be deleted."
        super().__init__(self.message)


class NonExistingJob(RuntimeError):
    """Raised if a requested job is not known by the Job manager."""

    def __init__(self, job_id: str):
        self.message = f"Job: {job_id} does not exist."
        super().__init__(self.message)


class DataNodeWritingError(RuntimeError):
    """Raised if an error happens during the writing in a data node."""


class InvalidSubscriber(RuntimeError):
    """Raised if the loaded function is not valid."""


class InvalidSequenceId(Exception):
    """Raised if a Sequence id can not be broken down."""

    def __init__(self, sequence_id: str):
        self.message = f"Sequence: {sequence_id} is invalid."
        super().__init__(self.message)


class InvalidSequence(Exception):
    """Raised if a Sequence is not a connected Directed Acyclic Graph."""

    def __init__(self, sequence_id: str):
        self.message = f"Sequence: {sequence_id} is not a connected Directed Acyclic Graph."
        super().__init__(self.message)


class NonExistingSequence(Exception):
    """Raised if a requested Sequence is not known by the Sequence Manager."""

    def __init__(self, sequence_id: str):
        self.message = f"Sequence: {sequence_id} does not exist."
        super().__init__(self.message)


class SequenceBelongsToNonExistingScenario(Exception):
    """Raised if a Sequence does not belong to an existing Scenario."""

    def __init__(self, sequence_id: str, scenario_id: str):
        self.message = f"Sequence: {sequence_id} belongs to a non-existing Scenario: {scenario_id}."
        super().__init__(self.message)


class SequenceTaskDoesNotExistInScenario(Exception):
    """Raised if Tasks of a Sequence do not exist in the same Scenario that the Sequence belongs to."""

    def __init__(self, task_ids: List[Optional[str]], sequence_name: str, scenario_id: str):
        self.message = f"Tasks {task_ids} of Sequence {sequence_name} does not exist in Scenario {scenario_id}."
        super().__init__(self.message)


class SequenceTaskConfigDoesNotExistInSameScenarioConfig(Exception):
    """Raised if TaskConfigs of a Sequence do not exist in the same ScenarioConfig that the Sequence belongs to."""

    def __init__(self, task_config_ids: List[Optional[str]], sequence_name: str, scenario_config_id: str):
        self.message = f"TaskConfig {task_config_ids} of Sequence name {sequence_name} "
        self.message += f"does not exist in ScenarioConfig {scenario_config_id}."
        super().__init__(self.message)


class NonExistingSequenceConfig(Exception):
    """Raised if a requested Sequence configuration is not known by the Sequence Manager."""

    def __init__(self, sequence_config_id: str):
        self.message = f"Sequence config: {sequence_config_id} does not exist."
        super().__init__(self.message)


class MultipleSequenceFromSameConfigWithSameOwner(Exception):
    """Raised if it exists multiple sequences from the same sequence config and with the same _owner_id_."""


class ModelNotFound(Exception):
    """Raised when trying to fetch a non-existent model.

    This exception can be raised by `taipy.get()^` and `taipy.delete()^`.
    """

    def __init__(self, model_name: str, model_id: str):
        self.message = f"A {model_name} model with id {model_id} could not be found."
        super().__init__(self.message)


class NonExistingScenario(Exception):
    """Raised if a requested scenario is not known by the Scenario Manager."""

    def __init__(self, scenario_id: str):
        self.message = f"Scenario: {scenario_id} does not exist."
        super().__init__(self.message)


class NonExistingScenarioConfig(Exception):
    """Raised if a requested scenario configuration is not known by the Scenario Manager.

    This exception can be raised by `taipy.compare_scenarios()^`.
    """

    def __init__(self, scenario_config_id: str):
        self.message = f"Scenario config: {scenario_config_id} does not exist."
        super().__init__(self.message)


class InvalidSscenario(Exception):
    # NOTE(review): class name carries a typo ("Sscenario") but is part of the
    # public surface — renaming would break callers that catch it.
    """Raised if a Scenario is not a Directed Acyclic Graph."""

    def __init__(self, scenario_id: str):
        self.message = f"Scenario: {scenario_id} is not a Directed Acyclic Graph."
        super().__init__(self.message)


class DoesNotBelongToACycle(Exception):
    """Raised if a scenario without any cycle is promoted as primary scenario."""


class DeletingPrimaryScenario(Exception):
    """Raised if a primary scenario is deleted."""


class DifferentScenarioConfigs(Exception):
    """Raised if scenario comparison is requested on scenarios with different scenario configs.

    This exception can be raised by `taipy.compare_scenarios()^`.
    """


class InsufficientScenarioToCompare(Exception):
    """Raised if too few scenarios are requested to be compared.

    Scenario comparison need at least two scenarios to compare.
    This exception can be raised by `taipy.compare_scenarios()^`.
    """


class NonExistingComparator(Exception):
    """Raised if a scenario comparator does not exist.

    This exception can be raised by `taipy.compare_scenarios()^`.
    """


class UnauthorizedTagError(Exception):
    """Must provide an authorized tag."""


class DependencyNotInstalled(Exception):
    """Raised if a package is missing."""

    def __init__(self, package_name: str):
        self.message = f"""
        Package '{package_name}' should be installed.
        Run 'pip install taipy[{package_name}]' to installed it.
        """
        super().__init__(self.message)


class NonExistingTask(Exception):
    """Raised if a requested task is not known by the Task Manager."""

    def __init__(self, task_id: str):
        self.message = f"Task: {task_id} does not exist."
        super().__init__(self.message)


class NonExistingTaskConfig(Exception):
    """Raised if a requested task configuration is not known by the Task Manager."""

    def __init__(self, id: str):
        self.message = f"Task config: {id} does not exist."
        super().__init__(self.message)


class MultipleTaskFromSameConfigWithSameOwner(Exception):
    """Raised if there are multiple tasks from the same task configuration and the same owner identifier."""


class OrchestratorNotBuilt(Exception):
    """Raised if the orchestrator was not built in the OrchestratorFactory"""


class ModeNotAvailable(Exception):
    """Raised if the mode in JobConfig is not supported."""


class InvalidExportPath(Exception):
    """Raised if the export path is not valid."""


class NonExistingVersion(Exception):
    """Raised if request a Version that is not known by the Version Manager."""

    def __init__(self, version_number: str):
        self.message = f"Version '{version_number}' does not exist."
        super().__init__(self.message)


class VersionIsNotProductionVersion(Exception):
    """Raised if the version is not a production version."""


class ConflictedConfigurationError(Exception):
    """Conflicts have been detected between the current and previous Configurations."""


class InvalidEventAttributeName(Exception):
    """
    Raised if the attribute doesn't exist or an attribute name is provided
    when operation is either creation, deletion or submission
    """


class InvalidEventOperation(Exception):
    """Raised when operation doesn't belong to the entity"""


class FileCannotBeRead(Exception):
    """Raised when a file cannot be read."""


class _SuspiciousFileOperation(Exception):
    pass
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

from __future__ import annotations

import abc
from typing import Any, Callable, List, Optional, Set, Union

import networkx as nx

from ..common._listattributes import _ListAttributes
from ..common._utils import _Subscriber
from ..data.data_node import DataNode
from ..job.job import Job
from ..task.task import Task
from ._dag import _DAG


class Submittable:
    """Instance of an entity that can be submitted for execution.

    A submittable holds functions that can be used to build the execution directed acyclic graph.

    Attributes:
        subscribers (List[Callable]): The list of callbacks to be called on `Job^`'s status change.
    """

    def __init__(self, subscribers: Optional[List[_Subscriber]] = None):
        self._subscribers = _ListAttributes(self, subscribers or list())

    @abc.abstractmethod
    def submit(
        self,
        callbacks: Optional[List[Callable]] = None,
        force: bool = False,
        wait: bool = False,
        timeout: Optional[Union[float, int]] = None,
    ):
        raise NotImplementedError

    def get_inputs(self) -> Set[DataNode]:
        """Return the set of input data nodes of the submittable entity.

        Returns:
            The set of input data nodes.
        """
        dag = self._build_dag()
        return self.__get_inputs(dag)

    def __get_inputs(self, dag: nx.DiGraph) -> Set[DataNode]:
        # Inputs are data nodes with no incoming edge in the task/data-node DAG.
        return {node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)}

    def get_outputs(self) -> Set[DataNode]:
        """Return the set of output data nodes of the submittable entity.

        Returns:
            The set of output data nodes.
        """
        dag = self._build_dag()
        return self.__get_outputs(dag)

    def __get_outputs(self, dag: nx.DiGraph) -> Set[DataNode]:
        # Outputs are data nodes with no outgoing edge in the task/data-node DAG.
        return {node for node, degree in dict(dag.out_degree).items() if degree == 0 and isinstance(node, DataNode)}

    def get_intermediate(self) -> Set[DataNode]:
        """Return the set of intermediate data nodes of the submittable entity.

        Returns:
            The set of intermediate data nodes.
        """
        # Intermediate nodes = every data node in the DAG that is neither an
        # input nor an output.
        dag = self._build_dag()
        all_data_nodes_in_dag = {node for node in dag.nodes if isinstance(node, DataNode)}
        return all_data_nodes_in_dag - self.__get_inputs(dag) - self.__get_outputs(dag)

    def is_ready_to_run(self) -> bool:
        """Indicate if the entity is ready to be run.

        Returns:
            True if the given entity is ready to be run. False otherwise.
        """
        return all(dn.is_ready_for_reading for dn in self.get_inputs())

    def data_nodes_being_edited(self) -> Set[DataNode]:
        """Return the set of data nodes of the submittable entity that are being edited.

        Returns:
            The set of data nodes that are being edited.
        """
        dag = self._build_dag()
        return {node for node in dag.nodes if isinstance(node, DataNode) and node.edit_in_progress}

    @abc.abstractmethod
    def subscribe(self, callback: Callable[[Submittable, Job], None], params: Optional[List[Any]] = None):
        raise NotImplementedError

    @abc.abstractmethod
    def unsubscribe(self, callback: Callable[[Submittable, Job], None], params: Optional[List[Any]] = None):
        raise NotImplementedError

    @abc.abstractmethod
    def _get_set_of_tasks(self) -> Set[Task]:
        raise NotImplementedError

    def _get_dag(self) -> _DAG:
        return _DAG(self._build_dag())

    def _build_dag(self) -> nx.DiGraph:
        # Build a bipartite-ish DAG of data nodes and tasks: edges go
        # input data node -> task -> output data node. Tasks with neither
        # inputs nor outputs are added as isolated nodes.
        graph = nx.DiGraph()
        tasks = self._get_set_of_tasks()
        for task in tasks:
            if has_input := task.input:
                for predecessor in task.input.values():
                    graph.add_edges_from([(predecessor, task)])
            if has_output := task.output:
                for successor in task.output.values():
                    graph.add_edges_from([(task, successor)])
            if not has_input and not has_output:
                graph.add_node(task)
        return graph

    def _get_sorted_tasks(self) -> List[List[Task]]:
        # Topologically sort the DAG (after dropping input data nodes) and
        # keep only the generations that actually contain tasks.
        dag = self._build_dag()
        remove = [node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)]
        dag.remove_nodes_from(remove)
        return list(nodes for nodes in nx.topological_generations(dag) if (Task in (type(node) for node in nodes)))

    def _add_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
        params = [] if params is None else params
        self._subscribers.append(_Subscriber(callback=callback, params=params))

    def _remove_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
        # With params: remove the exact (callback, params) pair; without:
        # remove the first subscriber matching the callback, or raise ValueError.
        if params is not None:
            self._subscribers.remove(_Subscriber(callback, params))
        else:
            elem = [x for x in self._subscribers if x.callback == callback]
            if not elem:
                raise ValueError
            self._subscribers.remove(elem[0])
from typing import List

from .._entity._reload import _get_manager
from ..notification import Notifier


class _Entity:
    """Mixin turning an entity into a context manager that buffers changes.

    While inside a ``with entity:`` block, property changes and their
    notification events are collected instead of being applied immediately;
    they are flushed (persist once, then publish) on exit.
    """

    _MANAGER_NAME: str
    _is_in_context = False
    _in_context_attributes_changed_collector: List

    def __enter__(self):
        self._is_in_context = True
        self._in_context_attributes_changed_collector = list()
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # If multiple entities is in context, the last to enter will be the first to exit
        self._is_in_context = False
        if hasattr(self, "_properties"):
            # Apply the deletions and changes buffered by _Properties while
            # the entity was in context.
            for to_delete_key in self._properties._pending_deletions:
                self._properties.data.pop(to_delete_key, None)
            self._properties.data.update(self._properties._pending_changes)
        # Persist the entity once, then publish the collected events.
        # (The original persisted a second time after publishing, which was
        # redundant: no state changes between the two writes.)
        _get_manager(self._MANAGER_NAME)._set(self)
        for event in self._in_context_attributes_changed_collector:
            Notifier.publish(event)
from collections import UserDict

from ..notification import _ENTITY_TO_EVENT_ENTITY_TYPE, EventOperation, Notifier, _make_event


class _Properties(UserDict):
    """Dict of entity properties that publishes an UPDATE event on every change.

    When the owning entity is used as a context manager (`_Entity`), changes
    and deletions are buffered in `_pending_changes` / `_pending_deletions`
    and the events are collected on the owner, to be applied and published
    on context exit instead of immediately.
    """

    __PROPERTIES_ATTRIBUTE_NAME = "properties"

    def __init__(self, entity_owner, **kwargs):
        super().__init__(**kwargs)
        self._entity_owner = entity_owner
        self._pending_changes = {}
        self._pending_deletions = set()

    def __setitem__(self, key, value):
        super(_Properties, self).__setitem__(key, value)

        # Local import to avoid a circular import with the core package.
        from ... import core as tp

        # hasattr guard: UserDict.__init__ may call __setitem__ before
        # _entity_owner is assigned in our __init__.
        if hasattr(self, "_entity_owner"):
            event = _make_event(
                self._entity_owner,
                EventOperation.UPDATE,
                attribute_name=self.__PROPERTIES_ATTRIBUTE_NAME,
                attribute_value=value,
            )
            if not self._entity_owner._is_in_context:
                # Outside a context: persist and notify immediately.
                tp.set(self._entity_owner)
                Notifier.publish(event)
            else:
                # Inside a context: a set cancels any pending deletion of the
                # same key; defer the change and the event until __exit__.
                if key in self._pending_deletions:
                    self._pending_deletions.remove(key)
                self._pending_changes[key] = value
                self._entity_owner._in_context_attributes_changed_collector.append(event)

    def __getitem__(self, key):
        # Values may contain config templates; resolve them on read.
        from taipy.config.common._template_handler import _TemplateHandler as _tpl

        return _tpl._replace_templates(super(_Properties, self).__getitem__(key))

    def __delitem__(self, key):
        super(_Properties, self).__delitem__(key)

        # Local import to avoid a circular import with the core package.
        from ... import core as tp

        if hasattr(self, "_entity_owner"):
            event = _make_event(
                self._entity_owner,
                EventOperation.UPDATE,
                attribute_name=self.__PROPERTIES_ATTRIBUTE_NAME,
                attribute_value=None,
            )
            if not self._entity_owner._is_in_context:
                # Outside a context: persist and notify immediately.
                tp.set(self._entity_owner)
                Notifier.publish(event)
            else:
                # Inside a context: a deletion cancels any pending change of
                # the same key; defer the deletion and the event until __exit__.
                self._pending_changes.pop(key, None)
                self._pending_deletions.add(key)
                self._entity_owner._in_context_attributes_changed_collector.append(event)
from __future__ import annotations class _EntityIds: def __init__(self): self.data_node_ids = set() self.task_ids = set() self.scenario_ids = set() self.sequence_ids = set() self.job_ids = set() self.cycle_ids = set() self.submission_ids = set() def __add__(self, other: _EntityIds): self.data_node_ids.update(other.data_node_ids) self.task_ids.update(other.task_ids) self.scenario_ids.update(other.scenario_ids) self.sequence_ids.update(other.sequence_ids) self.job_ids.update(other.job_ids) self.cycle_ids.update(other.cycle_ids) self.submission_ids.update(other.submission_ids) return self def __iadd__(self, other: _EntityIds): self.__add__(other) return self
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import math
from functools import reduce
from typing import Any, Dict, List, Tuple

import networkx as nx


class _Node:
    """A graph node wrapping an entity with its (x, y) layout position."""

    def __init__(self, entity: Any, x, y):
        # Class name of the wrapped entity (used as the node "type" label).
        self.type = entity.__class__.__name__
        self.entity = entity
        # x is the topological level (column); y is the position within it.
        self.x = x
        self.y = y


class _Edge:
    """A directed edge between two positioned nodes."""

    def __init__(self, src: _Node, dest: _Node):
        self.src = src
        self.dest = dest


class _DAG:
    """Computes a 2-D grid layout for a DAG.

    Nodes are grouped by topological generation: each generation becomes one
    column (x), and nodes within a generation are spread vertically (y) over a
    common grid whose height is chosen so every generation can be distributed
    evenly.
    """

    def __init__(self, dag: nx.DiGraph):
        # One list of nodes per topological generation (column order).
        self._sorted_nodes = list(nodes for nodes in nx.topological_generations(dag))
        # length = number of generations; width = size of the largest generation.
        self._length, self._width = self.__compute_size()
        self._grid_length, self._grid_width = self.__compute_grid_size()
        self._nodes = self.__compute_nodes()
        self._edges = self.__compute_edges(dag)

    @property
    def width(self) -> int:
        return self._width

    @property
    def length(self) -> int:
        return self._length

    @property
    def nodes(self) -> Dict[str, _Node]:
        return self._nodes

    @property
    def edges(self) -> List[_Edge]:
        return self._edges

    def __compute_size(self) -> Tuple[int, int]:
        # (number of generations, size of the largest generation)
        return len(self._sorted_nodes), max([len(i) for i in self._sorted_nodes])

    def __compute_grid_size(self) -> Tuple[int, int]:
        # Grid height is 1 + lcm of (len+1) for narrower generations and
        # (len-1) for the widest one, so each generation's nodes land on
        # integer grid rows when spread evenly.
        if self._width == 1:
            grd_wdt = 1
        else:
            grd_wdt = self.__lcm(*[len(i) + 1 if len(i) != self._width else len(i) - 1 for i in self._sorted_nodes]) + 1
        return len(self._sorted_nodes), grd_wdt

    def __compute_nodes(self) -> Dict[str, _Node]:
        nodes = {}
        x = 0
        for same_lvl_nodes in self._sorted_nodes:
            lcl_wdt = len(same_lvl_nodes)
            # NOTE(review): despite its name, `is_max` is True when this
            # generation is NOT the widest one — confirm intent before renaming.
            is_max = lcl_wdt != self.width
            if self.width != 1:
                # Narrower generations are centered (len+1 gaps); the widest one
                # spans the full height (len-1 gaps).
                y_incr = (self._grid_width - 1) / (lcl_wdt + 1) if is_max else (self._grid_width - 1) / (lcl_wdt - 1)
            else:
                y_incr = 1
            # Start offset so the first `y += y_incr` lands on the first row.
            y = 0 if is_max else -y_incr
            for node in same_lvl_nodes:
                y += y_incr
                nodes[node.id] = _Node(node, x, y)
            x += 1
        return nodes

    def __compute_edges(self, dag) -> List[_Edge]:
        # Map each graph edge to the positioned _Node pair by entity id.
        edges = []
        for edge in dag.edges():
            edges.append(_Edge(self.nodes[edge[0].id], self.nodes[edge[1].id]))
        return edges

    @staticmethod
    def __lcm(*integers) -> int:
        # Function math.lcm is only implemented for Python 3.9+
        # For compatibility with Python 3.8 it has been re implemented.
        if 0 in integers:
            return 0
        return reduce(lambda x, y: (x * y) // math.gcd(x, y), integers)
import sys
from typing import List

from taipy._cli._base_cli import _CLI
from taipy.logger._taipy_logger import _TaipyLogger

from ._migrate import (
    _migrate_fs_entities,
    _migrate_mongo_entities,
    _migrate_sql_entities,
    _remove_backup_file_entities,
    _remove_backup_mongo_entities,
    _remove_backup_sql_entities,
    _restore_migrate_file_entities,
    _restore_migrate_mongo_entities,
    _restore_migrate_sql_entities,
)


class _MigrateCLI:
    """CLI subcommand `migrate`: migrate, restore, or clean up entity backups
    across the three repository types (filesystem, sql, mongo)."""

    __logger = _TaipyLogger._get_logger()

    @classmethod
    def create_parser(cls):
        """Register the `migrate` subparser and its arguments on the shared CLI."""
        migrate_parser = _CLI._add_subparser(
            "migrate",
            help="Migrate entities created from old taipy versions to be compatible with the current taipy version. "
            " The entity migration should be performed only after updating taipy code to the current version.",
        )
        # First value is the repository type; remaining values are type-specific
        # (path for filesystem/sql, host/port/user/password for mongo).
        migrate_parser.add_argument(
            "--repository-type",
            required=True,
            nargs="+",
            help="The type of repository to migrate. If filesystem or sql, a path to the database folder/.sqlite file "
            "should be informed. In case of mongo host, port, user and password must be informed, if left empty it "
            "is assumed default values",
        )
        migrate_parser.add_argument(
            "--skip-backup",
            action="store_true",
            help="Skip the backup of entities before migration.",
        )
        migrate_parser.add_argument(
            "--restore",
            action="store_true",
            help="Restore the migration of entities from backup folder. Only use this option if the migration was successful.",
        )
        migrate_parser.add_argument(
            "--remove-backup",
            action="store_true",
            help="Remove the backup of entities. Only use this option if the migration was successful.",
        )

    @classmethod
    def parse_arguments(cls):
        """Dispatch the parsed `migrate` arguments; exits the process when done."""
        args = _CLI._parse()

        if getattr(args, "which", None) != "migrate":
            return

        repository_type = args.repository_type[0]
        repository_args = args.repository_type[1:] if len(args.repository_type) > 1 else [None]

        # Restore/remove handlers terminate the process via sys.exit, so at
        # most one of the three actions below actually runs.
        if args.restore:
            cls.__handle_restore_backup(repository_type, repository_args)
        if args.remove_backup:
            cls.__handle_remove_backup(repository_type, repository_args)

        do_backup = not args.skip_backup
        cls.__migrate_entities(repository_type, repository_args, do_backup)
        sys.exit(0)

    @classmethod
    def __handle_remove_backup(cls, repository_type: str, repository_args: List):
        """Delete the pre-migration backup for the given repository; always exits."""
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _remove_backup_file_entities(path):
                sys.exit(1)
        elif repository_type == "sql":
            if not _remove_backup_sql_entities(repository_args[0]):
                sys.exit(1)
        elif repository_type == "mongo":
            if not _remove_backup_mongo_entities():
                sys.exit(1)
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)

        sys.exit(0)

    @classmethod
    def __handle_restore_backup(cls, repository_type: str, repository_args: List):
        """Restore entities from the pre-migration backup; always exits."""
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _restore_migrate_file_entities(path):
                sys.exit(1)
        elif repository_type == "sql":
            if not _restore_migrate_sql_entities(repository_args[0]):
                sys.exit(1)
        elif repository_type == "mongo":
            mongo_args = repository_args[1:5] if repository_args[0] else []
            if not _restore_migrate_mongo_entities(*mongo_args):
                sys.exit(1)
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)

        sys.exit(0)

    @classmethod
    def __migrate_entities(cls, repository_type: str, repository_args: List, do_backup: bool):
        """Run the migration for the given repository type; exits with 1 on failure."""
        if repository_type == "filesystem":
            path = repository_args[0] or ".data"
            if not _migrate_fs_entities(path, do_backup):
                sys.exit(1)
        elif repository_type == "sql":
            if not _migrate_sql_entities(repository_args[0], do_backup):
                sys.exit(1)
        elif repository_type == "mongo":
            mongo_args = repository_args[1:5] if repository_args[0] else []
            _migrate_mongo_entities(*mongo_args, backup=do_backup)  # type: ignore
        else:
            cls.__logger.error(f"Unknown repository type {repository_type}")
            sys.exit(1)
import functools

from ..notification import EventOperation, Notifier, _make_event


class _Reloader:
    """The _Reloader singleton class.

    Reloads an entity from its manager so decorated accessors always work on
    fresh state; also usable as a context manager to temporarily disable
    reloading.
    """

    _instance = None
    # When True (inside a `with _Reloader():` block), _reload is a no-op.
    _no_reload_context = False

    def __new__(class_, *args, **kwargs):
        # Classic singleton: create the instance once, then always return it.
        if not isinstance(class_._instance, class_):
            class_._instance = object.__new__(class_, *args, **kwargs)
        return class_._instance

    def _reload(self, manager: str, obj):
        """Return the freshly-loaded counterpart of `obj`, or `obj` itself
        when reloading is disabled or the manager does not know it."""
        if self._no_reload_context:
            return obj

        # Falls back to `obj` when the manager has no stored version.
        entity = _get_manager(manager)._get(obj, obj)
        if obj._is_in_context and hasattr(entity, "_properties"):
            # Carry the in-context pending property buffers over to the
            # reloaded instance so buffered changes are not lost.
            if obj._properties._pending_changes:
                entity._properties._pending_changes = obj._properties._pending_changes
            if obj._properties._pending_deletions:
                entity._properties._pending_deletions = obj._properties._pending_deletions
            entity._properties._entity_owner = obj
        return entity

    def __enter__(self):
        self._no_reload_context = True
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self._no_reload_context = False


def _self_reload(manager):
    """Method decorator: replace `self` with its reloaded version before the call."""

    def __reload(fct):
        @functools.wraps(fct)
        def _do_reload(self, *args, **kwargs):
            self = _Reloader()._reload(manager, self)
            return fct(self, *args, **kwargs)

        return _do_reload

    return __reload


def _self_setter(manager):
    """Method decorator for setters: apply the change, then either persist and
    notify immediately, or buffer the event when `self` is inside a context block."""

    def __set_entity(fct):
        @functools.wraps(fct)
        def _do_set_entity(self, *args, **kwargs):
            # Apply the setter to the in-memory instance first.
            fct(self, *args, **kwargs)
            entity_manager = _get_manager(manager)
            if len(args) == 1:
                value = args[0]
            else:
                value = args
            event = _make_event(
                self,
                EventOperation.UPDATE,
                attribute_name=fct.__name__,
                attribute_value=value,
            )
            if not self._is_in_context:
                # Re-apply on the freshly-loaded entity, persist it, and notify.
                entity = _Reloader()._reload(manager, self)
                fct(entity, *args, **kwargs)
                entity_manager._set(entity)
                Notifier.publish(event)
            else:
                # In context: buffer the event; the entity's __exit__ flushes it.
                self._in_context_attributes_changed_collector.append(event)

        return _do_set_entity

    return __set_entity


@functools.lru_cache
def _get_manager(manager: str):
    """Return the manager singleton for the given entity kind (cached per kind)."""
    from ..cycle._cycle_manager_factory import _CycleManagerFactory
    from ..data._data_manager_factory import _DataManagerFactory
    from ..job._job_manager_factory import _JobManagerFactory
    from ..scenario._scenario_manager_factory import _ScenarioManagerFactory
    from ..sequence._sequence_manager_factory import _SequenceManagerFactory
    from ..submission._submission_manager_factory import _SubmissionManagerFactory
    from ..task._task_manager_factory import _TaskManagerFactory

    return {
        "scenario": _ScenarioManagerFactory._build_manager(),
        "sequence": _SequenceManagerFactory._build_manager(),
        "data": _DataManagerFactory._build_manager(),
        "cycle": _CycleManagerFactory._build_manager(),
        "job": _JobManagerFactory._build_manager(),
        "task": _TaskManagerFactory._build_manager(),
        "submission": _SubmissionManagerFactory._build_manager(),
    }[manager]
# # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. import abc from typing import Optional class _Labeled: __LABEL_SEPARATOR = " > " @abc.abstractmethod def get_label(self) -> str: raise NotImplementedError def _get_label(self) -> str: """Returns the entity label made of the simple label prefixed by the owner label. Returns: The label of the entity as a string. """ return self._get_explicit_label() or self._generate_label() @abc.abstractmethod def get_simple_label(self) -> str: raise NotImplementedError def _get_simple_label(self) -> str: """Returns the simple label. Returns: The simple label of the entity as a string. """ return self._get_explicit_label() or self._generate_label(True) def _generate_label(self, simple=False) -> str: ls = [] if not simple: if owner_id := self._get_owner_id(): if getattr(self, "id") != owner_id: from ... 
import core as tp owner = tp.get(owner_id) ls.append(owner.get_label()) ls.append(self._generate_entity_label()) return self.__LABEL_SEPARATOR.join(ls) def _get_explicit_label(self) -> Optional[str]: if hasattr(self, "_properties"): return getattr(self, "_properties").get("label") return None def _get_owner_id(self) -> Optional[str]: if hasattr(self, "owner_id"): return getattr(self, "owner_id") return None def _get_name(self) -> Optional[str]: if hasattr(self, "name"): return getattr(self, "name") if hasattr(self, "_properties"): return getattr(self, "_properties").get("name") return None def _get_config_id(self) -> Optional[str]: if hasattr(self, "config_id"): return getattr(self, "config_id") return None def _generate_entity_label(self) -> str: if name := self._get_name(): return name if config_id := self._get_config_id(): return config_id return getattr(self, "id")
import os
import shutil
from functools import lru_cache
from typing import Dict

import bson
import pymongo

from taipy.logger._taipy_logger import _TaipyLogger

from ._utils import _migrate

__logger = _TaipyLogger._get_logger()

# Collections of the old data model ("pipeline" existed before migration).
OLD_COLLECTIONS = [
    "cycle",
    "scenario",
    "pipeline",
    "task",
    "data_node",
    "job",
    "version",
]

# Collections of the current data model (no "pipeline").
NEW_COLLECTIONS = [
    "cycle",
    "scenario",
    "task",
    "data_node",
    "job",
    "version",
]

DATABASE_NAME = "taipy"
MONGO_BACKUP_FOLDER = ".mongo_backup"


@lru_cache
def _connect_mongodb(db_host: str, db_port: int, db_username: str, db_password: str) -> pymongo.MongoClient:
    """Return a MongoDB client for the given parameters (cached per argument set)."""
    auth_str = ""
    if db_username and db_password:
        auth_str = f"{db_username}:{db_password}@"

    connection_string = f"mongodb://{auth_str}{db_host}:{db_port}"

    return pymongo.MongoClient(connection_string)


def __load_all_entities_from_mongo(
    hostname: str,
    port: int,
    user: str,
    password: str,
):
    """Load every document of the old collections, keyed by its "id" field."""
    client = _connect_mongodb(hostname, port, user, password)
    entities = {}
    for collection in OLD_COLLECTIONS:
        db = client[DATABASE_NAME]
        cursor = db[collection].find({})
        for document in cursor:
            entities[document["id"]] = {"data": document}
    return entities


def __write_entities_to_mongo(
    _entities: Dict,
    hostname: str,
    port: int,
    user: str,
    password: str,
):
    """Insert migrated documents back, routing each one by its id.

    NOTE(review): routing uses `collection in entity["data"]["id"]` — i.e. the
    collection name must appear as a substring of the document id. Verify that
    document ids actually contain these collection names; otherwise some
    documents would be silently dropped.
    """
    client = _connect_mongodb(hostname, port, user, password)
    for collection in NEW_COLLECTIONS:
        db = client[DATABASE_NAME]
        db[collection].insert_many(
            [entity["data"] for entity in _entities.values() if collection in entity["data"]["id"]]
        )


def _backup_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
) -> bool:
    """Dump every old collection to BSON files under MONGO_BACKUP_FOLDER."""
    client = _connect_mongodb(hostname, port, user, password)
    db = client[DATABASE_NAME]

    if not os.path.exists(MONGO_BACKUP_FOLDER):
        os.makedirs(MONGO_BACKUP_FOLDER, exist_ok=True)

    for collection in OLD_COLLECTIONS:
        # One .bson file per collection, documents concatenated back to back.
        with open(os.path.join(MONGO_BACKUP_FOLDER, f"{collection}.bson"), "wb+") as f:
            for doc in db[collection].find():
                f.write(bson.BSON.encode(doc))

    __logger.info(f"Backed up entities to folder '{MONGO_BACKUP_FOLDER}' before migration.")
    return True


def _restore_migrate_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
) -> bool:
    """Re-insert the backed-up BSON dumps, then delete the backup folder."""
    client = _connect_mongodb(hostname, port, user, password)
    db = client[DATABASE_NAME]

    if not os.path.isdir(MONGO_BACKUP_FOLDER):
        __logger.info(f"The backup folder '{MONGO_BACKUP_FOLDER}' does not exist.")
        return False

    for collection in os.listdir(MONGO_BACKUP_FOLDER):
        if collection.endswith(".bson"):
            with open(os.path.join(MONGO_BACKUP_FOLDER, collection), "rb+") as f:
                # Skip empty dumps; collection name is the file stem.
                if bson_data := bson.decode_all(f.read()):  # type: ignore
                    db[collection.split(".")[0]].insert_many(bson_data)

    shutil.rmtree(MONGO_BACKUP_FOLDER)
    __logger.info(f"Restored entities from the backup folder '{MONGO_BACKUP_FOLDER}'.")
    return True


def _remove_backup_mongo_entities() -> bool:
    """Delete the backup folder; return False when there is none."""
    if not os.path.isdir(MONGO_BACKUP_FOLDER):
        __logger.info(f"The backup folder '{MONGO_BACKUP_FOLDER}' does not exist.")
        return False

    shutil.rmtree(MONGO_BACKUP_FOLDER)
    __logger.info(f"Removed backup entities from the backup folder '{MONGO_BACKUP_FOLDER}'.")
    return True


def _migrate_mongo_entities(
    hostname: str = "localhost",
    port: int = 27017,
    user: str = "",
    password: str = "",
    backup: bool = True,
) -> bool:
    """Migrate entities from mongodb to the current version.

    Args:
        hostname (str, optional): The hostname of the mongodb. Defaults to "localhost".
        port (int, optional): The port of the mongodb. Defaults to 27017.
        user (str, optional): The username of the mongodb. Defaults to "".
        password (str, optional): The password of the mongodb. Defaults to "".
        backup (bool, optional): Whether to backup the entities before migrating. Defaults to True.

    Returns:
        bool: True if the migration was successful, False otherwise.
    """
    if backup:
        _backup_mongo_entities(hostname=hostname, port=port, user=user, password=password)

    __logger.info(f"Starting entity migration from MongoDB {hostname}:{port}")

    entities = __load_all_entities_from_mongo(hostname, port, user, password)

    entities, _ = _migrate(entities)
    __write_entities_to_mongo(entities, hostname, port, user, password)

    __logger.info("Migration finished")
    return True
from ._migrate_fs import _migrate_fs_entities, _remove_backup_file_entities, _restore_migrate_file_entities from ._migrate_mongo import _migrate_mongo_entities, _remove_backup_mongo_entities, _restore_migrate_mongo_entities from ._migrate_sql import _migrate_sql_entities, _remove_backup_sql_entities, _restore_migrate_sql_entities
import json
import os
import shutil
from typing import Dict

from taipy.logger._taipy_logger import _TaipyLogger

from ._utils import _migrate

__logger = _TaipyLogger._get_logger()


def _load_all_entities_from_fs(root: str) -> Dict:
    """Walk `root` and load every .json entity file.

    Returns a dict mapping entity id (file stem, with a "VERSION_" prefix for
    files under a version folder) to {"data": parsed json, "path": file path}.
    """
    entities = {}
    # Note: do not shadow the `root` parameter with the walk variable.
    for dirpath, _, files in os.walk(root):
        for file in files:
            if not file.endswith(".json"):
                continue
            file_path = os.path.join(dirpath, file)
            _id = file.split(".")[0]
            # Version files are stored by raw id; re-add the VERSION_ prefix.
            if "version" in dirpath:
                _id = f"VERSION_{_id}"
            with open(file_path) as f:
                entities[_id] = {
                    "data": json.load(f),
                    "path": file_path,
                }
    return entities


def __write_entities_to_fs(_entities: Dict, root: str):
    """Write migrated entities back to their original paths; drop pipelines."""
    if not os.path.exists(root):
        os.makedirs(root, exist_ok=True)

    for _id, entity in _entities.items():
        # Do not write pipeline entities (removed from the data model).
        if "PIPELINE" in _id:
            continue
        with open(entity["path"], "w") as f:
            json.dump(entity["data"], f, indent=0)

    # Remove the now-obsolete pipelines folder.
    pipelines_path = os.path.join(root, "pipelines")
    if os.path.exists(pipelines_path):
        shutil.rmtree(pipelines_path)


def _restore_migrate_file_entities(path: str) -> bool:
    """Replace `path` with its pre-migration backup folder."""
    backup_path = f"{path}_backup"
    if not os.path.exists(backup_path):
        __logger.error(f"The backup folder '{backup_path}' does not exist.")
        return False

    if os.path.exists(path):
        shutil.rmtree(path)
    else:
        __logger.warning(f"The original entities folder '{path}' does not exist.")

    os.rename(backup_path, path)
    __logger.info(f"Restored entities from the backup folder '{backup_path}' to '{path}'.")
    return True


def _remove_backup_file_entities(path: str) -> bool:
    """Delete the pre-migration backup folder; return False when missing."""
    backup_path = f"{path}_backup"
    if not os.path.exists(backup_path):
        __logger.error(f"The backup folder '{backup_path}' does not exist.")
        return False

    shutil.rmtree(backup_path)
    __logger.info(f"Removed backup entities from the backup folder '{backup_path}'.")
    return True


def _migrate_fs_entities(path: str, backup: bool = True) -> bool:
    """Migrate entities from filesystem to the current version.

    Args:
        path (str): The path to the folder containing the entities.
        backup (bool, optional): Whether to backup the entities before migrating. Defaults to True.

    Returns:
        bool: True if the migration was successful, False otherwise.
    """
    if not os.path.isdir(path):
        __logger.error(f"Folder '{path}' does not exist.")
        return False

    if backup:
        backup_path = f"{path}_backup"
        try:
            shutil.copytree(path, backup_path)
        except FileExistsError:
            # A stale backup means a previous migration may not have finished.
            __logger.warning(f"The backup folder '{backup_path}' already exists. Migration canceled.")
            return False
        else:
            __logger.info(f"Backed up entities from '{path}' to '{backup_path}' folder before migration.")

    __logger.info(f"Starting entity migration from '{path}' folder.")

    entities = _load_all_entities_from_fs(path)
    entities, _ = _migrate(entities)
    __write_entities_to_fs(entities, path)

    __logger.info("Migration finished")
    return True
import json
from typing import Dict, List, Optional, Tuple

from taipy.logger._taipy_logger import _TaipyLogger

__logger = _TaipyLogger._get_logger()


def __update_parent_ids(entity: Dict, data: Dict) -> Dict:
    """Backfill `parent_ids` (absent in 2.0) by scanning the other entities."""
    parent_ids = entity.get("parent_ids", [])
    if not parent_ids:
        parent_ids = __search_parent_ids(entity["id"], data)
    entity["parent_ids"] = parent_ids
    return entity


def __update_config_parent_ids(id: str, entity: Dict, entity_type: str, config: Dict) -> Dict:
    """Backfill `parent_ids` (absent in 2.0) for a config section."""
    parent_ids = entity.get("parent_ids", [])
    if not parent_ids:
        parent_ids = __search_parent_config(id, config, entity_type)
    entity["parent_ids"] = parent_ids
    return entity


def __search_parent_ids(entity_id: str, data: Dict) -> List:
    """Find the sorted ids of entities that reference `entity_id` as a child."""
    parents = []
    entity_type = entity_id.split("_", 1)[0]
    for _id, entity_data in data.items():
        entity_data = entity_data["data"]
        # A data node's parents are the tasks using it as input or output.
        if entity_type == "DATANODE" and "TASK" in _id:
            if entity_id in entity_data["input_ids"] or entity_id in entity_data["output_ids"]:
                parents.append(_id)
        # A task's parents are the scenarios containing it.
        if entity_type == "TASK" and "SCENARIO" in _id:
            if entity_id in entity_data["tasks"]:
                parents.append(_id)
    parents.sort()
    return parents


def __search_parent_config(entity_id: str, config: Dict, entity_type: str) -> List:
    """Find the sorted section ids referencing `entity_id` in the config.

    NOTE(review): `possible_parents` is chosen on entity_type == "DATA_NODE"
    while the branch below tests entity_type == "DATANODE" — these can never
    both be true for the same input; confirm which spelling callers use.
    """
    parents = []
    possible_parents = "TASK" if entity_type == "DATA_NODE" else "SCENARIO"
    data = config[possible_parents]
    for _id, entity_data in data.items():
        section_id = f"{entity_id}:SECTION"
        if entity_type == "DATANODE" and possible_parents == "TASK":
            if section_id in entity_data["input_ids"] or section_id in entity_data["output_ids"]:
                parents.append(section_id)
        if entity_type == "TASK" and possible_parents == "SCENARIO":
            if section_id in entity_data["tasks"]:
                parents.append(section_id)
    parents.sort()
    return parents


def __fetch_tasks_from_pipelines(pipelines: List, data: Dict) -> List:
    """Collect the task ids of every pipeline in `pipelines`."""
    tasks = []
    for pipeline in pipelines:
        pipeline_data = data[pipeline]["data"]
        tasks.extend(pipeline_data["tasks"])
    return tasks


def __migrate_subscriber(fct_module, fct_name):
    """Rename scheduler by orchestrator on old jobs. Used to migrate from <=2.2 to >=2.3 version."""
    if fct_module == "taipy.core._scheduler._scheduler":
        fct_module = fct_module.replace("_scheduler", "_orchestrator")
        fct_name = fct_name.replace("_Scheduler", "_Orchestrator")
    return fct_module, fct_name


def __migrate_scenario(scenario: Dict, data: Dict) -> Dict:
    """Flatten a scenario's pipelines into a direct task list."""
    # pipelines were replaced by tasks
    scenario["tasks"] = __fetch_tasks_from_pipelines(scenario["pipelines"], data)

    # pipeline attribute not removed in 3.0
    scenario["pipelines"] = None

    # additional_data_nodes attribute added
    scenario["additional_data_nodes"] = []
    return scenario


def __is_cacheable(task: Dict, data: Dict) -> bool:
    """True when every output of the task was marked cacheable (pre-3.0 flag)."""
    output_ids = task.get("output_ids", []) or task.get("outputs", [])  # output_ids is on entity, outputs is on config
    for output_id in output_ids:
        if output_id.endswith(":SECTION"):  # Get the config_id if the task is a Config
            output_id = output_id.split(":")[0]
        dn = data.get(output_id, {})
        if "data" in dn:
            dn = dn.get("data", {})
        if "cacheable" not in dn or not dn["cacheable"] or dn["cacheable"] == "False:bool":
            return False
    return True


def __migrate_task(task: Dict, data: Dict, is_entity: bool = True) -> Dict:
    """Common task migration: rename parent_id, backfill properties/skippable."""
    if is_entity:
        # parent_id has been renamed to owner_id
        try:
            task["owner_id"] = task["parent_id"]
            del task["parent_id"]
        except KeyError:
            pass

    # properties was not present in 2.0
    task["properties"] = task.get("properties", {})

    # skippable was not present in 2.0; derive it from output cacheability
    task["skippable"] = task.get("skippable", False) or __is_cacheable(task, data)
    return task


def __migrate_task_entity(task: Dict, data: Dict) -> Dict:
    task = __update_parent_ids(task, data)
    return __migrate_task(task, data)


def __migrate_task_config(task: Dict, config: Dict) -> Dict:
    task = __migrate_task(task, config["DATA_NODE"], False)

    # Convert the skippable boolean to a string if needed
    if isinstance(task.get("skippable"), bool):
        task["skippable"] = str(task["skippable"]) + ":bool"
    return task


def __update_scope(scope: str):
    """Shift Scope enum values down by one after the PIPELINE scope removal."""
    # Fixed: the first branch used `in` (substring membership) instead of `==`,
    # so any substring of "<Scope.SCENARIO: 2>" would have matched.
    if scope == "<Scope.SCENARIO: 2>":
        return "<Scope.SCENARIO: 1>"
    elif scope == "<Scope.CYCLE: 3>":
        return "<Scope.CYCLE: 2>"
    elif scope == "<Scope.GLOBAL: 4>":
        return "<Scope.GLOBAL: 3>"
    return scope


def __migrate_datanode(datanode: Dict) -> Dict:
    """Common data node migration (entity side)."""
    # cacheable was removed in after 2.0
    datanode.pop("cacheable", False)

    # job_ids was replaced by edits
    if "job_ids" in datanode:
        datanode["edits"] = [{"job_id": job, "timestamp": datanode["last_edit_date"]} for job in datanode["job_ids"]]
    elif "edits" in datanode:
        # make sure timestamp inside edits is a string
        edits = []
        for edit in datanode["edits"]:
            timestamp = edit.get("timestamp")
            if isinstance(timestamp, dict):
                timestamp = timestamp.get("__value__")
            new_edit = {"timestamp": timestamp}
            if "job_id" in edit:
                new_edit["job_id"] = edit["job_id"]
            edits.append(new_edit)
        datanode["edits"] = edits

    # parent_id has been renamed to owner_id
    try:
        datanode["owner_id"] = datanode["parent_id"]
        del datanode["parent_id"]
    except KeyError:
        pass

    # Update Scope enum after Pipeline removal
    datanode["scope"] = __update_scope(datanode["scope"])

    # Move the name attribute into the properties dictionary
    datanode["data_node_properties"]["name"] = datanode.pop("name", None)

    # last_edition_date / edition_in_progress were renamed
    if "last_edit_date" not in datanode:
        datanode["last_edit_date"] = datanode.get("last_edition_date")
        if "last_edition_date" in datanode:
            del datanode["last_edition_date"]

    if "edit_in_progress" not in datanode:
        datanode["edit_in_progress"] = datanode.get("edition_in_progress")
        if "edition_in_progress" in datanode:
            del datanode["edition_in_progress"]

    return datanode


def __migrate_datanode_entity(datanode: Dict, data: Dict) -> Dict:
    datanode = __update_parent_ids(datanode, data)
    return __migrate_datanode(datanode)


def __migrate_datanode_config(datanode: Dict) -> Dict:
    # Text-based storage types gain an explicit default encoding.
    if datanode["storage_type"] in ["csv", "json"]:
        datanode["encoding"] = "utf-8"
    return datanode


def __migrate_job(job: Dict) -> Dict:
    # submit_entity_id was not present before 3.0
    job["submit_entity_id"] = job.get("submit_entity_id", None)
    if "subscribers" in job:
        for sub in job["subscribers"]:
            sub["fct_module"], sub["fct_name"] = __migrate_subscriber(sub["fct_module"], sub["fct_name"])
    return job


def __migrate_global_config(config: Dict):
    """Drop obsolete global fields and move the rest from TAIPY to CORE."""
    fields_to_remove = ["clean_entities_enabled"]
    fields_to_move = ["root_folder", "storage_folder", "repository_type", "read_entity_retry"]

    for field in fields_to_remove:
        if field in config["TAIPY"]:
            del config["TAIPY"][field]
    try:
        for field in fields_to_move:
            if field not in config["CORE"]:
                config["CORE"][field] = config["TAIPY"][field]
                del config["TAIPY"][field]
    except KeyError:
        # A missing TAIPY/CORE section aborts the move; best effort.
        pass
    return config


def __migrate_version(version: Dict) -> Dict:
    """Migrate the JSON config embedded in a version entity."""
    config_str = version["config"]

    # Remove PIPELINE scope
    config_str = config_str.replace("PIPELINE:SCOPE", "SCENARIO:SCOPE")
    config = json.loads(config_str)

    # remove unused fields and move others from TAIPY to CORE section
    config = __migrate_global_config(config)

    # replace pipelines for tasks
    # NOTE(review): when a scenario has several pipelines, only the last
    # pipeline's tasks are kept (tasks is reassigned, not extended) — confirm
    # whether multi-pipeline scenarios must be merged instead.
    pipelines_section = config["PIPELINE"]
    for id, content in config["SCENARIO"].items():
        tasks = []
        for _pipeline in content["pipelines"]:
            pipeline_id = _pipeline.split(":")[0]
            tasks = pipelines_section[pipeline_id]["tasks"]
        config["SCENARIO"][id]["tasks"] = tasks
        del config["SCENARIO"][id]["pipelines"]

    for id, content in config["TASK"].items():
        config["TASK"][id] = __migrate_task_config(content, config)

    for id, content in config["DATA_NODE"].items():
        config["DATA_NODE"][id] = __migrate_datanode_config(content)

    del config["PIPELINE"]

    version["config"] = json.dumps(config, ensure_ascii=False, indent=0)
    return version


def __migrate_entities(entity_type: str, data: Dict) -> Dict:
    """Apply the per-type migration function to every matching entity in `data`."""
    migration_fct = FCT_MIGRATION_MAP.get(entity_type)
    _entities = {k: data[k] for k in data if entity_type in k}

    for k, v in _entities.items():
        # JOB and VERSION migrations do not need the full entity map.
        if entity_type in ["JOB", "VERSION"]:
            v["data"] = migration_fct(v["data"])  # type: ignore
        else:
            v["data"] = migration_fct(v["data"], data)  # type: ignore
        data[k] = v
    return data


# Dispatch table: entity-type prefix -> migration function.
FCT_MIGRATION_MAP = {
    "SCENARIO": __migrate_scenario,
    "TASK": __migrate_task_entity,
    "DATANODE": __migrate_datanode_entity,
    "JOB": __migrate_job,
    "VERSION": __migrate_version,
}


def _migrate(entities: Dict, versions: Optional[Dict] = None) -> Tuple[Dict, Dict]:
    """Migrate all entities (and optionally a separate versions map) in place.

    Returns the migrated (entities, versions) pair; versions is empty when
    version entities live inside `entities`.
    """
    __logger.info("Migrating SCENARIOS")
    entities = __migrate_entities("SCENARIO", entities)

    __logger.info("Migrating TASKS")
    entities = __migrate_entities("TASK", entities)

    __logger.info("Migrating DATANODES")
    entities = __migrate_entities("DATANODE", entities)

    __logger.info("Migrating JOBS")
    entities = __migrate_entities("JOB", entities)

    __logger.info("Migrating VERSION")
    if versions:
        versions = __migrate_entities("VERSION", versions)
    else:
        entities = __migrate_entities("VERSION", entities)
        versions = {}
    return entities, versions
import json import os import shutil import sqlite3 from typing import Dict, Tuple from taipy.logger._taipy_logger import _TaipyLogger from ._utils import _migrate __logger = _TaipyLogger._get_logger() def _load_all_entities_from_sql(db_file: str) -> Tuple[Dict, Dict]: conn = sqlite3.connect(db_file) query = "SELECT model_id, document FROM taipy_model" query_version = "SELECT * FROM taipy_version" cursor = conn.execute(query) entities = {} versions = {} for row in cursor: _id = row[0] document = row[1] entities[_id] = {"data": json.loads(document)} cursor = conn.execute(query_version) for row in cursor: id = row[0] config_id = row[1] creation_date = row[2] is_production = row[3] is_development = row[4] is_latest = row[5] versions[id] = { "config_id": config_id, "creation_date": creation_date, "is_production": is_production, "is_development": is_development, "is_latest": is_latest, } return entities, versions def __insert_scenario(scenario: dict, conn): query = f""" INSERT INTO scenario (id, config_id, tasks, additional_data_nodes, creation_date, primary_scenario, subscribers, tags, version, pipelines, cycle) VALUES ({scenario['id']}, {scenario['config_id']}, {scenario['tasks']}, {scenario['additional_data_nodes']}, {scenario['creation_date']}, {scenario['primary_scenario']}, {scenario['subscribers']}, {scenario['tags']}, {scenario['version']}, {scenario['pipelines']}, {scenario['cycle']}) """ conn.execute(query) conn.commit() def __insert_task(task: dict, conn): query = f""" INSERT INTO task (id, owner_id, parent_ids, config_id, input_ids, function_name, function_module, output_ids, version, skippable, properties) VALUES ({task['id']}, {task['owner_id']}, {task['parent_ids']}, {task['config_id']}, {task['input_ids']}, {task['function_name']}, {task['function_module']}, {task['output_ids']}, {task['version']}, {task['skippable']}, {task['properties']}) """ conn.execute(query) conn.commit() def __insert_datanode(datanode: dict, conn): query = f""" INSERT INTO 
data_node (id, config_id, scope, storage_type, name, owner_id, parent_ids, last_edit_date, edits, version, validity_days, validity_seconds, edit_in_progress, data_node_properties) VALUES ({datanode['id']}, {datanode['config_id']}, {datanode['scope']}, {datanode['storage_type']}, {datanode['name']}, {datanode['owner_id']}, {datanode['parent_ids']}, {datanode['last_edit_date']}, {datanode['edits']}, {datanode['version']}, {datanode['validity_days']}, {datanode['validity_seconds']}, {datanode['edit_in_progress']}, {datanode['data_node_properties']}) """ conn.execute(query) conn.commit() def __insert_job(job: dict, conn): query = f""" INSERT INTO job (id, task_id, status, force, submit_id, submit_entity_id, creation_date, subscribers, stacktrace, version) VALUES ({job['id']}, {job['task_id']}, {job['status']}, {job['force']}, {job['submit_id']}, {job['submit_entity_id']}, {job['creation_date']}, {job['subscribers']}, {job['stacktrace']}, {job['version']}) """ conn.execute(query) conn.commit() def __insert_cycle(cycle: dict, conn): query = f""" INSERT INTO scenario (id, name, frequency, properties, creation_date, start_date, end_date) VALUES ({cycle['id']}, {cycle['name']}, {cycle['frequency']}, {cycle['properties']}, {cycle['creation_date']}, {cycle['start_date']}, {cycle['end_date']}) """ conn.execute(query) conn.commit() def __insert_version(version: dict, conn): query = f""" INSERT INTO version (id, config_id, creation_date, is_production, is_development, is_latest) VALUES ({version['id']}, {version['config_id']}, {version['creation_date']}, {version['is_production']}, {version['is_development']}, {version['is_latest']}) """ conn.execute(query) conn.commit() def __write_entities_to_sql(_entities: Dict, _versions: Dict, db_file: str): conn = sqlite3.connect(db_file) for k, entity in _entities.items(): if "SCENARIO" in k: __insert_scenario(entity["data"], conn) elif "TASK" in k: __insert_task(entity["data"], conn) elif "DATANODE" in k: 
__insert_datanode(entity["data"], conn) elif "JOB" in k: __insert_job(entity["data"], conn) elif "CYCLE" in k: __insert_cycle(entity["data"], conn) for k, version in _versions.items(): __insert_version(version, conn) def _restore_migrate_sql_entities(path: str) -> bool: file_name, file_extension = path.rsplit(".", 1) backup_path = f"{file_name}_backup.{file_extension}" if not os.path.exists(backup_path): __logger.error(f"The backup database '{backup_path}' does not exist.") return False if os.path.exists(path): os.remove(path) else: __logger.warning(f"The original entities database '{path}' does not exist.") os.rename(backup_path, path) __logger.info(f"Restored entities from the backup database '{backup_path}' to '{path}'.") return True def _remove_backup_sql_entities(path: str) -> bool: file_name, file_extension = path.rsplit(".", 1) backup_path = f"{file_name}_backup.{file_extension}" if not os.path.exists(backup_path): __logger.error(f"The backup database '{backup_path}' does not exist.") return False os.remove(backup_path) __logger.info(f"Removed backup entities from the backup database '{backup_path}'.") return True def _migrate_sql_entities(path: str, backup: bool = True) -> bool: """Migrate entities from sqlite database to the current version. Args: path (str): The path to the sqlite database. backup (bool, optional): Whether to backup the entities before migrating. Defaults to True. Returns: bool: True if the migration was successful, False otherwise. 
""" if not path: __logger.error("Missing the required sqlite path.") return False if not os.path.exists(path): __logger.error(f"File '{path}' does not exist.") return False if backup: file_name, file_extension = path.rsplit(".", 1) shutil.copyfile(path, f"{file_name}_backup.{file_extension}") __logger.info(f"Backed up entities from '{path}' to '{file_name}_backup.{file_extension}' before migration.") __logger.info(f"Starting entity migration from sqlite database '{path}'") entities, versions = _load_all_entities_from_sql(path) entities, versions = _migrate(entities, versions) __write_entities_to_sql(entities, versions, path) __logger.info("Migration finished") return True