# NOTE: removed a dataset-export table header (non-Python text) that preceded the imports.
from typing import List
import matplotlib
# Make the plotting scripts function without a
# valid DISPLAY variable -- MS 17/03/2020
matplotlib.use('Agg')
import numpy as np # noqa: E402
from matplotlib import pyplot as plt # noqa: E402
import sage_analysis.observations as obs # noqa: E402
from sage_analysis.model import Model # noqa: E402
from sage_analysis.plot_helper import PlotHelper # noqa: E402
def plot_SMF(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
    plot_sub_populations: bool = False
) -> matplotlib.figure.Figure:
    """
    Plots the stellar mass function for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the stellar mass
        function of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    plot_sub_populations : Boolean, default False
        If ``True``, plots the stellar mass function for red and blue sub-populations.

    Generates
    ---------
    The plot will be saved as "<output_path>1.StellarMassFunction.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Go through each of the models and plot.
    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Set the x-axis values to be the centre of the bins.
        bin_widths = model.bins["stellar_mass_bins"][1:] - model.bins["stellar_mass_bins"][:-1]
        # Bug fix: the centre of a bin is the left edge plus *half* the bin width.
        # Previously the full width was added, which placed each point on the right bin edge.
        bin_middles = model.bins["stellar_mass_bins"][:-1] + 0.5 * bin_widths

        # The SMF is normalized by the simulation volume which is in Mpc/h.
        normalization_factor = model._volume / pow(model.hubble_h, 3) * bin_widths

        # Colour will be used for the model, linestyle for the snapshot.
        color = plot_helper.colors[model_num]
        label = model.label

        for snapshot_num, snapshot in enumerate(model_snapshots):
            ls = plot_helper.linestyles[snapshot_num]
            norm_SMF = model.properties[f"snapshot_{snapshot}"]["SMF"] / normalization_factor
            ax.plot(
                bin_middles,
                norm_SMF,
                color=color,
                ls=ls,
                label=f"{label} - z = {model._redshifts[snapshot]:.2f} - All",
            )

            if plot_sub_populations:
                norm_red = model.properties[f"snapshot_{snapshot}"]["red_SMF"] / normalization_factor
                norm_blue = model.properties[f"snapshot_{snapshot}"]["blue_SMF"] / normalization_factor

                # NOTE(review): sub-population linestyles are indexed by *model* number
                # (offset by 1 and 2), not snapshot number -- confirm this is intended.
                ax.plot(
                    bin_middles,
                    norm_red,
                    color=color,
                    ls=plot_helper.linestyles[model_num+1],
                    lw=2,
                    label=f"{label} - z = {model._redshifts[snapshot]:.2f} - Red"
                )
                ax.plot(
                    bin_middles,
                    norm_blue,
                    color=color,
                    ls=plot_helper.linestyles[model_num+2],
                    lw=2,
                    label=f"{label} - z = {model._redshifts[snapshot]:.2f} - Blue"
                )

    # For scaling the observational data, we use the values of the zeroth model.
    zeroth_hubble_h = models[0].hubble_h
    zeroth_IMF = models[0].IMF
    ax = obs.plot_smf_data(ax, zeroth_hubble_h, zeroth_IMF)

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{stars}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\phi\ (\mathrm{Mpc}^{-3}\ \mathrm{dex}^{-1})$")
    ax.set_yscale("log", nonpositive="clip")
    ax.set_xlim([8.0, 12.0])
    ax.set_ylim([1.0e-6, 1.0e-1])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.1))

    plot_helper.adjust_legend(ax, location="lower left", scatter_plot=0)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}1.StellarMassFunction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_BMF(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the baryonic mass function for the specified models. This is the mass
    function for the stellar mass + cold gas.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the baryonic mass
        function of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>2.BaryonicMassFunction.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Set the x-axis values to be the centre of the bins.
        bin_widths = model.bins["stellar_mass_bins"][1:] - model.bins["stellar_mass_bins"][:-1]
        # Bug fix: the bin centre is the left edge plus *half* the bin width; adding the
        # full width (as before) placed each point on the right bin edge instead.
        bin_middles = model.bins["stellar_mass_bins"][:-1] + 0.5 * bin_widths

        # The MF is normalized by the simulation volume which is in Mpc/h.
        normalization_factor = model._volume / pow(model.hubble_h, 3) * bin_widths

        # Colour will be used for the snapshot, linestyle for the model.
        ls = plot_helper.linestyles[model_num]
        label = model.label

        for snapshot_num, snapshot in enumerate(model_snapshots):
            color = plot_helper.colors[snapshot_num]
            ax.plot(
                bin_middles,
                model.properties[f"snapshot_{snapshot}"]["BMF"] / normalization_factor,
                color=color,
                ls=ls,
                label=f"{label} - z = {model._redshifts[snapshot]:.2f} - All",
            )

    # For scaling the observational data, we use the values of the zeroth model.
    zeroth_hubble_h = models[0].hubble_h
    zeroth_IMF = models[0].IMF
    ax = obs.plot_bmf_data(ax, zeroth_hubble_h, zeroth_IMF)

    ax.set_xlabel(r"$\log_{10}\ M_{\mathrm{bar}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\phi\ (\mathrm{Mpc}^{-3}\ \mathrm{dex}^{-1})$")
    ax.set_yscale("log", nonpositive="clip")
    ax.set_xlim([7.8, 12.2])
    ax.set_ylim([1.0e-6, 1.0e-1])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.1))

    plot_helper.adjust_legend(ax, location="lower left", scatter_plot=0)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}2.BaryonicMassFunction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_GMF(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the gas mass function for the specified models. This is the mass function for the cold gas.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the gas mass
        function of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>3.GasMassFunction.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Set the x-axis values to be the centre of the bins.
        bin_widths = model.bins["stellar_mass_bins"][1:] - model.bins["stellar_mass_bins"][:-1]
        # Bug fix: the bin centre is the left edge plus *half* the bin width; adding the
        # full width (as before) placed each point on the right bin edge instead.
        bin_middles = model.bins["stellar_mass_bins"][:-1] + 0.5 * bin_widths

        # The GMF is normalized by the simulation volume which is in Mpc/h.
        normalization_factor = model._volume / pow(model.hubble_h, 3) * bin_widths

        # Colour will be used for the snapshot, linestyle for the model.
        ls = plot_helper.linestyles[model_num]
        label = model.label

        for snapshot_num, snapshot in enumerate(model_snapshots):
            color = plot_helper.colors[snapshot_num]
            ax.plot(
                bin_middles,
                model.properties[f"snapshot_{snapshot}"]["GMF"] / normalization_factor,
                color=color,
                ls=ls,
                label=f"{label} - z = {model._redshifts[snapshot]:.2f} - Cold Gas",
            )

    # For scaling the observational data, we use the values of the zeroth model.
    zeroth_hubble_h = models[0].hubble_h
    obs.plot_gmf_data(ax, zeroth_hubble_h)

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{X}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\phi\ (\mathrm{Mpc}^{-3}\ \mathrm{dex}^{-1})$")
    ax.set_yscale("log", nonpositive="clip")

    # Find the models that have the smallest/largest stellar mass bin.
    ax.set_xlim([7.8, 12.2])
    ax.set_ylim([1.0e-6, 1.0e-1])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.1))

    plot_helper.adjust_legend(ax, location="lower left", scatter_plot=0)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}3.GasMassFunction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_BTF(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the baryonic Tully-Fisher relationship for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed
        via ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the baryonic
        Tully-Fisher relationship of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>4.BaryonicTullyFisher.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Colour distinguishes models; marker style distinguishes snapshots.
    for model_idx, (model, snaps_for_model) in enumerate(zip(models, snapshots)):
        model_color = plot_helper.colors[model_idx]
        model_label = model.label

        for snap_idx, snap in enumerate(snaps_for_model):
            props = model.properties[f"snapshot_{snap}"]
            ax.scatter(
                props["BTF_vel"],
                props["BTF_mass"],
                marker=plot_helper.markers[snap_idx],
                s=5,
                color=model_color,
                alpha=0.8,
                label=f"{model_label} - z = {model._redshifts[snap]:.2f} - Sb/c galaxies",
            )

    # Axis ranges, minor-tick spacing and labels.
    ax.set_xlim([1.4, 2.6])
    ax.set_ylim([8.0, 12.0])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.05))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.25))
    ax.set_xlabel(r"$\log_{10}V_{max}\ (km/s)$")
    ax.set_ylabel(r"$\log_{10}\ M_{\mathrm{bar}}\ (M_{\odot})$")

    # Overlay the observational data points.
    ax = obs.plot_btf_data(ax)

    plot_helper.adjust_legend(ax, location="upper left", scatter_plot=1)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}4.BaryonicTullyFisher.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_sSFR(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the specific star formation rate as a function of stellar mass for the specified
    models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed
        via ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the specific star
        formation rate of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>5.SpecificStarFormationRate.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Colour distinguishes models; marker style distinguishes snapshots.
    for model_idx, (model, snaps_for_model) in enumerate(zip(models, snapshots)):
        model_color = plot_helper.colors[model_idx]
        model_label = model.label

        for snap_idx, snap in enumerate(snaps_for_model):
            props = model.properties[f"snapshot_{snap}"]
            ax.scatter(
                props["sSFR_mass"],
                props["sSFR_sSFR"],
                marker=plot_helper.markers[snap_idx],
                s=5,
                color=model_color,
                alpha=0.8,
                label=f"{model_label} - z = {model._redshifts[snap]:.2f}",
            )

    # Overplot a horizontal line dividing passive from star-forming galaxies,
    # placed at the smallest sSFR cut across all models.
    w = np.arange(7.0, 13.0, 1.0)
    min_sSFRcut = np.min([model._sSFRcut for model in models])
    ax.plot(w, np.full_like(w, min_sSFRcut), "b:", lw=2.0)

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{stars}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\log_{10}\ s\mathrm{SFR}\ (\mathrm{yr^{-1}})$")
    ax.set_xlim([8.0, 12.0])
    ax.set_ylim([-16.0, -8.0])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.05))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.25))

    plot_helper.adjust_legend(ax, scatter_plot=1)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}5.SpecificStarFormationRate.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_gas_fraction(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the fraction of baryons that are in the cold gas reservoir as a function of
    stellar mass for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed
        via ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the gas fraction
        of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>6.GasFraction.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Colour distinguishes models; marker style distinguishes snapshots.
    for model_idx, (model, snaps_for_model) in enumerate(zip(models, snapshots)):
        model_color = plot_helper.colors[model_idx]
        model_label = model.label

        for snap_idx, snap in enumerate(snaps_for_model):
            props = model.properties[f"snapshot_{snap}"]
            ax.scatter(
                props["gas_frac_mass"],
                props["gas_frac"],
                marker=plot_helper.markers[snap_idx],
                s=20,
                color=model_color,
                alpha=0.5,
                label=f"{model_label} - z = {model._redshifts[snap]:.2f} - Sb/c galaxies",
            )

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{stars}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\mathrm{Cold\ Mass\ /\ (Cold+Stellar\ Mass)}$")
    ax.set_xlim([7.8, 12.2])
    ax.set_ylim([0.0, 1.0])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.05))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.25))

    plot_helper.adjust_legend(ax, scatter_plot=1)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}6.GasFraction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_metallicity(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the metallicity as a function of stellar mass for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed
        via ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the metallicity
        of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>7.Metallicity.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Colour distinguishes models; marker style distinguishes snapshots.
    for model_idx, (model, snaps_for_model) in enumerate(zip(models, snapshots)):
        model_color = plot_helper.colors[model_idx]
        model_label = model.label

        for snap_idx, snap in enumerate(snaps_for_model):
            props = model.properties[f"snapshot_{snap}"]
            ax.scatter(
                props["metallicity_mass"],
                props["metallicity"],
                marker=plot_helper.markers[snap_idx],
                s=5,
                color=model_color,
                alpha=0.8,
                label=f"{model_label} - z = {model._redshifts[snap]:.2f}",
            )

    # Use the IMF of the zeroth model to scale the observational results.
    ax = obs.plot_metallicity_data(ax, models[0].IMF)

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{stars}}\ (M_{\odot})$")
    ax.set_ylabel(r"$12\ +\ \log_{10}[\mathrm{O/H}]$")
    ax.set_xlim([7.8, 12.2])
    ax.set_ylim([8.0, 9.5])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.05))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.25))

    # Since we're doing a scatter plot, we need to resize the legend points.
    plot_helper.adjust_legend(ax, location="upper right", scatter_plot=1)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}7.Metallicity.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_bh_bulge(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the black-hole bulge relationship for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed
        via ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the black hole
        bulge relationship of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>8.BlackHoleBulgeRelationship.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Colour distinguishes models; marker style distinguishes snapshots.
    for model_idx, (model, snaps_for_model) in enumerate(zip(models, snapshots)):
        model_color = plot_helper.colors[model_idx]
        model_label = model.label

        for snap_idx, snap in enumerate(snaps_for_model):
            props = model.properties[f"snapshot_{snap}"]
            ax.scatter(
                props["bulge_mass"],
                props["bh_mass"],
                marker=plot_helper.markers[snap_idx],
                s=5,
                color=model_color,
                alpha=0.8,
                label=f"{model_label} - z = {model._redshifts[snap]:.2f}",
            )

    # Overlay the observational data points.
    ax = obs.plot_bh_bulge_data(ax)

    ax.set_xlabel(r"$\log\ M_{\mathrm{bulge}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\log\ M_{\mathrm{BH}}\ (M_{\odot})$")
    ax.set_xlim([7.8, 12.2])
    ax.set_ylim([6.0, 10.0])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.05))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.25))

    plot_helper.adjust_legend(ax, location="upper right", scatter_plot=1)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}8.BlackHoleBulgeRelationship.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_quiescent(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
    plot_sub_populations: bool = False
) -> matplotlib.figure.Figure:
    """
    Plots the fraction of galaxies that are quiescent as a function of stellar mass for the
    specified models. The quiescent cut is defined by :py:attr:`~sage_analysis.model.Model.sSFRcut`.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the quiescent
        fraction of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    plot_sub_populations : Boolean, default False
        If ``True``, plots the centrals and satellite sub-populations.

    Generates
    ---------
    The plot will be saved as "<output_path>9.QuiescentFraction.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Go through each of the models and plot.
    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Set the x-axis values to be the centre of the bins.
        bin_widths = model.bins["stellar_mass_bins"][1:] - model.bins["stellar_mass_bins"][:-1]
        # Bug fix: the bin centre is the left edge plus *half* the bin width; adding the
        # full width (as before) placed each point on the right bin edge instead.
        bin_middles = model.bins["stellar_mass_bins"][:-1] + 0.5 * bin_widths

        # Colour will be used for the snapshot, linestyle for the model.
        ls = plot_helper.linestyles[model_num]
        label = model.label

        for snapshot_num, snapshot in enumerate(model_snapshots):
            color = plot_helper.colors[snapshot_num]

            # Only plot bins that contain galaxies so we never divide by zero.
            non_zero_SMF = np.where(model.properties[f"snapshot_{snapshot}"]["SMF"] > 0)
            quiescent_fraction = model.properties[f"snapshot_{snapshot}"]["quiescent_galaxy_counts"][non_zero_SMF] / \
                model.properties[f"snapshot_{snapshot}"]["SMF"][non_zero_SMF]

            ax.plot(
                bin_middles[non_zero_SMF],
                quiescent_fraction,
                color=color,
                ls=ls,
                label=f"{label} - z = {model._redshifts[snapshot]:.2f} - All",
            )

            # Be careful to not overcrowd the plot.
            if plot_sub_populations:
                non_zero_MF = np.where(model.properties[f"snapshot_{snapshot}"]["centrals_MF"])[0]
                quiescent_central_fraction = \
                    model.properties[f"snapshot_{snapshot}"]["quiescent_centrals_counts"][non_zero_MF] / \
                    model.properties[f"snapshot_{snapshot}"]["centrals_MF"][non_zero_MF]

                # The centrals line is deliberately unlabelled to keep the legend short.
                ax.plot(
                    bin_middles[non_zero_MF],
                    quiescent_central_fraction,
                    color=color,
                    linestyle="--",
                )

                non_zero_MF = np.where(model.properties[f"snapshot_{snapshot}"]["satellites_MF"])[0]
                quiescent_sat_fraction = \
                    model.properties[f"snapshot_{snapshot}"]["quiescent_satellites_counts"][non_zero_MF] / \
                    model.properties[f"snapshot_{snapshot}"]["satellites_MF"][non_zero_MF]

                ax.plot(
                    bin_middles[non_zero_MF],
                    quiescent_sat_fraction,
                    color=color,
                    linestyle="-.",
                    label=f"{label} - z = {model._redshifts[snapshot]:.2f} - Satellites",
                )

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{stellar}}\ (M_{\odot})$")
    # Bug fix: the axis label previously misspelt "Quiescent" as "Quescient".
    ax.set_ylabel(r"$\mathrm{Quiescent\ Fraction}$")
    ax.set_xlim([7.8, 12.2])
    ax.set_ylim([0.0, 1.05])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.25))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.10))

    plot_helper.adjust_legend(ax, location="upper left", scatter_plot=0)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}9.QuiescentFraction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_bulge_fraction(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
    plot_disk_fraction: bool = False,
    plot_var: bool = False
) -> matplotlib.figure.Figure:
    """
    Plots the fraction of the stellar mass that is located in the bulge/disk as a function
    of stellar mass for the specified models.

    Parameters
    ----------
    models : List of ``Model`` class instance
        Models that will be plotted. These instances contain the properties necessary to
        create the plot, accessed via ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the bulge fraction
        of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    plot_disk_fraction : bool, optional
        If specified, will also plot the disk fraction.

    plot_var : Boolean, default False
        If ``True``, plots the variance as shaded regions.

    Generates
    ---------
    The plot will be saved as :py:attr:`~sage_analysis.plot_helper.PlotHelper.output_path`10.BulgeMassFraction.
    :py:attr:`~sage_analysis.plot_helper.PlotHelper.output_format`.
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Go through each of the models and plot.
    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Set the x-axis values to be the centre of the bins.
        bin_widths = model.bins["stellar_mass_bins"][1:] - model.bins["stellar_mass_bins"][:-1]
        # Bug fix: the bin centre is the left edge plus *half* the bin width; adding the
        # full width (as before) placed each point on the right bin edge instead.
        bin_middles = model.bins["stellar_mass_bins"][:-1] + 0.5 * bin_widths

        # Colour will be used for the snapshot, linestyle for the model.
        ls = plot_helper.linestyles[model_num]
        label = model.label

        for snapshot_num, snapshot in enumerate(model_snapshots):
            color = plot_helper.colors[snapshot_num]

            # Remember we need to average the properties in each bin.
            non_zero_SMF = np.where(model.properties[f"snapshot_{snapshot}"]["SMF"] > 0)
            bulge_mean = model.properties[f"snapshot_{snapshot}"]["fraction_bulge_sum"][non_zero_SMF] / \
                model.properties[f"snapshot_{snapshot}"]["SMF"][non_zero_SMF]
            disk_mean = model.properties[f"snapshot_{snapshot}"]["fraction_disk_sum"][non_zero_SMF] / \
                model.properties[f"snapshot_{snapshot}"]["SMF"][non_zero_SMF]

            # The variance has already been weighted when we calculated it.
            bulge_var = model.properties[f"snapshot_{snapshot}"]["fraction_bulge_var"][non_zero_SMF]
            disk_var = model.properties[f"snapshot_{snapshot}"]["fraction_disk_var"][non_zero_SMF]

            # Bug fix: every y-array above is filtered by ``non_zero_SMF``, so the x-axis
            # values must be filtered identically. Previously the unfiltered
            # ``bin_middles`` was passed to the disk/variance plots, which raises a shape
            # mismatch whenever any stellar-mass bin is empty.
            x = bin_middles[non_zero_SMF]

            ax.plot(
                x,
                bulge_mean,
                color=color,
                ls=ls,
                label=f"{label} - z = {model._redshifts[snapshot]:.2f} - Bulge",
            )

            if plot_disk_fraction:
                ax.plot(
                    x,
                    disk_mean,
                    color=color,
                    ls=plot_helper.linestyles[model_num+1],
                    label=f"{label} - z = {model._redshifts[snapshot]:.2f} - Disk",
                )

            if plot_var:
                ax.fill_between(
                    x,
                    bulge_mean+bulge_var,
                    bulge_mean-bulge_var,
                    facecolor=color,
                    alpha=0.25
                )

                if plot_disk_fraction:
                    ax.fill_between(
                        x,
                        disk_mean+disk_var,
                        disk_mean-disk_var,
                        facecolor=color,
                        alpha=0.25
                    )

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{stars}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\mathrm{Stellar\ Mass\ Fraction}$")
    ax.set_xlim([7.8, 12.2])
    ax.set_ylim([0.0, 1.05])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.25))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.10))

    plot_helper.adjust_legend(ax, location="upper left", scatter_plot=0)
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}10.BulgeMassFraction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_baryon_fraction(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
    plot_sub_populations: bool = False
) -> matplotlib.figure.Figure:
    """
    Plots the total baryon fraction as a function of halo mass for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the baryon fraction
        of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    plot_sub_populations : Boolean, default False
        If ``True``, plots the baryon fraction for each reservoir. Otherwise, only plots
        the total baryon fraction.

    Generates
    ---------
    The plot will be saved as "<output_path>11.BaryonFraction.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Go through each of the models and plot.
    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Set the x-axis values to be the centre of the bins.
        bin_widths = model.bins["stellar_mass_bins"][1:] - model.bins["stellar_mass_bins"][:-1]
        # Bug fix: the bin centre is the left edge plus *half* the bin width; adding the
        # full width (as before) placed each point on the right bin edge instead.
        bin_middles = model.bins["stellar_mass_bins"][:-1] + 0.5 * bin_widths

        # Colour will be used for the snapshot, linestyle for the model.
        ls = plot_helper.linestyles[model_num]
        label = model.label

        for snapshot_num, snapshot in enumerate(model_snapshots):
            color = plot_helper.colors[snapshot_num]

            # Remember we need to average the properties in each bin; only use bins with
            # a non-zero halo count to avoid dividing by zero.
            non_zero_HMF = np.where(model.properties[f"snapshot_{snapshot}"]["fof_HMF"] > 0)
            baryon_mean = model.properties[f"snapshot_{snapshot}"]["halo_baryon_fraction_sum"][non_zero_HMF] / \
                model.properties[f"snapshot_{snapshot}"]["fof_HMF"][non_zero_HMF]

            ax.plot(
                bin_middles[non_zero_HMF],
                baryon_mean,
                label=f"{label} - z = {model._redshifts[snapshot]:.2f} - Total",
                color=color,
                ls=ls,
            )

            if plot_sub_populations:
                attrs = ["stars", "cold", "hot", "ejected", "ICS"]
                res_labels = ["Stars", "Cold", "Hot", "Ejected", "ICS"]
                res_colors = ["k", "b", "r", "g", "y"]

                for (attr, res_label, res_color) in zip(attrs, res_labels, res_colors):
                    dict_key = "halo_{0}_fraction_sum".format(attr)
                    mean = model.properties[f"snapshot_{snapshot}"][dict_key][non_zero_HMF] / \
                        model.properties[f"snapshot_{snapshot}"]["fof_HMF"][non_zero_HMF]

                    ax.plot(
                        bin_middles[non_zero_HMF],
                        mean,
                        label=f"{label} - z = {model._redshifts[snapshot]:.2f} - {res_label}",
                        color=res_color,
                        ls=ls,
                    )

    ax.set_xlabel(r"$\mathrm{Central}\ \log_{10} M_{\mathrm{vir}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\mathrm{Baryon\ Fraction}$")
    ax.set_xlim([9.8, 14.2])
    ax.set_ylim([0.0, 0.23])
    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.25))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.05))

    plot_helper.adjust_legend(ax, location="upper left", scatter_plot=0)
    # Consistency fix: every other plot in this module calls ``tight_layout`` before
    # saving; this one previously skipped it.
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}11.BaryonFraction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_reservoirs(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> List[matplotlib.figure.Figure]:
    """
    Plots the mass held in each baryonic reservoir as a function of halo virial mass.

    Because overlaying many scatter plots would be unreadable, one figure is
    produced for every (model, snapshot) combination.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, each snapshot will
        be plotted and saved as a separate figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Returns
    -------
    list of :py:class:`matplotlib.figure.Figure`
        One figure per (model, snapshot) pair, in iteration order.

    Generates
    ---------
    A plot will be saved as ``"<output_path>12.MassReservoirs_<label>_snapshot<snapshot>.<output_format>"``
    for each (model, snapshot) pair.
    """
    # One figure per (model, snapshot) pair; collect them all for the caller.
    generated_figures = []

    # The reservoirs plotted, their legend labels, and their fixed colours.
    reservoir_attributes = ["stars", "cold", "hot", "ejected", "ICS"]
    reservoir_labels = ["Stars", "ColdGas", "HotGas", "EjectedGas", "IntraclusterStars"]
    reservoir_colors = ["k", "b", "r", "g", "y"]

    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Marker style distinguishes the model; colours distinguish reservoirs.
        label = model.label
        marker = plot_helper.markers[model_num]

        for snapshot in model_snapshots:

            fig = plt.figure(figsize=plot_helper.figsize)
            ax = fig.add_subplot(111)

            snapshot_properties = model.properties[f"snapshot_{snapshot}"]

            for attribute_name, reservoir_label, reservoir_color in zip(
                reservoir_attributes, reservoir_labels, reservoir_colors
            ):
                ax.scatter(
                    snapshot_properties["reservoir_mvir"],
                    snapshot_properties[f"reservoir_{attribute_name}"],
                    marker=marker,
                    s=0.3,
                    label=reservoir_label,
                    color=reservoir_color,
                )

            ax.set_xlabel(r"$\log\ M_{\mathrm{vir}}\ (M_{\odot})$")
            ax.set_ylabel(r"$\mathrm{Reservoir\ Mass\ (M_{\odot})}$")

            ax.set_xlim([9.8, 14.2])
            ax.set_ylim([7.5, 12.5])

            ax.xaxis.set_minor_locator(plt.MultipleLocator(0.25))
            ax.yaxis.set_minor_locator(plt.MultipleLocator(0.25))

            plot_helper.adjust_legend(ax, location="upper left", scatter_plot=1)

            fig.tight_layout()

            output_file = f"{plot_helper.output_path}12.MassReservoirs_{label}_snapshot{snapshot}.{plot_helper.output_format}"
            fig.savefig(output_file)
            print(f"Saved file to {output_file}")
            plt.close()

            generated_figures.append(fig)

    return generated_figures
def plot_spatial(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the spatial distribution of the galaxies for specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        The snapshots to be plotted for each :py:class:`~sage_analysis.model.Model` in ``models``.
        The length of the outer list **MUST** be equal to the length of ``models``. For each model, the spatial
        position of all snapshots are plotted on the figure.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Returns
    -------
    matplotlib.figure.Figure
        The generated four-panel figure; it is also saved to disk (see below).

    Generates
    ---------
    A single plot containing all models, saved as
    ``"<output_path>13.SpatialDistribution.<output_format>"``.
    """
    fig = plt.figure(figsize=plot_helper.figsize)

    # 4-panel plot: x-y, x-z and y-z projections; the fourth panel holds only the legend.
    ax1 = fig.add_subplot(221)
    ax2 = fig.add_subplot(222)
    ax3 = fig.add_subplot(223)
    ax4 = fig.add_subplot(224)

    # Go through each of the models and plot.
    for model_num, (model, model_snapshots) in enumerate(zip(models, snapshots)):

        # Colour will be used for the model, marker style for the snapshot.
        color = plot_helper.colors[model_num]
        label = model.label

        for snapshot_num, snapshot in enumerate(model_snapshots):
            marker = plot_helper.markers[snapshot_num]

            # x-y projection.
            ax1.scatter(
                model.properties[f"snapshot_{snapshot}"]["x_pos"],
                model.properties[f"snapshot_{snapshot}"]["y_pos"],
                marker=marker,
                s=0.3,
                color=color,
                alpha=0.5
            )

            # x-z projection.
            ax2.scatter(
                model.properties[f"snapshot_{snapshot}"]["x_pos"],
                model.properties[f"snapshot_{snapshot}"]["z_pos"],
                marker=marker,
                s=0.3,
                color=color,
                alpha=0.5
            )

            # y-z projection.
            ax3.scatter(
                model.properties[f"snapshot_{snapshot}"]["y_pos"],
                model.properties[f"snapshot_{snapshot}"]["z_pos"],
                marker=marker,
                s=0.3,
                color=color,
                alpha=0.5
            )

            # The bottom right panel will only contain the legend.
            # For some odd reason, plotting `np.nan` causes some legend entries to not
            # appear. Plot junk and we'll adjust the axis to not show it.
            ax4.scatter(
                -999,
                -999,
                marker=marker,
                color=color,
                label=f"{label} - z = {model._redshifts[snapshot]:.2f}",
            )
            ax4.axis("off")

    ax1.set_xlabel(r"$\mathrm{x}\ [\mathrm{Mpc}/h]$")
    ax1.set_ylabel(r"$\mathrm{y}\ [\mathrm{Mpc}/h]$")

    ax2.set_xlabel(r"$\mathrm{x}\ [\mathrm{Mpc}/h]$")
    ax2.set_ylabel(r"$\mathrm{z}\ [\mathrm{Mpc}/h]$")

    ax3.set_xlabel(r"$\mathrm{y}\ [\mathrm{Mpc}/h]$")
    ax3.set_ylabel(r"$\mathrm{z}\ [\mathrm{Mpc}/h]$")

    # Find the model with the largest box.
    max_box = np.max([model.box_size for model in models])

    # Pad the axis limits by 5% of the box so edge galaxies are not clipped.
    buffer = max_box*0.05
    for ax in [ax1, ax2, ax3, ax4]:
        ax.set_xlim([0.0-buffer, max_box+buffer])
        ax.set_ylim([0.0-buffer, max_box+buffer])

        ax.xaxis.set_minor_locator(plt.MultipleLocator(5))
        ax.yaxis.set_minor_locator(plt.MultipleLocator(5))

    # The junk points plotted at (-999, -999) now sit outside the limits set above.
    plot_helper.adjust_legend(ax4, location="upper left", scatter_plot=1)

    # Make sure everything remains nicely layed out.
    fig.tight_layout()

    output_file = f"{plot_helper.output_path}13.SpatialDistribution.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_spatial_3d(pos, output_file, box_size) -> matplotlib.figure.Figure:
    """
    Plots the 3D spatial distribution of galaxies.

    Parameters
    ----------
    pos : ``numpy`` array of shape (number_of_galaxies, 3)
        The position (in Mpc/h) of the galaxies; columns are the x, y, z coordinates.

    output_file : String
        Name of the file the plot will be saved as.

    box_size : float
        Side length of the simulation box (in Mpc/h), used to set the axis limits.

    Returns
    -------
    matplotlib.figure.Figure
        The generated figure. The plot is also saved as ``output_file``.
    """
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401
    from random import sample

    fig = plt.figure()
    ax = fig.add_subplot(111, projection="3d")

    # Plot at most ``sample_size`` galaxies to keep the figure legible and fast.
    num_gals = len(pos)
    sample_size = 10000
    if num_gals > sample_size:
        # Sample directly from a ``range``; building ``list(np.arange(num_gals))``
        # materialized a throwaway list of numpy ints for no benefit.
        w = sample(range(num_gals), sample_size)
    else:
        w = np.arange(num_gals)

    ax.scatter(pos[w, 0], pos[w, 1], pos[w, 2], alpha=0.5)

    ax.set_xlim([0.0, box_size])
    ax.set_ylim([0.0, box_size])
    ax.set_zlim([0.0, box_size])

    ax.set_xlabel(r"$\mathbf{x \: [h^{-1}Mpc]}$")
    ax.set_ylabel(r"$\mathbf{y \: [h^{-1}Mpc]}$")
    ax.set_zlabel(r"$\mathbf{z \: [h^{-1}Mpc]}$")

    fig.tight_layout()

    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_SMF_history(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the evolution of the stellar mass function for the specified models.

    This function loops over ``model._history_SMF_history_snapshots`` and plots the
    SMF at each of those snapshots.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        This is a dummy variable that is present to ensure the signature is identical to the other plot functions.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>A.StellarMassFunction.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    # Go through each of the models and plot.
    for (model_num, model) in enumerate(models):

        ls = plot_helper.linestyles[model_num]

        # Set the x-axis values to be the centre of the bins.
        # NOTE(review): as written, ``bin_middles`` is the *upper* edge of each bin
        # (lower edge + full bin width), not the centre — a half-bin shift
        # (``+ bin_widths / 2``) may be intended; confirm before changing.
        bin_widths = model.bins["stellar_mass_bins"][1::] - model.bins["stellar_mass_bins"][0:-1]
        bin_middles = model.bins["stellar_mass_bins"][:-1] + bin_widths

        # Iterate over the snapshots.
        for snap in model._history_SMF_history_snapshots:

            # Maybe there weren't any galaxies present for this snapshot.
            if np.isclose(np.sum(model.properties[f"snapshot_{snap}"]["SMF_history"]), 0.0):
                continue

            label = f"{model.label} z = {model.redshifts[snap]:.3f}"

            # The SMF is normalized by the simulation volume which is in Mpc/h.
            ax.plot(
                bin_middles,
                model.properties[f"snapshot_{snap}"]["SMF_history"] / model._volume*pow(model.hubble_h, 3)/bin_widths,
                ls=ls,
                label=label
            )

    # For scaling the observational data, we use the values of the zeroth model.
    zeroth_IMF = models[0].IMF
    ax = obs.plot_temporal_smf_data(ax, zeroth_IMF)

    ax.set_xlabel(r"$\log_{10} M_{\mathrm{stars}}\ (M_{\odot})$")
    ax.set_ylabel(r"$\phi\ (\mathrm{Mpc}^{-3}\ \mathrm{dex}^{-1})$")

    ax.set_yscale("log", nonpositive="clip")

    ax.set_xlim([8.0, 12.0])
    ax.set_ylim([1.0e-6, 1.0e-1])

    ax.xaxis.set_minor_locator(plt.MultipleLocator(0.1))

    plot_helper.adjust_legend(ax, location="lower left", scatter_plot=0)

    fig.tight_layout()

    output_file = f"{plot_helper.output_path}A.StellarMassFunction.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_SFRD_history(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the evolution of star formation rate density for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        This is a dummy variable that is present to ensure the signature is identical to the other plot functions.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>B.SFRDensity.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    for model_num, model in enumerate(models):

        # Gather the SFR density recorded at every snapshot tracked for this model.
        sfr_density = np.array(
            [model.properties[f"snapshot_{snap}"]["SFRD_history"] for snap in model._history_SFRD_history_snapshots]
        )
        redshifts = model._history_SFRD_history_redshifts

        # Every snapshot is initialized with a zero "SFRD_history" value; only the
        # snapshots that were actually analyzed hold non-zero values worth plotting.
        plotted_inds = np.where(sfr_density > 0.0)[0]

        # Volume is in (Mpc/h)^3, hence the h^3 factor to reach physical Mpc^-3.
        log_density = np.log10(sfr_density[plotted_inds] / model._volume * pow(model.hubble_h, 3))

        # With many analyzed snapshots a line is clearer; with few, discrete points.
        if len(plotted_inds) > 20:
            ax.plot(
                redshifts[plotted_inds],
                log_density,
                label=model.label,
                color=plot_helper.colors[model_num],
                ls=plot_helper.linestyles[model_num],
            )
        else:
            ax.scatter(
                redshifts[plotted_inds],
                log_density,
                label=model.label,
                color=plot_helper.colors[model_num],
                marker=plot_helper.markers[model_num],
            )

    ax = obs.plot_sfrd_data(ax)

    ax.set_xlabel(r"$\mathrm{redshift}$")
    ax.set_ylabel(r"$\log_{10} \mathrm{SFR\ density}\ (M_{\odot}\ \mathrm{yr}^{-1}\ \mathrm{Mpc}^{-3})$")

    ax.set_xlim([0.0, 8.0])
    ax.set_ylim([-3.0, -0.4])

    ax.xaxis.set_minor_locator(plt.MultipleLocator(1))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.5))

    plot_helper.adjust_legend(ax, location="lower left", scatter_plot=0)

    fig.tight_layout()

    output_file = f"{plot_helper.output_path}B.SFRDensity.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
def plot_SMD_history(
    models: List[Model],
    snapshots: List[List[int]],
    plot_helper: PlotHelper,
) -> matplotlib.figure.Figure:
    """
    Plots the evolution of stellar mass density for the specified models.

    Parameters
    ----------
    models : List of :py:class:`~sage_analysis.model.Model` class instance
        Models that will be plotted. These instances contain the properties necessary to create the plot, accessed via
        ``Model.properties["snapshot_<snapshot>"]["property_name"]``.

    snapshots : nested list of ints
        This is a dummy variable that is present to ensure the signature is identical to the other plot functions.

    plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`
        A helper class that contains attributes and methods to assist with plotting. In particular, the path where
        the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
        how to initialize this class and its use.

    Generates
    ---------
    The plot will be saved as "<output_path>C.StellarMassDensity.<output_format>"
    """
    fig = plt.figure(figsize=plot_helper.figsize)
    ax = fig.add_subplot(111)

    for model_num, model in enumerate(models):

        # Collect the stellar mass density at every snapshot tracked for this model.
        history_snaps = model._history_SMD_history_snapshots
        smd = np.array(
            [model.properties[f"snapshot_{snap}"]["SMD_history"] for snap in history_snaps]
        )
        redshifts = model._history_SMD_history_redshifts

        # "SMD_history" defaults to zero for unanalyzed snapshots; skip those.
        valid_inds = np.where(smd > 0.0)[0]

        # Volume is in (Mpc/h)^3, hence the h^3 factor to reach physical Mpc^-3.
        log_smd = np.log10(smd[valid_inds] / model._volume * pow(model.hubble_h, 3))

        # Use a continuous line only when enough snapshots were analyzed.
        if len(valid_inds) > 20:
            ax.plot(
                redshifts[valid_inds],
                log_smd,
                label=model.label,
                color=plot_helper.colors[model_num],
                ls=plot_helper.linestyles[model_num],
            )
        else:
            ax.scatter(
                redshifts[valid_inds],
                log_smd,
                label=model.label,
                color=plot_helper.colors[model_num],
                marker=plot_helper.markers[model_num],
            )

    # For scaling the observational data, we use the values of the zeroth
    # model.
    zeroth_IMF = models[0].IMF
    ax = obs.plot_smd_data(ax, zeroth_IMF)

    ax.set_xlabel(r"$\mathrm{redshift}$")
    ax.set_ylabel(r'$\log_{10}\ \phi\ (M_{\odot}\ \mathrm{Mpc}^{-3})$')

    ax.set_xlim([0.0, 4.2])
    ax.set_ylim([6.5, 9.0])

    ax.xaxis.set_minor_locator(plt.MultipleLocator(1))
    ax.yaxis.set_minor_locator(plt.MultipleLocator(0.5))

    plot_helper.adjust_legend(ax, location="lower left", scatter_plot=0)

    fig.tight_layout()

    output_file = f"{plot_helper.output_path}C.StellarMassDensity.{plot_helper.output_format}"
    fig.savefig(output_file)
    print(f"Saved file to {output_file}")
    plt.close()

    return fig
import logging
import os
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import matplotlib
matplotlib.use('Agg')
import numpy as np
import sage_analysis.example_calcs
import sage_analysis.example_plots # noqa: F401
from sage_analysis.default_analysis_arguments import (
default_calculation_functions,
default_plot_functions,
default_galaxy_properties_to_analyze,
default_plot_toggles
)
from sage_analysis.model import Model
from sage_analysis.plot_helper import PlotHelper
from sage_analysis.sage_binary import SageBinaryData
from sage_analysis.utils import generate_func_dict, read_generic_sage_params, find_closest_indices
try:
from sage_analysis.sage_hdf5 import SageHdf5Data
except ImportError:
print("h5py not found. If you're reading in HDF5 output from SAGE, please install this package.")
logger = logging.getLogger(__name__)
class GalaxyAnalysis:
"""
Handles the ingestion, analysis, and plotting of **SAGE** galaxy outputs.
"""
def __init__(
self,
sage_parameter_fnames: List[str],
plot_toggles: Optional[Dict[str, bool]] = None,
sage_output_formats: Optional[List[str]] = None,
labels: Optional[List[str]] = None,
first_files_to_analyze: Optional[List[int]] = None,
last_files_to_analyze: Optional[List[int]] = None,
num_sage_output_files: Optional[List[int]] = None,
output_format_data_classes_dict: Optional[Dict[str, Any]] = None,
random_seeds: Optional[List[int]] = None,
history_redshifts: Optional[Dict[str, Union[List[float], str]]] = None,
calculation_functions: Optional[Dict[str, Tuple[Callable, Dict[str, Any]]]] = None,
plot_functions: Optional[Dict[str, Tuple[Callable, Dict[str, Any]]]] = None,
galaxy_properties_to_analyze: Optional[Dict[str, Dict[str, Union[str, List[str]]]]] = None,
plots_that_need_smf: Optional[List[str]] = None,
IMFs: Optional[List[str]] = None,
):
"""
Parameters
----------
sage_parameter_fnames : list of strings
The name of the **SAGE** parameter files that are to be analyzed. These are the ``.ini`` files used to
generate the galaxy files. The length of this variable is equal to the number of models to be analyzed.
plot_toggles : dict [str, bool], optional
Specifies which properties should be analyzed and plotted.
If not specified, uses
.. highlight:: python
.. code-block:: python
default_plot_toggles = {
"SMF" : True,
"BMF" : True,
"GMF" : True,
"BTF" : True,
"sSFR" : True,
"gas_fraction" : True,
"metallicity" : True,
"bh_bulge" : True,
"quiescent" : True,
"bulge_fraction" : True,
"baryon_fraction" : True,
"reservoirs" : True,
"spatial" : True,
"SMF_history": False,
"SFRD_history": False,
"SMD_history": False,
}
sage_output_formats : list of strings, optional
The output formats of each **SAGE** model being analyzed. Each value here **MUST** have a corresponding
entry in ``output_format_data_classes_dict``. The length of this variable is equal to the number of models
to be analyzed.
If not specified, will use the ``OutputFormat`` entry from the respective **SAGE** parameter file.
labels : list of strings, optional
The labels to be used in the legend for each model. The length of this variable is equal to the number of
models to be analyzed.
If not specified, will use the ``FileNameGalaxies`` entry from the respective **SAGE** parameter file.
first_files_to_analyze, last_files_to_analyze : list of ints, optional-ish
The output **SAGE** files to be analyzed. This is an inclusive range, with the output files analyzed
ranging from ``[first_file_to_analyze, last_file_to_analyze]`` for each model. The length of this variable
is equal to the number of models to be analyzed.
If the corresponding entry in ``sage_output_format`` is ``sage_binary`` (whether passed explicitly or read
from ``sage_file``), these two variables **MUST** be specified. Otherwise, if not specified, will analyze
**ALL** output HDF5 files.
num_sage_output_files : list of ints, optional-ish
Specifies the number of output files that were generated by running **SAGE**. This will generally be equal
to the number of processors used to run **SAGE** and can be different to the range specified by
``[first_file_to_analyze, last_file_to_analyze]``.
If the corresponding entry in ``sage_output_format`` is ``sage_binary`` (whether passed explicitly or read
from ``sage_file``), this **MUST** be specified. Otherwise, this variable is **NOT** used.
output_format_data_classes_dict : dict [string, class], optional
A dictionary that maps the output format name to the corresponding data class. Each value in
``sage_output_formats`` **MUST** have an entry in this dictionary.
If not specified, will use a default value
``output_format_data_classes_dict = {"sage_binary":`` :py:class:`~sage_analysis.sage_binary.SageBinaryData`
``,
"sage_hdf5":`` :py:class:`~sage_analysis.sage_hdf5.SageHdf5Data` ``}``.
random_seeds : list of ints, optional
The values to seed the random number generator for each model. If the value is ``None``, then the
generator is seeded using the ``np.random.seed()`` method. The length of this variable is equal to the
number of models to be analyzed.
If not specified, uses ``None`` for each model (i.e., no predetermined seed).
history_redshifts : dict [string, string or list of floats], optional
Specifies which redshifts should be analyzed for properties and plots that are tracked over time. The keys
here **MUST** have the same name as in ``plot_toggles``.
If the value of the entry is ``"All"``, then all snapshots will be analyzed. Otherwise, will search for the
closest snapshots to the requested redshifts.
If not specified, uses
.. highlight:: python
.. code-block:: python
history_redshifts = {
"SMF_history": "All",
"SMD_history": "All",
"SFRD_history": "All",
}
calculation_functions : dict [string, tuple(function, dict[string, variable])], optional
A dictionary of functions that are used to compute the properties of galaxies being analyzed. Here, the
string is the name of the plot toggle (e.g., ``"SMF"``), the value is a tuple containing the function
itself (e.g., ``calc_SMF()``), and another dictionary which specifies any optional keyword arguments to
that function with keys as the name of variable (e.g., ``"calc_sub_populations"``) and values as the
variable value (e.g., ``True``).
The functions in this dictionary are called for all files analyzed and **MUST** have a signature
``func(model, gals, snapshot, optional_keyword_arguments)``. This dict can be generated using
:py:func:`~sage_analysis.utils.generate_func_dict`.
If not specified, will use the functions found in :py:mod:`~sage_analysis.example_calcs`, filtered to
ensure that only those functions necessary to plot the plots specified by ``plot_toggles`` are run.
plot_functions : dict [string, tuple(function, dict[string, variable])], optional
A dictionary of functions that are used to plot the properties of galaxies being analyzed. Here, the
string is the name of the function (e.g., ``"plot_SMF"``), the value is a tuple containing the function
itself (e.g., ``plot_SMF()``), and another dictionary which specifies any optional keyword arguments to
that function with keys as the name of variable (e.g., ``"plot_sub_populations"``) and values as the
variable value (e.g., ``True``).
The functions in this dictionary are called for all files analyzed and **MUST** have a signature
``func(models, snapshots, plot_helper, optional_keyword_arguments)``. This dict
can be generated using :py:func:`~sage_analysis.utils.generate_func_dict`.
If not specified, will use the functions found in :py:mod:`~sage_analysis.example_plots`, filtered to
ensure that only those functions necessary to plot the plots specified by ``plot_toggles`` are run.
galaxy_properties_to_analyze : dict [string, dict[str, float or str or list of strings]], optional
The galaxy properties that are used when running ``calculation_functions``. The properties initialized here
will be accessible through ``model.properties["property_name"]``.
This variable is a nested dictionary with the outer dictionary specifying the name of the bins (if the
properties will be binned), or a unique name otherwise.
The inner dictionary has a number of fields that depend upon the type of property. We support properties
being either binned against a property (e.g., the stellar or halo mass functions are binned on stellar/halo
mass), plotted as x-vs-y scatter plots (e.g., specific star formation rate vs stellar mass for 1000
galaxies), or as a single value (e.g., the stellar mass density).
For binned against a property, the key/value pairs are: ``"type": "binned"``, ``bin_low: The lower bound
of the bin (float)``, ``bin_high: The upper bound of the bin (float)``, ``bin_width: The width of the bin
(float)``, ``property_names: A list of strings denoting the properties to be initialised``. The bin values
are all initialized as 0.0.
For properties to be plotted as x-vs-y scatter plots, the key/value pairs are: ``"type": "scatter"``,
``property_names: A list of strings denoting the properties to be initialised``. All properties are
initialized as empty lists.
For properties that are single values, the key/value pairs are: ``"type": "single"``,
``property_names: A list of strings denoting the properties to be initialised``. All properties are
initialized with a value of 0.0.
If not specified, uses
.. highlight:: python
.. code-block:: python
default_galaxy_properties_to_analyze = {
"stellar_mass_bins": {
"type": "binned",
"bin_low": 8.0,
"bin_high": 12.0,
"bin_width": 0.1,
"property_names": [
"SMF", "red_SMF", "blue_SMF", "BMF", "GMF",
"centrals_MF", "satellites_MF", "quiescent_galaxy_counts",
"quiescent_centrals_counts", "quiescent_satellites_counts",
"fraction_bulge_sum", "fraction_bulge_var",
"fraction_disk_sum", "fraction_disk_var", "SMF_history",
],
},
"halo_mass_bins": {
"type": "binned",
"bin_low": 10.0,
"bin_high": 14.0,
"bin_width": 0.1,
"property_names": ["fof_HMF"] + [f"halo_{component}_fraction_sum"
for component in ["baryon", "stars", "cold", "hot", "ejected", "ICS", "bh"]
],
},
"scatter_properties": {
"type": "scatter",
"property_names": [
"BTF_mass", "BTF_vel", "sSFR_mass", "sSFR_sSFR",
"gas_frac_mass", "gas_frac", "metallicity_mass",
"metallicity", "bh_mass", "bulge_mass", "reservoir_mvir",
"reservoir_stars", "reservoir_cold", "reservoir_hot",
"reservoir_ejected", "reservoir_ICS", "x_pos",
"y_pos", "z_pos"
],
},
"single_properties": {
"type": "single",
"property_names": ["SMD_history", "SFRD_history"],
},
}
plots_that_need_smf : list of strings, optional
The plot toggles that require the stellar mass function to be properly computed and analyzed. For example,
plotting the quiescent fraction of galaxies requires knowledge of the total number of galaxies. The strings
here must **EXACTLY** match the keys in ``plot_toggles``.
If not specified, uses a default value of ``["SMF", "quiescent", "bulge_fraction", "SMF_history"]``.
IMFs : list of strings, optional, ``{"Chabrier", "Salpeter"}``
The initial mass functions used during the analysis of the galaxies. This is used to shift the
observational data points. The length of this variable is equal to the number of models to be analyzed.
If not specified, uses a ``"Chabrier"`` IMF for each model.
"""
num_models = len(sage_parameter_fnames)
self._num_models = num_models
if labels is None:
labels = [None] * num_models
if sage_output_formats is None:
sage_output_formats = [None] * num_models
if first_files_to_analyze is None:
first_files_to_analyze = [None] * num_models
if last_files_to_analyze is None:
last_files_to_analyze = [None] * num_models
if num_sage_output_files is None:
num_sage_output_files = [None] * num_models
if output_format_data_classes_dict is None:
output_format_data_classes_dict = {"sage_binary": SageBinaryData, "sage_hdf5": SageHdf5Data}
self._output_format_data_classes_dict = output_format_data_classes_dict
if random_seeds is None:
random_seeds = [None] * num_models
if IMFs is None:
IMFs = ["Chabrier"] * num_models
# These are parameters that are model dependant.
individual_model_parameters = [
sage_parameter_fnames,
sage_output_formats,
labels,
first_files_to_analyze,
last_files_to_analyze,
num_sage_output_files,
random_seeds,
IMFs,
]
if plot_toggles is None:
plot_toggles = default_plot_toggles
self._plot_toggles = plot_toggles
if history_redshifts is None:
history_redshifts = {
"SMF_history": "All",
"SMD_history": "All",
"SFRD_history": "All",
}
self._history_redshifts = history_redshifts
if plots_that_need_smf is None:
plots_that_need_smf = ["SMF", "quiescent", "bulge_fraction", "SMF_history"]
global_model_parameters = [
plot_toggles,
plots_that_need_smf,
]
# ``parameters`` is a matrix of parameters with each "column" specifying the parameters for a single model.
# Hence we want to iterate through column-wise and use these to build the ``Model`` class instance. Here, the
# ``map`` function does this transpose into a column-wise iterable.
models = [
Model(*model_parameters, *global_model_parameters)
for model_parameters in map(list, zip(*individual_model_parameters))
]
self._models = models
# Determine if the stellar mass function needs to be computed for each model. Important we do this before
# checking ``calculation_functions``.
for model in models:
if self._does_smf_need_computing(model):
model.plot_toggles["SMF"] = True
plot_toggles["SMF"] = True
# Then populate the `calculation_methods` dictionary. This dictionary will control which properties each model
# will calculate. The dictionary is populated using the plot_toggles defined above.
if calculation_functions is None:
calculation_functions = generate_func_dict(
plot_toggles, module_name="sage_analysis.example_calcs", function_prefix="calc_"
)
# Because we have adjusted the SMF, check if it's in the calculation dict.
for model in models:
# Condition checks for condition where SMF is being compputed but not in ``calculation_functions``.
if model._plot_toggles.get("SMF", None) and not calculation_functions.get("SMF", None):
raise ValueError(
"The stellar mass function (``SMF``) is being computed, either because it was set manually "
"(through ``plot_toggles``) or because another plot requires it (``plots_that_need_smf``).\n"
"However, ``calc_SMF`` was not found in ``calculation_functions``. Ensure that it is added."
)
if plot_functions is None:
plot_functions = generate_func_dict(
plot_toggles, module_name="sage_analysis.example_plots", function_prefix="plot_"
)
self._plot_functions = plot_functions
if galaxy_properties_to_analyze is None:
galaxy_properties_to_analyze = default_galaxy_properties_to_analyze
for model in self._models:
# Read the parameter files and update any of the missing parameters.
self._read_sage_file(model)
# Also initialise all of the properties that are required.
for name, galaxy_properties in galaxy_properties_to_analyze.items():
for snapshot, _ in enumerate(model._redshifts):
self._initialise_properties(name, model, galaxy_properties, snapshot)
model._calculation_functions = calculation_functions
# Go through the calculation functions and pull out those that are actually being analyzed over a number of
# snapshots. Ensure these aren't in ``_calculation_functions`` because otherwise we'd double count.
history_calculation_functions = {}
for func_name in self._history_redshifts.keys():
try:
calculation_function = calculation_functions[func_name]
except KeyError:
continue
history_calculation_functions[func_name] = calculation_function
del model._calculation_functions[func_name]
model._history_calculation_functions = history_calculation_functions
# Determine what snapshots we need to loop over to compute the properties that are tracked over time.
history_snaps_to_loop = self._determine_history_snapshots(model)
model._history_snaps_to_loop = history_snaps_to_loop
logger.info(f"Looping through snapshots {model._history_snaps_to_loop}")
    @property
    def num_models(self) -> int:
        """
        int : The number of models being analyzed. Set at construction from the
        number of **SAGE** parameter files passed in.
        """
        return self._num_models
    @property
    def output_format_data_classes_dict(self) -> Dict[str, Any]:
        """
        dict [str, class] : A dictionary that maps the output format name to the corresponding data class,
        e.g. ``"sage_hdf5"`` to :py:class:`~sage_analysis.sage_hdf5.SageHdf5Data`.
        """
        return self._output_format_data_classes_dict
    @property
    def history_redshifts(self) -> Dict[str, Union[str, List[float]]]:
        """
        dict [string, string or list of floats] : Specifies which redshifts should be analyzed for properties and
        plots that are tracked over time. The keys here **MUST** correspond to the keys in :py:attr:`~plot_toggles`. If
        the value of the entry is ``"All"``, then all snapshots will be analyzed. Otherwise, will search for the
        closest snapshots to the requested redshifts.
        """
        return self._history_redshifts
    @property
    def models(self) -> List[Model]:
        """
        list of :py:class:`~sage_analysis.model.Model` class instances : The :py:class:`~sage_analysis.model.Model` s
        being analyzed, constructed in the same order as the parameter files passed at initialization.
        """
        return self._models
    @property
    def plot_toggles(self) -> Dict[str, bool]:
        """
        dict [str, bool] : Specifies which properties should be analyzed and plotted. Only entries with a truthy
        value are processed.
        """
        return self._plot_toggles
    @property
    def plot_functions(self) -> Dict[str, Tuple[Callable, Dict[str, Any]]]:
        """
        dict [str, tuple(function, dict [str, any])] : A dictionary of functions that are used to plot the properties
        of galaxies being analyzed. Here, the outer key is the name of the corresponding plot toggle (e.g.,
        ``"SMF"``), the value is a tuple containing the function itself (e.g., ``plot_SMF()``), and another dictionary
        which specifies any optional keyword arguments to that function with keys as the name of variable (e.g.,
        ``"plot_sub_populations"``) and values as the variable value (e.g., ``True``).

        The functions in this dictionary are called for all files analyzed and **MUST** have a signature ``func(Models,
        snapshot, plot_helper, plot_output_format, optional_keyword_arguments)``. This dict can be generated using
        :py:func:`~sage_analysis.utils.generate_func_dict`.
        """
        return self._plot_functions
def _determine_history_snapshots(self, model: Model) -> Optional[List[int]]:
"""
Determines which snapshots need to be iterated over to track properties over time. For each
:py:class:`~sage_analysis.model.Model`, the ``_history_<property>_redshifts`` and
``_history_<property>_snapshots`` attributes are updated.
Parameters
----------
model : :py:class:`~sage_analysis.model.Model`
The :py:class:`~sage_analysis.model.Model` instance to be updated.
Returns
-------
snapshots_to_loop : list of ints
The snapshots that need to be analyzed for this model to ensure that the requested redshifts are analyzed
for the history properties.
"""
# Maybe there were no history redshifts specified.
if self._history_redshifts is None or self._history_redshifts == {}:
return None
# Convert these redshifts into snapshots.
for property_name, property_redshifts in self._history_redshifts.items():
# "All" denotes that we want all of the redshifts, otherwise use the values that were specified.
if property_redshifts == "All":
redshifts = model._redshifts
else:
redshifts = property_redshifts
attrname = f"_history_{property_name}_redshifts"
setattr(model, attrname, redshifts)
# Find the snapshots that are closest to the requested redshifts.
property_snaps = find_closest_indices(model._redshifts, redshifts)
attrname = f"_history_{property_name}_snapshots"
setattr(model, attrname, property_snaps)
# Based on the snapshots required to analyze over history, we may have to loop over different snapshots.
snapshots_to_loop: List[int] = []
for property_name in self._history_redshifts.keys():
# If the plot toggles have been changed, then there's no guarantee that the keys for
# ``_history_redshifts`` and ``_plot_toggles`` match.
try:
plot_toggle_value = self._plot_toggles[property_name]
except KeyError:
continue
# Furthermore, maybe plotting has been disabled for this property.
if not plot_toggle_value:
continue
# Otherwise, we are following this history of this property and hence will need to loop over its snapshots.
snaps = getattr(model, f"_history_{property_name}_snapshots")
snapshots_to_loop.extend(snaps)
return list(np.unique(snapshots_to_loop))
    def _read_sage_file(self, model: Model) -> None:
        """
        Reads a **SAGE** parameter file to determine all parameters such as cosmology, redshift list, etc. In
        particular, also initializes the :py:attr:`~sage_analysis.model.Model.data_class` for each model. This
        attribute is unique depending upon the value of :py:attr:`~sage_analysis.model.Model.sage_output_format` and
        the corresponding entry in :py:attr:`~output_format_data_classes_dict`.

        Parameters
        ----------
        model : :py:class:`~sage_analysis.model.Model`
            The :py:class:`~sage_analysis.model.Model` instance to be updated.
        """
        # If the format wasn't defined, then attempt to read a default parameter file to determine format.
        if model._sage_output_format is None:
            logger.info(
                f"No SAGE output format specified. Attempting to read ``{model.sage_file}`` and using the format "
                f"specified inside."
            )
            sage_dict = read_generic_sage_params(model.sage_file)
            model._sage_output_format = sage_dict["_output_format"]
            logger.info(f"Using ``{model._sage_output_format}`` output format.")
        # Each SAGE output has a specific class written to read in the data.
        model.data_class = self._output_format_data_classes_dict[model._sage_output_format](model, model.sage_file)
        # The data class has read the SAGE ini file. Update the model with the parameters read and those specified by
        # the user. We will also log some of these.
        for key, value in model.data_class.sage_model_dict.items():
            # Check if the attribute has already been set to a non-default value.
            try:
                attr_value = getattr(model, key)
            except AttributeError:
                # The attribute doesn't exist on the model yet; fall through and set it below.
                pass
            else:
                if attr_value is not None:
                    # The user already supplied a value; never overwrite it with the ini-file one.
                    continue
            # At this point, the user has not specified a non-default value. Use the one read from the ini file.
            setattr(model, key, value)
            # Rebuilt on every iteration because the messages interpolate the current ``value``.
            default_messages = {
                "_snapshot": f"Snapshot to analyze not specified; using final snapshot of the simulation ({value})",
                "_label": f"Label not specified; using the FileNameGalaxies from parameter file ({value})",
                "_first_file_to_analyze": f"First file to analyze not specified; using {value}",
                "_last_file_to_analyze":
                    f"Last file analyze not specified; using 1 - num cores SAGE ran with ({value})",
            }
            # Only some keys have an informational default message; the rest are set silently.
            try:
                logger.info(default_messages[key])
            except KeyError:
                pass
        # Finally, compute the volume based on the number of files being analyzed.
        model._volume = model.data_class.determine_volume_analyzed(model)
def _initialise_properties(
self,
name: str,
model: Model,
galaxy_properties: Dict[str, Union[str, List[str]]],
snapshot: int,
) -> None:
"""
Initialises galaxy properties that will be analyzed.
Parameters
----------
name : string
The name of the bins if the properties will be binned or a unique identifying name otherwise.
model : :py:class:`~sage_analysis.model.Model`
The :py:class:`~sage_analysis.model.Model` instance to be updated.
galaxy_properties : dict[str, float or str or list of strings]]
The galaxy properties that will be initialized. We defer to ``galaxy_properties_to_analyze`` in the
:py:method:`~__init__` method for a full description of this variable.
snapshot : int
The snapshot the properties are being updated for.
"""
# Only currently support a few property types.
allowed_property_types = ["binned", "scatter", "single"]
if galaxy_properties["type"] not in allowed_property_types:
raise ValueError(
f"Requested to analyze a galaxy property with unkown type. The galaxy properties were "
f"{galaxy_properties} and the only accepted types are {allowed_property_types}."
)
# TODO: Should this be a dict to allow the user to specify their own property type?
if galaxy_properties["type"] == "binned":
model.init_binned_properties(
galaxy_properties["bin_low"],
galaxy_properties["bin_high"],
galaxy_properties["bin_width"],
name,
galaxy_properties["property_names"],
snapshot,
)
elif galaxy_properties["type"] == "scatter":
model.init_scatter_properties(galaxy_properties["property_names"], snapshot)
elif galaxy_properties["type"] == "single":
model.init_single_properties(galaxy_properties["property_names"], snapshot)
logger.info(f"Initialized galaxy properties {galaxy_properties} for Snapshot {snapshot}")
def _does_smf_need_computing(self, model: Model) -> bool:
"""
Determines whether the stellar mass function needs to be calculated based on the values of
:py:attr:`~plot_toggles` :py:attr:`~sage_analysis.model.Model.plots_that_need_smf`.
Parameters
----------
model : :py:class:`~sage_analysis.model.Model`
The :py:class:`~sage_analysis.model.Model` instance we're checking.
Returns
-------
bool
A boolean indicating whether the stellar mass function needs to be computed or not.
"""
# Maybe the SMF has already been toggled on.
toggle = model._plot_toggles.get("SMF", None)
if toggle:
return True
# Determine those plots that are being computed.
plots = [toggle for toggle, value in model._plot_toggles.items() if value]
# Then, check if any of these plots need the SMF.
if any([plot in model._plots_that_need_smf for plot in plots]):
logger.info(f"One of your plots require the SMF to be calculated. Turning the SMF plot toggle on.")
return True
# Otherwise, they don't need the SMF.
return False
def _determine_snapshots_to_use(
self, snapshots: Optional[List[List[int]]], redshifts: Optional[List[List[int]]]
) -> List[List[int]]:
"""
Determine which snapshots should be analyzed/plotted based on the input from the user.
Parameters
---------
snapshots : nested list of ints or string, optional
The snapshots to analyze for each model. If both this variable and ``redshifts`` are not specified, uses
the highest snapshot (i.e., lowest redshift) as dictated by the
:py:attr:`~sage_analysis.model.Model.redshifts` attribute from the parameter file read for each model.
If an entry if ``"All"``, then all snapshots for that model will be analyzed.
The length of the outer list **MUST** be equal to :py:attr:`~num_models`.
Warnings
--------
Only **ONE** of ``snapshots`` and ``redshifts`` can be specified.
redshifts : nested list of ints, optional
The redshift to analyze for each model. If both this variable and ``snapshots`` are not specified, uses
the highest snapshot (i.e., lowest redshift) as dictated by the
:py:attr:`~sage_analysis.model.Model.redshifts` attribute from the parameter file read for each model.
The snapshots selected for analysis will be those that result in the redshifts closest to those requested.
If an entry if ``"All"``, then all snapshots for that model will be analyzed.
The length of the outer list **MUST** be equal to :py:attr:`~num_models`.
Warnings
--------
Only **ONE** of ``snapshots`` and ``redshifts`` can be specified.
Returns
-------
snapshots_for_models : nested list of ints
The snapshots to be analyzed for each model.
Errors
------
ValueError
Thrown if **BOTH** ``snapshots`` and ``redshifts`` are specified.
"""
# The user cannot have non-None values for both snapshots and redshifts.
if snapshots is not None and redshifts is not None:
raise ValueError("Both the ``snapshots`` and ``redshift`` arguments CANNOT be non-none. Only specify one.")
if snapshots is None and redshifts is None:
# User hasn't explicitly specified which snapshots or redshifts they want -> use the lowest redshift ones.
snapshots_for_models = [[len(model._redshifts) - 1] for model in self._models]
elif snapshots == "All" or redshifts == "All":
# User wants all snapshots (or equivalently redshifts).
snapshots_for_models = [list(np.arange(len(model._redshifts)) - 1) for model in self._models]
elif redshifts is not None:
# User has specified which redshifts they want; convert to the corresponding snapshots.
snapshots_for_models = [find_closest_indices(model._redshifts, redshifts) for model in self._models]
elif snapshots is not None:
# Otherwise the user has specified exactly what snapshots they want.
snapshots_for_models = snapshots
return snapshots_for_models
def analyze_galaxies(
self,
snapshots: Optional[List[List[Union[int, str]]]] = None,
redshifts: Optional[List[List[Union[float, str]]]] = None,
analyze_history_snapshots: bool = True,
) -> None:
"""
Analyses the galaxies of the initialized :py:attr:`~models`. These attributes will be updated directly, with
the properties accessible via ``GalaxyAnalysis.models[<model_num>].properties[<snapshot>][<property_name>]``.
Also, all snapshots required to track the properties over time (as specified by
:py:attr:`~sage_analysis.model.Model._history_snaps_to_loop`) will be analyzed, unless
``analyze_history_snapshots`` is ``False``.
Parameters
----------
snapshots : nested list of ints or string, optional
The snapshots to analyze for each model. If both this variable and ``redshifts`` are not specified, uses
the highest snapshot (i.e., lowest redshift) as dictated by the
:py:attr:`~sage_analysis.model.Model.redshifts` attribute from the parameter file read for each model.
If an entry if ``"All"``, then all snapshots for that model will be analyzed.
The length of the outer list **MUST** be equal to :py:attr:`~num_models`.
Notes
-----
If ``analyze_history_snapshots`` is ``True``, then the snapshots iterated over will be the unique
combination of the snapshots required for history snapshots and those specified by this variable.
Warnings
--------
Only **ONE** of ``snapshots`` and ``redshifts`` can be specified.
redshifts : nested list of ints, optional
The redshift to analyze for each model. If both this variable and ``snapshots`` are not specified, uses
the highest snapshot (i.e., lowest redshift) as dictated by the
:py:attr:`~sage_analysis.model.Model.redshifts` attribute from the parameter file read for each model.
The snapshots selected for analysis will be those that result in the redshifts closest to those requested.
If an entry if ``"All"``, then all snapshots for that model will be analyzed.
The length of the outer list **MUST** be equal to :py:attr:`~num_models`.
Notes
-----
If ``analyze_history_snapshots`` is ``True``, then the snapshots iterated over will be the unique
combination of the snapshots required for history snapshots and those specified by this variable.
Warnings
--------
Only **ONE** of ``snapshots`` and ``redshifts`` can be specified.
analyze_history_snapshots : bool, optional
Specifies whether the snapshots required to analyze the properties tracked over time (e.g., stellar mass or
star formation rate density) should be iterated over. If not specified, then only ``snapshot`` will be
analyzed.
Notes
-----
If you wish to analyze different properties to when you initialized an instance of :py:class:`~GalaxyAnalysis`,
you **MUST** re-initialize another instance. Otherwise, the properties will be non-zeroed and not initialized
correctly.
Errors
------
ValueError
Thrown if **BOTH** ``snapshots`` and ``redshifts`` are specified.
"""
if self._plot_toggles == {}:
logger.debug(f"No plot toggles specified.")
return
baseline_snapshots_models = self._determine_snapshots_to_use(snapshots, redshifts)
for model, baseline_snapshots in zip(self._models, baseline_snapshots_models):
logger.info(f"Analyzing baseline snapshots {baseline_snapshots}")
for snap in baseline_snapshots:
# First compute all of the "normal" properties that aren't tracked over time.
model.calc_properties_all_files(
model._calculation_functions, snap, debug=False, close_file=False
)
# Then check if this is a snapshot we're analyzing properties over time.
if model._history_snaps_to_loop is None or not analyze_history_snapshots:
continue
# Can't combine this condition with the line above because it would throw an error if ``None``.
if snap not in model._history_snaps_to_loop:
continue
model.calc_properties_all_files(
model._history_calculation_functions, snap, debug=False, close_file=False
)
# Finally, determine if there are any remaining snapshots that need to be analyzed for the history
# properties.
if model._history_snaps_to_loop is None or not analyze_history_snapshots:
continue
history_snaps = list(set(model._history_snaps_to_loop).difference(set(baseline_snapshots)))
logger.info(f"Also analyzing snapshots {history_snaps} for the properties over redshift.")
for snap in history_snaps:
model.calc_properties_all_files(
model._history_calculation_functions, snap, debug=False, close_file=False
)
model.data_class.close_file(model)
def generate_plots(
self,
snapshots: Optional[List[List[Union[int, str]]]] = None,
redshifts: Optional[List[List[Union[float, str]]]] = None,
plot_helper: Optional[PlotHelper] = None,
) -> Optional[List[matplotlib.figure.Figure]]:
"""
Generates the plots for the :py:attr:`~models` being analyzed. The plots to be created are defined by the
values of :py:attr:`~plot_toggles` specified when an instance of :py:class:`~GalaxyAnalysis` was initialized.
If you wish to analyze different properties or create different plots, you **MUST** initialize another instance
of :py:class:`~GalaxyAnalysis` with the new values for :py:attr:`~plot_toggles` (ensuring that values of
``calcuations_functions`` and ``plot_functions`` are updated if using non-default values for ``plot_toggles``).
This method should be run after analysing the galaxies using :py:method:`~analyze_galaxies`.
Parameters
----------
snapshots : nested list of ints or string, optional
The snapshots to plot for each model. If both this variable and ``redshifts`` are not specified, uses
the highest snapshot (i.e., lowest redshift) as dictated by the
:py:attr:`~sage_analysis.model.Model.redshifts` attribute from the parameter file read for each model.
If an entry if ``"All"``, then all snapshots for that model will be analyzed.
The length of the outer list **MUST** be equal to :py:attr:`~num_models`.
For properties that aren't analyzed over redshift, the snapshots for each model will be plotted on each
figure. For example, if we are plotting a single model, setting this variable to ``[[63, 50]]`` will
give results for snapshot 63 and 50 on each figure. For some plots (e.g., those properties that are scatter
plotted), this is undesirable and one should instead iterate over single snapshot values instead.
Notes
-----
If ``analyze_history_snapshots`` is ``True``, then the snapshots iterated over will be the unique
combination of the snapshots required for history snapshots and those specified by this variable.
Warnings
--------
Only **ONE** of ``snapshots`` and ``redshifts`` can be specified.
redshifts : nested list of ints, optional
The redshift to plot for each model. If both this variable and ``snapshots`` are not specified, uses
the highest snapshot (i.e., lowest redshift) as dictated by the
:py:attr:`~sage_analysis.model.Model.redshifts` attribute from the parameter file read for each model.
The snapshots selected for analysis will be those that result in the redshifts closest to those requested.
If an entry if ``"All"``, then all snapshots for that model will be analyzed.
The length of the outer list **MUST** be equal to :py:attr:`~num_models`.
Warnings
--------
Only **ONE** of ``snapshots`` and ``redshifts`` can be specified.
plot_helper : :py:class:`~sage_analysis.plot_helper.PlotHelper`, optional
A helper class that contains attributes and methods to assist with plotting. In particular, the path where
the plots will be saved and the output format. Refer to :doc:`../user/plot_helper` for more information on
how to initialize this class and its use.
If not specified, then will initialize a default instance of
:py:class:`~sage_analysis.plot_helper.PlotHelper`. Refer to the
:py:class:`~sage_analysis.plot_helper.PlotHelper` documentation for a list of default attributes.
Returns
-------
None
Returned if :py:attr:`~plot_toggles` is an empty dictionary.
figs
The figures generated by the :py:attr:`~plot_functions` functions.
"""
if self._plot_toggles == {}:
logger.debug(f"No plot toggles specified.")
return None
if plot_helper is None:
plot_helper = PlotHelper()
snapshots = self._determine_snapshots_to_use(snapshots, redshifts)
# Now do the plotting.
figs: List[matplotlib.figure.Figure] = []
for func, kwargs in self._plot_functions.values():
fig = func(
self._models,
snapshots,
plot_helper,
**kwargs
)
if type(fig) == list:
figs.extend(fig)
else:
figs.append(fig)
return figs | /sage_analysis-0.2.3.tar.gz/sage_analysis-0.2.3/sage_analysis/galaxy_analysis.py | 0.872863 | 0.389808 | galaxy_analysis.py | pypi |
from abc import ABC, abstractmethod
from typing import Any, Dict, Optional
from tqdm import tqdm
from sage_analysis.model import Model
class DataClass(ABC):
    """
    An abstract baseclass for handling the various **SAGE** output formats. It should not be instantiated directly;
    instead, use the underlying subclass for each format (e.g.,
    :py:class:`~sage_analysis.sage_binary.SageBinaryData` for binary output, and the analogous class for HDF5
    output).

    We refer to :doc:`../user/data_class` for more information about adding your own Data Class to ingest custom data
    formats.
    """

    @abstractmethod
    def determine_volume_analyzed(self, model: Model, **kwargs: Any) -> float:
        """
        Determines the volume analyzed. This can be smaller than the total simulation box.

        Parameters
        ----------
        model : :py:class:`~sage_analysis.model.Model` instance
            The model that this data class is associated with.

        **kwargs : any
            Extra arguments to allow other data classes to pass extra arguments to their implementation.

        Returns
        -------
        volume : float
            The numeric volume being processed during this run of the code in (Mpc/h)^3.
        """
        pass  # pragma: no cover

    @abstractmethod
    def read_sage_params(self, sage_file_path: str, **kwargs: Any) -> Dict[str, Any]:
        """
        Read the **SAGE** parameter file.

        Parameters
        ----------
        sage_file_path: string
            Path to the **SAGE** parameter file.

        **kwargs : any
            Extra arguments to allow other data classes to pass extra arguments to their implementation.

        Returns
        -------
        model_dict: dict [str, var]
            Dictionary containing the parameter names and their values.
        """
        pass  # pragma: no cover

    @abstractmethod
    def determine_num_gals(self, model: Model, **kwargs: Any):
        """
        Determines the number of galaxies in all files for this :py:class:`~sage_analysis.model.Model`.

        Parameters
        ----------
        model: :py:class:`~sage_analysis.model.Model` class
            The :py:class:`~sage_analysis.model.Model` we're reading data for.

        **kwargs : any
            Extra arguments to allow other data classes to pass extra arguments to their implementation.
        """
        pass  # pragma: no cover

    @abstractmethod
    def read_gals(
        self,
        model: Model,
        file_num: int,
        snapshot: int,
        pbar: Optional[tqdm] = None,
        plot_galaxies: bool = False,
        debug: bool = False,
        **kwargs: Any,
    ) -> Any:
        """
        Reads the galaxies of a model file for the specified file number and snapshot.

        Parameters
        ----------
        model : :py:class:`~sage_analysis.model.Model` class
            The :py:class:`~sage_analysis.model.Model` we're reading data for.

        file_num : int
            Suffix number of the file we're reading.

        snapshot : int
            The snapshot we're reading.

        pbar : ``tqdm`` class instance, optional
            Bar showing the progress of galaxy reading. If not specified, progress bar will not show.

        plot_galaxies : bool, optional
            If specified, plots and saves the 3D distribution of galaxies for this file.

        debug : bool, optional
            If specified, prints out extra useful debug information.

        **kwargs : any
            Extra arguments to allow other data classes to pass extra arguments to their implementation.

        Returns
        -------
        gals : The format is specified by the underlying data class implementation
            The galaxies for this file.
        """
        pass  # pragma: no cover

    @abstractmethod
    def update_snapshot_and_data_path(self, model: Model, snapshot: int, **kwargs: Any) -> None:
        """
        Updates the :py:attr:`~sage_analysis.model.Model._sage_data_path` to point to a new redshift file (if necessary
        for the underlying implementation). Uses the redshift array :py:attr:`~sage_analysis.model.Model.redshifts`.

        Parameters
        ----------
        model : :py:class:`~sage_analysis.model.Model` class
            The :py:class:`~sage_analysis.model.Model` being updated.

        snapshot : int
            Snapshot we're updating :py:attr:`~sage_analysis.model.Model._sage_data_path` to
            point to.

        **kwargs : any
            Extra arguments to allow other data classes to pass extra arguments to their implementation.
        """
        pass  # pragma: no cover

    @abstractmethod
    def close_file(self, model: Model, **kwargs) -> None:
        """
        Closes an open galaxy file. This is useful when reading the HDF5 data format where a single file contains many
        snapshots. For the binary format, this is an empty method.

        Parameters
        ----------
        model : :py:class:`~sage_analysis.model.Model` class
            The :py:class:`~sage_analysis.model.Model` whose file is being closed.

        **kwargs : any
            Extra arguments to allow other data classes to pass extra arguments to their implementation.
        """
        pass  # pragma: no cover
import sys
import logging
import os
from typing import Any, Callable, Dict, Optional, Tuple, List
import numpy as np
logger = logging.getLogger(__name__)
def generate_func_dict(
    plot_toggles,
    module_name,
    function_prefix,
    keyword_args=None
) -> Dict[str, Tuple[Callable, Dict[str, Any]]]:
    """
    Generates a dictionary where the keys are the function name and the value is a tuple
    containing the function itself (0th element) and keyword arguments as a dictionary
    (1st element). All functions in the returned dictionary are expected to have the same
    call signature for non-keyword arguments. Functions are only added when the
    ``plot_toggles`` value is non-zero.

    Functions are required to be named ``<module_name><function_prefix><plot_toggle_key>``
    For example, the default calculation function are kept in the ``model.py`` module and
    are named ``calc_<toggle>``. E.g., ``sage_analysis.model.calc_SMF()``,
    ``sage_analysis.model.calc_BTF()``, ``sage_analysis.model.calc_sSFR()`` etc.

    Parameters
    ----------
    plot_toggles: dict, [string, int]
        Dictionary specifying the name of each property/plot and whether the values
        will be generated + plotted. A value of 1 denotes plotting, whilst a value of
        0 denotes not plotting. Entries with a value of 1 will be added to the function
        dictionary.

    module_name: string
        Name of the module where the functions are located. If the functions are located
        in this module, pass an empty string "".

    function_prefix: string
        Prefix that is added to the start of each function.

    keyword_args: dict [string, dict[string, variable]], optional
        Allows the adding of keyword arguments to the functions associated with the
        specified plot toggle. The name of each keyword argument and associated value is
        specified in the inner dictionary.

    Returns
    -------
    func_dict: dict [string, tuple(function, dict[string, variable])]
        The key of this dictionary is the name of the function. The value is a tuple with
        the 0th element being the function and the 1st element being a dictionary of
        additional keyword arguments to be passed to the function. The inner dictionary is
        keyed by the keyword argument names with the value specifying the keyword argument
        value.

    Raises
    ------
    KeyError
        Raised if ``module_name`` has not been imported.

    AttributeError
        Raised if a function corresponding to an enabled plot toggle cannot be found in the module.
    """
    # ``None`` stands in for "no extra kwargs" to avoid the mutable-default-argument pitfall.
    if keyword_args is None:
        keyword_args = {}

    # Check if the specified module is present.
    try:
        module = sys.modules[module_name]
    except KeyError:
        raise KeyError(
            f"Module ``{module_name}`` has not been imported.\nPerhaps you need to create an empty ``__init__.py`` "
            f"file to ensure your package can be imported.\nAlso, ensure ``import {module_name}`` is at the top of "
            f"your script, before ``generate_func_dict`` is called."
        )

    # Only populate those methods that have been marked in the `plot_toggles` dictionary.
    func_dict = {}
    for toggle, value in plot_toggles.items():
        if not value:
            continue

        func_name = f"{function_prefix}{toggle}"

        # Be careful. Maybe the func for a specified `plot_toggle` value wasn't
        # added to the module.
        try:
            func = getattr(module, func_name)
        except AttributeError:
            # The first line of this message previously lacked its ``f`` prefix, so the
            # ``{func_name}``/``{toggle}`` placeholders were emitted verbatim.
            raise AttributeError(
                f"Tried to get the func named ``{func_name}`` corresponding to ``plot_toggle`` value ``{toggle}``. "
                f"However, no func named ``{func_name}`` could be found in ``{module_name}`` module."
            )

        # We may have specified some keyword arguments for this plot toggle; default to none.
        key_args = keyword_args.get(toggle, {})

        func_dict[toggle] = (func, key_args)

    return func_dict
def select_random_indices(
    inds: np.ndarray,
    global_num_inds_available: int,
    global_num_inds_requested: int,
    seed: Optional[int] = None,
) -> np.ndarray:
    """
    Selects a random subset of ``inds`` when the total number of indices (across all files) is known. For example,
    this is used when picking 100 galaxies out of a sample of 10,000.

    When the total number of indices is **NOT** known (e.g., selecting 100 spiral galaxies without knowing how many
    spirals exist across all files),
    :py:meth:`~sage_analysis.model.Model.select_random_indices_assumed_equal_distribution` should be used instead.

    Parameters
    ----------
    inds : :obj:`~numpy.ndarray` of values
        The indices the random subset is drawn from.

    global_num_inds_available : int
        The total number of indices available across all files.

    global_num_inds_requested : int
        The total number of indices requested across all files.

    seed : int, optional
        If specified, seeds the random number generator with the specified seed.

    Returns
    -------
    random_inds : :obj:`~numpy.ndarray` of values
        The chosen indices.
    """
    if seed is not None:
        np.random.seed(seed)

    # This file is responsible for its proportional share of the global request.
    num_to_select = int(len(inds) / global_num_inds_available * global_num_inds_requested)

    # Fewer (or exactly as many) indices on hand than needed -> keep them all.
    if len(inds) <= num_to_select:
        return inds

    # Otherwise draw the subset at random.
    return np.random.choice(inds, size=num_to_select)
def read_generic_sage_params(sage_file_path: str) -> Dict[str, Any]:
    """
    Reads the **SAGE** parameter file values. This function is used for the default ``sage_binary`` and ``sage_hdf5``
    formats. If you have a custom format, you will need to write a ``read_sage_params`` function in your own data
    class.

    Parameters
    ----------
    sage_file_path: string
        Path to the **SAGE** parameter file.

    Returns
    -------
    model_dict: dict [str, var]
        Dictionary containing the parameter names and their values.

    Errors
    ------
    FileNotFoundError
        Raised if the specified **SAGE** parameter file is not found.
    """
    # Fields that we will be reading from the ini file.
    SAGE_fields = [
        "FileNameGalaxies",
        "OutputDir",
        "FirstFile",
        "LastFile",
        "OutputFormat",
        "NumSimulationTreeFiles",
        "FileWithSnapList",
        "Hubble_h",
        "BoxSize",
        "PartMass"
    ]
    SAGE_dict = {}
    # Ignore lines starting with one of these.
    comment_characters = [";", "%", "-"]
    try:
        with open(sage_file_path, "r") as SAGE_file:
            data = SAGE_file.readlines()
            # Each line in the parameter file is of the form...
            # parameter_name parameter_value.
            for line in range(len(data)):
                # Remove surrounding whitespace from the line.
                stripped = data[line].strip()
                # May have been an empty line.
                try:
                    first_char = stripped[0]
                except IndexError:
                    continue
                # Check for comment.
                if first_char in comment_characters:
                    continue
                # Split into [name, value] list.
                split = stripped.split()
                # Then check if the field is one we care about.
                if split[0] in SAGE_fields:
                    SAGE_dict[split[0]] = split[1]
    except FileNotFoundError:
        raise FileNotFoundError(f"Could not find SAGE ini file {sage_file_path}")
    # NOTE(review): "FirstFile", "LastFile" and "PartMass" are parsed into ``SAGE_dict`` above but never copied into
    # ``model_dict`` below -- confirm whether that is intentional.
    # Now we have all the fields, rebuild the dictionary to be exactly what we need for
    # initialising the model.
    model_dict = {}
    model_dict["_label"] = SAGE_dict["FileNameGalaxies"]
    # ``OutputFormat`` may legitimately be absent from the ini file; callers handle a missing key themselves.
    try:
        model_dict["_output_format"] = SAGE_dict["OutputFormat"]
    except KeyError:
        pass
    model_dict["_parameter_dirpath"] = os.path.dirname(sage_file_path)
    # ``FileWithSnapList`` may either be an absolute or relative path (wrt to ``_parameter_dirpath``).
    try:
        fname_absolute = f"{model_dict['_parameter_dirpath']}/{SAGE_dict['FileWithSnapList']}"
        alist = np.loadtxt(fname_absolute)
    except IOError:
        fname_relative = f"{SAGE_dict['FileWithSnapList']}"
        logger.debug(f"Could not find snapshot file {fname_absolute}. Trying as {fname_relative} instead.")
        alist = np.loadtxt(f"{SAGE_dict['FileWithSnapList']}")
    # The snapshot file lists scale factors; convert a = 1/(1+z) into redshifts.
    redshifts = 1.0 / alist - 1.0
    model_dict["_redshifts"] = redshifts
    model_dict["_snapshot"] = len(alist) - 1 # By default, plot the final snapshot.
    base_sage_output_path_absolute = f"{model_dict['_parameter_dirpath']}/{SAGE_dict['OutputDir']}/{SAGE_dict['FileNameGalaxies']}" # noqa: E501
    model_dict["_base_sage_output_path_absolute"] = base_sage_output_path_absolute
    base_sage_output_path_relative = f"{SAGE_dict['OutputDir']}/{SAGE_dict['FileNameGalaxies']}" # noqa: E501
    model_dict["_base_sage_output_path_relative"] = base_sage_output_path_relative
    model_dict["_output_dir"] = SAGE_dict['OutputDir']
    model_dict["_hubble_h"] = float(SAGE_dict["Hubble_h"])
    model_dict["_box_size"] = float(SAGE_dict["BoxSize"])
    model_dict["_num_sim_tree_files"] = int(SAGE_dict["NumSimulationTreeFiles"])
    return model_dict
def find_closest_indices(values: List[float], target_values: List[int]) -> List[int]:
    """
    Finds the indices in ``values`` that result in values closest to ``target_values``.

    Parameters
    ----------
    values : list of floats
        The pool of values being searched.

    target_values : list of floats
        For each of these, the index of the closest entry in ``values`` is returned.

    Returns
    -------
    closest_indices : list of ints
        One index into ``values`` per entry of ``target_values``.
    """
    # Convert once, outside the loop: this both hoists the invariant work and accepts plain
    # Python lists (previously ``values - target_value`` raised ``TypeError`` for a list).
    values_arr = np.asarray(values)
    closest_indices = [int(np.abs(values_arr - target_value).argmin()) for target_value in target_values]
    return closest_indices
import logging
import warnings

import numpy as np
from scipy import stats

from sage_analysis.model import Model
def calc_SMF(
    model: "Model",
    gals,
    snapshot: int,
    calc_sub_populations: bool = False,
    smf_property_name: str = "SMF"
):
    """
    Calculates the stellar mass function of the given galaxies. That is, the number of galaxies at a given stellar
    mass.

    The ``Model.properties["snapshot_<snapshot>"]["SMF"]`` array will be updated. If requested, the galaxy
    population is also split into "red" and "blue" sub-populations based on the value of
    :py:attr:`~sage_analysis.model.Model.sSFRcut`, updating the
    ``Model.properties["snapshot_<snapshot>"]["red_SMF"]`` and
    ``Model.properties["snapshot_<snapshot>"]["blue_SMF"]`` arrays.

    Parameters
    ----------
    snapshot : int
        The snapshot the SMF is being calculated at.

    calc_sub_populations : boolean, optional
        If ``True``, calculates the stellar mass function for red and blue sub-populations.

    smf_property_name : string, optional
        The name of the property used to store the stellar mass function. Useful if different calculations are
        computing the stellar mass function but saving it as a different property.
    """
    non_zero_stellar = np.where(gals["StellarMass"][:] > 0.0)[0]
    if len(non_zero_stellar) == 0:
        # NOTE: previously this line referenced ``logger``, which this module never defined (NameError).
        logging.getLogger(__name__).info(
            "Could not find any galaxies with non-zero stellar mass for the stellar mass function."
        )
        return

    # Masses are stored internally in units of 1.0e10 Msun/h.
    stellar_mass = np.log10(gals["StellarMass"][:][non_zero_stellar] * 1.0e10 / model.hubble_h)

    gals_per_bin, _ = np.histogram(stellar_mass, bins=model.bins["stellar_mass_bins"])
    model.properties[f"snapshot_{snapshot}"][smf_property_name] += gals_per_bin

    # We often want to plot the red and blue subpopulations. So bin them if requested.
    if calc_sub_populations:
        sSFR = (gals["SfrDisk"][:][non_zero_stellar] + gals["SfrBulge"][:][non_zero_stellar]) / \
            (gals["StellarMass"][:][non_zero_stellar] * 1.0e10 / model.hubble_h)
        sSFR_threshold = 10.0 ** model._sSFRcut

        # Galaxies whose sSFR is exactly the threshold land in neither population (preserved behaviour).
        red_mass = stellar_mass[sSFR < sSFR_threshold]
        counts, _ = np.histogram(red_mass, bins=model.bins["stellar_mass_bins"])
        model.properties[f"snapshot_{snapshot}"]["red_SMF"] += counts

        blue_mass = stellar_mass[sSFR > sSFR_threshold]
        counts, _ = np.histogram(blue_mass, bins=model.bins["stellar_mass_bins"])
        model.properties[f"snapshot_{snapshot}"]["blue_SMF"] += counts
def calc_BMF(model, gals, snapshot: int):
    """
    Bins galaxies by their total baryonic (stellar + cold gas) mass to build the baryon mass function.

    The per-bin counts are added onto ``Model.properties["snapshot_<snapshot>"]["BMF"]``.
    """
    stellar = gals["StellarMass"][:]
    cold = gals["ColdGas"][:]

    # Only galaxies that actually hold baryons can have their mass logged.
    has_baryons = np.where(stellar + cold > 0.0)[0]
    log_baryon_mass = np.log10((stellar[has_baryons] + cold[has_baryons]) * 1.0e10 / model.hubble_h)

    # The baryon mass function shares the stellar-mass bin edges.
    counts, _ = np.histogram(log_baryon_mass, bins=model.bins["stellar_mass_bins"])
    model.properties[f"snapshot_{snapshot}"]["BMF"] += counts
def calc_GMF(model, gals, snapshot: int):
    """
    Bins galaxies by their cold gas mass to build the gas mass function.

    The per-bin counts are added onto ``Model.properties["snapshot_<snapshot>"]["GMF"]``.
    """
    cold = gals["ColdGas"][:]

    has_cold_gas = np.where(cold > 0.0)[0]
    log_cold_mass = np.log10(cold[has_cold_gas] * 1.0e10 / model.hubble_h)

    # The gas mass function is binned with the shared ``stellar_mass_bins`` edges.
    counts, _ = np.histogram(log_cold_mass, bins=model.bins["stellar_mass_bins"])
    model.properties[f"snapshot_{snapshot}"]["GMF"] += counts
def calc_BTF(model, gals, snapshot: int):
"""
Calculates the baryonic Tully-Fisher relation for spiral galaxies in the given set of galaxies.
The number of galaxies added to ``Model.properties["snapshot_<snapshot>"]["BTF_mass"]`` and
``Model.properties["snapshot_<snapshot>"]["BTF_vel"]`` arrays is given by
:py:attr:`~sage_analysis.model.Model.sample_size` weighted by ``number_spirals_passed /``
:py:attr:`~sage_analysis.model.Model._num_gals_all_files`. If this value is greater than ``number_spirals_passed``,
then all spiral galaxies will be used.
"""
# Make sure we're getting spiral galaxies. That is, don't include galaxies that are too bulgy.
w = np.where(gals["StellarMass"][:] > 0.0)[0] # This will ensure we don't get divide by 0 errors.
spirals = np.where((gals["Type"][:][w] == 0) & (gals["StellarMass"][:][w] + gals["ColdGas"][:][w] > 0.0) &
(gals["StellarMass"][:][w] > 0.0) & (gals["ColdGas"][:][w] > 0.0) &
(gals["BulgeMass"][:][w] / gals["StellarMass"][:][w] > 0.1) &
(gals["BulgeMass"][:][w] / gals["StellarMass"][:][w] < 0.5))[0]
if len(spirals) == 0:
logger.info(f"Could not find any spiral galaxies for analysis of the baryonic Tully-Fisher relationship.")
return
# Careful here, ``spirals`` is selecting on ``w``. We want to select on ``gals``.
spirals = w[spirals]
# Select a random subset of galaxies (if necessary).
spirals = model.select_random_galaxy_indices(spirals, len(model.properties[f"snapshot_{snapshot}"]["BTF_mass"]))
baryon_mass = np.log10((gals["StellarMass"][:][spirals] + gals["ColdGas"][:][spirals]) * 1.0e10 / model.hubble_h)
velocity = np.log10(gals["Vmax"][:][spirals])
model.properties[f"snapshot_{snapshot}"]["BTF_mass"] = np.append(
model.properties[f"snapshot_{snapshot}"]["BTF_mass"], baryon_mass
)
model.properties[f"snapshot_{snapshot}"]["BTF_vel"] = np.append(
model.properties[f"snapshot_{snapshot}"]["BTF_vel"], velocity
)
def calc_sSFR(model, gals, snapshot: int):
    """
    Computes the specific star formation rate (total SFR divided by stellar mass) versus stellar mass.

    A (possibly sub-sampled, via ``model.select_random_galaxy_indices``) selection of galaxies with non-zero
    stellar mass and non-zero star formation is appended to the
    ``Model.properties["snapshot_<snapshot>"]["sSFR_mass"]`` and
    ``Model.properties["snapshot_<snapshot>"]["sSFR_sSFR"]`` arrays (both in log10).
    """
    star_forming = np.where(
        (gals["StellarMass"][:] > 0.0) & (gals["SfrDisk"][:] + gals["SfrBulge"][:] > 0.0)
    )[0]

    # Sub-sample the galaxies if the model requests it.
    chosen = model.select_random_galaxy_indices(
        star_forming, len(model.properties[f"snapshot_{snapshot}"]["sSFR_mass"])
    )

    # Stellar masses are stored internally in units of 1.0e10 Msun/h.
    stellar_mass_msun = gals["StellarMass"][:][chosen] * 1.0e10 / model.hubble_h
    total_sfr = gals["SfrDisk"][:][chosen] + gals["SfrBulge"][:][chosen]

    properties = model.properties[f"snapshot_{snapshot}"]
    properties["sSFR_mass"] = np.append(properties["sSFR_mass"], np.log10(stellar_mass_msun))
    properties["sSFR_sSFR"] = np.append(properties["sSFR_sSFR"], np.log10(total_sfr / stellar_mass_msun))
def calc_gas_fraction(model, gals, snapshot: int):
    """
    Computes the cold-gas fraction of the baryons, ``ColdGas / (StellarMass + ColdGas)``, versus stellar mass for
    spiral galaxies.

    A (possibly sub-sampled, via ``model.select_random_galaxy_indices``) selection of central, moderately-bulged
    ("spiral") galaxies is appended to the ``Model.properties["snapshot_<snapshot>"]["gas_frac_mass"]`` and
    ``Model.properties["snapshot_<snapshot>"]["gas_frac"]`` arrays.
    """
    # Restrict to galaxies with stellar mass first so the bulge-to-total ratio below cannot divide by zero.
    with_stars = np.where(gals["StellarMass"][:] > 0.0)[0]

    stellar = gals["StellarMass"][:][with_stars]
    cold = gals["ColdGas"][:][with_stars]
    bulge_ratio = gals["BulgeMass"][:][with_stars] / stellar

    # "Spirals" here are centrals with baryons whose bulge holds between 10% and 50% of the stellar mass.
    is_spiral = (gals["Type"][:][with_stars] == 0) & (stellar + cold > 0.0) & \
        (bulge_ratio > 0.1) & (bulge_ratio < 0.5)

    # ``is_spiral`` is defined over ``with_stars``; map back to indices into ``gals``.
    spirals = with_stars[np.where(is_spiral)[0]]

    # Sub-sample the galaxies if the model requests it.
    spirals = model.select_random_galaxy_indices(
        spirals, len(model.properties[f"snapshot_{snapshot}"]["gas_frac_mass"])
    )

    stellar_mass = np.log10(gals["StellarMass"][:][spirals] * 1.0e10 / model.hubble_h)
    gas_fraction = gals["ColdGas"][:][spirals] / (gals["StellarMass"][:][spirals] + gals["ColdGas"][:][spirals])

    properties = model.properties[f"snapshot_{snapshot}"]
    properties["gas_frac_mass"] = np.append(properties["gas_frac_mass"], stellar_mass)
    properties["gas_frac"] = np.append(properties["gas_frac"], gas_fraction)
def calc_metallicity(model, gals, snapshot: int):
    """
    Computes the cold-gas metallicity, ``12 + log10(O/H)``-style scaled to solar (0.02), versus stellar mass for
    central galaxies.

    A (possibly sub-sampled, via ``model.select_random_galaxy_indices``) selection of gas-rich centrals is appended
    to the ``Model.properties["snapshot_<snapshot>"]["metallicity_mass"]`` and
    ``Model.properties["snapshot_<snapshot>"]["metallicity"]`` arrays.
    """
    # Restrict to galaxies with stellar mass first so the ratios below cannot divide by zero.
    with_stars = np.where(gals["StellarMass"][:] > 0.0)[0]

    cold = gals["ColdGas"][:][with_stars]
    is_central = (
        (gals["Type"][:][with_stars] == 0) &
        (cold > 0.0) &
        (gals["MetalsColdGas"][:][with_stars] > 0.0) &
        (cold / (gals["StellarMass"][:][with_stars] + cold) > 0.1)
    )

    # ``is_central`` is defined over ``with_stars``; map back to indices into ``gals``.
    centrals = with_stars[np.where(is_central)[0]]

    # Sub-sample the galaxies if the model requests it.
    centrals = model.select_random_galaxy_indices(
        centrals, len(model.properties[f"snapshot_{snapshot}"]["metallicity_mass"])
    )

    stellar_mass = np.log10(gals["StellarMass"][:][centrals] * 1.0e10 / model.hubble_h)
    metallicity = np.log10((gals["MetalsColdGas"][:][centrals] / gals["ColdGas"][:][centrals]) / 0.02) + 9.0

    properties = model.properties[f"snapshot_{snapshot}"]
    properties["metallicity_mass"] = np.append(properties["metallicity_mass"], stellar_mass)
    properties["metallicity"] = np.append(properties["metallicity"], metallicity)
def calc_bh_bulge(model, gals, snapshot: int):
    """
    Records black hole mass versus bulge mass.

    A (possibly sub-sampled, via ``model.select_random_galaxy_indices``) selection of galaxies is appended to the
    ``Model.properties["snapshot_<snapshot>"]["bh_mass"]`` and
    ``Model.properties["snapshot_<snapshot>"]["bulge_mass"]`` arrays (both log10, Msun/h removed via ``hubble_h``).

    Notes
    -----
    Only galaxies with bulge mass greater than 10^8 Msun/h and black hole mass greater than 10^5 Msun/h are
    considered (0.01 and 0.00001 in the internal units of 1.0e10 Msun/h).
    """
    massive_enough = np.where((gals["BulgeMass"][:] > 0.01) & (gals["BlackHoleMass"][:] > 0.00001))[0]

    # Sub-sample the galaxies if the model requests it.
    massive_enough = model.select_random_galaxy_indices(
        massive_enough, len(model.properties[f"snapshot_{snapshot}"]["bh_mass"])
    )

    properties = model.properties[f"snapshot_{snapshot}"]
    properties["bh_mass"] = np.append(
        properties["bh_mass"], np.log10(gals["BlackHoleMass"][:][massive_enough] * 1.0e10 / model.hubble_h)
    )
    properties["bulge_mass"] = np.append(
        properties["bulge_mass"], np.log10(gals["BulgeMass"][:][massive_enough] * 1.0e10 / model.hubble_h)
    )
def calc_quiescent(model, gals, snapshot: int):
    """
    Counts quiescent galaxies as a function of stellar mass, additionally split into centrals and satellites.

    The ``Model.properties["snapshot_<snapshot>"]["centrals_MF"]``,
    ``Model.properties["snapshot_<snapshot>"]["satellites_MF"]``,
    ``Model.properties["snapshot_<snapshot>"]["quiescent_galaxy_counts"]``,
    ``Model.properties["snapshot_<snapshot>"]["quiescent_centrals_counts"]``, and
    ``Model.properties["snapshot_<snapshot>"]["quiescent_satellites_counts"]`` arrays will be updated.

    Notes
    -----
    Only raw **counts** are accumulated here. To obtain the quiescent fraction, divide by the number of galaxies in
    each stellar mass bin, i.e. the stellar mass function ``Model.properties["snapshot_<snapshot>"]["SMF"]``. See
    :func:`~sage_analysis.example_plots.plot_quiescent` for an example implementation.
    """
    has_stars = np.where(gals["StellarMass"][:] > 0.0)[0]

    mass = np.log10(gals["StellarMass"][:][has_stars] * 1.0e10 / model.hubble_h)
    galaxy_type = gals["Type"][:][has_stars]

    # Quiescent galaxies are those forming stars below the model's specific-SFR threshold.
    specific_sfr = (gals["SfrDisk"][:][has_stars] + gals["SfrBulge"][:][has_stars]) / \
        (gals["StellarMass"][:][has_stars] * 1.0e10 / model.hubble_h)
    is_quiescent = specific_sfr < 10.0 ** model._sSFRcut

    properties = model.properties[f"snapshot_{snapshot}"]
    bins = model.bins["stellar_mass_bins"]

    # Histogram each sub-population (all centrals/satellites, then the quiescent subsets of each).
    selections = [
        ("centrals_MF", galaxy_type == 0),
        ("satellites_MF", galaxy_type == 1),
        ("quiescent_galaxy_counts", is_quiescent),
        ("quiescent_centrals_counts", (galaxy_type == 0) & is_quiescent),
        ("quiescent_satellites_counts", (galaxy_type == 1) & is_quiescent),
    ]
    for property_name, selection in selections:
        counts, _ = np.histogram(mass[selection], bins=bins)
        properties[property_name] += counts
def calc_bulge_fraction(model, gals, snapshot: int):
    """
    Accumulates the ``bulge_mass / stellar_mass`` and ``disk_mass / stellar_mass`` ratios in bins of stellar mass.

    The ``Model.properties["snapshot_<snapshot>"]["fraction_bulge_sum"]``,
    ``Model.properties["snapshot_<snapshot>"]["fraction_disk_sum"]``,
    ``Model.properties["snapshot_<snapshot>"]["fraction_bulge_var"]``, and
    ``Model.properties["snapshot_<snapshot>"]["fraction_disk_var"]`` arrays will be updated.

    Notes
    -----
    Only the **sum** of the fractions in each stellar mass bin is stored. To obtain the mean fraction, divide by the
    number of galaxies per bin, i.e. the stellar mass function ``Model.properties["snapshot_<snapshot>"]["SMF"]``.
    See :func:`~sage_analysis.example_plots.plot_bulge_fraction` for the full implementation.
    """
    has_stars = np.where(gals["StellarMass"][:] > 0.0)[0]

    stellar_mass = np.log10(gals["StellarMass"][:][has_stars] * 1.0e10 / model.hubble_h)
    bulge_fraction = gals["BulgeMass"][:][has_stars] / gals["StellarMass"][:][has_stars]
    disk_fraction = 1.0 - bulge_fraction

    properties = model.properties[f"snapshot_{snapshot}"]
    bins = model.bins["stellar_mass_bins"]
    num_files = model._last_file_to_analyze - model._first_file_to_analyze + 1

    # Per-bin sums are accumulated so that the mean can be taken once every file has been processed.
    for property_name, fractions in (("fraction_bulge_sum", bulge_fraction), ("fraction_disk_sum", disk_fraction)):
        binned_sum, _, _ = stats.binned_statistic(stellar_mass, fractions, statistic=np.sum, bins=bins)
        properties[property_name] += binned_sum

    # The per-file variances are pre-weighted by the number of files that will be averaged over.
    for property_name, fractions in (("fraction_bulge_var", bulge_fraction), ("fraction_disk_var", disk_fraction)):
        binned_var, _, _ = stats.binned_statistic(stellar_mass, fractions, statistic=np.var, bins=bins)
        properties[property_name] += binned_var / num_files
def calc_baryon_fraction(model, gals, snapshot: int):
    """
    Calculates the ``mass_baryons / halo_virial_mass`` as a function of halo virial mass for each baryon reservoir
    (stellar, cold, hot, ejected, intra-cluster stars and black hole). Also calculates the ratio for the total
    baryonic mass.

    The ``Model.properties["snapshot_<snapshot>"]["halo_<reservoir_name>_fraction_sum"]`` arrays are updated for
    each reservoir. In addition, ``Model.properties["snapshot_<snapshot>"]["halo_baryon_fraction_sum"]`` is updated.

    Raises
    ------
    ValueError
        If the ``Model.bins["halo_mass_bins"]`` bin array or the
        ``Model.properties["snapshot_<snapshot>"]["fof_HMF"]`` property has not been initialized.

    Notes
    -----
    The halo virial mass we use is the **background FoF halo**, not the immediate host halo of each galaxy.

    We only **sum** the baryon mass in each halo mass bin. When converting this to the mass fraction, one must
    divide by the number of halos in each halo mass bin, ``Model.properties["snapshot_<snapshot>"]["fof_HMF"]``. See
    :func:`~sage_analysis.example_plots.plot_baryon_fraction` for full implementation.
    """
    # Careful here, our "Halo Mass Function" is only counting the *BACKGROUND FOF HALOS*.
    centrals = np.where((gals["Type"][:] == 0) & (gals["Mvir"][:] > 0.0))[0]
    centrals_fof_mass = np.log10(gals["Mvir"][:][centrals] * 1.0e10 / model.hubble_h)

    # Previously these two failure modes printed a message and raised a bare ``ValueError``; carry the message in
    # the exception instead so callers see what went wrong.
    try:
        halos_binned, _ = np.histogram(centrals_fof_mass, bins=model.bins["halo_mass_bins"])
    except KeyError as err:
        raise ValueError("The `halo_mass_bins` bin array was not initialized.") from err

    try:
        model.properties[f"snapshot_{snapshot}"]["fof_HMF"] += halos_binned
    except KeyError as err:
        raise ValueError("The `fof_HMF` property was not initialized.") from err

    non_zero_mvir = np.where((gals["CentralMvir"][:] > 0.0))[0]  # Will only be dividing by this value.

    # These are the *BACKGROUND FOF HALO* masses for each galaxy.
    fof_halo_mass = gals["CentralMvir"][:][non_zero_mvir]
    fof_halo_mass_log = np.log10(gals["CentralMvir"][:][non_zero_mvir] * 1.0e10 / model.hubble_h)

    # We want to calculate the fractions as a function of the FoF mass. To allow us to sum across each file, we
    # record the sum in each bin and then average later.
    components = ["StellarMass", "ColdGas", "HotGas", "EjectedMass", "IntraClusterStars", "BlackHoleMass"]
    attrs_different_name = ["stars", "cold", "hot", "ejected", "ICS", "bh"]

    for (component_key, attr_name) in zip(components, attrs_different_name):
        # The bins are defined in log. However the component masses are all in 1.0e10 Msun/h, so the ratio below is
        # unitless and needs no conversion.
        fraction_sum, _, _ = stats.binned_statistic(
            fof_halo_mass_log,
            gals[component_key][:][non_zero_mvir] / fof_halo_mass,
            statistic=np.sum,
            bins=model.bins["halo_mass_bins"]
        )
        model.properties[f"snapshot_{snapshot}"][f"halo_{attr_name}_fraction_sum"] += fraction_sum

    # Finally want the sum across all components.
    baryons = sum(gals[component_key][:][non_zero_mvir] for component_key in components)

    baryon_fraction_sum, _, _ = stats.binned_statistic(
        fof_halo_mass_log, baryons / fof_halo_mass, statistic=np.sum, bins=model.bins["halo_mass_bins"]
    )
    model.properties[f"snapshot_{snapshot}"]["halo_baryon_fraction_sum"] += baryon_fraction_sum
def calc_reservoirs(model, gals, snapshot: int):
    """
    Records the mass in each baryon reservoir (and the halo mass itself) for central galaxies, as log10 masses.

    A (possibly sub-sampled, via ``model.select_random_galaxy_indices``) selection of centrals is appended to the
    ``Model.properties["snapshot_<snapshot>"]["reservoir_mvir"]`` and
    ``Model.properties["snapshot_<snapshot>"]["reservoir_<reservoir_name>"]`` arrays.
    """
    # To reduce scatter, only use centrals in halos with mass > 1.0e10 Msun/h (1.0 in internal units).
    centrals = np.where(
        (gals["Type"][:] == 0) & (gals["Mvir"][:] > 1.0) & (gals["StellarMass"][:] > 0.0)
    )[0]

    # Sub-sample the galaxies if the model requests it.
    centrals = model.select_random_galaxy_indices(
        centrals, len(model.properties[f"snapshot_{snapshot}"]["reservoir_mvir"])
    )

    properties = model.properties[f"snapshot_{snapshot}"]
    field_to_property = {
        "Mvir": "reservoir_mvir",
        "StellarMass": "reservoir_stars",
        "ColdGas": "reservoir_cold",
        "HotGas": "reservoir_hot",
        "EjectedMass": "reservoir_ejected",
        "IntraClusterStars": "reservoir_ICS",
    }

    for field, property_name in field_to_property.items():
        # Empty reservoirs give ``log10(0) = -inf``. That is deliberate -- it keeps every array the same length
        # and such points fall outside the plotted axis range -- so silence the RuntimeWarning numpy raises.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            log_mass = np.log10(gals[field][:][centrals] * 1.0e10 / model.hubble_h)

        properties[property_name] = np.append(properties[property_name], log_mass)
def calc_spatial(model, gals, snapshot: int):
    """
    Records the (x, y, z) spatial positions of galaxies, in Mpc/h.

    A (possibly sub-sampled, via ``model.select_random_galaxy_indices``) selection of resolved galaxies is appended
    to the ``Model.properties["snapshot_<snapshot>"]["<x/y/z>_pos"]`` arrays.
    """
    # Require a halo and appreciable stellar mass (> 0.1 in internal units of 1.0e10 Msun/h).
    resolved = np.where((gals["Mvir"][:] > 0.0) & (gals["StellarMass"][:] > 0.1))[0]

    # Sub-sample the galaxies if the model requests it.
    resolved = model.select_random_galaxy_indices(
        resolved, len(model.properties[f"snapshot_{snapshot}"]["x_pos"])
    )

    properties = model.properties[f"snapshot_{snapshot}"]
    for property_name, field in (("x_pos", "Posx"), ("y_pos", "Posy"), ("z_pos", "Posz")):
        properties[property_name] = np.append(properties[property_name], gals[field][:][resolved])
def calc_SMF_history(model, gals, snapshot: int):
    """
    Accumulates the stellar mass function at this snapshot under the ``SMF_history`` property.

    Simply defers to :func:`calc_SMF` without the red/blue split, storing the counts in
    ``Model.properties["snapshot_<snapshot>"]["SMF_history"]``.
    """
    calc_SMF(
        model,
        gals,
        snapshot,
        calc_sub_populations=False,
        smf_property_name="SMF_history",
    )
def calc_SFRD_history(model, gals, snapshot: int):
    """
    Accumulates the total star formation rate of all galaxies at this snapshot.

    The running total is added to ``Model.properties["snapshot_<snapshot>"]["SFRD_history"]``; it is normalized by
    the simulation volume (to yield a density) at plot time. See
    :func:`~sage_analysis.example_plots.plot_SFRD` for the full implementation.
    """
    total_sfr = np.sum(gals["SfrDisk"][:] + gals["SfrBulge"][:])
    model.properties[f"snapshot_{snapshot}"]["SFRD_history"] += total_sfr
def calc_SMD_history(model, gals, snapshot: int):
    """
    Accumulates the total stellar mass of all galaxies at this snapshot (converted from the internal units of
    1.0e10 Msun/h via ``hubble_h``).

    The running total is added to ``Model.properties["snapshot_<snapshot>"]["SMD_history"]``; it is normalized by
    the simulation volume (to yield a density) at plot time. See
    :func:`~sage_analysis.example_plots.plot_SMD` for the full implementation.
    """
    has_stars = np.where(gals["StellarMass"][:] > 0.0)[0]
    total_stellar_mass = np.sum(gals["StellarMass"][:][has_stars] * 1.0e10 / model.hubble_h)
    model.properties[f"snapshot_{snapshot}"]["SMD_history"] += total_stellar_mass
import logging
import os
from typing import Any, Dict, Optional
import numpy as np
from tqdm import tqdm
from sage_analysis.data_class import DataClass
from sage_analysis.model import Model
from sage_analysis.utils import read_generic_sage_params
logger = logging.getLogger(__name__)
class SageBinaryData(DataClass):
"""
Class intended to inteface with the :py:class:`~sage_analysis.model.Model` class to
ingest the data written by **SAGE**. It includes methods for reading the output
galaxies, setting cosmology etc. It is specifically written for when
:py:attr:`~sage_analysis.model.Model.sage_output_format` is ``sage_binary``.
"""
    def __init__(self, model: Model, sage_file_to_read: str) -> None:
        """
        Instantiates the Data Class for reading in **SAGE** binary data. In particular,
        generates the ``numpy`` structured array to read the output galaxies.

        Parameters
        ----------
        model: :py:class:`~sage_analysis.model.Model` instance
            The model that this data class is associated with; this class will read the
            data for this model.

        sage_file_to_read: string
            Path to the **SAGE** parameter file describing the model being read.
        """

        logger.info("Reading using SAGE binary output format.")

        # Builds ``self.galaxy_struct``, the dtype used to deserialize each galaxy record.
        self._get_galaxy_struct()

        # Use the SAGE parameter file to generate a bunch of attributes.
        sage_dict = self.read_sage_params(sage_file_to_read)

        self.sage_model_dict = sage_dict
        logger.info(f"The read SAGE parameters are {sage_dict}")
def determine_volume_analyzed(self, model: Model) -> float:
"""
Determines the volume analyzed. This can be smaller than the total simulation box.
Parameters
----------
model : :py:class:`~sage_analysis.model.Model` instance
The model that this data class is associated with.
Returns
-------
volume : float
The numeric volume being processed during this run of the code in (Mpc/h)^3.
"""
# To properly scale properties that use the simulation volume (e.g., SMF), we need
# to know how much of the volume this model is analysing. SAGE is formulated such
# that every processor writes out a single file. However, each model here can
# analyze fewer files than were simulated by SAGE.
# For example, SAGE could have run on 4 processors, and hence 4 files would be
# produced. To allow the quick inspection of results, we may only be running our
# analysis on one file. Hence, we should scale some of the properties by a factor
# of 4.
# Importantly: There is no way for the binary output of SAGE to know this factor!
# Hence, if the user is running in binary mode, they MUST specify the total number
# of files that SAGE output (i.e., the number of processors they ran SAGE with).
frac_volume_analyzed = (model._last_file_to_analyze - model._first_file_to_analyze + 1) / \
model._num_sage_output_files
volume = pow(model._box_size, 3) * frac_volume_analyzed
logger.info(
f"The files read is [{model._first_file_to_analyze}, {model._last_file_to_analyze}] with a total number "
f"of {model._num_sage_output_files}; resulting a volume fraction analyzed of {frac_volume_analyzed}.\nThe "
f"box size is {model._box_size} (Mpc/h) yielding a analyzed volume of {volume} (Mpc/h)^3."
)
return volume
def read_sage_params(self, sage_file_path: str) -> Dict[str, Any]:
"""
Read the **SAGE** parameter file.
Parameters
----------
sage_file_path: string
Path to the **SAGE** parameter file.
Returns
-------
model_dict: dict [str, var]
Dictionary containing the parameter names and their values.
"""
model_dict = read_generic_sage_params(sage_file_path)
return model_dict
def _get_galaxy_struct(self):
"""
Sets the ``numpy`` structured array for holding the galaxy data.
"""
galdesc_full = [
("SnapNum", np.int32),
("Type", np.int32),
("GalaxyIndex", np.int64),
("CentralGalaxyIndex", np.int64),
("SAGEHaloIndex", np.int32),
("SAGETreeIndex", np.int32),
("SimulationHaloIndex", np.int64),
("mergeType", np.int32),
("mergeIntoID", np.int32),
("mergeIntoSnapNum", np.int32),
("dT", np.float32),
("Pos", (np.float32, 3)),
("Vel", (np.float32, 3)),
("Spin", (np.float32, 3)),
("Len", np.int32),
("Mvir", np.float32),
("CentralMvir", np.float32),
("Rvir", np.float32),
("Vvir", np.float32),
("Vmax", np.float32),
("VelDisp", np.float32),
("ColdGas", np.float32),
("StellarMass", np.float32),
("BulgeMass", np.float32),
("HotGas", np.float32),
("EjectedMass", np.float32),
("BlackHoleMass", np.float32),
("IntraClusterStars", np.float32),
("MetalsColdGas", np.float32),
("MetalsStellarMass", np.float32),
("MetalsBulgeMass", np.float32),
("MetalsHotGas", np.float32),
("MetalsEjectedMass", np.float32),
("MetalsIntraClusterStars", np.float32),
("SfrDisk", np.float32),
("SfrBulge", np.float32),
("SfrDiskZ", np.float32),
("SfrBulgeZ", np.float32),
("DiskRadius", np.float32),
("Cooling", np.float32),
("Heating", np.float32),
("QuasarModeBHaccretionMass", np.float32),
("TimeOfLastMajorMerger", np.float32),
("TimeOfLastMinorMerger", np.float32),
("OutflowRate", np.float32),
("infallMvir", np.float32),
("infallVvir", np.float32),
("infallVmax", np.float32)
]
names = [galdesc_full[i][0] for i in range(len(galdesc_full))]
formats = [galdesc_full[i][1] for i in range(len(galdesc_full))]
galdesc = np.dtype({"names": names, "formats": formats}, align=True)
self.galaxy_struct = galdesc
def determine_num_gals(self, model: Model, *args):
"""
Determines the number of galaxies in all files for this
:py:class:`~sage_analysis.model.Model`.
Parameters
----------
model: :py:class:`~sage_analysis.model.Model` class
The :py:class:`~sage_analysis.model.Model` we're reading data for.
*args : Any
Extra arguments to allow other data class to pass extra arguments to their version of
``determine_num_gals``.
"""
num_gals = 0
for file_num in range(model._first_file_to_analyze, model._last_file_to_analyze+1):
fname = self._check_for_file(model, file_num)
if fname is None:
logger.debug(f"File\t{fname} \tdoes not exist!")
continue
with open(fname, "rb") as f:
_ = np.fromfile(f, np.dtype(np.int32), 1)[0]
num_gals_file = np.fromfile(f, np.dtype(np.int32), 1)[0]
num_gals += num_gals_file
model._num_gals_all_files = num_gals
    def read_gals(
        self,
        model: Model,
        file_num: int,
        snapshot: int,
        pbar: Optional[tqdm] = None,
        plot_galaxies: bool = False,
        debug: bool = False
    ):
        """
        Reads the galaxies of a model file at snapshot specified by
        :py:attr:`~sage_analysis.model.Model.snapshot`.

        Parameters
        ----------
        model: :py:class:`~sage_analysis.model.Model` class
            The :py:class:`~sage_analysis.model.Model` we're reading data for.

        file_num: int
            Suffix number of the file we're reading.

        snapshot: int
            The snapshot being read. Not used directly here; the file to read is resolved
            through ``self._check_for_file``.

        pbar: ``tqdm`` class instance, optional
            Bar showing the progress of galaxy reading. If ``None``, progress bar will
            not show.

        plot_galaxies: bool, optional
            If set, plots and saves the 3D distribution of galaxies for this file.

        debug: bool, optional
            If set, prints out extra useful debug information.

        Returns
        -------
        gals : ``numpy`` structured array with format given by :py:method:`~_get_galaxy_struct`
            The galaxies for this file, with each Nx3 field additionally split into Nx1
            per-dimension fields (see the note at the bottom). ``None`` if the file does
            not exist or contains no galaxies.

        Notes
        -----
        ``tqdm`` does not play nicely with printing to stdout. Hence we disable
        the ``tqdm`` progress bar if ``debug=True``.
        """
        fname = self._check_for_file(model, file_num)
        if fname is None:
            logger.debug(f"File\t{fname} \tdoes not exist!")
            return None

        with open(fname, "rb") as f:
            # First read the header information: an int32 tree count, an int32 galaxy count,
            # and then an int32 array of length ``Ntrees`` (presumably per-tree galaxy
            # counts -- discarded here, but it must be consumed to reach the galaxy records).
            Ntrees = np.fromfile(f, np.dtype(np.int32), 1)[0]
            num_gals = np.fromfile(f, np.dtype(np.int32), 1)[0]
            _ = np.fromfile(f, np.dtype((np.int32, Ntrees)), 1)

            # If there aren't any galaxies, exit here.
            if num_gals == 0:
                return None

            # Then the actual galaxies, deserialized with the dtype built by ``_get_galaxy_struct``.
            gals = np.fromfile(f, self.galaxy_struct, num_gals)

            # If we're using the `tqdm` package, update the progress bar.
            if pbar is not None:
                pbar.set_postfix(file=fname, refresh=False)
                pbar.update(num_gals)

        if debug:
            print("")
            print(f"File {fname} contained {Ntrees} trees with {num_gals} galaxies")

            # Masses are in internal units of 1.0e10 Msun/h, so > 1.0 means > 10^10 Msun/h.
            w = np.where(gals["StellarMass"] > 1.0)[0]
            print(f"{len(w)} of these galaxies have mass greater than 10^10Msun/h")

        if plot_galaxies:
            # Imported lazily so matplotlib is only pulled in when plotting is requested.
            from sage_analysis.plots import plot_spatial_3d

            # Show the distribution of galaxies in 3D.
            # NOTE(review): ``self.box_size`` is never assigned in this class ("__init__" sets only
            # ``sage_model_dict``/``galaxy_struct``) -- presumably provided by the base DataClass,
            # otherwise this raises AttributeError; confirm.
            pos = gals["Pos"][:]
            output_file = f"./galaxies_{file_num}.{model.plot_output_format}"
            plot_spatial_3d(pos, output_file, self.box_size)

        # For the HDF5 file, some data sets have dimensions Nx1 rather than Nx3
        # (e.g., Position). To ensure the galaxy data format is identical to the binary
        # output, we will split the binary fields into Nx1. This is simpler than creating
        # a new dataset within the HDF5 regime.
        from numpy.lib import recfunctions as rfn

        multidim_fields = ["Pos", "Vel", "Spin"]
        dim_names = ["x", "y", "z"]

        for field in multidim_fields:
            for dim_num, dim_name in enumerate(dim_names):

                # E.g., "Pos" -> "Posx", "Posy", "Posz".
                dim_field = f"{field}{dim_name}"
                gals = rfn.rec_append_fields(gals, dim_field,
                                             gals[field][:, dim_num])

        return gals
def update_snapshot_and_data_path(self, model: Model, snapshot: int, use_absolute_path: bool = False):
"""
Updates the :py:attr:`~sage_analysis.model.Model._sage_data_path` to point to a new redshift file. Uses the
redshift array :py:attr:`~sage_analysis.model.Model.redshifts`.
Parameters
----------
snapshot : int
Snapshot we're updating :py:attr:`~sage_analysis.model.Model._sage_data_path` to
point to.
use_absolute_path : bool
If specified, will use the absolute path to the **SAGE** output data. Otherwise, will use the path that is
relative to the **SAGE** parameter file. This is hand because the **SAGE** parameter file can contain
either relative or absolute paths.
"""
model._snapshot = snapshot
new_redshift = model.redshifts[snapshot]
# The parameter file could refer to the absolute path or the relative path, so be careful.
if use_absolute_path:
base_path = model._base_sage_output_path_absolute
else:
base_path = model._base_sage_output_path_relative
model._sage_data_path = f"{base_path}_z{new_redshift:.3f}"
def _check_for_file(self, model: Model, file_num: int) -> Optional[str]:
"""
Checks to see if a file for the given file number exists. Importantly, we check assuming that the path given
in the **SAGE** parameter file is **relative** and **absolute**.
Parameters
----------
file_num : int
The file number that we're checking for files.
Returns
-------
fname or ``None``
If a file exists, the name of that file. Otherwise, if the file does not exist (using either relative or
absolute paths), then ``None``.
"""
# Try relative, and then absolute paths.
for use_absolute_path in [False, True]:
self.update_snapshot_and_data_path(model, model._snapshot, use_absolute_path=use_absolute_path)
fname = f"{model._sage_data_path}_{file_num}"
if os.path.isfile(fname):
return fname
return None
    def close_file(self, model: Model):
        """
        An empty method to ensure consistency with the HDF5 data class. This is empty because snapshots are saved over
        different files by default in the binary format.
        """
        # Nothing to close: each binary sub-file is opened and closed inside `read_gals`.
        pass
import logging
import time
from collections import defaultdict
from typing import Dict, List, Optional
import numpy as np
# ``tqdm`` is an optional dependency: without it the analysis still runs, just
# without progress bars. (The previous ``else: pass`` clause was dead code.)
try:
    from tqdm import tqdm
except ImportError:
    print("Package 'tqdm' not found. Not showing pretty progress bars :(")

# Module-level logger; handlers/levels are configured by the consuming application.
logger = logging.getLogger(__name__)
class Model(object):
    """
    Handles all the galaxy data (including calculated properties) for a ``SAGE`` model.

    The ingestion of data is handled by individual Data Classes (e.g.,
    :py:class:`~sage_analysis.sage_binary.SageBinaryData` and :py:class:`~sage_analysis.sage_hdf5.SageHdf5Data`).
    We refer to :doc:`../user/data_class` for more information about adding your own Data Class to ingest data.
    """
    def __init__(
        self,
        sage_file: str,
        sage_output_format: Optional[str],
        label: Optional[str],
        first_file_to_analyze: int,
        last_file_to_analyze: int,
        num_sage_output_files: Optional[int],
        random_seed: Optional[int],
        IMF: str,
        plot_toggles: Dict[str, bool],
        plots_that_need_smf: List[str],
        sample_size: int = 1000,
        sSFRcut: float = -11.0,
    ):
        """
        Sets the galaxy path and number of files to be read for a model. Also initialises
        the plot toggles that dictates which properties will be calculated.

        Parameters
        ----------
        sage_file : str
            Path to the **SAGE** ``.ini`` file for this model.

        sage_output_format : str, optional
            If not specified will use the ``OutputFormat`` read from ``sage_file``.

        label : str, optional
            The label that will be placed on the plots for this model. If not specified, will use ``FileNameGalaxies``
            read from ``sage_file``.

        first_file_to_analyze, last_file_to_analyze : int
            The range (inclusive) of **SAGE** output files to analyze.

        num_sage_output_files : int, optional
            Specifies the number of output files that were generated by running **SAGE**. This can be different to the
            range specified by [first_file_to_analyze, last_file_to_analyze].

            Notes
            -----
            This variable only needs to be specified if ``sage_output_format`` is ``sage_binary``.

        random_seed : int, optional
            Seed used for the random number generator when sampling galaxies. If ``None``, numpy's
            default seeding is used.

        IMF : {"Chabrier", "Salpeter"}
            The initial mass function.

        plot_toggles : dict [str, bool]
            Specifies which plots (and hence which properties) should be computed for this model.

        plots_that_need_smf : list of str
            Plot toggles that require the stellar mass function to be computed.

        sample_size: int, optional
            Specifies the length of the :py:attr:`~properties` attributes stored as 1-dimensional
            :obj:`~numpy.ndarray`. These :py:attr:`~properties` are initialized using
            :py:meth:`~init_scatter_properties`.

        sSFRcut : float, optional
            The specific star formation rate above which a galaxy is flagged as "star forming". Units are log10.
        """
        self._sage_file = sage_file
        self._IMF = IMF
        self._label = label
        self._sage_output_format = sage_output_format
        self._first_file_to_analyze = first_file_to_analyze
        self._last_file_to_analyze = last_file_to_analyze
        self._random_seed = random_seed
        self._plot_toggles = plot_toggles
        self._plots_that_need_smf = plots_that_need_smf
        self._sample_size = sample_size
        self._sSFRcut = sSFRcut

        # Incremented as files are processed; used when sampling random galaxies.
        self._num_files_analyzed = 0
        self._bins = {}
        self._properties = defaultdict(dict)

        # Binary output carries no global header, so the file count cannot be inferred.
        if num_sage_output_files is None and sage_output_format == "sage_binary":
            raise RuntimeError(
                "When analysing binary SAGE output, the number of output files generated by SAGE must be specified."
            )
        else:
            self._num_sage_output_files = num_sage_output_files
        if (first_file_to_analyze is None or last_file_to_analyze is None) and sage_output_format == "sage_binary":
            raise RuntimeError(
                "When analysing binary SAGE output, the first and last SAGE output file to analyze must be specified."
            )
    @property
    def sage_file(self) -> str:
        """
        str : The path to where the **SAGE** ``.ini`` file is located.
        """
        return self._sage_file

    @property
    def num_sage_output_files(self):
        """
        int: The number of files that **SAGE** wrote. This will be equal to the number of
        processors that **SAGE** ran with.

        Notes
        -----
        If :py:attr:`~sage_output_format` is ``sage_hdf5``, this attribute is not required.
        """
        return self._num_sage_output_files

    @property
    def hubble_h(self):
        """
        float: Value of the fractional Hubble parameter. That is, ``H = 100*hubble_h``.
        """
        return self._hubble_h

    @property
    def box_size(self):
        """
        float: Size of the simulation box. Units are Mpc/h.
        """
        return self._box_size
@property
def volume(self):
"""
volume: Volume spanned by the trees analyzed by this model. This depends upon the
number of files processed, ``[:py:attr:`~first_file_to_analyze`, :py:attr:`~last_file_to_analyze`]``,
relative to the total number of files the simulation spans over,
:py:attr:`~num_sim_tree_files`.
Notes
-----
This is **not** necessarily :py:attr:`~box_size` cubed. It is possible that this
model is only analysing a subset of files and hence the volume will be less.
"""
return self._volume
@volume.setter
def volume(self, vol):
if vol > pow(self.box_size, 3):
print("The volume analyzed by a model cannot exceed the volume of the box "
"itself. Error was encountered for the following model.")
print(self)
raise ValueError
self._volume = vol
    @property
    def redshifts(self):
        """
        :obj:`~numpy.ndarray`: Redshifts for this simulation.
        """
        return self._redshifts

    @property
    def sage_output_format(self):
        """
        {``"sage_binary"``, ``"sage_hdf5"``}: The output format **SAGE** wrote in.
        A specific Data Class (e.g., :py:class:`~sage_analysis.sage_binary.SageBinaryData`
        and :py:class:`~sage_analysis.sage_hdf5.SageHdf5Data`) must be written and
        used for each :py:attr:`~sage_output_format` option. We refer to
        :doc:`../user/data_class` for more information about adding your own Data Class to ingest
        data.
        """
        return self._sage_output_format

    @property
    def base_sage_data_path(self) -> str:
        """
        string: Base path to the output data. This is the path without specifying any extra information about redshift
        or the file extension itself.
        """
        return self._base_sage_data_path

    @property
    def sage_data_path(self) -> str:
        """
        string: Path to the output data. If :py:attr:`~sage_output_format` is
        ``sage_binary``, files read must be labelled :py:attr:`~sage_data_path`.XXX.
        If :py:attr:`~sage_output_format` is ``sage_hdf5``, the file read will be
        :py:attr:`~sage_data_path` and the groups accessed will be Core_XXX at snapshot
        :py:attr:`~snapshot`. In both cases, ``XXX`` represents the numbers in the range
        [:py:attr:`~first_file_to_analyze`, :py:attr:`~last_file_to_analyze`] inclusive.
        """
        return self._sage_data_path

    @property
    def output_path(self):
        """
        string: Path to where some plots will be saved. Used for
        :py:meth:`~sage_analysis.plots.plot_spatial_3d`.
        """
        return self._output_path

    @property
    def IMF(self):
        """
        {``"Chabrier"``, ``"Salpeter"``}: The initial mass function.
        """
        return self._IMF

    @IMF.setter
    def IMF(self, IMF):
        # Only allow Chabrier or Salpeter IMF.
        allowed_IMF = ["Chabrier", "Salpeter"]
        if IMF not in allowed_IMF:
            raise ValueError(
                "Value of IMF selected ({0}) is not allowed. Only {1} are "
                "allowed.".format(IMF, allowed_IMF)
            )
        self._IMF = IMF

    @property
    def label(self):
        """
        string: Label that will go on axis legends for this :py:class:`~Model`.
        """
        return self._label
    @property
    def first_file_to_analyze(self):
        """
        int: The first **SAGE** sub-file to be read. If :py:attr:`~sage_output_format` is
        ``sage_binary``, files read must be labelled :py:attr:`~sage_data_path`.XXX.
        If :py:attr:`~sage_output_format` is ``sage_hdf5``, the file read will be
        :py:attr:`~sage_data_path` and the groups accessed will be Core_XXX. In both cases,
        ``XXX`` represents the numbers in the range
        [:py:attr:`~first_file_to_analyze`, :py:attr:`~last_file_to_analyze`] inclusive.
        """
        return self._first_file_to_analyze

    @property
    def last_file_to_analyze(self):
        """
        int: The last **SAGE** sub-file to be read. If :py:attr:`~sage_output_format` is
        ``sage_binary``, files read must be labelled :py:attr:`~sage_data_path`.XXX.
        If :py:attr:`~sage_output_format` is ``sage_hdf5``, the file read will be
        :py:attr:`~sage_data_path` and the groups accessed will be Core_XXX. In both cases,
        ``XXX`` represents the numbers in the range
        [:py:attr:`~first_file_to_analyze`, :py:attr:`~last_file_to_analyze`] inclusive.
        """
        return self._last_file_to_analyze

    @property
    def snapshot(self):
        """
        int: Specifies the snapshot to be read. If :py:attr:`~sage_output_format` is
        ``sage_hdf5``, this specifies the HDF5 group to be read. Otherwise, if
        :py:attr:`sage_output_format` is ``sage_binary``, this attribute will be used to
        index :py:attr:`~redshifts` and generate the suffix for :py:attr:`~sage_data_path`.
        """
        return self._snapshot

    @property
    def bins(self):
        """
        dict [string, :obj:`~numpy.ndarray` ]: The bins used to bin some
        :py:attr:`properties`. Bins are initialized through
        :py:meth:`~Model.init_binned_properties`. Key is the name of the bin,
        (``bin_name`` in :py:meth:`~Model.init_binned_properties` ).
        """
        return self._bins

    @property
    def properties(self):
        """
        dict [string, dict [string, :obj:`~numpy.ndarray` ]] or dict[string, dict[string, float]]: The galaxy
        properties stored across the input files and snapshots. These properties are updated within the respective
        ``calc_<plot_toggle>`` functions.

        The outside key is ``"snapshot_XX"`` where ``XX`` is the snapshot number for the property. The inner key is
        the name of the property (e.g., ``"SMF"``).
        """
        return self._properties

    @property
    def sample_size(self):
        """
        int: Specifies the length of the :py:attr:`~properties` attributes stored as 1-dimensional
        :obj:`~numpy.ndarray`. These :py:attr:`~properties` are initialized using
        :py:meth:`~init_scatter_properties`.
        """
        return self._sample_size

    @property
    def num_gals_all_files(self):
        """
        int: Number of galaxies across all files. For HDF5 data formats, this represents
        the number of galaxies across all `Core_XXX` sub-groups.
        """
        return self._num_gals_all_files

    @property
    def parameter_dirpath(self):
        """
        str : The directory path to where the **SAGE** parameter file is located. This is only the base directory path
        and does not include the name of the file itself.
        """
        return self._parameter_dirpath

    @property
    def random_seed(self) -> Optional[int]:
        """
        Optional[int] : Specifies the seed used for the random number generator, used to select galaxies for plotting
        purposes. If ``None``, then uses default call to :func:`~numpy.random.seed`.
        """
        return self._random_seed

    @property
    def plots_that_need_smf(self) -> List[str]:
        """
        list of strings : Specifies the plot toggles that require the stellar mass function to be properly computed
        and analyzed. For example, plotting the quiescent fraction of galaxies requires knowledge of the total number
        of galaxies. The strings here must **EXACTLY** match the keys in :py:attr:`~plot_toggles`.
        """
        return self._plots_that_need_smf

    @property
    def plot_toggles(self):
        """
        dict[str, bool] : Specifies which plots should be created for this model. This will control which properties
        should be calculated; e.g., if no stellar mass function is to be plotted, the stellar mass function will not be
        computed.
        """
        return self._plot_toggles

    @property
    def calculation_functions(self):
        """
        dict[str, tuple[func, dict[str, any]]] : A dictionary of functions that are used to compute the properties of
        galaxies. Here, the string is the name of the toggle (e.g., ``"SMF"``), the value is a tuple
        containing the function itself (e.g., ``calc_SMF()``), and another dictionary which specifies any optional
        keyword arguments to that function with keys as the name of variable (e.g., ``"calc_sub_populations"``) and
        values as the variable value (e.g., ``True``).
        """
        return self._calculation_functions

    @property
    def sSFRcut(self) -> float:
        """
        float : The specific star formation rate above which a galaxy is flagged as "star forming". Units are log10.
        """
        return self._sSFRcut
def __repr__(self):
string = "========================\n" \
f"Model {self._label}\n" \
f"SAGE File: {self._sage_file}\n" \
f"SAGE Output Format: {self._sage_output_format}\n" \
f"First file to read: {self._first_file_to_analyze}\n" \
f"Last file to read: {self._last_file_to_analyze}\n" \
"========================"
return string
def init_binned_properties(
self,
bin_low: float,
bin_high: float,
bin_width: float,
bin_name: str,
property_names: List[str],
snapshot: int
):
"""
Initializes the :py:attr:`~properties` (and respective :py:attr:`~bins`) that will
binned on some variable. For example, the stellar mass function (SMF) will
describe the number of galaxies within a stellar mass bin.
:py:attr:`~bins` can be accessed via ``Model.bins["bin_name"]`` and are
initialized as :obj:`~numpy.ndarray`. :py:attr:`~properties` can be accessed via
``Model.properties["property_name"]`` and are initialized using
:obj:`numpy.zeros`.
Parameters
----------
bin_low, bin_high, bin_width : floats
Values that define the minimum, maximum and width of the bins respectively.
This defines the binning axis that the ``property_names`` properties will be
binned on.
bin_name : string
Name of the binning axis, accessed by ``Model.bins["bin_name"]``.
property_names : list of strings
Name of the properties that will be binned along the defined binning axis.
Properties can be accessed using ``Model.properties["property_name"]``; e.g.,
``Model.properties["SMF"]`` would return the stellar mass function that is binned
using the ``bin_name`` bins.
snapshot : int
The snapshot we're initialising the properties for.
"""
# Parameters that define the specified binning axis.
bins = np.arange(bin_low, bin_high + bin_width, bin_width)
# Add the bins to the dictionary.
self.bins[bin_name] = bins
# When making histograms, the right-most bin is closed. Hence the length of the
# produced histogram will be `len(bins)-1`.
for my_property in property_names:
self.properties[f"snapshot_{snapshot}"][my_property] = np.zeros(len(bins) - 1, dtype=np.float64)
def init_scatter_properties(self, property_names: List[str], snapshot: int):
"""
Initializes the :py:attr:`~properties` that will be extended as
:obj:`~numpy.ndarray`. These are used to plot (e.g.,) a the star formation rate
versus stellar mass for a subset of :py:attr:`~sample_size` galaxies. Initializes
as empty :obj:`~numpy.ndarray`.
Parameters
----------
property_names : list of strings
Name of the properties that will be extended as :obj:`~numpy.ndarray`.
snapshot : int
The snapshot we're initialising the properties for.
"""
# Initialize empty arrays.
for my_property in property_names:
self.properties[f"snapshot_{snapshot}"][my_property] = np.array([])
def init_single_properties(self, property_names: List[str], snapshot: int) -> None:
"""
Initializes the :py:attr:`~properties` that are described using a single number.
This is used to plot (e.g.,) a the sum of stellar mass across all galaxies.
Initializes as ``0.0``.
Parameters
----------
property_names : list of strings
Name of the properties that will be described using a single number.
snapshot : int
The snapshot we're initialising the properties for.
"""
# Initialize as zeros.
for my_property in property_names:
self.properties[f"snapshot_{snapshot}"][my_property] = 0.0
def calc_properties_all_files(
self,
calculation_functions,
snapshot: int,
close_file: bool = True,
use_pbar: bool = True,
debug: bool = False,
):
"""
Calculates galaxy properties for all files of a single :py:class:`~Model`.
Parameters
----------
calculation_functions: dict [string, list(function, dict[string, variable])]
Specifies the functions used to calculate the properties of this
:py:class:`~Model`. The key of this dictionary is the name of the plot toggle.
The value is a list with the 0th element being the function and the 1st
element being a dictionary of additional keyword arguments to be passed to
the function. The inner dictionary is keyed by the keyword argument names
with the value specifying the keyword argument value.
All functions in this dictionary for called after the galaxies for each
sub-file have been loaded. The function signature is required to be
``func(Model, gals, <Extra Keyword Arguments>)``.
snapshot : int
The snapshot that we're calculating properties for.
close_file: boolean, optional
Some data formats have a single file data is read from rather than opening and
closing the sub-files in :py:meth:`read_gals`. Hence once the properties are
calculated, the file must be closed. This variable flags whether the data
class specific :py:meth:`~close_file` method should be called upon completion of
this method.
use_pbar: Boolean, optional
If set, uses the ``tqdm`` package to create a progress bar.
debug: Boolean, optional
If set, prints out extra useful debug information.
"""
start_time = time.time()
# Ensure that we're pointing at the correct snapshot. This allows the model path to point to the correct file.
self.data_class.update_snapshot_and_data_path(self, snapshot)
# First determine how many galaxies are in all files.
self.data_class.determine_num_gals(self, snapshot)
if self._num_gals_all_files == 0:
logger.info(f"There were no galaxies associated with this model at Snapshot {self._snapshot}.")
print(f"There were no galaxies associated with this model at Snapshot {self._snapshot}.")
return
# If the user requested the number of galaxies plotted/calculated
# The `tqdm` package provides a beautiful progress bar.
try:
if debug or not use_pbar:
pbar = None
else:
pbar = tqdm(total=self._num_gals_all_files, unit="Gals", unit_scale=True)
except NameError:
pbar = None
else:
pass
# Now read the galaxies and calculate the properties.
for file_num in range(self.first_file_to_analyze, self.last_file_to_analyze + 1):
# This is Data Class specific. Refer to the relevant module for implementation.
gals = self.data_class.read_gals(self, file_num, snapshot, pbar=pbar, debug=debug)
# We may have skipped a file.
if gals is None:
continue
self.calc_properties(calculation_functions, gals, snapshot)
self._num_files_analyzed += 1
# Some data formats (e.g., HDF5) have a single file we read from.
if close_file:
self.data_class.close_file(self)
end_time = time.time()
duration = end_time - start_time
if debug:
print(
"Took {0:.2f} seconds ({1:.2f} minutes)".format(duration, duration / 60.0)
)
print("")
def calc_properties(self, calculation_functions, gals, snapshot: int):
"""
Calculates galaxy properties for a single file of galaxies.
Parameters
----------
calculation_functions: dict [string, function]
Specifies the functions used to calculate the properties. All functions in
this dictionary are called on the galaxies. The function signature is required
to be ``func(Model, gals)``
gals: exact format given by the :py:class:`~Model` Data Class.
The galaxies for this file.
snapshot : int
The snapshot that we're calculating properties for.
Notes
-----
If :py:attr:`~sage_output_format` is ``sage_binary``, ``gals`` is a ``numpy``
structured array. If :py:attr:`~sage_output_format`: is
``sage_hdf5``, ``gals`` is an open HDF5 group. We refer to
:doc:`../user/data_class` for more information about adding your own Data Class to ingest data.
"""
# Now check which plots the user is creating and hence decide which properties they need.
for func, kwargs in calculation_functions.values():
# **kwargs unpacks the `kwargs` dictionary, passing each keyword properly to the function.
func(self, gals, snapshot, **kwargs)
def select_random_galaxy_indices(self, inds: np.ndarray, num_inds_selected_already: int) -> np.ndarray:
"""
Selects random indices (representing galaxies) from ``inds``. This method assumes that the total number of
galaxies selected across all **SAGE** files analyzed is :py:attr:`~sample_size` and that (preferably) these
galaxies should be selected **equally** amongst all files analyzed.
For example, if we are analyzing 8 **SAGE** output files and wish to select 10,000 galaxies, this function
would hence select 1,250 indices from ``inds``.
If the length of ``inds`` is less than the number of requested values (e.g., ``inds`` only contains 1,000
values), then the next file analyzed will attempt to select 1,500 random galaxies (1,250 base plus an addition
250 as the previous file could not find enough galaxies).
At the end of the analysis, if there have not been enough galaxies selected, then a message is sent to the
user.
"""
if self._random_seed is not None:
np.random.seed(self._random_seed)
# Firstly, how many indices should ideally be drawn from each file?
num_inds_per_file = self._sample_size / (self._last_file_to_analyze - self._first_file_to_analyze + 1)
# Based on the number of files that have been analyzed so far, how far are we off our target?
target_num_inds_so_far = self._num_files_analyzed * num_inds_per_file
num_inds_defecit = target_num_inds_so_far - num_inds_selected_already
logger.info(
f"Thus far, analyzed {self._num_files_analyzed} files and selected {num_inds_selected_already} random "
f"galaxies.\nBy now, {target_num_inds_so_far} galaxies should have been selected, introducing a deficit "
f"of {num_inds_defecit} galaxies."
)
# The number of indices that we need to select is hence the baseline plus any defecit we have missed.
num_inds_this_file = num_inds_per_file + num_inds_defecit
selected_inds = np.random.choice(inds, size=int(num_inds_this_file))
# If this is the last file to analyze and we still haven't selected enough galaxies, print a message.
if self._num_files_analyzed == (self._last_file_to_analyze - self._first_file_to_analyze):
if num_inds_this_file < len(selected_inds):
msg = f"When attempting to select {self._sample_size} random galaxies, only " \
f"{num_inds_selected_already + len(selected_inds)} could be selected (missing " \
f"{num_inds_this_file - len(selected_inds)}.\nEither apply less stringent cuts on your galaxy " \
f"sample or reduce ``sample_size``."
print(msg)
logger.info(msg)
return selected_inds | /sage_analysis-0.2.3.tar.gz/sage_analysis-0.2.3/sage_analysis/model.py | 0.893843 | 0.432603 | model.py | pypi |
import numpy as np
def plot_smf_data(ax, hubble_h, imf):
    """
    Plots stellar mass function observational data. Uses data from Baldry et al., 2008.

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.

    hubble_h : Float
        Little h value (between 0 and 1). Used to scale the y-values of the Baldry data
        which is irrespective of h.

    imf : {"Salpeter", "Chabrier"}
        If "Chabrier", reduces the x-values of the Baldry data by 0.26 dex.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.
    """
    # Baldry+ 2008 modified data used for the MCMC fitting.
    # Columns: log10(stellar mass), phi, phi uncertainty.
    Baldry = np.array([
        [7.05, 1.3531e-01, 6.0741e-02],
        [7.15, 1.3474e-01, 6.0109e-02],
        [7.25, 2.0971e-01, 7.7965e-02],
        [7.35, 1.7161e-01, 3.1841e-02],
        [7.45, 2.1648e-01, 5.7832e-02],
        [7.55, 2.1645e-01, 3.9988e-02],
        [7.65, 2.0837e-01, 4.8713e-02],
        [7.75, 2.0402e-01, 7.0061e-02],
        [7.85, 1.5536e-01, 3.9182e-02],
        [7.95, 1.5232e-01, 2.6824e-02],
        [8.05, 1.5067e-01, 4.8824e-02],
        [8.15, 1.3032e-01, 2.1892e-02],
        [8.25, 1.2545e-01, 3.5526e-02],
        [8.35, 9.8472e-02, 2.7181e-02],
        [8.45, 8.7194e-02, 2.8345e-02],
        [8.55, 7.0758e-02, 2.0808e-02],
        [8.65, 5.8190e-02, 1.3359e-02],
        [8.75, 5.6057e-02, 1.3512e-02],
        [8.85, 5.1380e-02, 1.2815e-02],
        [8.95, 4.4206e-02, 9.6866e-03],
        [9.05, 4.1149e-02, 1.0169e-02],
        [9.15, 3.4959e-02, 6.7898e-03],
        [9.25, 3.3111e-02, 8.3704e-03],
        [9.35, 3.0138e-02, 4.7741e-03],
        [9.45, 2.6692e-02, 5.5029e-03],
        [9.55, 2.4656e-02, 4.4359e-03],
        [9.65, 2.2885e-02, 3.7915e-03],
        [9.75, 2.1849e-02, 3.9812e-03],
        [9.85, 2.0383e-02, 3.2930e-03],
        [9.95, 1.9929e-02, 2.9370e-03],
        [10.05, 1.8865e-02, 2.4624e-03],
        [10.15, 1.8136e-02, 2.5208e-03],
        [10.25, 1.7657e-02, 2.4217e-03],
        [10.35, 1.6616e-02, 2.2784e-03],
        [10.45, 1.6114e-02, 2.1783e-03],
        [10.55, 1.4366e-02, 1.8819e-03],
        [10.65, 1.2588e-02, 1.8249e-03],
        [10.75, 1.1372e-02, 1.4436e-03],
        [10.85, 9.1213e-03, 1.5816e-03],
        [10.95, 6.1125e-03, 9.6735e-04],
        [11.05, 4.3923e-03, 9.6254e-04],
        [11.15, 2.5463e-03, 5.0038e-04],
        [11.25, 1.4298e-03, 4.2816e-04],
        [11.35, 6.4867e-04, 1.6439e-04],
        [11.45, 2.8294e-04, 9.9799e-05],
        [11.55, 1.0617e-04, 4.9085e-05],
        [11.65, 3.2702e-05, 2.4546e-05],
        [11.75, 1.2571e-05, 1.2571e-05],
        [11.85, 8.4589e-06, 8.4589e-06],
        [11.95, 7.4764e-06, 7.4764e-06],
    ], dtype=np.float32)

    # Convert masses into little-h units.
    Baldry_xval = np.log10(10 ** Baldry[:, 0] / hubble_h / hubble_h)
    if imf == "Chabrier":
        # Convert the Baldry data to Chabrier.
        Baldry_xval = Baldry_xval - 0.26

    # Upper and lower envelopes of the observed phi values, scaled to little-h units.
    Baldry_yvalU = (Baldry[:, 1]+Baldry[:, 2]) * pow(hubble_h, 3)
    Baldry_yvalL = (Baldry[:, 1]-Baldry[:, 2]) * pow(hubble_h, 3)

    ax.fill_between(Baldry_xval, Baldry_yvalU, Baldry_yvalL,
                    facecolor='purple', alpha=0.25,
                    label='Baldry et al. 2008 (z=0.1)')

    return ax
def plot_temporal_smf_data(ax, imf):
    """
    Plots stellar mass function observational data at a variety of redshifts. Uses data
    from Marchesini et al., 2009.

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.

    imf : {"Salpeter", "Chabrier"}
        If "Salpeter", scales the x-values of the Marchesini data by a factor of 1.6.
        If "Chabrier", scales the x-values of the Marchesini data by a factor of 1.6 / 1.8.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.

    Raises
    ------
    ValueError
        If ``imf`` is neither "Salpeter" nor "Chabrier".
    """
    # Marchesini et al. 2009ApJ...701.1765M SMF, h=0.7
    # We add plots for z=[0.1], z=[1.3,2.0], z=[2.0,3.0] and z=[3.0,4.0].
    # Schechter-function parameters per redshift bin.
    labels = ["Marchesini et al. 2009 z=[0.1]", "... z=[1.3,2.0]",
              "... z=[2.0,3.0]", "... z=[3.0,4.0]"]
    colors = ["k", "b", "g", "r"]
    Mstar_exponent = [10.96, 10.91, 10.96, 11.38]
    alpha = [-1.18, -0.99, -1.01, -1.39]
    phistar = [30.87*1e-4, 10.17*1e-4, 3.95*1e-4, 0.53*1e-4]

    # Each redshift is valid over a slightly different mass range.
    M = [np.arange(7.0, 11.8, 0.01), np.arange(9.3, 11.8, 0.01),
         np.arange(9.7, 11.8, 0.01), np.arange(10.0, 11.8, 0.01)]

    # When we're plotting, need to check which IMF we're using.
    if imf == "Salpeter":
        M_plot = [np.log10(10.0**M_vals * 1.6) for M_vals in M]
    elif imf == "Chabrier":
        M_plot = [np.log10(10.0**M_vals * 1.6/1.8) for M_vals in M]
    else:
        print("plot_temporal_smf_data() called with an IMF value of {0}. Only Salpeter "
              "or Chabrier allowed.".format(imf))
        raise ValueError

    # Shift the mass by Mstar for each.
    shifted_mass = []
    for (mass, Mstar) in zip(M, Mstar_exponent):
        shifted_mass.append(10 ** (mass - Mstar))

    # Then calculate the Phi (Schechter function evaluated on the shifted masses).
    phi = []
    for (shifted_M, phi_val, alpha_val) in zip(shifted_mass, phistar, alpha):
        phi.append(np.log(10.0) * phi_val * shifted_M ** (alpha_val + 1) * np.exp(-shifted_M))

    # Then plot!
    for (M_vals, phi_vals, label, color) in zip(M_plot, phi, labels, colors):
        ax.plot(M_vals, phi_vals, color=color, label=label, lw=10, ls=":", alpha=0.3)

    return ax
def plot_bmf_data(ax, hubble_h, imf):
    """
    Plots baryonic mass function observational data. Uses data from Bell et al., 2003.

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.

    hubble_h : Float
        Little h value (between 0 and 1). Used to scale the y-values of the Bell data
        which is irrespective of h.

    imf : {"Salpeter", "Chabrier"}
        If "Salpeter", scales the x-values of the Bell data by a factor of 0.7.
        If "Chabrier", scales the x-values of the Bell data by a factor of 0.7 / 1.8.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.

    Raises
    ------
    ValueError
        If ``imf`` is neither "Salpeter" nor "Chabrier".
    """
    # Bell et al. 2003 BMF. They assume h=1.0; Schechter function evaluated below.
    M = np.arange(7.0, 13.0, 0.01)
    Mstar = np.log10(5.3*1.0e10 / hubble_h / hubble_h)
    alpha = -1.21
    phistar = 0.0108 * hubble_h * hubble_h * hubble_h
    xval = 10.0 ** (M-Mstar)
    yval = np.log(10.) * phistar * xval ** (alpha+1) * np.exp(-xval)

    # Bell use a diet Salpeter IMF.
    if imf == "Salpeter":
        # Then to Salpeter.
        mass_shifted = np.log10(10.0**M / 0.7)
    elif imf == "Chabrier":
        # To Salpeter then to Chabrier.
        mass_shifted = np.log10(10.0**M / 0.7 / 1.8)
    else:
        print("plot_bmf_data() called with an IMF value of {0}. Only Salpeter or "
              "Chabrier allowed.".format(imf))
        # Previously execution fell through to `ax.plot` with `mass_shifted`
        # undefined, dying with a NameError. Raise explicitly, matching the
        # behaviour of the sibling plotting routines.
        raise ValueError

    ax.plot(mass_shifted, yval, 'g--', lw=1.5,
            label='Bell et al. 2003')

    return ax
def plot_gmf_data(ax, hubble_h):
    """
    Plots gas mass function observational data. Uses data from Zwaan et al. (2005, HI)
    and Obreschkow & Rawlings (2009, H2 and cold gas).

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.

    hubble_h : Float
        Little h value (between 0 and 1). Used to scale the y-values of the observed data
        which is irrespective of h.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.
    """
    # Zwaan et al. 2005 HI mass function.
    # Columns: log10(gas mass), log10(phi).
    Zwaan = np.array(
        [
            [6.933, -0.333],
            [7.057, -0.490],
            [7.209, -0.698],
            [7.365, -0.667],
            [7.528, -0.823],
            [7.647, -0.958],
            [7.809, -0.917],
            [7.971, -0.948],
            [8.112, -0.927],
            [8.263, -0.917],
            [8.404, -1.062],
            [8.566, -1.177],
            [8.707, -1.177],
            [8.853, -1.312],
            [9.010, -1.344],
            [9.161, -1.448],
            [9.302, -1.604],
            [9.448, -1.792],
            [9.599, -2.021],
            [9.740, -2.406],
            [9.897, -2.615],
            [10.053, -3.031],
            [10.178, -3.677],
            [10.335, -4.448],
            [10.492, -5.083],
        ],
        dtype=np.float32
    )

    # Obreschkow & Rawlings 2009 H2 mass function.
    ObrRaw = np.array(
        [
            [7.300, -1.104],
            [7.576, -1.302],
            [7.847, -1.250],
            [8.133, -1.240],
            [8.409, -1.344],
            [8.691, -1.479],
            [8.956, -1.792],
            [9.231, -2.271],
            [9.507, -3.198],
            [9.788, -5.062],
        ],
        dtype=np.float32
    )

    # Obreschkow & Rawlings 2009 cold gas mass function.
    ObrCold = np.array(
        [
            [8.009, -1.042],
            [8.215, -1.156],
            [8.409, -0.990],
            [8.604, -1.156],
            [8.799, -1.208],
            [9.020, -1.333],
            [9.194, -1.385],
            [9.404, -1.552],
            [9.599, -1.677],
            [9.788, -1.812],
            [9.999, -2.312],
            [10.172, -2.656],
            [10.362, -3.500],
            [10.551, -3.635],
            [10.740, -5.010],
        ],
        dtype=np.float32
    )

    # Convert each data set from log-space into little-h units.
    ObrCold_xval = np.log10(10**(ObrCold[:, 0]) / hubble_h / hubble_h)
    ObrCold_yval = (10**(ObrCold[:, 1]) * hubble_h * hubble_h * hubble_h)
    Zwaan_xval = np.log10(10**(Zwaan[:, 0]) / hubble_h / hubble_h)
    Zwaan_yval = (10**(Zwaan[:, 1]) * hubble_h * hubble_h * hubble_h)
    ObrRaw_xval = np.log10(10**(ObrRaw[:, 0]) / hubble_h / hubble_h)
    ObrRaw_yval = (10**(ObrRaw[:, 1]) * hubble_h * hubble_h * hubble_h)

    ax.plot(  # noqa: W605.
        ObrCold_xval, ObrCold_yval, color='black', lw=7, alpha=0.25, label='Obr. \& Raw. 2009 (Cold Gas)'
    )
    ax.plot(  # noqa: W605.
        Zwaan_xval, Zwaan_yval, color='cyan', lw=7, alpha=0.25, label='Zwaan et al. 2005 (HI)'
    )
    ax.plot(  # noqa: W605.
        ObrRaw_xval, ObrRaw_yval, color='magenta', lw=7, alpha=0.25, label='Obr. \& Raw. 2009 (H2)'
    )

    return ax
def plot_btf_data(ax):
    """
    Overlay the observed baryonic Tully-Fisher relation on an axis.

    The plotted relation is the linear fit from Stark, McGaugh & Swatters (2009).

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.
    """
    log_velocity = np.arange(0.5, 10.0, 0.5)
    log_baryonic_mass = 3.94 * log_velocity + 1.79
    ax.plot(  # noqa: W605
        log_velocity, log_baryonic_mass, 'b-', lw=2.0,
        label='Stark, McGaugh \& Swatters 2009',
    )
    return ax
def plot_metallicity_data(ax, imf):
    """
    Plots metallicity observational data. Uses data from Tremonti et al., 2003.

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.
    imf : {"Salpeter", "Chabrier"}
        If "Salpeter", scales the x-values of the Tremonti data by a factor of 1.5.
        If "Chabrier", scales the x-values of the Tremonti data by a factor of 1.5 / 1.8.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.

    Raises
    ------
    ValueError
        If ``imf`` is not one of the accepted values.
    """
    # Tremonti et al. 2003 (h=0.7)
    M = np.arange(7.0, 13.0, 0.1)
    Zobs = -1.492 + 1.847*M - 0.08026*M*M

    # Tremonti use a Kroupa IMF.
    if imf == "Salpeter":
        # Conversion from Kroupa IMF to Salpeter IMF.
        mass_shifted = np.log10(10**M * 1.5)
    elif imf == "Chabrier":
        # Conversion from Kroupa IMF to Salpeter IMF to Chabrier IMF.
        mass_shifted = np.log10(10**M * 1.5 / 1.8)
    else:
        # Attach the explanation to the exception rather than printing it and
        # raising a message-less ValueError, so callers/logs see the reason.
        raise ValueError(
            "plot_metallicity_data() called with an IMF value of {0}. Only Salpeter "
            "or Chabrier allowed.".format(imf)
        )

    ax.plot(mass_shifted, Zobs, 'b-', lw=2.0, label='Tremonti et al. 2003')
    return ax
def plot_bh_bulge_data(ax):
    """
    Overlay the observed black hole mass - bulge mass relation on an axis.

    The relation plotted is the power-law fit of Haring & Rix (2004).

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.
    """
    # Haring & Rix 2004: log10(M_BH) = 8.2 + 1.12 * log10(M_bulge / 1e11).
    bulge_mass = 10. ** np.arange(20)
    bh_mass = 10. ** (8.2 + 1.12 * np.log10(bulge_mass / 1.0e11))
    ax.plot(np.log10(bulge_mass), np.log10(bh_mass), 'b-', label="Haring \& Rix 2004")  # noqa: W605
    return ax
def plot_sfrd_data(ax):
    """
    Plots observational data for the evolution of the star formation rate density.

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.
    """
    # Compilation of observed SFR densities.  The column layout appears to be
    # (z, SFRD, z_low, z_high, SFRD_err_a, SFRD_err_b) -- note some rows have
    # err_a < SFRD < err_b while others have the reverse ordering, and a few
    # rows have z_low/z_high swapped; TODO confirm against the original source.
    ObsSFRdensity = np.array([
        [0, 0.0158489, 0, 0, 0.0251189, 0.01000000],
        [0.150000, 0.0173780, 0, 0.300000, 0.0181970, 0.0165959],
        [0.0425000, 0.0239883, 0.0425000, 0.0425000, 0.0269153, 0.0213796],
        [0.200000, 0.0295121, 0.100000, 0.300000, 0.0323594, 0.0269154],
        [0.350000, 0.0147911, 0.200000, 0.500000, 0.0173780, 0.0125893],
        [0.625000, 0.0275423, 0.500000, 0.750000, 0.0331131, 0.0229087],
        [0.825000, 0.0549541, 0.750000, 1.00000, 0.0776247, 0.0389045],
        [0.625000, 0.0794328, 0.500000, 0.750000, 0.0954993, 0.0660693],
        [0.700000, 0.0323594, 0.575000, 0.825000, 0.0371535, 0.0281838],
        [1.25000, 0.0467735, 1.50000, 1.00000, 0.0660693, 0.0331131],
        [0.750000, 0.0549541, 0.500000, 1.00000, 0.0389045, 0.0776247],
        [1.25000, 0.0741310, 1.00000, 1.50000, 0.0524807, 0.104713],
        [1.75000, 0.0562341, 1.50000, 2.00000, 0.0398107, 0.0794328],
        [2.75000, 0.0794328, 2.00000, 3.50000, 0.0562341, 0.112202],
        [4.00000, 0.0309030, 3.50000, 4.50000, 0.0489779, 0.0194984],
        [0.250000, 0.0398107, 0.00000, 0.500000, 0.0239883, 0.0812831],
        [0.750000, 0.0446684, 0.500000, 1.00000, 0.0323594, 0.0776247],
        [1.25000, 0.0630957, 1.00000, 1.50000, 0.0478630, 0.109648],
        [1.75000, 0.0645654, 1.50000, 2.00000, 0.0489779, 0.112202],
        [2.50000, 0.0831764, 2.00000, 3.00000, 0.0512861, 0.158489],
        [3.50000, 0.0776247, 3.00000, 4.00000, 0.0416869, 0.169824],
        [4.50000, 0.0977237, 4.00000, 5.00000, 0.0416869, 0.269153],
        [5.50000, 0.0426580, 5.00000, 6.00000, 0.0177828, 0.165959],
        [3.00000, 0.120226, 2.00000, 4.00000, 0.173780, 0.0831764],
        [3.04000, 0.128825, 2.69000, 3.39000, 0.151356, 0.109648],
        [4.13000, 0.114815, 3.78000, 4.48000, 0.144544, 0.0912011],
        [0.350000, 0.0346737, 0.200000, 0.500000, 0.0537032, 0.0165959],
        [0.750000, 0.0512861, 0.500000, 1.00000, 0.0575440, 0.0436516],
        [1.50000, 0.0691831, 1.00000, 2.00000, 0.0758578, 0.0630957],
        [2.50000, 0.147911, 2.00000, 3.00000, 0.169824, 0.128825],
        [3.50000, 0.0645654, 3.00000, 4.00000, 0.0776247, 0.0512861],
        ], dtype=np.float32)
    ObsRedshift = ObsSFRdensity[:, 0]
    # Asymmetric x errors are the distances from z to the bin edges.
    xErrLo = ObsSFRdensity[:, 0]-ObsSFRdensity[:, 2]
    xErrHi = ObsSFRdensity[:, 3]-ObsSFRdensity[:, 0]
    # Work in log10(SFRD); y errors are distances in log space to columns 4/5.
    ObsSFR = np.log10(ObsSFRdensity[:, 1])
    yErrLo = np.log10(ObsSFRdensity[:, 1])-np.log10(ObsSFRdensity[:, 4])
    yErrHi = np.log10(ObsSFRdensity[:, 5])-np.log10(ObsSFRdensity[:, 1])
    ax.errorbar(
        ObsRedshift,
        ObsSFR,
        yerr=[yErrLo, yErrHi],
        xerr=[xErrLo, xErrHi],
        color='g',
        lw=1.0,
        alpha=0.3,
        marker='o',
        ls='none',
        label='Observations'
    )
    return ax
def plot_smd_data(ax, imf):
    """
    Plots observational data for the evolution of the stellar mass density. Uses data from
    Dickenson et al., 2003; Drory et al., 2005; Perez-Gonzalez et al., 2008; Glazebrook et
    al., 2004; Fontana et al., 2006; Rudnick et al., 2006; Elsner et al., 2008.

    Parameters
    ----------
    ax : ``matplotlib`` axes object
        Axis to plot the data on.
    imf : {"Salpeter", "Chabrier"}
        If "Salpeter", scales the y-values by a factor of 1.6.
        If "Chabrier", scales the y-values by a factor of 1.6 / 1.8.

    Returns
    -------
    ax : ``matplotlib`` axes object
        Axis with the data plotted on it.

    Raises
    ------
    ValueError
        If ``imf`` is not one of the accepted values.  (Previously an unknown
        ``imf`` left ``yval`` unbound and crashed with a NameError.)
    """
    # SMD observations taken from Marchesini+ 2009, h=0.7
    # Values are (minz, maxz, rho,-err,+err)
    dickenson2003 = np.array(
        (
            (0.60, 1.40, 8.26, 0.08, 0.08),
            (1.40, 2.00, 7.86, 0.22, 0.33),
            (2.00, 2.50, 7.58, 0.29, 0.54),
            (2.50, 3.00, 7.52, 0.51, 0.48)
        ),
        float
    )
    drory2005 = np.array(
        (
            (0.25, 0.75, 8.30, 0.15, 0.15),
            (0.75, 1.25, 8.16, 0.15, 0.15),
            (1.25, 1.75, 8.00, 0.16, 0.16),
            (1.75, 2.25, 7.85, 0.20, 0.20),
            (2.25, 3.00, 7.75, 0.20, 0.20),
            (3.00, 4.00, 7.58, 0.20, 0.20)
        ),
        float
    )
    # Perez-Gonzalez (2008)
    pg2008 = np.array(
        (
            (0.2, 0.4, 8.41, 0.06, 0.06),
            (0.4, 0.6, 8.37, 0.04, 0.04),
            (0.6, 0.8, 8.32, 0.05, 0.05),
            (0.8, 1.0, 8.24, 0.05, 0.05),
            (1.0, 1.3, 8.15, 0.05, 0.05),
            (1.3, 1.6, 7.95, 0.07, 0.07),
            (1.6, 2.0, 7.82, 0.07, 0.07),
            (2.0, 2.5, 7.67, 0.08, 0.08),
            (2.5, 3.0, 7.56, 0.18, 0.18),
            (3.0, 3.5, 7.43, 0.14, 0.14),
            (3.5, 4.0, 7.29, 0.13, 0.13)
        ),
        float
    )
    glazebrook2004 = np.array(
        (
            (0.8, 1.1, 7.98, 0.14, 0.10),
            (1.1, 1.3, 7.62, 0.14, 0.11),
            (1.3, 1.6, 7.90, 0.14, 0.14),
            (1.6, 2.0, 7.49, 0.14, 0.12)
        ),
        float
    )
    fontana2006 = np.array(
        (
            (0.4, 0.6, 8.26, 0.03, 0.03),
            (0.6, 0.8, 8.17, 0.02, 0.02),
            (0.8, 1.0, 8.09, 0.03, 0.03),
            (1.0, 1.3, 7.98, 0.02, 0.02),
            (1.3, 1.6, 7.87, 0.05, 0.05),
            (1.6, 2.0, 7.74, 0.04, 0.04),
            (2.0, 3.0, 7.48, 0.04, 0.04),
            (3.0, 4.0, 7.07, 0.15, 0.11)
        ),
        float
    )
    rudnick2006 = np.array(
        (
            (0.0, 1.0, 8.17, 0.27, 0.05),
            (1.0, 1.6, 7.99, 0.32, 0.05),
            (1.6, 2.4, 7.88, 0.34, 0.09),
            (2.4, 3.2, 7.71, 0.43, 0.08)
        ),
        float
    )
    elsner2008 = np.array(
        (
            (0.25, 0.75, 8.37, 0.03, 0.03),
            (0.75, 1.25, 8.17, 0.02, 0.02),
            (1.25, 1.75, 8.02, 0.03, 0.03),
            (1.75, 2.25, 7.90, 0.04, 0.04),
            (2.25, 3.00, 7.73, 0.04, 0.04),
            (3.00, 4.00, 7.39, 0.05, 0.05)
        ),
        float
    )

    obs = (
        dickenson2003, drory2005, pg2008, glazebrook2004, fontana2006, rudnick2006, elsner2008
    )

    for o in obs:
        # Bin centre on the x-axis; bin edges give the asymmetric x errors.
        xval = ((o[:, 1] - o[:, 0]) / 2.) + o[:, 0]
        if imf == "Salpeter":
            yval = np.log10(10**o[:, 2] * 1.6)
        elif imf == "Chabrier":
            yval = np.log10(10**o[:, 2] * 1.6 / 1.8)
        else:
            # Mirror the error handling of ``plot_metallicity_data``.
            raise ValueError(
                "plot_smd_data() called with an IMF value of {0}. Only Salpeter "
                "or Chabrier allowed.".format(imf)
            )

        ax.errorbar(
            xval,
            yval,
            xerr=(xval - o[:, 0], o[:, 1] - xval),
            yerr=(o[:, 3], o[:, 4]),
            alpha=0.3,
            lw=1.0,
            marker='o',
            ls='none'
        )

    return ax
from sage_analysis.utils import generate_func_dict
# Which analyses/plots are enabled by default.  Keys map onto function name
# suffixes looked up by ``generate_func_dict`` (e.g. "SMF" -> ``calc_SMF``).
default_plot_toggles = {
    "SMF" : True,  # Stellar mass function.
    "BMF" : True,  # Baryonic mass function.
    "GMF" : True,  # Gas mass function (cold gas).
    "BTF" : True,  # Baryonic Tully-Fisher.
    "sSFR" : True,  # Specific star formation rate.
    "gas_fraction" : True,  # Fraction of galaxy that is cold gas.
    "metallicity" : True,  # Metallicity scatter plot.
    "bh_bulge" : True,  # Black hole-bulge relationship.
    "quiescent" : True,  # Fraction of galaxies that are quiescent.
    "bulge_fraction" : True,  # Fraction of galaxies that are bulge/disc dominated.
    "baryon_fraction" : True,  # Fraction of baryons in galaxy/reservoir.
    "reservoirs" : True,  # Mass in each reservoir.
    "spatial" : True,  # Spatial distribution of galaxies.
    # "History" analyses spanning multiple snapshots are disabled by default.
    "SMF_history": False,
    "SFRD_history": False,
    "SMD_history": False,
}
# Default specification of which galaxy properties are computed and how each
# one is stored.  Each entry groups properties that share a representation:
# "binned" (histograms over mass bins), "scatter" (per-galaxy values) or
# "single" (one value per snapshot).
default_galaxy_properties_to_analyze = {
    "stellar_mass_bins": {
        "type": "binned",
        # Bin edges; presumably log10(stellar mass) -- TODO confirm units.
        "bin_low": 8.0,
        "bin_high": 12.0,
        "bin_width": 0.1,
        "property_names": [
            "SMF", "red_SMF", "blue_SMF", "BMF", "GMF",
            "centrals_MF", "satellites_MF", "quiescent_galaxy_counts",
            "quiescent_centrals_counts", "quiescent_satellites_counts",
            "fraction_bulge_sum", "fraction_bulge_var",
            "fraction_disk_sum", "fraction_disk_var", "SMF_history",
        ],
    },
    "halo_mass_bins": {
        "type": "binned",
        # Bin edges; presumably log10(halo mass) -- TODO confirm units.
        "bin_low": 10.0,
        "bin_high": 14.0,
        "bin_width": 0.1,
        # ``fof_HMF`` plus one baryon-fraction property per halo component.
        "property_names": ["fof_HMF"] + [f"halo_{component}_fraction_sum"
            for component in ["baryon", "stars", "cold", "hot", "ejected", "ICS", "bh"]
        ],
    },
    "scatter_properties": {
        "type": "scatter",
        "property_names": [
            "BTF_mass", "BTF_vel", "sSFR_mass", "sSFR_sSFR",
            "gas_frac_mass", "gas_frac", "metallicity_mass",
            "metallicity", "bh_mass", "bulge_mass", "reservoir_mvir",
            "reservoir_stars", "reservoir_cold", "reservoir_hot",
            "reservoir_ejected", "reservoir_ICS", "x_pos",
            "y_pos", "z_pos"
        ],
    },
    "single_properties": {
        "type": "single",
        "property_names": ["SMD_history", "SFRD_history"],
    },
}
# Functions that compute the enabled properties: ``calc_<toggle>`` in
# ``sage_analysis.example_calcs``.
default_calculation_functions = generate_func_dict(default_plot_toggles, "sage_analysis.example_calcs", "calc_")

# Functions that plot the enabled properties: ``plot_<toggle>`` in
# ``sage_analysis.example_plots``.  Previously this line mistakenly reused the
# calculation module and "calc_" prefix, making ``default_plot_functions`` an
# exact duplicate of ``default_calculation_functions``.
default_plot_functions = generate_func_dict(default_plot_toggles, "sage_analysis.example_plots", "plot_")
from typing import Dict, List, Optional, Tuple, Union
import os
import matplotlib
import matplotlib.pyplot as plt
class PlotHelper():
    """
    Bundles attributes and methods that assist with creating good looking
    plots: the color/marker/linestyle cycles, the output path and format, the
    figure size, and a consistent set of ``matplotlib`` rc parameters that are
    applied on construction.
    """

    def __init__(
        self,
        colors: Optional[List[str]] = None,
        markers: Optional[List[str]] = None,
        linestyles: Optional[List[str]] = None,
        output_format: str = "png",
        output_path: str = "./plots/",
        figsize: Optional[List[float]] = None,
        usetex: bool = False,
    ) -> None:
        """
        Parameters
        ----------
        colors : list of strings, optional
            Colors cycled through when plotting. Defaults to a colorblind and
            black/white friendly selection.
        markers : list of strings, optional
            Markers cycled through when plotting. Defaults to ``["x", "o"]``.
        linestyles : list of strings, optional
            Linestyles cycled through when plotting.
        output_format : string, optional
            The format of the saved plots.
        output_path : string, optional
            The path where the plots will be saved. If the base directory does not exist,
            it will be created.
        figsize : list of two floats, optional
            Size of created figures. Defaults to ``[12.0, 12.0]``.
        usetex : bool, optional
            If ``True``, text is rendered with LaTeX.
        """
        if colors is None:
            # Colours selected from colorbrewer2.org/ to be colorblind + Black/White friendly.
            colors = [
                "r",
                "c",
                "m",
            ]
        self._colors = colors

        if markers is None:
            markers = ["x", "o"]
        self._markers = markers

        if linestyles is None:
            linestyles = ["-", "--", "-.", ":"]
        self._linestyles = linestyles

        self._output_format = output_format
        self._output_path = output_path

        if figsize is None:
            figsize = [12.0, 12.0]
        self._figsize = figsize

        self._usetex = usetex

        # Check to see if the directory exists. If ``output_path`` is "directory/tag" then
        # we create "directory/".  Guard against an empty dirname (e.g.
        # ``output_path="plots"``), for which ``os.makedirs("")`` would raise.
        output_dir = os.path.dirname(output_path)
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)

        # Reset to matplotlib defaults, then apply a consistent style.
        matplotlib.rcdefaults()
        plt.rc("font", size=20)
        plt.rc("xtick", labelsize=16, direction="in")
        plt.rc("ytick", labelsize=16, direction="in")
        plt.rc("lines", linewidth=2.0)
        plt.rc("legend", numpoints=1, fontsize="x-large", handletextpad=0.1, handlelength=1.5)

        if self._usetex:
            plt.rc("text", usetex=usetex)

    @property
    def colors(self) -> List[str]:
        """
        list of str : the colours that will be used for plotting.
        """
        return self._colors

    @property
    def markers(self) -> List[str]:
        """
        list of str : the markers that will be used for plotting.
        """
        return self._markers

    @property
    def linestyles(self) -> List[str]:
        """
        list of str : the linestyles that will be used for plotting.
        """
        return self._linestyles

    @property
    def output_format(self) -> str:
        """
        str : the format plots will be saved as.
        """
        return self._output_format

    @property
    def output_path(self) -> str:
        """
        str : the path the plots will be saved to.
        """
        return self._output_path

    @property
    def figsize(self) -> List[float]:
        """
        list of float : the size of the figures that are created.
        """
        return self._figsize

    def adjust_legend(
        self,
        ax,
        location: str = "upper right",
        scatter_plot: bool = False,
        fontsize: int = 14,
        linewidth: int = 2,
    ):
        """
        Adjusts the legend of a specified axis.

        Parameters
        ----------
        ax : ``matplotlib`` axes object
            The axis whose legend we're adjusting.
        location : String, default "upper right". See ``matplotlib`` docs for full options
            Location for the legend to be placed.
        scatter_plot : bool, default False
            For plots involving scatter-plotted data, we adjust the size and alpha of the
            legend points.
        fontsize : int, default 14
            Font size applied to the legend labels.
        linewidth : int, default 2
            Line width applied to the legend handles.

        Returns
        -------
        ax : ``matplotlib`` axes object
            The axis, with its legend adjusted in place.
        """
        legend = ax.legend(loc=location)
        # NOTE(review): ``legendHandles`` was renamed ``legend_handles`` in
        # Matplotlib 3.7 and the old alias was later removed -- confirm the
        # supported Matplotlib versions before upgrading.
        handles = legend.legendHandles
        legend.draw_frame(False)

        # First adjust the text sizes.
        for t in legend.get_texts():
            t.set_fontsize(fontsize)

        # For scatter plots, we want to increase the marker size.
        if scatter_plot:
            for handle in handles:
                # We may have lines in the legend which we don't want to touch here.
                if isinstance(handle, matplotlib.collections.PathCollection):
                    handle.set_alpha(1.0)
                    handle.set_sizes([10.0])

        for handle in handles:
            handle.set_linewidth(linewidth)

        return ax

    def update_rc_attribute(self, attribute_name: str, attribute_dict: Dict[str, Union[str, float]]) -> None:
        """
        Update a ``matplotlib`` rc parameter group (e.g. ``"font"``) with the
        given keyword/value pairs.
        """
        matplotlib.rc(attribute_name, **attribute_dict)
r"""
An editable Grid View Widget for Sage Jupyter Notebook.
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from .grid_view_editor import GridViewEditor, cdlink
from sage.graphs.generic_graph import GenericGraph
from ipywidgets import Layout, VBox, HBox, HTML, ValueWidget
from singleton_widgets import *
from six import text_type
# Shared ``Layout`` instances for the different cell widget flavours (CSS sizes).
textcell_layout = Layout(width='3em', height='2em', margin='0', padding='0')
# Wider variant for cells that hold longer text.
textcell_wider_layout = Layout(width='7em', height='3em', margin='0', padding='0')
buttoncell_layout = Layout(width='5em', height='4em', margin='0', padding='0')
# Smaller buttons, used e.g. when the grid represents a graph.
buttoncell_smaller_layout = Layout(width='2em', height='2em', margin='0', padding='0')
class BaseTextCell(TextSingleton):
    r"""
    Abstract class for all text cells except blank.

    Concrete subclasses (``TextCell``, ``WiderTextCell``, ``AddableTextCell``,
    ``DisabledTextCell``) mainly differ by their default layout and CSS classes.
    """
    # Cell values are displayed/edited as text.
    displaytype = text_type

    def __init__(self, content, position, layout, **kws):
        # ``kws`` is accepted for subclass signature compatibility but is not
        # forwarded to the parent constructor.
        super(BaseTextCell, self).__init__()
        self.value = content
        self.layout = layout
        # Fire a change event only on validation, not on every keystroke.
        self.continuous_update = False
        # (row, column) coordinate of this cell in the grid.
        self.position = position
        self.description_tooltip = ''  # avoid None
        self.add_class('gridcell')
class TextCell(BaseTextCell):
    r"""A regular text grid cell

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import TextCell
        sage: b = TextCell('my text', (1,2))
    """
    def __init__(self, content, position, layout=textcell_layout, **kws):
        # Only specializes ``BaseTextCell`` with the default text-cell layout.
        super(TextCell, self).__init__(content, position, layout, **kws)
class StyledTextCell(TextCell):
    r"""A class for CSS-styled text grid cells.

    Not meant to be called directly; use :func:`styled_text_cell` to build a
    concrete subclass with ``disable`` / ``css_class`` / ``style`` filled in.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import StyledTextCell
        sage: b = StyledTextCell("ok", (1,2))
    """
    # Class-level knobs, set by ``styled_text_cell``.
    disable = None
    css_class = None
    style = None

    def __init__(self, content, position, layout=textcell_layout, **kws):
        super(StyledTextCell, self).__init__(content, position, layout, **kws)
        # Guard against an unset ``css_class`` (as on this base class itself),
        # mirroring ``StyledButtonCell``; an unguarded ``add_class(None)``
        # previously raised a TraitError.
        if self.css_class:
            self.add_class(self.css_class)
        if self.disable:
            self.disabled = True
        if self.style:
            apply_css(self.style)
def apply_css(css_line):
    r"""
    Inject a CSS ``<style>`` element into the notebook front-end, if any.

    Outside of a notebook (plain Python, test runs), ``get_ipython`` is
    undefined or has no notebook class in its MRO, and the call is a no-op.

    INPUT:

    - ``css_line`` -- a string of CSS rules
    """
    try:
        ip = get_ipython()
        for base in ip.__class__.__mro__:
            # If we are in a notebook, we will find 'notebook' in those names.
            if 'otebook' in base.__name__:
                ip.display_formatter.format(HTML("<style>%s</style>" % css_line))
                break
    except Exception:
        # Not inside IPython at all (``get_ipython`` raises NameError) or the
        # front-end refused the payload: skip the styling.  A bare ``except:``
        # was previously used, which also swallowed KeyboardInterrupt/SystemExit.
        pass
def styled_text_cell(disabled=False, style_name='', style=None):
    r"""Build a ``StyledTextCell`` subclass carrying the given CSS styling.

    The returned class takes its name from ``style_name`` and stores the
    disabled flag, the CSS class name and the optional CSS rules as class
    attributes.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import styled_text_cell
        sage: styled_text_cell(disabled=True, style_name='mycssclass', style="")
        <class 'traitlets.traitlets.DisabledMycssclassTextCell'>
    """
    # FIXME: should the colour be passed in as a parameter? As a CSS string?
    prefix = "Disabled" if disabled else ""
    class_name = prefix + style_name.capitalize() + "TextCell"
    class_attributes = {'disable': disabled, 'css_class': style_name, 'style': style}
    return type(class_name, (StyledTextCell,), class_attributes)
class WiderTextCell(BaseTextCell):
    r"""A text grid cell with a wider layout, for longer contents.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import WiderTextCell
        sage: b = WiderTextCell('my text', (1,2))
    """
    def __init__(self, content, position, layout=textcell_wider_layout, **kws):
        # Same as ``TextCell`` but defaulting to the wider layout.
        super(WiderTextCell, self).__init__(content, position, layout, **kws)
class BlankCell(TextSingleton):
    r"""A blank placeholder cell

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import BlankCell
        sage: b = BlankCell()
    """
    # Blank cells still display as (empty) text.
    displaytype = text_type

    def __init__(self, position=None, layout=textcell_layout, **kws):
        super(BlankCell, self).__init__()
        self.value = ''
        self.position = position
        self.layout = layout
        # A placeholder can never be edited.
        self.disabled = True
        self.add_class('blankcell')
class AddableTextCell(BaseTextCell):
    r"""An addable placeholder for adding a cell to the widget

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import AddableTextCell
        sage: a = AddableTextCell((3,4))
    """
    def __init__(self, position, layout=textcell_layout):
        # Starts empty; its value is linked to the editor's 'add_i_j' trait
        # (see ``GridViewWidget.add_links``), so typing a value adds a cell.
        super(AddableTextCell, self).__init__('', position, layout=layout, continuous_update=False)
        self.add_class('addablecell')
class DisabledTextCell(BaseTextCell):
    r"""A disabled text grid cell

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import DisabledTextCell
        sage: b = DisabledTextCell('my text', (1,2))
    """
    def __init__(self, content, position, layout=textcell_layout, **kws):
        super(DisabledTextCell, self).__init__(content, position, layout=layout, **kws)
        # Read-only: displays its content but rejects edits.
        self.disabled = True
class ButtonCell(ToggleButtonSingleton):
    r"""A base class for button grid cells.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import ButtonCell
        sage: b = ButtonCell(True, (1,2))
    """
    # Button cells carry a boolean (pressed / not pressed) value.
    displaytype = bool

    def __init__(self, content, position, layout=buttoncell_smaller_layout, **kws):
        # ``kws`` is accepted for signature compatibility but not forwarded.
        super(ButtonCell, self).__init__(layout=layout)
        self.value = content
        # (row, column) coordinate of this cell in the grid.
        self.position = position
        self.add_class('gridbutton')
        self.set_tooltip()

    def set_tooltip(self, s=None):
        r"""Set the button tooltip to ``s``, or derive it from the position.

        From a position (i,j),
        we just want the string 'i,j'
        to use as a tooltip on buttons.

        TESTS ::

            sage: from sage_combinat_widgets.grid_view_widget import ButtonCell
            sage: b = ButtonCell(True, (42, 7))
            sage: b.set_tooltip()
            sage: str(b.tooltip)
            '42, 7'
            sage: b.set_tooltip("My new tooltip")
            sage: str(b.tooltip)
            'My new tooltip'
        """
        if s:
            self.tooltip = s
        else:
            # str((42, 7)) == '(42, 7)'; strip the surrounding parentheses.
            self.tooltip = str(self.position)[1:-1]
class StyledButtonCell(ButtonCell):
    r"""A class for CSS-styled button grid cells.

    Not meant to be called directly; use :func:`styled_button_cell` to build a
    concrete subclass with ``disable`` / ``css_class`` / ``addable`` filled in.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import StyledButtonCell
        sage: b = StyledButtonCell(True, (1,2))
    """
    # Class-level knobs, set by ``styled_button_cell``.
    disable = None
    css_class = None
    addable = None

    def __init__(self, content, position, layout=buttoncell_smaller_layout, **kws):
        super(StyledButtonCell, self).__init__(content, position, layout, **kws)
        # Only apply the CSS class when one was configured.
        if self.css_class:
            self.add_class(self.css_class)
        if self.disable:
            self.disabled = True
        if self.addable:
            self.add_class('addablebutton')
def styled_button_cell(disabled=False, style_name='', addable=False):
    r"""Build a ``StyledButtonCell`` subclass carrying the given CSS styling.

    The returned class takes its name from ``style_name``; ``disabled`` and
    ``addable`` are stored as class attributes and drive the behaviour of
    ``StyledButtonCell.__init__``.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import styled_button_cell
        sage: styled_button_cell(disabled=True, style_name='mycssclass')
        <class 'traitlets.traitlets.DisabledMycssclassButtonCell'>
    """
    # FIXME: should the colour be passed in as a parameter? As a CSS string?
    if disabled:
        prefix = "Disabled"
    elif addable:
        prefix = "Addable"
    else:
        prefix = ""
    class_name = prefix + style_name.capitalize() + "ButtonCell"
    class_attributes = {'disable': disabled, 'css_class': style_name, 'addable': addable}
    return type(class_name, (StyledButtonCell,), class_attributes)
DisabledButtonCell = styled_button_cell(disabled=True)
r"""A disabled button cell.
TESTS ::
sage: from sage_combinat_widgets.grid_view_widget import DisabledButtonCell
sage: b = DisabledButtonCell(True, (1,2))
sage: b.disabled
True
"""
class AddableButtonCell(ButtonCell):
    r"""An addable placeholder for adding a button cell to the widget

    An addable button has a position.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import AddableButtonCell
        sage: a = AddableButtonCell((3,4))
    """
    def __init__(self, position, layout=buttoncell_smaller_layout, **kws):
        # Starts unpressed (value ``False``); toggling requests a cell addition.
        super(AddableButtonCell, self).__init__(False, position, layout, **kws)
        self.add_class('addablebutton')
        self.description = '+'
        self.tooltip = "Click to add a cell here"
class StyledPushButton(ButtonSingleton):
    r"""A class for CSS-styled push buttons.

    Not meant to be called directly; use :func:`styled_push_button` to build a
    concrete subclass with ``disable`` / ``css_class`` filled in.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import StyledPushButton
        sage: b = StyledPushButton()
    """
    # Class-level knobs, set by ``styled_push_button``.
    disable = None
    css_class = None

    def __init__(self, content=None, position=None, layout=buttoncell_smaller_layout, description='', placeholder=None):
        super(StyledPushButton, self).__init__(layout=layout, description=description, placeholder=placeholder)
        # Stored as plain attributes for interface compatibility with cell widgets.
        self.content = content
        self.position = position
        if self.disable:
            self.disabled = True
        self.add_class('gridbutton')
        if self.css_class:
            self.add_class(self.css_class)
def styled_push_button(disabled=False, style_name=''):
    r"""Build a ``StyledPushButton`` subclass carrying the given CSS styling.

    Unlike grid cells, a push button stores neither a value nor a position.

    TESTS ::

        sage: from sage_combinat_widgets.grid_view_widget import styled_push_button
        sage: styled_push_button(style_name='mycssclass').__name__
        'MycssclassPushButton'
    """
    prefix = "Disabled" if disabled else ""
    class_name = prefix + style_name.capitalize() + "PushButton"
    class_attributes = {'disable': disabled, 'css_class': style_name}
    return type(class_name, (StyledPushButton,), class_attributes)
BlankButton = styled_push_button(disabled=True, style_name='blankbutton')
r"""A blank placeholder button.
TESTS ::
sage: from sage_combinat_widgets.grid_view_widget import BlankButton
sage: b = BlankButton()
sage: b.__class__.__name__
'DisabledBlankbuttonPushButton'
sage: b.disabled
True
sage: assert 'blankbutton' in b._dom_classes
"""
def get_model_id(w):
    r"""
    Return the id under which widget ``w`` is registered in the widget registry.

    For some reason, our widgets seem to lose their ``model_id``; this *hack*
    recovers it by scanning ``w.widgets``.  Returns ``None`` when ``w`` is not
    registered.
    """
    return next((uid for uid, widget in w.widgets.items() if widget == w), None)
class GridViewWidget(GridViewEditor, VBox, ValueWidget):
r"""A widget for all grid-representable Sage objects
"""
def __init__(self, obj, adapter=None, display_convention='en', cell_layout=None,
cell_widget_classes=[TextCell], cell_widget_class_index=lambda x:0,
css_classes = [], css_class_index=None,
blank_widget_class=BlankCell, addable_widget_class=AddableTextCell):
r"""
Grid View Widget initialization.
INPUT:
- ``cell_widget_classes``: a list of classes for building cell widgets
- ``blank_widget_class``: a widget class for building blank cells
- ``addable_widget_class``: a widget class for building blank cells
TESTS ::
sage: from sage_combinat_widgets.grid_view_widget import *
sage: t = StandardTableaux(15).random_element()
sage: w = GridViewWidget(t)
sage: from sage.graphs.generators.families import AztecDiamondGraph
sage: az = AztecDiamondGraph(4)
sage: w = GridViewWidget(az, cell_widget_classes=[ButtonCell], blank_widget_class=BlankButton)
Compatibility with `@interact`: the widget should be a
:class:`ipywidgets.ValueWidget` and have a description field::
sage: isinstance(w, ValueWidget)
True
sage: w.description
"Grid view widget for Jupyter notebook with cell class '<class 'sage_combinat_widgets.grid_view_widget.ButtonCell'>', for object 'Subgraph of (2D Grid Graph for [8, 8])'"
Basic compabitility test::
sage: def f(x = w): return az.average_distance()
sage: f = interact(f)
Interactive function <function f at ...> with 1 widget
x: GridViewWidget(value=Aztec Diamond graph of order 4, ...)
"""
GridViewEditor.__init__(self, obj, adapter)
VBox.__init__(self)
self._model_id = get_model_id(self)
self.display_convention = display_convention
self.description = "Grid view widget for Jupyter notebook with cell class '%s', for object '%s'" % (
cell_widget_classes[0], obj)
if not cell_layout:
if issubclass(self.value.__class__, GenericGraph): # i.e. a graph
cell_layout = buttoncell_smaller_layout
else:
cell_layout = textcell_layout
self.cell_layout = cell_layout
self.cell_widget_classes = cell_widget_classes
self.cell_widget_class_index = cell_widget_class_index
self.css_classes = css_classes
self.css_class_index = css_class_index or cell_widget_class_index or (lambda x:0)
try:
self.displaytype = cell_widget_classes[0].displaytype
except:
self.displaytype = None # Stateless cells
self.cast = lambda x:self.adapter.display_to_cell(x, self.displaytype)
self.blank_widget_class = blank_widget_class
self.addable_widget_class = addable_widget_class
self.draw()
self.donottrack = False
    def to_cell(self, val):
        r"""
        From a widget cell value `val`,
        return a valid editor cell value.

        TESTS ::

            sage: from sage_combinat_widgets.grid_view_widget import GridViewWidget
            sage: t = StandardTableaux(5).random_element()
            sage: w = GridViewWidget(t)
            sage: w.to_cell('3')
            3
        """
        # Delegates to ``self.cast``, built in ``__init__`` from the adapter.
        return self.cast(val)
    def add_links(self):
        r"""
        Link each individual widget cell
        to its corresponding trait in the editor

        TESTS ::

            sage: from sage.combinat.tableau import StandardTableaux
            sage: from sage_combinat_widgets.grid_view_widget import GridViewWidget
            sage: t = StandardTableaux(15).random_element()
            sage: w1 = GridViewWidget(t)
            sage: w2 = GridViewWidget(t, display_convention='fr')
            sage: len(w1.links)
            20
            sage: assert len(w1.links) == len(w2.links)
            sage: def test0(w): return (w.links[0].source[0].__class__, w.links[0].source[0].value, w.links[0].target[1])
            sage: def test10(w): return (w.links[10].source[0].__class__, w.links[10].source[0].value, w.links[10].target[1])
            sage: def test17(w): return (w.links[17].source[0].__class__, w.links[17].source[0].value, w.links[17].target[1])
            sage: assert test0(w1) == test0(w2)
            sage: assert test10(w1) == test10(w2)
            sage: assert test17(w1) == test17(w2)
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: s = SkewTableau([[None, None, 1, 2], [None, 1], [4]])
            sage: w1 = GridViewWidget(s)
            sage: w2 = GridViewWidget(s, display_convention='fr')
            sage: len(w1.links)
            10
            sage: assert len(w1.links) == len(w2.links)
            sage: def test0(w): return (w.links[0].source[0].__class__, w.links[0].source[0].value, w.links[0].target[1])
            sage: def test4(w): return (w.links[4].source[0].__class__, w.links[4].source[0].value, w.links[4].target[1])
            sage: assert test0(w1) == test0(w2)
            sage: assert test4(w1) == test4(w2)
            sage: len(w2.links)
            10
            sage: w2.links[2].source[0].__class__
            <class 'sage_combinat_widgets.grid_view_widget.TextCell'>
            sage: w2.links[6].source[0].__class__
            <class 'sage_combinat_widgets.grid_view_widget.AddableTextCell'>
            sage: from traitlets import Bunch
            sage: w2.add_cell(Bunch({'name': 'add_0_4', 'old': 0, 'new': 3, 'owner': w2, 'type': 'change'}))
            sage: w2.value
            [[None, None, 1, 2, 3], [None, 1], [4]]
            sage: w2.links[2].source[0].__class__
            <class 'sage_combinat_widgets.grid_view_widget.TextCell'>
            sage: w2.links[7].source[0].__class__
            <class 'sage_combinat_widgets.grid_view_widget.AddableTextCell'>
        """
        # Link every existing cell's value to its 'cell_i_j' editor trait.
        for pos in self.cells.keys():
            traitname = 'cell_%d_%d' % (pos)
            child = self.get_child(pos)
            if child and hasattr(child, 'value') and traitname in self.traits():
                self.links.append(cdlink((child, 'value'), (self, traitname), self.cast))
        for pos in self.addable_cells():
            # A directional link to trait 'add_i_j'
            traitname = 'add_%d_%d' % (pos)
            child = self.get_child(pos)
            if child and hasattr(child, 'value') and traitname in self.traits():
                self.links.append(cdlink((child, 'value'), (self, traitname), self.cast))
    def update_style(self, css_classes=None, css_class_index=None):
        r"""
        Update look and feel -- ie CSS classes.
        Therefore avoid redrawing if overall shape is unchanged.

        TESTS ::

            sage: from sage_combinat_widgets.grid_view_widget import *
            sage: from sage.graphs.generators.families import AztecDiamondGraph
            sage: az = AztecDiamondGraph(4)
            sage: w = GridViewWidget(az, cell_widget_classes=[ButtonCell], blank_widget_class=BlankButton)
            sage: w.children[1].children[3]._dom_classes
            ('gridbutton',)
            sage: w.update_style(css_classes=['cl0', 'cl1', 'cl2', 'cl3'], css_class_index=lambda x:x[0]%4)
            sage: w.children[1].children[3]._dom_classes
            ('gridbutton', 'cl1')
        """
        if not css_classes:
            css_classes = self.css_classes
        if not css_class_index:
            # NOTE(review): falls back to ``self.cell_widget_class_index`` and
            # not ``self.css_class_index`` (which ``__init__`` builds for this
            # purpose) -- looks like a typo; confirm before changing.
            css_class_index = self.cell_widget_class_index
        for row in self.children:
            for cell in row.children:
                if not hasattr(cell, 'position') or cell.position is None:
                    continue # Do we want to change blank cells' style?
                # Strip all known style classes, then apply the one computed
                # for this cell's position.
                for cl in css_classes:
                    cell.remove_class(cl)
                cell.add_class(css_classes[css_class_index(cell.position)])
    def draw(self, cell_widget_classes=None, cell_widget_class_index=None,
             addable_widget_class=None, blank_widget_class=None):
        r"""
        Add children to the GridWidget:
        - Sage object/grid editor cells
        - Blank cells for empty cells in a row
        - Addable cells if any

        Used classes can be passed as arguments
        to enable changing shapes, colors ..
        """
        self.donottrack = True # Prevent any interactivity while drawing the widget
        self.reset_links()
        self.compute_height()
        positions = sorted(list(self.cells.keys()))
        # Group the cell contents row by row: rows[i] = [(pos, content), ...].
        rows = [[(pos, self.cells[pos]) for pos in positions if pos[0]==i] \
                for i in range(self.height)]
        vbox_children = []
        addable_positions = self.addable_cells()
        removable_positions = self.removable_cells()
        addable_rows = {}
        if addable_positions:
            addable_rows = {
                i : [pos for pos in addable_positions if pos[0]==i] \
                for i in range(max([1+t[0] for t in addable_positions]))
            }
        # Fall back to the classes chosen at construction time.
        if not cell_widget_classes:
            cell_widget_classes = self.cell_widget_classes
        if not cell_widget_class_index:
            cell_widget_class_index = self.cell_widget_class_index
        if not addable_widget_class:
            addable_widget_class = self.addable_widget_class
        if not blank_widget_class:
            blank_widget_class = self.blank_widget_class
        i, j = -1, -1 # initialization ; necessary for an empty grid
        for i in range(self.height):
            r = rows[i]
            if not r: # Empty row
                # NOTE(review): if ``addable_positions`` is empty here,
                # ``addable_rows`` is {} and ``addable_rows[i]`` raises
                # KeyError -- confirm empty rows only occur with addables.
                if not addable_rows[i]:
                    vbox_children.append(HBox((
                        blank_widget_class(layout=self.cell_layout, disabled=True),
                    )))
                    continue
                hbox_children = []
                for j in range(max([pos[1]+1 for pos in addable_rows[i]])):
                    if (i,j) in addable_positions:
                        hbox_children.append(addable_widget_class((i,j), layout=self.cell_layout))
                    else:
                        hbox_children.append(blank_widget_class(layout=self.cell_layout, disabled=True))
                vbox_children.append(HBox((hbox_children)))
                continue
            j = 0
            hbox_children = []
            # Fill the row up to its rightmost occupied column.
            while j<=max([t[0][1] for t in rows[i]]):
                if (i,j) in positions:
                    cell_content = self.cells[(i,j)]
                    cell_widget_class = cell_widget_classes[cell_widget_class_index((i,j))]
                    cell_display = self.adapter.cell_to_display(cell_content, self.displaytype)
                    cell = cell_widget_class(cell_display,
                                             (i,j),
                                             layout=self.cell_layout,
                                             placeholder=cell_display)
                    if (i,j) in removable_positions:
                        # Toggle buttons show '-' for removal; text cells get a CSS class.
                        if issubclass(cell_widget_class, ToggleButtonSingleton):
                            cell.description = '-'
                            cell.disabled = False
                        else:
                            cell.add_class('removablecell')
                    hbox_children.append(cell)
                elif (i,j) in addable_positions:
                    # Inside the grid-represented object limits
                    hbox_children.append(addable_widget_class((i,j), layout=self.cell_layout))
                else:
                    hbox_children.append(blank_widget_class(layout=self.cell_layout))
                j+=1
            if addable_positions and \
               j > max([t[0][1] for t in rows[i]]) and (i,j) in addable_positions:
                # Outside of the grid-represented object limits
                # NOTE(review): uses ``self.addable_widget_class`` rather than
                # the local ``addable_widget_class`` override -- confirm intended.
                hbox_children.append(self.addable_widget_class((i,j), layout=self.cell_layout))
            vbox_children.append(HBox(hbox_children))
        # Extra rows below the object, made only of addable/blank cells.
        for i in addable_rows:
            if i >= self.height:
                row = addable_rows[i]
                hbox_children = []
                for j in range(max([(pos[1]+1) for pos in row])):
                    if (i,j) in row:
                        hbox_children.append(self.addable_widget_class((i,j), layout=self.cell_layout))
                    else:
                        hbox_children.append(blank_widget_class(layout=self.cell_layout))
                vbox_children.append(HBox(hbox_children))
        if self.display_convention == 'fr':
            # French convention: row 0 is displayed at the bottom.
            vbox_children.reverse()
        self.children = vbox_children
        self.add_links()
        self.donottrack = False
def disallow_inside_focus(self):
r"""
Disallow focus for all cells except the first.
"""
for r in self.children:
for c in r.children:
if hasattr(c, 'disallow_focus'):
c.disallow_focus()
if hasattr(self.children[0].children[0], 'allow_focus'):
self.children[0].children[0].allow_focus()
def get_child(self, pos):
r"""
Get child widget corresponding to self.cells[pos]
TESTS ::
sage: from sage_combinat_widgets.grid_view_widget import GridViewWidget
sage: t = StandardTableau([[1, 4, 7, 8, 9, 10, 11], [2, 5, 13], [3, 6], [12, 15], [14]])
sage: w1 = GridViewWidget(t)
sage: w1.get_child((1,2)).value
u'13'
sage: w2 = GridViewWidget(t, display_convention='fr')
sage: w1.get_child((1,2)).value == w2.get_child((1,2)).value
True
"""
if self.display_convention == 'fr':
return self.children[self.total_height - pos[0] - 1].children[pos[1]]
return self.children[pos[0]].children[pos[1]]
    def set_dirty(self, pos, val, err=None):
        r"""
        Set cell #pos as dirty, i.e. holding an invalid value,
        and reflect that state in the corresponding child widget.
        INPUT:
        - ``pos`` -- a tuple
        - ``val`` -- a(n incorrect) value for `pos`
        - ``err`` -- an exception
        TESTS ::
            sage: from sage_combinat_widgets import GridViewWidget
            sage: t = StandardTableau([[1, 2, 5, 6], [3], [4]])
            sage: w = GridViewWidget(t)
            sage: from traitlets import Bunch
            sage: err = w.set_cell(Bunch({'name': 'cell_0_2', 'old': 5, 'new': 7, 'owner': w, 'type': 'change'}))
            sage: w.set_dirty((0,2), 7, err)
            sage: w.dirty
            {(0, 2): 7}
            sage: w.dirty_errors[(0,2)]
            ValueError('the entries in each row of a semistandard tableau must be weakly increasing')
            sage: w.children[0].children[2]._dom_classes
            ('gridcell', 'dirty')
            sage: w.children[0].children[2]._tooltip
            'the entries in each row of a semistandard tableau must be weakly increasing'
        """
        # Record the dirty state at the model level first.
        super(GridViewWidget, self).set_dirty(pos, val, err)
        # Then mirror it in the DOM so the cell can be styled via CSS.
        child = self.get_child(pos)
        child.add_class('dirty')
        if err:
            # Surface the validation error to the user as a tooltip.
            child.set_tooltip(self.dirty_info(pos))
    def unset_dirty(self, pos):
        r"""
        Set a cell no more 'dirty': clear the model state and
        remove the CSS class and tooltip from the child widget.
        INPUT:
        - ``pos`` -- a tuple
        TESTS ::
            sage: from sage_combinat_widgets import GridViewWidget
            sage: t = StandardTableau([[1, 2, 5, 6], [3], [4]])
            sage: w = GridViewWidget(t)
            sage: from traitlets import Bunch
            sage: err = w.set_cell(Bunch({'name': 'cell_0_2', 'old': 5, 'new': 7, 'owner': w, 'type': 'change'}))
            sage: w.set_dirty((0,2), 7, err)
            sage: err = w.set_cell(Bunch({'name': 'cell_2_0', 'old': 4, 'new': 9, 'owner': w, 'type': 'change'}))
            sage: w.set_dirty((2,0), 9, err)
            sage: w.dirty
            {(0, 2): 7, (2, 0): 9}
            sage: w.unset_dirty((0,2))
            sage: w.dirty
            {(2, 0): 9}
            sage: w.children[0].children[2]._dom_classes
            ('gridcell',)
            sage: w.children[0].children[2]._tooltip
            ''
        """
        super(GridViewWidget, self).unset_dirty(pos)
        # Undo the DOM changes made by `set_dirty`.
        child = self.get_child(pos)
        child.remove_class('dirty')
        child.set_tooltip()
    def reset_dirty(self):
        r"""
        Reset all previously 'dirty' cells: clear CSS classes and
        tooltips on the widgets, then clear the model state.
        TESTS ::
            sage: from sage_combinat_widgets import GridViewWidget
            sage: t = StandardTableau([[1, 2, 5, 6], [3], [4]])
            sage: w = GridViewWidget(t)
            sage: from traitlets import Bunch
            sage: err = w.set_cell(Bunch({'name': 'cell_0_2', 'old': 5, 'new': 7, 'owner': w, 'type': 'change'}))
            sage: w.set_dirty((0,2), 7, err)
            sage: err = w.set_cell(Bunch({'name': 'cell_2_0', 'old': 4, 'new': 9, 'owner': w, 'type': 'change'}))
            sage: w.set_dirty((2,0), 9, err)
            sage: w.dirty
            {(0, 2): 7, (2, 0): 9}
            sage: w.children[2].children[0]._dom_classes
            ('gridcell', 'removablecell', 'dirty')
            sage: w.reset_dirty()
            sage: w.dirty
            {}
            sage: w.children[2].children[0]._dom_classes
            ('gridcell', 'removablecell')
            sage: w.children[2].children[0]._tooltip
            ''
        """
        # Clean up the DOM for every dirty cell before the model state
        # (and with it the list of dirty positions) is wiped.
        for pos in self.dirty:
            child = self.get_child(pos)
            child.remove_class('dirty')
            child.set_tooltip()
        super(GridViewWidget, self).reset_dirty()
def PartitionGridViewWidget(obj, display_convention='en'):
    r"""
    A default widget for partitions.
    TESTS ::
        sage: from sage_combinat_widgets.grid_view_widget import PartitionGridViewWidget
        sage: sp = SkewPartition([[7, 4, 2, 1],[2, 1, 1]])
        sage: w = PartitionGridViewWidget(sp, display_convention='fr')
        sage: len(w.links)
        17
    """
    widget = GridViewWidget(
        obj,
        cell_layout=buttoncell_smaller_layout,
        cell_widget_classes=[DisabledButtonCell, ButtonCell],
        addable_widget_class=AddableButtonCell,
        blank_widget_class=BlankButton,
        display_convention=display_convention
    )
    def cell_widget_class_index(pos):
        # Removable cells get the active ButtonCell class (index 1);
        # every other cell gets the DisabledButtonCell class (index 0).
        return 1 if pos in widget.removable_cells() else 0
    widget.cell_widget_class_index = cell_widget_class_index
    return widget
r"""
IPywidgets with simple additional features to serve as singleton units to larger widgets.
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from traitlets import HasTraits, Int, Unicode
from ipywidgets import Button, Combobox, Dropdown, HTML, HTMLMath, Text, Textarea, ToggleButton, register
JS_VERSION = '0.7.8'
class Singleton(HasTraits):
    """Additional features to an ipywidgets widget."""
    # Traits synced with the Javascript views.
    _focus = Unicode().tag(sync=True)
    _tooltip = Unicode('').tag(sync=True) # set '' as default value
    tabindex = Int().tag(sync=True)
    def set_tooltip(self, s=''):
        """Set (or clear, by default) the widget tooltip text."""
        self._tooltip = s
    def focus(self):
        """Ask the front-end to give this widget the focus."""
        # Reset to '' first so a trait-change event fires even when the
        # trait already holds 'on' -- presumably needed because traitlets
        # does not notify on same-value assignment; TODO confirm.
        self._focus = ''
        self._focus = 'on'
    def blur(self):
        """Ask the front-end to remove focus from this widget."""
        self._focus = ''
        self._focus = 'off'
    def set_tabindex(self, i=0):
        """Set the HTML 'tabindex' attribute value."""
        self.tabindex = i
    def allow_focus(self):
        """Put this widget back into the keyboard tab order."""
        self.set_tabindex(0)
    def disallow_focus(self):
        """Remove this widget from the keyboard tab order."""
        self.set_tabindex(-1)
@register
class ButtonSingleton(Button, Singleton):
    """Button with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('ButtonSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('ButtonSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
@register
class ComboboxSingleton(Combobox, Singleton):
    """Combobox with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('ComboboxSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('ComboboxSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
@register
class DropdownSingleton(Dropdown, Singleton):
    """Dropdown with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('DropdownSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('DropdownSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
@register
class HTMLSingleton(HTML, Singleton):
    """HTML widget with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('HTMLSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('HTMLSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
@register
class HTMLMathSingleton(HTMLMath, Singleton):
    """HTML Math widget with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('HTMLMathSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('HTMLMathSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
@register
class TextSingleton(Text, Singleton):
    """Input text with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('TextSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('TextSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
@register
class TextareaSingleton(Textarea, Singleton):
    """Text area with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('TextareaSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('TextareaSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
@register
class ToggleButtonSingleton(ToggleButton, Singleton):
    """Toggle button with tooltip and focus."""
    # Model/view names binding this widget to its counterpart in the
    # 'sage-combinat-widgets' Javascript package.
    _model_name = Unicode('ToggleButtonSingletonModel').tag(sync=True)
    _model_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _model_module_version = Unicode(JS_VERSION).tag(sync=True)
    _view_name = Unicode('ToggleButtonSingletonView').tag(sync=True)
    _view_module = Unicode('sage-combinat-widgets').tag(sync=True)
    _view_module_version = Unicode(JS_VERSION).tag(sync=True)
r"""
Generic Grid View Adapter
**Grid View operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~GridViewAdapter.cell_to_display` | Static method for typecasting cell content to widget display value
:meth:`~GridViewAdapter.display_to_cell` | Instance method for typecasting widget display value to cell content
:meth:`~GridViewAdapter.compute_cells` | Compute object cells as a dictionary { coordinate pair : integer }
:meth:`~GridViewAdapter.from_cells` | Create a new Sage object from a cells dictionary
:meth:`~GridViewAdapter._validate` | Validate a new object
:meth:`~GridViewAdapter.get_cell` | Get the object cell content
:meth:`~GridViewAdapter.set_cell` | Set the object cell content
:meth:`~GridViewAdapter.addable_cells` | List addable cells
:meth:`~GridViewAdapter.removable_cells` | List removable cells
:meth:`~GridViewAdapter.add_cell` | Add a cell at given position
:meth:`~GridViewAdapter.remove_cell` | Remove a cell from given position
:meth:`~GridViewAdapter.append_row` | Append a row
:meth:`~GridViewAdapter.insert_row` | Insert a row at given index
:meth:`~GridViewAdapter.remove_row` | Remove a row at given index
:meth:`~GridViewAdapter.append_column` | Append a column
:meth:`~GridViewAdapter.insert_column` | Insert a column at given index
:meth:`~GridViewAdapter.remove_column` | Remove a column at given index
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
import traitlets, sage.all
from sage.all import SageObject
from sage.misc.abstract_method import abstract_method
from six import text_type
import __main__
def eval_in_main(s):
    """
    Evaluate the expression `s`, first in the Sage global namespace,
    then -- if that fails -- in the ``__main__`` namespace.
    NOTE: `s` is passed to ``eval``; only call this on trusted input.
    TESTS ::
        sage: from sage_widget_adapters.generic_grid_view_adapter import eval_in_main
        sage: from sage.combinat.tableau import Tableaux
        sage: eval_in_main("Tableaux")
        <class 'sage.combinat.tableau.Tableaux'>
    """
    try:
        return eval(s, sage.all.__dict__)
    except Exception:
        # Not resolvable in the Sage namespace (typically a NameError):
        # fall back to user-defined names in __main__.
        # `except Exception` (not a bare except) so KeyboardInterrupt
        # and SystemExit still propagate.
        return eval(s, __main__.__dict__)
class GridViewAdapter(object):
    r"""
    A generic grid view adapter.
    ATTRIBUTES::
        * ``objclass`` -- object class for this adapter
        * ``constructorname`` -- name of the constructor that builds a math object from a list
        * ``traitclass`` -- cells trait class
        * ``celltype`` -- cell content object type (to be defined in subclasses)
        * ``cellzero`` -- cell content zero (to be defined in subclasses)
        * ``addablecelltype`` -- addable cell content type (to be defined in subclasses) -- by default = celltype
        * ``addablecellzero`` -- addable cell content zero (to be defined in subclasses) -- by default == cellzero
    """
    objclass = SageObject
    constructorname = None  # duplicate assignment removed
    traitclass = traitlets.Instance
    addablecelltype = None
    addablecellzero = None
@staticmethod
def cell_to_display(cell_content, display_type=text_type):
r"""
From a cell value `cell_content`,
return widget display value.
TESTS ::
sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
sage: from six import text_type
sage: GridViewAdapter.cell_to_display(1, text_type)
'1'
sage: GridViewAdapter.cell_to_display(True, bool)
True
"""
if display_type == text_type:
return str(cell_content)
return cell_content
    def display_to_cell(self, display_value, display_type=text_type):
        r"""
        From an unicode string `s`,
        return matching cell value.
        TESTS ::
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: a = GridViewAdapter()
            sage: from six import text_type
            sage: a.display_to_cell('1', text_type)
            Traceback (most recent call last):
            ...
            AttributeError: 'GridViewAdapter' object has no attribute 'celltype'
        """
        # `celltype` / `cellzero` are only defined on concrete subclasses,
        # hence the AttributeError in the doctest above.
        if display_value:
            return self.celltype(display_value)
        # An empty display value maps to the adapter's zero element.
        return self.cellzero
    # Abstract hook: concrete adapters must implement the object -> cells mapping.
    @staticmethod
    @abstract_method
    def compute_cells(obj):
        r"""
        From an object `obj`,
        return a dictionary { coordinates pair : integer }
        """
    @classmethod
    def _validate(cls, obj, constructorname=''):
        r"""
        From an object `obj`,
        Try to build an object of type `cls`.
        Returns either the constructed object or, on failure,
        the raised exception itself (it is *returned*, not re-raised,
        so callers can inspect it).
        TESTS ::
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: assert issubclass(GridViewAdapter._validate(pi).__class__, SageObject)
            sage: from sage.matrix.constructor import matrix
            sage: assert issubclass(GridViewAdapter._validate(pi, constructorname='matrix').__class__, BaseException)
        """
        try:
            # Precedence: explicit constructor name, then the class-level
            # one, then the object as-is if already of the right type.
            if constructorname:
                return eval_in_main(constructorname)(obj)
            if cls.constructorname:
                return eval_in_main(cls.constructorname)(obj)
            if issubclass(obj.__class__, cls.objclass):
                return obj
            return cls.objclass(obj)
        except Exception as e:
            return e
    # Abstract hook: concrete adapters must implement the cells -> object mapping.
    @classmethod
    @abstract_method
    def from_cells(cls, cells={}):
        r"""
        From a dictionary { coordinates pair : integer },
        return a Sage object.
        """
    @staticmethod
    def get_cell(obj, pos):
        r"""
        From an object and a tuple `pos`,
        return the object cell value at position `pos`.
        TESTS ::
            sage: from sage.matrix.constructor import Matrix
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: m = Matrix(QQ, 3, 3, range(9))/2
            sage: GridViewAdapter.get_cell(m, (1,2))
            5/2
            sage: from sage.combinat.tableau import Tableau
            sage: t = Tableau([[1, 2, 5, 6], [3, 7], [4]])
            sage: GridViewAdapter.get_cell(t, (1,1))
            7
            sage: GridViewAdapter.get_cell(t, (1,6))
            Traceback (most recent call last):
            ...
            ValueError: Cell '(1, 6)' does not exist!
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: st = SkewTableau([[None,1,2],[3,4,5],[6]])
            sage: GridViewAdapter.get_cell(st, (0,0))
            sage: GridViewAdapter.get_cell(st, (1,1))
            4
        """
        # Fast path: the object supports direct double indexing.
        try:
            return obj[pos[0]][pos[1]]
        except:
            pass
        # Fallback: materialize the object as a list of rows.
        # (deliberately broad excepts: any failure here means the object
        # simply does not support this access pattern)
        try:
            l = [list(x) for x in obj]
        except:
            raise NotImplementedError("Adapter class method 'get_cell(obj, pos)' is not implemented.")
        try:
            return l[pos[0]][pos[1]]
        except:
            raise ValueError("Cell '%s' does not exist!" % str(pos))
def make_dirty(self, l, dirty={}):
r"""
Append 'dirty' values to list 'l'.
Return a list with no empty values.
TESTS ::
sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
sage: from sage.combinat.tableau import Tableau
sage: t = Tableau([[1, 2, 5, 6], [3, 7], [4]])
sage: ga = GridViewAdapter()
sage: ga.cellzero = 0
sage: ga.make_dirty(t.to_list(), {(1,2):42})
[[1, 2, 5, 6], [3, 7, 42], [4]]
sage: ga.make_dirty(t.to_list(), {(2,0):0})
[[1, 2, 5, 6], [3, 7]]
"""
for p in dirty:
if p[0] < len(l):
if p[1] < len(l[p[0]]):
if dirty[p] == self.cellzero:
del l[p[0]][p[1]]
else:
l[p[0]][p[1]] = dirty[p]
elif len(l[p[0]]) == p[1] and dirty[p] != self.cellzero:
l[p[0]].append(dirty[p])
else:
for i in range(p[0] - len(l)):
l.append([])
l.append([dirty[p]])
return [val for val in l if val]
def set_cell(self, obj, pos, val, dirty={}, constructorname=''):
r"""
From a Sage object, a position (pair of coordinates) `pos` and a value `val`,
return a new Sage object.
with a modified cell at position `pos`.
TESTS ::
sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
sage: from sage.combinat.tableau import Tableau
sage: t = Tableau([[1, 2, 5, 6], [3, 7], [4]])
sage: ga = GridViewAdapter()
sage: ga.set_cell(t, (1,1), 8, constructorname='Tableau')
[[1, 2, 5, 6], [3, 8], [4]]
sage: ga.cellzero = 0
sage: ga.set_cell(t, (0,3), 6, {(0,3):5}, constructorname='StandardTableau')
[[1, 2, 5, 6], [3, 7], [4]]
sage: from sage.matrix.constructor import Matrix, matrix
sage: m = Matrix(QQ, 3, 3, range(9))/2
sage: ga.set_cell(m, (0,1), 2/3, constructorname='matrix')
[ 0 2/3 1]
[3/2 2 5/2]
[ 3 7/2 4]
sage: ga.set_cell(m, (4,2), 1/2, constructorname='matrix')
Traceback (most recent call last):
...
ValueError: Position '(4, 2)' does not exist
"""
try:
l = [list(x) for x in obj]
except:
raise NotImplementedError("Adapter method 'set_cell(obj, pos, val)' is not implemented.")
l = self.make_dirty(l, dirty)
try:
l[pos[0]][pos[1]] = val
except:
raise ValueError("Position '%s' does not exist" % str(pos))
return self._validate(l, constructorname)
    # Optional hook: concrete adapters list where a cell may be added.
    @staticmethod
    @abstract_method(optional = True)
    def addable_cells(obj):
        r"""
        For Sage object `obj`,
        list those cell positions where a user could want to add a cell,
        and get a still valid Sage object for this adapter.
        """
    # Optional hook: concrete adapters list where a cell may be removed.
    @staticmethod
    @abstract_method(optional = True)
    def removable_cells(obj):
        r"""
        For Sage object `obj`,
        list those cell positions where a user could want to remove a cell,
        and get a still valid Sage object for this adapter.
        """
    # Optional hook: concrete adapters implement single-cell addition.
    @abstract_method(optional = True)
    def add_cell(self, obj, pos, val, dirty={}):
        r"""
        This method should try to add a cell to object `obj`
        at position `pos` and with value `val`.
        """
    # Optional hook: concrete adapters implement single-cell removal.
    @abstract_method(optional = True)
    def remove_cell(self, obj, pos, dirty={}):
        r"""
        This method should try to remove a cell from object `obj`
        at position `pos`.
        """
    # Optional hook: concrete adapters implement row appending.
    @abstract_method(optional = True)
    def append_row(self, obj, r=None):
        r"""
        This method should try to append a row to object `obj`
        with values from list `r`.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: a = GridViewAdapter()
            sage: a.append_row(S, (1,2,3)) #doctest: +IGNORE_EXCEPTION_DETAIL
            Traceback (most recent call last):
            ...
            TypeError: 'AbstractMethod' object is not callable
        """
    # Optional hook: concrete adapters implement row insertion.
    @abstract_method(optional = True)
    def insert_row(self, obj, index, r=None):
        r"""
        This method should try to insert a row to object `obj`
        at index `index`, with values from list `r`.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: a = GridViewAdapter()
            sage: a.insert_row(S, 1, (1,2,3)) #doctest: +IGNORE_EXCEPTION_DETAIL
            Traceback (most recent call last):
            ...
            TypeError: 'AbstractMethod' object is not callable
        """
def add_row(self, obj, index=None, r=None):
r"""
An alias for appending/inserting a row.
TESTS ::
sage: from sage.matrix.matrix_space import MatrixSpace
sage: S = MatrixSpace(ZZ, 4,3)
sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
sage: a = GridViewAdapter()
sage: a.add_row(S, 1, (1,2,3,4))
Traceback (most recent call last):
...
NotImplementedError: Method 'insert_row' is not implemented.
"""
if index:
try:
return self.insert_row(obj, index, r)
except:
raise NotImplementedError("Method 'insert_row' is not implemented.")
else:
try:
return self.append_row(obj, r)
except:
raise NotImplementedError("Method 'append_row' is not implemented.")
    # Optional hook: concrete adapters implement row removal.
    @abstract_method(optional = True)
    def remove_row(self, obj, index=None):
        r"""
        This method should try to remove a row from object `obj`
        at index `index`.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: a = GridViewAdapter()
            sage: a.remove_row(S, 1) #doctest: +IGNORE_EXCEPTION_DETAIL
            Traceback (most recent call last):
            ...
            TypeError: 'AbstractMethod' object is not callable
        """
    # Optional hook: concrete adapters implement column appending.
    @abstract_method(optional = True)
    def append_column(self, obj, r=None):
        r"""
        This method should try to append a column to object `obj`
        with values from list `r`.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: a = GridViewAdapter()
            sage: a.append_column(S, (1,2,3,4)) #doctest: +IGNORE_EXCEPTION_DETAIL
            Traceback (most recent call last):
            ...
            TypeError: 'AbstractMethod' object is not callable
        """
    # Optional hook: concrete adapters implement column insertion.
    @abstract_method(optional = True)
    def insert_column(self, obj, index, r=None):
        r"""
        This method should try to insert a column to object `obj`
        at index `index`, with values from list `r`.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: a = GridViewAdapter()
            sage: a.insert_column(S, 1, (1,2,3,4)) #doctest: +IGNORE_EXCEPTION_DETAIL
            Traceback (most recent call last):
            ...
            TypeError: 'AbstractMethod' object is not callable
        """
    # Optional hook: concrete adapters implement column removal.
    @abstract_method(optional = True)
    def remove_column(self, obj, index=None):
        r"""
        This method should try to remove a column from object `obj`
        at index `index`.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
            sage: a = GridViewAdapter()
            sage: a.remove_column(S, 2) #doctest: +IGNORE_EXCEPTION_DETAIL
            Traceback (most recent call last):
            ...
            TypeError: 'AbstractMethod' object is not callable
        """
def add_column(self, obj, index=None, r=None):
r"""
An alias for appending/inserting a column.
TESTS ::
sage: from sage.matrix.matrix_space import MatrixSpace
sage: S = MatrixSpace(ZZ, 4,3)
sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
sage: from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
sage: a = GridViewAdapter()
sage: a.add_column(S, 1, (1,2,3))
Traceback (most recent call last):
...
NotImplementedError: Method 'insert_column' is not implemented.
"""
if index:
try:
return self.insert_column(obj, index, r)
except:
raise NotImplementedError("Method 'insert_column' is not implemented.")
else:
try:
return self.append_column(obj, r)
except:
raise NotImplementedError("Method 'append_column' is not implemented.") | /sage_combinat_widgets-0.7.8-py3-none-any.whl/sage_widget_adapters/generic_grid_view_adapter.py | 0.926105 | 0.63775 | generic_grid_view_adapter.py | pypi |
r"""
Grid View Adapter for matrices
**Grid View matrix operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~MatrixGridViewAdapter.display_to_cell` | Instance method for typecasting widget display value to cell content
:meth:`~MatrixGridViewAdapter.compute_cells` | Compute matrix cells as a dictionary { coordinate pair : label }
:meth:`~MatrixGridViewAdapter.from_cells` | Create a new matrix from a cells dictionary
:meth:`~MatrixGridViewAdapter.addable_cells` | List addable cells
:meth:`~MatrixGridViewAdapter.removable_cells` | List removable cells
:meth:`~MatrixGridViewAdapter.append_row` | Append a row
:meth:`~MatrixGridViewAdapter.insert_row` | Insert a row at given index
:meth:`~MatrixGridViewAdapter.remove_row` | Remove a row at given index
:meth:`~MatrixGridViewAdapter.append_column` | Append a column
:meth:`~MatrixGridViewAdapter.insert_column` | Insert a column at given index
:meth:`~MatrixGridViewAdapter.remove_column` | Remove a column at given index
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from sage.matrix.matrix2 import Matrix
from sage.matrix.constructor import matrix
from itertools import product
from sage.modules.free_module_element import vector
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
from six import text_type
class MatrixGridViewAdapter(GridViewAdapter):
    r"""
    Grid view adapter for matrices.
    """
    objclass = Matrix  # adapted Sage objects are matrices
    constructorname = 'matrix'  # global constructor used to rebuild objects
    def __init__(self, obj):
        r"""
        Init an adapter object, set attributes `celltype` and `cellzero`.
        TESTS ::
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: from sage.matrix.constructor import Matrix
            sage: m = Matrix(QQ, 3, 3, range(9))/2
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.celltype
            <type 'sage.rings.rational.Rational'>
            sage: ma.cellzero
            0
        """
        super(MatrixGridViewAdapter, self).__init__()
        self.ring = obj.base_ring()
        # Best-effort probing of the base ring for its element class:
        # try the declared element_class, then the class of a sample
        # element. Deliberately broad excepts -- any failure just moves
        # on to the next strategy.
        try:
            self.celltype = self.ring.element_class
        except:
            try:
                if hasattr(self.ring, 'an_element'):
                    self.celltype = self.ring.an_element().__class__
                elif hasattr(self.ring, 'random_element'):
                    self.celltype = self.ring.random_element().__class__
                else:
                    raise TypeError("Cannot determine matrix base ring elements class.")
            except:
                raise TypeError("Cannot determine matrix base ring elements class.")
        self.cellzero = self.ring.zero()
    def display_to_cell(self, display_value, display_type=text_type):
        r"""
        From a widget display value `display_value`,
        return matching cell value.
        TESTS ::
            sage: from sage.matrix.constructor import Matrix
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: m = Matrix(QQ, 3, 2, range(6))/2
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.display_to_cell('2/3')
            2/3
            sage: ma.display_to_cell('pi')
            Traceback (most recent call last):
            ...
            ValueError: Cannot cast display value pi to matrix cell
        """
        if display_value:
            # Prefer coercion through the base ring, then fall back to the
            # element class constructor before giving up.
            try:
                return self.ring(display_value)
            except:
                try:
                    return self.celltype(display_value)
                except:
                    raise ValueError("Cannot cast display value %s to matrix cell" % (display_value))
        # An empty display value maps to the ring's zero.
        return self.cellzero
@staticmethod
def compute_cells(obj):
r"""
From a matrix `obj`,
return a dictionary { coordinates pair : cell value (as a Sage object) }
TESTS ::
sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
sage: from sage.matrix.constructor import Matrix
sage: m = Matrix(QQ, 3, 2, range(6))/2
sage: MatrixGridViewAdapter.compute_cells(m)
{(0, 0): 0, (0, 1): 1/2, (1, 0): 1, (1, 1): 3/2, (2, 0): 2, (2, 1): 5/2}
"""
return {(i,j):obj[i][j] for (i,j) in product(range(obj.nrows()), range(len(obj[0])))}
    @classmethod
    def from_cells(cls, cells={}):
        r"""
        From a dictionary { coordinates pair : integer },
        return a matrix with corresponding cells.
        The dictionary must be non-empty and contain an entry for every
        position of the resulting rectangular matrix.
        TESTS ::
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: from sage.matrix.constructor import Matrix
            sage: MatrixGridViewAdapter.from_cells({(0, 0): 0, (0, 1): 1/2, (0, 2): 1, (1, 0): 3/2, (1, 1): 2, (1, 2): 5/2})
            [  0 1/2   1]
            [3/2   2 5/2]
        """
        # Dimensions are inferred from the largest coordinates present.
        nrows = max([pos[0]+1 for pos in cells])
        ncols = max([pos[1]+1 for pos in cells])
        return matrix([[cells[(i,j)] for j in range(ncols)] for i in range(nrows)])
    @staticmethod
    def addable_cells(obj):
        r"""
        No cell should be added in isolation
        except for vectors
        TESTS ::
            sage: from sage.matrix.constructor import Matrix
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: m = Matrix(QQ, 2, 3, range(6))/2
            sage: MatrixGridViewAdapter.addable_cells(m)
            []
        """
        # Row / column vectors may grow at their free end;
        # general matrices grow only by whole rows/columns.
        if obj.nrows() == 1:
            return [(0, obj.ncols())]
        if obj.ncols() == 1:
            return [(obj.nrows(), 0)]
        return []
    @staticmethod
    def removable_cells(obj):
        r"""
        No cell should be removed in isolation
        except for vectors
        TESTS ::
            sage: from sage.matrix.constructor import Matrix
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: m = Matrix(QQ, 2, 3, range(6))/2
            sage: MatrixGridViewAdapter.removable_cells(m)
            []
        """
        # Row / column vectors may shrink at their free end;
        # general matrices shrink only by whole rows/columns.
        if obj.nrows() == 1:
            return [(0, obj.ncols()-1)]
        if obj.ncols() == 1:
            return [(obj.nrows()-1, 0)]
        return []
    def remove_cell(self, obj, pos, dirty={}):
        r"""
        What to do if the user removes a value.
        (we replace the resulting blank with a zero)
        Note: this mutates ``obj`` in place and returns it.
        TESTS ::
            sage: from sage.matrix.constructor import Matrix
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: m = Matrix(QQ, 2, 3, range(6))/2
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.remove_cell(m, (1,0))
            [  0 1/2   1]
            [  0   2 5/2]
        """
        # A matrix cell cannot really disappear: overwrite it with zero.
        obj[pos] = self.cellzero
        return obj
    def append_row(self, obj, r=None):
        r"""
        Append a row to a matrix.
        A missing row defaults to zeros; a wrong-length row is
        truncated or zero-padded to fit.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.append_row(m, (1,2,3))
            [ 1  7  1]
            [ 0  0  3]
            [ 0 -1  2]
            [ 1  0 -3]
            [ 1  2  3]
        """
        if not r:
            # No values given: append a zero row.
            return obj.stack(vector([self.cellzero] * obj.ncols()))
        if len(r) > obj.ncols():
            print("Row is too long. Truncating")
            r = r[:obj.ncols()]
        elif len(r) < obj.ncols():
            r = list(r) + [self.cellzero] * (obj.ncols() - len(r))
        return obj.stack(vector([self.display_to_cell(x) for x in r]))
    def insert_row(self, obj, index, r=None):
        r"""
        Insert a row into a matrix.
        A missing row defaults to zeros; a wrong-length row is
        truncated or zero-padded to fit.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.insert_row(m, 1, (1,2,3))
            [ 1  7  1]
            [ 1  2  3]
            [ 0  0  3]
            [ 0 -1  2]
            [ 1  0 -3]
        """
        if not r:
            r = [self.cellzero] * obj.ncols()
        else:
            if len(r) > obj.ncols():
                print("Row is too long. Truncating")
                r = r[:obj.ncols()]
            elif len(r) < obj.ncols():
                r = list(r) + [self.cellzero] * (obj.ncols() - len(r))
        # Split the matrix at `index` and stack the new row in between.
        top = obj.matrix_from_rows(range(index))
        bottom = obj.matrix_from_rows(range(index,obj.nrows()))
        return top.stack(vector([self.display_to_cell(x) for x in r])).stack(bottom)
    def remove_row(self, obj, index=None):
        r"""
        Remove a row from a matrix.
        With no ``index``, the last row is removed.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: m = S.matrix([0,1,2,3,4,5,6,7,8,9,10,11])
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.remove_row(m, 2)
            [ 0  1  2]
            [ 3  4  5]
            [ 9 10 11]
            sage: ma.remove_row(m)
            [0 1 2]
            [3 4 5]
            [6 7 8]
        """
        if index is None:
            index = obj.nrows() - 1
        return obj.delete_rows([index])
    def append_column(self, obj, c=None):
        r"""
        Append a column to a matrix.
        A missing column defaults to zeros; a wrong-length column is
        truncated or zero-padded to fit.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.append_column(m, (1,1,1))
            [ 1  7  1  1]
            [ 0  0  3  1]
            [ 0 -1  2  1]
            [ 1  0 -3  0]
            sage: ma.append_column(m, (1,1,1,1,2,2))
            Column is too long. Truncating
            [ 1  7  1  1]
            [ 0  0  3  1]
            [ 0 -1  2  1]
            [ 1  0 -3  1]
        """
        if not c:
            # No values given: append a zero column.
            return obj.augment(vector([self.cellzero]*obj.nrows()))
        if len(c) > obj.nrows():
            print("Column is too long. Truncating")
            c = c[:obj.nrows()]
        elif len(c) < obj.nrows():
            c = list(c) + [self.cellzero] * (obj.nrows() - len(c))
        return obj.augment(vector([self.display_to_cell(x) for x in c]))
    def insert_column(self, obj, index, c=None):
        r"""
        Insert a column into a matrix.
        A missing column defaults to zeros; a wrong-length column is
        truncated or zero-padded to fit.
        TESTS ::
            sage: from sage.matrix.matrix_space import MatrixSpace
            sage: S = MatrixSpace(ZZ, 4,3)
            sage: m = S.matrix([1,7,1,0,0,3,0,-1,2,1,0,-3])
            sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
            sage: ma = MatrixGridViewAdapter(m)
            sage: ma.insert_column(m, 1, (1,1,1))
            [ 1  1  7  1]
            [ 0  1  0  3]
            [ 0  1 -1  2]
            [ 1  0  0 -3]
            sage: ma.insert_column(m, 2, (1,1,1,2,2,2))
            Column is too long. Truncating
            [ 1  7  1  1]
            [ 0  0  1  3]
            [ 0 -1  1  2]
            [ 1  0  2 -3]
        """
        if not c:
            c = [self.cellzero] * obj.nrows()
        else:
            if len(c) > obj.nrows():
                print("Column is too long. Truncating")
                c = c[:obj.nrows()]
            elif len(c) < obj.nrows():
                c = list(c) + [self.cellzero] * (obj.nrows() - len(c))
        # Split the matrix at `index` and augment the new column in between.
        left = obj.matrix_from_columns(range(index))
        right = obj.matrix_from_columns(range(index,obj.ncols()))
        return left.augment(vector([self.display_to_cell(x) for x in c])).augment(right)
def remove_column(self, obj, index=None):
    r"""
    Remove a column from a matrix.

    When ``index`` is omitted, the last column is removed.

    TESTS ::
        sage: from sage.matrix.matrix_space import MatrixSpace
        sage: S = MatrixSpace(ZZ, 4,3)
        sage: from sage_widget_adapters.matrix.matrix_grid_view_adapter import MatrixGridViewAdapter
        sage: m = S.matrix([0,1,2,3,4,5,6,7,8,9,10,11])
        sage: ma = MatrixGridViewAdapter(m)
        sage: ma.remove_column(m, 1)
        [ 0  2]
        [ 3  5]
        [ 6  8]
        [ 9 11]
        sage: ma.remove_column(m)
        [ 0  1]
        [ 3  4]
        [ 6  7]
        [ 9 10]
    """
    column = obj.ncols() - 1 if index is None else index
    return obj.delete_columns([column])
r"""
Grid View Adapter for skew tableaux
**Grid View skew tableau operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~SkewTableauGridViewAdapter.compute_cells` | Compute skew tableau cells as a dictionary { coordinate pair : Integer }
:meth:`~SkewTableauGridViewAdapter.from_cells` | Create a new skew tableau from a cells dictionary
:meth:`~SkewTableauGridViewAdapter.addable_cells` | List addable cells
:meth:`~SkewTableauGridViewAdapter.removable_cells` | List removable cells
:meth:`~SkewTableauGridViewAdapter.add_cell` | Add a cell
:meth:`~SkewTableauGridViewAdapter.remove_cell` | Remove a cell
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from sage.combinat.skew_tableau import SkewTableau
from sage.rings.integer import Integer
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
class SkewTableauGridViewAdapter(GridViewAdapter):
    r"""
    Grid view adapter for skew tableaux.

    ATTRIBUTES::
        * ``objclass`` -- SkewTableau
        * ``celltype`` -- Integer
        * ``cellzero`` -- Integer(0)
    """
    objclass = SkewTableau
    constructorname = 'SkewTableau'
    celltype = Integer  # i.e. sage.rings.integer.Integer
    cellzero = Integer(0)

    @staticmethod
    def compute_cells(obj):
        r"""
        From a skew tableau,
        return a dictionary { coordinates pair : Integer }

        TESTS ::
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: from sage_widget_adapters.combinat.skew_tableau_grid_view_adapter import SkewTableauGridViewAdapter
            sage: st = SkewTableau([[None, None, 1, 2], [None, 1], [4]])
            sage: SkewTableauGridViewAdapter.compute_cells(st)
            {(0, 2): 1, (0, 3): 2, (1, 1): 1, (2, 0): 4}
        """
        # ``obj.cells()`` lists only the filled (non-``None``) positions.
        return {(i, j): obj[i][j] for (i, j) in obj.cells()}

    @classmethod
    def from_cells(cls, cells=None):
        r"""
        From a dictionary { coordinates pair : Integer }
        return a corresponding skew tableau

        TESTS ::
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: from sage_widget_adapters.combinat.skew_tableau_grid_view_adapter import SkewTableauGridViewAdapter
            sage: SkewTableauGridViewAdapter.from_cells({(0, 1): 2, (1, 0): 3, (2, 0): 4})
            [[None, 2], [3], [4]]
        """
        if cells is None:  # ``None`` default avoids a shared mutable dict
            cells = {}
        rows = []
        for i in range(max(pos[0] for pos in cells) + 1):
            # Each row extends to its rightmost filled cell; positions left
            # unfilled below stay ``None`` (the skew part of the shape).
            rows.append([None] * (max(pos[1] for pos in cells if pos[0] == i) + 1))
        for pos in cells:
            rows[pos[0]][pos[1]] = cells[pos]
        try:
            return cls.objclass(rows)
        except Exception as err:
            raise TypeError(
                "This object is not compatible with this adapter (%s, for %s objects)" % (cls, cls.objclass)) from err

    @staticmethod
    def addable_cells(obj):
        r"""
        List object addable cells

        TESTS ::
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: from sage_widget_adapters.combinat.skew_tableau_grid_view_adapter import SkewTableauGridViewAdapter
            sage: st = SkewTableau([[None, None, None, 1], [None, None, 2], [None, 1], [4]])
            sage: SkewTableauGridViewAdapter.addable_cells(st)
            [(0, 2), (1, 1), (2, 0), (0, 4), (1, 3), (2, 2), (3, 1), (4, 0)]
        """
        # Cells may be added at the inner corners and at the outer
        # outside-corners of the skew shape.
        return obj.shape().inner().corners() + obj.shape().outer().outside_corners()

    @staticmethod
    def removable_cells(obj):
        r"""
        List object removable cells

        TESTS ::
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: from sage_widget_adapters.combinat.skew_tableau_grid_view_adapter import SkewTableauGridViewAdapter
            sage: st = SkewTableau([[None, None, None, 1, 2, 6], [None, None, 3, 4], [None, 1], [5]])
            sage: SkewTableauGridViewAdapter.removable_cells(st)
            [(0, 5), (1, 3), (2, 1), (3, 0), (0, 3), (1, 2)]
        """
        ret = obj.shape().outer().corners()
        for c in obj.shape().inner().outside_corners():
            if c not in ret:
                ret.append(c)
        return ret

    def add_cell(self, obj, pos, val, dirty=None):
        r"""
        Add cell.

        TESTS ::
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: from sage_widget_adapters.combinat.skew_tableau_grid_view_adapter import SkewTableauGridViewAdapter
            sage: st = SkewTableau([[None, None, None, 2], [None, 1, 1], [1], [4]])
            sage: sta = SkewTableauGridViewAdapter()
            sage: sta.add_cell(st, (0, 2), 1)
            [[None, None, 1, 2], [None, 1, 1], [1], [4]]
            sage: sta.add_cell(st, (4, 0), 7)
            [[None, None, None, 2], [None, 1, 1], [1], [4], [7]]
            sage: sta.add_cell(st, (1, 1), 9)
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(1, 1)' is not addable.
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.addable_cells(obj):
            raise ValueError("Cell position '%s' is not addable." % str(pos))
        sl = obj.to_list()
        sl = self.make_dirty(sl, dirty)
        if pos[0] >= len(obj):
            # New bottom row.
            sl.append([val])
        elif pos in obj.shape().outer().outside_corners():
            # Extend an existing row on the right.
            sl[pos[0]].append(val)
        else:
            # Fill an inner corner (a ``None`` position).
            sl[pos[0]][pos[1]] = val
        return self._validate(sl)

    def remove_cell(self, obj, pos, dirty=None):
        r"""
        Remove cell.

        TESTS ::
            sage: from sage.combinat.skew_tableau import SkewTableau
            sage: from sage_widget_adapters.combinat.skew_tableau_grid_view_adapter import SkewTableauGridViewAdapter
            sage: st = SkewTableau([[None, None, None, 1, 2, 6], [None, None, 3, 4], [None, 1], [5]])
            sage: sta = SkewTableauGridViewAdapter()
            sage: sta.remove_cell(st, (0, 0))
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(0, 0)' is not removable.
            sage: sta.remove_cell(st, (0, 3))
            [[None, None, None, None, 2, 6], [None, None, 3, 4], [None, 1], [5]]
            sage: sta.remove_cell(st, (2, 1))
            [[None, None, None, 1, 2, 6], [None, None, 3, 4], [None], [5]]
            sage: st = SkewTableau([[None, None, 1, 2, 3], [None, 1], [4]])
            sage: sta.remove_cell(st, (0, 4))
            [[None, None, 1, 2], [None, 1], [4]]
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.removable_cells(obj):
            raise ValueError("Cell position '%s' is not removable." % str(pos))
        sl = obj.to_list()
        sl = self.make_dirty(sl, dirty)
        if len(sl[pos[0]]) == 1:
            # Removing the only cell of the row drops the row entirely.
            del sl[pos[0]]
        elif pos in obj.shape().outer().corners():
            sl[pos[0]].pop()
        else:
            # An inner outside-corner becomes part of the skew (``None``) area.
            sl[pos[0]][pos[1]] = None
        return self._validate(sl)
r"""
Grid View Adapter for tableaux
**Grid View tableau operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~TableauGridViewAdapter.compute_cells` | Compute tableau cells as a dictionary { coordinate pair : Integer }
:meth:`~TableauGridViewAdapter.from_cells` | Create a new tableau from a cells dictionary
:meth:`~TableauGridViewAdapter.addable_cells` | List addable cells
:meth:`~TableauGridViewAdapter.add_cell` | Add a cell
:meth:`~TableauGridViewAdapter.removable_cells` | List removable cells (Tableau)
:meth:`~StandardTableauGridViewAdapter.removable_cells` | List removable cells (StandardTableau)
:meth:`~TableauGridViewAdapter.remove_cell` | Remove a cell
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from sage.combinat.tableau import Tableau, StandardTableau, SemistandardTableau
from sage.rings.integer import Integer
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
class TableauGridViewAdapter(GridViewAdapter):
    r"""
    Grid view adapter for Young tableaux.

    ATTRIBUTES::
        * ``objclass`` -- Tableau
        * ``celltype`` -- Integer
        * ``cellzero`` -- Integer(0)
    """
    objclass = Tableau
    constructorname = 'Tableau'
    celltype = Integer  # i.e. sage.rings.integer.Integer
    cellzero = Integer(0)

    @staticmethod
    def compute_cells(obj):
        r"""
        From a tableau,
        return a dictionary { coordinates pair : Integer }

        TESTS ::
            sage: from sage.combinat.tableau import Tableau
            sage: from sage_widget_adapters.combinat.tableau_grid_view_adapter import TableauGridViewAdapter
            sage: t = Tableau([[1, 2, 5, 6], [3], [4]])
            sage: TableauGridViewAdapter.compute_cells(t)
            {(0, 0): 1, (0, 1): 2, (0, 2): 5, (0, 3): 6, (1, 0): 3, (2, 0): 4}
        """
        return {(i, j): obj[i][j] for (i, j) in obj.cells()}

    @classmethod
    def from_cells(cls, cells=None):
        r"""
        From a dictionary { coordinates pair : Integer }
        return a corresponding tableau

        TESTS ::
            sage: from sage.combinat.tableau import Tableau
            sage: from sage_widget_adapters.combinat.tableau_grid_view_adapter import TableauGridViewAdapter
            sage: TableauGridViewAdapter.from_cells({(0, 0): 1, (0, 1): 2, (0, 2): 5, (0, 3): 6, (1, 0): 3, (2, 0): 4})
            [[1, 2, 5, 6], [3], [4]]
        """
        if cells is None:  # ``None`` default avoids a shared mutable dict
            cells = {}
        # Hoisted: the row count was previously recomputed on every loop turn.
        nrows = max(pos[0] for pos in cells) + 1
        # Sorting row values left-to-right assumes weakly increasing rows,
        # which holds for (semi)standard tableaux.
        rows = [sorted(cells[pos] for pos in cells if pos[0] == i)
                for i in range(nrows)]
        try:
            return cls.objclass(rows)
        except Exception as err:
            raise TypeError(
                "This object is not compatible with this adapter (%s, for %s objects)" % (cls, cls.objclass)) from err

    @staticmethod
    def addable_cells(obj, borders=False):
        r"""
        List object addable cells

        TESTS ::
            sage: from sage.combinat.tableau import Tableau
            sage: from sage_widget_adapters.combinat.tableau_grid_view_adapter import TableauGridViewAdapter
            sage: t = Tableau([[1, 3, 4, 8, 12, 14, 15], [2, 7, 11, 13], [5, 9], [6, 10]])
            sage: TableauGridViewAdapter.addable_cells(t, True)
            ([(0, 7), (1, 4), (2, 2), (4, 0)], [(0, 7), (1, 4), (2, 2)], [(1, 4), (2, 2), (4, 0)])
        """
        # Renamed local: it previously shadowed the method name.
        outside = obj.shape().outside_corners()
        if not borders:
            return outside
        # Corners lying on an existing row (i.e. not the brand-new bottom row).
        no_left_border = [pos for pos in outside if pos[0] < len(obj)]
        # Corners with a longer row directly above them.
        no_top_border = []
        prev = None
        for pos in outside:
            if prev and pos[0] and pos[1] < prev[1]:
                no_top_border.append(pos)
            prev = pos
        return outside, no_left_border, no_top_border

    @staticmethod
    def removable_cells(obj):
        r"""
        List object removable cells

        TESTS ::
            sage: from sage.combinat.tableau import Tableau
            sage: from sage_widget_adapters.combinat.tableau_grid_view_adapter import TableauGridViewAdapter
            sage: t = Tableau([[1, 2, 5, 6], [3, 7], [4]])
            sage: TableauGridViewAdapter.removable_cells(t)
            [(0, 3), (1, 1), (2, 0)]
        """
        return obj.corners()

    def add_cell(self, obj, pos, val, dirty=None):
        r"""
        Add cell

        TESTS ::
            sage: from sage.combinat.tableau import Tableau
            sage: from sage_widget_adapters.combinat.tableau_grid_view_adapter import TableauGridViewAdapter
            sage: t = Tableau([[1, 2, 5, 6], [3, 7], [4]])
            sage: ta = TableauGridViewAdapter()
            sage: ta.add_cell(t, (3, 0), 8)
            [[1, 2, 5, 6], [3, 7], [4], [8]]
            sage: ta.add_cell(t, (1, 2), 8)
            [[1, 2, 5, 6], [3, 7, 8], [4]]
            sage: ta.add_cell(t, (2, 0), 9)
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(2, 0)' is not addable.
            sage: ta.add_cell(t, (3, 0), 8, dirty={(3, 0):8})
            [[1, 2, 5, 6], [3, 7], [4], [8]]
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.addable_cells(obj):
            raise ValueError("Cell position '%s' is not addable." % str(pos))
        tl = self.make_dirty(obj.to_list(), dirty)
        # A position already in ``dirty`` was inserted by make_dirty.
        if pos not in dirty:
            if pos[0] >= len(tl):
                tl.append([val])  # new bottom row
            else:
                tl[pos[0]].append(val)  # extend an existing row
        return self._validate(tl)

    def remove_cell(self, obj, pos, dirty=None):
        r"""
        Remove cell

        TESTS ::
            sage: from sage.combinat.tableau import Tableau
            sage: from sage_widget_adapters.combinat.tableau_grid_view_adapter import TableauGridViewAdapter
            sage: t = Tableau([[1, 2, 5, 6], [3, 7], [4]])
            sage: ta = TableauGridViewAdapter()
            sage: ta.remove_cell(t, (1, 1))
            [[1, 2, 5, 6], [3], [4]]
            sage: ta.remove_cell(t, (2, 0))
            [[1, 2, 5, 6], [3, 7]]
            sage: ta.remove_cell(t, (2, 1))
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(2, 1)' is not removable.
            sage: from sage.combinat.tableau import StandardTableau
            sage: from sage_widget_adapters.combinat.tableau_grid_view_adapter import StandardTableauGridViewAdapter
            sage: st = StandardTableau([[1, 2, 5, 6], [3, 7], [4]])
            sage: sta = StandardTableauGridViewAdapter()
            sage: sta.remove_cell(st, (1, 1))
            [[1, 2, 5, 6], [3], [4]]
            sage: sta.remove_cell(st, (2, 0))
            ValueError('the entries in a standard tableau must be in bijection with 1,2,...,n')
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.removable_cells(obj):
            raise ValueError("Cell position '%s' is not removable." % str(pos))
        tl = obj.to_list()
        tl = self.make_dirty(tl, dirty)
        tl[pos[0]].pop()
        if not tl[pos[0]]:
            tl.pop()
        tl = [r for r in tl if r]  # do not keep any empty row before the test
        return self._validate(tl)
class SemistandardTableauGridViewAdapter(TableauGridViewAdapter):
    r"""
    Value will validate as semistandard tableau.
    """
    # Same grid behaviour as TableauGridViewAdapter; only the validation
    # class (and the constructor name used in generated code) differ.
    objclass = SemistandardTableau
    constructorname = 'SemistandardTableau'
class StandardTableauGridViewAdapter(SemistandardTableauGridViewAdapter):
    r"""
    Grid view adapter whose values validate as standard tableaux.
    """
    # Only the validation class and constructor name differ from the parent.
    objclass = StandardTableau
    constructorname = 'StandardTableau'
r"""
Grid View Adapter for skew partitions
**Grid View skew partition operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~SkewPartitionGridViewAdapter.cell_to_display` | Static method for typecasting cell content to widget display value
:meth:`~SkewPartitionGridViewAdapter.display_to_cell` | Instance method for typecasting widget display value to cell content
:meth:`~SkewPartitionGridViewAdapter.compute_cells` | Compute skew partition cells as a dictionary { coordinate pair : Integer }
:meth:`~SkewPartitionGridViewAdapter.from_cells` | Create a new skew partition from a cells dictionary
:meth:`~SkewPartitionGridViewAdapter.get_cell` | Get the skew partition cell content
:meth:`~SkewPartitionGridViewAdapter.addable_cells` | List addable cells
:meth:`~SkewPartitionGridViewAdapter.removable_cells` | List removable cells
:meth:`~SkewPartitionGridViewAdapter.add_cell` | Add a cell
:meth:`~SkewPartitionGridViewAdapter.remove_cell` | Remove a cell
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from sage.combinat.skew_partition import SkewPartition
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
from six import text_type
class SkewPartitionGridViewAdapter(GridViewAdapter):
    r"""
    Grid view adapter for skew partitions.

    ATTRIBUTES::
        * ``objclass`` -- SkewPartition
        * ``celltype`` -- bool
        * ``cellzero`` -- False
    """
    objclass = SkewPartition
    celltype = bool
    cellzero = False

    @staticmethod
    def cell_to_display(cell_content, display_type=bool):
        r"""
        From object cell content
        to widget display value.

        TESTS ::
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: SkewPartitionGridViewAdapter.cell_to_display(True)
            True
            sage: from six import text_type
            sage: SkewPartitionGridViewAdapter.cell_to_display("my string", text_type)
            ''
        """
        # Text widgets display skew partition cells as empty strings.
        if display_type == text_type:
            return ''
        return cell_content

    def display_to_cell(self, display_value, display_type=bool):
        r"""
        From widget cell value
        to object display content

        TESTS ::
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: pa = SkewPartitionGridViewAdapter()
            sage: pa.display_to_cell(True)
            True
            sage: pa.display_to_cell('')
            False
        """
        if not display_value or display_type == text_type:
            return self.cellzero
        return display_value

    @staticmethod
    def compute_cells(obj):
        r"""
        From a skew partition,
        return a dictionary { coordinates pair : Integer }

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: sp = SkewPartition([[4, 2, 1],[2, 1]])
            sage: SkewPartitionGridViewAdapter.compute_cells(sp)
            {(0, 2): False, (0, 3): False, (1, 1): False, (2, 0): False}
        """
        # Only the shape matters for a skew partition; cell content is always False.
        return {(i, j): False for (i, j) in obj.cells()}

    @classmethod
    def from_cells(cls, cells=None):
        r"""
        From a dictionary { coordinates pair : Integer }
        return a corresponding skew partition.

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: SkewPartitionGridViewAdapter.from_cells({(0, 2): False, (0, 3): False, (1, 1): False, (2, 0): False})
            [4, 2, 1] / [2, 1]
        """
        if cells is None:  # ``None`` default avoids a shared mutable dict
            cells = {}
        height = max(pos[0] for pos in cells) + 1
        # Leftmost / rightmost filled column of each row (hoisted: the row
        # minima were previously computed twice per row).
        row_min = [min(pos[1] for pos in cells if pos[0] == i) for i in range(height)]
        row_max = [max(pos[1] for pos in cells if pos[0] == i) for i in range(height)]
        outer = [m + 1 for m in row_max]
        inner = [m for m in row_min if m > 0]
        try:
            return cls.objclass([outer, inner])
        except Exception as err:
            raise TypeError(
                "This object is not compatible with this adapter (%s, for %s objects)" % (cls, cls.objclass)) from err

    @staticmethod
    def get_cell(obj, pos):
        r"""
        Get cell value

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: sp = SkewPartition([[4, 2, 1],[2, 1]])
            sage: SkewPartitionGridViewAdapter.get_cell(sp, (1, 1))
            False
            sage: SkewPartitionGridViewAdapter.get_cell(sp, (1, 0))
            Traceback (most recent call last):
            ...
            ValueError: Cell '(1, 0)' not in object.
        """
        # The original ``try: assert ... except:`` validation was silently
        # disabled under ``python -O`` and rejected valid cells in rows below
        # the inner shape (indexing ``inner()`` out of range).
        try:
            inner = obj.inner()
            # Rows below the inner shape have an implicit inner length of 0.
            low = inner[pos[0]] if pos[0] < len(inner) else 0
            valid = pos[0] < len(obj) and low <= pos[1] < obj.outer()[pos[0]]
        except Exception:
            valid = False
        if not valid:
            raise ValueError("Cell '%s' not in object." % str(pos))
        return False

    def set_cell(self, obj, pos, val, dirty=None, constructorname=''):
        r"""
        From a partition `obj`, a position (pair of coordinates) `pos` and a value `val`,
        return a new partition with a modified cell at position `pos`.
        Remove the cell if relevant, otherwise return the same partition.

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: sp = SkewPartition([[7, 4, 2, 1],[2, 1, 1]])
            sage: spa = SkewPartitionGridViewAdapter()
            sage: spa.set_cell(sp, (1,6), True)
            [7, 4, 2, 1] / [2, 1, 1]
            sage: spa.set_cell(sp, (1,3), True)
            [7, 4, 2, 1] / [2, 1, 1]
            sage: spa.set_cell(sp, (1,3), False)
            [7, 3, 2, 1] / [2, 1, 1]
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        # Unticking a removable corner removes it; anything else is a no-op.
        if pos in self.removable_cells(obj) and not val:
            return self.remove_cell(obj, pos, dirty)
        return obj

    @staticmethod
    def addable_cells(obj):
        r"""
        List object addable cells

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: sp = SkewPartition([[4, 2, 1],[2, 1]])
            sage: SkewPartitionGridViewAdapter.addable_cells(sp)
            [(0, 1), (1, 0), (0, 4), (1, 2), (2, 1), (3, 0)]
        """
        return obj.inner().corners() + obj.outer().outside_corners()

    @staticmethod
    def removable_cells(obj):
        r"""
        List object removable cells

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: SkewPartitionGridViewAdapter.removable_cells(SkewPartition([[4, 2, 1],[2, 1]]))
            [(0, 2), (1, 1), (2, 0), (0, 3)]
            sage: SkewPartitionGridViewAdapter.removable_cells(SkewPartition([[7, 4, 2, 1],[2, 1, 1]]))
            [(0, 2), (1, 1), (3, 0), (0, 6), (1, 3), (2, 1)]
        """
        ret = obj.inner().outside_corners()
        for c in obj.outer().corners():
            if c not in ret:
                ret.append(c)
        return ret

    def add_cell(self, obj, pos, val=None, dirty=None):
        r"""
        Add cell

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: sp = SkewPartition([[7, 4, 2, 1],[2, 1, 1]])
            sage: spa = SkewPartitionGridViewAdapter()
            sage: spa.add_cell(sp, (0, 7))
            [8, 4, 2, 1] / [2, 1, 1]
            sage: spa.add_cell(sp, (2, 0))
            [7, 4, 2, 1] / [2, 1]
            sage: spa.add_cell(sp, (4, 0))
            [7, 4, 2, 1, 1] / [2, 1, 1]
            sage: spa.add_cell(sp, (2, 3))
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(2, 3)' is not addable.
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.addable_cells(obj):
            raise ValueError("Cell position '%s' is not addable." % str(pos))
        try:
            if pos in obj.outer().outside_corners():
                # Grow the outer shape.
                return self.objclass([obj.outer().add_cell(pos[0]), obj.inner()])
            else:
                # Shrink the inner shape.
                return self.objclass([obj.outer(), obj.inner().remove_cell(pos[0])])
        except Exception as err:
            raise ValueError("Error adding cell %s to %s" % (pos, self.objclass)) from err

    def remove_cell(self, obj, pos, dirty=None):
        r"""
        Remove cell

        TESTS ::
            sage: from sage.combinat.skew_partition import SkewPartition
            sage: from sage_widget_adapters.combinat.skew_partition_grid_view_adapter import SkewPartitionGridViewAdapter
            sage: sp = SkewPartition([[7, 4, 2, 1],[2, 1, 1]])
            sage: spa = SkewPartitionGridViewAdapter()
            sage: spa.remove_cell(sp, (0, 6))
            [6, 4, 2, 1] / [2, 1, 1]
            sage: spa.remove_cell(sp, (1, 1))
            [7, 4, 2, 1] / [2, 2, 1]
            sage: spa.remove_cell(sp, (1, 2))
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(1, 2)' is not removable.
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.removable_cells(obj):
            raise ValueError("Cell position '%s' is not removable." % str(pos))
        try:
            if pos in obj.outer().corners():
                # Shrink the outer shape.
                return self.objclass([obj.outer().remove_cell(pos[0]), obj.inner()])
            else:
                # Grow the inner shape.
                return self.objclass([obj.outer(), obj.inner().add_cell(pos[0])])
        except Exception as err:
            raise ValueError("Error removing cell %s from %s" % (pos, self.objclass)) from err
r"""
Grid View Adapter for parallelogram polyominos
**Grid View parallelogram polyominos operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~ParallelogramPolyominoGridViewAdapter.compute_cells` | Compute parallelogram polyomino celss as a dictionary { coordinate pair : False }
:meth:`~ParallelogramPolyominoGridViewAdapter.addable_cells` | List addable cells
:meth:`~ParallelogramPolyominoGridViewAdapter.removable_cells` | List removable cells
:meth:`~ParallelogramPolyominoGridViewAdapter.add_cell` | Add a cell
:meth:`~ParallelogramPolyominoGridViewAdapter.remove_cell` | Remove a cell
AUTHORS ::
Henri Derycke
"""
from sage.combinat.parallelogram_polyomino import ParallelogramPolyomino
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
class ParallelogramPolyominoGridViewAdapter(GridViewAdapter):
    r"""
    Grid view adapter for parallelogram polyominos.

    ATTRIBUTES::
        * ``objclass`` -- ParallelogramPolyomino
        * ``celltype`` -- bool
        * ``cellzero`` -- False
    """
    objclass = ParallelogramPolyomino
    celltype = bool
    cellzero = False

    @staticmethod
    def compute_cells(obj):
        r"""
        From a parallelogram polyomino,
        return a dictionary { coordinates pair : False }

        TESTS ::
            sage: from sage.combinat.parallelogram_polyomino import ParallelogramPolyomino
            sage: from sage_widget_adapters.combinat.parallelogram_polyomino_grid_view_adapter import ParallelogramPolyominoGridViewAdapter
            sage: pp = ParallelogramPolyomino([[0, 1, 1],[1, 1 ,0]])
            sage: ParallelogramPolyominoGridViewAdapter.compute_cells(pp)
            {(0, 0): True, (0, 1): True}
        """
        cells = {}
        lower_heights = obj.lower_heights()
        upper_heights = obj.upper_heights()
        # Column i is filled between its upper and lower heights (rows are
        # the first coordinate, hence the (j, i) key order).
        for i in range(obj.width()):
            for j in range(upper_heights[i], lower_heights[i]):
                cells[j, i] = True
        return cells

    @staticmethod
    def addable_cells(obj):
        r"""
        List object addable cells

        TESTS ::
            sage: from sage.combinat.parallelogram_polyomino import ParallelogramPolyomino
            sage: from sage_widget_adapters.combinat.parallelogram_polyomino_grid_view_adapter import ParallelogramPolyominoGridViewAdapter
            sage: pp = ParallelogramPolyomino([[0, 1, 0, 1], [1, 1, 0, 0]])
            sage: ParallelogramPolyominoGridViewAdapter.addable_cells(pp)
            [(1, 0), (2, 1), (1, 2)]
        """
        cells = []
        # A cell is addable above a column whose top differs from its
        # left neighbour's, and below a column whose bottom differs.
        upper_heights = obj.upper_heights()
        for i, c in enumerate(upper_heights[1:]):
            if c != upper_heights[i]:
                cells.append((c - 1, i + 1))
        lower_heights = obj.lower_heights()
        for i, c in enumerate(lower_heights[1:]):
            if c != lower_heights[i]:
                cells.append((lower_heights[i], i))
        height, width = obj.geometry()
        # The two cells extending the polyomino at its bottom-right corner.
        cells += [(height, width - 1), (height - 1, width)]
        return cells

    @staticmethod
    def removable_cells(obj):
        r"""
        List object removable cells

        TESTS ::
            sage: from sage.combinat.parallelogram_polyomino import ParallelogramPolyomino
            sage: from sage_widget_adapters.combinat.parallelogram_polyomino_grid_view_adapter import ParallelogramPolyominoGridViewAdapter
            sage: pp = ParallelogramPolyomino([[0, 0, 1, 1], [1, 1, 0, 0]])
            sage: ParallelogramPolyominoGridViewAdapter.removable_cells(pp)
            [(1, 0), (0, 1)]
        """
        # Column spans padded with sentinels on both sides, so every real
        # column has a left and a right neighbour to compare against.
        heights = [(0, 0)] + list(zip(obj.upper_heights(), obj.lower_heights()))
        heights.append((heights[-1][1],) * 2)
        cells = []
        for i in range(1, len(heights) - 1):
            _, y1 = heights[i - 1]
            x2, y2 = heights[i]
            x3, _ = heights[i + 1]
            # Top cell of column i-1 removable without splitting the polyomino.
            if x2 + 1 < y2 and x2 != x3 and x2 + 1 < y1:
                cells.append((x2, i - 1))
            # Bottom cell of column i-1, symmetrically.
            if x2 < y2 - 1 and y1 != y2 and y2 - 1 > x3:
                cells.append((y2 - 1, i - 1))
        if len(heights) > 3:
            _, y1 = heights[-3]
            x2, y2 = heights[-2]
            # The bottom-right corner cell of the last column.
            if y1 < y2 or x2 + 1 == y2:
                cells.append((y2 - 1, len(heights) - 3))
        return cells

    def add_cell(self, obj, pos, val=None, dirty=None):
        r"""
        Add cell

        TESTS ::
            sage: from sage.combinat.parallelogram_polyomino import ParallelogramPolyomino
            sage: from sage_widget_adapters.combinat.parallelogram_polyomino_grid_view_adapter import ParallelogramPolyominoGridViewAdapter
            sage: pp = ParallelogramPolyomino([[0, 1, 0, 1], [1, 1, 0, 0],])
            sage: ppa = ParallelogramPolyominoGridViewAdapter()
            sage: ppa.add_cell(pp, (1, 0))
            [[0, 0, 1, 1], [1, 1, 0, 0]]
            sage: ppa.add_cell(pp, (1, 1))
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(1, 1)' is not addable.
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.addable_cells(obj):
            raise ValueError("Cell position '%s' is not addable." % str(pos))
        heights = list(zip(obj.upper_heights(), obj.lower_heights()))
        upper_path = obj.upper_path()
        lower_path = obj.lower_path()
        height, width = obj.geometry()
        i, j = pos
        if i < height and j < width:
            # Inside the bounding box: swap one step of the relevant path.
            index = i + j
            if heights[j][0] == i + 1:
                upper_path[index:index + 2] = [1, 0]
            if heights[j][1] == i:
                lower_path[index:index + 2] = [0, 1]
        else:
            # Extend the bounding box at the bottom-right corner.
            if i == height:
                lower_path[-1:] = [0, 1]
                upper_path += [0]
            else:
                lower_path += [1]
                upper_path[-1:] = [1, 0]
        return ParallelogramPolyomino([lower_path, upper_path])

    def remove_cell(self, obj, pos, dirty=None):
        r"""
        Remove cell

        TESTS ::
            sage: from sage.combinat.parallelogram_polyomino import ParallelogramPolyomino
            sage: from sage_widget_adapters.combinat.parallelogram_polyomino_grid_view_adapter import ParallelogramPolyominoGridViewAdapter
            sage: pp = ParallelogramPolyomino([[0, 0, 1, 1], [1, 1, 0, 0]])
            sage: ppa = ParallelogramPolyominoGridViewAdapter()
            sage: ppa.remove_cell(pp, (1, 0))
            [[0, 1, 0, 1], [1, 1, 0, 0]]
            sage: ppa.remove_cell(pp, (1, 1))
            Traceback (most recent call last):
            ...
            ValueError: Cell position '(1, 1)' is not removable.
        """
        if dirty is None:  # ``None`` default avoids a shared mutable dict
            dirty = {}
        if pos not in self.removable_cells(obj):
            raise ValueError("Cell position '%s' is not removable." % str(pos))
        heights = list(zip(obj.upper_heights(), obj.lower_heights()))
        upper_path = obj.upper_path()
        lower_path = obj.lower_path()
        i, j = pos
        index = i + j
        if len(heights) != j + 1:
            # Not the last column: swap one step of the relevant path.
            if heights[j][0] == i:
                upper_path[index:index + 2] = [0, 1]
            if heights[j][1] - 1 == i:
                lower_path[index:index + 2] = [1, 0]
        else:
            # Last column: the bounding box may shrink.
            if heights[j][0] != i and heights[j][1] - 1 == i:
                lower_path[index:index + 2] = [1]
                upper_path.pop()
            elif heights[j][1] - 1 == i:
                lower_path.pop()
                upper_path[index:index + 2] = [0]
            else:
                upper_path[index:index + 2] = [0, 1]
        return ParallelogramPolyomino([lower_path, upper_path])
r"""
Grid View Adapter for partitions
**Grid View partition operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~PartitionGridViewAdapter.cell_to_display` | Static method for typecasting cell content to widget display value
:meth:`~PartitionGridViewAdapter.display_to_cell` | Instance method for typecasting widget display value to cell content
:meth:`~PartitionGridViewAdapter.compute_cells` | Compute partition cells as a dictionary { coordinate pair : Integer }
:meth:`~PartitionGridViewAdapter.from_cells` | Create a new partition from a cells dictionary
:meth:`~PartitionGridViewAdapter.get_cell` | Get the partition cell content
:meth:`~PartitionGridViewAdapter.addable_cells` | List addable cells
:meth:`~PartitionGridViewAdapter.removable_cells` | List removable cells
:meth:`~PartitionGridViewAdapter.add_cell` | Add a cell
:meth:`~PartitionGridViewAdapter.remove_cell` | Remove a cell
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from sage.combinat.partition import Partition
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
from six import text_type
class PartitionGridViewAdapter(GridViewAdapter):
r"""
Grid view adapter for partitions.
ATTRIBUTES::
* ``objclass`` -- Partition
* ``celltype`` -- bool
* ``cellzero`` -- False
"""
objclass = Partition
constructorname = 'Partition'
celltype = bool
cellzero = False
@staticmethod
def cell_to_display(cell_content, display_type=bool):
    r"""
    From object cell content
    to widget display value.

    TESTS ::
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: PartitionGridViewAdapter.cell_to_display(True)
        True
        sage: from six import text_type
        sage: PartitionGridViewAdapter.cell_to_display("my string", text_type)
        ''
    """
    # Text widgets always show partition cells as empty strings.
    return '' if display_type == text_type else cell_content
def display_to_cell(self, display_value, display_type=bool):
    r"""
    From widget cell value
    to object display content

    TESTS ::
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: pa = PartitionGridViewAdapter()
        sage: pa.display_to_cell(True)
        True
        sage: pa.display_to_cell('')
        False
    """
    # Falsy widget values and text widgets both map back to the zero cell.
    if display_type == text_type or not display_value:
        return self.cellzero
    return display_value
@staticmethod
def compute_cells(obj):
    r"""
    From a partition,
    return a dictionary { coordinates pair : Integer }

    TESTS ::
        sage: from sage.combinat.partition import Partition
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: p = Partition([3, 2, 1, 1])
        sage: PartitionGridViewAdapter.compute_cells(p)
        {(0, 0): False,
        (0, 1): False,
        (0, 2): False,
        (1, 0): False,
        (1, 1): False,
        (2, 0): False,
        (3, 0): False}
    """
    # Only the shape matters for a partition; every cell maps to False.
    return {pos: False for pos in obj.cells()}
@classmethod
def from_cells(cls, cells=None):
    r"""
    From a dictionary { coordinates pair : Integer }
    return a corresponding partition

    TESTS ::
        sage: from sage.combinat.partition import Partition
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: PartitionGridViewAdapter.from_cells({(0, 0): False, (0, 1): False, (0, 2): True, (0, 3): False, (1, 0): False, (2, 0): True})
        [4, 1, 1]
    """
    if cells is None:  # ``None`` default avoids a shared mutable dict
        cells = {}
    # Row i's length is the number of cells with row coordinate i.
    nrows = max(pos[0] for pos in cells) + 1
    partition_elements = [
        sum(1 for pos in cells if pos[0] == i) for i in range(nrows)]
    try:
        return cls.objclass(partition_elements)
    except Exception as err:
        raise TypeError(
            "This object is not compatible with this adapter (%s, for %s objects)" % (cls, cls.objclass)) from err
@staticmethod
def get_cell(obj, pos):
    r"""
    Get cell value

    TESTS ::
        sage: from sage.combinat.partition import Partition
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: p = Partition([6, 5, 2, 1])
        sage: PartitionGridViewAdapter.get_cell(p, (1, 1))
        False
        sage: PartitionGridViewAdapter.get_cell(p, (1, 6))
        Traceback (most recent call last):
        ...
        ValueError: Cell '(1, 6)' not in partition.
    """
    # Replaces the original ``try: assert ... except:`` pattern, which was
    # silently disabled under ``python -O`` and caught even KeyboardInterrupt.
    try:
        valid = pos[0] < len(obj) and pos[1] < obj[pos[0]]
    except Exception:
        valid = False
    if not valid:
        raise ValueError("Cell '%s' not in partition." % str(pos))
    return False
def set_cell(self, obj, pos, val, dirty=None, constructorname=''):
    r"""
    From a partition `obj`, a position (pair of coordinates) `pos` and a value `val`,
    return a new partition with a modified cell at position `pos`.
    Remove the cell if relevant, otherwise return the same partition.

    TESTS ::
        sage: from sage.combinat.partition import Partition
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: p = Partition([6, 5, 2, 1])
        sage: pa = PartitionGridViewAdapter()
        sage: pa.set_cell(p, (1,2), True)
        [6, 5, 2, 1]
        sage: pa.set_cell(p, (1,4), True)
        [6, 4, 2, 1]
    """
    if dirty is None:  # ``None`` default avoids a shared mutable dict
        dirty = {}
    # Only a removable corner can actually change (see the doctest above);
    # NOTE(review): the truthy-``val`` condition is the opposite of the skew
    # partition adapter's -- it matches this method's doctest, but confirm
    # the intended toggle semantics against the grid widget.
    if pos in self.removable_cells(obj) and val:
        return self.remove_cell(obj, pos, dirty)
    return obj
@staticmethod
def addable_cells(obj):
r"""
List object addable cells
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: PartitionGridViewAdapter.addable_cells(p)
[(0, 6), (1, 5), (2, 2), (3, 1), (4, 0)]
"""
return obj.outside_corners()
@staticmethod
def removable_cells(obj):
r"""
List object removable cells
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: PartitionGridViewAdapter.removable_cells(p)
[(0, 5), (1, 4), (2, 1), (3, 0)]
"""
return obj.corners()
def add_cell(self, obj, pos, val=None, dirty={}):
    r"""
    Return the partition obtained from ``obj`` by adding a cell at
    position ``pos``.  ``val`` is ignored for partitions.

    TESTS ::
        sage: from sage.combinat.partition import Partition
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: p = Partition([6, 5, 2, 1])
        sage: pa = PartitionGridViewAdapter()
        sage: pa.add_cell(p, (2, 2))
        [6, 5, 3, 1]
        sage: pa.add_cell(p, (4, 0), 42)
        [6, 5, 2, 1, 1]
        sage: pa.add_cell(p, (2, 0))
        Traceback (most recent call last):
        ...
        ValueError: Cell position '(2, 0)' is not addable.
    """
    if pos not in self.addable_cells(obj):
        raise ValueError("Cell position '%s' is not addable." % str(pos))
    # Only the row index matters; the partition knows where the new
    # cell lands in that row.
    try:
        return obj.add_cell(pos[0])
    except Exception as err:
        # NOTE(review): failures are *returned*, not raised — the grid
        # widget appears to rely on this; confirm before changing it.
        return err
def remove_cell(self, obj, pos, dirty={}):
    r"""
    Return the partition obtained from ``obj`` by removing the cell at
    position ``pos``.

    TESTS ::
        sage: from sage.combinat.partition import Partition
        sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
        sage: p = Partition([6, 5, 2, 1])
        sage: pa = PartitionGridViewAdapter()
        sage: pa.remove_cell(p, (2, 1))
        [6, 5, 1, 1]
        sage: pa.remove_cell(p, (1, 1))
        Traceback (most recent call last):
        ...
        ValueError: Cell position '(1, 1)' is not removable.
    """
    if pos not in self.removable_cells(obj):
        raise ValueError("Cell position '%s' is not removable." % str(pos))
    # Only the row index matters; the partition removes that row's
    # last cell.
    try:
        return obj.remove_cell(pos[0])
    except Exception as err:
        # NOTE(review): failures are *returned*, not raised — the grid
        # widget appears to rely on this; confirm before changing it.
        return err
r"""
Grid View Adapter for grid-representable graphs
**Grid View graphs operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~GraphGridViewAdapter.cell_to_display` | Static method for typecasting cell content to widget display value
:meth:`~GraphGridViewAdapter.display_to_cell` | Instance method for typecasting widget display value to cell content
:meth:`~GraphGridViewAdapter.compute_cells` | Compute graph cells as a dictionary { coordinate pair : label }
:meth:`~GraphGridViewAdapter.from_cells` | Create a new graph from a cells dictionary
:meth:`~GraphGridViewAdapter.get_cell` | Get the graph cell content (i.e. None)
:meth:`~GraphGridViewAdapter.addable_cells` | List addable cells
:meth:`~GraphGridViewAdapter.removable_cells` | List removable cells
:meth:`~GraphGridViewAdapter.add_cell` | Add a cell
:meth:`~GraphGridViewAdapter.remove_cell` | Remove a cell
:meth:`~GraphGridViewAdapter.append_row` | Append a row
:meth:`~GraphGridViewAdapter.remove_row` | Remove a row at given index
:meth:`~GraphGridViewAdapter.append_column` | Append a column
:meth:`~GraphGridViewAdapter.remove_column` | Remove a column at given index
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from sage.graphs.graph import Graph
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
from six import text_type
class GraphGridViewAdapter(GridViewAdapter):
    r"""
    Grid view adapter for grid-representable graphs.

    Vertices are identified with ``(row, column)`` coordinate pairs and
    cells carry no content of their own.

    ATTRIBUTES::
        * ``objclass`` -- Graph
        * ``celltype`` -- bool
        * ``cellzero`` -- False
    """
    objclass = Graph
    celltype = bool
    cellzero = False

    @staticmethod
    def _grid_bounds(obj):
        r"""
        Return ``(row_max, col_max)``, the largest row and column indices
        occurring among the vertices of ``obj`` (``(0, 0)`` if the graph
        has no vertices).

        Shared helper for the cell/row/column editing methods below,
        which all previously duplicated this loop.
        """
        row_max, col_max = 0, 0
        for t in obj.vertex_iterator():
            row_max = max(row_max, t[0])
            col_max = max(col_max, t[1])
        return row_max, col_max

    @staticmethod
    def cell_to_display(cell_content, display_type=bool):
        r"""
        From object cell content
        to widget display value.
        TESTS ::
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: GraphGridViewAdapter.cell_to_display(True)
            True
            sage: from six import text_type
            sage: GraphGridViewAdapter.cell_to_display("my string", text_type)
            ''
        """
        if display_type == text_type:
            # Graph cells never display text.
            return ''
        if cell_content:
            return cell_content
        if display_type == bool:
            return False
        # Falsy content with any other display type: keep the historical
        # implicit ``None`` return, now made explicit.
        return None

    def display_to_cell(self, display_value, display_type=bool):
        r"""
        From widget cell value
        to object display content
        TESTS ::
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: ga = GraphGridViewAdapter()
            sage: ga.display_to_cell(True)
            True
            sage: ga.display_to_cell('')
            False
        """
        if not display_value or display_type == text_type:
            return self.cellzero
        return display_value

    @staticmethod
    def compute_cells(obj):
        r"""
        From the graph vertices,
        make a dictionary { coordinates pair : None }
        TESTS ::
            sage: from sage.graphs.generators.families import AztecDiamondGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = AztecDiamondGraph(2)
            sage: GraphGridViewAdapter.compute_cells(g)
            {(0, 1): None,
            (0, 2): None,
            (1, 0): None,
            (1, 1): None,
            (1, 2): None,
            (1, 3): None,
            (2, 0): None,
            (2, 1): None,
            (2, 2): None,
            (2, 3): None,
            (3, 1): None,
            (3, 2): None}
        """
        # Every vertex becomes a cell; cells carry no content.
        return {v: None for v in obj.vertices()}

    @classmethod
    def from_cells(cls, cells={}):
        r"""
        From a dictionary { coordinates pair : None }
        return a graph with one vertex for every coordinates pair
        TESTS ::
            sage: from sage.graphs.generators.families import AztecDiamondGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: GraphGridViewAdapter.from_cells({(0, 0): None, (0, 1): None, (1, 0): None, (1, 1): None, (2, 0): None, (2, 1): None})
            Graph on 6 vertices
        """
        # ``cells`` is only read, so the shared default dict is harmless.
        g = Graph()
        g.add_vertices(list(cells.keys()))
        return cls.objclass(g)

    @staticmethod
    def get_cell(obj, pos):
        r"""
        From a graph `graph` and a tuple `pos`,
        return the object cell value at position `pos`.
        Graph cells have no content, so this is always ``None``.
        TESTS ::
            sage: from sage.graphs.generators.families import AztecDiamondGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = AztecDiamondGraph(2)
            sage: GraphGridViewAdapter.get_cell(g, (1,3)) is None
            True
        """
        return None

    @staticmethod
    def addable_cells(obj):
        r"""
        No cell should be added in isolation
        except for linear graphs
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((2,3))
            sage: GraphGridViewAdapter.addable_cells(g)
            []
            sage: g = GridGraph((1,3))
            sage: GraphGridViewAdapter.addable_cells(g)
            [(0, 3)]
        """
        if not obj.num_verts():
            # An empty graph may grow a first vertex at the origin.
            return [(0, 0)]
        row_max, col_max = GraphGridViewAdapter._grid_bounds(obj)
        if row_max > 0 and col_max > 0:
            # Truly two-dimensional grid: no single cell can be added.
            return []
        if row_max > 0:
            # Vertical linear graph: extend downwards.
            return [(row_max + 1, 0)]
        if col_max > 0:
            # Horizontal linear graph: extend to the right.
            return [(0, col_max + 1)]
        # Single vertex (0, 0): the graph may grow either way.
        return [(0, 1), (1, 0)]

    @staticmethod
    def removable_cells(obj):
        r"""
        No cell should be removed in isolation
        except for linear graphs
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((2,3))
            sage: GraphGridViewAdapter.removable_cells(g)
            []
            sage: g = GridGraph((1,3))
            sage: GraphGridViewAdapter.removable_cells(g)
            [(0, 2)]
        """
        row_max, col_max = GraphGridViewAdapter._grid_bounds(obj)
        if row_max > 0 and col_max > 0:
            # Truly two-dimensional grid: no single cell can be removed.
            return []
        if row_max > 0:
            return [(row_max, 0)]
        if col_max > 0:
            return [(0, col_max)]
        return [(0, 0)]

    def add_cell(self, obj, pos, val=None, dirty={}):
        r"""
        Add a cell to the graph.
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((1,2))
            sage: ga = GraphGridViewAdapter()
            sage: ga.add_cell(g, (0,2))
            Grid Graph for [1, 2]: Graph on 3 vertices
        """
        if pos not in self.addable_cells(obj):
            raise ValueError("Position '%s' is not addable." % str(pos))
        if pos in obj.vertices():
            raise ValueError("This cell (position=%s) is already in the graph." % str(pos))
        obj.add_vertex(pos)
        return obj

    def remove_cell(self, obj, pos, dirty={}):
        r"""
        Remove a cell from the graph.
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((1, 2))
            sage: ga = GraphGridViewAdapter()
            sage: ga.remove_cell(g, (0,1))
            Grid Graph for [1, 2]: Graph on 1 vertex
        """
        if pos not in self.removable_cells(obj):
            raise ValueError("Cell position '%s' is not removable." % str(pos))
        obj.delete_vertex(pos)
        return obj

    def append_row(self, obj):
        r"""
        Add a row to the graph.
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((3,2))
            sage: ga = GraphGridViewAdapter()
            sage: ga.append_row(g)
            Grid Graph for [3, 2]: Graph on 8 vertices
        """
        row_max, col_max = self._grid_bounds(obj)
        # NOTE: only vertices are appended; no edges to the previous row
        # are created (historical behavior, preserved).
        obj.add_vertices([(row_max + 1, j) for j in range(col_max + 1)])
        return obj

    def remove_row(self, obj, index=None):
        r"""
        Remove a row from the graph
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((3,2))
            sage: ga = GraphGridViewAdapter()
            sage: ga.remove_row(g)
            Grid Graph for [3, 2]: Graph on 4 vertices
        """
        row_max, col_max = self._grid_bounds(obj)
        # ``index`` is accepted for interface compatibility but the last
        # row is always the one removed.
        obj.delete_vertices([(row_max, j) for j in range(col_max + 1)])
        return obj

    def append_column(self, obj):
        r"""
        Add a column to the graph.
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((3,2))
            sage: ga = GraphGridViewAdapter()
            sage: ga.append_column(g)
            Grid Graph for [3, 2]: Graph on 9 vertices
        """
        row_max, col_max = self._grid_bounds(obj)
        # NOTE: only vertices are appended; no edges to the previous
        # column are created (historical behavior, preserved).
        obj.add_vertices([(i, col_max + 1) for i in range(row_max + 1)])
        return obj

    def remove_column(self, obj, index=None):
        r"""
        Remove a column from the graph
        TESTS ::
            sage: from sage.graphs.generators.basic import GridGraph
            sage: from sage_widget_adapters.graphs.graph_grid_view_adapter import GraphGridViewAdapter
            sage: g = GridGraph((3,2))
            sage: ga = GraphGridViewAdapter()
            sage: ga.remove_column(g)
            Grid Graph for [3, 2]: Graph on 3 vertices
        """
        row_max, col_max = self._grid_bounds(obj)
        # ``index`` is accepted for interface compatibility but the last
        # column is always the one removed.
        obj.delete_vertices([(i, col_max) for i in range(row_max + 1)])
        return obj
<a href="https://sagemath.org"><img src="src/doc/common/themes/sage/static/logo_sagemath_black.svg" height="60" align="right" /></a>
# Sage: Open Source Mathematical Software
> "Creating a Viable Open Source Alternative to
> Magma, Maple, Mathematica, and MATLAB"
> Copyright (C) 2005-2022 The Sage Development Team
https://www.sagemath.org
The Sage Library is free software released under the
GNU General Public Licence GPLv2+, and included packages
have [compatible software licenses](./COPYING.txt).
[Over 800 people](https://www.sagemath.org/development-map.html)
have contributed code to Sage. In many cases, documentation
for modules and functions list the authors.
Getting Started
---------------
The [Sage Installation Guide](https://doc.sagemath.org/html/en/installation/index.html)
provides a decision tree that guides you to the type of installation
that will work best for you. This includes building from source,
obtaining Sage from a package manager, using a container image, or using
Sage in the cloud.
**This README contains self-contained instructions for building Sage from source.**
It assumes that you have already cloned the git repository or downloaded the
[sources](https://www.sagemath.org/download-source.html) in the form
of a tarball.
If you have questions or encounter problems, please do not hesitate
to email the [sage-support mailing list](https://groups.google.com/group/sage-support)
or ask on the [Ask Sage questions and answers site](https://ask.sagemath.org).
Supported Platforms
-------------------
Sage attempts to support all major Linux distributions, recent versions of
macOS, and Windows (using Windows Subsystem for Linux or
virtualization).
Detailed information on supported platforms for a specific version of Sage
can be found in the section "Availability and installation help" of the
[release tour](https://wiki.sagemath.org/ReleaseTours) for this version.
We highly appreciate contributions to Sage that fix portability bugs
and help port Sage to new platforms; let us know at the [sage-devel
mailing list](https://groups.google.com/group/sage-devel).
[Windows] Preparing the Platform
--------------------------------
The preferred way to run Sage on Windows is using the [Windows Subsystem for
Linux](https://docs.microsoft.com/en-us/windows/wsl/faq), which allows
you to install a standard Linux distribution such as Ubuntu within
your Windows. Then all instructions for installation in Linux apply.
As an alternative, you can also run Linux on Windows using Docker (see
above) or other virtualization solutions.
[macOS] Preparing the Platform
------------------------------
If your Mac uses the Apple Silicon (M1, arm64) architecture:
- If you set up your Mac by transfering files from an older Mac, make sure
that the directory ``/usr/local`` does not contain an old copy of Homebrew
(or other software) for the x86_64 architecture that you may have copied
over. Note that Homebrew for the M1 is installed in ``/opt/homebrew``, not
``/usr/local``.
- If you wish to use conda, please see the [section on
conda](https://doc.sagemath.org/html/en/installation/conda.html) in the Sage
Installation Manual for guidance.
- Otherwise, using Homebrew ("the missing package manager for macOS") from
  https://brew.sh/ is required because it provides a version of ``gfortran`` with
necessary changes for this platform that are not in a released upstream
version of GCC. (The ``gfortran`` package that comes with the Sage
distribution is not suitable for the M1/M2.)
If your Mac uses the Intel (x86_64) architecture:
- If you wish to use conda, please see the [section on
conda](https://doc.sagemath.org/html/en/installation/conda.html) in the Sage
Installation Manual for guidance.
- Otherwise, we strongly recommend to use Homebrew ("the missing package
manager for macOS") from https://brew.sh/, which provides the ``gfortran``
compiler and many libraries.
- Otherwise, if you do not wish to install Homebrew, you will need to install
the latest version of Xcode Command Line Tools. Open a terminal window and
run `xcode-select --install`; then click "Install" in the pop-up window. If
the Xcode Command Line Tools are already installed, you may want to check if
they need to be updated by typing `softwareupdate -l`.
Instructions to Build from Source
---------------------------------
Like many other software packages, Sage is built from source using
`./configure`, followed by `make`. However, we strongly recommend to
read the following step-by-step instructions for building Sage.
The instructions cover all of Linux, macOS, and WSL.
More details, providing a background for these instructions, can be found
in the [section "Install from Source Code"](https://doc.sagemath.org/html/en/installation/source.html)
in the Installation Guide.
1. Decide on the source/build directory (`SAGE_ROOT`):
- On personal computers, any subdirectory of your :envvar:`HOME`
directory should do.
- For example, you could use `SAGE_ROOT=~/sage/sage-x.y`, which we
will use as the running example below, where `x.y` is the
current Sage version.
- You need at least 10 GB of free disk space.
- The full path to the source directory must contain **no spaces**.
- After starting the build, you cannot move the source/build
directory without breaking things.
- You may want to avoid slow filesystems such as
[network file systems (NFS)](https://en.wikipedia.org/wiki/Network_File_System)
and the like.
- [macOS] macOS allows changing directories without using exact capitalization.
Beware of this convenience when compiling for macOS. Ignoring exact
capitalization when changing into :envvar:`SAGE_ROOT` can lead to build
errors for dependencies requiring exact capitalization in path names.
2. Download/unpack or clone the sources.
- Go to https://www.sagemath.org/download-source.html, select a mirror,
and download the file :file:`sage-x.y.tar.gz`.
This compressed archive file contains the source code for Sage and
the source for all programs on which Sage depends.
- After downloading the source tarball `sage-x.y.tar.gz` into
`~/sage/`:
$ cd ~/sage/
$ tar xf sage-x.y.tar.gz # adapt x.y; takes a while
This creates the subdirectory `sage-x.y`. Now change into it:
$ cd sage-x.y/ # adapt x.y
- [Git] Alternatively, and required for Sage development, clone the Sage
git repository:
$ ORIG=https://github.com/sagemath/sage.git
$ git clone -c core.symlinks=true --branch develop --tags $ORIG
This will create the directory `sage`. (See the section
[Setting up git](https://doc.sagemath.org/html/en/developer/git_setup.html)
and the following sections in the Sage Developer's Guide
for more information.)
Change into it and pick the branch you need, typically
the latest development branch:
$ cd sage
$ git checkout develop
- [Windows] The Sage source tree contains symbolic links, and the
build will not work if Windows line endings rather than UNIX
line endings are used.
Therefore it is crucial that you unpack the source tree from the
WSL `bash` using the WSL `tar` utility and not using other
Windows tools (including mingw). Likewise, when using `git`, it
is recommended (but not necessary) to use the WSL version of
`git`.
3. [Linux, WSL] Install the required minimal build prerequisites.
- Compilers: `gcc`, `gfortran`, `g++` (GCC 8.x to 12.x and recent
versions of Clang (LLVM) are supported).
See [build/pkgs/gcc/SPKG.rst](build/pkgs/gcc/SPKG.rst) and
[build/pkgs/gfortran/SPKG.rst](build/pkgs/gfortran/SPKG.rst)
for a discussion of suitable compilers.
- Build tools: GNU `make`, GNU `m4`, `perl` (including
``ExtUtils::MakeMaker``), `ranlib`, `git`, `tar`, `bc`.
See [build/pkgs/_prereq/SPKG.rst](build/pkgs/_prereq/SPKG.rst) for
more details.
- Python 3.4 or later, or Python 2.7, a full installation including
`urllib`; but ideally version 3.8.x, 3.9.x, or 3.10.x, which
will avoid having to build Sage's own copy of Python 3.
See [build/pkgs/python3/SPKG.rst](build/pkgs/python3/SPKG.rst)
for more details.
We have collected lists of system packages that provide these build
prerequisites. See, in the folder
[build/pkgs/_prereq/distros](build/pkgs/_prereq/distros),
the files
[arch.txt](build/pkgs/_prereq/distros/arch.txt),
[debian.txt](build/pkgs/_prereq/distros/debian.txt)
(also for Ubuntu, Linux Mint, etc.),
[fedora.txt](build/pkgs/_prereq/distros/fedora.txt)
(also for Red Hat, CentOS),
[opensuse.txt](build/pkgs/_prereq/distros/opensuse.txt),
[slackware.txt](build/pkgs/_prereq/distros/slackware.txt), and
[void.txt](build/pkgs/_prereq/distros/void.txt), or visit
https://doc.sagemath.org/html/en/reference/spkg/_prereq.html#spkg-prereq
4. [Git] If you plan to do Sage development or otherwise work with ticket branches
and not only releases, install the bootstrapping prerequisites. See the
files in the folder
[build/pkgs/_bootstrap/distros](build/pkgs/_bootstrap/distros), or
visit
https://doc.sagemath.org/html/en/reference/spkg/_bootstrap.html#spkg-bootstrap
5. [Git] If you cloned the Sage repository using `git`, bootstrap the
source tree using the following command:
$ make configure
(If the bootstrapping prerequisites are not installed, this command will
download a package providing pre-built bootstrap output instead.)
6. Sanitize the build environment. Use the command
$ env
to inspect the current environment variables, in particular `PATH`,
`PKG_CONFIG_PATH`, `LD_LIBRARY_PATH`, `CFLAGS`, `CPPFLAGS`, `CXXFLAGS`,
and `LDFLAGS` (if set).
Remove items from these (colon-separated) environment variables
that Sage should not use for its own build. In particular, remove
items if they refer to a previous Sage installation.
- [WSL] In particular, WSL imports many items from the Windows
`PATH` variable into the Linux environment, which can lead to
confusing build errors. These items typically start with `/mnt/c`.
It is best to remove all of them from the environment variables.
For example, you can set `PATH` using the command:
$ export PATH=/usr/sbin/:/sbin/:/bin/:/usr/lib/wsl/lib/
- [macOS with homebrew] Set required environment variables for the build:
$ source ./.homebrew-build-env
This is to make some of Homebrew's packages (so-called keg-only
packages) available for the build. Run it once to apply the
suggestions for the current terminal session. You may need to
repeat this command before you rebuild Sage from a new terminal
session, or after installing additional homebrew packages. (You
can also add it to your shell profile so that it gets run
automatically in all future sessions.)
7. Optionally, decide on the installation prefix (`SAGE_LOCAL`):
- Traditionally, and by default, Sage is installed into the
subdirectory hierarchy rooted at `SAGE_ROOT/local/`.
- This can be changed using `./configure --prefix=SAGE_LOCAL`,
where `SAGE_LOCAL` is the desired installation prefix, which
must be writable by the user.
If you use this option in combination with `--disable-editable`,
you can delete the entire Sage source tree after completing
the build process. What is installed in `SAGE_LOCAL` will be
a self-contained installation of Sage.
- Note that in Sage's build process, `make` builds **and**
installs (`make install` is a no-op). Therefore the
installation hierarchy must be writable by the user.
- See the installation manual for options if you want to
install into shared locations such as `/usr/local/`.
Do not attempt to build Sage as `root`.
8. Optional: It is recommended that you have both LaTeX and
the ImageMagick tools (e.g. the "convert" command) installed
since some plotting functionality benefits from them.
9. Optionally, review the configuration options, which includes
many optional packages:
$ ./configure --help
A notable option for Sage developers is the following:
- Use `./configure --enable-download-from-upstream-url` to allow
downloading packages from their upstream URL if they cannot (yet) be
found on the Sage mirrors. This is useful for trying out ticket branches
that make package upgrades.
10. Optional, but highly recommended: Set some environment variables to
customize the build.
For example, the `MAKE` environment variable controls whether to
run several jobs in parallel. On a machine with 4 processors, say,
typing `export MAKE="make -j4"` will configure the build script to
perform a parallel compilation of Sage using 4 jobs. On some
powerful machines, you might even consider `-j16`, as building with
more jobs than CPU cores can speed things up further.
To reduce the terminal output during the build, type `export V=0`.
(`V` stands for "verbosity".)
Some environment variables deserve a special mention: `CC`,
`CXX` and `FC`. These variables defining your compilers
can be set at configuration time and their values will be recorded for
further use at build time and runtime.
For an in-depth discussion of more environment variables for
building Sage, see [the installation
guide](https://doc.sagemath.org/html/en/installation/source.html#environment-variables).
11. Type `./configure`, followed by any options that you wish to use.
For example, to build Sage with `gf2x` package supplied by Sage,
use `./configure --with-system-gf2x=no`.
At the end of a successful `./configure` run, you may see messages
recommending to install extra system packages using your package
manager.
For a large [list of Sage
packages](https://trac.sagemath.org/ticket/27330), Sage is able to
detect whether an installed system package is suitable for use with
Sage; in that case, Sage will not build another copy from source.
Sometimes, the messages will recommend to install packages that are
already installed on your system. See the earlier configure
messages or the file `config.log` for explanation. Also, the
messages may recommend to install packages that are actually not
available; only the most recent releases of your distribution will
have all of these recommended packages.
12. Optional: If you choose to install the additional system packages,
a re-run of `./configure` will test whether the versions installed
are usable for Sage; if they are, this will reduce the compilation
time and disk space needed by Sage. The usage of packages may be
adjusted by `./configure` parameters (check again the output of
`./configure --help`).
13. Type `make`. That's it! Everything is automatic and
non-interactive.
If you followed the above instructions, in particular regarding the
installation of system packages recommended by the output of
`./configure` (step 11), and regarding the parallel build (step 10),
building Sage takes less than one hour on a modern computer.
(Otherwise, it can take much longer.)
The build should work fine on all fully supported platforms. If it
does not, we want to know!
14. Type `./sage` to try it out. In Sage, try for example `2 + 2`,
`plot(x^2)`, `plot3d(lambda x, y: x*y, (-1, 1), (-1, 1))`
to test a simple computation and plotting in 2D and 3D.
Type <kbd>Ctrl</kbd>+<kbd>D</kbd> or `quit` to quit Sage.
15. Optional: Type `make ptestlong` to test all examples in the documentation
(over 200,000 lines of input!) -- this takes from 10 minutes to
several hours. Don't get too disturbed if there are 2 to 3 failures,
but always feel free to email the section of `logs/ptestlong.log` that
contains errors to the [sage-support mailing list](https://groups.google.com/group/sage-support).
If there are numerous failures, there was a serious problem with your build.
16. The HTML version of the [documentation](https://doc.sagemath.org/html/en/index.html)
is built during the compilation process of Sage and resides in the directory
`local/share/doc/sage/html/`. You may want to bookmark it in your browser.
17. Optional: If you want to build the PDF version of the documentation,
run `make doc-pdf` (this requires LaTeX to be installed).
18. Optional: Install optional packages of interest to you:
get a list by typing `./sage --optional` or by visiting the
[packages documentation page](https://doc.sagemath.org/html/en/reference/spkg/).
19. Optional: Create a symlink to the installed `sage` script in a
directory in your `PATH`, for example ``/usr/local``. This will
allow you to start Sage by typing `sage` from anywhere rather than
having to either type the full path or navigate to the Sage
directory and type `./sage`. This can be done by running:
$ sudo ln -s $(./sage -sh -c 'ls $SAGE_ROOT/venv/bin/sage') /usr/local/bin
20. Optional: Set up SageMath as a Jupyter kernel in an existing Jupyter notebook
or JupyterLab installation, as described in [section
"Launching SageMath"](https://doc.sagemath.org/html/en/installation/launching.html)
in the installation manual.
Troubleshooting
---------------
If you have problems building Sage, check the Sage Installation Guide,
as well as the version-specific Sage Installation FAQ in the [Sage Release
Tour](https://wiki.sagemath.org/ReleaseTours) corresponding to the
version that you are installing.
Please do not hesitate to ask for help in the [SageMath forum
](https://ask.sagemath.org/questions/) or the [sage-support mailing
list](https://groups.google.com/forum/#!forum/sage-support). The
[Troubleshooting section in the Sage Installation Guide
](https://doc.sagemath.org/html/en/installation/troubles.html)
provides instructions on what information to provide so that we can provide
help more effectively.
Contributing to Sage
--------------------
If you'd like to contribute to Sage, we strongly recommend that you read the
[Developer's Guide](https://doc.sagemath.org/html/en/developer/index.html).
Sage has significant components written in the following languages:
C/C++, Python, Cython, Common Lisp, Fortran, and a bit of Perl.
Directory Layout
----------------
Simplified directory layout (only essential files/directories):
```
SAGE_ROOT Root directory (sage-x.y in Sage tarball)
├── build
│ └── pkgs Every package is a subdirectory here
│ ├── 4ti2/
│ …
│ └── zlib/
├── configure Top-level configure script
├── COPYING.txt Copyright information
├── pkgs Source trees of Python distribution packages
│ ├── sage-conf
│ │ ├── sage_conf.py
│ │ └── setup.py
│ ├── sage-docbuild
│ │ ├── sage_docbuild/
│ │ └── setup.py
│ ├── sage-setup
│ │ ├── sage_setup/
│ │ └── setup.py
│ ├── sage-sws2rst
│ │ ├── sage_sws2rst/
│ │ └── setup.py
│ └── sagemath-standard
│ ├── bin/
│ ├── sage -> ../../src/sage
│ └── setup.py
├── local (SAGE_LOCAL) Installation hierarchy for non-Python packages
│ ├── bin Executables
│ ├── include C/C++ headers
│ ├── lib Shared libraries, architecture-dependent data
│ ├── share Databases, architecture-independent data, docs
│ │ └── doc Viewable docs of Sage and of some components
│ └── var
│ ├── lib/sage
│ │ ├── installed/
│ │ │ Records of installed non-Python packages
│ │ ├── scripts/ Scripts for uninstalling installed packages
│ │ └── venv-python3.9 (SAGE_VENV)
│ │ │ Installation hierarchy (virtual environment)
│ │ │ for Python packages
│ │ ├── bin/ Executables and installed scripts
│ │ ├── lib/python3.9/site-packages/
│ │ │ Python modules/packages are installed here
│ │ └── var/lib/sage/
│ │ └── wheels/
│ │ Python wheels for all installed Python packages
│ │
│ └── tmp/sage/ Temporary files when building Sage
├── logs
│ ├── install.log Full install log
│ └── pkgs Build logs of individual packages
│ ├── alabaster-0.7.12.log
│ …
│ └── zlib-1.2.11.log
├── m4 M4 macros for generating the configure script
│ └── *.m4
├── Makefile Running "make" uses this file
├── prefix -> SAGE_LOCAL Convenience symlink to the installation tree
├── README.md This file
├── sage Script to start Sage
├── src Monolithic Sage library source tree
│ ├── bin/ Scripts that Sage uses internally
│ ├── doc/ Sage documentation sources
│ └── sage/ The Sage library source code
├── upstream Source tarballs of packages
│ ├── Babel-2.9.1.tar.gz
│ …
│ └── zlib-1.2.11.tar.gz
├── venv -> SAGE_VENV Convenience symlink to the virtual environment
└── VERSION.txt
```
For more details see [our Developer's Guide](https://doc.sagemath.org/html/en/developer/coding_basics.html#files-and-directory-structure).
Build System
------------
This is a brief summary of the Sage software distribution's build system.
There are two components to the full Sage system--the Sage Python library
and its associated user interfaces, and the larger software distribution of
Sage's main dependencies (for those dependencies not supplied by the user's
system).
Sage's Python library is built and installed using a `setup.py` script as is
standard for Python packages (Sage's `setup.py` is non-trivial, but not
unusual).
Most of the rest of the build system is concerned with building all of Sage's
dependencies in the correct order in relation to each other. The dependencies
included by Sage are referred to as SPKGs (i.e. "Sage Packages") and are listed
under `build/pkgs`.
The main entrypoint to Sage's build system is the top-level `Makefile` at the
root of the source tree. Unlike most normal projects that use autoconf (Sage
does as well, as described below), this `Makefile` is not generated. Instead,
it contains a few high-level targets and targets related to bootstrapping the
system. Nonetheless, we still run `make <target>` from the root of the source
tree--targets not explicitly defined in the top-level `Makefile` are passed
through to another Makefile under `build/make/Makefile`.
The latter `build/make/Makefile` *is* generated by an autoconf-generated
`configure` script, using the template in `build/make/Makefile.in`. This
includes rules for building the Sage library itself (`make sagelib`), and for
building and installing each of Sage's dependencies (e.g. `make gf2x`).
The `configure` script itself, if it is not already built, can be generated by
running the `bootstrap` script (the latter requires _GNU autotools_ being installed).
The top-level `Makefile` also takes care of this automatically.
To summarize, running a command like `make python3` at the top-level of the
source tree goes something like this:
1. `make python3`
2. run `./bootstrap` if `configure` needs updating
3. run `./configure` with any previously configured options if `build/make/Makefile`
needs updating
4. change directory into `build/make` and run the `install` script--this is
little more than a front-end to running `make -f build/make/Makefile python3`,
which sets some necessary environment variables and logs some information
5. `build/make/Makefile` contains the actual rule for building `python3`; this
includes building all of `python3`'s dependencies first (and their
dependencies, recursively); the actual package installation is performed
with the `sage-spkg` program
Relocation
----------
It is not supported to move the `SAGE_ROOT` or `SAGE_LOCAL` directory
after building Sage. If you do move the directories, you will have to
run ``make distclean`` and build Sage again from scratch.
For a system-wide installation, you have to build Sage as a "normal" user
and then as root you can change permissions. See the [Installation Guide](https://doc.sagemath.org/html/en/installation/source.html#installation-in-a-multiuser-environment)
for further information.
Redistribution
--------------
Your local Sage install is almost exactly the same as any "developer"
install. You can make changes to documentation, source, etc., and very
easily package the complete results up for redistribution just like we
do.
1. To make a binary distribution with your currently installed packages,
visit [sagemath/binary-pkg](https://github.com/sagemath/binary-pkg).
2. To make your own source tarball of Sage, type:
$ make dist
The result is placed in the directory `dist/`.
Changes to Included Software
----------------------------
All software included with Sage is copyrighted by the respective authors
and released under an open source license that is __GPL version 3 or
later__ compatible. See [COPYING.txt](./COPYING.txt) for more details.
Sources are in unmodified (as far as possible) tarballs in the
`upstream/` directory. The remaining description, version
information, patches, and build scripts are in the accompanying
`build/pkgs/<packagename>` directory. This directory is
part of the Sage git repository.
| /sage-conf-10.0b0.tar.gz/sage-conf-10.0b0/sage_root/README.md | 0.453988 | 0.707796 | README.md | pypi |
# This is originally motivated by pip, but has since been generalized. We
# should avoid running pip while uninstalling a package because that is prone
# to race conditions. This script runs pip under a lock. For details, see
# https://trac.sagemath.org/ticket/21672
import fcntl
import os
import pipes
import sys
import argparse
class FileType(argparse.FileType):
    """
    Version of argparse.FileType with the option to ensure that the full path
    to the file exists.

    When ``makedirs=True``, any missing parent directories of the argument
    are created before the file itself is opened.
    """

    def __init__(self, mode='r', makedirs=False):
        # Note, the base class __init__ takes other arguments too depending on
        # the Python version but we don't care about them for this purpose
        super(FileType, self).__init__(mode=mode)
        self._makedirs = makedirs

    def __call__(self, string):
        if self._makedirs and string != '-':
            dirname = os.path.dirname(string)
            # A bare filename has no directory component; os.makedirs('')
            # raises, so only attempt creation when a directory is present.
            if dirname:
                try:
                    os.makedirs(dirname)
                except OSError as exc:
                    # The directory may already exist (or have been created
                    # concurrently); only fail if it still does not exist.
                    if not os.path.isdir(dirname):
                        raise argparse.ArgumentTypeError(
                            "can't create '{0}': {1}".format(dirname, exc))
        return super(FileType, self).__call__(string)
class IntOrFileType(FileType):
    """
    Like FileType but also accepts an int (e.g. for a file descriptor).
    """

    def __call__(self, string):
        # First see whether the argument is a plain integer (e.g. a file
        # descriptor number); otherwise fall back to treating it as a
        # filename, exactly like the parent class.
        try:
            fd = int(string)
        except ValueError:
            return super(IntOrFileType, self).__call__(string)
        return fd
def run(argv=None):
    """
    Parse command-line arguments and run a command under a file lock.

    Returns an exit status on failure paths, or does not return at all when a
    command is executed (the current process image is replaced by os.execvp,
    which keeps the locked file descriptor open in the new program).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-s', '--shared', action='store_true',
                       help='create a shared lock')
    # Note: A exclusive lock is created by default if no other flags are given,
    # but supplying the --exclusive flag explicitly may help clarity
    group.add_argument('-x', '--exclusive', action='store_true',
                       help='create an exclusive lock (the default)')
    group.add_argument('-u', '--unlock', action='store_true',
                       help='remove an existing lock')
    parser.add_argument('lock', metavar='LOCK',
                        type=IntOrFileType('w+', makedirs=True),
                        help='filename of the lock or an integer file '
                             'descriptor')
    parser.add_argument('command', metavar='COMMAND', nargs=argparse.REMAINDER,
                        help='command to run with the lock including any '
                             'arguments to that command')
    args = parser.parse_args(argv)

    if args.shared:
        locktype = fcntl.LOCK_SH
    elif args.unlock:
        locktype = fcntl.LOCK_UN
    else:
        locktype = fcntl.LOCK_EX

    lock = args.lock
    command = args.command

    if isinstance(lock, int) and command:
        parser.error('sage-flock does not accept a command when passed '
                     'a file descriptor number')
    elif not (args.unlock or isinstance(lock, int)) and not command:
        # Previously a missing command crashed with an IndexError at the
        # os.execvp call below; report a proper usage error instead.
        parser.error('a COMMAND is required unless --unlock is given or '
                     'LOCK is a file descriptor')

    # First try a non-blocking lock such that we can give an informative
    # message while the user is waiting.
    try:
        fcntl.flock(lock, locktype | fcntl.LOCK_NB)
    except IOError as exc:
        if locktype == fcntl.LOCK_SH:
            kind = "shared"
        elif locktype == fcntl.LOCK_UN:
            # This shouldn't happen
            sys.stderr.write(
                "Unexpected error trying to unlock fd: {0}\n".format(exc))
            return 1
        else:
            kind = "exclusive"
        sys.stderr.write("Waiting for {0} lock to run {1} ... ".format(
            kind, ' '.join(pipes.quote(arg) for arg in command)))
        # Block until the lock is actually acquired.
        fcntl.flock(lock, locktype)
        sys.stderr.write("ok\n")

    if not (args.unlock or isinstance(lock, int)):
        # Replace the current process with the command; the lock file
        # descriptor stays open (and hence the lock stays held).
        os.execvp(command[0], command)
# Entry point when invoked as a script. (The trailing dataset-metadata
# residue that was fused onto this line has been removed; it was not
# valid Python.)
if __name__ == '__main__':
    sys.exit(run())
from __future__ import print_function
import os
import copy
import tarfile
import stat
import subprocess
import time
from io import BytesIO
from sage_bootstrap.uncompress.filter_os_files import filter_os_files
class SageBaseTarFile(tarfile.TarFile):
    """
    Same as tarfile.TarFile, but applies a reasonable umask (0022) to the
    permissions of all extracted files and directories, and fixes
    the encoding of file names in the tarball to be 'utf-8' instead of
    depending on locale settings.

    Previously this applied the user's current umask per the default behavior
    of the ``tar`` utility, but this did not provide sufficiently reliable
    behavior in all cases, such as when the user's umask is not strict enough.

    This also sets the modified timestamps on all extracted files to the same
    time (the current time), not the timestamps stored in the tarball. This
    is meant to work around https://bugs.python.org/issue32773

    See http://trac.sagemath.org/ticket/20218#comment:16 and
    https://trac.sagemath.org/ticket/24567 for more background.
    """
    # Permission bits masked out of every extracted entry; see chmod() below.
    umask = 0o022

    def __init__(self, *args, **kwargs):
        # Force UTF-8 decoding of member names regardless of locale settings.
        kwargs['encoding'] = 'utf-8'
        # Unfortunately the only way to get the current umask is to set it
        # and then restore it
        super(SageBaseTarFile, self).__init__(*args, **kwargs)
        # Extracted files will have this timestamp (recorded once at open
        # time so all members share the same mtime).
        self._extracted_mtime = time.time()

    @property
    def names(self):
        """
        List of filenames in the archive.

        Filters out names of OS-related files that shouldn't be in the
        archive (.DS_Store, etc.)
        """
        return filter_os_files(self.getnames())

    def chmod(self, tarinfo, targetpath):
        """Apply ``self.umask`` instead of the permissions in the TarInfo."""
        # Work on a copy so the archive's own TarInfo is left untouched.
        tarinfo = copy.copy(tarinfo)
        tarinfo.mode &= ~self.umask
        # Always grant the owner write permission.
        tarinfo.mode |= stat.S_IWUSR
        # Strip setuid/setgid bits.
        tarinfo.mode &= ~(stat.S_ISUID | stat.S_ISGID)
        return super(SageBaseTarFile, self).chmod(tarinfo, targetpath)

    def utime(self, tarinfo, targetpath):
        """Override to keep the extraction time as the file's timestamp."""
        # Overwrite the archived mtime with the time this TarFile was opened.
        tarinfo.mtime = self._extracted_mtime
        return super(SageBaseTarFile, self).utime(tarinfo, targetpath)

    def extractall(self, path='.', members=None, **kwargs):
        """
        Same as tarfile.TarFile.extractall but allows filenames for
        the members argument (like zipfile.ZipFile).

        .. note::

            The additional ``**kwargs`` are for Python 2/3 compatibility, since
            different versions of this method accept additional arguments.
        """
        if members:
            # Map member names to TarInfo objects so plain strings can be
            # passed alongside (or instead of) TarInfo instances.
            name_to_member = dict([member.name, member] for member in self.getmembers())
            members = [m if isinstance(m, tarfile.TarInfo)
                       else name_to_member[m]
                       for m in members]
        return super(SageBaseTarFile, self).extractall(path=path,
                                                       members=members,
                                                       **kwargs)

    def extractbytes(self, member):
        """
        Return the contents of the specified archive member as bytes.

        If the member does not exist, returns None (implicitly).
        """
        if member in self.getnames():
            reader = self.extractfile(member)
            return reader.read()

    def _extract_member(self, tarinfo, targetpath, **kwargs):
        """
        Override to ensure that our custom umask is applied over the entire
        directory tree, even for directories that are not explicitly listed in
        the tarball.

        .. note::

            The additional ``**kwargs`` are for Python 2/3 compatibility, since
            different versions of this method accept additional arguments.
        """
        # Temporarily set the process umask so that intermediate directories
        # created implicitly by the base class also get the strict mask.
        old_umask = os.umask(self.umask)
        try:
            super(SageBaseTarFile, self)._extract_member(tarinfo, targetpath,
                                                         **kwargs)
        finally:
            os.umask(old_umask)
class SageTarFile(SageBaseTarFile):
    """
    Thin convenience wrapper around SageBaseTarFile so that
    ``SageTarFile(filename)`` behaves like ``TarFile.open(filename)``,
    which is more flexible than the basic ``TarFile.__init__``.
    """

    @staticmethod
    def can_read(filename):
        """
        Given an archive filename, returns True if this class can read and
        process the archive format of that file.
        """
        return tarfile.is_tarfile(filename)

    def __new__(cls, filename):
        # Delegate straight to TarFile.open() through the Sage base class;
        # note the returned object is a SageBaseTarFile instance.
        return SageBaseTarFile.open(filename)
class SageTarXZFile(SageBaseTarFile):
    """
    A ``.tar.xz`` file which is uncompressed in memory.
    """

    def __new__(cls, filename):
        # Read uncompressed data through a pipe. The whole archive is
        # buffered in memory, so this is only suitable for moderately
        # sized tarballs.
        proc = subprocess.Popen(["xz", "-d", "-c", filename],
                                stdout=subprocess.PIPE)
        data, _ = proc.communicate()
        return SageBaseTarFile(mode="r", fileobj=BytesIO(data))

    @staticmethod
    def can_read(filename):
        """
        Given an archive filename, returns True if this class can read and
        process the archive format of that file.

        This probes the file with ``xz -l``; any failure (including a
        missing ``xz`` executable) means "cannot read".
        """
        # Use a context manager so the devnull handle is always closed
        # (it was previously leaked).
        with open(os.devnull, 'w') as devnull:
            try:
                subprocess.check_call(["xz", "-l", filename],
                                      stdout=devnull, stderr=devnull)
            except Exception:
                return False
        return True
r"""
Sage docbuild main
This module defines the Sage documentation build command::
sage --docbuild [OPTIONS] DOCUMENT (FORMAT | COMMAND)
If ``FORMAT`` is given, it builds ``DOCUMENT`` in ``FORMAT``. If ``COMMAND`` is
given, it returns information about ``DOCUMENT``.
Run ``sage --docbuild`` to get detailed explanations about
arguments and options.
Positional arguments::
DOCUMENT name of the document to build. It can be either one
of the documents listed by -D or 'file=/path/to/FILE' to build documentation
for this specific file.
FORMAT or COMMAND document output format (or command)
Standard options::
-h, --help show a help message and exit
-H, --help-all show an extended help message and exit
-D, --documents list all available DOCUMENTs
-F, --formats list all output FORMATs
-C DOC, --commands DOC list all COMMANDs for DOCUMENT DOC; use 'all' to list all
-i, --inherited include inherited members in reference manual; may be slow, may fail for PDF output
-u, --underscore include variables prefixed with '_' in reference
manual; may be slow, may fail for PDF output
-j, --mathjax, --jsmath ignored for backwards compatibility
--no-plot do not include graphics auto-generated using the '.. plot' markup
--include-tests-blocks include TESTS blocks in the reference manual
--no-pdf-links do not include PDF links in DOCUMENT 'website';
FORMATs: html, json, pickle, web
--warn-links issue a warning whenever a link is not properly
resolved; equivalent to '--sphinx-opts -n' (sphinx option: nitpicky)
--check-nested check picklability of nested classes in DOCUMENT 'reference'
--no-prune-empty-dirs do not prune empty directories in the documentation sources
-N, --no-colors do not color output; does not affect children
-q, --quiet work quietly; same as --verbose=0
-v LEVEL, --verbose LEVEL report progress at LEVEL=0 (quiet), 1 (normal), 2
(info), or 3 (debug); does not affect children
-o DIR, --output DIR if DOCUMENT is a single file ('file=...'), write output to this directory
Advanced options::
-S OPTS, --sphinx-opts OPTS pass comma-separated OPTS to sphinx-build; must
precede OPTS with '=', as in '-S=-q,-aE' or '-S="-q,-aE"'
-U, --update-mtimes before building reference manual, update
modification times for auto-generated reST files
-k, --keep-going do not abort on errors but continue as much as
possible after an error
--all-documents ARG if ARG is 'reference', list all subdocuments of
en/reference. If ARG is 'all', list all main documents
"""
import logging
import argparse
import os
import sys
import sphinx.ext.intersphinx
from sage.env import SAGE_DOC_SRC
from .builders import DocBuilder, ReferenceBuilder, get_builder, get_documents
from . import build_options
# Module-level logger shared by all docbuild helpers in this module;
# configured by setup_logger() below.
logger = logging.getLogger(__name__)
def format_columns(lst, align='<', cols=None, indent=4, pad=3, width=80):
    """
    Utility function that formats a list as a simple table and returns
    a Unicode string representation.

    The number of columns is computed from the other options, unless it's
    passed as a keyword argument. For help on Python's string formatter,
    see https://docs.python.org/library/string.html#format-string-syntax
    """
    # Every cell is as wide as the longest entry plus the padding.
    cell_width = max(map(len, lst)) + pad
    if cols is None:
        import math
        cols = math.trunc((width - indent) / cell_width)
    margin = " " * indent
    out = margin
    for idx, entry in enumerate(lst):
        # Start a fresh indented row after every `cols` entries.
        if idx and idx % cols == 0:
            out += "\n" + margin
        out += "{0:{1}{2}}".format(entry, align, cell_width)
    out += "\n"
    return out
def help_usage(s="", compact=False):
    """
    Append and return a brief usage message for the Sage documentation builder.

    If 'compact' is False, the function adds a final newline character.
    """
    usage = "sage --docbuild [OPTIONS] DOCUMENT (FORMAT | COMMAND)"
    if compact:
        return s + usage
    return s + usage + "\n"
def help_description(s="", compact=False):
    """
    Append and return a brief description of the Sage documentation builder.

    If 'compact' is ``False``, the function adds a final newline character.
    """
    text = ("Build or return information about Sage documentation. "
            "A DOCUMENT and either a FORMAT or a COMMAND are required.")
    if not compact:
        text += "\n"
    return s + text
def help_examples(s=""):
    """
    Append and return some usage examples for the Sage documentation builder.
    """
    lines = [
        "Examples:",
        "  sage --docbuild -C all",
        "  sage --docbuild constructions pdf",
        "  sage --docbuild reference html -jv3",
        "  sage --docbuild reference print_unincluded_modules",
        "  sage --docbuild developer html --sphinx-opts='-q,-aE' --verbose 2",
    ]
    # No trailing newline after the last example (matches the historical
    # behavior of appending each line individually).
    return s + "\n".join(lines)
def help_documents():
    """
    Append and return a tabular list of documents, including a
    shortcut 'all' for all documents, available to the Sage
    documentation builder.
    """
    docs = get_documents()
    parts = ["DOCUMENTs:\n", format_columns(docs), "\n"]
    if 'reference' in docs:
        parts.append(
            "Other valid document names take the form 'reference/DIR', where\n"
            "DIR is a subdirectory of SAGE_DOC_SRC/en/reference/.\n"
            "This builds just the specified part of the reference manual.\n"
            "DOCUMENT may also have the form 'file=/path/to/FILE', which builds\n"
            "the documentation for the specified file.\n")
    return "".join(parts)
def get_formats():
    """
    Return a list of output formats the Sage documentation builder
    will accept on the command-line.
    """
    # Ask an arbitrary builder for its supported formats, then list
    # 'html' and 'pdf' first.
    builder = DocBuilder('en/tutorial')
    other_formats = builder._output_formats()
    other_formats.remove('html')
    return ['html', 'pdf'] + other_formats
def help_formats():
    """
    Append and return a tabular list of output formats available to
    the Sage documentation builder.
    """
    header = "FORMATs:\n"
    table = format_columns(get_formats())
    return header + table
def help_commands(name='all'):
    """
    Append and return a tabular list of commands, if any, the Sage
    documentation builder can run on the indicated document. The
    default is to list all commands for all documents.
    """
    # To do: Generate the lists dynamically, using class attributes,
    # as with the Builders above.
    command_dict = {'reference': [
        'print_included_modules', 'print_modified_modules (*)',
        'print_unincluded_modules', 'print_new_and_updated_modules (*)']}
    pieces = []
    for doc, commands in command_dict.items():
        if name in ('all', doc):
            pieces.append("COMMANDs for the DOCUMENT '" + doc + "':\n")
            pieces.append(format_columns(commands))
            pieces.append("(*) Since the last build.\n")
    return "".join(pieces)
class help_message_long(argparse.Action):
    """
    Print an extended help message for the Sage documentation builder
    and exit.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        # Print every extended-help section, then the standard option
        # summary, then usage examples, and exit successfully.
        for section in (help_usage, help_description, help_documents,
                        help_formats, help_commands):
            print(section())
        parser.print_help()
        print(help_examples())
        sys.exit(0)
class help_message_short(argparse.Action):
    """
    Print a help message for the Sage documentation builder.

    The message includes command-line usage and a list of options.
    The message is printed only on the first call. If error is True
    during this call, the message is printed only if the user hasn't
    requested a list (e.g., documents, formats, commands).
    """

    def __call__(self, parser, namespace, values, option_string=None):
        # Only the first invocation prints the help and exits; subsequent
        # calls (namespace already marked) are silent no-ops.
        if getattr(namespace, 'printed_help', None) is None:
            parser.print_help()
            namespace.printed_help = 1
            sys.exit(0)
class help_wrapper(argparse.Action):
    """
    A helper wrapper for command-line options to the Sage
    documentation builder that prints lists, such as document names,
    formats, and document-specific commands, then exits.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        if option_string in ['-D', '--documents']:
            print(help_documents(), end="")
        if option_string in ['-F', '--formats']:
            print(help_formats(), end="")
        if self.dest == 'commands':
            print(help_commands(values), end="")
        if self.dest == 'all_documents':
            # argparse restricts ``values`` to 'reference' or 'all' via the
            # ``choices`` option, so exactly one branch below binds ``s``.
            if values == 'reference':
                b = ReferenceBuilder('reference')
                # Use SAGE_DOC_SRC imported from sage.env (as main() does)
                # rather than os.environ, which may not have it exported.
                refdir = os.path.join(SAGE_DOC_SRC, 'en', b.name)
                s = b.get_all_documents(refdir)
                # Put the bibliography first, because it needs to be built first:
                s.remove('reference/references')
                s.insert(0, 'reference/references')
            elif values == 'all':
                s = get_documents()
                # Put the reference manual first, because it needs to be built first:
                s.remove('reference')
                s.insert(0, 'reference')
            for d in s:
                print(d)
        setattr(namespace, 'printed_list', 1)
        sys.exit(0)
def setup_parser():
    """
    Set up and return a command-line ArgumentParser instance for the
    Sage documentation builder.

    Built-in argparse help is disabled (``add_help=False``) so the custom
    help actions defined above control exactly what gets printed.
    """
    # Documentation: https://docs.python.org/library/argparse.html
    parser = argparse.ArgumentParser(usage=help_usage(compact=True),
                                     description=help_description(compact=True),
                                     add_help=False)
    # Standard options. Note: We use explicit option.dest names
    # to avoid ambiguity.
    standard = parser.add_argument_group("Standard")
    standard.add_argument("-h", "--help", nargs=0, action=help_message_short,
                          help="show a help message and exit")
    standard.add_argument("-H", "--help-all", nargs=0, action=help_message_long,
                          help="show an extended help message and exit")
    standard.add_argument("-D", "--documents", nargs=0, action=help_wrapper,
                          help="list all available DOCUMENTs")
    standard.add_argument("-F", "--formats", nargs=0, action=help_wrapper,
                          help="list all output FORMATs")
    standard.add_argument("-C", "--commands", dest="commands",
                          type=str, metavar="DOC", action=help_wrapper,
                          help="list all COMMANDs for DOCUMENT DOC; use 'all' to list all")
    standard.add_argument("-i", "--inherited", dest="inherited",
                          action="store_true",
                          help="include inherited members in reference manual; may be slow, may fail for PDF output")
    standard.add_argument("-u", "--underscore", dest="underscore",
                          action="store_true",
                          help="include variables prefixed with '_' in reference manual; may be slow, may fail for PDF output")
    # Kept only so old invocations don't break; the flag has no effect.
    standard.add_argument("-j", "--mathjax", "--jsmath", dest="mathjax",
                          action="store_true",
                          help="ignored for backwards compatibility")
    standard.add_argument("--no-plot", dest="no_plot",
                          action="store_true",
                          help="do not include graphics auto-generated using the '.. plot' markup")
    # Note the inverted destination: passing --include-tests-blocks sets
    # skip_tests to False (it defaults to True).
    standard.add_argument("--include-tests-blocks", dest="skip_tests", default=True,
                          action="store_false",
                          help="include TESTS blocks in the reference manual")
    standard.add_argument("--no-pdf-links", dest="no_pdf_links",
                          action="store_true",
                          help="do not include PDF links in DOCUMENT 'website'; FORMATs: html, json, pickle, web")
    standard.add_argument("--warn-links", dest="warn_links",
                          action="store_true",
                          help="issue a warning whenever a link is not properly resolved; equivalent to '--sphinx-opts -n' (sphinx option: nitpicky)")
    standard.add_argument("--check-nested", dest="check_nested",
                          action="store_true",
                          help="check picklability of nested classes in DOCUMENT 'reference'")
    standard.add_argument("--no-prune-empty-dirs", dest="no_prune_empty_dirs",
                          action="store_true",
                          help="do not prune empty directories in the documentation sources")
    standard.add_argument("--use-cdns", dest="use_cdns", default=False,
                          action="store_true",
                          help="assume internet connection and use CDNs; in particular, use MathJax CDN")
    standard.add_argument("-N", "--no-colors", dest="color",
                          action="store_false",
                          help="do not color output; does not affect children")
    # -q and -v share the dest "verbose": -q stores the constant 0, -v an int.
    standard.add_argument("-q", "--quiet", dest="verbose",
                          action="store_const", const=0,
                          help="work quietly; same as --verbose=0")
    standard.add_argument("-v", "--verbose", dest="verbose",
                          type=int, default=1, metavar="LEVEL",
                          action="store",
                          help="report progress at LEVEL=0 (quiet), 1 (normal), 2 (info), or 3 (debug); does not affect children")
    standard.add_argument("-o", "--output", dest="output_dir", default=None,
                          metavar="DIR", action="store",
                          help="if DOCUMENT is a single file ('file=...'), write output to this directory")

    # Advanced options.
    advanced = parser.add_argument_group("Advanced",
                                         "Use these options with care.")
    advanced.add_argument("-S", "--sphinx-opts", dest="sphinx_opts",
                          type=str, metavar="OPTS",
                          action="store",
                          help="pass comma-separated OPTS to sphinx-build; must precede OPTS with '=', as in '-S=-q,-aE' or '-S=\"-q,-aE\"'")
    advanced.add_argument("-U", "--update-mtimes", dest="update_mtimes",
                          action="store_true",
                          help="before building reference manual, update modification times for auto-generated reST files")
    advanced.add_argument("-k", "--keep-going", dest="keep_going",
                          action="store_true",
                          help="Do not abort on errors but continue as much as possible after an error")
    advanced.add_argument("--all-documents", dest="all_documents",
                          type=str, metavar="ARG",
                          choices=['all', 'reference'],
                          action=help_wrapper,
                          help="if ARG is 'reference', list all subdocuments"
                               " of en/reference. If ARG is 'all', list all main"
                               " documents")

    # Positional arguments: both optional here so the custom help actions
    # can run without them; main() enforces that both are present.
    parser.add_argument("document", nargs='?', type=str, metavar="DOCUMENT",
                        help="name of the document to build. It can be either one of the documents listed by -D or 'file=/path/to/FILE' to build documentation for this specific file.")
    parser.add_argument("format", nargs='?', type=str,
                        metavar="FORMAT or COMMAND", help='document output format (or command)')
    return parser
def setup_logger(verbose=1, color=True):
    r"""
    Set up a Python Logger instance for the Sage documentation builder.

    The optional argument sets logger's level and message format.

    EXAMPLES::

        sage: from sage_docbuild.__main__ import setup_logger, logger
        sage: setup_logger()
        sage: type(logger)
        <class 'logging.Logger'>
    """
    # Set up colors. Adapted from sphinx.cmdline.
    import sphinx.util.console as c
    if not color or not sys.stdout.isatty() or not c.color_terminal():
        c.nocolor()

    # Available colors: black, darkgray, (dark)red, dark(green),
    # brown, yellow, (dark)blue, purple, fuchsia, turquoise, teal,
    # lightgray, white. Available styles: reset, bold, faint,
    # standout, underline, blink.

    # Set up log record formats.
    format_std = "%(message)s"
    formatter = logging.Formatter(format_std)

    # format_debug = "%(module)s #%(lineno)s %(funcName)s() %(message)s"
    fields = ['%(module)s', '#%(lineno)s', '%(funcName)s()', '%(message)s']
    colors = ['darkblue', 'darkred', 'brown', 'reset']
    styles = ['reset', 'reset', 'reset', 'reset']
    # Join colorized fields with single spaces. (The previous code compared
    # the loop index against len(fields) instead of len(fields) - 1, so it
    # unconditionally appended a trailing space to the format string.)
    format_debug = " ".join(
        c.colorize(style, c.colorize(color_name, field))
        for field, color_name, style in zip(fields, colors, styles))

    # Note: There's also Handler.setLevel(). The argument is the
    # lowest severity message that the respective logger or handler
    # will pass on. The default levels are DEBUG, INFO, WARNING,
    # ERROR, and CRITICAL. We use "WARNING" for normal verbosity and
    # "ERROR" for quiet operation. It's possible to define custom
    # levels. See the documentation for details.
    if verbose == 0:
        logger.setLevel(logging.ERROR)
    if verbose == 1:
        logger.setLevel(logging.WARNING)
    if verbose == 2:
        logger.setLevel(logging.INFO)
    if verbose == 3:
        logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(format_debug)

    # NOTE(review): each call appends a new handler; calling setup_logger()
    # twice duplicates log output -- confirm whether callers rely on this.
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)
class IntersphinxCache:
    """
    Replace sphinx.ext.intersphinx.fetch_inventory by an in-memory
    cached version.
    """

    def __init__(self):
        # Monkey-patch the fetch function at construction time, keeping a
        # handle on the real implementation for cache misses.
        self.inventories = {}
        self.real_fetch_inventory = sphinx.ext.intersphinx.fetch_inventory
        sphinx.ext.intersphinx.fetch_inventory = self.fetch_inventory

    def fetch_inventory(self, app, uri, inv):
        """
        Return the result of ``sphinx.ext.intersphinx.fetch_inventory()``
        from a cache if possible. Otherwise, call
        ``sphinx.ext.intersphinx.fetch_inventory()`` and cache the result.
        """
        key = (uri, inv)
        if key not in self.inventories:
            self.inventories[key] = self.real_fetch_inventory(app, uri, inv)
        return self.inventories[key]
def main():
    """
    Entry point for ``sage --docbuild``: parse the command line, configure
    the environment and logging, then run the requested document builder.
    """
    # Parse the command-line.
    parser = setup_parser()
    args = parser.parse_args()
    # Builders read their options from this class attribute.
    DocBuilder._options = args

    # Get the name and type (target format) of the document we are
    # trying to build.
    name, typ = args.document, args.format
    if not name or not typ:
        parser.print_help()
        sys.exit(1)

    # Set up module-wide logging.
    setup_logger(args.verbose, args.color)

    def excepthook(*exc_info):
        # Route uncaught exceptions through the docbuild logger so they are
        # formatted consistently with the rest of the build output.
        logger.error('Error building the documentation.', exc_info=exc_info)
        if build_options.INCREMENTAL_BUILD:
            logger.error('''
Note: incremental documentation builds sometimes cause spurious
error messages. To be certain that these are real errors, run
"make doc-clean doc-uninstall" first and try again.''')

    sys.excepthook = excepthook

    # Process selected options: most are communicated to the (child) Sphinx
    # processes through environment variables or build_options globals.
    if args.check_nested:
        os.environ['SAGE_CHECK_NESTED'] = 'True'
    if args.underscore:
        os.environ['SAGE_DOC_UNDERSCORE'] = "True"
    if args.sphinx_opts:
        build_options.ALLSPHINXOPTS += args.sphinx_opts.replace(',', ' ') + " "
    if args.no_pdf_links:
        build_options.WEBSITESPHINXOPTS = " -A hide_pdf_links=1 "
    if args.warn_links:
        build_options.ALLSPHINXOPTS += "-n "
    if args.no_plot:
        os.environ['SAGE_SKIP_PLOT_DIRECTIVE'] = 'yes'
    if args.skip_tests:
        os.environ['SAGE_SKIP_TESTS_BLOCKS'] = 'True'
    if args.use_cdns:
        os.environ['SAGE_USE_CDNS'] = 'yes'

    build_options.ABORT_ON_ERROR = not args.keep_going

    if not args.no_prune_empty_dirs:
        # Delete empty directories. This is needed in particular for empty
        # directories due to "git checkout" which never deletes empty
        # directories it leaves behind. See Issue #20010.
        # Issue #31948: This is not parallelization-safe; use the option
        # --no-prune-empty-dirs to turn it off
        for dirpath, dirnames, filenames in os.walk(SAGE_DOC_SRC, topdown=False):
            if not dirnames + filenames:
                logger.warning('Deleting empty directory {0}'.format(dirpath))
                os.rmdir(dirpath)

    # Set up Intersphinx cache
    _ = IntersphinxCache()

    builder = getattr(get_builder(name), typ)
    builder()
# Entry point when invoked as a script. (The trailing dataset-metadata
# residue fused onto this line was removed; it was not valid Python.)
if __name__ == '__main__':
    sys.exit(main())
# Sage: a SPARQL query engine for public Linked Data providers
[](https://travis-ci.com/sage-org/sage-engine) [](https://badge.fury.io/py/sage-engine) [](https://sage-org.github.io/sage-engine/)
[Read the online documentation](https://sage-org.github.io/sage-engine/)
SaGe is a SPARQL query engine for public Linked Data providers that implements *Web preemption*. The SPARQL engine includes a smart Sage client
and a Sage SPARQL query server hosting RDF datasets (hosted using [HDT](http://www.rdfhdt.org/)).
This repository contains the **Python implementation of the SaGe SPARQL query server**.
SPARQL queries are suspended by the web server after a fixed quantum of time and resumed upon client request. Using Web preemption, Sage ensures stable response times for query execution and completeness of results under high load.
The complete approach and experimental results are available in a Research paper accepted at The Web Conference 2019, [available here](https://hal.archives-ouvertes.fr/hal-02017155/document). *Thomas Minier, Hala Skaf-Molli and Pascal Molli. "SaGe: Web Preemption for Public SPARQL Query services" in Proceedings of the 2019 World Wide Web Conference (WWW'19), San Francisco, USA, May 13-17, 2019*.
We appreciate your feedback/comments/questions to be sent to our [mailing list](mailto:sage@univ-nantes.fr) or [our issue tracker on github](https://github.com/sage-org/sage-engine/issues).
# Table of contents
* [Installation](#installation)
* [Getting started](#getting-started)
* [Server configuration](#server-configuration)
* [Starting the server](#starting-the-server)
* [Sage Docker image](#sage-docker-image)
* [Command line utilities](#command-line-utilities)
* [Documentation](#documentation)
# Installation
Installation in a [virtualenv](https://virtualenv.pypa.io/en/stable/) is **strongly advised!**
Requirements:
* Python 3.7 (*or higher*)
* [pip](https://pip.pypa.io/en/stable/)
* **gcc/clang** with **c++11 support**
* **Python Development headers**
> You should have the `Python.h` header available on your system.
> For example, for Python 3.7, install the `python3.7-dev` package on Debian/Ubuntu systems.
## Installation using pip
The core engine of the SaGe SPARQL query server with [HDT](http://www.rdfhdt.org/) as a backend can be installed as follows:
```bash
pip install sage-engine[hdt,postgres]
```
The SaGe query engine uses various **backends** to load RDF datasets.
The various backends available are installed as extras dependencies. The above command install both the HDT and PostgreSQL backends.
## Manual Installation using poetry
The SaGe SPARQL query server can also be manually installed using the [poetry](https://github.com/sdispater/poetry) dependency manager.
```bash
git clone https://github.com/sage-org/sage-engine
cd sage-engine
poetry install --extras "hdt postgres"
```
As with pip, the various SaGe backends are installed as extras dependencies, using the `--extras` flag.
# Getting started
## Server configuration
A Sage server is configured using a configuration file in [YAML syntax](http://yaml.org/).
You will find below a minimal working example of such configuration file.
A full example is available [in the `config_examples/` directory](https://github.com/sage-org/sage-engine/blob/master/config_examples/example.yaml)
```yaml
name: SaGe Test server
maintainer: Chuck Norris
quota: 75
max_results: 2000
graphs:
-
name: dbpedia
uri: http://example.org/dbpedia
description: DBPedia
backend: hdt-file
file: datasets/dbpedia.2016.hdt
```
The `quota` and `max_results` fields are used to set the maximum time quantum and the maximum number of results
allowed per request, respectively.
Each entry in the `graphs` field declares an RDF graph with a name, URI, description, backend and options specific to this backend.
Currently, **only** the `hdt-file` backend is supported, which allow a Sage server to load RDF datasets from [HDT files](http://www.rdfhdt.org/). Sage uses [pyHDT](https://github.com/Callidon/pyHDT) to load and query HDT files.
## Starting the server
The `sage` executable, installed alongside the Sage server, makes it easy to start a Sage server from a configuration file using [Gunicorn](http://gunicorn.org/), a Python WSGI HTTP Server.
```bash
# launch Sage server with 4 workers on port 8000
sage my_config.yaml -w 4 -p 8000
```
The full usage of the `sage` executable is detailed below:
```
Usage: sage [OPTIONS] CONFIG
Launch the Sage server using the CONFIG configuration file
Options:
-p, --port INTEGER The port to bind [default: 8000]
-w, --workers INTEGER The number of server workers [default: 4]
--log-level [debug|info|warning|error]
The granularity of log outputs [default:
info]
--help Show this message and exit.
```
# SaGe Docker image
The Sage server is also available through a [Docker image](https://hub.docker.com/r/callidon/sage/).
In order to use it, do not forget to [mount in the container](https://docs.docker.com/storage/volumes/) the directory that contains your configuration file and your datasets.
```bash
docker pull callidon/sage
docker run -v path/to/config-file:/opt/data/ -p 8000:8000 callidon/sage sage /opt/data/config.yaml -w 4 -p 8000
```
# Documentation
To generate the documentation, navigate in the `docs` directory and generate the documentation
```bash
cd docs/
make html
open build/html/index.html
```
Copyright 2017-2019 - [GDD Team](https://sites.google.com/site/gddlina/), [LS2N](https://www.ls2n.fr/?lang=en), [University of Nantes](http://www.univ-nantes.fr/)
| /sage_engine-2.3.0-py3-none-any.whl/README.md | 0.530966 | 0.976446 | README.md | pypi |
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Optional, Tuple
from sage.database.db_iterator import DBIterator
class DatabaseConnector(ABC):
    """A DatabaseConnector is an abstract class for creating connectors to a database"""
    # NOTE(review): the ``open``/``close`` methods and the context-manager/
    # destructor trio defined here are re-defined verbatim later in this
    # class body; the later definitions are the ones that take effect, so
    # one of the two copies should be removed.
    def open(self):
        """Open the database connection"""
        pass
    def close(self):
        """Close the database connection"""
        pass
    def __enter__(self):
        """Implementation of the __enter__ method from the context manager spec"""
        self.open()
        return self
    def __exit__(self, type, value, traceback):
        """Implementation of the __exit__ method from the context manager spec"""
        self.close()
    def __del__(self):
        """Destructor: close the connection when the connector is collected"""
        self.close()
    @abstractmethod
    def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple[DBIterator, int]:
        """Get an iterator over all RDF triples matching a triple pattern.

        Args:
          * subject: Subject of the triple pattern.
          * predicate: Predicate of the triple pattern.
          * object: Object of the triple pattern.
          * last_read: A RDF triple ID. When set, the search is resumed for this RDF triple.
            (NOTE(review): the format of this ID appears to be backend-specific -- confirm
            against the concrete connector implementations.)
          * as_of: A version timestamp. When set, perform all reads against a consistent snapshot represented by this timestamp.

        Returns:
          A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern.

        Example:
          >>> iterator, cardinality = connector.search('?s', 'http://xmlns.com/foaf/0.1/name', '?name')
          >>> print(f"The triple pattern '?s foaf:name ?o' matches {cardinality} RDF triples")
          >>> for s, p, o in iterator:
          >>>   print(f"RDF Triple {s} {p} {o}")
        """
        pass
@abstractmethod
def from_config(config: dict):
"""Build a DatabaseConnector from a dictionnary"""
pass
def open(self) -> None:
"""Open the database connection"""
pass
def close(self) -> None:
"""Close the database connection"""
pass
def insert(self, subject: str, predicate: str, obj: str) -> None:
"""Insert a RDF triple into the RDF graph.
If not overrided, this method raises an exception as it consider the graph as read-only.
Args:
* subject: Subject of the RDF triple.
* predicate: Predicate of the RDF triple.
* obj: Object of the RDF triple.
Throws: `NotImplementedError` if the database connection is read-only.
"""
raise NotImplementedError("The RDF graph is read-only: INSERT DATA queries are not allowed")
def delete(self, ssubject: str, predicate: str, obj: str) -> None:
"""Delete a RDF triple from the RDF graph.
If not overrided, this method raises an exception as it consider the graph as read-only.
Args:
* subject: Subject of the RDF triple.
* predicate: Predicate of the RDF triple.
* obj: Object of the RDF triple.
Throws: `NotImplementedError` if the database connection is read-only.
"""
raise NotImplementedError("The RDF graph is read-only: DELETE DATA queries are not allowed")
def start_transaction(self) -> None:
"""Start a transaction (if supported by this type of connector)"""
pass
def commit_transaction(self) -> None:
"""Commit any ongoing transaction (if supported by this type of connector)"""
pass
def abort_transaction(self) -> None:
"""Abort any ongoing transaction (if supported by this type of connector)"""
pass
def __enter__(self):
"""Implementation of the __enter__ method from the context manager spec"""
self.open()
return self
def __exit__(self, type, value, traceback):
"""Implementation of the __close__ method from the context manager spec"""
self.close()
def __del__(self):
"""Destructor"""
self.close()
@property
def nb_triples(self) -> int:
"""Get the number of RDF triples in the database"""
return 0
@property
def nb_subjects(self) -> int:
"""Get the number of subjects in the database"""
return 0
@property
def nb_predicates(self) -> int:
"""Get the number of predicates in the database"""
return 0
@property
def nb_objects(self) -> int:
"""Get the number of objects in the database"""
return 0 | /sage_engine-2.3.0-py3-none-any.whl/sage/database/db_connector.py | 0.947684 | 0.374819 | db_connector.py | pypi |
from math import ceil
from typing import Dict, List, Optional, Tuple
from sage.database.db_connector import DatabaseConnector
from sage.database.postgres_backends.transaction_manager import TransactionManager
class PostgresConnector(DatabaseConnector):
    """A PostgresConnector search for RDF triples in a PostgreSQL database.

    Args:
      * table_name: Name of the SQL table containing RDF data.
      * dbname: the database name.
      * user: user name used to authenticate.
      * password: password used to authenticate.
      * host: database host address (defaults to UNIX socket if not provided).
      * port: connection port number (defaults to 5432 if not provided).
      * fetch_size: The number of SQL rows/RDF triples to fetch per batch (defaults to 500).
    """

    def __init__(self, table_name: str, dbname: str, user: str, password: str, host: str = '', port: int = 5432, fetch_size: int = 500):
        super(PostgresConnector, self).__init__()
        self._table_name = table_name
        self._manager = TransactionManager(dbname, user, password, host=host, port=port)
        self._fetch_size = fetch_size
        # True until the first call to `open`, which fetches DB statistics
        self._warmup = True
        # Data used for cardinality estimation.
        # They are initialized using PostgreSQL histograms, after the 1st connection to the DB.
        self._avg_row_count = 0
        self._subject_histograms = self._empty_histograms()
        self._predicate_histograms = self._empty_histograms()
        self._object_histograms = self._empty_histograms()

    @staticmethod
    def _empty_histograms() -> dict:
        """Return the default histogram structure used before statistics are fetched."""
        return {
            'selectivities': dict(),
            'null_frac': 0,
            'n_distinct': 0,
            'sum_freqs': 0
        }

    def _fetch_histograms(self, cursor, table_name: str, attribute_name: str) -> Tuple[int, int, Dict[str, float], int]:
        """Download PostgreSQL histograms from a given table and attribute.

        Args:
          * cursor: A psycopg cursor.
          * table_name: Name of the SQL table from which we should retrieve histograms.
          * attribute_name: Table attribute from which we should retrieve histograms.

        Returns:
            A tuple (`null_frac`, `n_distinct`, `selectivities`, `sum_most_common_freqs`) where:
            * `null_frac` is the fraction of null values in the histogram.
            * `n_distinct` is the estimated number of distinct values for this attribute.
            * `selectivities` is the estimated selectivities of the attribute's values in the table.
            * `sum_most_common_freqs` is the sum of the frequencies of the most common values for this attribute.
        """
        base_query = f"SELECT null_frac, n_distinct, most_common_vals, most_common_freqs FROM pg_stats WHERE tablename = '{table_name}' AND attname = '{attribute_name}'"
        cursor.execute(base_query)
        record = cursor.fetchone()
        # pg_stats has no row for this attribute when the table was never ANALYZEd:
        # fall back to the same defaults used before warmup
        if record is None:
            return (0, 0, dict(), 0)
        null_frac, n_distinct, most_common_vals, most_common_freqs = record
        # most_common_vals/most_common_freqs may be NULL when PostgreSQL found
        # no value more common than the others
        if most_common_vals is None or most_common_freqs is None:
            return (null_frac, n_distinct, dict(), 0)
        # build the selectivity table.
        # NOTE: most_common_vals is an anyarray rendered as '{v1,v2,...}',
        # hence the [1:-1] slicing and the split on ','
        selectivities = {}
        for position, common_val in enumerate(most_common_vals[1:-1].split(",")):
            if position < len(most_common_freqs):
                selectivities[common_val] = most_common_freqs[position]
        return (null_frac, n_distinct, selectivities, sum(most_common_freqs))

    def open(self) -> None:
        """Open the connection to the PostgreSQL database and initialize histograms."""
        # BUGFIX: open the connection when it is *not* already open.
        # The previous condition was inverted (`if self._manager.is_open()`);
        # sibling connectors consistently use `not is_open()`.
        if not self._manager.is_open():
            self._manager.open_connection()
        # Do warmup phase if required, i.e., fetch stats for query execution
        if self._warmup:
            cursor = self._manager.start_transaction()
            # fetch estimated table cardinality
            cursor.execute(f"SELECT reltuples AS approximate_row_count FROM pg_class WHERE relname = '{self._table_name}'")
            self._avg_row_count = cursor.fetchone()[0]
            # fetch histograms for each column of the RDF triples table
            for attribute, target in [
                ('subject', '_subject_histograms'),
                ('predicate', '_predicate_histograms'),
                ('object', '_object_histograms')
            ]:
                (null_frac, n_distinct, selectivities, sum_freqs) = self._fetch_histograms(cursor, self._table_name, attribute)
                setattr(self, target, {
                    'selectivities': selectivities,
                    'null_frac': null_frac,
                    'n_distinct': n_distinct,
                    'sum_freqs': sum_freqs
                })
            # commit & close cursor
            self._manager.commit()
            self._warmup = False

    def close(self) -> None:
        """Close the database connection"""
        # commit, then close the cursor and the connection
        self._manager.close_connection()

    def start_transaction(self) -> None:
        """Start a PostgreSQL transaction"""
        self._manager.start_transaction()

    def commit_transaction(self) -> None:
        """Commit any ongoing transaction"""
        self._manager.commit()

    def abort_transaction(self) -> None:
        """Abort any ongoing transaction"""
        self._manager.abort()

    @staticmethod
    def _term_selectivity(term, histograms) -> float:
        """Estimated selectivity of a single (possibly unbounded) triple pattern term.

        An unbounded term (None) does not restrict the scan. A term absent from
        the "most common values" gets the remaining frequency mass spread evenly
        over the remaining distinct values.
        """
        if term is None:
            return 1
        if term in histograms['selectivities']:
            return histograms['selectivities'][str(term)]
        return (1 - histograms['sum_freqs']) / (histograms['n_distinct'] - len(histograms['selectivities']))

    def _estimate_cardinality(self, subject, predicate, obj) -> int:
        """Estimate the cardinality of a triple pattern using PostgreSQL histograms.

        Args:
          * subject: Subject of the triple pattern.
          * predicate: Predicate of the triple pattern.
          * obj: Object of the triple pattern.

        Returns:
            The estimated cardinality of the triple pattern (always >= 1).
        """
        # estimate the selectivity of the triple pattern using PostgreSQL histograms
        selectivity = 1
        # avoid division per zero when some histograms are not fully up-to-date
        try:
            selectivity *= self._term_selectivity(subject, self._subject_histograms)
            selectivity *= self._term_selectivity(predicate, self._predicate_histograms)
            selectivity *= self._term_selectivity(obj, self._object_histograms)
        except ZeroDivisionError:
            pass
        # estimate the cardinality from the estimated selectivity
        cardinality = int(ceil(selectivity * self._avg_row_count))
        return cardinality if cardinality > 0 else 1
import json
from datetime import datetime
from typing import Dict, List, Optional, Tuple

from sage.database.db_iterator import DBIterator
class PostgresIterator(DBIterator):
    """A PostgresIterator fetches RDF triples from a versionned PostgreSQL table using batch queries and lazy loading.

    Args:
      * cursor: Psycopg cursor used to query the database.
      * start_time: Timestamp at which the iterator should read.
      * start_query: Prepared SQL query used to start iteration.
      * start_params: SQL params to apply to the prepared SQL query.
      * table_name: Name of the SQL table to scan.
      * pattern: Triple pattern scanned.
      * fetch_size: The number of SQL rows/RDF triples to fetch per batch.
    """

    def __init__(self, cursor, start_time: datetime, start_query: str, start_params: List[str], table_name: str, pattern: Dict[str, str], fetch_size: int = 500):
        super(PostgresIterator, self).__init__(pattern)
        self._cursor = cursor
        self._start_time = start_time
        self._current_query = start_query
        self._table_name = table_name
        self._fetch_size = fetch_size
        # execute the starting query, then buffer the first row so that
        # `last_read` and `has_next` can inspect it without advancing the scan
        self._cursor.execute(self._current_query, start_params)
        self._last_reads = self._cursor.fetchmany(size=1)

    def __del__(self) -> None:
        """Destructor: close the dedicated database cursor"""
        self._cursor.close()

    def last_read(self) -> str:
        """Return the index ID of the last element read.

        The ID is a compact JSON object holding the buffered RDF triple, its
        validity interval (insert/delete timestamps) and the snapshot timestamp,
        so that the scan can later be resumed exactly where it stopped.
        """
        if not self.has_next():
            return ''
        triple = self._last_reads[0]
        return json.dumps({
            's': triple[0],
            'p': triple[1],
            'o': triple[2],
            'ins': triple[3].isoformat(),
            'del': triple[4].isoformat(),
            'ts': self._start_time.isoformat()
        }, separators=(',', ':'))

    def next(self) -> Optional[Dict[str, str]]:
        """Return the next solution mapping or None if there are no more solutions"""
        if not self.has_next():
            return None
        # BUGFIX: the buffer was popped twice, which skipped a triple and
        # returned a row whose validity interval was never checked (and could
        # raise IndexError on an empty buffer). Pop exactly once.
        triple = self._last_reads.pop(0)
        # extract the validity interval of the RDF triple
        insert_t = triple[3]
        delete_t = triple[4]
        # case 1: the current triple is in the valid version, so it is a match
        if insert_t <= self._start_time and self._start_time < delete_t:
            return (triple[0], triple[1], triple[2])
        # case 2: do a NONE forward to trigger another iteration loop
        # to find a matching RDF triple
        return None

    def has_next(self) -> bool:
        """Return True if there is still results to read, and False otherwise"""
        if len(self._last_reads) == 0:
            # buffer exhausted: fetch the next batch of rows from the cursor
            self._last_reads = self._cursor.fetchmany(size=self._fetch_size)
        return len(self._last_reads) > 0
from datetime import datetime
from typing import List, Tuple
from sage.database.utils import get_kind
def get_start_query(subj: str, pred: str, obj: str, table_name: str) -> Tuple[str, Tuple[str, ...]]:
    """Get a prepared SQL query which starts scanning for a triple pattern.

    Args:
      * subj: Subject of the triple pattern.
      * pred: Predicate of the triple pattern.
      * obj: Object of the triple pattern.
      * table_name: Name of the SQL table to scan for RDF triples.

    Returns:
        A tuple with the prepared SQL query and its parameters
        (``None`` when the query takes no parameter).
    """
    kind = get_kind(subj, pred, obj)
    query = f"SELECT * FROM {table_name} "
    # Parameters are consistently returned as tuples (the previous
    # implementation mixed lists and tuples across branches).
    if kind == 'spo':
        query += """WHERE subject = %s
        AND predicate = %s
        AND md5(object) = md5(%s)
        ORDER BY subject, predicate, md5(object), insert_t, delete_t"""
        return query, (subj, pred, obj)
    elif kind == '???':
        # no bounded term: scan the whole table
        query += "ORDER BY subject, predicate, md5(object), insert_t, delete_t"
        return query, None
    elif kind == 's??':
        query += """WHERE subject = %s
        ORDER BY subject, predicate, md5(object), insert_t, delete_t"""
        return query, (subj,)
    elif kind == 'sp?':
        query += """WHERE subject = %s
        AND predicate = %s
        ORDER BY subject, predicate, md5(object), insert_t, delete_t"""
        return query, (subj, pred)
    elif kind == '?p?':
        query += """WHERE predicate = %s
        ORDER BY predicate, md5(object), subject, insert_t, delete_t"""
        return query, (pred,)
    elif kind == '?po':
        query += """WHERE predicate = %s
        AND md5(object) = md5(%s)
        ORDER BY predicate, md5(object), subject, insert_t, delete_t"""
        return query, (pred, obj)
    elif kind == 's?o':
        query += """WHERE subject = %s
        AND md5(object) = md5(%s)
        ORDER BY md5(object), subject, predicate, insert_t, delete_t"""
        return query, (subj, obj)
    elif kind == '??o':
        query += """WHERE md5(object) = md5(%s)
        ORDER BY md5(object), subject, predicate, insert_t, delete_t"""
        return query, (obj,)
    else:
        raise Exception(f"Unkown pattern type: {kind}")
def get_resume_query(subj: str, pred: str, obj: str, last_read: Tuple[str, str, str, datetime, datetime], table_name: str, symbol: str = ">=") -> Tuple[str, str]:
    """Get a prepared SQL query which resumes scanning for a triple pattern.

    The SQL query rely on keyset pagination to resume query processing using an optimized Index Scan.

    The WHERE clause pins the bounded terms of the pattern, then compares the
    remaining (free) columns plus the version timestamps as a row value against
    the last row read, using `symbol`. The column order of each row comparison
    matches the ORDER BY of the corresponding branch in `get_start_query`, so
    PostgreSQL can serve it from the same index.

    Args:
      * subj: Subject of the triple pattern.
      * pred: Predicate of the triple pattern.
      * obj: Object of the triple pattern.
      * last_read: The SQL row from which to resume scanning, as a tuple
        (subject, predicate, object, insert_t, delete_t).
      * table_name: Name of the SQL table to scan for RDF triples.
      * symbol: Symbol used to perform the keyset pagination. Defaults to ">=".

    Returns:
        A tuple with the prepared SQL query and its parameters.
        For a fully bounded pattern ('spo') there is nothing left to resume,
        so (None, None) is returned.
    """
    last_s, last_p, last_o, last_insert_t, last_delete_t = last_read
    kind = get_kind(subj, pred, obj)
    query = f"SELECT * FROM {table_name} "
    if kind == 'spo':
        # a fully bounded pattern matches at most one key: nothing to resume
        return None, None
    elif kind == '???':
        query += f"""WHERE (subject, predicate, md5(object), insert_t, delete_t) {symbol} (%s, %s, md5(%s), %s, %s)
        ORDER BY subject, predicate, md5(object), insert_t, delete_t"""
        return query, (last_s, last_p, last_o, last_insert_t, last_delete_t)
    elif kind == 's??':
        query += f"""WHERE subject = %s
        AND (predicate, md5(object), insert_t, delete_t) {symbol} (%s, md5(%s), %s, %s)
        ORDER BY subject, predicate, md5(object), insert_t, delete_t"""
        # last_s is used for the bounded subject (it equals `subj` by construction)
        return query, (last_s, last_p, last_o, last_insert_t, last_delete_t)
    elif kind == 'sp?':
        query += f"""WHERE subject = %s
        AND predicate = %s
        AND (md5(object), insert_t, delete_t) {symbol} (md5(%s), %s, %s)
        ORDER BY subject, predicate, md5(object), insert_t, delete_t"""
        return query, (last_s, last_p, last_o, last_insert_t, last_delete_t)
    elif kind == '?p?':
        query += f"""WHERE predicate = %s
        AND (md5(object), subject, insert_t, delete_t) {symbol} (md5(%s), %s, %s, %s)
        ORDER BY predicate, md5(object), subject, insert_t, delete_t"""
        return query, (last_p, last_o, last_s, last_insert_t, last_delete_t)
    elif kind == '?po':
        query += f"""WHERE predicate = %s
        AND md5(object) = md5(%s)
        AND (subject, insert_t, delete_t) {symbol} (%s, %s, %s)
        ORDER BY predicate, md5(object), subject, insert_t, delete_t"""
        return query, (last_p, last_o, last_s, last_insert_t, last_delete_t)
    elif kind == 's?o':
        query += f"""WHERE subject = %s
        AND md5(object) = md5(%s)
        AND (predicate, insert_t, delete_t) {symbol} (%s, %s, %s)
        ORDER BY md5(object), subject, predicate, insert_t, delete_t"""
        return query, (last_s, last_o, last_p, last_insert_t, last_delete_t)
    elif kind == '??o':
        query += f"""WHERE md5(object) = md5(%s)
        AND (subject, predicate, insert_t, delete_t) {symbol} (%s, %s, %s, %s)
        ORDER BY md5(object), subject, predicate, insert_t, delete_t"""
        return query, (last_o, last_s, last_p, last_insert_t, last_delete_t)
    else:
        raise Exception(f"Unkown pattern type: {kind}")
def get_insert_query(table_name: str) -> str:
    """Build a SQL query to insert a RDF triple into a MVCC-PostgreSQL table.

    The triple is versioned: it becomes visible at the transaction timestamp
    and stays valid until 'infinity' (i.e., until deleted).

    Argument: Name of the SQL table in which the triple will be inserted.

    Returns: A prepared SQL query that can be executed with a tuple (subject, predicate, object).
    """
    columns = "subject, predicate, object, insert_t, delete_t"
    values = "%s, %s, %s, transaction_timestamp(), 'infinity'::timestamp"
    return f"INSERT INTO {table_name} ({columns}) VALUES ({values}) ON CONFLICT DO NOTHING"
def get_insert_many_query(table_name: str) -> str:
    """Build a SQL query to insert several RDF triples into a MVCC-PostgreSQL table.

    The single '%s' placeholder is expanded by psycopg2's `execute_values`
    into the full list of rows.

    Argument: Name of the SQL table in which the triples will be inserted.

    Returns: A prepared SQL query that can be executed with a list of tuples (subject, predicate, object).
    """
    return ("INSERT INTO " + table_name
            + " (subject, predicate, object, insert_t, delete_t)"
            + " VALUES %s ON CONFLICT DO NOTHING")
def get_delete_query(table_name: str) -> str:
    """Build a SQL query to delete a RDF triple from a MVCC-PostgreSQL table.

    In the MVCC model a "delete" does not remove the row: it closes the
    triple's validity interval by setting `delete_t` on the currently-valid
    version (the one whose `delete_t` is still 'infinity').

    Argument: Name of the SQL table from which the triple will be deleted.

    Returns: A prepared SQL query that can be executed with a tuple (subject, predicate, object).
    """
    query = f"""UPDATE {table_name} SET delete_t = transaction_timestamp()
    WHERE subject = %s
    AND predicate = %s
    AND md5(object) = md5(%s)
    AND delete_t = 'infinity'::timestamp"""
    return query
import json
import logging
import coloredlogs
from datetime import datetime
from typing import Optional, List, Dict, Tuple
from uuid import uuid4
from sage.database.db_iterator import EmptyIterator
from sage.database.postgres_backends.connector import PostgresConnector
from sage.database.postgres_backends.postgres_mvcc.iterator import PostgresIterator
from sage.database.postgres_backends.postgres_mvcc.queries import get_delete_query, get_insert_query
from sage.database.postgres_backends.postgres_mvcc.queries import get_resume_query, get_start_query
# Configure colored log output for this module and create its logger
coloredlogs.install(level='INFO', fmt='%(asctime)s - %(levelname)s %(message)s')
logger = logging.getLogger(__name__)
def parse_date(str_date: str) -> datetime:
    """Convert a PostgreSQL date string into a Python datetime object.

    The special value 'infinity' (used as the open upper bound of a triple's
    validity interval) maps to ``datetime.max``.

    NOTE(review): the format string consumes the trailing '+NN' of the input
    as the %f (microseconds) field. Presumably this is a workaround for
    timezone offsets that strptime cannot parse directly — confirm against
    the actual timestamp strings produced by the database.
    """
    return datetime.max if str_date == 'infinity' else datetime.strptime(str_date, '%Y-%m-%d %H:%M:%S+%f')
class MVCCPostgresConnector(PostgresConnector):
    """A MVCCPostgresConnector search for RDF triples in a PostgreSQL database using a timestamp-based multi-version concurrency control protocol.

    Args:
      * table_name: Name of the SQL table containing RDF data.
      * dbname: the database name.
      * user: user name used to authenticate.
      * password: password used to authenticate.
      * host: database host address (default to UNIX socket if not provided).
      * port: connection port number (default to 5432 if not provided).
      * fetch_size: The number of SQL rows/RDF triples to fetch per batch.
    """

    def __init__(self, table_name: str, dbname: str, user: str, password: str, host: str = '', port: int = 5432, fetch_size: int = 500):
        super(MVCCPostgresConnector, self).__init__(table_name, dbname, user, password, host, port, fetch_size)

    def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple[PostgresIterator, int]:
        """Get an iterator over all RDF triples matching a triple pattern.

        Args:
          * subject: Subject of the triple pattern.
          * predicate: Predicate of the triple pattern.
          * obj: Object of the triple pattern.
          * last_read: A RDF triple ID. When set, the search is resumed for this RDF triple.
          * as_of: A version timestamp. When set, perform all reads against a consistent snapshot represented by this timestamp.

        Returns:
            A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern.
        """
        # do warmup if necessary
        self.open()
        # format triple patterns for the PostgreSQL API:
        # SPARQL variables (terms starting with '?') become None
        subject = subject if (subject is not None) and (not subject.startswith('?')) else None
        predicate = predicate if (predicate is not None) and (not predicate.startswith('?')) else None
        obj = obj if (obj is not None) and (not obj.startswith('?')) else None
        pattern = {'subject': subject, 'predicate': predicate, 'object': obj}
        # pick a start transaction timestamp
        # NB: It will be overwritten if we reload a scan from a saved state
        timestamp = datetime.now() if as_of is None else as_of
        # dedicated cursor used to scan this triple pattern
        # WARNING: we need to use a dedicated cursor per triple pattern iterator
        # otherwise, we might reset a cursor whose results were not fully consumed
        if not self._manager.is_open():
            self._manager.open_connection()
        cursor = self._manager.get_connection().cursor(str(uuid4()))
        # create a SQL query to start a new index scan
        if last_read is None:
            start_query, start_params = get_start_query(subject, predicate, obj, self._table_name)
        else:
            # empty last_read key => the scan has already been completed
            if len(last_read) == 0:
                return EmptyIterator(pattern), 0
            # decode the saved state to get the timestamp & the last RDF triple read
            saved_state = json.loads(last_read)
            # parse ISO timestamps into datetime objects
            timestamp = datetime.fromisoformat(saved_state["ts"])
            last_ins_t = datetime.fromisoformat(saved_state["ins"])
            last_del_t = datetime.fromisoformat(saved_state["del"])
            last_triple = (saved_state["s"], saved_state["p"], saved_state["o"], last_ins_t, last_del_t)
            # create a SQL query to resume the index scan
            start_query, start_params = get_resume_query(subject, predicate, obj, last_triple, self._table_name)
        # create the iterator to yield the matching RDF triples
        iterator = PostgresIterator(cursor, timestamp, start_query, start_params, self._table_name, pattern, fetch_size=self._fetch_size)
        card = self._estimate_cardinality(subject, predicate, obj) if iterator.has_next() else 0
        return iterator, card

    def from_config(config: dict) -> PostgresConnector:
        """Build a MVCCPostgresConnector from a configuration object.

        The configuration object must contains the following fields: 'dbname', 'name', 'user' and 'password'.
        Optional fields are: 'host', 'port' and 'fetch_size'.
        """
        if 'dbname' not in config or 'name' not in config or 'user' not in config or 'password' not in config:
            raise SyntaxError('A valid configuration for a MVCC-PostgreSQL connector must contains the dbname, name, user and password fields')
        host = config['host'] if 'host' in config else ''
        port = config['port'] if 'port' in config else 5432
        fetch_size = config['fetch_size'] if 'fetch_size' in config else 500
        return MVCCPostgresConnector(config['name'], config['dbname'], config['user'], config['password'], host=host, port=port, fetch_size=fetch_size)

    def insert(self, subject: str, predicate: str, obj: str) -> None:
        """Insert a RDF triple into the RDF graph.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        # BUGFIX: the previous code executed the query on an undefined
        # `self._update_cursor` attribute (guaranteed AttributeError) and never
        # committed. Use the transaction cursor returned by the manager and
        # commit, like the catalog connector does.
        cursor = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            insert_query = get_insert_query(self._table_name)
            cursor.execute(insert_query, (subject, predicate, obj))
            self._manager.commit()

    def delete(self, subject: str, predicate: str, obj: str) -> None:
        """Delete a RDF triple from the RDF graph.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        # BUGFIX: same undefined `self._update_cursor` issue as in `insert`
        cursor = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            delete_query = get_delete_query(self._table_name)
            cursor.execute(delete_query, (subject, predicate, obj))
            self._manager.commit()
import json
import logging
import coloredlogs
from datetime import datetime
from math import ceil
from uuid import uuid4
from typing import Optional, Dict, Tuple
from psycopg2.extras import execute_values
from sage.database.db_iterator import EmptyIterator
from sage.database.postgres_backends.connector import PostgresConnector
from sage.database.postgres_backends.postgres_catalog.iterator import PostgresIterator
from sage.database.postgres_backends.postgres_catalog.queries import get_delete_query, get_insert_query, get_catalog_insert_many_query
from sage.database.postgres_backends.postgres_catalog.queries import get_start_query, get_resume_query
from sage.database.postgres_backends.postgres_catalog.queries import get_extract_query, get_locate_query
# Configure colored log output for this module and create its logger
coloredlogs.install(level='INFO', fmt='%(asctime)s - %(levelname)s %(message)s')
logger = logging.getLogger(__name__)
class CatalogPostgresConnector(PostgresConnector):
    """A CatalogPostgresConnector search for RDF triples in a PostgreSQL database.

    With this schema, RDF terms live in a separate catalog table and the
    triples table references them by identifier (see the locate/extract
    helper queries used below).

    Args:
      * table_name: Name of the SQL table containing RDF data.
      * dbname: the database name.
      * user: user name used to authenticate.
      * password: password used to authenticate.
      * host: database host address (defaults to UNIX socket if not provided).
      * port: connection port number (defaults to 5432 if not provided).
      * fetch_size: The number of SQL rows/RDF triples to fetch per batch (defaults to 500).
    """

    def __init__(self, table_name: str, dbname: str, user: str, password: str, host: str = '', port: int = 5432, fetch_size: int = 500):
        super(CatalogPostgresConnector, self).__init__(table_name, dbname, user, password, host, port, fetch_size)

    def _fetch_histograms(self, cursor, table_name: str, attribute_name: str) -> Tuple[int, int, Dict[str, float], int]:
        """Download PostgreSQL histograms from a given table and attribute when using a catalog schema.

        Args:
          * cursor: A psycopg cursor.
          * table_name: Name of the SQL table from which we should retrieve histograms.
          * attribute_name: Table attribute from which we should retrieve histograms.

        Returns:
            A tuple (`null_frac`, `n_distinct`, `selectivities`, `sum_most_common_freqs`) where:
            * `null_frac` is the fraction of null values in the histogram.
            * `n_distinct` is the estimated number of distinct values for this attribute.
            * `selectivities` is the estimated selectivities of the attribute's values in the table.
            * `sum_most_common_freqs` is the num of the frequencies of the most common values for this attribute.
        """
        base_query = f"SELECT null_frac, n_distinct, most_common_vals, most_common_freqs FROM pg_stats WHERE tablename = '{table_name}' AND attname = '{attribute_name}'"
        cursor.execute(base_query)
        record = cursor.fetchone()
        null_frac, n_distinct, most_common_vals, most_common_freqs = record
        # build selectivity table
        # NOTE: with a catalog schema, pg_stats holds term *identifiers*, so
        # each one is translated back to its RDF term via the extract query
        selectivities = {}
        cpt = 0
        for common_val_identifier in most_common_vals[1:-1].split(","):
            if cpt < len(most_common_freqs):
                # NOTE(review): the parameter is wrapped as [(id,)] here, while
                # other call sites pass a flat list — presumably the extract
                # query expects a row value; confirm against get_extract_query
                cursor.execute(get_extract_query(), [(common_val_identifier, )])
                result = cursor.fetchone()
                # unknown identifiers map to "" (several misses collapse onto one key)
                common_val = "" if result is None else result[0]
                selectivities[common_val] = most_common_freqs[cpt]
            cpt += 1
        return (null_frac, n_distinct, selectivities, sum(most_common_freqs))

    def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple[PostgresIterator, int]:
        """Get an iterator over all RDF triples matching a triple pattern.

        Args:
          * subject: Subject of the triple pattern.
          * predicate: Predicate of the triple pattern.
          * obj: Object of the triple pattern.
          * last_read: A RDF triple ID. When set, the search is resumed for this RDF triple.
          * as_of: A version timestamp. When set, perform all reads against a consistent snapshot represented by this timestamp.
            NOTE(review): `as_of` is accepted but never used in this method's body — confirm whether snapshot reads are intended for this connector.

        Returns:
            A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern.
        """
        # do warmup if necessary
        self.open()
        # format triple patterns for the PostgreSQL API:
        # SPARQL variables (terms starting with '?') become None
        subject = subject if (subject is not None) and (not subject.startswith('?')) else None
        predicate = predicate if (predicate is not None) and (not predicate.startswith('?')) else None
        obj = obj if (obj is not None) and (not obj.startswith('?')) else None
        pattern = {'subject': subject, 'predicate': predicate, 'object': obj}
        # dedicated cursor used to scan this triple pattern
        # WARNING: we need to use a dedicated cursor per triple pattern iterator.
        # Otherwise, we might reset a cursor whose results were not fully consumed.
        cursor = self._manager.get_connection().cursor(str(uuid4()))
        # create a SQL query to start a new index scan
        if last_read is None:
            start_query, start_params = get_start_query(subject, predicate, obj, self._table_name)
        else:
            # empty last_read key => the scan has already been completed
            if len(last_read) == 0:
                return EmptyIterator(pattern), 0
            # otherwise, create a SQL query to resume the index scan
            last_read = json.loads(last_read)
            t = (last_read["s"], last_read["p"], last_read["o"])
            start_query, start_params = get_resume_query(subject, predicate, obj, t, self._table_name)
        # create the iterator to yield the matching RDF triples
        iterator = PostgresIterator(cursor, self._manager.get_connection(), start_query, start_params, pattern, fetch_size=self._fetch_size)
        card = self._estimate_cardinality(subject, predicate, obj) if iterator.has_next() else 0
        return iterator, card

    def from_config(config: dict) -> PostgresConnector:
        """Build a CatalogPostgresConnector from a configuration object.

        The configuration object must contains the following fields: 'dbname', 'name', 'user' and 'password'.
        Optional fields are: 'host', 'port' and 'fetch_size'.
        """
        if 'dbname' not in config or 'name' not in config or 'user' not in config or 'password' not in config:
            raise SyntaxError('A valid configuration for a PostgreSQL connector must contains the dbname, user and password fields')
        host = config['host'] if 'host' in config else ''
        port = config['port'] if 'port' in config else 5432
        fetch_size = config['fetch_size'] if 'fetch_size' in config else 500
        return CatalogPostgresConnector(config['name'], config['dbname'], config['user'], config['password'], host=host, port=port, fetch_size=fetch_size)

    def insert(self, subject: str, predicate: str, obj: str) -> None:
        """Insert a RDF triple into the RDF graph.

        The triple's terms are first upserted into the catalog table, then the
        triple itself is stored as a row of term identifiers.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            # Insert triple terms into a PostgreSQL database
            insert_query = get_catalog_insert_many_query()
            # a dict deduplicates identical terms (e.g., subject == object)
            # while preserving insertion order
            values = dict()
            values[subject] = 0
            values[predicate] = 0
            values[obj] = 0
            execute_values(transaction, insert_query, [ [x] for x in values.keys() ])
            # Retrieve inserted RDF terms identifier
            select_id_query = get_locate_query()
            transaction.execute(select_id_query, [subject])
            subject_id = transaction.fetchone()[0]
            transaction.execute(select_id_query, [predicate])
            predicate_id = transaction.fetchone()[0]
            transaction.execute(select_id_query, [obj])
            obj_id = transaction.fetchone()[0]
            # Insert a new RDF triple into a PostgreSQL database
            insert_query = get_insert_query(self._table_name)
            transaction.execute(insert_query, (subject_id, predicate_id, obj_id))
            self._manager.commit()

    def delete(self, subject: str, predicate: str, obj: str) -> None:
        """Delete a RDF triple from the RDF graph.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            delete_query = get_delete_query(self._table_name)
            transaction.execute(delete_query, (subject, predicate, obj))
            self._manager.commit()
import json
from typing import Optional, List, Dict, Tuple
from sage.database.db_iterator import DBIterator
class PostgresIterator(DBIterator):
    """A PostgresIterator fetches RDF triples from a PostgreSQL table using batch queries and lazy loading.

    Args:
      * cursor: A psycopg cursor. This cursor must only be used for this iterator, to avoid side-effects.
      * connection: A psycopg connection.
      * start_query: Prepared SQL query executed to fetch RDF triples as SQL rows.
      * start_params: Parameters to use with the prepared SQL query.
      * pattern: Triple pattern scanned.
      * fetch_size: The number of SQL rows/RDF triples to fetch per batch.
    """

    def __init__(self, cursor, connection, start_query: str, start_params: List[str], pattern: Dict[str, str], fetch_size: int = 500):
        super(PostgresIterator, self).__init__(pattern)
        self._cursor = cursor
        self._connection = connection
        self._current_query = start_query
        self._fetch_size = fetch_size
        # launch the index scan and prefetch a single row, so that has_next()
        # and last_read() can be answered right after construction
        self._cursor.execute(self._current_query, start_params)
        self._last_reads = self._cursor.fetchmany(size=1)

    def __del__(self) -> None:
        """Destructor (close the database cursor)"""
        self._cursor.close()

    def last_read(self) -> str:
        """Return the index ID of the last element read.

        The ID of a RDF triple is the triple itself, serialized in compact JSON.
        An empty string means the scan has been fully consumed.
        """
        if not self.has_next():
            return ''
        triple = self._last_reads[0]
        return json.dumps({
            's': triple[0],
            'p': triple[1],
            'o': triple[2]
        }, separators=(',', ':'))

    def next(self) -> Optional[Dict[str, str]]:
        """Return the next solution mapping or None if there are no more solutions"""
        if not self.has_next():
            return None
        return self._last_reads.pop(0)

    def has_next(self) -> bool:
        """Return True if there is still results to read, False otherwise"""
        # refill the local buffer with a batch query before giving up
        if len(self._last_reads) == 0:
            self._last_reads = self._cursor.fetchmany(size=self._fetch_size)
        return len(self._last_reads) > 0
import json
import logging
import coloredlogs
from datetime import datetime
from math import ceil
from typing import Optional, List, Dict, Tuple
from uuid import uuid4
from time import time
from sage.database.db_iterator import EmptyIterator
from sage.database.postgres_backends.connector import PostgresConnector
from sage.database.postgres_backends.postgres.iterator import PostgresIterator
from sage.database.postgres_backends.postgres.queries import get_delete_query, get_insert_query
from sage.database.postgres_backends.postgres.queries import get_start_query, get_resume_query
coloredlogs.install(level='INFO', fmt='%(asctime)s - %(levelname)s %(message)s')
logger = logging.getLogger(__name__)
class DefaultPostgresConnector(PostgresConnector):
    """A DefaultPostgresConnector search for RDF triples in a PostgreSQL database.

    Args:
      * table_name: Name of the SQL table containing RDF data.
      * dbname: the database name.
      * user: user name used to authenticate.
      * password: password used to authenticate.
      * host: database host address (defaults to UNIX socket if not provided).
      * port: connection port number (defaults to 5432 if not provided).
      * fetch_size: The number of SQL rows/RDF triples to fetch per batch (defaults to 500).
    """

    def __init__(self, table_name: str, dbname: str, user: str, password: str, host: str = '', port: int = 5432, fetch_size: int = 500):
        super(DefaultPostgresConnector, self).__init__(table_name, dbname, user, password, host, port, fetch_size)

    def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple[PostgresIterator, int]:
        """Get an iterator over all RDF triples matching a triple pattern.

        Args:
          * subject: Subject of the triple pattern.
          * predicate: Predicate of the triple pattern.
          * obj: Object of the triple pattern.
          * last_read: A RDF triple ID. When set, the search is resumed for this RDF triple.
          * as_of: A version timestamp. When set, perform all reads against a consistent snapshot represented by this timestamp.

        Returns:
            A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern.
        """
        # do warmup if necessary
        self.open()
        # format triple patterns for the PostgreSQL API:
        # an unbound term (None or a '?variable') becomes None
        subject = subject if (subject is not None) and (not subject.startswith('?')) else None
        predicate = predicate if (predicate is not None) and (not predicate.startswith('?')) else None
        obj = obj if (obj is not None) and (not obj.startswith('?')) else None
        pattern = {'subject': subject, 'predicate': predicate, 'object': obj}
        # dedicated cursor used to scan this triple pattern
        # WARNING: we need to use a dedicated cursor per triple pattern iterator.
        # Otherwise, we might reset a cursor whose results were not fully consumed.
        cursor = self._manager.get_connection().cursor(str(uuid4()))
        # create a SQL query to start a new index scan
        if last_read is None:
            start_query, start_params = get_start_query(subject, predicate, obj, self._table_name)
        else:
            # empty last_read key => the scan has already been completed
            if len(last_read) == 0:
                return EmptyIterator(pattern), 0
            # otherwise, create a SQL query to resume the index scan
            last_read = json.loads(last_read)
            t = (last_read["s"], last_read["p"], last_read["o"])
            start_query, start_params = get_resume_query(subject, predicate, obj, t, self._table_name)
        # create the iterator to yield the matching RDF triples
        iterator = PostgresIterator(cursor, self._manager.get_connection(), start_query, start_params, pattern, fetch_size=self._fetch_size)
        card = self._estimate_cardinality(subject, predicate, obj) if iterator.has_next() else 0
        return iterator, card

    @staticmethod
    def from_config(config: dict) -> PostgresConnector:
        """Build a DefaultPostgresConnector from a configuration object.

        The configuration object must contains the following fields: 'dbname', 'name', 'user' and 'password'.
        Optional fields are: 'host', 'port' and 'fetch_size'.

        Raises:
            SyntaxError: If a mandatory field is missing from the configuration.
        """
        if 'dbname' not in config or 'name' not in config or 'user' not in config or 'password' not in config:
            # bugfix: the message now lists every field actually checked above
            raise SyntaxError('A valid configuration for a PostgreSQL connector must contain the name, dbname, user and password fields')
        host = config['host'] if 'host' in config else ''
        port = config['port'] if 'port' in config else 5432
        fetch_size = config['fetch_size'] if 'fetch_size' in config else 500
        return DefaultPostgresConnector(config['name'], config['dbname'], config['user'], config['password'], host=host, port=port, fetch_size=fetch_size)

    def insert(self, subject: str, predicate: str, obj: str) -> None:
        """Insert a RDF triple into the RDF graph.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        # insertion is a no-op unless the triple is fully bound
        if subject is not None and predicate is not None and obj is not None:
            insert_query = get_insert_query(self._table_name)
            transaction.execute(insert_query, (subject, predicate, obj))
        self._manager.commit()

    def delete(self, subject: str, predicate: str, obj: str) -> None:
        """Delete a RDF triple from the RDF graph.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        # deletion is a no-op unless the triple is fully bound
        if subject is not None and predicate is not None and obj is not None:
            delete_query = get_delete_query(self._table_name)
            transaction.execute(delete_query, (subject, predicate, obj))
        self._manager.commit()
import json
import uuid
import logging
from math import ceil
from time import time
from functools import reduce
from typing import Dict, List, Optional, Tuple
from sage.database.db_connector import DatabaseConnector
from sage.database.db_iterator import DBIterator, EmptyIterator
from sage.database.sqlite_backends.transaction_manager import TransactionManager
from sage.database.utils import get_kind
class SQliteConnector(DatabaseConnector):
    """
    A SQliteConnector search for RDF triples in a SQlite database.

    Constructor arguments:
    - table_name `str`: Name of the SQL table containing RDF data.
    - database `str`: the name of the sqlite database file.
    - fetch_size `int`: how many RDF triples are fetched per SQL query (default to 500)
    """

    def __init__(self, table_name: str, database: str, fetch_size: int = 500):
        super(SQliteConnector, self).__init__()
        self._table_name = table_name
        self._manager = TransactionManager(database)
        self._fetch_size = fetch_size
        # True until the first call to open(), which loads the statistics below
        self._warmup = True
        # Data used for cardinality estimation.
        # They are initialized using SQlite statistics, after the 1st connection to the DB.
        self._spo_index_stats = {
            'row_count': 0,
            'same_s_row_count': 0,
            'same_sp_row_count': 0,
            'same_spo_row_count': 0
        }
        self._pos_index_stats = {
            'row_count': 0,
            'same_p_row_count': 0,
            'same_po_row_count': 0,
            'same_pos_row_count': 0
        }
        self._osp_index_stats = {
            'row_count': 0,
            'same_o_row_count': 0,
            'same_os_row_count': 0,
            'same_osp_row_count': 0
        }

    @staticmethod
    def __fetch_index_stats(cursor, index_name: str, keys: List[str]) -> Dict[str, int]:
        """Fetch the sqlite_stat1 statistics of a SQL index and map them, in order, to `keys`."""
        cursor.execute(f'SELECT stat FROM sqlite_stat1 WHERE idx = \'{index_name}\'')
        stats = cursor.fetchone()[0].split(' ')
        return {key: int(stat) for key, stat in zip(keys, stats)}

    def open(self):
        """Open the database connection and, on first call, load index statistics."""
        # NOTE(review): the connection is opened when is_open() is True, which reads
        # inverted — presumably TransactionManager.is_open() reports that the
        # connection still needs opening. Confirm against TransactionManager.
        if self._manager.is_open():
            self._manager.open_connection()
        # Do warmup phase if required, i.e., fetch stats for query execution
        if self._warmup:
            cursor = self._manager.start_transaction()
            # fetch SPO, POS and OSP index statistics
            self._spo_index_stats = self.__fetch_index_stats(
                cursor, f'{self._table_name}_spo_index',
                ['row_count', 'same_s_row_count', 'same_sp_row_count', 'same_spo_row_count'])
            self._pos_index_stats = self.__fetch_index_stats(
                cursor, f'{self._table_name}_pos_index',
                ['row_count', 'same_p_row_count', 'same_po_row_count', 'same_pos_row_count'])
            self._osp_index_stats = self.__fetch_index_stats(
                cursor, f'{self._table_name}_osp_index',
                ['row_count', 'same_o_row_count', 'same_os_row_count', 'same_osp_row_count'])
            # commit & close cursor
            self._manager.commit()
            self._warmup = False

    def close(self):
        """Close the database connection"""
        # commit, then close the cursor and the connection
        self._manager.close_connection()

    def start_transaction(self):
        """Start a SQlite transaction"""
        self._manager.start_transaction()

    def commit_transaction(self):
        """Commit any ongoing transaction"""
        self._manager.commit()

    def abort_transaction(self):
        """Abort any ongoing transaction"""
        self._manager.abort()

    def _estimate_cardinality(self, subject, predicate, obj) -> int:
        """
        Estimate the cardinality of a triple pattern using SQlite statistics.

        Args:
          - subject ``string`` - Subject of the triple pattern
          - predicate ``string`` - Predicate of the triple pattern
          - obj ``string`` - Object of the triple pattern

        Returns:
            The estimated cardinality of the triple pattern

        Raises:
            Exception: If the triple pattern kind is not recognized.
        """
        # estimate triple cardinality using sqlite statistics (more or less a variable counting join ordering)
        kind = get_kind(subject, predicate, obj)
        if kind == 'spo':
            return self._spo_index_stats['same_spo_row_count']
        elif kind == '???':
            return self._spo_index_stats['row_count']
        elif kind == 's??':
            return self._spo_index_stats['same_s_row_count']
        elif kind == 'sp?':
            return self._spo_index_stats['same_sp_row_count']
        elif kind == '?p?':
            return self._pos_index_stats['same_p_row_count']
        elif kind == '?po':
            return self._pos_index_stats['same_po_row_count']
        elif kind == 's?o':
            return self._osp_index_stats['same_os_row_count']
        elif kind == '??o':
            return self._osp_index_stats['same_o_row_count']
        else:
            raise Exception(f"Unknown pattern type: {kind}")
import json
import logging
import coloredlogs
from math import ceil
from time import time
from datetime import datetime
from functools import reduce
from typing import Optional, List, Dict, Tuple
from sage.database.utils import get_kind
from sage.database.db_connector import DatabaseConnector
from sage.database.db_iterator import EmptyIterator
from sage.database.sqlite_backends.connector import SQliteConnector
from sage.database.sqlite_backends.sqlite.iterator import SQliteIterator
from sage.database.sqlite_backends.sqlite.queries import get_start_query, get_resume_query
from sage.database.sqlite_backends.sqlite.queries import get_insert_query, get_delete_query
coloredlogs.install(level='INFO', fmt='%(asctime)s - %(levelname)s %(message)s')
logger = logging.getLogger(__name__)
class DefaultSQliteConnector(SQliteConnector):
    """
    A DefaultSQliteConnector search for RDF triples in a SQlite database where triples are stored in one SQL table.

    Constructor arguments:
    - table_name `str`: Name of the SQL table containing RDF data.
    - database `str`: the name of the sqlite database file.
    - fetch_size `int`: how many RDF triples are fetched per SQL query (default to 500)
    """

    def __init__(self, table_name: str, database: str, fetch_size: int = 500):
        super(DefaultSQliteConnector, self).__init__(table_name, database, fetch_size)

    def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple[SQliteIterator, int]:
        """
        Get an iterator over all RDF triples matching a triple pattern.

        Args:
          - subject ``string`` - Subject of the triple pattern
          - predicate ``string`` - Predicate of the triple pattern
          - obj ``string`` - Object of the triple pattern
          - last_read ``string=None`` ``optional`` - OFFSET ID used to resume scan
          - as_of ``datetime=None`` ``optional`` - Perform all reads against a consistent snapshot represented by a timestamp.

        Returns:
            A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern
        """
        # do warmup if necessary
        self.open()
        # format triple patterns for the SQlite API:
        # an unbound term (None or a '?variable') becomes None
        subject = subject if (subject is not None) and (not subject.startswith('?')) else None
        predicate = predicate if (predicate is not None) and (not predicate.startswith('?')) else None
        obj = obj if (obj is not None) and (not obj.startswith('?')) else None
        pattern = {'subject': subject, 'predicate': predicate, 'object': obj}
        # dedicated cursor used to scan this triple pattern
        # WARNING: we need to use a dedicated cursor per triple pattern iterator.
        # Otherwise, we might reset a cursor whose results were not fully consumed.
        cursor = self._manager.get_connection().cursor()
        # create a SQL query to start a new index scan
        if last_read is None:
            start_query, start_params = get_start_query(subject, predicate, obj, self._table_name)
        else:
            # empty last_read key => the scan has already been completed
            if len(last_read) == 0:
                return EmptyIterator(pattern), 0
            # otherwise, create a SQL query to resume the index scan
            last_read = json.loads(last_read)
            t = (last_read["s"], last_read["p"], last_read["o"])
            start_query, start_params = get_resume_query(subject, predicate, obj, t, self._table_name)
        # create the iterator to yield the matching RDF triples
        iterator = SQliteIterator(
            cursor, self._manager.get_connection(),
            start_query, start_params,
            self._table_name,
            pattern,
            fetch_size=self._fetch_size)
        card = self._estimate_cardinality(subject, predicate, obj) if iterator.has_next() else 0
        return iterator, card

    @staticmethod
    def from_config(config: dict) -> SQliteConnector:
        """Build a DefaultSQliteConnector from a configuration object.

        Raises:
            SyntaxError: If the mandatory 'database' field is missing.
        """
        if 'database' not in config:
            raise SyntaxError(
                'A valid configuration for a SQlite connector must contain the database file')
        table_name = config['name']
        database = config['database']
        fetch_size = config['fetch_size'] if 'fetch_size' in config else 500
        return DefaultSQliteConnector(table_name, database, fetch_size=fetch_size)

    def insert(self, subject: str, predicate: str, obj: str) -> None:
        """
        Insert a RDF triple into the RDF Graph.
        Insertion is a no-op unless the triple is fully bound.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            insert_query = get_insert_query(self._table_name)
            transaction.execute(insert_query, (subject, predicate, obj))
        self._manager.commit()

    def delete(self, subject: str, predicate: str, obj: str) -> None:
        """
        Delete a RDF triple from the RDF Graph.
        Deletion is a no-op unless the triple is fully bound.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            delete_query = get_delete_query(self._table_name)
            transaction.execute(delete_query, (subject, predicate, obj))
        self._manager.commit()
import json
import logging
import coloredlogs
from math import ceil
from time import time
from datetime import datetime
from functools import reduce
from sage.database.utils import get_kind
from sage.database.db_connector import DatabaseConnector
from sage.database.db_iterator import EmptyIterator
from sage.database.sqlite_backends.connector import SQliteConnector
from sage.database.sqlite_backends.sqlite_catalog.iterator import SQliteIterator
from sage.database.sqlite_backends.sqlite_catalog.queries import get_start_query, get_resume_query
from sage.database.sqlite_backends.sqlite_catalog.queries import get_insert_query, get_delete_query, get_catalog_insert_query
from sage.database.sqlite_backends.sqlite_catalog.queries import get_locate_query
coloredlogs.install(level='INFO', fmt='%(asctime)s - %(levelname)s %(message)s')
logger = logging.getLogger(__name__)
class CatalogSQliteConnector(SQliteConnector):
    """
    A CatalogSQliteConnector search for RDF triples in a SQlite database.
    RDF terms are stored in a catalog table and triples are stored as rows of catalog identifiers.

    Constructor arguments:
    - table_name `str`: Name of the SQL table containing RDF data.
    - database `str`: the name of the sqlite database file.
    - fetch_size `int`: how many RDF triples are fetched per SQL query (default to 500)
    """

    def __init__(self, table_name, database, fetch_size=500):
        super(CatalogSQliteConnector, self).__init__(table_name, database, fetch_size)

    def __get_identifiers(self, cursor, terms):
        """Map RDF terms to their catalog identifiers (-1 for terms absent from the catalog)."""
        identified_terms = list()
        for term in terms:
            result = cursor.execute(get_locate_query(), [term]).fetchone()
            identified_terms.append(-1 if result is None else result[0])
        return identified_terms

    def search(self, subject, predicate, obj, last_read=None, as_of=None):
        """
        Get an iterator over all RDF triples matching a triple pattern.

        Args:
          - subject ``string`` - Subject of the triple pattern
          - predicate ``string`` - Predicate of the triple pattern
          - obj ``string`` - Object of the triple pattern
          - last_read ``string=None`` ``optional`` - OFFSET ID used to resume scan
          - as_of ``datetime=None`` ``optional`` - Perform all reads against a consistent snapshot represented by a timestamp.

        Returns:
            A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern
        """
        # do warmup if necessary
        self.open()
        # format triple patterns for the SQlite API:
        # an unbound term (None or a '?variable') becomes None
        subject = subject if (subject is not None) and (not subject.startswith('?')) else None
        predicate = predicate if (predicate is not None) and (not predicate.startswith('?')) else None
        obj = obj if (obj is not None) and (not obj.startswith('?')) else None
        pattern = {'subject': subject, 'predicate': predicate, 'object': obj}
        # dedicated cursor used to scan this triple pattern
        # WARNING: we need to use a dedicated cursor per triple pattern iterator.
        # Otherwise, we might reset a cursor whose results were not fully consumed.
        cursor = self._manager.get_connection().cursor()
        # create a SQL query to start a new index scan
        if last_read is None:
            start_query, start_params = get_start_query(subject, predicate, obj, self._table_name)
        else:
            # empty last_read key => the scan has already been completed
            if len(last_read) == 0:
                return EmptyIterator(pattern), 0
            # otherwise, create a SQL query to resume the index scan
            last_read = json.loads(last_read)
            t = (last_read["s"], last_read["p"], last_read["o"])
            start_query, start_params = get_resume_query(subject, predicate, obj, t, self._table_name)
        # the SQL query operates on catalog identifiers, not raw RDF terms
        start_params = self.__get_identifiers(cursor, start_params)
        # create the iterator to yield the matching RDF triples
        iterator = SQliteIterator(
            cursor, self._manager.get_connection(),
            start_query, start_params,
            self._table_name,
            pattern,
            fetch_size=self._fetch_size)
        card = self._estimate_cardinality(subject, predicate, obj) if iterator.has_next() else 0
        return iterator, card

    @staticmethod
    def from_config(config: dict) -> SQliteConnector:
        """Build a CatalogSQliteConnector from a configuration object.

        Raises:
            SyntaxError: If the mandatory 'database' field is missing.
        """
        if 'database' not in config:
            raise SyntaxError(
                'A valid configuration for a SQlite connector must contain the database file')
        table_name = config['name']
        database = config['database']
        fetch_size = config['fetch_size'] if 'fetch_size' in config else 500
        return CatalogSQliteConnector(table_name, database, fetch_size=fetch_size)

    def insert(self, subject: str, predicate: str, obj: str) -> None:
        """
        Insert a RDF triple into the RDF Graph.
        Insertion is a no-op unless the triple is fully bound.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            # register the RDF terms into the catalog
            # (dict.fromkeys de-duplicates terms while preserving insertion order)
            insert_query = get_catalog_insert_query()
            terms = list(dict.fromkeys((subject, predicate, obj)))
            transaction.executemany(insert_query, [[term] for term in terms])
            # retrieve the catalog identifier assigned to each RDF term
            select_id_query = get_locate_query()
            transaction.execute(select_id_query, [subject])
            subject_id = transaction.fetchone()[0]
            transaction.execute(select_id_query, [predicate])
            predicate_id = transaction.fetchone()[0]
            transaction.execute(select_id_query, [obj])
            obj_id = transaction.fetchone()[0]
            # insert the new RDF triple as a row of term identifiers
            insert_query = get_insert_query(self._table_name)
            transaction.execute(insert_query, (subject_id, predicate_id, obj_id))
        self._manager.commit()

    def delete(self, subject: str, predicate: str, obj: str) -> None:
        """
        Delete a RDF triple from the RDF Graph.
        Deletion is a no-op unless the triple is fully bound.
        """
        # do warmup if necessary, then start a new transaction
        self.open()
        transaction = self._manager.start_transaction()
        if subject is not None and predicate is not None and obj is not None:
            delete_query = get_delete_query(self._table_name)
            transaction.execute(delete_query, (subject, predicate, obj))
        self._manager.commit()
import os.path
from typing import Optional, Tuple
from hdt import HDTDocument
from sage.database.db_connector import DatabaseConnector
from sage.database.hdt.iterator import HDTIterator
from datetime import datetime
class HDTFileConnector(DatabaseConnector):
    """A HDTFileConnector search for RDF triples in a HDT file.

    Args:
      * file: Path to the HDT file.
      * mapped: True maps the HDT file on disk (faster), False loads everything in memory.
      * indexed: True if the HDT must be loaded with indexes, False otherwise.
    """

    def __init__(self, file: str, mapped=True, indexed=True):
        super(HDTFileConnector, self).__init__()
        self._hdt = HDTDocument(file, map=mapped, indexed=indexed)

    def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple[HDTIterator, int]:
        """Get an iterator over all RDF triples matching a triple pattern.

        Args:
          * subject: Subject of the triple pattern.
          * predicate: Predicate of the triple pattern.
          * obj: Object of the triple pattern.
          * last_read: A RDF triple ID. When set, the search is resumed for this RDF triple.
          * as_of: A version timestamp. When set, perform all reads against a consistent snapshot represented by this timestamp.

        Returns:
            A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern.
        """
        # format triple patterns for the HDT API:
        # an unbound term (None or a '?variable') becomes an empty string
        subject = subject if (subject is not None) and (not subject.startswith('?')) else ""
        predicate = predicate if (predicate is not None) and (not predicate.startswith('?')) else ""
        obj = obj if (obj is not None) and (not obj.startswith('?')) else ""
        # convert None & empty string to offset = 0
        offset = 0 if last_read is None or last_read == '' else int(float(last_read))
        pattern = {'subject': subject, 'predicate': predicate, 'object': obj}
        iterator, card = self._hdt.search_triples(subject, predicate, obj, offset=offset)
        return HDTIterator(iterator, pattern, start_offset=offset), card

    @property
    def nb_triples(self) -> int:
        """Get the number of RDF triples in the database"""
        return self._hdt.total_triples

    @property
    def nb_subjects(self) -> int:
        """Get the number of subjects in the database"""
        return self._hdt.nb_subjects

    @property
    def nb_predicates(self) -> int:
        """Get the number of predicates in the database"""
        return self._hdt.nb_predicates

    @property
    def nb_objects(self) -> int:
        """Get the number of objects in the database"""
        return self._hdt.nb_objects

    @staticmethod
    def from_config(config: dict):
        """Build a HDTFileFactory from a configuration object.

        Args:
          * config: configuration object. Must contains the 'file' field.

        Raises:
            SyntaxError: If the mandatory 'file' field is missing.
            Exception: If the HDT file does not exist.

        Example:
          >>> config = { "file": "./dbpedia.hdt" }
          >>> connector = HDTFileConnector.from_config(config)
          >>> print(f"The HDT file contains {connector.nb_triples} RDF triples")
        """
        # explicit check, consistent with the other connectors
        # (previously a missing key raised a bare KeyError)
        if 'file' not in config:
            raise SyntaxError('A valid configuration for a HDT connector must contain the file field')
        if not os.path.isfile(config["file"]):
            raise Exception(f"HDT file not found: {config['file']}")
        mapped = config['mapped'] if 'mapped' in config else True
        indexed = config['indexed'] if 'indexed' in config else True
        return HDTFileConnector(config["file"], mapped=mapped, indexed=indexed)
import happybase
from os import getpid
from datetime import datetime
from typing import Optional, Tuple
from sage.database.db_connector import DatabaseConnector
from sage.database.hbase.iterator import HBaseIterator
from sage.database.hbase.utils import build_row_key
from sage.database.estimators import pattern_shape_estimate
from sage.database.utils import get_kind
def find_triples(connection, s, p, o):
    """Select the HBase table and start row key used to evaluate a triple pattern.

    The pattern kind determines which index table (spo, pos or osp) is scanned,
    and the start key is built from the terms in that table's key ordering.

    Args:
      * connection: A happybase connection.
      * s, p, o: Subject, predicate and object of the triple pattern.

    Returns:
        A tuple (`table`, `start_key`) where `table` is the happybase table to scan
        and `start_key` is the row key at which the scan starts.

    Raises:
        Exception: If the triple pattern kind is not recognized.
    """
    table = None
    start_key = ''
    kind = get_kind(s, p, o)
    if kind == 'spo' or kind == 's??' or kind == 'sp?':
        table = connection.table('spo')
        start_key = build_row_key(s, p, o)
    elif kind == '???':
        # a full scan can use any index table; SPO is picked arbitrarily
        table = connection.table('spo')
    elif kind == '?p?' or kind == '?po':
        table = connection.table('pos')
        start_key = build_row_key(p, o, s)
    elif kind == 's?o' or kind == '??o':
        table = connection.table('osp')
        start_key = build_row_key(o, s, p)
    else:
        raise Exception(f"Unknown pattern type: {kind}")
    return table, start_key
def resume_triples(connection, last_read, s, p, o):
    """Resume the evaluation of a triple pattern from a previously read RDF triple.

    Args:
      * connection: A happybase connection.
      * last_read: Row key of the last RDF triple read by the previous scan.
      * s, p, o: Subject, predicate and object of the triple pattern.

    Returns:
        A tuple (`table`, `start_key`) where `table` is the same index table that
        `find_triples` selects for this pattern kind, and `start_key` is `last_read`.

    Raises:
        Exception: If the triple pattern kind is not recognized.
    """
    kind = get_kind(s, p, o)
    if kind == '???' or kind == 'spo' or kind == 's??' or kind == 'sp?':
        table = connection.table('spo')
    elif kind == '?p?' or kind == '?po':
        table = connection.table('pos')
    elif kind == 's?o' or kind == '??o':
        table = connection.table('osp')
    else:
        raise Exception(f"Unknown pattern type: {kind}")
    return table, last_read
class HBaseConnector(DatabaseConnector):
"""A HBaseConnector allows SaGe to query RDF data stored in Apache HBase"""
def __init__(self, graph_name: int, thrift_host: str, thrift_port: int = 9090):
super(HBaseConnector, self).__init__()
self._graph_name = graph_name
self._thrift_host = thrift_host
self._thrift_port = thrift_port
self._connection = happybase.Connection(self._thrift_host, protocol="compact", transport="framed", port=self._thrift_port, table_prefix=self._graph_name)
# batches used to perform updates
self._spo_batch = None
self._pos_batch = None
self._osp_batch = None
def __del__(self) -> None:
self.close()
def open(self):
pass
def close(self):
self._connection.close()
def commit(self):
"""Commit update batches"""
if self._spo_batch is not None:
self._spo_batch.send()
if self._pos_batch is not None:
self._pos_batch.send()
if self._osp_batch is not None:
self._osp_batch.send()
# reset batches
self._spo_batch = None
self._pos_batch = None
self._osp_batch = None
def __init__batches(self):
if self._spo_batch is None:
self._spo_batch = self._connection.table('spo').batch()
if self._pos_batch is None:
self._pos_batch = self._connection.table('pos').batch()
if self._osp_batch is None:
self._osp_batch = self._connection.table('osp').batch()
def __refresh_connection(self):
try:
list(self._connection.table('spo').scan(limit=1))
except:
self._connection = happybase.Connection(self._thrift_host, protocol="compact", transport="framed", port=self._thrift_port, table_prefix=self._graph_name)
def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple[HBaseIterator, int]:
"""Get an iterator over all RDF triples matching a triple pattern.
Args:
* subject ``string`` - Subject of the triple pattern
* predicate ``string`` - Predicate of the triple pattern
* object ``string`` - Object of the triple pattern
* last_read ``string=None`` ``optional`` - OFFSET ID used to resume scan
* as_of: A version timestamp. When set, perform all reads against a consistent snapshot represented by this timestamp.
Returns:
A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern
"""
subject = subject if (subject is not None) and (not subject.startswith('?')) else None
predicate = predicate if (predicate is not None) and (not predicate.startswith('?')) else None
obj = obj if (obj is not None) and (not obj.startswith('?')) else None
pattern = {'subject': subject, 'predicate': predicate, 'object': obj}
self.__refresh_connection()
if last_read is None:
(table, row_key) = find_triples(self._connection, subject, predicate, obj)
else:
(table, row_key) = resume_triples(self._connection, last_read, subject, predicate, obj)
iterator = HBaseIterator(self._connection, table, row_key, pattern)
card = pattern_shape_estimate(subject, predicate, obj) if iterator.has_next() else 0
return iterator, card
def insert(self, s: str, p: str, o: str) -> None:
"""Insert a RDF triple into the database"""
self.__init__batches()
columns = {
b'rdf:subject': s.encode('utf-8'),
b'rdf:predicate': p.encode('utf-8'),
b'rdf:object': o.encode('utf-8')
}
spo_key = build_row_key(s, p, o)
pos_key = build_row_key(p, o, s)
osp_key = build_row_key(o, s, p)
self._spo_batch.put(spo_key, columns)
self._pos_batch.put(pos_key, columns)
self._spo_batch.put(osp_key, columns)
def delete(self, s: str, p: str, o: str) -> None:
"""Delete a RDF triple from the database"""
self.__init__batches()
spo_key = build_row_key(s, p, o)
pos_key = build_row_key(p, o, s)
osp_key = build_row_key(o, s, p)
self._spo_batch.delete(spo_key)
self._pos_batch.delete(pos_key)
self._spo_batch.delete(osp_key)
def from_config(config: dict) -> "DatabaseConnector":
    """Build a HBaseConnector from a configuration dictionary.

    Args:
      * config: Backend configuration; must contain a 'thrift_host' field and a
        'name' field, and may contain a 'thrift_port' field (defaults to 9090).

    Returns:
      A HBaseConnector configured from the given dictionary.

    Raises:
      SyntaxError: If the mandatory 'thrift_host' field is missing.
    """
    if 'thrift_host' not in config:
        raise SyntaxError('A valid configuration for a Apache HBase connector must contains the thrift_host field')
    graph_name = config['name']
    # Default to the standard HBase Thrift port when none is configured
    port = config.get('thrift_port', 9090)
    return HBaseConnector(graph_name, config['thrift_host'], thrift_port=port)
@property
def nb_triples(self):
    """Number of RDF triples in the database (HBase keeps no statistics, so always 0)."""
    return 0

@property
def nb_subjects(self):
    """Number of distinct subjects in the database (not tracked: always 0)."""
    return 0

@property
def nb_predicates(self):
    """Number of distinct predicates in the database (not tracked: always 0)."""
    return 0

@property
def nb_objects(self):
    """Number of distinct objects in the database (not tracked: always 0)."""
    return 0
from typing import Dict, Iterable, Optional
from sage.database.core.graph import Graph
from sage.database.statefull.statefull_manager import StatefullManager
class Dataset(object):
    """A collection of RDF graphs.

    Args:
      * name: Name of the RDF dataset.
      * description: Description of the RDF dataset.
      * graphs: RDF Graphs of the dataset, keyed by graph URI.
      * public_url: (Optional) URL that hosts the SaGe server.
      * default_query: (Optional) A default query that can be executed against this dataset.
      * analytics: Google analytics credentials.
      * stateless: True if the dataset is queried in stateless mode, False if it is queried in statefull mode.
      * statefull_manager: StatefullManager used to store saved plans (required in statefull mode).
    """

    def __init__(self, name: str, description: str, graphs: Dict[str, "Graph"], public_url: Optional[str] = None, default_query: Optional[str] = None, analytics=None, stateless=True, statefull_manager: Optional["StatefullManager"] = None):
        super(Dataset, self).__init__()
        self._name = name
        # Fixed typo: the attribute was previously spelled '_desciption'
        self._description = description
        self._graphs = graphs
        self._public_url = public_url
        self._default_query = default_query
        self._analytics = analytics
        self._stateless = stateless
        self._statefull_manager = statefull_manager
        # In statefull mode, the saved-plan storage must be opened up-front
        if (not self._stateless) and self._statefull_manager is not None:
            self._statefull_manager.open()

    @property
    def name(self) -> str:
        return self._name

    @property
    def is_stateless(self) -> bool:
        return self._stateless

    @property
    def statefull_manager(self) -> "StatefullManager":
        return self._statefull_manager

    @property
    def default_query(self):
        # Fall back to an empty example query when none was configured
        default = {
            "name": "",
            "value": ""
        }
        return self._default_query if self._default_query is not None else default

    @property
    def long_description(self) -> str:
        return self._description

    @property
    def public_url(self) -> str:
        return self._public_url

    @property
    def analytics(self):
        return self._analytics

    @property
    def maintainer(self):
        # DEPRECATED: kept only for backward compatibility with older clients
        return None

    def describe(self, url: str) -> Iterable[Dict[str, str]]:
        """Get a generator over dataset descriptions.

        Args:
          * url: Public URL of the dataset.

        Yields:
          Dataset descriptions as dictionaries.
        """
        # The graph URI (dict key) is not needed here, only the graphs themselves
        for graph in self._graphs.values():
            yield graph.describe(url)

    def get_graph(self, graph_uri: str) -> Optional["Graph"]:
        """Get a RDF graph given its URI, otherwise returns None.

        Args:
          * graph_uri: URI of the RDF graph to access.

        Returns:
          The RDF Graph associated with the URI or None if it was not found.
        """
        return self._graphs.get(graph_uri)

    def has_graph(self, graph_uri: str) -> bool:
        """Test if a RDF graph exists in the RDF dataset.

        Args:
          * graph_uri: URI of the RDF graph to access.

        Returns:
          True if the RDF graph exists in the RDF dataset, False otherwise.
        """
        return graph_uri in self._graphs
import logging
from math import inf
from rdflib import Graph as RGraph
from rdflib.plugins.sparql import prepareQuery
from sage.database.core.dataset import Dataset
from sage.database.core.graph import Graph
from sage.database.import_manager import builtin_backends, import_backend
def load_config(config_file: str, format="ttl") -> Dataset:
    """Parse a SaGe configuration file written in RDF and load the corresponding RDF dataset.

    Args:
      * config_file: Path to the SaGe configuration file (in RDF format) to load.
      * format: Format of the RDF configuration file (ttl, nt, n3). Defaults to Turtle (ttl).

    Returns:
      A RDF dataset built according to the input configuration file.

    Raises:
      SyntaxError: If the file does not declare exactly one sage:SageEndpoint,
        or if a declared graph has no foaf:name.
    """
    # load config using rdflib
    graph = RGraph()
    graph.parse(config_file, format=format)
    # available backends (populated with sage's native backends)
    backends = builtin_backends()
    # load custom backend (if there is some)
    # TODO
    # get default time quantum & maximum number of results per page
    # NOTE(review): this query uses the foaf: and rdf: prefixes without a
    # PREFIX declaration — it presumably relies on prefixes bound by rdflib
    # and/or by graph.parse of the config file; confirm.
    qres = graph.query("""
        PREFIX sage: <http://sage.univ-nantes.fr/sage-voc#>
        SELECT * WHERE {
            ?server a sage:SageEndpoint; foaf:name ?name.
            OPTIONAL { ?server sage:longDescription ?description }
            OPTIONAL { ?server sage:publicUrl ?url }
            OPTIONAL { ?server sage:quantum ?quantum }
            OPTIONAL { ?server sage:pageSize ?pageSize }
            OPTIONAL { ?server sage:analytics ?analytics }
            OPTIONAL {
                ?server sage:defaultQuery ?query.
                ?query a sage:ExampleQuery;
                    sage:targetGraph ?queryGraphName;
                    foaf:name ?queryName;
                    rdf:value ?queryValue.
            }
        }""")
    if len(qres) != 1:
        raise SyntaxError("A valid SaGe RDF configuration file must contains exactly one sage:SageEndpoint.")
    # exactly one row: the loop body runs once and breaks
    for row in qres:
        # load dataset basic informations
        dataset_name = str(row.name)
        # NOTE(review): str() of an unbound OPTIONAL yields the string 'None'
        # (not the value None) for url/analytics — verify this is intended.
        public_url = str(row.url)
        analytics = str(row.analytics)
        if row.query is not None:
            default_query = {
                "dataset_name": str(row.queryGraphName),
                "name": str(row.queryName),
                "value": str(row.queryValue)
            }
        else:
            default_query = None
        # sage:longDescription points to a text file read from disk
        if row.description is not None:
            with open(str(row.description), "r") as file:
                dataset_description = file.read()
        else:
            dataset_description = "A RDF dataset hosted by a SaGe server"
        # get default time quantum & maximum number of results per page
        if row.quantum is not None:
            value = row.quantum.toPython()
            if value == 'inf':
                logging.warning("You are using SaGe with an infinite time quantum. Be sure to configure the Worker timeout of Gunicorn accordingly, otherwise long-running queries might be terminated.")
                quantum = inf
            else:
                quantum = value
        else:
            # default time quantum: 75ms
            quantum = 75
        if row.pageSize is not None and row.pageSize.toPython() != 'inf':
            max_results = row.pageSize.toPython()
        else:
            logging.warning("You are using SaGe without limitations on the number of results sent per page. This is fine, but be carefull as very large page of results can have unexpected serialization time.")
            max_results = inf
        break
    # prepare the query used to fetch backend informations per graph
    backend_query = prepareQuery("""
        PREFIX foaf: <http://xmlns.com/foaf/0.1/>
        PREFIX sage: <http://sage.univ-nantes.fr/sage-voc#>
        PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
        SELECT ?name ?paramName ?paramValue WHERE {
            ?backend a sage:Backend;
                foaf:name ?name;
                sage:param ?param.
            ?param foaf:name ?paramName;
                rdf:value ?paramValue.
        }""")
    # build all RDF graphs found in the configuration file
    graphs = dict()
    qres = graph.query("""
        PREFIX sage: <http://sage.univ-nantes.fr/sage-voc#>
        SELECT * WHERE {
            ?server a sage:SageEndpoint;
                sage:graph ?graph.
            ?graph a sage:SageGraph;
                foaf:name ?name ;
                sage:backend ?backend.
            OPTIONAL { ?graph dcterms:description ?desc . }
            OPTIONAL { ?graph sage:quantum ?quantum . }
            OPTIONAL { ?graph sage:pageSize ?pageSize . }
        }""")
    for row in qres:
        # load basic information about the graph
        if row.name is None:
            raise SyntaxError("A valid SaGe RDF graph must have a name (declared using foaf:name)!")
        # NOTE(review): g_name is kept as an rdflib term (Literal/URIRef), and is
        # later used as a dict key — lookups with a plain str may not match; verify.
        g_name = row.name
        g_description = row.desc if row.desc is not None else "Unnamed RDF graph with id {}".format(g_name)
        # graph-level quantum/page-size override the endpoint-level defaults
        g_quantum = row.quantum if row.quantum is not None else quantum
        g_max_results = row.pageSize if row.pageSize is not None else max_results
        # load default queries for this graph
        # TODO
        g_queries = list()
        # g_queries = g_config["queries"] if "queries" in g_config else list()
        # load the backend for this graph
        g_connector = None
        backend_config = dict()
        backend_name = None
        # fetch backend config. parameters first
        backend_res = graph.query(backend_query, initBindings = { "backend": row.backend })
        if len(backend_res) == 0:
            logging.error(f"Graph with name '{g_name}' has a backend declared with an invalid syntax. Please check your configuration file using the documentation.")
        else:
            for b_row in backend_res:
                backend_name = str(b_row.name)
                backend_config[str(b_row.paramName)] = str(b_row.paramValue)
        # load the graph connector using available backends
        if backend_name in backends:
            # each backend factory takes its configuration dict and returns a connector
            g_connector = backends[backend_name](backend_config)
        else:
            logging.error(f"Impossible to find the backend with name {backend_name}, declared for the RDF Graph {g_name}")
            continue
        # build the graph and register it
        # NOTE(review): sage.database.core.graph.Graph.__init__ expects
        # (uri, name, description, connector, ...) but only three positional
        # arguments are passed here — verify against the Graph class in use.
        graphs[g_name] = Graph(g_name, g_description, g_connector, quantum=g_quantum, max_results=g_max_results, default_queries=g_queries)
        logging.info("RDF Graph '{}' (backend: {}) successfully loaded".format(g_name, backend_name))
    return Dataset(dataset_name, dataset_description, graphs, public_url=public_url, default_query=default_query, analytics=analytics)
from datetime import datetime
from math import inf
from typing import List, Optional, Tuple
from sage.database.db_connector import DatabaseConnector
from sage.database.db_iterator import DBIterator
class Graph(object):
    """A RDF Graph with a dedicated backend used to search/store RDF triples.

    Args:
      * uri: URI of the RDF Graph.
      * name: Name of the RDF Graph.
      * description: Description of the RDF Graph.
      * connector: Database connector used to search/store RDF triples in this graph.
      * quantum: Time quantum associated with this graph.
      * max_results: Maximum number of results per query when executing a query with this graph.
      * default_queries: List of example queries that can be executed with this graph.
    """

    def __init__(self, uri: str, name: str, description: str, connector: "DatabaseConnector", quantum=75, max_results=inf, default_queries: Optional[List[dict]] = None):
        super(Graph, self).__init__()
        self._uri = uri
        self._name = name
        self._description = description
        self._connector = connector
        self._quantum = quantum
        self._max_results = max_results
        # BUGFIX: the default was a mutable default argument (list()), shared
        # across every Graph created without explicit queries.
        self._example_queries = default_queries if default_queries is not None else list()

    @property
    def uri(self) -> str:
        return self._uri

    @property
    def name(self) -> str:
        return self._name

    @property
    def description(self) -> str:
        return self._description

    @property
    def quota(self) -> float:
        return self._quantum

    @property
    def max_results(self) -> float:
        return self._max_results

    @property
    def nb_triples(self) -> int:
        return self._connector.nb_triples

    @property
    def example_queries(self) -> List[dict]:
        return self._example_queries

    def connector(self) -> "DatabaseConnector":
        """Get the underlying DatabaseConnector for this dataset."""
        return self._connector

    def search(self, subject: str, predicate: str, obj: str, last_read: Optional[str] = None, as_of: Optional[datetime] = None) -> Tuple["DBIterator", int]:
        """Get an iterator over all RDF triples matching a triple pattern.

        Args:
          * subject: Subject of the triple pattern.
          * predicate: Predicate of the triple pattern.
          * obj: Object of the triple pattern.
          * last_read: A RDF triple ID. When set, the search is resumed for this RDF triple.
          * as_of: A version timestamp. When set, perform all reads against a consistent snapshot represented by this timestamp.

        Returns:
          A tuple (`iterator`, `cardinality`), where `iterator` is a Python iterator over RDF triples matching the given triples pattern, and `cardinality` is the estimated cardinality of the triple pattern.

        Example:
          >>> iterator, cardinality = graph.search('?s', 'http://xmlns.com/foaf/0.1/name', '?name')
          >>> print(f"The triple pattern '?s foaf:name ?o' matches {cardinality} RDF triples")
          >>> for s, p, o in iterator:
          >>>   print(f"RDF Triple {s} {p} {o}")
        """
        return self._connector.search(subject, predicate, obj, last_read=last_read, as_of=as_of)

    def insert(self, subject: str, predicate: str, obj: str):
        """Insert a RDF triple into the RDF graph.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        self._connector.insert(subject, predicate, obj)

    def delete(self, subject: str, predicate: str, obj: str):
        """Delete a RDF triple from the RDF graph.

        Args:
          * subject: Subject of the RDF triple.
          * predicate: Predicate of the RDF triple.
          * obj: Object of the RDF triple.
        """
        self._connector.delete(subject, predicate, obj)

    def commit(self) -> None:
        """Commit any ongoing transaction (at the database level)."""
        self._connector.commit_transaction()

    def abort(self) -> None:
        """Abort any ongoing transaction (at the database level)."""
        self._connector.abort_transaction()

    def describe(self, url: str) -> dict:
        """Describe the RDF Dataset in JSON-LD format."""
        return {
            "@context": {
                "schema": "http://schema.org/",
                "void": "http://rdfs.org/ns/void#",
                'sage': 'http://sage.univ-nantes.fr/sage-voc#'
            },
            "@id": self._uri,
            "@type": "http://schema.org/Dataset",
            "schema:url": url,
            "schema:name": self._name,
            "schema:description": self._description,
            "void:triples": self.nb_triples,
            "void:distinctSubjects": self._connector.nb_subjects if self._connector.nb_subjects is not None else "unknown",
            "void:properties": self._connector.nb_predicates if self._connector.nb_predicates is not None else "unknown",
            "void:distinctObjects": self._connector.nb_objects if self._connector.nb_objects is not None else "unknown",
            "sage:timeQuota": self._quantum,
            # inf is not JSON-serializable, so it is exported as the string 'inf'
            "sage:maxResults": self.max_results if self.max_results is not inf else 'inf'
        }

    def get_query(self, q_id: str) -> Optional[dict]:
        """Get an example SPARQL query (as a dict) associated with the graph, or None if it was not found.

        Args:
          * q_id: Identifier ('@id') of the example query to fetch.
        """
        # BUGFIX annotation: this returns the query dict itself, not a str
        for query in self.example_queries:
            if query['@id'] == q_id:
                return query
        return None
def get_create_tables_queries(graph_name, backend):
    """Format the SQlite CREATE TABLE statements for the given RDF graph.

    Args:
      * graph_name: Name of the RDF graph (used as the table name).
      * backend: Target schema: 'sqlite' (plain triple table) or
        'sqlite-catalog' (triples as integer IDs + a term catalog table).

    Returns:
      A list of SQL statements to execute, in order.

    Raises:
      Exception: If the backend is unknown.
    """
    # NOTE: plain string literals below — the f-prefix is only needed where
    # {graph_name} is interpolated.
    if backend == "sqlite":
        return [(
            f"CREATE TABLE {graph_name} ("
            "subject TEXT, "
            "predicate TEXT, "
            "object TEXT);"
        )]
    elif backend == "sqlite-catalog":
        return [
            (
                "CREATE TABLE IF NOT EXISTS catalog ("
                "id INTEGER PRIMARY KEY, "
                "value TEXT);"
            ),
            (
                "CREATE UNIQUE INDEX IF NOT EXISTS catalog_locate_index ON catalog (value);"
            ),
            (
                f"CREATE TABLE {graph_name} ("
                "subject INTEGER, "
                "predicate INTEGER, "
                "object INTEGER);"
            )
        ]
    else:
        raise Exception(f"Unknown backend for SQlite: {backend}")
def get_create_indexes_queries(graph_name, backend):
    """Format all SQlite CREATE INDEX statements for the given RDF graph.

    The same three covering indexes (SPO, OSP, POS) are created for both
    supported schemas.
    """
    if backend not in ("sqlite", "sqlite-catalog"):
        raise Exception(f"Unknown backend for SQlite: {backend}")
    orderings = [
        ("spo", "subject,predicate,object"),
        ("osp", "object,subject,predicate"),
        ("pos", "predicate,object,subject"),
    ]
    return [
        f"CREATE UNIQUE INDEX IF NOT EXISTS {graph_name}_{suffix}_index ON {graph_name} ({columns});"
        for suffix, columns in orderings
    ]
def get_insert_into_query(graph_name):
    """Get an INSERT INTO statement compatible with SQlite's "executemany", used for bulk loading."""
    return "INSERT INTO " + graph_name + " (subject,predicate,object) VALUES (?, ?, ?) ON CONFLICT DO NOTHING"
def get_insert_into_catalog_query():
    """Get an INSERT INTO statement (for the term catalog table) compatible with SQlite's "executemany", used for bulk loading."""
    # Plain literal: nothing is interpolated, so no f-prefix is needed
    return "INSERT INTO catalog (value) VALUES (?) ON CONFLICT DO NOTHING"
def get_select_identifier_query():
    """Get a SELECT statement to retrieve the identifier of a RDF term from the catalog table."""
    # Plain literal: nothing is interpolated, so no f-prefix is needed
    return "SELECT id FROM catalog WHERE value = ?"
def get_analyze_query(graph_name):
    """Format an ANALYZE statement for the given RDF graph's table."""
    return "ANALYZE {}".format(graph_name)
def get_create_tables_queries(graph_name, backend):
    """Format the PostgreSQL CREATE TABLE queries for the given RDF graph.

    Args:
      * graph_name: Name of the RDF graph (used as the table name).
      * backend: Target schema: 'postgres' (plain triple table),
        'postgres-mvcc' (versioned rows) or 'postgres-catalog'
        (triples as integer IDs + a term catalog table).

    Returns:
      A list of SQL queries to execute, in order.

    Raises:
      Exception: If the backend is unknown.
    """
    # NOTE: plain string literals below — the f-prefix is only needed where
    # {graph_name} is interpolated.
    if backend == "postgres":
        return [(
            f"CREATE TABLE {graph_name} ("
            "subject TEXT, "
            "predicate TEXT, "
            "object TEXT);"
        )]
    elif backend == "postgres-mvcc":
        # NOTE(review): the abstime type was removed in PostgreSQL 12 — this
        # schema presumably targets older PostgreSQL releases; confirm.
        return [(
            f"CREATE TABLE {graph_name} ("
            "subject TEXT, "
            "predicate TEXT, "
            "object TEXT, "
            "insert_t abstime DEFAULT transaction_timestamp(), "
            "delete_t abstime DEFAULT \"infinity\");"
        )]
    elif backend == "postgres-catalog":
        return [
            (
                "CREATE TABLE IF NOT EXISTS catalog ("
                "id BIGSERIAL, "
                "value TEXT);"
            ),
            (
                "CREATE UNIQUE INDEX IF NOT EXISTS catalog_locate_index ON catalog (md5(value));"
            ),
            (
                "CREATE INDEX IF NOT EXISTS catalog_extract_index ON catalog using HASH (id);"
            ),
            (
                f"CREATE TABLE {graph_name} ("
                "subject BIGINT, "
                "predicate BIGINT, "
                "object BIGINT);"
            )
        ]
    else:
        raise Exception(f"Unknown backend for PostgreSQL: {backend}")
def get_create_indexes_queries(graph_name, backend):
    """Format all PostgreSQL CREATE INDEX queries for the given RDF graph.

    The 'postgres' and 'postgres-catalog' schemas share the same three covering
    indexes (SPO, OSP, POS); the MVCC schema additionally indexes the row
    versioning timestamps.
    """
    if backend in ("postgres", "postgres-catalog"):
        return [
            f"CREATE UNIQUE INDEX IF NOT EXISTS {graph_name}_spo_index ON {graph_name} (subject,predicate,md5(object));",
            f"CREATE UNIQUE INDEX IF NOT EXISTS {graph_name}_osp_index ON {graph_name} (md5(object),subject,predicate);",
            f"CREATE UNIQUE INDEX IF NOT EXISTS {graph_name}_pos_index ON {graph_name} (predicate,md5(object),subject);"
        ]
    elif backend == "postgres-mvcc":
        return [
            f"CREATE UNIQUE INDEX IF NOT EXISTS {graph_name}_spo_index ON {graph_name} (subject,predicate,md5(object),insert_t abstime_ops,delete_t abstime_ops);",
            f"CREATE UNIQUE INDEX IF NOT EXISTS {graph_name}_osp_index ON {graph_name} (md5(object),subject,predicate,insert_t abstime_ops,delete_t abstime_ops);",
            f"CREATE UNIQUE INDEX IF NOT EXISTS {graph_name}_pos_index ON {graph_name} (predicate,md5(object),subject,insert_t abstime_ops,delete_t abstime_ops);"
        ]
    else:
        raise Exception(f"Unknown backend for PostgreSQL: {backend}")
def get_insert_into_query(graph_name):
    """Get an INSERT INTO query compatible with "psycopg2.extras.execute_values", used for bulk loading."""
    return "INSERT INTO " + graph_name + " (subject,predicate,object) VALUES %s ON CONFLICT DO NOTHING"
def get_insert_into_catalog_query():
    """Get an INSERT INTO query (for the term catalog table) compatible with "psycopg2.extras.execute_values", used for bulk loading."""
    # Plain literal: nothing is interpolated, so no f-prefix is needed.
    # The dummy DO UPDATE makes the upsert return the row id even on conflict.
    return "INSERT INTO catalog (value) VALUES %s ON CONFLICT (md5(value)) DO UPDATE SET value=EXCLUDED.value RETURNING ID"
def get_analyze_query(graph_name):
    """Format an ANALYZE query for the given RDF graph's table."""
    return "ANALYZE {}".format(graph_name)
import sys
import re
from abc import ABC, abstractmethod
from rdflib.namespace import XSD
from rdflib.term import Literal, BNode, URIRef
from rdflib.plugins.parsers.ntriples import NTriplesParser, unquote, uriquote
# Regex fragments and compiled patterns for tokenizing N-Triples lines.
uriref = r'<([^:]+:[^\s"<>]*)>'  # IRI between angle brackets (captures the IRI)
literal = r'"([^"\\]*(?:\\.[^"\\]*)*)"'  # quoted string, allowing backslash escapes
litinfo = r'(?:@([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)|\^\^' + uriref + r')?'  # optional language tag or ^^<datatype IRI>
exponent = r'[eE][+-]?[0-9]+'  # exponent part of a double
r_wspace = re.compile(r'[ \t]*')  # optional inline whitespace
r_wspaces = re.compile(r'[ \t]+')  # mandatory inline whitespace
r_tail = re.compile(r'[ \t]*\.[ \t]*(#.*)?')  # terminating dot, optionally followed by a comment
r_literal = re.compile(literal + litinfo)  # full RDF literal: lexical form + optional tag/datatype
r_integer = re.compile(r'[0-9]+')  # xsd:integer lexical form
r_decimal = re.compile(r'([0-9]+\.[0-9]*|\.[0-9]+)')  # xsd:decimal lexical form
r_double = re.compile(rf'([0-9]+\.[0-9]*{exponent}|\.[0-9]+{exponent}|[0-9]+{exponent})')  # xsd:double lexical form
r_boolean = re.compile(r'(true|false)')  # xsd:boolean lexical form
class ParseError(Exception):
    """Raised when an error occurs while parsing an RDF file."""
    pass
class Parser(ABC):
    """Base class for streaming RDF parsers that deliver triples in fixed-size buckets.

    Subclasses implement :meth:`parsefile`; consumers override the ``on_*``
    callbacks to react to parsing events.
    """

    def __init__(self, bucket_size=100):
        # number of triples accumulated before on_bucket is invoked
        self.bucket_size = bucket_size
        # triples accumulated so far for the current bucket
        self.bucket = []

    def on_bucket(self, bucket):
        """Callback invoked when a bucket of triples is ready to be inserted into the database."""
        pass

    def on_error(self, error):
        """Callback invoked when an error is raised by the parser."""
        pass

    def on_complete(self):
        """Callback invoked once the file has been fully parsed."""
        pass

    @abstractmethod
    def parsefile(self, file_path):
        """Parse a RDF file into buckets of triples."""
        pass
class CustomNTriplesParser(Parser, NTriplesParser):
    """N-Triples parser that streams triples in buckets instead of building a graph.

    Extends rdflib's NTriplesParser with the bucket/callback machinery of
    :class:`Parser`, and with typed-literal recognition (integer, decimal,
    double, boolean) for plain literals.
    """

    def __init__(self, bucket_size=100):
        super(CustomNTriplesParser, self).__init__(bucket_size)

    def parse(self):
        """Consume lines from self.readline() until exhaustion, flushing the last (partial) bucket."""
        while True:
            line = self.readline()
            self.line = line
            if self.line is None:
                # end of input: flush any pending triples, then notify completion
                if len(self.bucket) > 0:
                    self.on_bucket(self.bucket)
                self.on_complete()
                break
            self.parseline()

    def parseline(self):
        """Parse a single N-Triples line into (subject, predicate, object) strings."""
        line = self.line
        try:
            self.eat(r_wspace)
            if (not self.line) or self.line.startswith('#'):
                return  # The line is empty or a comment
            subj = self.subject()
            # .n3() is called for its side effect only — presumably it raises
            # on malformed terms, validating them; result is discarded.
            subj.n3()
            self.eat(r_wspaces)
            pred = self.predicate()
            pred.n3()
            self.eat(r_wspaces)
            obj = self.object()
            obj.n3()
            self.eat(r_tail)
            subj = str(subj)
            pred = str(pred)
            # literals and blank nodes keep their N3 serialization; IRIs are plain strings
            if isinstance(obj, Literal) or isinstance(obj, BNode):
                obj = obj.n3()
            else:
                obj = str(obj)
            self.bucket.append((subj, pred, obj))
        except ParseError as error:
            self.on_error(error)
        except Exception:
            # BUGFIX: was a bare 'except:', which also swallowed
            # KeyboardInterrupt/SystemExit
            self.on_error(ParseError(f"Invalid triple: {line}"))
        finally:
            # flush a full bucket whether or not this line parsed
            if len(self.bucket) >= self.bucket_size:
                self.on_bucket(self.bucket)
                self.bucket = list()

    def literal(self):
        """Parse an RDF literal, inferring xsd datatypes for untyped numeric/boolean lexical forms."""
        if self.peek('"'):
            lit, lang, dtype = self.eat(r_literal).groups()
            if not lang:
                # normalize the empty match to None
                lang = None
            if dtype:
                dtype = unquote(dtype)
                dtype = uriquote(dtype)
                dtype = URIRef(dtype)
            elif re.fullmatch(r_integer, lit):
                dtype = XSD.integer
            elif re.fullmatch(r_decimal, lit):
                dtype = XSD.decimal
            elif re.fullmatch(r_double, lit):
                dtype = XSD.double
            elif re.fullmatch(r_boolean, lit):
                dtype = XSD.boolean
            else:
                dtype = None
            if lang and dtype:
                raise ParseError("Can't have both a language and a datatype")
            lit = unquote(lit)
            return Literal(lit, lang, dtype)
        return False
class NTParser(CustomNTriplesParser):
    """Bucket-based parser for N-Triples files."""

    def parsefile(self, file_path):
        """Parse an N-Triples file.

        Args:
          * file_path: Path to the N-Triples file to parse.
        """
        # BUGFIX: the file was previously opened and never closed (resource
        # leak); parse() consumes the whole file, so it can run inside 'with'.
        with open(file_path, 'r') as reader:
            self.file = reader
            self.buffer = ''
            self.parse()
class HDTParser(CustomNTriplesParser):
    """Bucket-based parser that reads an HDT file by rendering each triple as an N-Triples line."""

    def __init__(self, bucket_size):
        super(HDTParser, self).__init__(bucket_size)
        # iterator over the HDT document's triples, set by parsefile()
        self.iterator = None

    def parsefile(self, file_path):
        """Parse an HDT file as an N-Triples file.

        Args:
          * file_path: Path to the HDT file to parse.
        """
        # imported lazily so the 'hdt' package is only required for HDT input
        from hdt import HDTDocument
        doc = HDTDocument(file_path, indexed=False)
        iterator, _ = doc.search_triples("", "", "")
        self.iterator = iterator
        self.parse()

    def readline(self):
        """Render the next HDT triple as an N-Triples line, or None when exhausted."""
        try:
            # renamed from 'object' to avoid shadowing the builtin
            (subj, pred, obj) = next(self.iterator)
            # NOTE(review): terms are wrapped in <> only when they start with
            # 'http' — this heuristic presumably distinguishes IRIs from
            # literals, but would miss e.g. urn: IRIs; confirm.
            if subj.startswith('http'):
                subj = f'<{subj}>'
            if pred.startswith('http'):
                pred = f'<{pred}>'
            if obj.startswith('http'):
                obj = f'<{obj}>'
            return f'{subj} {pred} {obj}.'
        except StopIteration:
            return None
class ParserFactory():
    """Factory that instantiates the RDF parser matching a given file format."""

    @staticmethod
    def create_parser(format: str, bucket_size: int = 100) -> "Parser":
        """Build a bucket-based parser for the given RDF format.

        Args:
          * format: Input format: 'nt' (N-Triples) or 'hdt' (HDT).
          * bucket_size: Number of triples per bucket.

        Raises:
          Exception: If the format is not supported.
        """
        # BUGFIX: declared as @staticmethod — the method has no 'self' and is
        # called as ParserFactory.create_parser(...).
        if format == 'hdt':
            return HDTParser(bucket_size)
        elif format == 'nt':
            return NTParser(bucket_size)
        else:
            raise Exception(f'Unsupported RDF format: "{format}"')
import click
import sqlite3
import coloredlogs
import logging
import time
import pylru
import sage.cli.sqlite_utils as sqlite_utils
from sage.cli.utils import load_graph, get_nb_triples
from sage.cli.parsers import ParserFactory
# Configure colored, timestamped log output for all CLI commands in this module.
coloredlogs.install(level='INFO', fmt='%(asctime)s - %(levelname)s %(message)s')
# Module-level logger shared by every command below.
logger = logging.getLogger(__name__)
def connect_sqlite(graph):
    """Open a SQlite connection for the given graph configuration.

    Args:
      * graph: Graph configuration dictionary; must contain a 'database' field
        with the path of the SQlite database file.

    Returns:
      A sqlite3.Connection, or None if the configuration is invalid.
    """
    if 'database' not in graph:
        # consistent with the rest of this module: report through the logger,
        # not print()
        logger.error("Error: a valid SQlite dataset must be declared with a field 'database'")
        return None
    database = graph['database']
    return sqlite3.connect(database)
@click.command()
@click.argument("config")
@click.argument("graph_name")
@click.option('--index/--no-index', default=True,
              help="Enable/disable indexing of SQL tables. The indexes can be created separately using the command sage-postgres-index")
def init_sqlite(config, graph_name, index):
    """Initialize the RDF graph GRAPH_NAME with a SQlite backend, described in the configuration file CONFIG."""
    # load graph from config file
    graph, backend = load_graph(config, graph_name, logger, backends=['sqlite', 'sqlite-catalog'])
    # init SQlite connection
    logger.info("Connecting to the SQlite server...")
    connection = connect_sqlite(graph)
    # BUGFIX: check for a failed connection BEFORE touching the connection
    # object (isolation_level was previously set first, raising
    # AttributeError on None and masking the error message below)
    if connection is None:
        logger.error('Failed to establish a connection with SQlite')
        exit(1)
    # disable implicit transactions: they are managed explicitly below
    connection.isolation_level = None
    logger.info("Connected to the SQlite server")
    # create a cursor to interact with the database
    cursor = connection.cursor()
    # start a transaction
    cursor.execute("BEGIN TRANSACTION")
    # create the main SQL tables
    logger.info("Creating SQlite tables...")
    create_table_queries = sqlite_utils.get_create_tables_queries(graph_name, backend)
    for query in create_table_queries:
        cursor.execute(query)
    logger.info("SQlite tables successfully created")
    # create the additional indexes on OSP and POS
    if index:
        logger.info("Creating additional B-tree indexes...")
        create_indexes_queries = sqlite_utils.get_create_indexes_queries(graph_name, backend)
        for query in create_indexes_queries:
            cursor.execute(query)
        logger.info("Additional B-tree indexes successfully created")
    else:
        logger.info("Skipping additional indexes creation on user-demand")
    # commit and cleanup connection
    logger.info("Committing and cleaning up...")
    cursor.execute("COMMIT")
    cursor.close()
    connection.close()
    logger.info(f"Sage SQlite model for graph '{graph_name}' successfully initialized")
@click.command()
@click.argument("config")
@click.argument("graph_name")
def index_sqlite(config, graph_name):
    """Create the additional B-tree indexes on the RDF graph GRAPH_NAME, described in the configuration file CONFIG."""
    # load graph from config file
    graph, backend = load_graph(config, graph_name, logger, backends=['sqlite', 'sqlite-catalog'])
    # init SQlite connection
    logger.info("Connecting to the SQlite server...")
    connection = connect_sqlite(graph)
    # BUGFIX: check for a failed connection BEFORE touching the connection
    # object (isolation_level was previously set first, raising
    # AttributeError on None and masking the error message below)
    if connection is None:
        logger.error('Failed to establish a connection with SQlite')
        exit(1)
    # disable implicit transactions: they are managed explicitly below
    connection.isolation_level = None
    logger.info("Connected to the SQlite server")
    # create a cursor to interact with the database
    cursor = connection.cursor()
    # start a transaction
    cursor.execute("BEGIN TRANSACTION")
    # create indexes
    start = time.time()
    logger.info("Creating additional B-tree indexes...")
    create_indexes_queries = sqlite_utils.get_create_indexes_queries(graph_name, backend)
    for query in create_indexes_queries:
        cursor.execute(query)
    stop = time.time()
    logger.info(f"Additional B-tree indexes successfully created in {stop - start}s")
    # rebuild table statistics
    logger.info("Rebuilding table statistics...")
    start = time.time()
    cursor.execute(sqlite_utils.get_analyze_query(graph_name))
    logger.info(f"Table statistics successfully rebuilt in {time.time() - start}s")
    # commit and cleanup connection
    logger.info("Committing and cleaning up...")
    cursor.execute("COMMIT")
    cursor.close()
    connection.close()
    # BUGFIX: the final message previously claimed the model was "initialized"
    # (copy-pasted from init_sqlite); this command only creates indexes
    logger.info(f"SQlite indexes for graph '{graph_name}' successfully created")
def insert_bucket(cursor, bucket, graph_name, backend, block_size, cache):
    """Insert a bucket of RDF triples into a SQlite database.

    Args:
      * cursor: SQlite cursor used to execute the INSERT statements.
      * bucket: List of (subject, predicate, object) string triples.
      * graph_name: Name of the target graph table.
      * backend: 'sqlite' (plain triples) or 'sqlite-catalog' (terms stored as
        integer ids in a catalog table).
      * block_size: Unused by this function (kept for call-site compatibility).
      * cache: LRU cache mapping RDF terms to their catalog identifiers
        (only used by the 'sqlite-catalog' backend).
    """
    if backend == 'sqlite':
        insert_query = sqlite_utils.get_insert_into_query(graph_name)
        cursor.executemany(insert_query, bucket)
    elif backend == 'sqlite-catalog':
        # Insert terms into the catalog
        insert_query = sqlite_utils.get_insert_into_catalog_query()
        # 'values' is used as an ordered set: terms not found in the LRU cache,
        # deduplicated via dict keys
        values = dict()
        cached_identifiers = dict()
        for (s, p, o) in bucket:
            if s in cache:
                cached_identifiers[s] = cache[s]
            else:
                values[s] = 0
            if p in cache:
                cached_identifiers[p] = cache[p]
            else:
                values[p] = 0
            if o in cache:
                cached_identifiers[o] = cache[o]
            else:
                values[o] = 0
        # one single-element parameter list per uncached term
        values = [ [term] for term in list(values.keys()) ]
        cursor.executemany(insert_query, values)
        # Insert triples where terms are replaced by their identifier
        insert_query = sqlite_utils.get_insert_into_query(graph_name)
        select_id_query = sqlite_utils.get_select_identifier_query()
        values = list()
        for (s, p, o) in bucket:
            # resolve each term id from the snapshot taken above, falling back
            # to a catalog lookup for terms inserted by this bucket
            if s in cached_identifiers:
                subject_id = cached_identifiers[s]
            else:
                subject_id = cursor.execute(select_id_query, [s]).fetchone()[0]
            if p in cached_identifiers:
                predicate_id = cached_identifiers[p]
            else:
                predicate_id = cursor.execute(select_id_query, [p]).fetchone()[0]
            if o in cached_identifiers:
                object_id = cached_identifiers[o]
            else:
                object_id = cursor.execute(select_id_query, [o]).fetchone()[0]
            # refresh the LRU cache with the resolved identifiers
            cache[s] = subject_id
            cache[p] = predicate_id
            cache[o] = object_id
            values.append((subject_id, predicate_id, object_id))
        cursor.executemany(insert_query, values)
    else:
        raise Exception(f'Unknown backend for SQlite: {backend}')
@click.command()
@click.argument("rdf_file")
@click.argument("config")
@click.argument("graph_name")
@click.option("-f", "--format", type=click.Choice(["nt", "hdt"]),
              default="nt", show_default=True,
              help="Format of the input file. Supported: nt (N-triples) and hdt (HDT).")
@click.option("--block-size", type=int,
              default=100, show_default=True,
              help="Block size used for the bulk loading")
@click.option("--commit-threshold", type=int,
              default=500000, show_default=True,
              help="Commit after sending this number of RDF triples")
@click.option("--cache-size", type=int,
              default=300, show_default=True,
              help="Store terms identifier when using the catalog schema to improve loading performance")
def put_sqlite(config, graph_name, rdf_file, format, block_size, commit_threshold, cache_size):
    """Insert RDF triples from file RDF_FILE into the RDF graph GRAPH_NAME, described in the configuration file CONFIG."""
    # load graph from config file
    graph, backend = load_graph(config, graph_name, logger, backends=['sqlite', 'sqlite-catalog'])
    # init SQlite connection
    logger.info("Connecting to the SQlite server...")
    connection = connect_sqlite(graph)
    # BUGFIX: check for a failed connection BEFORE touching the connection
    # object (isolation_level was previously set first, raising
    # AttributeError on None and masking the error message below)
    if connection is None:
        logger.error('Failed to establish a connection with SQlite')
        exit(1)
    # disable implicit transactions: they are managed explicitly below
    connection.isolation_level = None
    logger.info("Connected to the SQlite server")
    # create a cursor to interact with the database
    cursor = connection.cursor()
    # start a transaction
    cursor.execute("BEGIN TRANSACTION")
    logger.info("Reading RDF source file...")
    nb_triples = get_nb_triples(rdf_file, format)
    logger.info(f"Found ~{nb_triples} RDF triples to ingest.")
    start = time.time()
    to_commit = 0
    inserted = 0
    dropped = 0
    # LRU cache of term -> catalog id (used by the 'sqlite-catalog' backend)
    cache = pylru.lrucache(cache_size)
    with click.progressbar(length=nb_triples, label=f"Inserting RDF triples 0/{nb_triples} - {dropped} triples dropped.") as bar:
        def on_bucket(bucket):
            # insert a bucket of triples, committing every commit_threshold triples
            nonlocal to_commit, inserted, dropped
            insert_bucket(cursor, bucket, graph_name, backend, block_size, cache)
            to_commit = to_commit + len(bucket)
            if to_commit >= commit_threshold:
                connection.commit()
                to_commit = 0
            inserted = inserted + len(bucket)
            bar.label = f"Inserting RDF triples {inserted}/{nb_triples} - {dropped} triples dropped."
            bar.update(len(bucket))

        def on_error(error):
            # count invalid triples without aborting the whole ingestion
            nonlocal dropped, inserted
            dropped = dropped + 1
            bar.label = f"Inserting RDF triples {inserted}/{nb_triples} - {dropped} triples dropped."
            bar.update(0)

        def on_complete():
            # final commit, statistics rebuild and cleanup
            nonlocal start
            logger.info(f"Triples ingestion successfully completed in {time.time() - start}s")
            logger.info("Rebuilding table statistics...")
            start = time.time()
            cursor.execute(sqlite_utils.get_analyze_query(graph_name))
            logger.info(f"Table statistics successfully rebuilt in {time.time() - start}s")
            logger.info("Committing and cleaning up...")
            cursor.execute("COMMIT")
            cursor.close()
            connection.close()
            logger.info(f"RDF data from file '{rdf_file}' successfully inserted into RDF graph '{graph_name}'")

        logger.info("Starting RDF triples ingestion...")
        parser = ParserFactory.create_parser(format, block_size)
        parser.on_bucket = on_bucket
        parser.on_error = on_error
        parser.on_complete = on_complete
        parser.parsefile(rdf_file)
from time import time
from typing import Dict, List, Optional, Tuple
from sage.query_engine.exceptions import DeleteInsertConflict, TooManyResults, QuantumExhausted
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.protobuf.iterators_pb2 import RootTree
ExecutionResults = Tuple[List[Dict[str, str]], Optional[RootTree], bool, Optional[str]]
async def executor(pipeline: PreemptableIterator, results: list, context: dict) -> None:
    """Execute a pipeline of iterator under a time quantum.

    Args:
      * pipeline: Root of the pipeline of iterator.
      * results: List used to store query results.
      * context: Information about the query execution.

    Throws: Any exception raised during query execution.
    """
    while pipeline.has_next():
        mappings = await pipeline.next()
        # an iterator may legitimately produce nothing during a call
        if mappings is None:
            continue
        results.append(mappings)
        # stop as soon as the page of results is full
        if len(results) >= context['max_results']:
            raise TooManyResults()
class SageEngine(object):
    """SaGe query engine, used to evaluated a preemptable physical query execution plan"""

    def __init__(self):
        super(SageEngine, self).__init__()

    async def execute(self, plan: PreemptableIterator, context: dict) -> ExecutionResults:
        """Execute a preemptable physical query execution plan under a time quantum.

        Args:
          * plan: Root of the pipeline of iterator.
          * context: Information about the query execution.

        Returns: A tuple (``results``, ``saved_plan``, ``is_done``, ``abort_reason``) where:
          * ``results`` is a list of solution mappings found during query execution
          * ``saved_plan`` is the state of the plan saved using protocol-buffers,
            or ``None`` if the query completed or was aborted
          * ``is_done`` is True when the plan has completed query evaluation, False otherwise
          * ``abort_reason`` is a message explaining why the query was aborted due to a
            concurrency control issue, or ``None`` if no abort occurred

        Throws: Any exception raised during query execution.
        """
        results: List[Dict[str, str]] = list()
        query_done = False
        root = None
        abort_reason = None
        try:
            # record when execution started: iterators compare against this
            # timestamp to detect that the time quantum is exhausted
            context['start_timestamp'] = time()
            await executor(plan, results, context)
            query_done = True
        except QuantumExhausted:
            # the time quantum expired: stop here, the plan is saved below
            pass
        except TooManyResults:
            # the page of results is full: stop here, the plan is saved below
            pass
        except DeleteInsertConflict as err:
            # a read-write conflict was detected: abort without saving the plan
            abort_reason = str(err)
        # save the plan if query execution is not done yet and no abort has occurred
        if not query_done and abort_reason is None:
            root = RootTree()
            source_field = plan.serialized_name() + '_source'
            getattr(root, source_field).CopyFrom(plan.save())
        return (results, root, query_done, abort_reason)
from typing import Dict, Optional
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.protobuf.iterators_pb2 import SavedIndexJoinIterator, TriplePattern
from sage.query_engine.protobuf.utils import pyDict_to_protoDict
class IndexJoinIterator(PreemptableIterator):
    """A IndexJoinIterator implements an Index Loop join in a pipeline of iterators.

    Args:
      * left: Previous iterator in the pipeline, i.e., the outer relation of the join.
      * right: Next iterator in the pipeline, i.e., the inner relation of the join.
      * context: Information about the query execution.
      * current_mappings: The current mappings when the join is performed.
    """

    def __init__(self, left: PreemptableIterator, right: PreemptableIterator, context: dict, current_mappings: Optional[Dict[str, str]] = None):
        super(IndexJoinIterator, self).__init__()
        self._left = left
        self._right = right
        # outer solution currently being joined against the inner relation;
        # None until a solution has been pulled from the outer relation
        self._current_mappings = current_mappings

    def __repr__(self) -> str:
        return f"<IndexJoinIterator ({self._left} JOIN {self._right} WITH {self._current_mappings})>"

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "join"

    def next_stage(self, mappings: Dict[str, str]):
        """Propagate mappings to the bottom of the pipeline in order to compute nested loop joins"""
        # new upstream mappings invalidate the current outer solution
        self._current_mappings = None
        self._left.next_stage(mappings)

    def has_next(self) -> bool:
        """Return True if the iterator has more item to yield"""
        # more items may be produced if the outer relation has more solutions, or if
        # the inner relation still has matches for the current outer solution
        return self._left.has_next() or (self._current_mappings is not None and self._right.has_next())

    async def next(self) -> Optional[Dict[str, str]]:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        if not self.has_next():
            return None
        # pull from the outer relation until the inner relation has matches to offer
        while self._current_mappings is None or not self._right.has_next():
            self._current_mappings = await self._left.next()
            if self._current_mappings is None:
                # the outer relation produced nothing during this call
                return None
            # restart the inner relation using the new outer solution
            self._right.next_stage(self._current_mappings)
        mu = await self._right.next()
        if mu is not None:
            # merge outer and inner solutions (inner bindings take precedence)
            return {**self._current_mappings, **mu}
        return None

    def save(self) -> SavedIndexJoinIterator:
        """Save and serialize the iterator as a Protobuf message"""
        saved_join = SavedIndexJoinIterator()
        # export left source
        left_field = self._left.serialized_name() + '_left'
        getattr(saved_join, left_field).CopyFrom(self._left.save())
        # export right source
        right_field = self._right.serialized_name() + '_right'
        getattr(saved_join, right_field).CopyFrom(self._right.save())
        if self._current_mappings is not None:
            pyDict_to_protoDict(self._current_mappings, saved_join.muc)
        return saved_join
from typing import Dict, Optional, Union
from rdflib import Literal, URIRef, Variable
from rdflib.plugins.sparql.algebra import translateQuery
from rdflib.plugins.sparql.parser import parseQuery
from rdflib.plugins.sparql.sparql import Bindings, QueryContext
from rdflib.util import from_n3
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.protobuf.iterators_pb2 import SavedFilterIterator
from sage.query_engine.protobuf.utils import pyDict_to_protoDict
def to_rdflib_term(value: str) -> Union[Literal, URIRef, Variable]:
    """Convert a N3 term to a RDFLib Term.

    Argument: A RDF Term in N3 format.

    Returns: The RDF Term in rdflib format.
    """
    # anything that starts with 'http' is treated as an IRI
    if value.startswith('http'):
        return URIRef(value)
    # for typed literals, the datatype IRI must be wrapped in angle
    # brackets before rdflib can parse the N3 form
    pos = value.find('"^^http')
    if pos > -1:
        cut = pos + 3  # keep the closing quote and the '^^' marker
        value = f"{value[:cut]}<{value[cut:]}>"
    return from_n3(value)
class FilterIterator(PreemptableIterator):
    """A FilterIterator evaluates a FILTER clause in a pipeline of iterators.

    Args:
      * source: Previous iterator in the pipeline.
      * expression: A SPARQL FILTER expression.
      * context: Information about the query execution.
    """

    def __init__(self, source: PreemptableIterator, expression: str, context: dict):
        super(FilterIterator, self).__init__()
        self._source = source
        self._raw_expression = expression
        # compile the expression using rdflib, by wrapping it in a dummy query
        compiled_expr = parseQuery(f"SELECT * WHERE {{?s ?p ?o . FILTER({expression})}}")
        compiled_expr = translateQuery(compiled_expr)
        self._prologue = compiled_expr.prologue
        # extract the compiled FILTER expression from the query algebra
        self._compiled_expression = compiled_expr.algebra.p.p.expr

    def __repr__(self) -> str:
        return f"<FilterIterator '{self._raw_expression}' on {self._source}>"

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "filter"

    def _evaluate(self, bindings: Dict[str, str]) -> bool:
        """Evaluate the FILTER expression with a set mappings.

        Argument: A set of solution mappings.

        Returns: The outcome of evaluating the SPARQL FILTER on the input set of solution mappings.
        """
        # convert mappings to rdflib terms (keys are '?var' strings, strip the '?')
        d = {Variable(key[1:]): to_rdflib_term(value) for key, value in bindings.items()}
        b = Bindings(d=d)
        context = QueryContext(bindings=b)
        context.prologue = self._prologue
        return self._compiled_expression.eval(context)

    def next_stage(self, mappings: Dict[str, str]):
        """Propagate mappings to the bottom of the pipeline in order to compute nested loop joins"""
        self._source.next_stage(mappings)

    async def next(self) -> Optional[Dict[str, str]]:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        if not self.has_next():
            return None
        mu = await self._source.next()
        while mu is None or not self._evaluate(mu):
            # Bug fix: if the source runs dry while we are discarding
            # non-matching mappings, an exhausted source keeps returning None
            # and the original loop never terminated. Bail out instead.
            if not self._source.has_next():
                return None
            mu = await self._source.next()
        return mu

    def has_next(self) -> bool:
        """Return True if the iterator has more item to yield"""
        return self._source.has_next()

    def save(self) -> SavedFilterIterator:
        """Save and serialize the iterator as a Protobuf message"""
        saved_filter = SavedFilterIterator()
        source_field = self._source.serialized_name() + '_source'
        getattr(saved_filter, source_field).CopyFrom(self._source.save())
        saved_filter.expression = self._raw_expression
        return saved_filter
from datetime import datetime
from typing import Dict, Optional, Union
from sage.database.core.dataset import Dataset
from sage.query_engine.iterators.filter import FilterIterator
from sage.query_engine.iterators.nlj import IndexJoinIterator
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.iterators.projection import ProjectionIterator
from sage.query_engine.iterators.scan import ScanIterator
from sage.query_engine.iterators.union import BagUnionIterator
from sage.query_engine.protobuf.iterators_pb2 import (RootTree,
SavedBagUnionIterator,
SavedFilterIterator,
SavedIndexJoinIterator,
SavedProjectionIterator,
SavedScanIterator)
from sage.query_engine.protobuf.utils import protoTriple_to_dict
SavedProtobufPlan = Union[RootTree,SavedBagUnionIterator,SavedFilterIterator,SavedIndexJoinIterator,SavedProjectionIterator,SavedScanIterator]
def load(saved_plan: SavedProtobufPlan, dataset: Dataset, context: dict) -> PreemptableIterator:
    """Load a preemptable physical query execution plan from a saved state.

    Args:
      * saved_plan: Saved query execution plan.
      * dataset: RDF dataset used to execute the plan.
      * context: Information about the query execution.

    Returns:
      The pipeline of iterator used to continue query execution.
    """
    # unpack the plan from the serialized protobuf message
    if isinstance(saved_plan, bytes):
        root = RootTree()
        root.ParseFromString(saved_plan)
        saved_plan = getattr(root, root.WhichOneof('source'))
    # dispatch to the loader matching the type of the current node
    loaders = {
        SavedFilterIterator: load_filter,
        SavedProjectionIterator: load_projection,
        SavedScanIterator: load_scan,
        SavedIndexJoinIterator: load_nlj,
        SavedBagUnionIterator: load_union,
    }
    loader = loaders.get(type(saved_plan))
    if loader is None:
        raise Exception(f"Unknown iterator type '{type(saved_plan)}' when loading controls")
    return loader(saved_plan, dataset, context)
def load_projection(saved_plan: SavedProjectionIterator, dataset: Dataset, context: dict) -> PreemptableIterator:
    """Load a ProjectionIterator from a protobuf serialization.

    Args:
      * saved_plan: Saved query execution plan.
      * dataset: RDF dataset used to execute the plan.
      * context: Information about the query execution.

    Returns:
      The pipeline of iterator used to continue query execution.
    """
    # recursively reload the child iterator
    child = load(getattr(saved_plan, saved_plan.WhichOneof('source')), dataset, context)
    # an empty list of values means "project everything" (SELECT *)
    projection = saved_plan.values if len(saved_plan.values) > 0 else None
    return ProjectionIterator(child, context, projection)
def load_filter(saved_plan: SavedFilterIterator, dataset: Dataset, context: dict) -> PreemptableIterator:
    """Load a FilterIterator from a protobuf serialization.

    Args:
      * saved_plan: Saved query execution plan.
      * dataset: RDF dataset used to execute the plan.
      * context: Information about the query execution.

    Returns:
      The pipeline of iterator used to continue query execution.
    """
    # recursively reload the child iterator, then re-wrap it with the FILTER
    child = load(getattr(saved_plan, saved_plan.WhichOneof('source')), dataset, context)
    return FilterIterator(child, saved_plan.expression, context)
def load_scan(saved_plan: SavedScanIterator, dataset: Dataset, context: dict) -> PreemptableIterator:
    """Load a ScanIterator from a protobuf serialization.

    Args:
      * saved_plan: Saved query execution plan.
      * dataset: RDF dataset used to execute the plan.
      * context: Information about the query execution.

    Returns:
      The pipeline of iterator used to continue query execution.
    """
    pattern = protoTriple_to_dict(saved_plan.pattern)
    connector = dataset.get_graph(pattern['graph'])
    # an empty timestamp means the scan is not bound to a snapshot
    raw_timestamp = saved_plan.timestamp
    as_of = datetime.fromisoformat(raw_timestamp) if raw_timestamp is not None and raw_timestamp != '' else None
    # empty protobuf maps mean "no mappings saved"
    current_mappings = dict(saved_plan.muc) if len(saved_plan.muc) > 0 else None
    mu = dict(saved_plan.mu) if len(saved_plan.mu) > 0 else None
    return ScanIterator(connector, pattern, context, current_mappings=current_mappings, mu=mu, last_read=saved_plan.last_read, as_of=as_of)
def load_nlj(saved_plan: SavedIndexJoinIterator, dataset: Dataset, context: dict) -> PreemptableIterator:
    """Load a IndexJoinIterator from a protobuf serialization.

    Args:
      * saved_plan: Saved query execution plan.
      * dataset: RDF dataset used to execute the plan.
      * context: Information about the query execution.

    Returns:
      The pipeline of iterator used to continue query execution.
    """
    # recursively reload both operands of the join
    left = load(getattr(saved_plan, saved_plan.WhichOneof('left')), dataset, context)
    right = load(getattr(saved_plan, saved_plan.WhichOneof('right')), dataset, context)
    # an empty protobuf map means "no mappings saved"
    muc = dict(saved_plan.muc) if len(saved_plan.muc) > 0 else None
    return IndexJoinIterator(left, right, context, current_mappings=muc)
def load_union(saved_plan: SavedBagUnionIterator, dataset: Dataset, context: dict) -> PreemptableIterator:
    """Load a BagUnionIterator from a protobuf serialization.

    Args:
      * saved_plan: Saved query execution plan.
      * dataset: RDF dataset used to execute the plan.
      * context: Information about the query execution.

    Returns:
      The pipeline of iterator used to continue query execution.
    """
    # recursively reload both operands of the union
    left = load(getattr(saved_plan, saved_plan.WhichOneof('left')), dataset, context)
    right = load(getattr(saved_plan, saved_plan.WhichOneof('right')), dataset, context)
    return BagUnionIterator(left, right, context)
from typing import Dict, List, Optional, Tuple
class EmptyIterator(object):
    """An Iterator that yields nothing"""

    def __init__(self):
        super(EmptyIterator, self).__init__()

    def __len__(self) -> int:
        # an empty iterator never holds any item
        return 0

    def has_next(self) -> bool:
        """Return True if the iterator has more item to yield"""
        return False

    async def next(self) -> None:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        return None
class ArrayIterator(object):
    """An iterator that sequentially yields all items from a list.

    Argument: List of solution mappings.
    """

    def __init__(self, array: List[Dict[str, str]]):
        super(ArrayIterator, self).__init__()
        # the backing list is consumed in place, front first
        self._array = array

    def has_next(self) -> bool:
        """Return True if the iterator has more item to yield"""
        return bool(self._array)

    async def next(self) -> Optional[Dict[str, str]]:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        if not self._array:
            return None
        return self._array.pop(0)
def selection(triple: Tuple[str, str, str], variables: List[str]) -> Dict[str, str]:
    """Apply a selection on a RDF triple, producing a set of solution mappings.

    Args:
      * triple: RDF triple on which the selection is applied.
      * variables: Input variables of the selection (None marks a bound position).

    Returns:
      A set of solution mappings built from the selection results.

    Example:
      >>> triple = (":Ann", "foaf:knows", ":Bob")
      >>> variables = ["?s", None, "?knows"]
      >>> selection(triple, variables)
      { "?s": ":Ann", "?knows": ":Bob" }
    """
    # pair each variable with the RDF term at the same position,
    # keeping only the positions that actually hold a variable
    return {var: term for var, term in zip(variables, triple) if var is not None}
def find_in_mappings(variable: str, mappings: Optional[Dict[str, str]] = None) -> str:
    """Find a substitution for a SPARQL variable in a set of solution mappings.

    Args:
      * variable: SPARQL variable to look for.
      * mappings: Set of solution mappings to search in (defaults to no mappings).

    Returns:
      The value that can be substituted for this variable, or the variable
      itself (or the constant term) when no substitution exists.

    Example:
      >>> mappings = { "?s": ":Ann", "?knows": ":Bob" }
      >>> find_in_mappings("?s", mappings)
      ":Ann"
      >>> find_in_mappings("?unknown", mappings)
      "?unknown"
    """
    # constant terms are returned as-is
    if not variable.startswith('?'):
        return variable
    # Fix: use None as the default instead of a shared mutable dict() default
    # (backward compatible: lookups in an empty dict always missed anyway)
    if mappings is None:
        return variable
    return mappings.get(variable, variable)
def vars_positions(subject: str, predicate: str, obj: str) -> List[str]:
    """Find the positions of SPARQL variables in a triple pattern.

    Args:
      * subject: Subject of the triple pattern.
      * predicate: Predicate of the triple pattern.
      * obj: Object of the triple pattern.

    Returns:
      The positions of SPARQL variables in the input triple pattern.

    Example:
      >>> vars_positions("?s", "http://xmlns.com/foaf/0.1/name", '"Ann"@en')
      [ "?s", None, None ]
      >>> vars_positions("?s", "http://xmlns.com/foaf/0.1/name", "?name")
      [ "?s", None, "?name" ]
    """
    positions = []
    for term in (subject, predicate, obj):
        # keep variable names, replace bound terms with None
        positions.append(term if term.startswith('?') else None)
    return positions
def tuple_to_triple(s: str, p: str, o: str) -> Dict[str, str]:
    """Convert a tuple-based triple pattern into a dict-based triple pattern.

    Args:
      * s: Subject of the triple pattern.
      * p: Predicate of the triple pattern.
      * o: Object of the triple pattern.

    Returns:
      The triple pattern as a dictionnary.

    Example:
      >>> tuple_to_triple("?s", "foaf:knows", ":Bob")
      { "subject": "?s", "predicate": "foaf:knows", "object": ":Bob" }
    """
    return dict(zip(('subject', 'predicate', 'object'), (s, p, o)))
from time import time
from typing import Dict, List, Optional
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.protobuf.iterators_pb2 import SavedProjectionIterator
class ProjectionIterator(PreemptableIterator):
    """A ProjectionIterator evaluates a SPARQL projection (SELECT) in a pipeline of iterators.

    Args:
      * source: Previous iterator in the pipeline.
      * context: Information about the query execution.
      * projection: Projection variables, or None to project everything (SELECT *).
    """

    def __init__(self, source: PreemptableIterator, context: dict, projection: List[str] = None):
        super(ProjectionIterator, self).__init__()
        self._source = source
        self._projection = projection

    def __repr__(self) -> str:
        return f"<ProjectionIterator SELECT {self._projection} FROM {self._source}>"

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "proj"

    def has_next(self) -> bool:
        """Return True if the iterator has more item to yield"""
        return self._source.has_next()

    def next_stage(self, mappings: Dict[str, str]):
        """Propagate mappings to the bottom of the pipeline in order to compute nested loop joins"""
        self._source.next_stage(mappings)

    async def next(self) -> Optional[Dict[str, str]]:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        if not self.has_next():
            return None
        mappings = await self._source.next()
        if mappings is None:
            return None
        elif self._projection is None:
            # SELECT *: nothing to project away
            return mappings
        return {k: v for k, v in mappings.items() if k in self._projection}

    def save(self) -> SavedProjectionIterator:
        """Save and serialize the iterator as a Protobuf message"""
        saved_proj = SavedProjectionIterator()
        # Bug fix: guard against a None projection (SELECT *); the original
        # called values.extend(None), which raises a TypeError when saving.
        if self._projection is not None:
            saved_proj.values.extend(self._projection)
        source_field = self._source.serialized_name() + '_source'
        getattr(saved_proj, source_field).CopyFrom(self._source.save())
        return saved_proj
from datetime import datetime
from time import time
from typing import Dict, Optional
from sage.database.db_connector import DatabaseConnector
from sage.query_engine.exceptions import QuantumExhausted
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.iterators.utils import selection, vars_positions
from sage.query_engine.protobuf.iterators_pb2 import SavedScanIterator, TriplePattern
from sage.query_engine.protobuf.utils import pyDict_to_protoDict
from sage.query_engine.iterators.utils import find_in_mappings
class ScanIterator(PreemptableIterator):
    """A ScanIterator evaluates a triple pattern over a RDF graph.

    It can be used as the starting iterator in a pipeline of iterators.

    Args:
      * connector: The database connector that will be used to evaluate a triple pattern.
      * pattern: The evaluated triple pattern.
      * context: Information about the query execution.
      * current_mappings: The current mappings when the scan is performed.
      * mu: The last triple read when the preemption occured. This triple must be the next returned triple when the query is resumed.
      * last_read: An offset ID used to resume the ScanIterator.
      * as_of: Perform all reads against a consistent snapshot represented by a timestamp.
    """

    def __init__(self, connector: DatabaseConnector, pattern: Dict[str, str], context: dict, current_mappings: Optional[Dict[str, str]] = None, mu: Optional[Dict[str, str]] = None, last_read: Optional[str] = None, as_of: Optional[datetime] = None):
        super(ScanIterator, self).__init__()
        self._connector = connector
        self._pattern = pattern
        self._context = context
        # positions of the SPARQL variables in the triple pattern
        self._variables = vars_positions(pattern['subject'], pattern['predicate'], pattern['object'])
        self._current_mappings = current_mappings
        # solution saved at preemption time, re-emitted first when resuming
        self._mu = mu
        self._last_read = last_read
        self._start_timestamp = as_of
        # Create an iterator on the database
        if current_mappings is None:
            # no mappings to apply: scan the pattern as-is
            it, card = self._connector.search(pattern['subject'], pattern['predicate'], pattern['object'], last_read=last_read, as_of=as_of)
            self._source = it
            self._cardinality = card
        else:
            # substitute the pattern variables bound in `current_mappings` before scanning
            (s, p, o) = (find_in_mappings(pattern['subject'], current_mappings), find_in_mappings(pattern['predicate'], current_mappings), find_in_mappings(pattern['object'], current_mappings))
            it, card = self._connector.search(s, p, o, last_read=last_read, as_of=as_of)
            self._source = it
            self._cardinality = card

    def __len__(self) -> int:
        # cardinality of the scan, as reported by the database connector
        return self._cardinality

    def __repr__(self) -> str:
        return f"<ScanIterator ({self._pattern['subject']} {self._pattern['predicate']} {self._pattern['object']})>"

    def serialized_name(self):
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "scan"

    def last_read(self) -> str:
        """Return the offset ID of the last triple read, used to resume the scan"""
        return self._source.last_read()

    def has_next(self) -> bool:
        """Return True if the iterator has more item to yield"""
        # a solution saved at preemption time (`_mu`) also counts as a pending item
        return self._source.has_next() or self._mu is not None

    def next_stage(self, mappings: Dict[str, str]):
        """Propagate mappings to the bottom of the pipeline in order to compute nested loop joins"""
        # restart the scan with the pattern variables substituted by `mappings`,
        # discarding any previously-saved resume state
        (s, p, o) = (find_in_mappings(self._pattern['subject'], mappings), find_in_mappings(self._pattern['predicate'], mappings), find_in_mappings(self._pattern['object'], mappings))
        it, card = self._connector.search(s, p, o, as_of=self._start_timestamp)
        self._current_mappings = mappings
        self._source = it
        self._cardinality = card
        self._last_read = None
        self._mu = None

    async def next(self) -> Optional[Dict[str, str]]:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        if self._mu is not None:
            # re-emit the solution that was pending when preemption occurred
            triple = self._mu
            self._mu = None
            return triple
        elif not self.has_next():
            return None
        else:
            triple = self._source.next()
            if triple is not None:
                # turn the raw RDF triple into a set of solution mappings
                triple = selection(triple, self._variables)
            # elapsed time since execution started, in milliseconds
            timestamp = (time() - self._context['start_timestamp']) * 1000
            if self._context['quantum'] <= timestamp:
                # time quantum exhausted: save the solution so it is
                # re-emitted when the query resumes, then preempt
                self._mu = triple
                raise QuantumExhausted()
            else:
                return triple

    def save(self) -> SavedScanIterator:
        """Save and serialize the iterator as a Protobuf message"""
        saved_scan = SavedScanIterator()
        triple = TriplePattern()
        triple.subject = self._pattern['subject']
        triple.predicate = self._pattern['predicate']
        triple.object = self._pattern['object']
        triple.graph = self._pattern['graph']
        saved_scan.pattern.CopyFrom(triple)
        if self._current_mappings is not None:
            pyDict_to_protoDict(self._current_mappings, saved_scan.muc)
        # offset used to resume the scan where it stopped
        saved_scan.last_read = self._source.last_read()
        if self._start_timestamp is not None:
            saved_scan.timestamp = self._start_timestamp.isoformat()
        if self._mu is not None:
            pyDict_to_protoDict(self._mu, saved_scan.mu)
        return saved_scan
from typing import Dict, Optional
from random import random
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.protobuf.iterators_pb2 import SavedBagUnionIterator
class BagUnionIterator(PreemptableIterator):
    """A BagUnionIterator performs a SPARQL UNION with bag semantics in a pipeline of iterators.

    This operator sequentially produces all solutions from the left operand,
    and then do the same for the right operand.

    Args:
      * left: left operand of the union.
      * right: right operand of the union.
      * context: Information about the query execution.
    """

    def __init__(self, left: PreemptableIterator, right: PreemptableIterator, context: dict):
        super(BagUnionIterator, self).__init__()
        self._left = left
        self._right = right

    def __repr__(self):
        return f"<BagUnionIterator {self._left} UNION {self._right}>"

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "union"

    def has_next(self) -> bool:
        """Return True if the iterator has more item to yield"""
        if self._left.has_next():
            return True
        return self._right.has_next()

    def next_stage(self, mappings: Dict[str, str]):
        """Propagate mappings to the bottom of the pipeline in order to compute nested loop joins"""
        for operand in (self._left, self._right):
            operand.next_stage(mappings)

    async def next(self) -> Optional[Dict[str, str]]:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        # drain the left operand first, then the right one
        if self._left.has_next():
            return await self._left.next()
        if self._right.has_next():
            return await self._right.next()
        return None

    def save(self) -> SavedBagUnionIterator:
        """Save and serialize the iterator as a Protobuf message"""
        saved_union = SavedBagUnionIterator()
        # export both operands under their serialized field names
        for child, suffix in ((self._left, '_left'), (self._right, '_right')):
            getattr(saved_union, child.serialized_name() + suffix).CopyFrom(child.save())
        return saved_union
class RandomBagUnionIterator(BagUnionIterator):
    """A RandomBagUnionIterator performs a SPARQL UNION with bag semantics in a pipeline of iterators.

    This operator randomly reads from the left and right operands to produce solution mappings.

    Args:
      * left: left operand of the union.
      * right: right operand of the union.
      * context: Information about the query execution.
    """

    def __init__(self, left: PreemptableIterator, right: PreemptableIterator, context: dict):
        # Bug fix: the original called super(BagUnionIterator, self).__init__(),
        # which skipped BagUnionIterator.__init__ and re-assigned the operands
        # by hand. Delegate to the parent constructor instead.
        super().__init__(left, right, context)

    # has_next is inherited from BagUnionIterator (the original re-declared an
    # identical copy, which was redundant).

    async def next(self) -> Optional[Dict[str, str]]:
        """Get the next item from the iterator, following the iterator protocol.

        This function may contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: A set of solution mappings, or `None` if none was produced during this call.
        """
        if not self.has_next():
            return None
        # flip a fair coin to pick the preferred operand, falling back
        # to the other one when the preferred operand is exhausted
        if random() < 0.5:
            first, second = self._left, self._right
        else:
            first, second = self._right, self._left
        if first.has_next():
            return await first.next()
        return await second.next()
from typing import Dict, List, Optional, Tuple
from sage.database.core.dataset import Dataset
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.protobuf.iterators_pb2 import SavedDeleteData
from sage.query_engine.protobuf.utils import pyDict_to_protoDict
class DeleteOperator(PreemptableIterator):
    """A DeleteOperator deletes RDF triples from a RDF dataset.

    Args:
      * quads: List of RDF quads to delete from the RDF dataset.
      * dataset: RDF dataset.
    """

    def __init__(self, quads: List[Tuple[str, str, str, str]], dataset: Dataset):
        super(DeleteOperator, self).__init__()
        self._quads = quads
        self._dataset = dataset
        # number of triples deleted so far, per RDF graph (exported through the
        # `nb_inserted` protobuf field to keep the serialization format stable)
        self._inserted = dict()

    def __repr__(self) -> str:
        return f"<DeleteOperator quads={self._quads}>"

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "delete"

    def has_next(self) -> bool:
        """Return True if the iterator has more quads to delete"""
        return len(self._quads) > 0

    def next_stage(self, mappings: Dict[str, str]) -> None:
        """Propagate mappings to the bottom of the pipeline in order to compute nested loop joins"""
        pass

    async def next(self) -> Optional[Dict[str, str]]:
        """Delete the next quad from the RDF dataset.

        This function works in an iterator fashion, so it can be used in a pipeline of iterators.
        It may also contains `non interruptible` clauses which must
        be atomically evaluated before preemption occurs.

        Returns: The quad if it was successfully deleted, otherwise it returns `None`.
        """
        if not self.has_next():
            return None
        s, p, o, g = self._quads.pop()
        if self._dataset.has_graph(g):
            self._dataset.get_graph(g).delete(s, p, o)
            # Bug fix: the original recorded the first deletion in a graph as 0
            # (`self._inserted[g] = 0`), so the per-graph counter under-counted
            # by one. Start the counter at 1 instead.
            self._inserted[g] = self._inserted.get(g, 0) + 1
            return {"?s": s, "?p": p, "?o": o, "?graph": g}
        return None

    def save(self) -> SavedDeleteData:
        """Save and serialize the iterator as a Protobuf message"""
        saved = SavedDeleteData()
        pyDict_to_protoDict(self._inserted, saved.nb_inserted)
        return saved
from typing import Dict, Optional
from sage.query_engine.exceptions import DeleteInsertConflict
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
class UpdateSequenceOperator(PreemptableIterator):
    """An UpdateSequenceOperator evaluates a "IF_EXISTS DELETE INSERT" query.

    It is used to provide serializability per solution group.
    To do so, it sequentially evaluates an IfExistsOperator, then a DeleteOperator and finally an InsertOperator.

    Args:
      * if_exists_op: Operator used to evaluate the IF_EXISTS clause.
      * delete_op: Operator used to evaluate the DELETE clause.
      * insert_op: Operator used to evaluate the INSERT clause.
    """

    def __init__(self, if_exists_op: PreemptableIterator, delete_op: PreemptableIterator, insert_op: PreemptableIterator):
        super(UpdateSequenceOperator, self).__init__()
        self._if_exists_op = if_exists_op
        self._delete_op = delete_op
        self._insert_op = insert_op

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "update_sequence"

    def has_next(self) -> bool:
        """Return True if the iterator has more quads to process.

        NOTE: unusually for a `has_next`, this method raises
        `DeleteInsertConflict` when the IF_EXISTS stage has detected a
        read-write conflict, so callers must be prepared for it.
        """
        # abort if a conflict was detected
        if self._if_exists_op.missing_nquads:
            raise DeleteInsertConflict('A read-write conflict has been detected. It seems that a concurrent SPARQL query has already deleted some RDF triples that you previously read.')
        return self._if_exists_op.has_next() or self._delete_op.has_next() or self._insert_op.has_next()
async def next(self) -> Optional[Dict[str, str]]:
    """Advance in the sequence of operations.

    This function works in an iterator fashion, so it can be used in a
    pipeline of iterators. It may also contain `non interruptible` clauses
    which must be atomically evaluated before preemption occurs.

    Returns: Always `None`

    Throws:
      * `StopAsyncIteration` if the iterator has finished query processing.
      * `DeleteInsertConflict` if a read-write conflict is detected.
    """
    # abort eagerly if a concurrent update invalidated our reads
    if self._if_exists_op.missing_nquads:
        raise DeleteInsertConflict('A read-write conflict has been detected. It seems that a concurrent SPARQL query has already deleted some RDF triples that you previously read.')
    if not self.has_next():
        raise StopAsyncIteration()
    # advance the first stage of the sequence that still has work to do:
    # IF_EXISTS first, then DELETE, then INSERT
    for operator in (self._if_exists_op, self._delete_op, self._insert_op):
        if operator.has_next():
            await operator.next()
            break
    return None
def save(self) -> str:
"""Useless for this operator, as it MUST run completely inside a quantum"""
return '' | /sage_engine-2.3.0-py3-none-any.whl/sage/query_engine/update/update_sequence.py | 0.939373 | 0.513607 | update_sequence.py | pypi |
from typing import Dict, List, Optional, Tuple
from sage.database.core.dataset import Dataset
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
from sage.query_engine.protobuf.iterators_pb2 import SavedInsertData
from sage.query_engine.protobuf.utils import pyDict_to_protoDict
class InsertOperator(PreemptableIterator):
    """An InsertOperator inserts RDF triples into a RDF dataset.

    Args:
      * quads: List of RDF quads to insert into the RDF dataset.
      * dataset: RDF dataset
    """

    def __init__(self, quads: List[Tuple[str, str, str, str]], dataset: Dataset):
        super(InsertOperator, self).__init__()
        self._quads = quads
        self._dataset = dataset
        # we store how many triples were inserted in each RDF graph
        self._inserted = dict()

    def __repr__(self) -> str:
        return f"<InsertOperator quads={self._quads}>"

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "insert"

    def has_next(self) -> bool:
        """Return True if the iterator has more quads to insert"""
        return len(self._quads) > 0

    def next_stage(self, mappings: Dict[str, str]) -> None:
        """Propagate mappings to the bottom of the pipeline in order to compute nested loop joins.

        No-op here: this operator is a pipeline leaf.
        """
        pass
async def next(self) -> Optional[Dict[str, str]]:
    """Insert the next quad into the RDF dataset.

    This function works in an iterator fashion, so it can be used in a pipeline of iterators.
    It may also contains `non interruptible` clauses which must
    be atomically evaluated before preemption occurs.

    Returns: The quad if it was successfully inserted, otherwise it returns `None`.
    """
    if not self.has_next():
        return None
    s, p, o, g = self._quads.pop()
    if self._dataset.has_graph(g):
        self._dataset.get_graph(g).insert(s, p, o)
        # update counters: the first insertion in a graph counts as 1
        # (the previous code initialized the counter to 0, so `_inserted`
        # undercounted every graph by one quad)
        self._inserted[g] = self._inserted.get(g, 0) + 1
        return {"?s": s, "?p": p, "?o": o, "?graph": g}
    # unknown target graph: the quad is dropped silently
    return None
def save(self) -> SavedInsertData:
    """Save and serialize the iterator as a Protobuf message."""
    saved = SavedInsertData()
    # copy the per-graph insertion counters into the protobuf map field
    pyDict_to_protoDict(self._inserted, saved.nb_inserted)
return saved | /sage_engine-2.3.0-py3-none-any.whl/sage/query_engine/update/insert.py | 0.951695 | 0.611353 | insert.py | pypi |
from typing import Dict, Iterable, List, Tuple
from rdflib import Variable
from sage.database.core.dataset import Dataset
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
Quad = Tuple[str, str, str, str]
def apply_templates(mappings: List[Dict[str, str]], templates: List[Quad]) -> Iterable[Quad]:
    """
    Apply every solution mapping to every quad template and yield the
    distinct quads produced.

    A term starting with '?' in the subject, predicate or object position is
    replaced by its bound value when the mapping provides one; the graph
    position is never substituted.
    """
    emitted = set()  # used for deduplication across all mappings
    for binding in mappings:
        for s, p, o, g in templates:
            def _resolve(term):
                # substitute a variable by its bound value, if any
                if term.startswith('?') and term in binding:
                    return binding[term]
                return term
            quad = (_resolve(s), _resolve(p), _resolve(o), g)
            if quad not in emitted:
                emitted.add(quad)
                yield quad
class SerializableUpdate(PreemptableIterator):
    """A SerializableUpdate iterator evaluates a SPARQL INSERT/DELETE query as a serializable transaction.

    Args:
      * dataset: RDF dataset to update.
      * read_input: Iterator that evaluates a WHERE clause.
      * delete_templates: List of delete templates from the DELETE clause (nquads to delete).
      * insert_templates: List of insert templates from the INSERT clause (nquads to insert).
    """

    def __init__(self, dataset: Dataset, read_input: PreemptableIterator, delete_templates: List[Quad], insert_templates: List[Quad]):
        super(SerializableUpdate, self).__init__()
        self._dataset = dataset
        self._read_input = read_input
        self._delete_templates = delete_templates
        self._insert_templates = insert_templates

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "serializable_update"

    def has_next(self) -> bool:
        """Return True if the iterator has more quads to process.

        This iterator has not finished to process quads iff:
          * the read set is not entierly built, or
          * all deletes have not been performed, or
          * all insert have not been performed.

        NOTE(review): only the first condition is actually checked; this is
        sufficient because next() performs all deletes and inserts in a
        single call after exhausting the read input — confirm.
        """
        return self._read_input.has_next()
async def next(self) -> None:
    """Execute the SPARQL INSERT/DELETE query.

    This function blocks until the whole query has been processed.
    hence, it breaks the iterator model as all the work is done in a single call to next()
    It may also contains `non interruptible` clauses which must
    be atomically evaluated before preemption occurs.

    Returns: Always `None`

    Throws:
      * `StopAsyncIteration` if the iterator has fnished query processing.
      * `SerializationFailure` if the SPARQL UPDATE query cannot be serialized as a transaction.
    """
    if not self.has_next():
        raise StopAsyncIteration()
    # read all mappings from the predecessor
    # (the WHERE clause is fully materialized before any write is applied)
    mappings = list()
    while self._read_input.has_next():
        mu = await self._read_input.next()
        mappings.append(mu)
    # apply all deletes
    # (deletes are applied before inserts, per SPARQL UPDATE semantics)
    for s, p, o, g in apply_templates(mappings, self._delete_templates):
        if self._dataset.has_graph(g):
            self._dataset.get_graph(g).delete(s, p, o)
    # apply all inserts
    # NOTE(review): quads targeting an unknown graph are silently skipped
    # in both phases — confirm this is intended.
    for s, p, o, g in apply_templates(mappings, self._insert_templates):
        if self._dataset.has_graph(g):
            self._dataset.get_graph(g).insert(s, p, o)
    return None
def save(self) -> str:
"""Useless for this operator, as it MUST run completely inside a quantum"""
return '' | /sage_engine-2.3.0-py3-none-any.whl/sage/query_engine/update/serializable.py | 0.938759 | 0.429011 | serializable.py | pypi |
from datetime import datetime
from typing import Dict, List, Optional
from sage.database.core.dataset import Dataset
from sage.query_engine.iterators.preemptable_iterator import PreemptableIterator
class IfExistsOperator(PreemptableIterator):
    """An IfExistsOperator checks if all N-Quads in a set exist in the database.

    It is used to provide the "serializability per solution group" consistency level.

    Args:
      * quads: RDF quads to validate.
      * dataset: RDF dataset.
      * start_time: A timestamp used to perform all reads against a consistent version of the dataset.
    """

    def __init__(self, quads: List[Dict[str, str]], dataset: Dataset, start_time: datetime):
        super(IfExistsOperator, self).__init__()
        self._quads = quads
        self._dataset = dataset
        # becomes True as soon as one quad cannot be found (or a lookup fails)
        self._found_missing = False
        self._start_time = start_time

    def __repr__(self) -> str:
        return f"<IfExistsOperator quads={self._quads}>"

    @property
    def missing_nquads(self) -> bool:
        """Returns True if, at the time of invocation, at least one n-quad was not found in the RDF dataset."""
        return self._found_missing

    def serialized_name(self) -> str:
        """Get the name of the iterator, as used in the plan serialization protocol"""
        return "ifexists"

    def has_next(self) -> bool:
        """Return True if the iterator has more quads to validate"""
        # validation stops early once a missing quad has been detected
        return (not self._found_missing) and len(self._quads) > 0
async def next(self) -> Optional[Dict[str, str]]:
    """Validate the next quad using the RDF dataset.

    This function works in an iterator fashion, so it can be used in a pipeline of iterators.
    It may also contains `non interruptible` clauses which must
    be atomically evaluated before preemption occurs.

    Returns: always `None`

    Throws: `StopAsyncIteration` if the iterator has no more quads to validate.
    """
    if not self.has_next():
        raise StopAsyncIteration()
    triple = self._quads.pop()
    if self._dataset.has_graph(triple['graph']):
        try:
            s, p, o = triple['subject'], triple['predicate'], triple['object']
            # read against the snapshot at self._start_time so all checks see
            # a consistent version of the dataset
            iterator, _ = self._dataset.get_graph(triple['graph']).search(s, p, o, as_of=self._start_time)
            self._found_missing = not iterator.has_next()
        except Exception:
            # NOTE(review): any lookup failure is treated as "quad missing",
            # which aborts the enclosing update — confirm this is intended
            # rather than surfacing the underlying error.
            self._found_missing = True
    else:
        # unknown graph: the quad cannot exist
        self._found_missing = True
    return None
def save(self) -> str:
"""Useless for this operator, as it MUST run completely inside a quantum"""
return '' | /sage_engine-2.3.0-py3-none-any.whl/sage/query_engine/update/if_exists.py | 0.946658 | 0.488039 | if_exists.py | pypi |
from json import dumps
from typing import Dict, Iterable, List, Optional, Tuple
from xml.etree import ElementTree
def analyze_term(value: str) -> Tuple[str, str, Optional[str], Optional[str]]:
    """Analyze a RDF term and extract various information about it.

    Argument: The RDF term to analyze.

    Returns: A tuple (`value`, `type`, `extra_label`, `extra_value`) where:
      * `value` is the term value, with surrounding quotes stripped for literals.
      * `type` is the type of the term (literal or uri).
      * `extra_label` is the type of an extra element for this term (datatype or xml:lang).
      * `extra_value` is the value of an extra element for this term.

    Example:
      >>> analyze_term("<http://example.org#Anna>")
      ('<http://example.org#Anna>', 'uri', None, None)
      >>> analyze_term('"Anna"')
      ('Anna', 'literal', None, None)
      >>> analyze_term('"Anna"@en')
      ('Anna', 'literal', 'xml:lang', 'en')
      >>> analyze_term('"Anna"^^<http://datatype.org#string>')
      ('Anna', 'literal', 'datatype', 'http://datatype.org#string')
    """
    # literal case
    if value.startswith("\""):
        extra_label, extra_value = None, None
        if "\"^^<http" in value:
            # datatype in angle brackets: keep the IRI without the brackets
            index = value.rfind("\"^^<http")
            extra_label, extra_value = "datatype", value[index + 4:len(value) - 1]
            value = value[0:index + 1]
        elif "\"^^http" in value:
            # datatype written as a bare IRI
            index = value.rfind("\"^^http")
            extra_label, extra_value = "datatype", value[index + 3:]
            value = value[0:index + 1]
        elif "\"@" in value:
            # language-tagged literal
            index = value.rfind("\"@")
            extra_label, extra_value = "xml:lang", value[index + 2:]
            value = value[0:index + 1]
        # strip the surrounding double quotes from the lexical value
        return value[1:len(value) - 1], "literal", extra_label, extra_value
    else:
        # as the dataset is blank-node free, all other values are uris
        return value, "uri", None, None
def stream_json_list(iterator: Iterable[Dict[str, str]]) -> Iterable[str]:
    """A generator for streaming a list of JSON results in an HTTP response.

    Argument: An iterator which yields solutions bindings.

    Yields: Solution bindings as string-encoded JSON, with a trailing comma
    after every element except the last. Yields nothing for an empty input.
    """
    pending = None
    is_first = True
    for binding in iterator:
        if not is_first:
            # we now know `pending` is not the last element
            yield dumps(pending, separators=(',', ':')) + ','
        pending = binding
        is_first = False
    if not is_first:
        # last element: no trailing comma
        yield dumps(pending, separators=(',', ':'))
def skolemize_one(bnode: str, url: str) -> str:
    """Skolemize a blank node.

    If the input value is not a blank node, it is returned unchanged.

    Args:
      * bnode: RDF term to skolemize.
      * url: Prefix URL used for skolemization.

    Returns:
      The skolemized blank node, or the value itself if it was not a blank node.
    """
    if not bnode.startswith("_:"):
        return bnode
    # replace the "_:" prefix by a dereferenceable IRI under `url`
    return f"{url}/bnode#{bnode[2:]}"
def skolemize(bindings: Iterable[Dict[str, str]], url: str) -> Iterable[Dict[str, str]]:
    """Skolemize blank nodes in a list of solution bindings.

    Args:
      * bindings: An iterable which yields set of solution bindings to process.
      * url: Prefix URL used for skolemization.

    Yields:
      Solution bindings, where blank nodes have been skolemized using the input URL.
    """
    for binding in bindings:
        # rebuild each binding with every value passed through skolemize_one
        yield {key: skolemize_one(value, url) for key, value in binding.items()}
def ntriples_streaming(triples: Iterable[Tuple[str, str, str]]) -> Iterable[str]:
    """Serialize RDF triples in N-Triples string format in a iterable-fashion.

    Argument: An iterable which yields RDF triples to process.

    Yields: RDF triples in a string format, encoded in the N-Triples format.
    """
    for subject, predicate, obj in triples:
        # literals (starting with a double quote) are kept as-is,
        # everything else is wrapped in angle brackets as an IRI
        s_term = subject if subject.startswith("\"") else f"<{subject}>"
        o_term = obj if obj.startswith("\"") else f"<{obj}>"
        yield f"{s_term} <{predicate}> {o_term} .\n"
def binding_to_json(binding: Dict[str, str]) -> dict:
    """Format a set of solutions bindings in the W3C SPARQL JSON format.

    Argument: A set of solution bindings.

    Returns: The input set of solution bindings, encoded in the W3C SPARQL JSON format.
    """
    result = dict()
    for variable, raw_term in binding.items():
        name = variable[1:]  # strip the leading '?'
        value, term_type, extra_label, extra_value = analyze_term(raw_term.strip())
        entry = {"value": value, "type": term_type}
        if extra_label is not None:
            # either "xml:lang" or "datatype"
            entry[extra_label] = extra_value
        result[name] = entry
    return result
def w3c_json_streaming(bindings: List[Dict[str, str]], next_link: Optional[str], stats: dict, skol_url: str) -> Iterable[str]:
    """Yield a page of SaGe results in the W3C SPARQL JSON results format, so it can be sent in an HTTP response.

    Args:
      * bindings: A list of solution bindings (a materialized page: `len()` and indexing are used).
      * next_link: Link to a SaGe saved plan. Use `None` if there is no one, i.e., the query execution has completed during the quantum.
      * stats: Statistics about query execution.
      * skol_url: URL used for the skolemization of blank nodes.

    Yields:
      A page of SaGe results in the W3C SPARQL JSON results format.
    """
    hasNext = "true" if next_link is not None else "false"
    # guard against an empty page: bindings[0] would raise IndexError
    vars = list(map(lambda x: x[1:], bindings[0].keys())) if bindings else []
    # generate headers
    yield "{\"head\":{\"vars\":["
    yield ",".join(map(lambda x: f"\"{x}\"", vars))
    yield f"],\"pageSize\":{len(bindings)},\"hasNext\":{hasNext},"
    if next_link is not None:
        yield f"\"next\":\"{next_link}\","
    yield "\"stats\":" + dumps(stats, separators=(',', ':')) + "},\"results\":{\"bindings\":["
    # generate results
    b_iter = map(binding_to_json, skolemize(bindings, skol_url))
    yield from stream_json_list(b_iter)
    yield "]}}"
def raw_json_streaming(bindings: Iterable[Dict[str, str]], next_link: Optional[str], stats: dict, skol_url: str) -> Iterable[str]:
    """Yield a page of SaGe results in a non-standard JSON format, so it can be sent in an HTTP response.

    Args:
      * bindings: An iterable which yields set of solution bindings.
      * next_link: Link to a SaGe saved plan. Use `None` if there is no one, i.e., the query execution has completed during the quantum.
      * stats: Statistics about query execution.
      * skol_url: URL used for the skolemization of blank nodes.

    Yields:
      A page of SaGe results in the non-standard SaGe JSON format.
    """
    has_next_flag = "true" if next_link is not None else "false"
    yield "{\"bindings\":["
    # stream the (skolemized) bindings as a JSON array
    yield from stream_json_list(skolemize(bindings, skol_url))
    yield f"],\"pageSize\":{len(bindings)},\"hasNext\":{has_next_flag},"
    if next_link is None:
        yield "\"next\":null,"
    else:
        yield f"\"next\":\"{next_link}\","
    yield "\"stats\":" + dumps(stats, separators=(',', ':')) + "}"
def bindings_to_w3c_xml(bindings: List[Dict[str, str]], skol_url: str) -> ElementTree.Element:
    """Formats a set of bindings into SPARQL results in the W3C SPARQL XML format.

    Args:
      * bindings: A list of solution bindings (a materialized page: indexing is used).
      * skol_url: URL used for the skolemization of blank nodes.

    Returns: The input set of solution bindings, encoded in the W3C SPARQL XML format.
    """
    def convert_binding(b, root):
        # build one <result> node per set of solution bindings
        result_node = ElementTree.SubElement(root, "result")
        for variable, value in b.items():
            v_name = variable[1:]  # strip the leading '?'
            b_node = ElementTree.SubElement(result_node, "binding", name=v_name)
            value, type, extra_label, extra_value = analyze_term(value.strip())
            if type == "uri":
                uri_node = ElementTree.SubElement(b_node, "uri")
                uri_node.text = value
            elif type == "literal":
                # (fixed a duplicated assignment `literal_node = literal_node = ...`)
                literal_node = ElementTree.SubElement(b_node, "literal")
                literal_node.text = value
                if extra_label is not None:
                    # either "xml:lang" or "datatype"
                    literal_node.set(extra_label, extra_value)
        return result_node

    # guard against an empty page: bindings[0] would raise IndexError
    vars = list(map(lambda x: x[1:], bindings[0].keys())) if bindings else []
    root = ElementTree.Element("sparql", xmlns="http://www.w3.org/2005/sparql-results#")
    # build head
    head = ElementTree.SubElement(root, "head")
    for variable in vars:
        ElementTree.SubElement(head, "variable", name=variable)
    # build results
    results = ElementTree.SubElement(root, "results")
    for binding in skolemize(bindings, skol_url):
        convert_binding(binding, results)
    return root
def w3c_xml(bindings: Iterable[Dict[str, str]], next_link: Optional[str], stats: dict, skol_url: str) -> Iterable[str]:
    """Yield a page of SaGe results in the W3C SPARQL XML results format, so it can be sent in an HTTP response.

    Args:
      * bindings: An iterable which yields set of solution bindings.
      * next_link: Link to a SaGe saved plan. Use `None` if there is no one, i.e., the query execution has completed during the quantum.
      * stats: Statistics about query execution.
      * skol_url: URL used for the skolemization of blank nodes.

    Returns:
      A page of SaGe results in the W3C SPARQL XML results format.
      NOTE(review): despite the `Iterable[str]` annotation, this function
      returns a single decoded string, not a generator.
    """
    page = bindings_to_w3c_xml(bindings, skol_url)
    head = page.find("head")
    # SaGe-specific pagination controls are appended under <head>
    controls = ElementTree.SubElement(head, "controls")
    hasNext_node = ElementTree.SubElement(controls, "hasNext")
    hasNext_node.text = str(next_link is not None)
    next_node = ElementTree.SubElement(controls, "next")
    # next_link may be None, in which case <next> serializes as an empty element
    next_node.text = next_link
    # TODO include stats
return ElementTree.tostring(page, encoding="utf-8").decode("utf-8") | /sage_engine-2.3.0-py3-none-any.whl/sage/http_server/responses.py | 0.922404 | 0.470797 | responses.py | pypi |
import logging
import traceback
import uvloop
from asyncio import set_event_loop_policy
from os import environ
from sys import setrecursionlimit
from time import time
from typing import Dict, List, Optional, Tuple
from urllib.parse import urlunparse
from uuid import uuid4
from fastapi import FastAPI, HTTPException, Query
from pydantic import BaseModel, Field
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, RedirectResponse, Response, StreamingResponse
import sage.http_server.responses as responses
from sage.database.core.dataset import Dataset
from sage.database.core.yaml_config import load_config
from sage.database.descriptors import VoidDescriptor, many_void
from sage.http_server.utils import decode_saved_plan, encode_saved_plan
from sage.query_engine.iterators.loader import load
from sage.query_engine.optimizer.query_parser import parse_query
from sage.query_engine.sage_engine import SageEngine
class SagePostQuery(BaseModel):
    """Data model for the body of POST SPARQL queries"""
    # The SPARQL query to execute (required).
    query: str = Field(..., description="The SPARQL query to execute.")
    # The URI of the default RDF graph queried (required).
    defaultGraph: str = Field(..., description="The URI of the default RDF graph queried.")
    # Optional link used to resume query execution from a saved state.
    next: str = Field(None, description="(Optional) A next link used to resume query execution from a saved state.")
def choose_void_format(mimetypes):
    """Pick the VoID serialization format and response mimetype matching a request's Accept mimetypes.

    Falls back to N-Triples when no known mimetype is found.
    Returns a tuple (format name, response mimetype).
    """
    # checked in priority order
    preferences = [
        ("text/turtle", ("turtle", "text/turtle")),
        ("application/xml", ("xml", "application/xml")),
        ("application/n-quads", ("nquads", "application/n-quads")),
        ("application/trig", ("trig", "application/trig")),
    ]
    for mimetype, chosen in preferences:
        if mimetype in mimetypes:
            return chosen
    if "application/json" in mimetypes or "application/json+ld" in mimetypes:
        return "json-ld", "application/json"
    return "ntriples", "application/n-triples"
async def execute_query(query: str, default_graph_uri: str, next_link: Optional[str], dataset: Dataset) -> Tuple[List[Dict[str, str]], Optional[str], Dict[str, str]]:
    """Execute a query using the SageEngine and returns the appropriate HTTP response.

    Any failure will results in a rollback/abort on the current query execution.

    Args:
      * query: SPARQL query to execute.
      * default_graph_uri: URI of the default RDF graph to use.
      * next_link: URI to a saved plan. Can be `None` if query execution should starts from the beginning.
      * dataset: RDF dataset on which the query is executed.

    Returns:
      A tuple (`bindings`, `next_page`, `stats`) where:
      * `bindings` is a list of query results.
      * `next_page` is a link to saved query execution state. Sets to `None` if query execution completed during the time quantum.
      * `stats` are statistics about query execution.

    Throws: Any exception that have occured during query execution.
    """
    graph = None
    try:
        if not dataset.has_graph(default_graph_uri):
            raise HTTPException(status_code=404, detail=f"RDF Graph {default_graph_uri} not found on the server.")
        graph = dataset.get_graph(default_graph_uri)
        # the execution context bounds the time quantum and the page size
        context = dict()
        context['quantum'] = graph.quota
        context['max_results'] = graph.max_results
        # decode next_link or build query execution plan
        # NOTE: cardinalities stay empty when resuming from a saved plan
        cardinalities = dict()
        start = time()
        if next_link is not None:
            if dataset.is_stateless:
                # stateless mode: the saved plan is embedded in the link itself
                saved_plan = next_link
            else:
                # stateful mode: the link is a key into the plan store
                saved_plan = dataset.statefull_manager.get_plan(next_link)
            plan = load(decode_saved_plan(saved_plan), dataset, context)
        else:
            plan, cardinalities = parse_query(query, dataset, default_graph_uri, context)
        logging.info(f'loading time: {(time() - start) * 1000}ms')
        loading_time = (time() - start) * 1000
        # execute query
        engine = SageEngine()
        bindings, saved_plan, is_done, abort_reason = await engine.execute(plan, context)
        # commit or abort (if necessary)
        if abort_reason is not None:
            graph.abort()
            raise HTTPException(status_code=500, detail=f"The SPARQL query has been aborted for the following reason: '{abort_reason}'")
        else:
            graph.commit()
        start = time()
        # encode saved plan if query execution is not done yet and there was no abort
        next_page = None
        if (not is_done) and abort_reason is None:
            next_page = encode_saved_plan(saved_plan)
            if not dataset.is_stateless:
                # generate the plan ID if this is the first time we execute this plan
                plan_id = next_link if next_link is not None else str(uuid4())
                dataset.statefull_manager.save_plan(plan_id, next_page)
                next_page = plan_id
        elif is_done and (not dataset.is_stateless) and next_link is not None:
            # delete the saved plan, as it will not be reloaded anymore
            dataset.statefull_manager.delete_plan(next_link)
        logging.info(f'export time: {(time() - start) * 1000}ms')
        exportTime = (time() - start) * 1000
        stats = {"cardinalities": cardinalities, "import": loading_time, "export": exportTime}
        return (bindings, next_page, stats)
    except Exception as err:
        # abort all ongoing transactions, then forward the exception to the main loop
        logging.error(traceback.format_exc())
        if graph is not None:
            graph.abort()
        raise err
def create_response(mimetypes: List[str], bindings: List[Dict[str, str]], next_page: Optional[str], stats: dict, skol_url: str) -> Response:
    """Create an HTTP response for the results of SPARQL query execution.

    Args:
      * mimetypes: mimetypes from the input HTTP request.
      * bindings: list of query results.
      * next_page: Link to a SaGe saved plan. Use `None` if there is no one, i.e., the query execution has completed during the quantum.
      * stats: Statistics about query execution.
      * skol_url: URL used for the skolemization of blank nodes.

    Returns:
      An HTTP response built from the input mimetypes and the SPARQL query results.
    """
    if "application/json" in mimetypes:
        iterator = responses.raw_json_streaming(bindings, next_page, stats, skol_url)
        return StreamingResponse(iterator, media_type="application/json")
    elif "application/sparql-results+json" in mimetypes:
        iterator = responses.w3c_json_streaming(bindings, next_page, stats, skol_url)
        return StreamingResponse(iterator, media_type="application/json")
    elif "application/xml" in mimetypes or "application/sparql-results+xml" in mimetypes:
        # bugfix: w3c_xml requires the skolemization URL as its 4th argument;
        # calling it with only three arguments raised a TypeError for every
        # XML-format request
        xml_payload = responses.w3c_xml(bindings, next_page, stats, skol_url)
        return Response(xml_payload, media_type="application/xml")
    # default: non-standard plain JSON response
    return JSONResponse({
        "bindings": bindings,
        "next": next_page,
        "stats": stats
    })
def run_app(config_file: str) -> FastAPI:
    """Create the HTTP server, compatible with uvicorn/gunicorn.

    Argument: SaGe configuration file, in YAML format.

    Returns: The FastAPI HTTP application.
    """
    # enable uvloop for SPARQL query processing
    set_event_loop_policy(uvloop.EventLoopPolicy())
    # set recursion depth (due to pyparsing issues)
    setrecursionlimit(3000)
    # create the HTTP server & activate CORS
    app = FastAPI()
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    # Build the RDF dataset from the configuration file
    dataset = load_config(config_file)

    @app.get("/")
    async def root():
        return "The SaGe SPARQL query server is running!"

    @app.get("/sparql")
    async def sparql_get(
        request: Request,
        query: str = Query(..., description="The SPARQL query to execute."),
        default_graph_uri: str = Query(..., alias="default-graph-uri", description="The URI of the default RDF graph queried."),
        next_link: str = Query(None, alias="next", description="(Optional) A next link used to resume query execution from a saved state.")
    ):
        """Execute a SPARQL query using the Web Preemption model"""
        try:
            mimetypes = request.headers['accept'].split(",")
            # server URL = scheme + host, without path/query/fragment;
            # used as the skolemization prefix
            server_url = urlunparse(request.url.components[0:3] + (None, None, None))
            bindings, next_page, stats = await execute_query(query, default_graph_uri, next_link, dataset)
            return create_response(mimetypes, bindings, next_page, stats, server_url)
        except HTTPException as err:
            raise err
        except Exception as err:
            logging.error(err)
            raise HTTPException(status_code=500, detail=str(err))

    @app.post("/sparql")
    async def sparql_post(request: Request, item: SagePostQuery):
        """Execute a SPARQL query using the Web Preemption model"""
        try:
            start = time()
            mimetypes = request.headers['accept'].split(",")
            server_url = urlunparse(request.url.components[0:3] + (None, None, None))
            exec_start = time()
            bindings, next_page, stats = await execute_query(item.query, item.defaultGraph, item.next, dataset)
            logging.info(f'query execution time: {(time() - exec_start) * 1000}ms')
            serialization_start = time()
            response = create_response(mimetypes, bindings, next_page, stats, server_url)
            logging.info(f'serialization time: {(time() - serialization_start) * 1000}ms')
            logging.info(f'execution time: {(time() - start) * 1000}ms')
            return response
        except HTTPException as err:
            raise err
        except Exception as err:
            logging.error(err)
            raise HTTPException(status_code=500, detail=str(err))

    @app.get("/void/", description="Get the VoID description of the SaGe server")
    async def server_void(request: Request):
        """Describe all RDF datasets hosted by the Sage endpoint"""
        try:
            mimetypes = request.headers['accept'].split(",")
            url = urlunparse(request.url.components[0:3] + (None, None, None))
            if url.endswith('/'):
                url = url[0:len(url) - 1]
            # negotiate the VoID serialization from the Accept header
            void_format, res_mimetype = choose_void_format(mimetypes)
            description = many_void(url, dataset, void_format)
            return Response(description, media_type=res_mimetype)
        except Exception as err:
            logging.error(err)
            raise HTTPException(status_code=500, detail=str(err))

    @app.get("/.well-known/void/")
    async def well_known():
        """Alias for /void/"""
        return RedirectResponse(url="/void/")

    @app.get("/void/{graph_name}", description="Get the VoID description of a RDF Graph hosted by the SaGe server")
    async def graph_void(request: Request, graph_name: str = Field(..., description="Name of the RDF Graph")):
        """Get the VoID description of a RDF Graph hosted by the SaGe server"""
        # NOTE(review): assumes get_graph returns None for an unknown graph —
        # confirm it does not raise instead (execute_query guards with has_graph).
        graph = dataset.get_graph(graph_name)
        if graph is None:
            raise HTTPException(status_code=404, detail=f"RDF Graph {graph_name} not found on the server.")
        try:
            mimetypes = request.headers['accept'].split(",")
            url = urlunparse(request.url.components[0:3] + (None, None, None))
            if url.endswith('/'):
                url = url[0:len(url) - 1]
            descriptor = VoidDescriptor(url, graph)
            void_format, res_mimetype = choose_void_format(mimetypes)
            return Response(descriptor.describe(void_format), media_type=res_mimetype)
        except Exception as err:
            logging.error(err)
            raise HTTPException(status_code=500, detail=str(err))

    return app
if 'SAGE_CONFIG_FILE' in environ:
config_file = environ['SAGE_CONFIG_FILE']
app = run_app(config_file)
elif __name__ == "__main__":
raise RuntimeError("You cannot run the script server.py as a plain script. Please the use the SaGe CLI to start you own server.") | /sage_engine-2.3.0-py3-none-any.whl/sage/http_server/server.py | 0.757705 | 0.156491 | server.py | pypi |
_name = "Sage Catalog"
from sage.groups.affine_gps import catalog as affine_groups_catalog
from sage.groups.groups_catalog import presentation as presentation_groups_catalog
from sage.groups.perm_gps import permutation_groups_catalog as permutation_groups_catalog
from sage.groups.matrix_gps import catalog as matrix_groups_catalog
from sage.groups.misc_gps import misc_groups_catalog
from sage.algebras import catalog as algebras_catalog
from sage.combinat.posets.poset_examples import Posets as posets_catalog
from sage.monoids import all as monoids_catalog
from sage.graphs.graph_generators import graphs as graphs_catalog
from sage.modules import all as modules_catalog
from sage.matroids import catalog as matroids_catalog
from sage.combinat.crystals import catalog as crystals_catalog
from sage.coding import codes_catalog
from sage.game_theory.catalog import normal_form_games as games_catalog
from sage.combinat.words import word_generators as words_catalog
class fields_catalog:
    r"""A catalog of fields.

    The class body only imports Sage field constructors so that they become
    attributes of the catalog (e.g. ``fields_catalog.FiniteField``).
    """
    from sage.rings.finite_rings.finite_field_constructor import FiniteField
    from sage.rings.complex_field import ComplexField
    from sage.rings.rational_field import RationalField
    from sage.rings.real_mpfr import RealField
    from sage.rings.qqbar import AlgebraicRealField, AlgebraicField
presentation_groups_catalog._name = "Groups given by presentation"
permutation_groups_catalog._name = "Permutation groups"
matrix_groups_catalog._name = "Matrix groups"
affine_groups_catalog._name = "Affine groups"
misc_groups_catalog._name = "Misc groups"
monoids_catalog._name = "Monoids"
fields_catalog._name = "Fields"
algebras_catalog._name = "Algebras"
modules_catalog._name = "Modules"
graphs_catalog._name = "Graphs"
posets_catalog._name = "Posets"
crystals_catalog._name = "Crystals"
codes_catalog._name = "Codes"
matroids_catalog._name = "Matroids"
games_catalog._name = "Games"
words_catalog._name = "Words"
sage_catalogs = [
presentation_groups_catalog,
permutation_groups_catalog,
matrix_groups_catalog,
affine_groups_catalog,
misc_groups_catalog,
monoids_catalog,
fields_catalog,
algebras_catalog,
modules_catalog,
graphs_catalog,
posets_catalog,
crystals_catalog,
codes_catalog,
matroids_catalog,
games_catalog,
words_catalog,
] | /sage-explorer-0.5.3.tar.gz/sage-explorer-0.5.3/sage_explorer/_sage_catalog.py | 0.738198 | 0.310051 | _sage_catalog.py | pypi |
class GraphicalSegmentInPolygon:
    def __init__(self, segment, graphical_surface):
        r"""
        Create a graphical segment from a graphical surface and a SegmentInPolygon.
        """
        self._gs = graphical_surface
        self._seg = segment
        label = self.polygon_label()
        # map the start point into the graphical polygon's coordinates
        self._start = self._gs.graphical_polygon(label).transform(
            segment.start().point()
        )
        if self._seg.is_edge():
            # an edge segment ends at the next vertex of the polygon
            self._end = self._gs.graphical_polygon(label).transform(
                self._seg.start().polygon().vertex(self._seg.edge() + 1)
            )
        else:
            self._end = self._gs.graphical_polygon(label).transform(
                segment.end().point()
            )

    def polygon_label(self):
        # label of the polygon containing the underlying segment
        return self._seg.polygon_label()

    def start(self):
        r"""Return the start point as a RDF Vector (RDF = Sage's real double field)."""
        return self._start

    def end(self):
        r"""Return the end point as a RDF Vector (RDF = Sage's real double field)."""
        return self._end

    def plot(self, **options):
        r"""
        Plot this segment as a 2D line; extra keyword options are forwarded
        to :func:`line2d`.

        EXAMPLES::

            sage: from flatsurf import similarity_surfaces
            sage: s = similarity_surfaces.example()
            sage: v = s.tangent_vector(0, (1,-0.5), (3,-1))
            sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
            sage: seg = SegmentInPolygon(v)
            sage: from flatsurf.graphical.straight_line_trajectory import GraphicalSegmentInPolygon
            sage: gseg = GraphicalSegmentInPolygon(seg, s.graphical_surface())
            sage: gseg.plot()
            ...Graphics object consisting of 1 graphics primitive
            sage: gseg.plot(color='red')
            ...Graphics object consisting of 1 graphics primitive
        """
        if self._gs.is_visible(self.polygon_label()):
            from sage.plot.line import line2d
            return line2d([self.start(), self.end()], **options)
        else:
            # segment lies in a hidden polygon: return an empty plot
            from sage.plot.graphics import Graphics
            return Graphics()
class GraphicalStraightLineTrajectory:
    r"""
    Allows for the rendering of a straight-line trajectory through a graphical surface.
    """

    def __init__(self, trajectory, graphical_surface=None):
        if graphical_surface is None:
            graphical_surface = trajectory.surface().graphical_surface()
        elif trajectory.surface() != graphical_surface.get_surface():
            raise ValueError
        self._gs = graphical_surface
        self._traj = trajectory
        # One graphical segment per combinatorial segment of the trajectory.
        self._segments = [
            GraphicalSegmentInPolygon(segment, self._gs)
            for segment in self._traj.segments()
        ]

    def plot(self, **options):
        r"""
        Plot the trajectory by accumulating the plots of its segments.

        The options are forwarded to each segment's ``plot``.

        EXAMPLES::

            sage: from flatsurf import similarity_surfaces
            sage: s = similarity_surfaces.example()
            sage: gs = s.graphical_surface()
            sage: K.<sqrt2>=NumberField(x^2-2,embedding=1)
            sage: v = s.tangent_vector(0, (1,-1), (sqrt2,-1),ring=K)
            sage: traj = v.straight_line_trajectory()
            sage: traj.flow(100)
            sage: traj.flow(-5)
            sage: gtraj = traj.graphical_trajectory(gs)
            sage: gs.plot() + gtraj.plot()
            ...Graphics object consisting of 119 graphics primitives
        """
        from sage.plot.graphics import Graphics

        return sum(
            (segment.plot(**options) for segment in self._segments), Graphics()
        )
from sage.rings.real_double import RDF
from sage.modules.free_module import VectorSpace
from sage.plot.polygon import polygon2d
from sage.plot.text import text
from sage.plot.line import line2d
from sage.plot.point import point2d
from flatsurf.geometry.similarity import SimilarityGroup
# Double-precision plane: graphical coordinates throughout this module are
# vectors in RDF^2.
V = VectorSpace(RDF, 2)
class GraphicalPolygon:
    r"""
    Stores data necessary to draw one of the polygons from a surface.

    Note that this involves converting between geometric coordinates, defined for the SimilaritySurface,
    and graphical coordinates. We do this with a similarity (called transformation below).
    """

    def __init__(self, polygon, transformation=None):
        r"""
        INPUT:

        - ``polygon`` -- the actual polygon

        - ``transformation`` -- a similarity to be applied to the polygon to
          obtain graphical coordinates, or ``None`` for the identity
        """
        self._p = polygon
        # the following stores _transformation and _v
        self.set_transformation(transformation)

    def copy(self):
        r"""
        Return a copy of this GraphicalPolygon.
        """
        return GraphicalPolygon(self._p, self.transformation())

    def __repr__(self):
        r"""
        String representation.

        EXAMPLES::

            sage: from flatsurf import similarity_surfaces
            sage: s = similarity_surfaces.example()
            sage: gs = s.graphical_surface()
            sage: gs.graphical_polygon(0)
            GraphicalPolygon with vertices [(0.0, 0.0), (2.0, -2.0), (2.0, 0.0)]
        """
        return "GraphicalPolygon with vertices {}".format(self._v)

    def base_polygon(self):
        r"""
        Return the polygon of the surface in geometric coordinates.
        """
        return self._p

    def transformed_vertex(self, e):
        r"""
        Return the graphical coordinates of the vertex in double precision.
        """
        return self._transformation(self._p.vertex(e))

    def xmin(self):
        r"""
        Return the minimal x-coordinate of a vertex (in graphical coordinates).
        """
        return min([v[0] for v in self._v])

    def ymin(self):
        r"""
        Return the minimal y-coordinate of a vertex (in graphical coordinates).
        """
        return min([v[1] for v in self._v])

    def xmax(self):
        r"""
        Return the maximal x-coordinate of a vertex (in graphical coordinates).
        """
        return max([v[0] for v in self._v])

    def ymax(self):
        r"""
        Return the maximal y-coordinate of a vertex (in graphical coordinates).
        """
        return max([v[1] for v in self._v])

    def bounding_box(self):
        r"""
        Return the quadruple (x1,y1,x2,y2) where x1 and y1 are the minimal
        x and y coordinates and x2 and y2 are the maximal x and y coordinates.
        """
        return self.xmin(), self.ymin(), self.xmax(), self.ymax()

    def transform(self, point, double_precision=True):
        r"""
        Return the transformation of point into graphical coordinates.

        By default returned point is in double precision. This can be changed
        to an exact representation by setting `double_precision` to False.
        """
        # NOTE(review): set_transformation() always stores a similarity
        # (the identity when given None), so the first branch looks
        # unreachable -- confirm before simplifying.
        if self._transformation is None:
            if double_precision:
                return V(point)
            else:
                return point
        else:
            if double_precision:
                return V(self._transformation(point))
            else:
                return self._transformation(point)

    def transform_back(self, point):
        r"""
        Return the transformation of point from graphical coordinates to the geometric coordinates
        of the underlying SimilaritySurface.
        """
        if self._transformation is None:
            return point
        else:
            # Apply the inverse similarity.
            return (~self._transformation)(point)

    def contains(self, point):
        r"""
        Return whether ``point``, given in graphical coordinates, lies in
        the polygon.
        """
        return self._p.contains_point(self.transform_back(point))

    def transformation(self):
        r"""
        Return the transformation (similarity) which converts from
        mathematical to graphical coordinates.
        """
        return self._transformation

    def set_transformation(self, transformation=None):
        r"""Set the transformation to be applied to the polygon."""
        if transformation is None:
            self._transformation = SimilarityGroup(self._p.base_ring()).one()
        else:
            self._transformation = transformation
        # recompute the location of vertices:
        self._v = [V(self._transformation(v)) for v in self._p.vertices()]

    def plot_polygon(self, **options):
        r"""
        Returns only the filled polygon.

        Options are processed as in sage.plot.polygon.polygon2d except
        that by default axes=False.
        """
        if "axes" not in options:
            options["axes"] = False
        return polygon2d(self._v, **options)

    def plot_label(self, label, **options):
        r"""
        Write the label of the polygon as text.

        Set ``position`` to a pair (x,y) to determine where
        the label is drawn (in graphical coordinates). If this parameter
        is not provided, the label is positioned in the baricenter
        of the polygon.

        Other options are processed as in sage.plot.text.text.
        """
        if "position" in options:
            return text(str(label), options.pop("position"), **options)
        else:
            # Default position: the vertex average (baricenter).
            return text(str(label), sum(self._v) / len(self._v), **options)

    def plot_edge(self, e, **options):
        r"""
        Plot the edge e, with e a number 0,...,n-1 with n being the number
        of edges of the polygon.

        Options are processed as in sage.plot.line.line2d.
        """
        return line2d(
            [self._v[e], self._v[(e + 1) % len(self.base_polygon().vertices())]],
            **options
        )

    def plot_edge_label(self, i, label, **options):
        r"""
        Write label on the i-th edge.

        A parameter ``t`` in the interval [0,1] can be provided to position the
        label along the edge. A value of t=0 will position it at the starting
        vertex and t=1 will position it at the terminating vertex. Defaults to
        0.3.

        If the parameter ``position`` can take the values "outside", "inside"
        or "edge" to indicate if the label should be drawn outside the polygon,
        inside the polygon or on the edge. Defaults to "inside".

        A ``push_off`` perturbation parameter controls how far off the edge the label is pushed.

        Other options are processed as in sage.plot.text.text.
        """
        # Edge vector from vertex i to vertex i+1, in graphical coordinates.
        e = self._v[(i + 1) % len(self.base_polygon().vertices())] - self._v[i]
        if "position" in options:
            if options["position"] not in ["inside", "outside", "edge"]:
                raise ValueError(
                    "The 'position' parameter must take the value 'inside', 'outside', or 'edge'."
                )
            pos = options.pop("position")
        else:
            pos = "inside"
        # Choose text alignment from the edge direction so the label does
        # not overlap the edge; explicit alignments passed by the caller win.
        if pos == "outside":
            # position outside polygon.
            if "horizontal_alignment" in options:
                pass
            elif e[1] > 0:
                options["horizontal_alignment"] = "left"
            elif e[1] < 0:
                options["horizontal_alignment"] = "right"
            else:
                options["horizontal_alignment"] = "center"
            if "vertical_alignment" in options:
                pass
            elif e[0] > 0:
                options["vertical_alignment"] = "top"
            elif e[0] < 0:
                options["vertical_alignment"] = "bottom"
            else:
                options["vertical_alignment"] = "center"
        elif pos == "inside":
            # position inside polygon (alignments mirror the "outside" case).
            if "horizontal_alignment" in options:
                pass
            elif e[1] < 0:
                options["horizontal_alignment"] = "left"
            elif e[1] > 0:
                options["horizontal_alignment"] = "right"
            else:
                options["horizontal_alignment"] = "center"
            if "vertical_alignment" in options:
                pass
            elif e[0] < 0:
                options["vertical_alignment"] = "top"
            elif e[0] > 0:
                options["vertical_alignment"] = "bottom"
            else:
                options["vertical_alignment"] = "center"
        else:
            # centered on edge.
            if "horizontal_alignment" in options:
                pass
            else:
                options["horizontal_alignment"] = "center"
            if "vertical_alignment" in options:
                pass
            else:
                options["vertical_alignment"] = "center"
        if "t" in options:
            t = RDF(options.pop("t"))
        else:
            t = 0.3
        if "push_off" in options:
            push_off = RDF(options.pop("push_off"))
        else:
            push_off = 0.03
        if pos == "outside":
            # Flip the perturbation direction for labels outside the polygon.
            push_off = -push_off
        # Now push_off stores the amount it should be pushed into the polygon
        # no is the left-pointing normal of the edge vector e.
        no = V((-e[1], e[0]))
        return text(label, self._v[i] + t * e + push_off * no, **options)

    def plot_zero_flag(self, **options):
        r"""
        Draw a line segment from the zero vertex toward the baricenter.

        A real parameter ``t`` can be provided. If t=1, then the segment will
        go all the way to the baricenter. The value of ``t`` is linear in the
        length of the segment. Defaults to t=0.5.

        Other options are processed as in sage.plot.line.line2d.
        """
        if "t" in options:
            t = RDF(options.pop("t"))
        else:
            t = 0.5
        return line2d(
            [self._v[0], self._v[0] + t * (sum(self._v) / len(self._v) - self._v[0])],
            **options
        )

    def plot_points(self, points, **options):
        r"""
        Plot the points in the given collection of points.

        The options are passed to point2d.

        If no "zorder" option is provided then we set "zorder" to 50.

        By default coordinates are taken in the underlying surface. Call with coordinates="graphical"
        to use graphical coordinates instead.
        """
        if "zorder" not in options:
            options["zorder"] = 50
        if "coordinates" not in options:
            # Geometric coordinates: convert each point to graphical ones.
            points2 = [self.transform(point) for point in points]
        elif options["coordinates"] == "graphical":
            points2 = [V(point) for point in points]
            del options["coordinates"]
        else:
            raise ValueError("Invalid value of 'coordinates' option")
        return point2d(points=points2, **options)
from sage.groups.group import Group
from sage.categories.groups import Groups
from sage.structure.unique_representation import UniqueRepresentation
from sage.structure.element import MultiplicativeGroupElement
from sage.algebras.quatalg.quaternion_algebra import QuaternionAlgebra
from sage.rings.integer_ring import ZZ
from flatsurf.geometry.origami import AbstractOrigami
# Hamilton quaternions (over QQ) and their standard generators i, j, k;
# group elements below store their r and q components in this algebra.
_Q = QuaternionAlgebra(-1, -1)
_i, _j, _k = _Q.gens()
class MegaWollmilchsauGroupElement(MultiplicativeGroupElement):
    r"""
    Element of the :class:`MegaWollmilchsauGroup`.

    An element is a triple ``[i, r, q]`` with ``i`` an integer, ``r`` a
    quaternion and ``q`` a unit quaternion; the product twists the integer
    component by the pairing :meth:`wedge`.
    """

    @staticmethod
    def quat_to_tuple(r):
        r"""Convert an element in the quaternion algebra to a quadruple"""
        # Plain integers (e.g. the 0 and 1 used for the identity element)
        # do not support coefficient indexing.
        if isinstance(r, int):
            return (r, 0, 0, 0)
        else:
            return (r[0], r[1], r[2], r[3])

    @staticmethod
    def wedge(r1, r2):
        r"""Wedge two quaterions. Returns an integer."""
        x = MegaWollmilchsauGroupElement.quat_to_tuple(r1)
        y = MegaWollmilchsauGroupElement.quat_to_tuple(r2)
        return -x[0] * y[3] + x[1] * y[2] - x[2] * y[1] + x[3] * y[0]

    def __init__(self, parent, i, r, q):
        r"""
        INPUT:

        - ``parent`` -- the :class:`MegaWollmilchsauGroup`

        - ``i`` -- an integer

        - ``r``, ``q`` -- quaternions; ``q`` should be one of the eight
          units ``1, -1, i, -i, j, -j, k, -k`` (not checked)
        """
        if parent is None:
            raise ValueError("The parent must be provided")
        # I should assert that the element lives in the domain of the group.
        if i not in ZZ:
            raise ValueError
        if r not in _Q:
            raise ValueError
        # Actually q should be in {1,-1,-i,i,j,-j,k,-k}. I'm not testing for that.
        if q not in _Q:
            raise ValueError
        # There is one more condition. The group doesn't have full image...
        self._i = i
        self._r = r
        self._q = q
        self._parent = parent
        MultiplicativeGroupElement.__init__(self, parent)

    def _repr_(self):
        return "[" + str(self._i) + ", " + str(self._r) + ", " + str(self._q) + "]"

    def _cmp_(self, other):
        r"""
        Compare lexicographically on ``(i, r, q)``, returning -1, 0 or 1.
        """
        # Bug fix: the previous code read e.g.
        #     (self._i > other._i - self._i < other._i)
        # which Python parses as a chained comparison, not as the intended
        # cmp idiom ``(x > y) - (x < y)``.  Compare coefficient tuples
        # instead (the quaternions themselves carry no total order).
        lhs = (
            self._i,
            MegaWollmilchsauGroupElement.quat_to_tuple(self._r),
            MegaWollmilchsauGroupElement.quat_to_tuple(self._q),
        )
        rhs = (
            other._i,
            MegaWollmilchsauGroupElement.quat_to_tuple(other._r),
            MegaWollmilchsauGroupElement.quat_to_tuple(other._q),
        )
        return (lhs > rhs) - (lhs < rhs)

    def _mul_(self, m):
        # (i1, r1, q1) * (i2, r2, q2)
        #   = (i1 + i2 + <r1, q1 r2>, r1 + q1 r2, q1 q2)
        return MegaWollmilchsauGroupElement(
            self._parent,
            self._i
            + m._i
            + MegaWollmilchsauGroupElement.wedge(self._r, self._q * m._r),
            self._r + self._q * m._r,
            self._q * m._q,
        )

    def __invert__(self):
        q1 = ~self._q
        r1 = -(q1 * self._r)
        # Cancel the wedge contribution picked up by self * ~self.
        i1 = -(self._i + MegaWollmilchsauGroupElement.wedge(r1, q1 * self._r))
        return MegaWollmilchsauGroupElement(self._parent, i1, r1, q1)

    def _div_(self, m):
        return self._mul_(m.__invert__())

    def __hash__(self):
        # Hash via the coefficient tuples; quaternion elements are hashed
        # through the same tuples used for comparison.
        return (
            67 * hash(self._i)
            + 23 * hash(MegaWollmilchsauGroupElement.quat_to_tuple(self._r))
            - 17 * hash(MegaWollmilchsauGroupElement.quat_to_tuple(self._q))
        )
class MegaWollmilchsauGroup(UniqueRepresentation, Group):
    r"""
    The group labelling the squares of the MegaWollmilchsau origami.
    """

    Element = MegaWollmilchsauGroupElement

    def _element_constructor_(self, *args, **kwds):
        # A single positional argument is unwrapped; anything else is
        # forwarded verbatim to the element class.
        if len(args) == 1:
            return self.element_class(self, args[0], **kwds)
        return self.element_class(self, *args, **kwds)

    def __init__(self):
        Group.__init__(self, category=Groups().Infinite())

    def _repr_(self):
        return "MegaWollmilchsauGroup"

    def a(self):
        r"""Return the first generator, ``[0, 1, i]``."""
        return self.element_class(self, 0, 1, _i)

    def b(self):
        r"""Return the second generator, ``[0, 1, j]``."""
        return self.element_class(self, 0, 1, _j)

    def one(self):
        r"""Return the identity element ``[0, 0, 1]``."""
        return self.element_class(self, 0, 0, 1)

    def gens(self):
        return self.a(), self.b()

    def is_abelian(self):
        return False

    def _an_element_(self):
        return self.a()

    def some_elements(self):
        return [self.a(), self.b()]

    def _test_relations(self, **options):
        # Check the defining relations: each of these words has order
        # dividing 4, while the commutator squared is non-trivial.
        tester = self._tester(**options)
        a, b = self.gens()
        identity = self.one()
        for word in (a, b, a * b, a / b, a * a * b, a * a / b):
            tester.assertEqual(word**4, identity)
        tester.assertNotEqual((a * b / a / b) ** 2, identity)
class MegaWollmilchsau(AbstractOrigami):
    r"""
    The MegaWollmilchsau origami, with squares indexed by the elements of
    :class:`MegaWollmilchsauGroup`; moving between squares multiplies the
    label by a generator (or its inverse) on the left.
    """

    def __init__(self):
        group = MegaWollmilchsauGroup()
        self._G = self._domain = group
        self._a, self._b = group.gens()
        self._ai = ~self._a
        self._bi = ~self._b

    def right(self, label):
        return self._a * label

    def left(self, label):
        return self._ai * label

    def up(self, label):
        return self._b * label

    def down(self, label):
        return self._bi * label

    def _repr_(self):
        return "MegaWollmilchsau Origami"
from sage.misc.cachefunc import cached_method
from sage.structure.element import MultiplicativeGroupElement, parent
from sage.structure.unique_representation import UniqueRepresentation
from sage.categories.groups import Groups
from sage.all import Rings
from sage.modules.free_module_element import vector
from sage.groups.group import Group
from sage.rings.integer import Integer
from sage.rings.integer_ring import ZZ
from sage.modules.free_module_element import FreeModuleElement
from sage.structure.element import is_Matrix
# Small integer constants used throughout; ZZ_1/ZZ_m1 are the two valid
# values of a similarity's orientation sign.
ZZ_0 = Integer(0)
ZZ_1 = Integer(1)
ZZ_m1 = -ZZ_1
class Similarity(MultiplicativeGroupElement):
    r"""
    Class for a similarity of the plane with possible reflection.

    Construct the similarity (x,y) mapsto (ax-by+s,bx+ay+t) if sign=1,
    and (ax+by+s,bx-ay+t) if sign=-1
    """

    def __init__(self, p, a, b, s, t, sign):
        r"""
        Construct the similarity (x,y) mapsto (ax-by+s,bx+ay+t) if sign=1,
        and (ax+by+s,bx-ay+t) if sign=-1

        INPUT:

        - ``p`` -- the parent :class:`SimilarityGroup`

        - ``a``, ``b``, ``s``, ``t`` -- elements of the base ring of ``p``

        - ``sign`` -- ``1`` or ``-1`` in ``ZZ``; ``-1`` encodes an
          orientation-reversing similarity
        """
        if p is None:
            raise ValueError("The parent must be provided")
        # The coefficients must already be elements of the base ring (no
        # coercion is attempted here).
        if parent(a) is not p.base_ring():
            raise ValueError("wrong parent for a")
        if parent(b) is not p.base_ring():
            raise ValueError("wrong parent for b")
        if parent(s) is not p.base_ring():
            raise ValueError("wrong parent for s")
        if parent(t) is not p.base_ring():
            raise ValueError("wrong parent for t")
        if parent(sign) is not ZZ or not sign.is_unit():
            raise ValueError("sign must be either 1 or -1.")
        self._a = a
        self._b = b
        self._s = s
        self._t = t
        self._sign = sign
        MultiplicativeGroupElement.__init__(self, p)

    def sign(self):
        r"""Return 1 if this similarity preserves orientation and -1 otherwise."""
        return self._sign

    def is_translation(self):
        r"""
        Return whether this element is a translation.

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,2)).is_translation()
            True
            sage: S((1,0,3,-1/2)).is_translation()
            True
            sage: S((0,1,0,0)).is_translation()
            False
        """
        return self._sign.is_one() and self._a.is_one() and self._b.is_zero()

    def is_half_translation(self):
        r"""
        Return whether this element is a half translation.

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,2)).is_half_translation()
            True
            sage: S((-1, 0, 0, 2)).is_half_translation()
            True
            sage: S((0,1,0,0)).is_half_translation()
            False
        """
        # Linear part is +id or -id (a = +-1, b = 0), orientation preserving.
        return (
            self._sign.is_one()
            and (self._a.is_one() or ((-self._a).is_one()))
            and self._b.is_zero()
        )

    def is_orientable(self):
        r"""Return whether this similarity preserves orientation."""
        return self._sign.is_one()

    def is_rotation(self):
        r"""
        Check whether this element is a rotation

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,2)).is_rotation()
            False
            sage: S((0,-1,0,0)).is_rotation()
            True
            sage: S.one().is_rotation()
            True
        """
        return self.is_one() or (self.det().is_one() and not self.is_translation())

    def is_isometry(self):
        r"""
        Check whether this element is an isometry

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S.one().is_isometry()
            True
            sage: S((0,1,0,0)).is_isometry()
            True
            sage: S((0,1,0,0,-1)).is_isometry()
            True
            sage: S((1,1,0,0)).is_isometry()
            False
            sage: S((3,-1/2)).is_isometry()
            True
        """
        det = self.det()
        return det.is_one() or (-det).is_one()

    def det(self):
        r"""
        Return the determinant of this element
        """
        return self._sign * (self._a * self._a + self._b * self._b)

    def _mul_(self, right):
        r"""
        Composition

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,2)) * S((3,-5)) == S((4,-3))
            True
            sage: from itertools import product
            sage: a1 = S((0,2,0,0,1))
            sage: a2 = S((1,0,0,0,-1))
            sage: a3 = S((1,1,0,0))
            sage: a4 = S((1,0,-1,1))
            sage: a5 = S((2,-1,3/5,2/3,-1))
            sage: for g1,g2,g3 in product([a1,a2,a3,a4,a5], repeat=3):
            ....:     assert g1.matrix()*g2.matrix() == (g1*g2).matrix()
            ....:     assert (g1*g2).matrix()*g3.matrix() == (g1*g2*g3).matrix()
        """
        # Coefficients of the composite self o right; the _sign factors
        # account for the possible reflection of self.
        a = self._a * right._a - self._sign * self._b * right._b
        b = self._b * right._a + self._sign * self._a * right._b
        s = self._a * right._s - self._sign * self._b * right._t + self._s
        t = self._b * right._s + self._sign * self._a * right._t + self._t
        sign = self._sign * right._sign
        P = self.parent()
        return P.element_class(P, a, b, s, t, sign)

    def __invert__(self):
        r"""
        Invert a similarity.

        TESTS::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: from itertools import product
            sage: for a in [S((0,2,0,0,1)), S((1,0,0,0,-1)), S((1,1,0,0)),
            ....:           S((1,0,-1,1)), S((2,-1,3/5,2/3,-1))]:
            ....:     assert (a*~a).is_one() and (~a*a).is_one()
        """
        P = self.parent()
        sign = self._sign
        det = self.det()
        a = sign * self._a / det
        b = -self._b / det
        return P.element_class(
            P,
            a,
            b,
            -a * self._s + sign * b * self._t,
            -b * self._s - sign * a * self._t,
            sign,
        )

    def _div_(self, right):
        # inv_* are det(right) times the coefficients of ~right; the
        # determinant is divided out when composing below.
        det = right.det()
        inv_a = right._sign * right._a
        inv_b = -right._b
        inv_s = -right._sign * right._a * right._s - right._sign * right._b * right._t
        inv_t = right._b * right._s - right._a * right._t
        a = (self._a * inv_a - self._sign * self._b * inv_b) / det
        b = (self._b * inv_a + self._sign * self._a * inv_b) / det
        s = (self._a * inv_s - self._sign * self._b * inv_t) / det + self._s
        t = (self._b * inv_s + self._sign * self._a * inv_t) / det + self._t
        return self.parent().element_class(
            self.parent(),
            self.base_ring()(a),
            self.base_ring()(b),
            self.base_ring()(s),
            self.base_ring()(t),
            self._sign * right._sign,
        )

    def __hash__(self):
        # Combine hashes of all defining coefficients.
        return (
            73 * hash(self._a)
            - 19 * hash(self._b)
            + 13 * hash(self._s)
            + 53 * hash(self._t)
            + 67 * hash(self._sign)
        )

    def __call__(self, w, ring=None):
        r"""
        Return the image of ``w`` under the similarity. Here ``w`` may be a
        convex polygon or a vector (or something that can be indexed in the
        same way as a vector). If a ring is provided, the objects returned will
        be defined over this ring.

        TESTS::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(AA)
            sage: a = S((1,-1,AA(2).sqrt(),0))
            sage: a((1,2))
            (4.414213562373095?, 1)
            sage: a.matrix()*vector((1,2,1))
            (4.414213562373095?, 1, 1)

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: SG = SimilarityGroup(QQ)
            sage: from flatsurf import Polygon
            sage: p = Polygon(vertices=[(0, 0), (1, 0), (1, 1), (0, 1)])
            sage: g = SG.an_element()**2
            sage: g
            (x, y) |-> (25*x + 4, 25*y + 10)
            sage: g(p)
            Polygon(vertices=[(4, 10), (29, 10), (29, 35), (4, 35)])
            sage: g(p, ring=AA).category()
            Category of convex simple euclidean polygons over Algebraic Real Field
        """
        if ring is not None and ring not in Rings():
            raise TypeError("ring must be a ring")

        from flatsurf.geometry.polygon import EuclideanPolygon

        if isinstance(w, EuclideanPolygon) and w.is_convex():
            # Apply the similarity vertex by vertex.
            if ring is None:
                ring = self.parent().base_ring()
            from flatsurf import Polygon

            try:
                return Polygon(vertices=[self(v) for v in w.vertices()], base_ring=ring)
            except ValueError:
                if not self._sign.is_one():
                    raise ValueError("Similarity must be orientation preserving.")
                # Not sure why this would happen:
                raise

        if ring is None:
            if self._sign.is_one():
                return vector(
                    [
                        self._a * w[0] - self._b * w[1] + self._s,
                        self._b * w[0] + self._a * w[1] + self._t,
                    ]
                )
            else:
                return vector(
                    [
                        self._a * w[0] + self._b * w[1] + self._s,
                        self._b * w[0] - self._a * w[1] + self._t,
                    ]
                )
        else:
            if self._sign.is_one():
                return vector(
                    ring,
                    [
                        self._a * w[0] - self._b * w[1] + self._s,
                        self._b * w[0] + self._a * w[1] + self._t,
                    ],
                )
            else:
                return vector(
                    ring,
                    [
                        self._a * w[0] + self._b * w[1] + self._s,
                        self._b * w[0] - self._a * w[1] + self._t,
                    ],
                )

    def _repr_(self):
        r"""
        TESTS::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S.one()
            (x, y) |-> (x, y)
            sage: S((1,-2/3))
            (x, y) |-> (x + 1, y - 2/3)
            sage: S((-1,0,2/3,3))
            (x, y) |-> (-x + 2/3, -y + 3)
            sage: S((-1,0,2/3,3,-1))
            (x, y) |-> (-x + 2/3, y + 3)
        """
        # Render the action on symbolic generators x, y of a polynomial ring.
        R = self.parent().base_ring()["x", "y"]
        x, y = R.gens()
        return "(x, y) |-> ({}, {})".format(
            self._a * x - self._sign * self._b * y + self._s,
            self._b * x + self._sign * self._a * y + self._t,
        )

    def __eq__(self, other):
        r"""
        TESTS::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,0)) == S((1,0))
            True
            sage: S((1,0)) == S((0,1))
            False
            sage: S((1,0,0,0)) == S((0,1,0,0))
            False
            sage: S((1,0,0,0,1)) == S((1,0,0,0,-1))
            False
        """
        # Fast negative answers for non-similarity arguments.
        if other is None:
            return False
        if type(other) == int:
            return False
        if self.parent() != other.parent():
            return False
        return (
            self._a == other._a
            and self._b == other._b
            and self._s == other._s
            and self._t == other._t
            and self._sign == other._sign
        )

    def __ne__(self, other):
        return not (self == other)

    def matrix(self):
        r"""
        Return the 3x3 matrix representative of this element

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,-2/3,1,1,-1)).matrix()
            [   1 -2/3    1]
            [-2/3   -1    1]
            [   0    0    1]
        """
        P = self.parent()
        M = P._matrix_space_3x3()
        z = P._ring.zero()
        o = P._ring.one()
        return M(
            [
                self._a,
                -self._sign * self._b,
                self._s,
                self._b,
                +self._sign * self._a,
                self._t,
                z,
                z,
                o,
            ]
        )

    def derivative(self):
        r"""
        Return the 2x2 matrix corresponding to the derivative of this element

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,-2/3,1,1,-1)).derivative()
            [   1 -2/3]
            [-2/3   -1]
        """
        M = self.parent()._matrix_space_2x2()
        return M([self._a, -self._sign * self._b, self._b, self._sign * self._a])
class SimilarityGroup(UniqueRepresentation, Group):
    r"""
    The group of possibly orientation reversing similarities in the plane.

    This is the group generated by rotations, translations and dilations.
    """

    Element = Similarity

    def __init__(self, base_ring):
        r"""
        TESTS::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: TestSuite(SimilarityGroup(QQ)).run()
            sage: TestSuite(SimilarityGroup(AA)).run()
        """
        self._ring = base_ring
        Group.__init__(self, category=Groups().Infinite())

    @cached_method
    def _matrix_space_2x2(self):
        # Cached: 2x2 matrices over the base ring (used for derivatives).
        from sage.matrix.matrix_space import MatrixSpace

        return MatrixSpace(self._ring, 2)

    @cached_method
    def _matrix_space_3x3(self):
        # Cached: 3x3 matrices over the base ring (used for affine matrices).
        from sage.matrix.matrix_space import MatrixSpace

        return MatrixSpace(self._ring, 3)

    @cached_method
    def _vector_space(self):
        # Cached: the plane over the base ring.
        from sage.modules.free_module import VectorSpace

        return VectorSpace(self._ring, 2)

    def _element_constructor_(self, *args, **kwds):
        r"""
        TESTS::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: S = SimilarityGroup(QQ)
            sage: S((1,1))   # translation
            (x, y) |-> (x + 1, y + 1)
            sage: V = QQ^2
            sage: S(V((1,-1)))
            (x, y) |-> (x + 1, y - 1)
            sage: S(vector((1,1)))
            (x, y) |-> (x + 1, y + 1)
        """
        if len(args) == 1:
            x = args[0]
        else:
            x = args

        # Defaults: the identity similarity; the branches below override
        # whichever coefficients the input specifies.
        a = self._ring.one()
        b = s = t = self._ring.zero()
        sign = ZZ_1

        # TODO: 2x2 and 3x3 matrix input
        if isinstance(x, (tuple, list)):
            # (s,t): translation; (a,b,s,t): orientation-preserving;
            # (a,b,s,t,sign): general similarity.
            if len(x) == 2:
                s, t = map(self._ring, x)
            elif len(x) == 4:
                a, b, s, t = map(self._ring, x)
            elif len(x) == 5:
                a, b, s, t = map(self._ring, x[:4])
                sign = ZZ(x[4])
            else:
                raise ValueError(
                    "can not construct a similarity from a list of length {}".format(
                        len(x)
                    )
                )
        elif is_Matrix(x):
            # a -sb
            # b  sa
            if x.nrows() == x.ncols() == 2:
                # list() is row-major, so the middle entries are swapped
                # relative to the a,b order.
                a, c, b, d = x.list()
                if a == d and b == -c:
                    sign = ZZ_1
                elif a == -d and b == c:
                    sign = ZZ_m1
                else:
                    raise ValueError("not a similarity matrix")
            elif x.nrows() == x.ncols() == 3:
                raise NotImplementedError
            else:
                raise ValueError("invalid dimension for matrix input")
        elif isinstance(x, FreeModuleElement):
            # A vector is interpreted as a translation.
            if len(x) == 2:
                if x.base_ring() is self._ring:
                    s, t = x
                else:
                    s, t = map(self._ring, x)
            else:
                raise ValueError("invalid dimension for vector input")
        else:
            # A scalar is interpreted as a dilation by that factor.
            p = parent(x)
            if self._ring.has_coerce_map_from(p):
                a = self._ring(x)
            else:
                raise ValueError(
                    "element in %s cannot be used to create element in %s" % (p, self)
                )

        # A similarity must have an invertible linear part.
        if (a * a + b * b).is_zero():
            raise ValueError("not invertible")

        return self.element_class(self, a, b, s, t, sign)

    def _coerce_map_from_(self, S):
        if self._ring.has_coerce_map_from(S):
            return True
        if isinstance(S, SimilarityGroup):
            return self._ring.has_coerce_map_from(S._ring)

    def _repr_(self):
        r"""
        TESTS::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: SimilarityGroup(QQ)
            Similarity group over Rational Field
        """
        return "Similarity group over {}".format(self._ring)

    def one(self):
        r"""
        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: SimilarityGroup(QQ).one()
            (x, y) |-> (x, y)
            sage: SimilarityGroup(QQ).one().is_one()
            True
        """
        return self.element_class(
            self,
            self._ring.one(),  # a
            self._ring.zero(),  # b
            self._ring.zero(),  # s
            self._ring.zero(),  # t
            ZZ_1,
        )  # sign

    def _an_element_(self):
        r"""
        Return a typical element of this group.

        EXAMPLES::

            sage: from flatsurf.geometry.similarity import SimilarityGroup
            sage: SimilarityGroup(QQ)._an_element_()
            (x, y) |-> (3*x + 4*y + 2, 4*x - 3*y - 1)
            sage: SimilarityGroup(QQ).an_element()
            (x, y) |-> (3*x + 4*y + 2, 4*x - 3*y - 1)

        .. SEEALSO::

            :meth:`sage.structure.parent.Parent.an_element` which relies on
            this method and should be called instead
        """
        return self(3, 4, 2, -1, -1)

    def is_abelian(self):
        return False

    def base_ring(self):
        return self._ring
def similarity_from_vectors(u, v, matrix_space=None):
    r"""
    Return the unique similarity matrix that maps ``u`` to ``v``.

    EXAMPLES::

        sage: from flatsurf.geometry.similarity import similarity_from_vectors

        sage: V = VectorSpace(QQ,2)
        sage: u = V((1,0))
        sage: v = V((0,1))
        sage: m = similarity_from_vectors(u,v); m
        [ 0 -1]
        [ 1  0]
        sage: m*u == v
        True

        sage: u = V((2,1))
        sage: v = V((1,-2))
        sage: m = similarity_from_vectors(u,v); m
        [ 0  1]
        [-1  0]
        sage: m * u == v
        True

    An example built from the Pythagorean triple 3^2 + 4^2 = 5^2::

        sage: u2 = V((5,0))
        sage: v2 = V((3,4))
        sage: m = similarity_from_vectors(u2,v2); m
        [ 3/5 -4/5]
        [ 4/5  3/5]
        sage: m * u2 == v2
        True

    Some test over number fields::

        sage: K.<sqrt2> = NumberField(x^2-2, embedding=1.4142)
        sage: V = VectorSpace(K,2)
        sage: u = V((sqrt2,0))
        sage: v = V((1, 1))
        sage: m = similarity_from_vectors(u,v); m
        [ 1/2*sqrt2 -1/2*sqrt2]
        [ 1/2*sqrt2  1/2*sqrt2]
        sage: m*u == v
        True

        sage: m = similarity_from_vectors(u, 2*v); m
        [ sqrt2 -sqrt2]
        [ sqrt2  sqrt2]
        sage: m*u == 2*v
        True
    """
    if u.parent() is not v.parent():
        raise ValueError

    if matrix_space is None:
        from sage.matrix.matrix_space import MatrixSpace

        matrix_space = MatrixSpace(u.base_ring(), 2)

    # Equal vectors are mapped by the identity.
    if u == v:
        return matrix_space.one()

    # Scaled cosine/sine of the angle from u to v; scaling by |v|/|u| is
    # absorbed because we divide by |u|^2 only.
    squared_norm = u[0] * u[0] + u[1] * u[1]
    c = (u[0] * v[0] + u[1] * v[1]) / squared_norm
    s = (u[0] * v[1] - u[1] * v[0]) / squared_norm

    rot = matrix_space([c, -s, s, c])
    rot.set_immutable()
    return rot
from collections import deque
from flatsurf.geometry.euclidean import line_intersection
from flatsurf.geometry.surface_objects import SaddleConnection
# Vincent question:
# using deque has the disadvantage of losing the initial points
# ideally doing
# my_line[i]
# we should always access to the same element
# I wanted to be able to flow backward thus inserting at the beginning of a list.
# Perhaps it would be better to model this on a deque-like class that is indexed by
# all integers rather than just the non-negative ones? Do you know of such
# a class? Alternately, we could store an offset.
def get_linearity_coeff(u, v):
    r"""
    Given the two 2-dimensional vectors ``u`` and ``v``, return ``a`` so that
    ``v = a*u``

    If the vectors are not colinear, a ``ValueError`` is raised.

    EXAMPLES::

        sage: from flatsurf.geometry.straight_line_trajectory import get_linearity_coeff

        sage: V = VectorSpace(QQ,2)
        sage: get_linearity_coeff(V((1,0)), V((2,0)))
        2
        sage: get_linearity_coeff(V((2,0)), V((1,0)))
        1/2
        sage: get_linearity_coeff(V((0,1)), V((0,2)))
        2
        sage: get_linearity_coeff(V((0,2)), V((0,1)))
        1/2
        sage: get_linearity_coeff(V((1,2)), V((-2,-4)))
        -2

        sage: get_linearity_coeff(V((1,1)), V((-1,1)))
        Traceback (most recent call last):
        ...
        ValueError: non colinear
    """
    if u[0]:
        # Determine the ratio from the x-coordinates and verify the
        # y-coordinates agree with it.
        coeff = v[0] / u[0]
        if coeff * u[1] != v[1]:
            raise ValueError("non colinear")
        return coeff
    if v[0]:
        # u is vertical (or zero) but v has a horizontal component.
        raise ValueError("non colinear")
    if u[1]:
        # Both vectors are vertical.
        return v[1] / u[1]
    raise ValueError("zero vector")
class SegmentInPolygon:
    r"""
    Maximal segment in a polygon of a similarity surface

    A segment is stored as a pair of tangent vectors: ``start`` based at the
    initial point and pointing forward, and ``end`` based at the final point
    and pointing backward.

    EXAMPLES::

        sage: from flatsurf import similarity_surfaces
        sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
        sage: s = similarity_surfaces.example()
        sage: v = s.tangent_vector(0, (1/3,-1/4), (0,1))
        sage: SegmentInPolygon(v)
        Segment in polygon 0 starting at (1/3, -1/3) and ending at (1/3, 0)
    """

    def __init__(self, start, end=None):
        r"""
        INPUT:

        - ``start`` -- a tangent vector; if ``end`` is ``None`` the maximal
          segment through ``start`` is computed by flowing to the polygon
          boundary

        - ``end`` -- optional tangent vector based at the end of the segment
          and pointing backward; if given, both ``start`` and ``end`` are
          assumed to already lie on the boundary
        """
        if end is not None:
            # WARNING: here we assume that both start and end are on the
            # boundary
            self._start = start
            self._end = end
        else:
            # Flow forward to the boundary to find the (backward pointing)
            # end; flowing that vector forward again recovers the start on
            # the boundary.
            self._end = start.forward_to_polygon_boundary()
            self._start = self._end.forward_to_polygon_boundary()

    def __hash__(self):
        r"""
        TESTS::

            sage: from flatsurf import similarity_surfaces
            sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
            sage: s = similarity_surfaces.example()
            sage: v = s.tangent_vector(0, (1/3,-1/4), (0,1))
            sage: h = hash(SegmentInPolygon(v))
        """
        # Hash on the same data that __eq__ compares.
        return hash((self._start, self._end))

    def __eq__(self, other):
        r"""
        Return whether this segment is indistinguishable from ``other``.
        """
        return (
            type(self) is type(other)
            and self._start == other._start
            and self._end == other._end
        )

    def __ne__(self, other):
        r"""
        Return whether this segment is distinguishable from ``other``.
        """
        return (
            type(self) is not type(other)
            or self._start != other._start
            or self._end != other._end
        )

    def __repr__(self):
        r"""
        TESTS::

            sage: from flatsurf import similarity_surfaces
            sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
            sage: s = similarity_surfaces.example()
            sage: v = s.tangent_vector(0, (0,0), (3,-1))
            sage: SegmentInPolygon(v)
            Segment in polygon 0 starting at (0, 0) and ending at (2, -2/3)
        """
        return "Segment in polygon {} starting at {} and ending at {}".format(
            self.polygon_label(), self.start().point(), self.end().point()
        )

    def start(self):
        r"""
        Return the tangent vector associated to the start of a trajectory pointed forward.
        """
        return self._start

    def start_is_singular(self):
        r"""
        Return whether the start of this segment is a singularity.
        """
        return self._start.is_based_at_singularity()

    def end(self):
        r"""
        Return a TangentVector associated to the end of a trajectory, pointed backward.
        """
        return self._end

    def end_is_singular(self):
        r"""
        Return whether the end of this segment is a singularity.
        """
        return self._end.is_based_at_singularity()

    def is_edge(self):
        r"""
        Return whether this segment runs along an edge of the polygon.
        """
        # An edge segment joins two singularities and is parallel to the edge
        # emanating from its starting vertex.
        if not self.start_is_singular() or not self.end_is_singular():
            return False
        vv = self.start().vector()
        vertex = self.start().vertex()
        ww = self.start().polygon().edge(vertex)
        from flatsurf.geometry.euclidean import is_parallel

        return is_parallel(vv, ww)

    def edge(self):
        r"""
        Return the index of the edge this segment runs along.

        Raises a ValueError if this segment is not an edge.
        """
        if not self.is_edge():
            raise ValueError("Segment asked for edge when not an edge")
        return self.start().vertex()

    def polygon_label(self):
        r"""
        Return the label of the polygon this segment lies in.
        """
        return self._start.polygon_label()

    def invert(self):
        r"""
        Return this segment with its orientation reversed.
        """
        return SegmentInPolygon(self._end, self._start)

    def next(self):
        r"""
        Return the next segment obtained by continuing straight through the end point.

        EXAMPLES::

            sage: from flatsurf import similarity_surfaces
            sage: from flatsurf.geometry.straight_line_trajectory import SegmentInPolygon
            sage: s = similarity_surfaces.example()
            sage: s.polygon(0)
            Polygon(vertices=[(0, 0), (2, -2), (2, 0)])
            sage: s.polygon(1)
            Polygon(vertices=[(0, 0), (2, 0), (1, 3)])
            sage: v = s.tangent_vector(0, (0,0), (3,-1))
            sage: seg = SegmentInPolygon(v)
            sage: seg
            Segment in polygon 0 starting at (0, 0) and ending at (2, -2/3)
            sage: seg.next()
            Segment in polygon 1 starting at (2/3, 2) and ending at (14/9, 4/3)
        """
        if self.end_is_singular():
            raise ValueError("Cannot continue from singularity")
        return SegmentInPolygon(self._end.invert())

    def previous(self):
        r"""
        Return the previous segment obtained by continuing straight backward
        through the start point.
        """
        # Bug fix: flowing backward is blocked by a singularity at the
        # *start* of this segment; the old code tested end_is_singular(),
        # apparently copied from next().
        if self.start_is_singular():
            raise ValueError("Cannot continue from singularity")
        return SegmentInPolygon(self._start.invert()).invert()
class AbstractStraightLineTrajectory:
    r"""
    Abstract base class for a straight-line trajectory on a similarity
    surface.

    A trajectory is a sequence of :class:`SegmentInPolygon`. Concrete
    subclasses decide how that sequence is represented and must implement
    :meth:`surface`, :meth:`combinatorial_length`, :meth:`segment`,
    :meth:`is_closed` and :meth:`segments`.
    """

    def surface(self):
        r"""Return the surface this trajectory lives on."""
        raise NotImplementedError

    def combinatorial_length(self):
        r"""Return the number of segments currently in this trajectory."""
        raise NotImplementedError

    def segment(self, i):
        r"""Return the ``i``-th :class:`SegmentInPolygon` of this trajectory."""
        raise NotImplementedError

    def is_closed(self):
        r"""Return whether this trajectory closes up into a loop."""
        raise NotImplementedError

    def segments(self):
        r"""Return the sequence of segments making up this trajectory."""
        raise NotImplementedError

    def __repr__(self):
        start = self.segment(0).start()
        end = self.segment(-1).end()
        return "Straight line trajectory made of {} segments from {} in polygon {} to {} in polygon {}".format(
            self.combinatorial_length(),
            start.point(),
            start.polygon_label(),
            end.point(),
            end.polygon_label(),
        )

    def plot(self, *args, **options):
        r"""
        Plot this trajectory by converting to a graphical trajectory.

        If any arguments are provided in `*args` it must be only one argument
        containing a GraphicalSurface. The keyword arguments in `**options` are
        passed on to
        :func:`flatsurf.graphical.straight_line_trajectory.GraphicalStraightLineTrajectory.plot`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: T = translation_surfaces.square_torus()
            sage: v = T.tangent_vector(0, (0,0), (5,7))
            sage: L = v.straight_line_trajectory()
            sage: L.plot()
            ...Graphics object consisting of 1 graphics primitive
            sage: L.plot(color='red')
            ...Graphics object consisting of 1 graphics primitive
        """
        if len(args) > 1:
            raise ValueError(
                "SimilaritySurface.plot() can take at most one non-keyword argument."
            )
        if len(args) == 1:
            from flatsurf.graphical.surface import GraphicalSurface

            if not isinstance(args[0], GraphicalSurface):
                raise ValueError(
                    "If an argument is provided, it must be a GraphicalSurface."
                )
            return self.graphical_trajectory(graphical_surface=args[0]).plot(**options)
        return self.graphical_trajectory().plot(**options)

    def graphical_trajectory(self, graphical_surface=None, **options):
        r"""
        Returns a ``GraphicalStraightLineTrajectory`` corresponding to this
        trajectory in the provided ``GraphicalSurface``.
        """
        from flatsurf.graphical.straight_line_trajectory import (
            GraphicalStraightLineTrajectory,
        )

        if graphical_surface is None:
            # Fall back to the default graphical surface of the underlying
            # surface.
            graphical_surface = self.surface().graphical_surface()
        return GraphicalStraightLineTrajectory(self, graphical_surface, **options)

    def cylinder(self):
        r"""
        If this is a closed orbit, return the associated maximal cylinder.

        Raises a ValueError if this trajectory is not closed.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: s = translation_surfaces.regular_octagon()
            sage: v = s.tangent_vector(0,(1/2,0),(sqrt(2),1))
            sage: traj = v.straight_line_trajectory()
            sage: traj.flow(4)
            sage: traj.is_closed()
            True
            sage: cyl = traj.cylinder()
            sage: cyl.area() # a = sqrt(2)
            a + 1
            sage: cyl.holonomy()
            (3*a + 4, 2*a + 3)
            sage: cyl.edges()
            (2, 3, 3, 2, 4)
        """
        # Note: may not be defined.
        if not self.is_closed():
            raise ValueError(
                "Cylinder is only defined for closed straight-line trajectories."
            )
        from .surface_objects import Cylinder

        coding = self.coding()
        label = coding[0][0]
        edges = [e for _, e in coding[1:]]
        # Close the edge word by crossing from the first edge back into the
        # starting polygon.
        edges.append(self.surface().opposite_edge(coding[0][0], coding[0][1])[1])
        return Cylinder(self.surface(), label, edges)

    def coding(self, alphabet=None):
        r"""
        Return the coding of this trajectory with respect to the sides of the
        polygons

        INPUT:

        - ``alphabet`` -- an optional dictionary ``(lab,nb) -> letter``. If some
          labels are avoided then these crossings are ignored.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: t = translation_surfaces.square_torus()
            sage: v = t.tangent_vector(0, (1/2,0), (5,6))
            sage: l = v.straight_line_trajectory()
            sage: alphabet = {(0,0): 'a', (0,1): 'b', (0,2):'a', (0,3): 'b'}
            sage: l.coding()
            [(0, 0), (0, 1)]
            sage: l.coding(alphabet)
            ['a', 'b']

            sage: l.flow(10); l.flow(-10)
            sage: l.coding()
            [(0, 2), (0, 1), (0, 2), (0, 1), (0, 2), (0, 1), (0, 2), (0, 1), (0, 2)]
            sage: print(''.join(l.coding(alphabet)))
            ababababa

            sage: v = t.tangent_vector(0, (1/2,0), (7,13))
            sage: l = v.straight_line_trajectory()
            sage: l.flow(10); l.flow(-10)
            sage: print(''.join(l.coding(alphabet)))
            aabaabaababaabaabaab

        For a closed trajectory, the last label (corresponding also to the
        starting point) is not considered::

            sage: v = t.tangent_vector(0, (1/5,1/7), (1,1))
            sage: l = v.straight_line_trajectory()
            sage: l.flow(10)
            sage: l.is_closed()
            True
            sage: l.coding(alphabet)
            ['a', 'b']

        Check that the saddle connections that are obtained in the torus get the
        expected coding::

            sage: for _ in range(10): # long time (.6s)
            ....:     x = ZZ.random_element(1,30)
            ....:     y = ZZ.random_element(1,30)
            ....:     x,y = x/gcd(x,y), y/gcd(x,y)
            ....:     v = t.tangent_vector(0, (0,0), (x,y))
            ....:     l = v.straight_line_trajectory()
            ....:     l.flow(200); l.flow(-200)
            ....:     w = ''.join(l.coding(alphabet))
            ....:     assert Word(w+'ab'+w).is_balanced()
            ....:     assert Word(w+'ba'+w).is_balanced()
            ....:     assert w.count('a') == y-1
            ....:     assert w.count('b') == x-1
        """
        coding = []

        segments = self.segments()

        # If the trajectory starts in the interior of an edge, that edge is
        # the first letter of the coding.
        s = segments[0]
        start = s.start()
        if start._position._position_type == start._position.EDGE_INTERIOR:
            p = s.polygon_label()
            e = start._position.get_edge()
            lab = (p, e) if alphabet is None else alphabet.get((p, e))
            if lab is not None:
                coding.append(lab)

        # Record the edge crossed at the end of every segment but the last.
        for i in range(len(segments) - 1):
            s = segments[i]
            end = s.end()
            p = s.polygon_label()
            e = end._position.get_edge()
            lab = (p, e) if alphabet is None else alphabet.get((p, e))
            if lab is not None:
                coding.append(lab)

        # The final edge is only recorded when the trajectory does not close
        # up onto its starting point (otherwise that edge was counted at the
        # start already).
        s = segments[-1]
        end = s.end()
        if (
            end._position._position_type == end._position.EDGE_INTERIOR
            and end.invert() != start
        ):
            p = s.polygon_label()
            e = end._position.get_edge()
            lab = (p, e) if alphabet is None else alphabet.get((p, e))
            if lab is not None:
                coding.append(lab)

        return coding

    def initial_tangent_vector(self):
        r"""Return the tangent vector at the start of the trajectory, pointing forward."""
        return self.segment(0).start()

    def terminal_tangent_vector(self):
        r"""Return the tangent vector at the end of the trajectory, pointing backward."""
        return self.segment(-1).end()

    def intersects(self, traj, count_singularities=False):
        r"""
        Return true if this trajectory intersects the other trajectory.
        """
        try:
            # intersections() is a generator; we only need to know whether it
            # produces at least one point.
            next(self.intersections(traj, count_singularities=count_singularities))
        except StopIteration:
            return False
        return True

    def intersections(self, traj, count_singularities=False, include_segments=False):
        r"""
        Return the set of SurfacePoints representing the intersections
        of this trajectory with the provided trajectory or SaddleConnection.

        Singularities will be included only if count_singularities is
        set to True.

        If include_segments is True, it iterates over triples consisting of the SurfacePoint,
        and two sets. The first set consists of segments of this trajectory that contain the point
        and the second set consists of segments of traj that contain the point.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: s = translation_surfaces.square_torus()
            sage: traj1 = s.tangent_vector(0,(1/2,0),(1,1)).straight_line_trajectory()
            sage: traj1.flow(3)
            sage: traj1.is_closed()
            True
            sage: traj2 = s.tangent_vector(0,(1/2,0),(-1,1)).straight_line_trajectory()
            sage: traj2.flow(3)
            sage: traj2.is_closed()
            True
            sage: sum(1 for _ in traj1.intersections(traj2))
            2

            sage: for p, (segs1, segs2) in traj1.intersections(traj2, include_segments=True):
            ....:     print(p)
            ....:     print(len(segs1), len(segs2))
            Point (1/2, 0) of polygon 0
            2 2
            Point (0, 1/2) of polygon 0
            2 2
        """
        # Partition the segments making up the trajectories by label.
        if isinstance(traj, SaddleConnection):
            traj = traj.trajectory()
        lab_to_seg1 = {}
        for seg1 in self.segments():
            label = seg1.polygon_label()
            if label in lab_to_seg1:
                lab_to_seg1[label].append(seg1)
            else:
                lab_to_seg1[label] = [seg1]
        lab_to_seg2 = {}
        for seg2 in traj.segments():
            label = seg2.polygon_label()
            if label in lab_to_seg2:
                lab_to_seg2[label].append(seg2)
            else:
                lab_to_seg2[label] = [seg2]

        intersection_points = set()
        if include_segments:
            # Map each intersection point to the pair of sets of segments
            # (of self and of traj) containing it.
            segments = {}
        # Only segments lying in the same polygon can intersect; compare the
        # supporting lines of each such pair.
        for label, seg_list_1 in lab_to_seg1.items():
            if label in lab_to_seg2:
                seg_list_2 = lab_to_seg2[label]
                for seg1 in seg_list_1:
                    for seg2 in seg_list_2:
                        x = line_intersection(
                            seg1.start().point(),
                            seg1.start().point() + seg1.start().vector(),
                            seg2.start().point(),
                            seg2.start().point() + seg2.start().vector(),
                        )
                        if x is not None:
                            # The supporting lines meet at x; only count it if
                            # it actually lies in the polygon (and is not an
                            # excluded singularity).
                            pos = (
                                self.surface()
                                .polygon(seg1.polygon_label())
                                .get_point_position(x)
                            )
                            if pos.is_inside() and (
                                count_singularities or not pos.is_vertex()
                            ):
                                new_point = self.surface().point(
                                    seg1.polygon_label(), x
                                )
                                if new_point not in intersection_points:
                                    intersection_points.add(new_point)
                                    if include_segments:
                                        segments[new_point] = ({seg1}, {seg2})
                                    else:
                                        yield new_point
                                elif include_segments:
                                    # Already seen this point: just record the
                                    # additional segments through it.
                                    segments[new_point][0].add(seg1)
                                    segments[new_point][1].add(seg2)
        if include_segments:
            yield from segments.items()
class StraightLineTrajectory(AbstractStraightLineTrajectory):
    r"""
    Straight-line trajectory in a similarity surface.

    The trajectory is stored as a deque of :class:`SegmentInPolygon` that can
    be extended in both directions with :meth:`flow`.

    EXAMPLES::

        # Demonstrate the handling of edges
        sage: from flatsurf import translation_surfaces
        sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectory
        sage: p = SymmetricGroup(2)('(1,2)')
        sage: s = translation_surfaces.origami(p,p)
        sage: traj = StraightLineTrajectory(s.tangent_vector(1,(0,0),(1,0)))
        sage: traj
        Straight line trajectory made of 1 segments from (0, 0) in polygon 1 to (1, 1) in polygon 2
        sage: traj.is_saddle_connection()
        True
        sage: traj2 = StraightLineTrajectory(s.tangent_vector(1,(0,0),(0,1)))
        sage: traj2
        Straight line trajectory made of 1 segments from (1, 0) in polygon 2 to (0, 1) in polygon 1
        sage: traj2.is_saddle_connection()
        True
    """

    def __init__(self, tangent_vector):
        r"""
        Construct the trajectory through ``tangent_vector``; initially it
        consists of the single maximal segment containing that vector.
        """
        self._segments = deque()
        seg = SegmentInPolygon(tangent_vector)
        self._segments.append(seg)
        self._setup_forward()
        self._setup_backward()
        self._s = tangent_vector.surface()

    def surface(self):
        r"""Return the surface this trajectory lives on."""
        return self._s

    def segment(self, i):
        r"""
        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: O = translation_surfaces.regular_octagon()
            sage: v = O.tangent_vector(0, (1,1), (33,45))
            sage: L = v.straight_line_trajectory()
            sage: L.segment(0)
            Segment in polygon 0 starting at (4/15, 0) and ending at (11/26*a +
            1, 15/26*a + 1)
            sage: L.flow(-1)
            sage: L.segment(0)
            Segment in polygon 0 starting at (-1/2*a, 7/22*a + 7/11) and ending
            at (4/15, a + 1)
            sage: L.flow(1)
            sage: L.segment(2)
            Segment in polygon 0 starting at (-1/13*a, 1/13*a) and ending at
            (9/26*a + 11/13, 17/26*a + 15/13)
        """
        return self.segments()[i]

    def combinatorial_length(self):
        r"""Return the number of segments of this trajectory."""
        return len(self.segments())

    def segments(self):
        r"""Return the deque of segments making up this trajectory."""
        return self._segments

    def _setup_forward(self):
        # Cache the tangent vector from which to continue flowing forward, or
        # None when the forward end is a singularity.
        v = self.terminal_tangent_vector()
        if v.is_based_at_singularity():
            self._forward = None
        else:
            self._forward = v.invert()

    def _setup_backward(self):
        # Same as _setup_forward, but for the backward end.
        v = self.initial_tangent_vector()
        if v.is_based_at_singularity():
            self._backward = None
        else:
            self._backward = v.invert()

    def is_forward_separatrix(self):
        r"""Return whether the trajectory runs into a singularity going forward."""
        return self._forward is None

    def is_backward_separatrix(self):
        r"""Return whether the trajectory runs into a singularity going backward."""
        return self._backward is None

    def is_saddle_connection(self):
        r"""Return whether the trajectory joins two singularities."""
        return (self._forward is None) and (self._backward is None)

    def is_closed(self):
        r"""
        Test whether this is a closed trajectory.

        By convention, by a closed trajectory we mean a trajectory without any
        singularities.

        .. SEEALSO::

            :meth:`is_saddle_connection`

        EXAMPLES:

        An example in a cone surface covered by the torus::

            sage: from flatsurf import MutableOrientedSimilaritySurface, polygons
            sage: p = polygons.square()
            sage: s = MutableOrientedSimilaritySurface(p.base_ring())
            sage: s.add_polygon(p)
            0
            sage: s.glue((0, 0), (0, 3))
            sage: s.glue((0, 1), (0, 2))
            sage: s.set_immutable()

            sage: t = s
            sage: v = t.tangent_vector(0, (1/2,0), (1/3,7/5))
            sage: l = v.straight_line_trajectory()
            sage: l.is_closed()
            False
            sage: l.flow(100)
            sage: l.is_closed()
            True

            sage: v = t.tangent_vector(0, (1/2,0), (1/3,2/5))
            sage: l = v.straight_line_trajectory()
            sage: l.flow(100)
            sage: l.is_closed()
            False
            sage: l.is_saddle_connection()
            False
            sage: l.flow(-100)
            sage: l.is_saddle_connection()
            True
        """
        # Closed iff continuing forward would reproduce the initial tangent
        # vector (up to positive scaling).
        return (not self.is_forward_separatrix()) and self._forward.differs_by_scaling(
            self.initial_tangent_vector()
        )

    def flow(self, steps):
        r"""
        Append or prepend segments to the trajectory.
        If steps is positive, attempt to append this many segments.
        If steps is negative, attempt to prepend this many segments.
        Will fail gracefully the trajectory hits a singularity or closes up.

        EXAMPLES::

            sage: from flatsurf import similarity_surfaces
            sage: s = similarity_surfaces.example()

            sage: v = s.tangent_vector(0, (1,-1/2), (3,-1))
            sage: traj = v.straight_line_trajectory()
            sage: traj
            Straight line trajectory made of 1 segments from (1/4, -1/4) in polygon 0 to (2, -5/6) in polygon 0
            sage: traj.flow(1)
            sage: traj
            Straight line trajectory made of 2 segments from (1/4, -1/4) in polygon 0 to (61/36, 11/12) in polygon 1
            sage: traj.flow(-1)
            sage: traj
            Straight line trajectory made of 3 segments from (15/16, 45/16) in polygon 1 to (61/36, 11/12) in polygon 1
        """
        # Extend forward; stop early at a separatrix or when closing up.
        while (
            steps > 0 and (not self.is_forward_separatrix()) and (not self.is_closed())
        ):
            self._segments.append(SegmentInPolygon(self._forward))
            self._setup_forward()
            steps -= 1
        # Extend backward; the new segment is computed forward from the
        # inverted backward vector, hence the final invert().
        while (
            steps < 0 and (not self.is_backward_separatrix()) and (not self.is_closed())
        ):
            self._segments.appendleft(SegmentInPolygon(self._backward).invert())
            self._setup_backward()
            steps += 1
class StraightLineTrajectoryTranslation(AbstractStraightLineTrajectory):
    r"""
    Straight line trajectory in a translation surface.

    This is similar to :class:`StraightLineTrajectory` but implemented using
    interval exchange maps. It should be faster than the implementation via
    segments and flowing in polygons.

    This class only stores a list of triples ``(p, e, x)`` where:

    - ``p`` is a label of a polygon

    - ``e`` is the number of some edge in ``p``

    - ``x`` is the position of the point in ``e`` (be careful that it is not
      necessarily a number between 0 and 1. It is given relatively to the length
      of the induced interval in the iet)
    """

    def __init__(self, tangent_vector):
        r"""
        Construct the trajectory through ``tangent_vector``.
        """
        self._vector = tangent_vector.vector()
        self._s = tangent_vector.surface()

        seg = SegmentInPolygon(tangent_vector)
        if seg.is_edge():
            # A trajectory running along an edge is stored as the segment
            # itself; no interval exchange data is needed.
            self._points = None
            self._edge = seg
            return

        start = seg.start()
        pos = start._position
        if pos._position_type == pos.EDGE_INTERIOR:
            i = pos.get_edge()
        elif pos._position_type == pos.VERTEX:
            i = pos.get_vertex()
        else:
            raise RuntimeError("PROBLEM!")

        p = start.polygon_label()
        poly = self._s.polygon(p)

        T = self._get_iet(p)
        # Position of the starting point along edge i, rescaled to the length
        # of the corresponding interval in the iet.
        x = get_linearity_coeff(
            poly.vertex(i + 1) - poly.vertex(i), start.point() - poly.vertex(i)
        )
        x *= T.length_bot(i)

        self._points = deque()  # we store triples (lab, edge, rel_pos)
        self._points.append((p, i, x))

    def _next(self, p, e, x):
        r"""
        Return the image of ``(p, e, x)``

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
            sage: S = SymmetricGroup(3)
            sage: r = S('(1,2)')
            sage: u = S('(1,3)')
            sage: o = translation_surfaces.origami(r,u)
            sage: v = o.tangent_vector(1, (1/3,1/7), (5,13))
            sage: L = StraightLineTrajectoryTranslation(v)
            sage: t0 = (1,0,1/3)
            sage: t1 = L._next(*t0)
            sage: t2 = L._next(*t1)
            sage: t0,t1,t2
            ((1, 0, 1/3), (3, 0, 16/3), (1, 0, 31/3))
            sage: assert L._previous(*t2) == t1
            sage: assert L._previous(*t1) == t0
        """
        # Flow through the polygon p, then cross to the neighboring polygon.
        e, x = self._get_iet(p).forward_image(e, x)
        p, e = self._s.opposite_edge(p, e)
        return (p, e, x)

    def _previous(self, p, e, x):
        r"""
        Return the preimage of ``(p, e, x)``
        """
        # Inverse of _next: cross the edge first, then flow backward.
        p, e = self._s.opposite_edge(p, e)
        e, x = self._get_iet(p).backward_image(e, x)
        return (p, e, x)

    def combinatorial_length(self):
        r"""Return the number of segments of this trajectory."""
        if self._points is None:
            # The trajectory is a single edge.
            return 1
        return len(self._points)

    def _get_iet(self, label):
        r"""
        Return the flow map of the polygon ``label`` in the direction of this
        trajectory, caching the maps lazily per polygon.
        """
        polygon = self._s.polygon(label)
        try:
            return self._iets[polygon]
        except AttributeError:
            # Very first call: create the cache.
            self._iets = {polygon: polygon.flow_map(self._vector)}
        except KeyError:
            # First call for this particular polygon.
            self._iets[polygon] = polygon.flow_map(self._vector)
        return self._iets[polygon]

    def segment(self, i):
        r"""
        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
            sage: O = translation_surfaces.regular_octagon()
            sage: v = O.tangent_vector(0, (1,1), (33,45))
            sage: L = StraightLineTrajectoryTranslation(v)
            sage: L.segment(0)
            Segment in polygon 0 starting at (4/15, 0) and ending at (11/26*a +
            1, 15/26*a + 1)
            sage: L.flow(-1)
            sage: L.segment(0)
            Segment in polygon 0 starting at (-1/2*a, 7/22*a + 7/11) and ending
            at (4/15, a + 1)
            sage: L.flow(1)
            sage: L.segment(2)
            Segment in polygon 0 starting at (-1/13*a, 1/13*a) and ending at
            (9/26*a + 11/13, 17/26*a + 15/13)
        """
        if self._points is None:
            return self._edge
        lab, e0, x0 = self._points[i]
        iet = self._get_iet(lab)
        e1, x1 = iet.forward_image(e0, x0)
        poly = self._s.polygon(lab)

        l0 = iet.length_bot(e0)
        l1 = iet.length_top(e1)

        # Convert the relative iet positions back into points on the polygon
        # boundary.
        point0 = poly.vertex(e0) + poly.edge(e0) * x0 / l0
        point1 = poly.vertex(e1) + poly.edge(e1) * (l1 - x1) / l1

        # Build the two tangent vectors bounding the segment: forward at the
        # start, backward at the end.
        v0 = self._s.tangent_vector(
            lab, point0, self._vector, ring=self._vector.base_ring()
        )
        v1 = self._s.tangent_vector(
            lab, point1, -self._vector, ring=self._vector.base_ring()
        )
        return SegmentInPolygon(v0, v1)

    def segments(self):
        r"""
        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
            sage: s = translation_surfaces.square_torus()
            sage: v = s.tangent_vector(0, (0,0), (1,1+AA(5).sqrt()), ring=AA)
            sage: L = StraightLineTrajectoryTranslation(v)
            sage: L.flow(2)
            sage: L.segments()
            [Segment in polygon 0 starting at (0, 0) and ending at (0.3090169943749474?, 1),
             Segment in polygon 0 starting at (0.3090169943749474?, 0) and ending at (0.618033988749895?, 1),
             Segment in polygon 0 starting at (0.618033988749895?, 0) and ending at (0.9270509831248423?, 1)]
        """
        return [self.segment(i) for i in range(self.combinatorial_length())]

    def is_closed(self):
        r"""Return whether this trajectory closes up into a loop."""
        if self._points is None:
            # Not implemented for a trajectory along an edge.
            raise NotImplementedError
        # Closed iff flowing once more from the last point returns to the
        # first one.
        return self._points[0] == self._next(*self._points[-1])

    def is_forward_separatrix(self):
        r"""Return whether the trajectory hits a singularity going forward."""
        if self._points is None:
            return True
        p1, e1, x1 = self._next(*self._points[-1])
        # Relative position zero means the flow lands exactly on a vertex.
        return x1.is_zero()

    def is_backward_separatrix(self):
        r"""Return whether the trajectory hits a singularity going backward."""
        return self._points is None or self._points[0][2].is_zero()

    def is_saddle_connection(self):
        r"""
        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectoryTranslation
            sage: torus = translation_surfaces.square_torus()
            sage: v = torus.tangent_vector(0, (1/2,1/2), (1,1))
            sage: S = StraightLineTrajectoryTranslation(v)
            sage: S.is_saddle_connection()
            True

            sage: v = torus.tangent_vector(0, (1/3,2/3), (1,2))
            sage: S = StraightLineTrajectoryTranslation(v)
            sage: S.is_saddle_connection()
            False
            sage: S.flow(1)
            sage: S.is_saddle_connection()
            True
        """
        # A trajectory along an edge is a saddle connection by definition.
        return self._points is None or (
            self.is_forward_separatrix() and self.is_backward_separatrix()
        )

    def flow(self, steps):
        r"""
        Append (``steps > 0``) or prepend (``steps < 0``) up to ``|steps|``
        crossings, stopping early when the trajectory closes up or reaches a
        singularity.
        """
        if self._points is None:
            # A trajectory along an edge cannot be extended.
            return
        if steps > 0:
            t = self._points[-1]
            for i in range(steps):
                t = self._next(*t)
                if t == self._points[0] or t[2].is_zero():
                    # closed curve or forward separatrix
                    break
                self._points.append(t)
        elif steps < 0:
            t = self._points[0]
            for i in range(-steps):
                if t[2].is_zero():
                    # backward separatrix: cannot flow past a singularity
                    break
                t = self._previous(*t)
                if t == self._points[-1]:
                    # closed curve or backward separatrix
                    break
                self._points.appendleft(t)
from sage.rings.rational_field import QQ
from flatsurf.geometry.surface import OrientedSimilaritySurface
class AbstractOrigami(OrientedSimilaritySurface):
    r"""
    Abstract base class for (connected) origamis.

    An origami is modeled as a collection of unit squares indexed by a
    ``domain``. Subclasses must implement :meth:`up`, :meth:`down`,
    :meth:`right` and :meth:`left` which describe how the squares are glued.
    """

    def __init__(self, domain, root=None, base_label=None, category=None):
        r"""
        INPUT:

        - ``domain`` -- the set of labels of the squares

        - ``root`` -- an element of ``domain`` serving as the root label of
          this (connected) surface; defaults to ``domain.an_element()``

        - ``base_label`` -- deprecated alias for ``root``

        - ``category`` -- an optional category for this surface
        """
        self._domain = domain
        if base_label is not None:
            import warnings

            warnings.warn(
                "base_label has been deprecated as a keyword argument for AbstractOrigami and will be removed in a future version of sage-flatsurf; use root instead"
            )
            root = base_label
            base_label = None
        if root is None:
            root = domain.an_element()
        self._root = root

        from flatsurf.geometry.categories import TranslationSurfaces

        if category is None:
            category = TranslationSurfaces()
        category &= TranslationSurfaces().WithoutBoundary().Connected()

        # Record finiteness of the domain in the category of the surface.
        finite = domain.is_finite()
        if finite:
            category &= category.FiniteType()
        else:
            category &= category.InfiniteType()

        from flatsurf.geometry.polygon import polygons

        # Every polygon of an origami is the same unit square.
        self._square = polygons.square()

        super().__init__(QQ, category=category)

    def roots(self):
        r"""Return the root labels of this surface (one per component)."""
        return (self._root,)

    def labels(self):
        r"""Return the labels of this surface."""
        from flatsurf.geometry.surface import LabelsFromView

        return LabelsFromView(self, self._domain)

    def is_mutable(self):
        r"""Return whether this surface can be modified; origamis are immutable."""
        return False

    def up(self, label):
        r"""Return the label of the square glued above the square ``label``."""
        raise NotImplementedError

    def down(self, label):
        r"""Return the label of the square glued below the square ``label``."""
        raise NotImplementedError

    def right(self, label):
        r"""Return the label of the square glued to the right of ``label``."""
        raise NotImplementedError

    def left(self, label):
        r"""Return the label of the square glued to the left of ``label``."""
        raise NotImplementedError

    def _repr_(self):
        return "Some AbstractOrigami"

    def polygon_labels(self):
        r"""Return the set indexing the polygons of this surface."""
        return self._domain

    def polygon(self, lab):
        r"""Return the polygon with label ``lab``; always the unit square."""
        if lab not in self._domain:
            # Updated to print a possibly useful error message
            raise ValueError("Label " + str(lab) + " is not in the domain")
        return self._square

    def opposite_edge(self, p, e):
        r"""
        Return the polygon label and edge index glued to edge ``e`` of the
        square with label ``p``.

        Edges are numbered 0 (bottom), 1 (right), 2 (top), 3 (left).
        """
        if p not in self._domain:
            # Raise an informative message, consistent with the Origami
            # subclass (previously this was a bare ValueError).
            raise ValueError(
                "Polygon label p=" + str(p) + " is not in domain=" + str(self._domain)
            )
        if e == 0:
            return self.down(p), 2
        if e == 1:
            return self.right(p), 3
        if e == 2:
            return self.up(p), 0
        if e == 3:
            return self.left(p), 1
        raise ValueError("Edge value e=" + str(e) + " does not satisfy 0<=e<4.")
class Origami(AbstractOrigami):
    r"""
    Origami defined by a pair of permutations.

    The permutation ``r`` glues each square to the square on its right and
    ``u`` glues each square to the square above it. Explicit inverses ``rr``
    and ``uu`` may be supplied; they are then checked against ``r`` and ``u``
    on some sample elements of the domain.
    """

    def __init__(
        self,
        r,
        u,
        rr=None,
        uu=None,
        domain=None,
        root=None,
        base_label=None,
        category=None,
    ):
        if domain is None:
            domain = r.parent().domain()

        self._r = r
        self._u = u

        rr = self._verified_inverse(r, rr, domain, "r", "rr")
        uu = self._verified_inverse(u, uu, domain, "u", "uu")

        self._perms = [uu, r, u, rr]  # down,right,up,left
        AbstractOrigami.__init__(
            self, domain, root=root, base_label=base_label, category=category
        )

    @staticmethod
    def _verified_inverse(perm, inverse, domain, name, inverse_name):
        # Compute the inverse when it was not given; otherwise check on some
        # sample elements that it really is a two-sided inverse.
        if inverse is None:
            return ~perm
        for a in domain.some_elements():
            if perm(inverse(a)) != a:
                raise ValueError(
                    "%s o %s is not identity on %s" % (name, inverse_name, a)
                )
            if inverse(perm(a)) != a:
                raise ValueError(
                    "%s o %s is not identity on %s" % (inverse_name, name, a)
                )
        return inverse

    def opposite_edge(self, p, e):
        r"""
        Return the polygon label and edge index glued to edge ``e`` of the
        square with label ``p``.
        """
        if p not in self._domain:
            raise ValueError(
                "Polygon label p=" + str(p) + " is not in domain=" + str(self._domain)
            )
        if not 0 <= e <= 3:
            raise ValueError("Edge value e=" + str(e) + " does not satisfy 0<=e<4.")
        # Crossing edge e lands on edge e+2 (mod 4) of the neighboring square.
        return self._perms[e](p), (e + 2) % 4

    def up(self, label):
        # Cross the top edge (number 2).
        return self.opposite_edge(label, 2)[0]

    def down(self, label):
        # Cross the bottom edge (number 0).
        return self.opposite_edge(label, 0)[0]

    def right(self, label):
        # Cross the right edge (number 1).
        return self.opposite_edge(label, 1)[0]

    def left(self, label):
        # Cross the left edge (number 3).
        return self.opposite_edge(label, 3)[0]

    def _repr_(self):
        return "Origami defined by r=%s and u=%s" % (self._r, self._u)

    def __eq__(self, other):
        if not isinstance(other, Origami):
            return False
        if self._perms != other._perms:
            return False
        if self._domain is not other._domain:
            return False
        return self.roots() == other.roots()

    def __hash__(self):
        return hash((Origami, tuple(self._perms), self._domain, self.roots()))
from sage.structure.sage_object import SageObject
class FlowPolygonMap(SageObject):
    r"""
    The map obtained as the return map of the flow on the sides of a (convex)
    polygon.

    Formally, this can be defined as follows: one start with two partition into
    (finitely many) intervals of a given interval. The map corresponds to
    changing from the first partition to the second. In other words, points are
    identified as pairs ``(i, x)`` where ``i`` is an atom and ``x`` is the
    relative position in this atom.

    Contrarily to an interval exchange transformation, things here are going
    from bottom to top.

    Note that this could also be used for homothetic surfaces. And to some
    extent to half translation surface.

    EXAMPLES::

        sage: from flatsurf.geometry.interval_exchange_transformation import FlowPolygonMap
        sage: T = FlowPolygonMap(QQ, [0,1,2], [2,3,1], [2,1,0], [1,3,2])
        sage: [T.forward_image(0,x) for x in range(3)]
        [(2, 0), (1, 0), (1, 1)]
        sage: [T.forward_image(1,x) for x in range(4)]
        [(1, 1), (1, 2), (0, 0), (0, 1)]
        sage: [T.forward_image(2,x) for x in range(1)]
        [(0, 1)]
    """

    def __init__(self, ring, bot_labels, bot_lengths, top_labels, top_lengths):
        r"""
        INPUT:

        - ``ring`` -- the base ring for the lengths of the interval

        - ``bot_labels`` -- labels for the bottom partition

        - ``bot_lengths`` -- lengths for the bottom partition

        - ``top_labels`` -- labels for the top partition

        - ``top_lengths`` -- lengths for top partition
        """
        # Sanity checks: lengths must be positive, labels and lengths must
        # match up, and the two partitions must subdivide intervals of equal
        # total length.
        if not all(x > ring.zero() for x in bot_lengths):
            raise ValueError
        if not all(x > ring.zero() for x in top_lengths):
            raise ValueError
        if len(bot_labels) != len(bot_lengths):
            raise ValueError
        if len(top_labels) != len(top_lengths):
            raise ValueError
        if sum(top_lengths) != sum(bot_lengths):
            raise ValueError
        self._ring = ring
        self._bot_labels = bot_labels
        self._top_labels = top_labels
        # Inverse maps from a label to its index in each partition; labels
        # must therefore be unique.
        self._bot_labels_to_index = {j: i for i, j in enumerate(bot_labels)}
        self._top_labels_to_index = {j: i for i, j in enumerate(top_labels)}
        if len(self._bot_labels) != len(self._bot_labels_to_index):
            raise ValueError("non unique labels for bot: {}".format(bot_labels))
        if len(self._top_labels) != len(self._top_labels_to_index):
            raise ValueError("non unique labels in top: {}".format(top_labels))
        self._bot_lengths = list(map(ring, bot_lengths))
        self._top_lengths = list(map(ring, top_lengths))

        # forward image of intervals
        # For each bottom atom ib, store the pair (it, y) where it is the
        # index of the top atom containing the left endpoint of ib and y is
        # the offset of that endpoint inside the top atom.
        it = 0
        # lt is the length of the current top atom; x1 is the part of it not
        # yet covered by the bottom atoms processed so far.
        lt = x1 = top_lengths[it]
        self._forward_images = []
        for ib in range(len(bot_lengths)):
            lenb = bot_lengths[ib]
            self._forward_images.append((it, lt - x1))
            # Consume entire top atoms covered by this bottom atom.
            while lenb and lenb >= x1:
                lenb -= x1
                it += 1
                if it < len(top_lengths):
                    lt = x1 = top_lengths[it]
                else:
                    # Past the last top atom; only reachable at the very end
                    # since both partitions have the same total length.
                    lt = ring.zero()
            if lenb:
                x1 -= lenb

        # backward image of intervals (same computation with the roles of the
        # top and bottom partitions exchanged)
        ib = 0
        lb = x1 = bot_lengths[ib]
        self._backward_images = []
        for it in range(len(top_lengths)):
            lent = top_lengths[it]
            self._backward_images.append((ib, lb - x1))
            while lent and lent >= x1:
                lent -= x1
                ib += 1
                if ib < len(bot_lengths):
                    lb = x1 = bot_lengths[ib]
                else:
                    lb = ring.zero()
            if lent:
                x1 -= lent

    def length_bot(self, i):
        r"""Return the length of the bottom atom with label ``i``."""
        i = self._bot_labels_to_index[i]
        return self._bot_lengths[i]

    def length_top(self, i):
        r"""Return the length of the top atom with label ``i``."""
        i = self._top_labels_to_index[i]
        return self._top_lengths[i]

    def _repr_(self):
        s = ["Flow polygon map:"]
        s.append(" " + " ".join(str(x) for x in self._top_labels))
        s.append(" " + " ".join(str(x) for x in self._bot_labels))
        s.append("top lengths: {}".format(self._top_lengths))
        s.append("bot lengths: {}".format(self._bot_lengths))
        return "\n".join(s)

    def forward_image(self, i, x):
        r"""
        Return the forward image.

        EXAMPLES::

            sage: from flatsurf.geometry.interval_exchange_transformation import FlowPolygonMap

        Singularities are always sent to a ``(i,0)``::

            sage: T = FlowPolygonMap(QQ, [0,1], [2,1], [2,3,4], [1,1,1])
            sage: T.forward_image(0, 0)
            (2, 0)
            sage: T.forward_image(0, 1) # could have equally been (2, 1)
            (3, 0)
        """
        i = self._bot_labels_to_index[i]
        if x < self._ring.zero() or x > self._bot_lengths[i]:
            raise ValueError("x = {} is out of the interval".format(x))
        # (j, y): the top atom containing the left endpoint of bottom atom i
        # and the offset of that endpoint inside it.
        j, y = self._forward_images[i]
        if x + y < self._top_lengths[j]:
            return (self._top_labels[j], x + y)
        # Walk through subsequent top atoms until x is exhausted.
        x -= self._top_lengths[j] - y
        j += 1
        while x > self._top_lengths[j]:
            x -= self._top_lengths[j]
            j += 1
        return (self._top_labels[j], x)

    def backward_image(self, i, x):
        r"""
        EXAMPLES::

            sage: from flatsurf.geometry.interval_exchange_transformation import \
            ....:     FlowPolygonMap
            sage: x = polygen(ZZ)
            sage: K.<sqrt2> = NumberField(x^2 - 2, embedding=AA(2).sqrt())
            sage: T = FlowPolygonMap(K, [0,1,2],
            ....:     [sqrt2,1+sqrt2,1], [2,0,1], [1,sqrt2,1+sqrt2])
            sage: T.backward_image(*T.forward_image(1, 1))
            (1, 1)
            sage: T.backward_image(*T.forward_image(0, 1))
            (0, 1)
            sage: T.backward_image(*T.forward_image(0, sqrt2-1))
            (0, sqrt2 - 1)
            sage: T.backward_image(*T.forward_image(1, sqrt2-1))
            (1, sqrt2 - 1)

        Singularities are always sent to a ``(i,0)``::

            sage: T = FlowPolygonMap(QQ, [2,3,4], [1,1,1], [0,1], [2,1])
            sage: T.backward_image(0, 0)
            (2, 0)
            sage: T.backward_image(0, 1) # could have equally been (2, 1)
            (3, 0)

        TESTS::

            sage: T = FlowPolygonMap(K, [0,1,2],
            ....:     [5*sqrt2,1,1], [2,1,0], [1,1,5*sqrt2])
            sage: for x in range(1,8):
            ....:     p0 = (0,x)
            ....:     for n in range(5):
            ....:         p1 = T.forward_image(*p0)
            ....:         assert T.backward_image(*p1) == p0, "p0 = {}, p1 = {}".format(p0,p1)
            ....:         p0 = p1
        """
        # Mirror image of forward_image with the roles of top and bottom
        # exchanged.
        i = self._top_labels_to_index[i]
        if x < self._ring.zero() or x > self._top_lengths[i]:
            raise ValueError("x = {} is out of the interval".format(x))
        j, y = self._backward_images[i]
        if x + y < self._bot_lengths[j]:
            return (self._bot_labels[j], x + y)
        x -= self._bot_lengths[j] - y
        j += 1
        while x > self._bot_lengths[j]:
            x -= self._bot_lengths[j]
            j += 1
        return (self._bot_labels[j], x)
from sage.misc.cachefunc import cached_method
from sage.structure.element import parent
from sage.categories.groups import Groups
from sage.groups.group import Group
from sage.structure.element import MultiplicativeGroupElement
from sage.structure.unique_representation import UniqueRepresentation
def intersection(i0, j0, i1, j1):
    r"""
    Algebraic intersection number of two curves inside a polygon.
    The curves are given by the edge indices of their endpoints.  In case of
    equality we consider that i0 < i1 < j1 < j0.
    INPUT:
    - ``i0``, ``j0`` -- start and end of the first curve
    - ``i1``, ``j1`` -- start and end of the second curve
    TESTS::
        sage: from flatsurf.geometry.fundamental_group import intersection
        sage: intersection(3,0,3,2)
        1
        sage: intersection(0,1,1,2)
        1
        sage: intersection(0,2,3,1)
        -1
        sage: intersection(0,3,3,2)
        0
        sage: intersection(1,1,1,1)
        0
        sage: intersection(3,2,3,2)
        0
        sage: intersection(3,2,0,3)
        0
        sage: intersection(0,3,0,3)
        0
        sage: intersection(0,2,0,2)
        0
    """
    if i0 <= j0:
        crosses_in = i0 <= i1 <= j0
        crosses_out = i0 <= j1 <= j0
    else:
        # Wrap-around case: the endpoints of the second curve swap roles.
        crosses_in = j0 < j1 < i0
        crosses_out = j0 < i1 < i0
    return int(crosses_in) - int(crosses_out)
class Path(MultiplicativeGroupElement):
    r"""
    A based loop on a surface, i.e., an element of its fundamental group.

    The path is stored combinatorially as the sequence of polygon labels it
    traverses (``_polys``), together with, for each step, the index of the
    edge used to leave a polygon (``_edges``) and the index of that same edge
    as seen from the polygon that is entered (``_edges_rev``).  The first and
    last entries of ``_polys`` are the base polygon of the parent
    :class:`FundamentalGroup`.
    """
    # activating the following somehow break the discovery of the Python _mul_
    # method below...
    # __slots__ = ['_polys', '_edges', '_edges_rev']
    def __init__(self, parent, polys, edge, edge_rev, check=True, reduced=False):
        r"""
        INPUT:
        - ``parent`` -- the :class:`FundamentalGroup` this path belongs to
        - ``polys`` -- sequence of polygon labels visited by the path
        - ``edge`` -- for each step, the edge index used to leave a polygon
        - ``edge_rev`` -- for each step, the matching edge index in the
          polygon that is entered
        - ``check`` -- whether to validate the data (see :meth:`_check`)
        - ``reduced`` -- whether the path is already known to be reduced, in
          which case :meth:`_reduce` is skipped
        """
        self._polys = tuple(polys)
        self._edges = tuple(edge)
        self._edges_rev = tuple(edge_rev)
        MultiplicativeGroupElement.__init__(self, parent)
        if not reduced:
            self._reduce()
        if check:
            self._check()
    def _reduce(self):
        r"""
        Remove backtracking from the path, i.e., cancel any step that is
        immediately followed by the same step in the opposite direction.

        .. NOTE::

            Currently a no-op; cancellation at the junction of two paths is
            performed in :meth:`_mul_` instead.
        """
        pass
    def _poly_cross_dict(self):
        r"""
        Return a dictionary mapping each polygon label of the surface to the
        list of ``(incoming edge, outgoing edge)`` pairs along which this
        path crosses that polygon.

        This is a helper for :meth:`intersection`.
        """
        d = {p: [] for p in self.parent()._s.labels()}
        # The path is a loop, so the crossing through the base polygon pairs
        # the last incoming edge with the first outgoing edge.
        d[self._polys[0]].append((self._edges_rev[-1], self._edges[0]))
        for i in range(1, len(self._polys) - 1):
            p = self._polys[i]
            e0 = self._edges_rev[i - 1]
            e1 = self._edges[i]
            d[p].append((e0, e1))
        return d
    def __hash__(self):
        # Consistent with __eq__ below, which only compares _polys and
        # _edges.
        return hash((self._polys, self._edges))
    def __eq__(self, other):
        r"""
        TESTS::
            sage: from flatsurf import translation_surfaces
            sage: t = translation_surfaces.square_torus()
            sage: F = t.fundamental_group()
            sage: a,b = F.gens()
            sage: a == b
            False
            sage: a*b == b*a
            False
            sage: a*b == a*b
            True
        """
        return (
            parent(self) is parent(other)
            and self._polys == other._polys
            and self._edges == other._edges
        )
    def __ne__(self, other):
        r"""
        TESTS::
            sage: from flatsurf import translation_surfaces
            sage: t = translation_surfaces.square_torus()
            sage: F = t.fundamental_group()
            sage: a,b = F.gens()
            sage: a != b
            True
            sage: a*b != b*a
            True
            sage: a*b != a*b
            False
        """
        return (
            parent(self) is not parent(other)
            or self._polys != other._polys
            or self._edges != other._edges
        )
    def _check(self):
        r"""
        Validate the internal data: there must be exactly one edge crossing
        per consecutive pair of polygons, and the path must start and end at
        the base polygon of the parent group.
        """
        if not (len(self._polys) - 1 == len(self._edges) == len(self._edges_rev)):
            raise ValueError(
                "polys = {}\nedges = {}\nedges_rev={}".format(
                    self._polys, self._edges, self._edges_rev
                )
            )
        assert self._polys[0] == self.parent()._b == self._polys[-1]
    def is_one(self):
        # The identity is the empty loop at the base polygon.
        return not self._edges
    def _repr_(self):
        # e.g. "0 --0-- 0 --1-- 0": polygon labels interleaved with the edge
        # indices used to leave them.
        return "".join(
            "{} --{}-- ".format(p, e) for p, e in zip(self._polys, self._edges)
        ) + "{}".format(self._polys[-1])
    def _mul_(self, other):
        r"""
        TESTS::
            sage: from flatsurf import translation_surfaces
            sage: t = translation_surfaces.square_torus()
            sage: a,b = t.fundamental_group().gens()
            sage: a*b
            0 --0-- 0 --1-- 0
        """
        sp = self._polys[:]
        se = self._edges[:]
        se_r = self._edges_rev[:]
        op = other._polys[:]
        oe = other._edges[:]
        oe_r = other._edges_rev[:]
        if sp[-1] != op[0]:
            return None
        # Cancel backtracking at the junction: ``i`` counts how many trailing
        # steps of ``self`` are undone by the leading steps of ``other``.
        i = 0
        while i < len(se) and i < len(oe) and se[-i - 1] == oe_r[i]:
            i += 1
        P = self.parent()
        return P.element_class(
            P,
            sp[: len(sp) - i] + op[i + 1 :],
            se[: len(se) - i] + oe[i:],
            se_r[: len(se_r) - i] + oe_r[i:],
        )
    def __invert__(self):
        r"""
        TESTS::
            sage: from flatsurf import translation_surfaces
            sage: o = translation_surfaces.octagon_and_squares()
            sage: F = o.fundamental_group()
            sage: a1,a2,a3,a4,a5,a6 = F.gens()
            sage: (a1 * a2 * ~a2 * ~a1).is_one()
            True
            sage: (a1 * ~a2 * a2 * a1) == a1 * a1
            True
        """
        # Reversing the loop reverses the polygon sequence and swaps the
        # roles of the outgoing and incoming edge sequences.
        P = self.parent()
        return P.element_class(
            P, self._polys[::-1], self._edges_rev[::-1], self._edges[::-1]
        )
    def intersection(self, other):
        r"""
        The intersection number of this element with ``other``.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: t = translation_surfaces.square_torus()
            sage: a,b = t.fundamental_group().gens()
            sage: a.intersection(b)
            1
            sage: b.intersection(a)
            -1
        This is an Abelian invariant::
            sage: x1 = a*b*b*~a*~a*b*b*a*~b*~b
            sage: x2 = a*b*a*b
            sage: x3 = ~b*~b*a
            sage: (x1*x2).intersection(x3) == x1.intersection(x3) + x2.intersection(x3)
            True
            sage: (x2*x1*~x2).intersection(x2*x3*~x2) == x1.intersection(x3)
            True
        A little bit more involved example::
            sage: S = SymmetricGroup(4)
            sage: r = S('(1,2)(3,4)')
            sage: u = S('(2,3)')
            sage: o = translation_surfaces.origami(r,u)
            sage: F = o.fundamental_group()
            sage: x = F([1,2,2,3])
            sage: x.intersection(x)
            0
            sage: a = F.gens()
            sage: m = matrix(ZZ, 5, lambda i,j: a[i].intersection(a[j]))
            sage: m
            [ 0  1  0  0  0]
            [-1  0 -1  0  0]
            [ 0  1  0  1  0]
            [ 0  0 -1  0 -1]
            [ 0  0  0  1  0]
        A slightly more involved example::
            sage: S = SymmetricGroup(8)
            sage: r = S('(1,2,3,4,5,6,7,8)')
            sage: u = S('(1,8,5,4)(2,3)(6,7)')
            sage: o = translation_surfaces.origami(r,u)
            sage: a = o.fundamental_group().gens()
            sage: m = matrix(ZZ, 9, lambda i,j: a[i].intersection(a[j]))
            sage: m
            [ 0 -1  1 -1  1 -1 -1 -1  1]
            [ 1  0  1  0  0 -1 -1 -1  1]
            [-1 -1  0  0  0  0  0  0  0]
            [ 1  0  0  0 -1  0  0  0  0]
            [-1  0  0  1  0  0  0  0  0]
            [ 1  1  0  0  0  0  0  0  0]
            [ 1  1  0  0  0  0  0  1 -1]
            [ 1  1  0  0  0  0 -1  0 -1]
            [-1 -1  0  0  0  0  1  1  0]
        """
        # Sum the local intersection numbers of all pairs of crossings of the
        # two paths inside each polygon.
        si = self._poly_cross_dict()
        oi = other._poly_cross_dict()
        n = 0
        for p in self.parent()._s.labels():
            for e0, e1 in si[p]:
                for f0, f1 in oi[p]:
                    n += intersection(e0, e1, f0, f1)
        return n
class FundamentalGroup(UniqueRepresentation, Group):
    r"""
    The fundamental group of a punctured surface
    EXAMPLES::
        sage: from flatsurf import translation_surfaces
        sage: t = translation_surfaces.square_torus()
        sage: TestSuite(t.fundamental_group()).run()
    """
    Element = Path
    def __init__(self, surface, base):
        r"""
        INPUT:
        - ``surface`` -- a finite type surface
        - ``base`` -- the label of the base polygon of the loops
        """
        if not surface.is_finite_type():
            raise ValueError("the method only work for finite surfaces")
        self._s = surface
        self._b = base
        Group.__init__(self, category=Groups().Infinite())
    def _element_constructor_(self, *args):
        r"""
        TESTS::
            sage: from flatsurf import translation_surfaces
            sage: S = SymmetricGroup(4)
            sage: r = S('(1,2)(3,4)')
            sage: u = S('(2,3)')
            sage: o = translation_surfaces.origami(r,u)
            sage: F = o.fundamental_group()
            sage: F([1,1])
            1 --1-- 2 --1-- 1
            sage: F([1,2,2,3])
            1 --1-- 2 --2-- 3 --2-- 2 --3-- 1
            sage: F([1,2,3])
            Traceback (most recent call last):
            ...
            AssertionError
        """
        if len(args) == 1 and isinstance(args[0], (tuple, list)):
            args = args[0]
        # Starting from the base polygon, follow the given sequence of edge
        # indices and record the polygons traversed together with the edge
        # index on both sides of each gluing.
        s = self._s
        p = [self._b]
        e = []
        er = []
        for i in args:
            i = int(i) % len(s.polygon(p[-1]).vertices())
            q, j = s.opposite_edge(p[-1], i)
            p.append(q)
            e.append(i)
            er.append(j)
        return self.element_class(self, p, e, er)
    def _repr_(self):
        return "Fundamental group of {} based at polygon {}".format(self._s, self._b)
    @cached_method
    def one(self):
        # The empty loop at the base polygon.
        return self.element_class(self, [self._b], [], [])
    @cached_method
    def gens(self):
        r"""
        Return a generating set
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = SymmetricGroup(8)
            sage: r = S('(1,2,3,4,5,6,7,8)')
            sage: u = S('(1,8,5,4)(2,3)(6,7)')
            sage: o = translation_surfaces.origami(r,u)
            sage: len(o.fundamental_group().gens())
            9
        """
        p = self._b
        s = self._s
        # Build a spanning tree of the dual graph of the surface, rooted at
        # the base polygon.  tree[q] = (parent polygon, edge of q glued to
        # the parent, edge of the parent glued to q); the root maps to Nones.
        tree = {}  # a tree whose root is base_label
        basis = []
        tree[p] = (None, None, None)
        wait = []  # list of edges of the dual graph, ie p1 -- (e1,e2) --> p2
        for e in range(len(s.polygon(p).vertices())):
            pp, ee = s.opposite_edge(p, e)
            wait.append((pp, ee, p, e))
        while wait:
            p1, e1, p2, e2 = wait.pop()
            assert p2 in tree
            if p1 in tree:  # new cycle?
                # Each non-tree edge of the dual graph gives one generator.
                # Both orientations of the edge are found; keep only one.
                if (p1, e1) > (p2, e2):
                    continue
                # Walk from p1 up to the root to get the first half of the
                # loop (root -> p1, built in reverse)...
                polys = [p1]
                edges = []
                edges_rev = []
                p1, e, e_back = tree[p1]
                while p1 is not None:
                    edges.append(e_back)
                    edges_rev.append(e)
                    polys.append(p1)
                    p1, e, e_back = tree[p1]
                polys.reverse()
                edges.reverse()
                edges_rev.reverse()
                # ... cross the non-tree edge ...
                polys.append(p2)
                edges.append(e1)
                edges_rev.append(e2)
                # ... and walk from p2 back down to the root.
                p2, e, e_back = tree[p2]
                while p2 is not None:
                    edges.append(e)
                    edges_rev.append(e_back)
                    polys.append(p2)
                    p2, e, e_back = tree[p2]
                basis.append((polys, edges, edges_rev))
            else:  # new branch
                tree[p1] = (p2, e1, e2)
                for e in range(len(s.polygon(p1).vertices())):
                    if e != e1:
                        pp, ee = s.opposite_edge(p1, e)
                        wait.append((pp, ee, p1, e))
        basis.sort()
        return tuple([self.element_class(self, p, e, er) for p, e, er in basis])
from sage.rings.all import ZZ, QQ, RIF, AA, NumberField, polygen
from sage.modules.all import VectorSpace, vector
from sage.structure.coerce import py_scalar_parent
from sage.structure.element import get_coercion_model, parent
from sage.misc.cachefunc import cached_method
from sage.structure.sequence import Sequence
from flatsurf.geometry.polygon import (
polygons,
EuclideanPolygon,
Polygon,
)
from flatsurf.geometry.surface import (
OrientedSimilaritySurface,
MutableOrientedSimilaritySurface,
)
from flatsurf.geometry.origami import Origami
# Small Sage integer constants, reused below (e.g. as default arguments of
# tfractal_surface) so that arithmetic stays in ZZ.
ZZ_1 = ZZ(1)
ZZ_2 = ZZ(2)
def flipper_nf_to_sage(K, name="a"):
    r"""
    Convert a flipper number field into a Sage number field
    .. NOTE::
        Currently, the code is not careful at all with root isolation.
    EXAMPLES::
        sage: import flipper  # optional - flipper  # random output due to matplotlib warnings with some combinations of setuptools and matplotlib
        sage: import realalg  # optional - flipper
        sage: from flatsurf.geometry.similarity_surface_generators import flipper_nf_to_sage
        sage: K = realalg.RealNumberField([-2r] + [0r]*5 + [1r])   # optional - flipper
        sage: K_sage = flipper_nf_to_sage(K)                       # optional - flipper
        sage: K_sage                                               # optional - flipper
        Number Field in a with defining polynomial x^6 - 2 with a = 1.122462048309373?
        sage: AA(K_sage.gen())                                     # optional - flipper
        1.122462048309373?
    """
    # flipper stores an interval enclosing the generator with integer
    # endpoints scaled by 10^precision; rescale to get actual bounds.
    interval = K.lmbda.interval()
    scale = ZZ(10) ** (-interval.precision)
    poly = QQ["x"](K.coefficients)
    root = AA.polynomial_root(poly, RIF(interval.lower * scale, interval.upper * scale))
    return NumberField(poly, name, embedding=root)
def flipper_nf_element_to_sage(x, K=None):
    r"""
    Convert a flipper number field element into Sage
    EXAMPLES::
        sage: from flatsurf.geometry.similarity_surface_generators import flipper_nf_element_to_sage
        sage: import flipper                             # optional - flipper
        sage: T = flipper.load('SB_6')                   # optional - flipper
        sage: h = T.mapping_class('s_0S_1S_2s_3s_4s_3S_5')  # optional - flipper
        sage: flipper_nf_element_to_sage(h.dilatation())    # optional - flipper
        a
        sage: AA(_)                                      # optional - flipper
        6.45052513748511?
    """
    if K is None:
        K = flipper_nf_to_sage(x.field)
    # Convert the flipper coefficients to rationals and pad with zeros up to
    # the degree of the field before building the element.
    coefficients = [QQ(c) for c in x.coefficients]
    coefficients += [0] * (K.degree() - len(coefficients))
    return K(coefficients)
class EInfinitySurface(OrientedSimilaritySurface):
    r"""
    The surface based on the `E_\infty` graph.
    The bipartite graph is shown below, with edges numbered::
    0 1 2 -2 3 -3 4 -4
    *---o---*---o---*---o---*---o---*...
    |
    |-1
    o
    Here, black vertices are colored ``*``, and white ``o``.
    Black nodes represent vertical cylinders and white nodes
    represent horizontal cylinders.
    """
    def __init__(self, lambda_squared=None, field=None):
        r"""
        INPUT:
        - ``lambda_squared`` -- an element of ``field``, the square of the
          scaling constant; if ``None``, the root of
          ``x^3 - 5x^2 + 4x - 1`` near 4 is used
        - ``field`` -- the field containing ``lambda_squared``; deduced
          from ``lambda_squared`` when not given
        """
        if lambda_squared is None:
            from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
            R = PolynomialRing(ZZ, "x")
            x = R.gen()
            field = NumberField(
                x**3 - ZZ(5) * x**2 + ZZ(4) * x - ZZ(1), "r", embedding=AA(ZZ(4))
            )
            self._lambda_squared = field.gen()
        else:
            if field is None:
                self._lambda_squared = lambda_squared
                field = lambda_squared.parent()
            else:
                self._lambda_squared = field(lambda_squared)
        from flatsurf.geometry.categories import TranslationSurfaces
        super().__init__(
            field,
            category=TranslationSurfaces().InfiniteType().Connected().WithoutBoundary(),
        )
    def is_compact(self):
        r"""
        Return whether this surface is compact as a topological space, i.e.,
        return ``False``.
        This implements
        :meth:`flatsurf.geometry.categories.topological_surfaces.TopologicalSurfaces.ParentMethods.is_compact`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.e_infinity_surface()
            sage: S.is_compact()
            False
        """
        return False
    def is_mutable(self):
        r"""
        Return whether this surface is mutable, i.e., return ``False``.
        This implements
        :meth:`flatsurf.geometry.categories.topological_surfaces.TopologicalSurfaces.ParentMethods.is_mutable`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.e_infinity_surface()
            sage: S.is_mutable()
            False
        """
        return False
    def roots(self):
        r"""
        Return root labels for the polygons forming the connected
        components of this surface.
        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.roots`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.e_infinity_surface()
            sage: S.roots()
            (0,)
        """
        return (ZZ(0),)
    def _repr_(self):
        r"""
        Return a printable representation of this surface.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.e_infinity_surface()
            sage: S
            EInfinitySurface(r)
        """
        return f"EInfinitySurface({repr(self._lambda_squared)})"
    @cached_method
    def get_white(self, n):
        r"""Get the weight of the white endpoint of edge n."""
        if n == 0 or n == 1:
            return self._lambda_squared
        if n == -1:
            return self._lambda_squared - 1
        if n == 2:
            return 1 - 3 * self._lambda_squared + self._lambda_squared**2
        if n > 2:
            x = self.get_white(n - 1)
            y = self.get_black(n)
            return self._lambda_squared * y - x
        # Remaining case n <= -2: weights are defined via the symmetry
        # n -> -n of the graph.
        return self.get_white(-n)
    @cached_method
    def get_black(self, n):
        r"""Get the weight of the black endpoint of edge n."""
        if n == 0:
            return self.base_ring().one()
        if n == 1 or n == -1 or n == 2:
            return self._lambda_squared - 1
        if n > 2:
            x = self.get_black(n - 1)
            y = self.get_white(n - 1)
            return y - x
        # Remaining case n <= -2: weights are defined via the symmetry
        # n -> 1 - n of the graph.
        return self.get_black(1 - n)
    def polygon(self, lab):
        r"""
        Return the polygon labeled by ``lab``.
        The polygon is a rectangle whose width and height are determined by
        the black and white weights of edge ``lab``.
        """
        if lab not in self.labels():
            raise ValueError("lab (=%s) not a valid label" % lab)
        return polygons.rectangle(2 * self.get_black(lab), self.get_white(lab))
    def labels(self):
        r"""
        Return the labels of this surface.
        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.labels`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.e_infinity_surface()
            sage: S.labels()
            (0, 1, -1, 2, -2, 3, -3, 4, -4, 5, -5, 6, -6, 7, -7, 8, …)
        """
        from flatsurf.geometry.surface import LabelsFromView
        return LabelsFromView(self, ZZ, finite=False)
    def opposite_edge(self, p, e):
        r"""
        Return the pair ``(pp,ee)`` to which the edge ``(p,e)`` is glued to.
        The labels ``0``, ``1``, ``-1`` and ``2`` have individual gluings;
        all other labels follow a uniform pattern.
        """
        if p == 0:
            if e == 0:
                return (0, 2)
            if e == 1:
                return (1, 3)
            if e == 2:
                return (0, 0)
            if e == 3:
                return (1, 1)
        if p == 1:
            if e == 0:
                return (-1, 2)
            if e == 1:
                return (0, 3)
            if e == 2:
                return (2, 0)
            if e == 3:
                return (0, 1)
        if p == -1:
            if e == 0:
                return (2, 2)
            if e == 1:
                return (-1, 3)
            if e == 2:
                return (1, 0)
            if e == 3:
                return (-1, 1)
        if p == 2:
            if e == 0:
                return (1, 2)
            if e == 1:
                return (-2, 3)
            if e == 2:
                return (-1, 0)
            if e == 3:
                return (-2, 1)
        # Generic case |p| > 2 (and p == -2).  Note: the original code
        # branched on ``p > 2`` here, but both branches were identical, so a
        # single formula covers all remaining labels.
        if e % 2:
            return -p, (e + 2) % 4
        return 1 - p, (e + 2) % 4
    def __hash__(self):
        r"""
        Return a hash value for this surface that is compatible with
        :meth:`__eq__`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: hash(translation_surfaces.e_infinity_surface()) == hash(translation_surfaces.e_infinity_surface())
            True
        """
        return hash((self.base_ring(), self._lambda_squared))
    def __eq__(self, other):
        r"""
        Return whether this surface is indistinguishable from ``other``.
        See :meth:`SimilaritySurfaces.FiniteType._test_eq_surface` for details
        on this notion of equality.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.e_infinity_surface()
            sage: S == S
            True
        """
        if not isinstance(other, EInfinitySurface):
            return False
        return (
            self._lambda_squared == other._lambda_squared
            and self.base_ring() == other.base_ring()
        )
class TFractalSurface(OrientedSimilaritySurface):
    r"""
    The TFractal surface.
    The TFractal surface is a translation surface of finite area built from
    infinitely many polygons. The basic building block is the following polygon::
        w/r    w     w/r
       +---+------+---+
       | 1 |  2   | 3 | h2
       +---+------+---+
           |  0   | h1
           +------+
              w
    where ``w``, ``h1``, ``h2``, ``r`` are some positive numbers. Default values
    are ``w=h1=h2=1`` and ``r=2``.
    .. TODO::
        In that surface, the linear flow can be computed more efficiently using
        only one affine interval exchange transformation with 5 intervals. But
        the underlying geometric construction is not a covering.
        Warning: we can not play at the same time with tuples and element of a
        cartesian product (see Sage trac ticket #19555)
    """
    def __init__(self, w=ZZ_1, r=ZZ_2, h1=ZZ_1, h2=ZZ_1):
        # Polygon labels are pairs (word over {L, R}, index in {0, 1, 2, 3}):
        # the word records the sequence of left/right branches taken from the
        # root block, the index selects a rectangle within that block.
        from sage.combinat.words.words import Words
        # Common field containing all four parameters.
        field = Sequence([w, r, h1, h2]).universe()
        if not field.is_field():
            field = field.fraction_field()
        from flatsurf.geometry.categories import TranslationSurfaces
        super().__init__(
            field,
            category=TranslationSurfaces()
            .InfiniteType()
            .WithoutBoundary()
            .Compact()
            .Connected(),
        )
        self._w = field(w)
        self._r = field(r)
        self._h1 = field(h1)
        self._h2 = field(h2)
        self._words = Words("LR", finite=True, infinite=False)
        self._wL = self._words("L")
        self._wR = self._words("R")
        self._root = (self._words(""), 0)
    def roots(self):
        r"""
        Return root labels for the polygons forming the connected
        components of this surface.
        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.roots`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.t_fractal()
            sage: S.roots()
            ((word: , 0),)
        """
        return (self._root,)
    def is_mutable(self):
        r"""
        Return whether this surface is mutable, i.e., return ``False``.
        This implements
        :meth:`flatsurf.geometry.categories.topological_surfaces.TopologicalSurfaces.ParentMethods.is_mutable`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.t_fractal()
            sage: S.is_mutable()
            False
        """
        return False
    def _repr_(self):
        return "The T-fractal surface with parameters w=%s, r=%s, h1=%s, h2=%s" % (
            self._w,
            self._r,
            self._h1,
            self._h2,
        )
    @cached_method
    def labels(self):
        r"""
        Return the labels of this surface.
        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.labels`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.t_fractal()
            sage: S.labels()
            ((word: , 0), (word: , 2), (word: , 3), (word: , 1), (word: R, 2), (word: R, 0), (word: L, 2), (word: L, 0), (word: R, 3), (word: R, 1), (word: L, 3), (word: L, 1), (word: RR, 2), (word: RR, 0), (word: RL, 2), (word: RL, 0), …)
        """
        from sage.sets.finite_enumerated_set import FiniteEnumeratedSet
        from sage.categories.cartesian_product import cartesian_product
        labels = cartesian_product([self._words, FiniteEnumeratedSet([0, 1, 2, 3])])
        from flatsurf.geometry.surface import LabelsFromView
        return LabelsFromView(self, labels, finite=False)
    def opposite_edge(self, p, e):
        r"""
        Labeling of polygons::
             wl,0             wr,0
            +-----+---------+------+
            |     |         |      |
            | w,1 |   w,2   |  w,3 |
            |     |         |      |
            +-----+---------+------+
                  |         |
                  |   w,0   |
                  |         |
                  +---------+
                       w
        and we always have: bot->0, right->1, top->2, left->3
        EXAMPLES::
            sage: import flatsurf.geometry.similarity_surface_generators as sfg
            sage: T = sfg.tfractal_surface()
            sage: W = T._words
            sage: w = W('LLRLRL')
            sage: T.opposite_edge((w,0),0)
            ((word: LLRLR, 1), 2)
            sage: T.opposite_edge((w,0),1)
            ((word: LLRLRL, 0), 3)
            sage: T.opposite_edge((w,0),2)
            ((word: LLRLRL, 2), 0)
            sage: T.opposite_edge((w,0),3)
            ((word: LLRLRL, 0), 1)
        """
        w, i = p
        w = self._words(w)
        i = int(i)
        e = int(e)
        # Opposite edge index: bottom <-> top (0 <-> 2), right <-> left
        # (1 <-> 3).
        if e == 0:
            f = 2
        elif e == 1:
            f = 3
        elif e == 2:
            f = 0
        elif e == 3:
            f = 1
        else:
            raise ValueError("e (={!r}) must be either 0,1,2 or 3".format(e))
        # Determine the polygon on the other side of the edge.  Crossing the
        # bottom of rectangle 0 or the top of rectangle 2 moves one level up
        # in the tree (towards the root); crossing the top of rectangle 1 or
        # 3 (or the bottom of 2) moves one level down, appending L or R.
        if i == 0:
            if e == 0:
                if w.is_empty():
                    lab = (w, 2)
                elif w[-1] == "L":
                    lab = (w[:-1], 1)
                elif w[-1] == "R":
                    lab = (w[:-1], 3)
            if e == 1:
                lab = (w, 0)
            if e == 2:
                lab = (w, 2)
            if e == 3:
                lab = (w, 0)
        elif i == 1:
            if e == 0:
                lab = (w + self._wL, 2)
            if e == 1:
                lab = (w, 2)
            if e == 2:
                lab = (w + self._wL, 0)
            if e == 3:
                lab = (w, 3)
        elif i == 2:
            if e == 0:
                lab = (w, 0)
            if e == 1:
                lab = (w, 3)
            if e == 2:
                if w.is_empty():
                    lab = (w, 0)
                elif w[-1] == "L":
                    lab = (w[:-1], 1)
                elif w[-1] == "R":
                    lab = (w[:-1], 3)
            if e == 3:
                lab = (w, 1)
        elif i == 3:
            if e == 0:
                lab = (w + self._wR, 2)
            if e == 1:
                lab = (w, 1)
            if e == 2:
                lab = (w + self._wR, 0)
            if e == 3:
                lab = (w, 2)
        else:
            raise ValueError("i (={!r}) must be either 0,1,2 or 3".format(i))
        # the fastest label constructor
        return lab, f
    def polygon(self, lab):
        r"""
        Return the polygon with label ``lab``::
             w/r         w/r
            +---+------+---+
            | 1 |  2   | 3 |
            |   |      |   | h2
            +---+------+---+
                |  0   | h1
                +------+
                    w
        EXAMPLES::
            sage: import flatsurf.geometry.similarity_surface_generators as sfg
            sage: T = sfg.tfractal_surface()
            sage: T.polygon(('L',0))
            Polygon(vertices=[(0, 0), (1/2, 0), (1/2, 1/2), (0, 1/2)])
            sage: T.polygon(('LRL',0))
            Polygon(vertices=[(0, 0), (1/8, 0), (1/8, 1/8), (0, 1/8)])
        """
        # A polygon at depth n in the tree is the base rectangle scaled by
        # 1/r^n.
        w = self._words(lab[0])
        return (1 / self._r ** w.length()) * self._base_polygon(lab[1])
    @cached_method
    def _base_polygon(self, i):
        r"""
        Return the rectangle of label ``(empty word, i)``: rectangle ``0``
        has width ``w`` and height ``h1``; rectangles ``1`` and ``3`` have
        width ``w/r`` and height ``h2``; rectangle ``2`` has width ``w`` and
        height ``h2``.
        """
        if i == 0:
            w = self._w
            h = self._h1
        if i == 1 or i == 3:
            w = self._w / self._r
            h = self._h2
        if i == 2:
            w = self._w
            h = self._h2
        return Polygon(
            base_ring=self.base_ring(), edges=[(w, 0), (0, h), (-w, 0), (0, -h)]
        )
    def __hash__(self):
        r"""
        Return a hash value for this surface that is compatible with
        :meth:`__eq__`.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: hash(translation_surfaces.t_fractal()) == hash(translation_surfaces.t_fractal())
            True
        """
        return hash((self._w, self._h1, self._r, self._h2))
    def __eq__(self, other):
        r"""
        Return whether this surface is indistinguishable from ``other``.
        See :meth:`SimilaritySurfaces.FiniteType._test_eq_surface` for details
        on this notion of equality.
        EXAMPLES::
            sage: from flatsurf import translation_surfaces
            sage: translation_surfaces.t_fractal() == translation_surfaces.t_fractal()
            True
        """
        if not isinstance(other, TFractalSurface):
            return False
        return (
            self._w == other._w
            and self._h1 == other._h1
            and self._r == other._r
            and self._h2 == other._h2
        )
def tfractal_surface(w=ZZ_1, r=ZZ_2, h1=ZZ_1, h2=ZZ_1):
    r"""
    Return the T-fractal surface with the given parameters.

    This is a thin convenience wrapper around :class:`TFractalSurface`.
    """
    return TFractalSurface(w=w, r=r, h1=h1, h2=h2)
class SimilaritySurfaceGenerators:
    r"""
    Examples of similarity surfaces.
    """
    @staticmethod
    def example():
        r"""
        Construct a SimilaritySurface from a pair of triangles.
        EXAMPLES::
            sage: from flatsurf import similarity_surfaces
            sage: ex = similarity_surfaces.example()
            sage: ex
            Genus 1 Surface built from 2 isosceles triangles
        TESTS::
            sage: TestSuite(ex).run()
            sage: from flatsurf.geometry.categories import SimilaritySurfaces
            sage: ex in SimilaritySurfaces()
            True
        """
        s = MutableOrientedSimilaritySurface(QQ)
        s.add_polygon(
            Polygon(vertices=[(0, 0), (2, -2), (2, 0)], base_ring=QQ), label=0
        )
        s.add_polygon(Polygon(vertices=[(0, 0), (2, 0), (1, 3)], base_ring=QQ), label=1)
        s.glue((0, 0), (1, 1))
        s.glue((0, 1), (1, 2))
        s.glue((0, 2), (1, 0))
        s.set_immutable()
        return s
    @staticmethod
    def self_glued_polygon(P):
        r"""
        Return the HalfTranslationSurface formed by gluing all edges of P to themselves.
        EXAMPLES::
            sage: from flatsurf import Polygon, similarity_surfaces
            sage: p = Polygon(edges=[(2,0),(-1,3),(-1,-3)])
            sage: s = similarity_surfaces.self_glued_polygon(p)
            sage: s
            Half-Translation Surface in Q_0(-1^4) built from an isosceles triangle
            sage: TestSuite(s).run()
        """
        s = MutableOrientedSimilaritySurface(P.base_ring())
        s.add_polygon(P)
        # Glue every edge of the single polygon to itself.
        for i in range(len(P.vertices())):
            s.glue((0, i), (0, i))
        s.set_immutable()
        return s
    @staticmethod
    def billiard(P, rational=None):
        r"""
        Return the ConeSurface associated to the billiard in the polygon ``P``.
        INPUT:
        - ``P`` -- a polygon
        - ``rational`` -- a boolean or ``None`` (default: ``None``) -- whether
          to assume that all the angles of ``P`` are a rational multiple of π.
        EXAMPLES::
            sage: from flatsurf import Polygon, similarity_surfaces
            sage: P = Polygon(vertices=[(0,0), (1,0), (0,1)])
            sage: Q = similarity_surfaces.billiard(P, rational=True)
            doctest:warning
            ...
            UserWarning: the rational keyword argument of billiard() has been deprecated and will be removed in a future version of sage-flatsurf; rationality checking is now faster so this is not needed anymore
            sage: Q
            Genus 0 Rational Cone Surface built from 2 isosceles triangles
            sage: from flatsurf.geometry.categories import ConeSurfaces
            sage: Q in ConeSurfaces().Rational()
            True
            sage: M = Q.minimal_cover(cover_type="translation")
            sage: M
            Minimal Translation Cover of Genus 0 Rational Cone Surface built from 2 isosceles triangles
            sage: TestSuite(M).run()
            sage: from flatsurf.geometry.categories import TranslationSurfaces
            sage: M in TranslationSurfaces()
            True
        A non-convex examples (L-shape polygon)::
            sage: P = Polygon(vertices=[(0,0), (2,0), (2,1), (1,1), (1,2), (0,2)])
            sage: Q = similarity_surfaces.billiard(P)
            sage: TestSuite(Q).run()
            sage: M = Q.minimal_cover(cover_type="translation")
            sage: TestSuite(M).run()
            sage: M.stratum()
            H_2(2, 0^5)
        A quadrilateral from Eskin-McMullen-Mukamel-Wright::
            sage: from flatsurf import Polygon
            sage: P = Polygon(angles=(1, 1, 1, 7))
            sage: S = similarity_surfaces.billiard(P)
            sage: TestSuite(S).run()
            sage: S = S.minimal_cover(cover_type="translation")
            sage: TestSuite(S).run()
            sage: S = S.erase_marked_points() # optional: pyflatsurf
            sage: TestSuite(S).run()
            sage: S, _ = S.normalized_coordinates()
            sage: TestSuite(S).run()
        Unfolding a triangle with non-algebraic lengths::
            sage: from flatsurf import EuclideanPolygonsWithAngles
            sage: E = EuclideanPolygonsWithAngles((3, 3, 5))
            sage: from pyexactreal import ExactReals # optional: exactreal
            sage: R = ExactReals(E.base_ring()) # optional: exactreal
            sage: angles = (3, 3, 5)
            sage: slopes = EuclideanPolygonsWithAngles(*angles).slopes()
            sage: P = Polygon(angles=angles, edges=[R.random_element() * slopes[0]])  # optional: exactreal
            sage: S = similarity_surfaces.billiard(P); S  # optional: exactreal
            Genus 0 Rational Cone Surface built from 2 isosceles triangles
            sage: TestSuite(S).run() # long time (6s), optional: exactreal
            sage: from flatsurf.geometry.categories import ConeSurfaces
            sage: S in ConeSurfaces()
            True
        """
        if not isinstance(P, EuclideanPolygon):
            raise TypeError("invalid input")
        if rational is not None:
            import warnings
            warnings.warn(
                "the rational keyword argument of billiard() has been deprecated and will be removed in a future version of sage-flatsurf; rationality checking is now faster so this is not needed anymore"
            )
        from flatsurf.geometry.categories import ConeSurfaces
        category = ConeSurfaces()
        if P.is_rational():
            category = category.Rational()
        V = P.base_ring() ** 2
        if not P.is_convex():
            # triangulate non-convex ones
            base_ring = P.base_ring()
            comb_edges = P.triangulation()
            vertices = P.vertices()
            comb_triangles = SimilaritySurfaceGenerators._billiard_build_faces(
                len(vertices), comb_edges
            )
            triangles = []
            internal_edges = []  # list (p1, e1, p2, e2)
            external_edges = []  # list (p1, e1)
            edge_to_lab = {}
            # A triangle (i, j, k) has edges 0: (i, j), 1: (j, k), 2: (k, i).
            for num, (i, j, k) in enumerate(comb_triangles):
                triangles.append(
                    Polygon(
                        vertices=[vertices[i], vertices[j], vertices[k]],
                        base_ring=base_ring,
                    )
                )
                edge_to_lab[(i, j)] = (num, 0)
                edge_to_lab[(j, k)] = (num, 1)
                edge_to_lab[(k, i)] = (num, 2)
            # An edge is internal (a diagonal of the triangulation) when the
            # reversed pair of vertices is an edge of another triangle;
            # otherwise it lies on the boundary of ``P``.
            for num, (i, j, k) in enumerate(comb_triangles):
                if (j, i) in edge_to_lab:
                    num2, e2 = edge_to_lab[j, i]
                    internal_edges.append((num, 0, num2, e2))
                else:
                    external_edges.append((num, 0))
                if (k, j) in edge_to_lab:
                    num2, e2 = edge_to_lab[k, j]
                    internal_edges.append((num, 1, num2, e2))
                else:
                    external_edges.append((num, 1))
                if (i, k) in edge_to_lab:
                    num2, e2 = edge_to_lab[i, k]
                    internal_edges.append((num, 2, num2, e2))
                else:
                    # Note: this used to erroneously append (num, 1), which
                    # would have left edge 2 unglued.  The edge (k, i) is
                    # edge 2 of the triangle.
                    external_edges.append((num, 2))
            P = triangles
        else:
            internal_edges = []
            external_edges = [(0, i) for i in range(len(P.vertices()))]
            base_ring = P.base_ring()
            P = [P]
        # Build the double: the triangles (labels 0..m-1) plus their mirror
        # images (labels m..2m-1), glued along the boundary of ``P``.
        surface = MutableOrientedSimilaritySurface(base_ring, category=category)
        m = len(P)
        for p in P:
            surface.add_polygon(p)
        for p in P:
            surface.add_polygon(
                Polygon(edges=[V((-x, y)) for x, y in reversed(p.edges())])
            )
        for p1, e1, p2, e2 in internal_edges:
            surface.glue((p1, e1), (p2, e2))
            ne1 = len(surface.polygon(p1).vertices())
            ne2 = len(surface.polygon(p2).vertices())
            # The mirrored polygons have reversed edge order.
            surface.glue((m + p1, ne1 - e1 - 1), (m + p2, ne2 - e2 - 1))
        for p, e in external_edges:
            ne = len(surface.polygon(p).vertices())
            surface.glue((p, e), (m + p, ne - e - 1))
        surface.set_immutable()
        return surface
    @staticmethod
    def _billiard_build_faces(n, edges):
        r"""
        Given a combinatorial list of pairs ``edges`` forming a cell-decomposition
        of a polygon (with vertices labeled from ``0`` to ``n-1``) return the list
        of cells.
        This is a helper method for :meth:`billiard`.
        EXAMPLES::
            sage: from flatsurf.geometry.similarity_surface_generators import SimilaritySurfaceGenerators
            sage: SimilaritySurfaceGenerators._billiard_build_faces(4, [(0,2)])
            [[0, 1, 2], [2, 3, 0]]
            sage: SimilaritySurfaceGenerators._billiard_build_faces(4, [(1,3)])
            [[1, 2, 3], [3, 0, 1]]
            sage: SimilaritySurfaceGenerators._billiard_build_faces(5, [(0,2), (0,3)])
            [[0, 1, 2], [3, 4, 0], [0, 2, 3]]
            sage: SimilaritySurfaceGenerators._billiard_build_faces(5, [(0,2)])
            [[0, 1, 2], [2, 3, 4, 0]]
            sage: SimilaritySurfaceGenerators._billiard_build_faces(5, [(1,4)])
            [[1, 2, 3, 4], [4, 0, 1]]
            sage: SimilaritySurfaceGenerators._billiard_build_faces(5, [(1,3),(3,0)])
            [[1, 2, 3], [3, 4, 0], [0, 1, 3]]
        """
        # Start from the full polygon and cut along each given diagonal.
        polygons = [list(range(n))]
        for u, v in edges:
            # Locate the unique cell containing both endpoints of the cut.
            j = None
            for i, p in enumerate(polygons):
                if u in p and v in p:
                    if j is not None:
                        raise RuntimeError
                    j = i
            if j is None:
                raise RuntimeError
            # Split the cell along the diagonal (u, v); both resulting cells
            # keep the cyclic vertex order and share the closing edge (u, v).
            p = polygons[j]
            i0 = p.index(u)
            i1 = p.index(v)
            if i0 > i1:
                i0, i1 = i1, i0
            polygons[j] = p[i0 : i1 + 1]
            polygons.append(p[i1:] + p[: i0 + 1])
        return polygons
    @staticmethod
    def polygon_double(P):
        r"""
        Return the ConeSurface associated to the billiard in the polygon ``P``.
        Differs from billiard(P) only in the graphical display. Here, we display
        the polygons separately.
        """
        from sage.matrix.constructor import matrix
        n = len(P.vertices())
        # Reflect across the vertical axis to build the mirror copy.
        r = matrix(2, [-1, 0, 0, 1])
        Q = Polygon(edges=[r * v for v in reversed(P.edges())])
        surface = MutableOrientedSimilaritySurface(P.base_ring())
        surface.add_polygon(P, label=0)
        surface.add_polygon(Q, label=1)
        for i in range(n):
            surface.glue((0, i), (1, n - i - 1))
        surface.set_immutable()
        return surface
    @staticmethod
    def right_angle_triangle(w, h):
        r"""
        TESTS::
            sage: from flatsurf import similarity_surfaces
            sage: R = similarity_surfaces.right_angle_triangle(2, 3)
            sage: R
            Genus 0 Cone Surface built from 2 right triangles
            sage: from flatsurf.geometry.categories import ConeSurfaces
            sage: R in ConeSurfaces()
            True
            sage: TestSuite(R).run()
        """
        F = Sequence([w, h]).universe()
        if not F.is_field():
            F = F.fraction_field()
        V = VectorSpace(F, 2)
        s = MutableOrientedSimilaritySurface(F)
        s.add_polygon(
            Polygon(base_ring=F, edges=[V((w, 0)), V((-w, h)), V((0, -h))]), label=0
        )
        s.add_polygon(
            Polygon(base_ring=F, edges=[V((0, h)), V((-w, -h)), V((w, 0))]), label=1
        )
        s.glue((0, 0), (1, 2))
        s.glue((0, 1), (1, 1))
        s.glue((0, 2), (1, 0))
        s.set_immutable()
        return s
similarity_surfaces = SimilaritySurfaceGenerators()
class DilationSurfaceGenerators:
    r"""
    Examples of dilation surfaces.
    """
    @staticmethod
    def basic_dilation_torus(a):
        r"""
        Return a dilation torus built from a `1 \times 1` square and a `a
        \times 1` rectangle. Each edge of the square is glued to the opposite
        edge of the rectangle. This results in horizontal edges glued by a
        dilation with a scaling factor of a, and vertical edges being glued by
        translation::
             b       a
            +----+---------+
            | 0  |    1    |
          c |    |         | c
            +----+---------+
              a       b
        EXAMPLES::
            sage: from flatsurf import dilation_surfaces
            sage: ds = dilation_surfaces.basic_dilation_torus(AA(sqrt(2)))
            sage: ds
            Genus 1 Positive Dilation Surface built from a square and a rectangle
            sage: from flatsurf.geometry.categories import DilationSurfaces
            sage: ds in DilationSurfaces().Positive()
            True
            sage: TestSuite(ds).run()
        """
        s = MutableOrientedSimilaritySurface(a.parent().fraction_field())
        # The unit square (label 0) and the a x 1 rectangle (label 1).
        s.add_polygon(
            Polygon(base_ring=s.base_ring(), edges=[(0, 1), (-1, 0), (0, -1), (1, 0)]),
            label=0,
        )
        s.add_polygon(
            Polygon(base_ring=s.base_ring(), edges=[(0, 1), (-a, 0), (0, -1), (a, 0)]),
            label=1,
        )
        # Glue each edge of the square to the opposite edge of the rectangle.
        s.glue((0, 0), (1, 2))
        s.glue((0, 1), (1, 3))
        s.glue((0, 2), (1, 0))
        s.glue((0, 3), (1, 1))
        s.set_roots([0])
        s.set_immutable()
        return s
    @staticmethod
    def genus_two_square(a, b, c, d):
        r"""
        A genus two dilation surface is returned.
        The unit square is made into an octagon by marking a point on
        each of its edges. Then opposite sides of this octagon are
        glued together by translation. (Since we currently require strictly
        convex polygons, we subdivide the square into a hexagon and two
        triangles as depicted below.) The parameters ``a``, ``b``, ``c``, and
        ``d`` should be real numbers strictly between zero and one. These
        represent the lengths of an edge of the resulting octagon, as below::
                 c
            +--+-------+
          d |2/        |
            |/         |
            +    0     +
            |         /|
            |        /1| b
            +-------+--+
                a
        The other edges will have length `1-a`, `1-b`, `1-c`, and `1-d`.
        Dilations used to glue edges will be by factors `c/a`, `d/b`,
        `(1-c)/(1-a)` and `(1-d)/(1-b)`.
        EXAMPLES::
            sage: from flatsurf import dilation_surfaces
            sage: ds = dilation_surfaces.genus_two_square(1/2, 1/3, 1/4, 1/5)
            sage: ds
            Genus 2 Positive Dilation Surface built from 2 right triangles and a hexagon
            sage: from flatsurf.geometry.categories import DilationSurfaces
            sage: ds in DilationSurfaces().Positive()
            True
            sage: TestSuite(ds).run()
        """
        field = Sequence([a, b, c, d]).universe().fraction_field()
        # Note: the surface must live over the field generated by the
        # parameters (the original code used QQ here, which only works when
        # a, b, c, d are rational).
        s = MutableOrientedSimilaritySurface(field)
        hexagon = Polygon(
            edges=[(a, 0), (1 - a, b), (0, 1 - b), (-c, 0), (c - 1, -d), (0, d - 1)],
            base_ring=field,
        )
        s.add_polygon(hexagon, label=0)
        s.set_roots([0])
        triangle1 = Polygon(base_ring=field, edges=[(1 - a, 0), (0, b), (a - 1, -b)])
        s.add_polygon(triangle1, label=1)
        triangle2 = Polygon(base_ring=field, edges=[(1 - c, d), (c - 1, 0), (0, -d)])
        s.add_polygon(triangle2, label=2)
        # Glue opposite sides of the octagon; the triangles fill in the two
        # cut-off corners of the square.
        s.glue((0, 0), (0, 3))
        s.glue((0, 2), (0, 5))
        s.glue((0, 1), (1, 2))
        s.glue((0, 4), (2, 0))
        s.glue((1, 0), (2, 1))
        s.glue((1, 1), (2, 2))
        s.set_immutable()
        return s
# Singleton giving access to the dilation surface constructions,
# e.g. ``dilation_surfaces.basic_dilation_torus(2)``.
dilation_surfaces = DilationSurfaceGenerators()
class HalfTranslationSurfaceGenerators:
    r"""
    Constructions of half-translation surfaces.
    """

    # TODO: ideally, we should be able to construct a non-convex polygon and make the construction
    # below as a special case of billiard unfolding.
    @staticmethod
    def step_billiard(w, h):
        r"""
        Return a (finite) step billiard associated to the given widths ``w`` and heights ``h``.

        EXAMPLES::

            sage: from flatsurf import half_translation_surfaces
            sage: S = half_translation_surfaces.step_billiard([1,1,1,1], [1,1/2,1/3,1/5])
            sage: S
            StepBilliard(w=[1, 1, 1, 1], h=[1, 1/2, 1/3, 1/5])
            sage: from flatsurf.geometry.categories import DilationSurfaces
            sage: S in DilationSurfaces()
            True
            sage: TestSuite(S).run()
        """
        n = len(h)
        if len(w) != n:
            raise ValueError
        if n < 2:
            raise ValueError("w and h must have length at least 2")
        H = sum(h)  # total height
        W = sum(w)  # total width
        R = Sequence(w + h).universe()
        base_ring = R.fraction_field()

        # P collects the "steps": all but the last are pentagons (the notch at
        # the top-right creates the fifth vertex); the last step is a rectangle.
        P = []
        Prev = []  # reassigned below; kept for symmetry with P
        x = 0
        y = H
        for i in range(n - 1):
            P.append(
                Polygon(
                    vertices=[
                        (x, 0),
                        (x + w[i], 0),
                        (x + w[i], y - h[i]),
                        (x + w[i], y),
                        (x, y),
                    ],
                    base_ring=base_ring,
                )
            )
            x += w[i]
            y -= h[i]
        assert x == W - w[-1]
        assert y == h[-1]
        P.append(
            Polygon(
                vertices=[(x, 0), (x + w[-1], 0), (x + w[-1], y), (x, y)],
                base_ring=base_ring,
            )
        )
        # Prev holds the reflections of the steps through the x-axis.
        Prev = [
            Polygon(
                vertices=[(x, -y) for x, y in reversed(p.vertices())],
                base_ring=base_ring,
            )
            for p in P
        ]

        S = MutableOrientedSimilaritySurface(base_ring)
        S.rename(
            "StepBilliard(w=[%s], h=[%s])"
            % (", ".join(map(str, w)), ", ".join(map(str, h)))
        )
        for p in P:
            S.add_polygon(p)  # get labels 0, ..., n-1
        for p in Prev:
            S.add_polygon(p)  # get labels n, n+1, ..., 2n-1

        # reflection gluings
        # (gluings between the polygon and its reflection)
        S.glue((0, 4), (n, 4))
        S.glue((n - 1, 0), (2 * n - 1, 2))
        S.glue((n - 1, 1), (2 * n - 1, 1))
        S.glue((n - 1, 2), (2 * n - 1, 0))
        for i in range(n - 1):
            # glue((polygon1, edge1), (polygon2, edge2))
            S.glue((i, 0), (n + i, 3))
            S.glue((i, 2), (n + i, 1))
            S.glue((i, 3), (n + i, 0))

        # translation gluings
        S.glue((n - 2, 1), (n - 1, 3))
        S.glue((2 * n - 2, 2), (2 * n - 1, 3))
        for i in range(n - 2):
            S.glue((i, 1), (i + 1, 4))
            S.glue((n + i, 2), (n + i + 1, 4))

        S.set_immutable()
        return S
# Singleton giving access to the half-translation surface constructions,
# e.g. ``half_translation_surfaces.step_billiard([1, 1], [1, 1/2])``.
half_translation_surfaces = HalfTranslationSurfaceGenerators()
class TranslationSurfaceGenerators:
r"""
Common and less common translation surfaces.
"""
@staticmethod
def square_torus(a=1):
r"""
Return flat torus obtained by identification of the opposite sides of a
square.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: T = translation_surfaces.square_torus()
sage: T
Translation Surface in H_1(0) built from a square
sage: from flatsurf.geometry.categories import TranslationSurfaces
sage: T in TranslationSurfaces()
True
sage: TestSuite(T).run()
Rational directions are completely periodic::
sage: v = T.tangent_vector(0, (1/33, 1/257), (13,17))
sage: L = v.straight_line_trajectory()
sage: L.flow(13+17)
sage: L.is_closed()
True
TESTS::
sage: TestSuite(T).run()
"""
return TranslationSurfaceGenerators.torus((a, 0), (0, a))
    @staticmethod
    def torus(u, v):
        r"""
        Return the flat torus obtained as the quotient of the plane by the pair
        of vectors ``u`` and ``v``.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: T = translation_surfaces.torus((1, AA(2).sqrt()), (AA(3).sqrt(), 3))
            sage: T
            Translation Surface in H_1(0) built from a quadrilateral
            sage: T.polygon(0)
            Polygon(vertices=[(0, 0), (1, 1.414213562373095?), (2.732050807568878?, 4.414213562373095?), (1.732050807568878?, 3)])
            sage: from flatsurf.geometry.categories import TranslationSurfaces
            sage: T in TranslationSurfaces()
            True
        """
        u = vector(u)
        v = vector(v)
        # Find a common field containing the coordinates of both vectors.
        field = Sequence([u, v]).universe().base_ring()
        if isinstance(field, type):
            # a raw Python type such as int or float; replace with its Sage parent
            field = py_scalar_parent(field)
        if not field.is_field():
            field = field.fraction_field()
        s = MutableOrientedSimilaritySurface(field)
        # Fundamental domain: the parallelogram spanned by u and v.
        p = Polygon(vertices=[(0, 0), u, u + v, v], base_ring=field)
        s.add_polygon(p)
        # Identify opposite sides by translation.
        s.glue((0, 0), (0, 2))
        s.glue((0, 1), (0, 3))
        s.set_immutable()
        return s
@staticmethod
def veech_2n_gon(n):
r"""
The regular 2n-gon with opposite sides identified.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: s = translation_surfaces.veech_2n_gon(5)
sage: s
Translation Surface in H_2(1^2) built from a regular decagon
sage: TestSuite(s).run()
"""
p = polygons.regular_ngon(2 * n)
s = MutableOrientedSimilaritySurface(p.base_ring())
s.add_polygon(p)
for i in range(2 * n):
s.glue((0, i), (0, (i + n) % (2 * n)))
s.set_immutable()
return s
@staticmethod
def veech_double_n_gon(n):
r"""
A pair of regular n-gons with each edge of one identified to an edge of the other to make a translation surface.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: s=translation_surfaces.veech_double_n_gon(5)
sage: s
Translation Surface in H_2(2) built from 2 regular pentagons
sage: TestSuite(s).run()
"""
from sage.matrix.constructor import Matrix
p = polygons.regular_ngon(n)
s = MutableOrientedSimilaritySurface(p.base_ring())
m = Matrix([[-1, 0], [0, -1]])
s.add_polygon(p, label=0)
s.add_polygon(m * p, label=1)
for i in range(n):
s.glue((0, i), (1, i))
s.set_immutable()
return s
@staticmethod
def regular_octagon():
r"""
Return the translation surface built from the regular octagon by
identifying opposite sides.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: T = translation_surfaces.regular_octagon()
sage: T
Translation Surface in H_2(2) built from a regular octagon
sage: TestSuite(T).run()
sage: from flatsurf.geometry.categories import TranslationSurfaces
sage: T in TranslationSurfaces()
True
"""
return translation_surfaces.veech_2n_gon(4)
@staticmethod
def mcmullen_genus2_prototype(w, h, t, e, rel=0, base_ring=None):
r"""
McMullen prototypes in the stratum H(2).
These prototype appear at least in McMullen "Teichmüller curves in genus
two: Discriminant and spin" (2004). The notation from that paper are
quadruple ``(a, b, c, e)`` which translates in our notation as
``w = b``, ``h = c``, ``t = a`` (and ``e = e``).
The associated discriminant is `D = e^2 + 4 wh`.
If ``rel`` is a positive parameter (less than w-lambda) the surface belongs
to the eigenform locus in H(1,1).
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: from surface_dynamics import AbelianStratum
sage: prototypes = {
....: 5: [(1,1,0,-1)],
....: 8: [(1,1,0,-2), (2,1,0,0)],
....: 9: [(2,1,0,-1)],
....: 12: [(1,2,0,-2), (2,1,0,-2), (3,1,0,0)],
....: 13: [(1,1,0,-3), (3,1,0,-1), (3,1,0,1)],
....: 16: [(3,1,0,-2), (4,1,0,0)],
....: 17: [(1,2,0,-3), (2,1,0,-3), (2,2,0,-1), (2,2,1,-1), (4,1,0,-1), (4,1,0,1)],
....: 20: [(1,1,0,-4), (2,2,1,-2), (4,1,0,-2), (4,1,0,2)],
....: 21: [(1,3,0,-3), (3,1,0,-3)],
....: 24: [(1,2,0,-4), (2,1,0,-4), (3,2,0,0)],
....: 25: [(2,2,0,-3), (2,2,1,-3), (3,2,0,-1), (4,1,0,-3)]}
sage: for D in sorted(prototypes): # long time (.5s)
....: for w,h,t,e in prototypes[D]:
....: T = translation_surfaces.mcmullen_genus2_prototype(w,h,t,e)
....: assert T.stratum() == AbelianStratum(2)
....: assert (D.is_square() and T.base_ring() is QQ) or (T.base_ring().polynomial().discriminant() == D)
An example with some relative homology::
sage: U8 = translation_surfaces.mcmullen_genus2_prototype(2,1,0,0,1/4) # discriminant 8
sage: U8
Translation Surface in H_2(1^2) built from a rectangle and a quadrilateral
sage: U12 = translation_surfaces.mcmullen_genus2_prototype(3,1,0,0,3/10) # discriminant 12
sage: U12
Translation Surface in H_2(1^2) built from a rectangle and a quadrilateral
sage: U8.stratum()
H_2(1^2)
sage: U8.base_ring().polynomial().discriminant()
8
sage: U8.j_invariant()
(
[4 0]
(0), (0), [0 2]
)
sage: U12.stratum()
H_2(1^2)
sage: U12.base_ring().polynomial().discriminant()
12
sage: U12.j_invariant()
(
[6 0]
(0), (0), [0 2]
)
"""
w = ZZ(w)
h = ZZ(h)
t = ZZ(t)
e = ZZ(e)
g = w.gcd(h)
if (
w <= 0
or h <= 0
or t < 0
or t >= g
or not g.gcd(t).gcd(e).is_one()
or e + h >= w
):
raise ValueError("invalid parameters")
x = polygen(QQ)
poly = x**2 - e * x - w * h
if poly.is_irreducible():
if base_ring is None:
emb = AA.polynomial_root(poly, RIF(0, w))
K = NumberField(poly, "l", embedding=emb)
λ = K.gen()
else:
K = base_ring
roots = poly.roots(K, multiplicities=False)
if len(roots) != 2:
raise ValueError("invalid base ring")
roots.sort(key=lambda x: x.numerical_approx())
assert roots[0] < 0 and roots[0] > 0
λ = roots[1]
else:
if base_ring is None:
K = QQ
else:
K = base_ring
D = e**2 + 4 * w * h
d = D.sqrt()
λ = (e + d) / 2
try:
rel = K(rel)
except TypeError:
K = get_coercion_model().common_parent(K, parent(rel))
λ = K(λ)
rel = K(rel)
# (lambda,lambda) square on top
# twisted (w,0), (t,h)
s = MutableOrientedSimilaritySurface(K)
if rel:
if rel < 0 or rel > w - λ:
raise ValueError("invalid rel argument")
s.add_polygon(
Polygon(vertices=[(0, 0), (λ, 0), (λ + rel, λ), (rel, λ)], base_ring=K)
)
s.add_polygon(
Polygon(
vertices=[
(0, 0),
(rel, 0),
(rel + λ, 0),
(w, 0),
(w + t, h),
(λ + rel + t, h),
(t + λ, h),
(t, h),
],
base_ring=K,
)
)
s.glue((0, 1), (0, 3))
s.glue((0, 0), (1, 6))
s.glue((0, 2), (1, 1))
s.glue((1, 2), (1, 4))
s.glue((1, 3), (1, 7))
s.glue((1, 0), (1, 5))
else:
s.add_polygon(
Polygon(vertices=[(0, 0), (λ, 0), (λ, λ), (0, λ)], base_ring=K)
)
s.add_polygon(
Polygon(
vertices=[(0, 0), (λ, 0), (w, 0), (w + t, h), (λ + t, h), (t, h)],
base_ring=K,
)
)
s.glue((0, 1), (0, 3))
s.glue((0, 0), (1, 4))
s.glue((0, 2), (1, 0))
s.glue((1, 1), (1, 3))
s.glue((1, 2), (1, 5))
s.set_immutable()
return s
    @staticmethod
    def lanneau_nguyen_genus3_prototype(w, h, t, e):
        r"""
        Return the Lanneau--Ngyuen prototype in the stratum H(4) with
        parameters ``w``, ``h``, ``t``, and ``e``.

        The associated discriminant is `e^2 + 8 wh`.

        INPUT:

        - ``w`` -- a positive integer

        - ``h`` -- a positive integer

        - ``t`` -- a non-negative integer strictly less than the gcd of ``w``
          and ``h``

        - ``e`` -- an integer strictly less than ``w - 2h``

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: T = translation_surfaces.lanneau_nguyen_genus3_prototype(2,1,0,-1)
            sage: T.stratum()
            H_3(4)

        REFERENCES:

        - Erwan Lanneau, and Duc-Manh Nguyen, "Teichmüller curves generated by
          Weierstrass Prym eigenforms in genus 3 and genus 4", Journal of
          Topology 7.2, 2014, pp.475-522
        """
        from sage.all import gcd

        # Parameter validation following the conditions in the INPUT section.
        w = ZZ(w)
        h = ZZ(h)
        t = ZZ(t)
        e = ZZ(e)
        if not w > 0:
            raise ValueError("w must be positive")
        if not h > 0:
            raise ValueError("h must be positive")
        if not e + 2 * h < w:
            raise ValueError("e + 2h < w must hold")
        if not t >= 0:
            raise ValueError("t must be non-negative")
        if not t < gcd(w, h):
            raise ValueError("t must be smaller than the gcd of w and h")
        if not gcd([w, h, t, e]) == 1:
            raise ValueError("w, h, t, e must be coprime")

        # λ = (e + sqrt(D))/2 with D = e^2 + 8wh; pass to a quadratic field
        # when D is not a perfect square.
        K = QQ
        D = K(e**2 + 8 * w * h)
        if not D.is_square():
            from sage.all import QuadraticField

            K = QuadraticField(D)
            D = K(D)
        d = D.sqrt()
        λ = (e + d) / 2

        # (λ, λ) square in the middle
        # twisted (w, 0), (t, h) on top and bottom
        s = MutableOrientedSimilaritySurface(K)

        from flatsurf import Polygon

        # label 0: bottom hexagon
        s.add_polygon(
            Polygon(
                vertices=[(0, 0), (λ, 0), (w, 0), (w + t, h), (λ + t, h), (t, h)],
                base_ring=K,
            )
        )
        # label 1: middle λ x λ square
        s.add_polygon(Polygon(vertices=[(0, 0), (λ, 0), (λ, λ), (0, λ)], base_ring=K))
        # label 2: top hexagon
        s.add_polygon(
            Polygon(
                vertices=[
                    (0, 0),
                    (w - λ, 0),
                    (w, 0),
                    (w + t, h),
                    (w - λ + t, h),
                    (t, h),
                ],
                base_ring=K,
            )
        )
        s.glue((0, 0), (2, 3))
        s.glue((0, 1), (0, 3))
        s.glue((0, 2), (0, 5))
        s.glue((0, 4), (1, 0))
        s.glue((1, 1), (1, 3))
        s.glue((1, 2), (2, 1))
        s.glue((2, 0), (2, 4))
        s.glue((2, 2), (2, 5))
        s.set_immutable()
        return s
    @staticmethod
    def lanneau_nguyen_genus4_prototype(w, h, t, e):
        r"""
        Return the Lanneau--Ngyuen prototype in the stratum H(6) with
        parameters ``w``, ``h``, ``t``, and ``e``.

        The associated discriminant is `D = e^2 + 4 wh`.

        INPUT:

        - ``w`` -- a positive integer

        - ``h`` -- a positive integer

        - ``t`` -- a non-negative integer strictly smaller than the gcd of
          ``w`` and ``h``

        - ``e`` -- an integer (possibly negative) such that the resulting
          `\lambda = (e + \sqrt{D})/2` is strictly smaller than ``w`` and
          distinct from ``w/2``

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: T = translation_surfaces.lanneau_nguyen_genus4_prototype(1,1,0,-2)
            sage: T.stratum()
            H_4(6)

        REFERENCES:

        - Erwan Lanneau, and Duc-Manh Nguyen, "Weierstrass Prym eigenforms in
          genus four", Journal of the Institute of Mathematics of Jussieu,
          19.6, 2020, pp.2045-2085
        """
        from sage.all import gcd

        # Parameter validation following the conditions in the INPUT section.
        w = ZZ(w)
        h = ZZ(h)
        t = ZZ(t)
        e = ZZ(e)
        if not w > 0:
            raise ValueError("w must be positive")
        if not h > 0:
            raise ValueError("h must be positive")
        if not t >= 0:
            raise ValueError("t must be non-negative")
        if not t < gcd(w, h):
            raise ValueError("t must be smaller than the gcd of w and h")
        if not gcd([w, h, t, e]) == 1:
            raise ValueError("w, h, t, e must be coprime")

        # λ = (e + sqrt(D))/2 with D = e^2 + 4wh; pass to a quadratic field
        # when D is not a perfect square.
        K = QQ
        D = K(e**2 + 4 * w * h)
        if not D.is_square():
            from sage.all import QuadraticField

            K = QuadraticField(D)
            D = K(D)
        d = D.sqrt()
        λ = (e + d) / 2
        if not λ < w:
            raise ValueError("λ must be smaller than w")
        if λ == w / 2:
            raise ValueError("λ and w/2 must be distinct")

        # (λ/2, λ/2) squares on top and bottom
        # twisted (w/2, 0), (t/2, h/2) in the middle
        s = MutableOrientedSimilaritySurface(K)

        from flatsurf import Polygon

        # label 0: bottom λ/2 x λ/2 square
        s.add_polygon(
            Polygon(
                vertices=[(0, 0), (λ / 2, 0), (λ / 2, λ / 2), (0, λ / 2)], base_ring=K
            )
        )
        # labels 1 and 2: the two twisted middle heptagons
        s.add_polygon(
            Polygon(
                vertices=[
                    (0, 0),
                    (w / 2 - λ, 0),
                    (w / 2 - λ / 2, 0),
                    (w / 2, 0),
                    (w / 2 + t / 2, h / 2),
                    (w / 2 - λ + t / 2, h / 2),
                    (t / 2, h / 2),
                ],
                base_ring=K,
            )
        )
        s.add_polygon(
            Polygon(
                vertices=[
                    (0, 0),
                    (λ, 0),
                    (w / 2, 0),
                    (w / 2 + t / 2, h / 2),
                    (λ + t / 2, h / 2),
                    (λ / 2 + t / 2, h / 2),
                    (t / 2, h / 2),
                ],
                base_ring=K,
            )
        )
        # label 3: top λ/2 x λ/2 square
        s.add_polygon(
            Polygon(
                vertices=[(0, 0), (λ / 2, 0), (λ / 2, λ / 2), (0, λ / 2)], base_ring=K
            )
        )
        s.glue((0, 0), (2, 4))
        s.glue((0, 1), (0, 3))
        s.glue((0, 2), (1, 2))
        s.glue((1, 0), (1, 5))
        s.glue((1, 1), (3, 2))
        s.glue((1, 3), (1, 6))
        s.glue((1, 4), (2, 0))
        s.glue((2, 1), (2, 3))
        s.glue((2, 2), (2, 6))
        s.glue((2, 5), (3, 0))
        s.glue((3, 1), (3, 3))
        s.set_immutable()
        return s
    @staticmethod
    def mcmullen_L(l1, l2, l3, l4):
        r"""
        Return McMullen's L shaped surface with parameters l1, l2, l3, l4.

        Polygon labels and lengths are marked below::

            +-----+
            |     |
            | 1   |l1
            |     |
            |     |    l4
            +-----+---------+
            |     |         |
            | 0   |  2      |l2
            |     |         |
            +-----+---------+
              l3

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: s = translation_surfaces.mcmullen_L(1,1,1,1)
            sage: s
            Translation Surface in H_2(2) built from 3 squares
            sage: TestSuite(s).run()

        TESTS::

            sage: from flatsurf import translation_surfaces
            sage: L = translation_surfaces.mcmullen_L(1r, 1r, 1r, 1r)
            sage: from flatsurf.geometry.categories import TranslationSurfaces
            sage: L in TranslationSurfaces()
            True
        """
        # Find a field containing all four side lengths.
        field = Sequence([l1, l2, l3, l4]).universe()
        if isinstance(field, type):
            # a raw Python type such as int or float; replace with its Sage parent
            field = py_scalar_parent(field)
        if not field.is_field():
            field = field.fraction_field()

        s = MutableOrientedSimilaritySurface(field)
        # Three rectangles as in the picture above, described by edge vectors.
        s.add_polygon(
            Polygon(edges=[(l3, 0), (0, l2), (-l3, 0), (0, -l2)], base_ring=field)
        )
        s.add_polygon(
            Polygon(edges=[(l3, 0), (0, l1), (-l3, 0), (0, -l1)], base_ring=field)
        )
        s.add_polygon(
            Polygon(edges=[(l4, 0), (0, l2), (-l4, 0), (0, -l2)], base_ring=field)
        )
        # Identify opposite sides across the L shape by translation.
        s.glue((0, 0), (1, 2))
        s.glue((0, 1), (2, 3))
        s.glue((0, 2), (1, 0))
        s.glue((0, 3), (2, 1))
        s.glue((1, 1), (1, 3))
        s.glue((2, 0), (2, 2))
        s.set_immutable()
        return s
    @staticmethod
    def ward(n):
        r"""
        Return the surface formed by gluing a regular 2n-gon to two regular n-gons.

        These surfaces have Veech's lattice property due to work of Ward.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: s = translation_surfaces.ward(3)
            sage: s
            Translation Surface in H_1(0^3) built from 2 equilateral triangles and a regular hexagon
            sage: TestSuite(s).run()
            sage: s = translation_surfaces.ward(7)
            sage: s
            Translation Surface in H_6(10) built from 2 regular heptagons and a regular tetradecagon
            sage: TestSuite(s).run()
        """
        if n < 3:
            raise ValueError
        # Scale the 2n-gon by 2 so its edges match the n-gons built below.
        o = ZZ_2 * polygons.regular_ngon(2 * n)
        # Each n-gon takes every other edge vector of the 2n-gon (offset by n,
        # respectively n+1, so the edges are anti-parallel to those of o).
        p1 = Polygon(edges=[o.edge((2 * i + n) % (2 * n)) for i in range(n)])
        p2 = Polygon(edges=[o.edge((2 * i + n + 1) % (2 * n)) for i in range(n)])
        s = MutableOrientedSimilaritySurface(o.base_ring())
        s.add_polygon(o)
        s.add_polygon(p1)
        s.add_polygon(p2)
        # Alternate the edges of the 2n-gon between the two n-gons.
        for i in range(n):
            s.glue((1, i), (0, 2 * i))
            s.glue((2, i), (0, 2 * i + 1))
        s.set_immutable()
        return s
@staticmethod
def octagon_and_squares():
r"""
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: os = translation_surfaces.octagon_and_squares()
sage: os
Translation Surface in H_3(4) built from 2 squares and a regular octagon
sage: TestSuite(os).run()
sage: from flatsurf.geometry.categories import TranslationSurfaces
sage: os in TranslationSurfaces()
True
"""
return translation_surfaces.ward(4)
    @staticmethod
    def cathedral(a, b):
        r"""
        Return the cathedral surface with parameters ``a`` and ``b``.

        For any parameter ``a`` and ``b``, the cathedral surface belongs to the
        so-called Gothic locus described in McMullen, Mukamel, Wright "Cubic
        curves and totally geodesic subvarieties of moduli space" (2017)::

                     1
                    <--->

                          /\           2a
                         /  \     +------+
              a     b   |    | a /        \
            +----+    +---+    +           \
            |    |    |   |    |            |
          1 | P0 | P1 |P2 |    |     P3     |
            |    |    |   |    |            |
            +----+    +---+    +           /
              b    |    |    \        /
                    \  /      +------+
                     \/

        If a and b satisfies

        .. MATH::

            a = x + y \sqrt(d) \qquad b = -3x -3/2 + 3y \sqrt(d)

        for some rational x,y and d >= 0 then it is a Teichmüller curve.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: C = translation_surfaces.cathedral(1,2)
            sage: C
            Translation Surface in H_4(2^3) built from 2 squares, a hexagon with 4 marked vertices and an octagon
            sage: TestSuite(C).run()
            sage: from pyexactreal import ExactReals # optional: exactreal
            sage: K = QuadraticField(5, embedding=AA(5).sqrt())
            sage: R = ExactReals(K) # optional: exactreal
            sage: C = translation_surfaces.cathedral(K.gen(), R.random_element([0.1, 0.2])) # optional: exactreal
            sage: C # optional: exactreal
            Translation Surface in H_4(2^3) built from 2 rectangles, a hexagon with 4 marked vertices and an octagon
            sage: C.stratum() # optional: exactreal
            H_4(2^3)
            sage: TestSuite(C).run() # long time (6s), optional: exactreal
        """
        # Find a ring containing a, b and the rationals (needed for "half" below).
        ring = Sequence([a, b]).universe()
        if isinstance(ring, type):
            # a raw Python type such as int or float; replace with its Sage parent
            ring = py_scalar_parent(ring)
        if not ring.has_coerce_map_from(QQ):
            ring = ring.fraction_field()
        a = ring(a)
        b = ring(b)
        s = MutableOrientedSimilaritySurface(ring)
        half = QQ((1, 2))
        # P0: the a x 1 rectangle on the left.
        p0 = Polygon(base_ring=ring, vertices=[(0, 0), (a, 0), (a, 1), (0, 1)])
        # P1: the tall hexagon-like decagon with the spikes of height b.
        p1 = Polygon(
            base_ring=ring,
            vertices=[
                (a, 0),
                (a, -b),
                (a + half, -b - half),
                (a + 1, -b),
                (a + 1, 0),
                (a + 1, 1),
                (a + 1, b + 1),
                (a + half, b + 1 + half),
                (a, b + 1),
                (a, 1),
            ],
        )
        # P2: the a x 1 rectangle in the middle.
        p2 = Polygon(
            base_ring=ring,
            vertices=[(a + 1, 0), (2 * a + 1, 0), (2 * a + 1, 1), (a + 1, 1)],
        )
        # P3: the octagon on the right.
        p3 = Polygon(
            base_ring=ring,
            vertices=[
                (2 * a + 1, 0),
                (2 * a + 1 + half, -half),
                (4 * a + 1 + half, -half),
                (4 * a + 2, 0),
                (4 * a + 2, 1),
                (4 * a + 1 + half, 1 + half),
                (2 * a + 1 + half, 1 + half),
                (2 * a + 1, 1),
            ],
        )
        s.add_polygon(p0)
        s.add_polygon(p1)
        s.add_polygon(p2)
        s.add_polygon(p3)
        s.glue((0, 0), (0, 2))
        s.glue((0, 1), (1, 9))
        s.glue((0, 3), (3, 3))
        s.glue((1, 0), (1, 3))
        s.glue((1, 1), (3, 4))
        s.glue((1, 2), (3, 6))
        s.glue((1, 4), (2, 3))
        s.glue((1, 5), (1, 8))
        s.glue((1, 6), (3, 0))
        s.glue((1, 7), (3, 2))
        s.glue((2, 0), (2, 2))
        s.glue((2, 1), (3, 7))
        s.glue((3, 1), (3, 5))
        s.set_immutable()
        return s
    @staticmethod
    def arnoux_yoccoz(genus):
        r"""
        Construct the Arnoux-Yoccoz surface of genus 3 or greater.

        This presentation of the surface follows Section 2.3 of
        Joshua P. Bowman's paper "The Complete Family of Arnoux-Yoccoz
        Surfaces."

        INPUT:

        - ``genus`` -- an integer; must be at least 3

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: s = translation_surfaces.arnoux_yoccoz(4)
            sage: s
            Translation Surface in H_4(3^2) built from 16 triangles
            sage: TestSuite(s).run()
            sage: s.is_delaunay_decomposed()
            True
            sage: s = s.canonicalize()
            sage: s
            Translation Surface in H_4(3^2) built from 16 triangles
            sage: field=s.base_ring()
            sage: a = field.gen()
            sage: from sage.matrix.constructor import Matrix
            sage: m = Matrix([[a,0],[0,~a]])
            sage: ss = m*s
            sage: ss = ss.canonicalize()
            sage: s.cmp(ss) == 0
            True

        The Arnoux-Yoccoz pseudo-Anosov are known to have (minimal) invariant
        foliations with SAF=0::

            sage: S3 = translation_surfaces.arnoux_yoccoz(3)
            sage: Jxx, Jyy, Jxy = S3.j_invariant()
            sage: Jxx.is_zero() and Jyy.is_zero()
            True
            sage: Jxy
            [ 0  2  0]
            [ 2 -2  0]
            [ 0  0  2]

            sage: S4 = translation_surfaces.arnoux_yoccoz(4)
            sage: Jxx, Jyy, Jxy = S4.j_invariant()
            sage: Jxx.is_zero() and Jyy.is_zero()
            True
            sage: Jxy
            [ 0  2  0  0]
            [ 2 -2  0  0]
            [ 0  0  2  2]
            [ 0  0  2  0]
        """
        g = ZZ(genus)
        if g < 3:
            raise ValueError
        # alpha is the root in (1/2, 1) of x + x^2 + ... + x^g = 1; the base
        # field is the number field it generates.
        x = polygen(AA)
        p = sum([x**i for i in range(1, g + 1)]) - 1
        cp = AA.common_polynomial(p)
        alpha_AA = AA.polynomial_root(cp, RIF(1 / 2, 1))
        field = NumberField(alpha_AA.minpoly(), "alpha", embedding=alpha_AA)
        a = field.gen()
        V = VectorSpace(field, 2)
        # The points P_i and Q_i of Bowman's presentation (1-indexed, entry 0
        # of q filled separately).
        p = [None for i in range(g + 1)]
        q = [None for i in range(g + 1)]
        p[0] = V(((1 - a**g) / 2, a**2 / (1 - a)))
        q[0] = V((-(a**g) / 2, a))
        p[1] = V((-(a ** (g - 1) + a**g) / 2, (a - a**2 + a**3) / (1 - a)))
        p[g] = V((1 + (a - a**g) / 2, (3 * a - 1 - a**2) / (1 - a)))
        for i in range(2, g):
            p[i] = V(((a - a**i) / (1 - a), a / (1 - a)))
        for i in range(1, g + 1):
            q[i] = V(
                (
                    (2 * a - a**i - a ** (i + 1)) / (2 * (1 - a)),
                    (a - a ** (g - i + 2)) / (1 - a),
                )
            )
        s = MutableOrientedSimilaritySurface(field)
        # T holds the triangles, Tp their reflections through the x-axis
        # (primed triangles in Bowman's notation).
        T = [None] * (2 * g + 1)
        Tp = [None] * (2 * g + 1)
        from sage.matrix.constructor import Matrix

        m = Matrix([[1, 0], [0, -1]])
        for i in range(1, g + 1):
            # T_i is (P_0,Q_i,Q_{i-1})
            T[i] = s.add_polygon(
                Polygon(
                    base_ring=field,
                    edges=[q[i] - p[0], q[i - 1] - q[i], p[0] - q[i - 1]],
                )
            )
            # T_{g+i} is (P_i,Q_{i-1},Q_{i})
            T[g + i] = s.add_polygon(
                Polygon(
                    base_ring=field,
                    edges=[q[i - 1] - p[i], q[i] - q[i - 1], p[i] - q[i]],
                )
            )
            # T'_i is (P'_0,Q'_{i-1},Q'_i)
            Tp[i] = s.add_polygon(m * s.polygon(T[i]))
            # T'_{g+i} is (P'_i,Q'_i, Q'_{i-1})
            Tp[g + i] = s.add_polygon(m * s.polygon(T[g + i]))
        for i in range(1, g):
            s.glue((T[i], 0), (T[i + 1], 2))
            s.glue((Tp[i], 2), (Tp[i + 1], 0))
        for i in range(1, g + 1):
            s.glue((T[i], 1), (T[g + i], 1))
            s.glue((Tp[i], 1), (Tp[g + i], 1))
        # P 0 Q 0 is paired with P' 0 Q' 0, ...
        s.glue((T[1], 2), (Tp[g], 2))
        s.glue((Tp[1], 0), (T[g], 0))
        # P1Q1 is paired with P'_g Q_{g-1}
        s.glue((T[g + 1], 2), (Tp[2 * g], 2))
        s.glue((Tp[g + 1], 0), (T[2 * g], 0))
        # P1Q0 is paired with P_{g-1} Q_{g-1}
        s.glue((T[g + 1], 0), (T[2 * g - 1], 2))
        s.glue((Tp[g + 1], 2), (Tp[2 * g - 1], 0))
        # PgQg is paired with Q1P2
        s.glue((T[2 * g], 2), (T[g + 2], 0))
        s.glue((Tp[2 * g], 0), (Tp[g + 2], 2))
        for i in range(2, g - 1):
            # PiQi is paired with Q'_i P'_{i+1}
            s.glue((T[g + i], 2), (Tp[g + i + 1], 2))
            s.glue((Tp[g + i], 0), (T[g + i + 1], 0))
        s.set_immutable()
        return s
    @staticmethod
    def from_flipper(h):
        r"""
        Build a (half-)translation surface from a flipper pseudo-Anosov.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: import flipper                             # optional - flipper

        A torus example::

            sage: t1 = (0r,1r,2r)                            # optional - flipper
            sage: t2 = (~0r,~1r,~2r)                         # optional - flipper
            sage: T = flipper.create_triangulation([t1,t2])  # optional - flipper
            sage: L1 = T.lamination([1r,0r,1r])              # optional - flipper
            sage: L2 = T.lamination([0r,1r,1r])              # optional - flipper
            sage: h1 = L1.encode_twist()                     # optional - flipper
            sage: h2 = L2.encode_twist()                     # optional - flipper
            sage: h = h1*h2^(-1r)                            # optional - flipper
            sage: f = h.flat_structure()                     # optional - flipper
            sage: ts = translation_surfaces.from_flipper(h)  # optional - flipper
            sage: ts                                         # optional - flipper; computation of the stratum fails here, see #227
            Half-Translation Surface built from 2 triangles
            sage: TestSuite(ts).run()                        # optional - flipper
            sage: from flatsurf.geometry.categories import HalfTranslationSurfaces  # optional: flipper
            sage: ts in HalfTranslationSurfaces()            # optional: flipper
            True

        A non-orientable example::

            sage: T = flipper.load('SB_4')                   # optional - flipper
            sage: h = T.mapping_class('s_0S_1s_2S_3s_1S_2')  # optional - flipper
            sage: h.is_pseudo_anosov()                       # optional - flipper
            True
            sage: S = translation_surfaces.from_flipper(h)   # optional - flipper
            sage: TestSuite(S).run()                         # optional - flipper
            sage: len(S.polygons())                          # optional - flipper
            4
            sage: from flatsurf.geometry.similarity_surface_generators import flipper_nf_element_to_sage
            sage: a = flipper_nf_element_to_sage(h.dilatation())  # optional - flipper
        """
        f = h.flat_structure()

        # Convert flipper's number field to a Sage number field, using an
        # arbitrary edge coordinate to discover the field.
        x = next(iter(f.edge_vectors.values())).x
        K = flipper_nf_to_sage(x.field)
        V = VectorSpace(K, 2)
        edge_vectors = {
            i: V(
                (flipper_nf_element_to_sage(e.x, K), flipper_nf_element_to_sage(e.y, K))
            )
            for i, e in f.edge_vectors.items()
        }

        # Map each flipper edge label to its (triangle index, edge index) pair.
        to_polygon_number = {
            k: (i, j) for i, t in enumerate(f.triangulation) for j, k in enumerate(t)
        }

        from flatsurf import MutableOrientedSimilaritySurface

        S = MutableOrientedSimilaritySurface(K)

        for i, t in enumerate(f.triangulation):
            try:
                poly = Polygon(base_ring=K, edges=[edge_vectors[i] for i in tuple(t)])
            except ValueError:
                # Report the degenerate triangle with approximate coordinates
                # to ease debugging.
                raise ValueError(
                    "t = {}, edges = {}".format(
                        t, [edge_vectors[i].n(digits=6) for i in t]
                    )
                )
            S.add_polygon(poly)

        for i, t in enumerate(f.triangulation):
            for j, k in enumerate(t):
                # ~k is flipper's label for the reversed edge k.
                S.glue((i, j), to_polygon_number[~k])

        S.set_immutable()
        return S
@staticmethod
def origami(r, u, rr=None, uu=None, domain=None):
r"""
Return the origami defined by the permutations ``r`` and ``u``.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: S = SymmetricGroup(3)
sage: r = S('(1,2)')
sage: u = S('(1,3)')
sage: o = translation_surfaces.origami(r,u)
sage: o
Origami defined by r=(1,2) and u=(1,3)
sage: o.stratum()
H_2(2)
sage: TestSuite(o).run()
"""
return Origami(r, u, rr, uu, domain)
@staticmethod
def infinite_staircase():
r"""
Return the infinite staircase built as an origami.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: S = translation_surfaces.infinite_staircase()
sage: S
The infinite staircase
sage: TestSuite(S).run()
"""
return TranslationSurfaceGenerators._InfiniteStaircase()
    class _InfiniteStaircase(Origami):
        r"""
        The infinite staircase as an origami over the integers: square ``n``
        is glued to ``n - 1`` and ``n + 1``, alternating between horizontal
        and vertical neighbors depending on the parity of ``n``.
        """

        def __init__(self):
            # The horizontal and vertical permutations are involutions, so
            # they serve as their own inverses (arguments 3 and 4).
            super().__init__(
                self._vertical,
                self._horizontal,
                self._vertical,
                self._horizontal,
                domain=ZZ,
                root=ZZ(0),
            )

        def is_compact(self):
            # Infinitely many squares, so the surface is not compact.
            return False

        def _vertical(self, x):
            # Vertical permutation: pairs each odd x with x + 1.
            if x % 2:
                return x + 1
            return x - 1

        def _horizontal(self, x):
            # Horizontal permutation: pairs each odd x with x - 1.
            if x % 2:
                return x - 1
            return x + 1

        def _position_function(self, n):
            # Place square n on the staircase diagonal for plotting.
            from flatsurf.geometry.similarity import SimilarityGroup

            SG = SimilarityGroup(QQ)
            if n % 2 == 0:
                return SG((n // 2, n // 2))
            else:
                return SG((n // 2, n // 2 + 1))

        def __repr__(self):
            return "The infinite staircase"

        def __hash__(self):
            # Arbitrary constant: all instances are equal (see __eq__).
            return 1337

        def __eq__(self, other):
            r"""
            Return whether this surface is indistinguishable from ``other``.

            See :meth:`SimilaritySurfaces.FiniteType._test_eq_surface` for details
            on this notion of equality.

            EXAMPLES::

                sage: from flatsurf import translation_surfaces
                sage: S = translation_surfaces.infinite_staircase()
                sage: S == S
                True
            """
            return isinstance(other, TranslationSurfaceGenerators._InfiniteStaircase)

        def graphical_surface(self, *args, **kwargs):
            # Default to the staircase layout unless the caller overrides it.
            default_position_function = kwargs.pop(
                "default_position_function", self._position_function
            )
            graphical_surface = super().graphical_surface(
                *args, default_position_function=default_position_function, **kwargs
            )
            # Only finitely many of the infinitely many squares can be shown.
            graphical_surface.make_all_visible(limit=10)
            return graphical_surface
@staticmethod
def t_fractal(w=ZZ_1, r=ZZ_2, h1=ZZ_1, h2=ZZ_1):
r"""
Return the T-fractal with parameters ``w``, ``r``, ``h1``, ``h2``.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: tf = translation_surfaces.t_fractal() # long time (.4s)
sage: tf # long time (see above)
The T-fractal surface with parameters w=1, r=2, h1=1, h2=1
sage: TestSuite(tf).run() # long time (see above)
"""
return tfractal_surface(w, r, h1, h2)
@staticmethod
def e_infinity_surface(lambda_squared=None, field=None):
r"""
The translation surface based on the `E_\infinity` graph.
The biparite graph is shown below, with edges numbered::
0 1 2 -2 3 -3 4 -4
*---o---*---o---*---o---*---o---*...
|
|-1
o
Here, black vertices are colored ``*``, and white ``o``.
Black nodes represent vertical cylinders and white nodes
represent horizontal cylinders.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: s = translation_surfaces.e_infinity_surface()
sage: TestSuite(s).run() # long time (1s)
"""
return EInfinitySurface(lambda_squared, field)
@staticmethod
def chamanara(alpha):
r"""
Return the Chamanara surface with parameter ``alpha``.
EXAMPLES::
sage: from flatsurf import translation_surfaces
sage: C = translation_surfaces.chamanara(1/2)
sage: C
Minimal Translation Cover of Chamanara surface with parameter 1/2
TESTS::
sage: TestSuite(C).run()
sage: from flatsurf.geometry.categories import TranslationSurfaces
sage: C in TranslationSurfaces()
True
"""
from .chamanara import chamanara_surface
return chamanara_surface(alpha)
# Singleton giving access to the translation surface constructions,
# e.g. ``translation_surfaces.square_torus()``.
translation_surfaces = TranslationSurfaceGenerators()
# Maximum number of rotation steps attempted by clockwise_to and
# counter_clockwise_to in SimilaritySurfaceTangentVector before giving up.
rotate_limit = 100
class SimilaritySurfaceTangentVector:
r"""
Tangent vector to a similarity surface.
EXAMPLES::
sage: from flatsurf import translation_surfaces
Examples on edges in direction of edges::
sage: s = translation_surfaces.square_torus()
sage: s.tangent_vector(0, (1/2, 0), (1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (1/2, 0) with vector (1, 0)
sage: s.tangent_vector(0, (1/2, 0), (-1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (1/2, 1) with vector (-1, 0)
sage: s.tangent_vector(0, (1/2, 1), (1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (1/2, 0) with vector (1, 0)
sage: s.tangent_vector(0, (1/2, 1), (-1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (1/2, 1) with vector (-1, 0)
sage: s.tangent_vector(0, (0, 1/2), (0, 1))
SimilaritySurfaceTangentVector in polygon 0 based at (1, 1/2) with vector (0, 1)
sage: s.tangent_vector(0, (0, 1/2), (0, -1))
SimilaritySurfaceTangentVector in polygon 0 based at (0, 1/2) with vector (0, -1)
sage: s.tangent_vector(0, (1, 1/2), (0, 1))
SimilaritySurfaceTangentVector in polygon 0 based at (1, 1/2) with vector (0, 1)
sage: s.tangent_vector(0, (1, 1/2), (0, -1))
SimilaritySurfaceTangentVector in polygon 0 based at (0, 1/2) with vector (0, -1)
Examples on vertices in direction of edges::
sage: s = translation_surfaces.square_torus()
sage: s.tangent_vector(0, (0, 0), (1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (0, 0) with vector (1, 0)
sage: s.tangent_vector(0, (1, 0), (-1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (1, 1) with vector (-1, 0)
sage: s.tangent_vector(0, (0, 1), (1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (0, 0) with vector (1, 0)
sage: s.tangent_vector(0, (1, 1), (-1, 0))
SimilaritySurfaceTangentVector in polygon 0 based at (1, 1) with vector (-1, 0)
sage: s.tangent_vector(0, (0, 0), (0, 1))
SimilaritySurfaceTangentVector in polygon 0 based at (1, 0) with vector (0, 1)
sage: s.tangent_vector(0, (0, 1), (0, -1))
SimilaritySurfaceTangentVector in polygon 0 based at (0, 1) with vector (0, -1)
sage: s.tangent_vector(0, (1, 0), (0, 1))
SimilaritySurfaceTangentVector in polygon 0 based at (1, 0) with vector (0, 1)
sage: s.tangent_vector(0, (1, 1), (0, -1))
SimilaritySurfaceTangentVector in polygon 0 based at (0, 1) with vector (0, -1)
"""
def __init__(self, tangent_bundle, polygon_label, point, vector):
    r"""
    Construct a tangent vector of ``tangent_bundle`` based at ``point``
    (in the coordinates of the polygon labeled ``polygon_label``) and
    pointing in the direction ``vector``.

    If the base point lies on the boundary of the polygon, the data may be
    moved across an edge gluing of the surface so that the stored vector
    points into (or counterclockwise along the boundary of) the polygon
    that represents it. A zero ``vector`` is rejected.
    """
    from flatsurf.geometry.euclidean import ccw, is_anti_parallel

    self._bundle = tangent_bundle
    p = self.surface().polygon(polygon_label)
    pos = p.get_point_position(point)
    if not vector:
        raise NotImplementedError("vector must be non-zero")
    if pos.is_in_interior():
        # Interior base points need no normalization.
        self._polygon_label = polygon_label
        self._point = point
        self._vector = vector
        self._position = pos
    elif pos.is_in_edge_interior():
        e = pos.get_edge()
        edge_v = p.edge(e)
        if ccw(edge_v, vector) < 0 or is_anti_parallel(edge_v, vector):
            # The vector points out of this polygon (or backward along the
            # edge): need to move point and vector to the opposite edge.
            label2, e2 = self.surface().opposite_edge(polygon_label, e)
            similarity = self.surface().edge_transformation(polygon_label, e)
            point2 = similarity(point)
            vector2 = similarity.derivative() * vector
            self._polygon_label = label2
            self._point = point2
            self._vector = vector2
            self._position = (
                self.surface().polygon(label2).get_point_position(point2)
            )
        else:
            self._polygon_label = polygon_label
            self._point = point
            self._vector = vector
            self._position = pos
    elif pos.is_vertex():
        v = pos.get_vertex()
        p = self.surface().polygon(polygon_label)
        # subsequent edge:
        edge1 = p.edge(v)
        # prior edge:
        edge0 = p.edge((v - 1) % len(p.vertices()))
        wp1 = ccw(edge1, vector)
        wp0 = ccw(edge0, vector)
        if wp1 < 0 or wp0 < 0:
            raise ValueError(
                "Singular point with vector pointing away from polygon"
            )
        if wp0 == 0:
            # vector points backward along edge 0: move the data to the
            # polygon on the other side of that edge.
            label2, e2 = self.surface().opposite_edge(
                polygon_label, (v - 1) % len(p.vertices())
            )
            similarity = self.surface().edge_transformation(
                polygon_label, (v - 1) % len(p.vertices())
            )
            point2 = similarity(point)
            vector2 = similarity.derivative() * vector
            self._polygon_label = label2
            self._point = point2
            self._vector = vector2
            self._position = (
                self.surface().polygon(label2).get_point_position(point2)
            )
        else:
            # vector points along edge1 in that direction or points into
            # the polygon's interior
            self._polygon_label = polygon_label
            self._point = point
            self._vector = vector
            self._position = pos
    else:
        raise ValueError("Provided point lies outside the indexed polygon")
    # Freeze the stored data so hashing (see __hash__) stays consistent.
    self._point.set_immutable()
    self._vector.set_immutable()
def __repr__(self):
return (
"SimilaritySurfaceTangentVector in polygon "
+ repr(self._polygon_label)
+ " based at "
+ repr(self._point)
+ " with vector "
+ repr(self._vector)
)
def __eq__(self, other):
if isinstance(other, self.__class__):
return (
self.surface() == other.surface()
and self.polygon_label() == other.polygon_label()
and self.point() == other.point()
and self.vector() == other.vector()
)
return NotImplemented
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
r"""
TESTS::
sage: from flatsurf import translation_surfaces
sage: s = translation_surfaces.square_torus()
sage: for y in [0,1]:
....: for d in [1,-1]:
....: h = hash(s.tangent_vector(0, (1/2, y), (d, 0)))
"""
return hash((self._bundle, self._polygon_label, self._point, self._vector))
def surface(self):
r"""Return the underlying surface."""
return self._bundle.surface()
def is_based_at_singularity(self):
r"""
Return the truth value of the statement 'the base point for this vector is a singularity.'
"""
return self._position.is_vertex()
def vertex(self):
r"""Return the index of the vertex."""
return self._position.get_vertex()
def is_in_boundary_of_polygon(self):
r"""
Return the truth value of the statement
'the base point for this vector lies on the boundary of
one of the polygons making up the surface.'
"""
return self._position.is_in_boundary()
def position(self):
r"""
Return the PolygonPosition representing the location of
the basepoint of the vector in the polygon that contains it.
"""
return self._position
def bundle(self):
r"""Return the tangent bundle containing this vector."""
return self._bundle
def polygon_label(self):
return self._polygon_label
def polygon(self):
return self.surface().polygon(self.polygon_label())
def point(self):
r"""
Return the base point of this tangent vector as a vector.
The coordinates of output are given with respect to the polygon it
belongs to.
EXAMPLES::
sage: from flatsurf import similarity_surfaces
sage: s = similarity_surfaces.example()
sage: v = s.tangent_vector(0, (1/2,0), (0,1))
sage: v.point()
(1/2, 0)
sage: parent(_)
Vector space of dimension 2 over Rational Field
"""
return self._point
def vector(self):
r"""
Return the coordinates of this vector within the assigned polygon.
EXAMPLES::
sage: from flatsurf import similarity_surfaces
sage: s = similarity_surfaces.example()
sage: v = s.tangent_vector(0, (1/2,0), (0,1))
sage: v.vector()
(0, 1)
sage: parent(_)
Vector space of dimension 2 over Rational Field
"""
return self._vector
def edge_pointing_along(self):
r"""
Returns the pair of (p,e) where p is the polygon label at the base point,
and e is the edge this vector points along or none if it does not point
along an edge. Here pointing along means that the vector is based at
a vertex and represents the vector joining this edge to the next vertex."""
if self.is_based_at_singularity():
e = self.vertex()
if self.vector() == self.polygon().edge(e):
return (self.polygon_label(), e)
return None
def differs_by_scaling(self, another_tangent_vector):
    r"""
    Return whether ``another_tangent_vector`` differs from this one only by
    scaling: it must lie in the same polygon, be based at the same point,
    and point in the same direction.
    """
    from flatsurf.geometry.euclidean import is_parallel

    if self.polygon_label() != another_tangent_vector.polygon_label():
        return False
    if self.point() != another_tangent_vector.point():
        return False
    return is_parallel(self.vector(), another_tangent_vector.vector())
def invert(self):
r"""
Returns the negation of this tangent vector.
Raises a ValueError if the vector is based at a singularity.'
"""
if self.is_based_at_singularity():
raise ValueError("Can't invert tangent vector based at a singularity.")
return SimilaritySurfaceTangentVector(
self.bundle(), self.polygon_label(), self.point(), -self.vector()
)
def forward_to_polygon_boundary(self):
    r"""
    Flow forward (in the direction of the tangent vector) until the
    boundary of the polygon is reached.

    Returns the tangent vector based at the endpoint which points backward
    along the trajectory.

    NOTES::

        We return the backward trajectory, because continuing forward does not make sense if a
        singularity is reached. You can obtain the forward vector by subsequently applying invert().

    EXAMPLES::

        sage: from flatsurf.geometry.similarity_surface_generators import SimilaritySurfaceGenerators
        sage: s = SimilaritySurfaceGenerators.example()
        sage: from flatsurf.geometry.tangent_bundle import SimilaritySurfaceTangentBundle
        sage: tb = SimilaritySurfaceTangentBundle(s)
        sage: s.polygon(0)
        Polygon(vertices=[(0, 0), (2, -2), (2, 0)])
        sage: s.polygon(1)
        Polygon(vertices=[(0, 0), (2, 0), (1, 3)])
        sage: from flatsurf.geometry.tangent_bundle import SimilaritySurfaceTangentVector
        sage: V = tb.surface().base_ring()**2
        sage: v = SimilaritySurfaceTangentVector(tb, 0, V((0,0)), V((3,-1)))
        sage: v
        SimilaritySurfaceTangentVector in polygon 0 based at (0, 0) with vector (3, -1)
        sage: v2 = v.forward_to_polygon_boundary()
        sage: v2
        SimilaritySurfaceTangentVector in polygon 0 based at (2, -2/3) with vector (-3, 1)
        sage: v2.invert()
        SimilaritySurfaceTangentVector in polygon 1 based at (2/3, 2) with vector (4, -3)
    """
    p = self.polygon()
    # flow_to_exit also returns the position of the exit point within the
    # polygon; only the point itself is needed here.
    exit_point, _ = p.flow_to_exit(self.point(), self.vector())
    return SimilaritySurfaceTangentVector(
        self.bundle(), self.polygon_label(), exit_point, -self.vector()
    )
def straight_line_trajectory(self):
    r"""
    Return the straight line trajectory determined by this tangent vector.

    EXAMPLES::

        sage: from flatsurf import translation_surfaces
        sage: s = translation_surfaces.square_torus()
        sage: v = s.tangent_vector(0, (0,0), (1,1))
        sage: v.straight_line_trajectory()
        Straight line trajectory made of 1 segments from (0, 0) in polygon 0 to (1, 1) in polygon 0
        sage: l = v.straight_line_trajectory()
        sage: l
        Straight line trajectory made of 1 segments from (0, 0) in polygon 0 to (1, 1) in polygon 0
        sage: l.is_saddle_connection()
        True

        sage: v = s.tangent_vector(0, (0,0), (1,1+AA(5).sqrt()), ring=AA)
        sage: l = v.straight_line_trajectory()
        sage: l.flow(20)
        sage: l.segment(20)
        Segment in polygon 0 starting at (0.9442719099991588?, 0) and ending at (1, 0.1803398874989485?)
    """
    from flatsurf.geometry.straight_line_trajectory import StraightLineTrajectory

    return StraightLineTrajectory(self)
def clockwise_to(self, w, code=False):
    r"""
    Return the new tangent vector obtained by rotating this one in the
    clockwise direction until the vector is parallel to w, and scaling so
    that the length matches that of w.

    Note that we always do some rotation so that if w is parallel to this
    vector, then a -360 degree rotation is performed.

    The vector w must be nonzero.

    On an infinite surface, this is potentially an infinite calculation so
    we impose a limit (representing the maximal number of polygons that
    must be rotated through). This is the variable rotate_limit in this
    package; a RuntimeError is raised when the limit is reached.

    If code is True, we compute the sequence of numbers associated to edges
    crossed as a list. We return a pair consisting of the newly computed
    tangent vector and this code. This is currently only implemented when
    based at a singularity.

    EXAMPLES::

        sage: from flatsurf import translation_surfaces
        sage: s=translation_surfaces.regular_octagon()
        sage: v=s.tangent_vector(0,(0,0),(1,1))
        sage: v.clockwise_to((-1,-1))
        SimilaritySurfaceTangentVector in polygon 0 based at (0, a + 1) with vector (-1, -1)
        sage: v.clockwise_to((1,1))
        SimilaritySurfaceTangentVector in polygon 0 based at (-1/2*a, 1/2*a) with vector (1, 1)
        sage: v.clockwise_to((1,1), code=True)
        (SimilaritySurfaceTangentVector in polygon 0 based at (-1/2*a, 1/2*a) with vector (1, 1), [0, 5, 2])
    """
    if not w:
        raise ValueError("w must be non-zero")
    if not self.is_based_at_singularity():
        raise NotImplementedError(
            "Rotating tangent vectors is only implemented when at a singularity"
        )
    s = self.surface()
    v1 = self.vector()
    label = self.polygon_label()
    vertex = self.vertex()
    v2 = s.polygon(label).edge(vertex)
    from sage.matrix.constructor import Matrix

    der = Matrix(s.base_ring(), [[1, 0], [0, 1]])
    codes = [] if code else None
    from flatsurf.geometry.euclidean import ccw

    for _ in range(rotate_limit):
        if ccw(v2, w) >= 0 and ccw(w, v1) > 0:
            # We've found the sector containing the direction of w.
            break
        if code:
            codes.append(vertex)
        label2, edge2 = s.opposite_edge(label, vertex)
        der = der * s.edge_matrix(label2, edge2)
        v1 = der * (-s.polygon(label2).edge(edge2))
        label = label2
        vertex = (edge2 + 1) % len(s.polygon(label2).vertices())
        v2 = der * (s.polygon(label2).edge(vertex))
    else:
        # The previous implementation asserted ``count < rotate_limit``
        # which could never fail since count stays below rotate_limit.
        raise RuntimeError("Reached rotate_limit without finding direction")
    result = self.surface().tangent_vector(
        label, s.polygon(label).vertex(vertex), w
    )
    if code:
        return (result, codes)
    return result
def counterclockwise_to(self, w, code=False):
    r"""
    Return the new tangent vector obtained by rotating this one in the
    counterclockwise direction until the vector is parallel to w, and
    scaling so that the length matches that of w.

    Note that we always do some rotation so that if w is parallel to this
    vector, then a 360 degree rotation is performed.

    The vector w must be nonzero.

    On an infinite surface, this is potentially an infinite calculation so
    we impose a limit (representing the maximal number of polygons that
    must be rotated through). This is the variable rotate_limit in this
    package; a RuntimeError is raised when the limit is reached.

    If code is True, we compute the sequence of numbers associated to edges
    crossed as a list. We return a pair consisting of the newly computed
    tangent vector and this code. This is currently only implemented when
    based at a singularity.

    EXAMPLES::

        sage: from flatsurf import translation_surfaces
        sage: s=translation_surfaces.regular_octagon()
        sage: v=s.tangent_vector(0,(0,0),(1,1))
        sage: v.counterclockwise_to((-1,-1))
        SimilaritySurfaceTangentVector in polygon 0 based at (1/2*a + 1, 1/2*a + 1) with vector (-1, -1)
        sage: v.counterclockwise_to((1,1))
        SimilaritySurfaceTangentVector in polygon 0 based at (1, 0) with vector (1, 1)
        sage: v.counterclockwise_to((1,1), code=True)
        (SimilaritySurfaceTangentVector in polygon 0 based at (1, 0) with vector (1, 1), [7, 2, 5])
    """
    if not w:
        raise ValueError("w must be non-zero")
    if not self.is_based_at_singularity():
        raise NotImplementedError(
            "Rotating tangent vectors is only implemented when at a singularity"
        )
    s = self.surface()
    v1 = self.vector()
    label = self.polygon_label()
    vertex = self.vertex()
    n = len(s.polygon(label).vertices())
    previous_vertex = (vertex - 1 + n) % n
    v2 = -s.polygon(label).edge(previous_vertex)
    from sage.matrix.constructor import Matrix

    der = Matrix(s.base_ring(), [[1, 0], [0, 1]])
    codes = [] if code else None
    from flatsurf.geometry.euclidean import ccw

    if not (ccw(v1, w) > 0 and ccw(w, v2) > 0):
        for _ in range(rotate_limit):
            label2, edge2 = s.opposite_edge(label, previous_vertex)
            if code:
                codes.append(previous_vertex)
            der = der * s.edge_matrix(label2, edge2)
            label = label2
            vertex = edge2
            n = len(s.polygon(label).vertices())
            previous_vertex = (vertex - 1 + n) % n
            v1 = der * (s.polygon(label).edge(vertex))
            v2 = der * (-s.polygon(label).edge(previous_vertex))
            if ccw(v1, w) >= 0 and ccw(w, v2) > 0:
                # We've found the sector containing the direction of w.
                break
        else:
            # The previous implementation asserted ``count < rotate_limit``
            # which could never fail since count stays below rotate_limit.
            raise RuntimeError("Reached rotate_limit without finding direction")
    result = self.surface().tangent_vector(
        label, s.polygon(label).vertex(vertex), w
    )
    if code:
        return (result, codes)
    return result
def plot(self, **kwargs):
r"""
Return a plot of this tangent vector.
EXAMPLES::
sage: import flatsurf
sage: S = flatsurf.translation_surfaces.veech_double_n_gon(5)
sage: v = S.tangent_vector(0, (1/8, 1/4), (1/2, 1/4))
sage: S.plot() + v.plot()
Graphics object consisting of 22 graphics primitives
Any keyword arguments are passed on to the underlying plot method from SageMath::
sage: S.plot() + v.plot(color="red")
Graphics object consisting of 22 graphics primitives
"""
return self.vector().plot(
**{"start": self.point(), "width": 1, "arrowsize": 2, **kwargs}
)
class SimilaritySurfaceTangentBundle:
    r"""
    The tangent bundle of a given similarity surface.

    Needs work: We should check for coercion from the base_ring of the surface
    """

    def __init__(self, similarity_surface, ring=None):
        self._s = similarity_surface
        self._base_ring = self._s.base_ring() if ring is None else ring
        from sage.modules.free_module import VectorSpace

        self._V = VectorSpace(self._base_ring, 2)

    def __call__(self, polygon_label, point, vector):
        r"""
        Construct a tangent vector from a polygon label, a point in the
        polygon and a vector. The point and the vector should have
        coordinates in the base field.
        """
        return SimilaritySurfaceTangentVector(
            self, polygon_label, self._V(point), self._V(vector)
        )

    def __repr__(self):
        return f"Tangent bundle of {self._s!r} defined over {self._base_ring!r}"

    def base_ring(self):
        r"""Return the ring over which this tangent bundle is defined."""
        return self._base_ring

    field = base_ring

    def vector_space(self):
        r"""
        Return the vector space over the field of the bundle.
        """
        return self._V

    def surface(self):
        r"""Return the surface this bundle is over."""
        return self._s

    def edge(self, polygon_label, edge_index):
        r"""Return the vector leaving a vertex of the polygon which under straight-line flow travels
        counterclockwise around the boundary of the polygon along the edge with the provided index.
        The length of the vector matches the length of the indexed edge.

        EXAMPLES::

            sage: from flatsurf.geometry.similarity_surface_generators import SimilaritySurfaceGenerators
            sage: s = SimilaritySurfaceGenerators.example()
            sage: from flatsurf.geometry.tangent_bundle import SimilaritySurfaceTangentBundle
            sage: tb = SimilaritySurfaceTangentBundle(s)
            sage: s.polygon(0)
            Polygon(vertices=[(0, 0), (2, -2), (2, 0)])
            sage: tb.edge(0,0)
            SimilaritySurfaceTangentVector in polygon 0 based at (0, 0) with vector (2, -2)
        """
        polygon = self.surface().polygon(polygon_label)
        return SimilaritySurfaceTangentVector(
            self, polygon_label, polygon.vertex(edge_index), polygon.edge(edge_index)
        )

    def clockwise_edge(self, polygon_label, edge_index):
        r"""Return the vector leaving a vertex of the polygon which under straight-line flow travels
        *clockwise* around the boundary of the polygon along the edge with the provided index.
        The length of the vector matches the length of the indexed edge.
        Note that the point will be based in the polygon opposite the provided edge.

        EXAMPLES::

            sage: from flatsurf.geometry.similarity_surface_generators import SimilaritySurfaceGenerators
            sage: s = SimilaritySurfaceGenerators.example()
            sage: from flatsurf.geometry.tangent_bundle import SimilaritySurfaceTangentBundle
            sage: tb = SimilaritySurfaceTangentBundle(s)
            sage: s.polygon(0)
            Polygon(vertices=[(0, 0), (2, -2), (2, 0)])
            sage: s.polygon(1)
            Polygon(vertices=[(0, 0), (2, 0), (1, 3)])
            sage: s.opposite_edge(0, 0)
            (1, 1)
            sage: tb.clockwise_edge(0,0)
            SimilaritySurfaceTangentVector in polygon 1 based at (2, 0) with vector (-1, 3)
        """
        polygon = self.surface().polygon(polygon_label)
        return SimilaritySurfaceTangentVector(
            self, polygon_label, polygon.vertex(edge_index + 1), -polygon.edge(edge_index)
        )
from sage.rings.integer_ring import ZZ
from sage.rings.rational_field import QQ
from sage.rings.number_field.number_field import NumberField
from sage.rings.qqbar import AA, number_field_elements_from_algebraics
from sage.structure.sage_object import SageObject
from sage.matrix.constructor import matrix
from sage.modules.free_module_element import vector
from sage.geometry.polyhedron.constructor import Polyhedron
from sage.functions.other import sqrt
from flatsurf.geometry.straight_line_trajectory import (
StraightLineTrajectory,
SegmentInPolygon,
)
from flatsurf.geometry.tangent_bundle import SimilaritySurfaceTangentVector
class ConeSurfaceToPolyhedronMap(SageObject):
    r"""
    A map sending objects defined on a ConeSurface built from a polyhedron to
    the polyhedron. Currently, this works to send a trajectory to a list of
    points.

    This class should not be called directly. You get an object of this type
    from polyhedron_to_cone_surface.
    """

    def __init__(self, cone_surface, polyhedron, mapping_data):
        self._s = cone_surface
        self._p = polyhedron
        self._md = mapping_data

    def _image_of_point(self, label, point):
        # Entries 0 and 1 of a face's mapping data give the affine map
        # (translation, matrix) sending polygon coordinates into 3-space.
        translation = self._md[label][0]
        derivative = self._md[label][1]
        return translation + derivative * point

    def __call__(self, o):
        r"""
        This method is used to convert from an object on the cone surface to an object on the polyhedron.

        Currently works with

        - StraightLineTrajectory -- returns the corresponding list of points on the polyhedron
        - SegmentInPolygon -- returns the corresponding pair of points on the polyhedron
        - SimilaritySurfaceTangentVector -- returns a pair of points corresponding to the image point and image of the tangent vector.
        """
        if isinstance(o, StraightLineTrajectory):
            it = iter(o.segments())
            seg = next(it)
            label = seg.polygon_label()
            points = [
                self._image_of_point(label, seg.start().point()),
                self._image_of_point(label, seg.end().point()),
            ]
            for seg in it:
                points.append(
                    self._image_of_point(seg.polygon_label(), seg.end().point())
                )
            return points
        if isinstance(o, SegmentInPolygon):
            # Return the pair of images of the endpoints.
            label = o.polygon_label()
            return (
                self._image_of_point(label, o.start().point()),
                self._image_of_point(label, o.end().point()),
            )
        if isinstance(o, SimilaritySurfaceTangentVector):
            # Map to a pair consisting of the image of the base point and
            # the image of the vector (which transforms by the matrix only).
            label = o.polygon_label()
            return (
                self._image_of_point(label, o.point()),
                self._md[label][1] * o.vector(),
            )
        raise ValueError("Failed to recognize type of passed object")

    def __eq__(self, other):
        r"""
        Return whether this map is indistinguishable from ``other``.

        EXAMPLES::

            sage: from flatsurf.geometry.polyhedra import Polyhedron, polyhedron_to_cone_surface
            sage: vertices=[]
            sage: for i in range(3):
            ....:     temp=vector([1 if k==i else 0 for k in range(3)])
            ....:     for j in range(-1,3,2):
            ....:         vertices.append(j*temp)
            sage: octahedron=Polyhedron(vertices=vertices)
            sage: surface, surface_to_octahedron = polyhedron_to_cone_surface(octahedron,scaling_factor=AA(1/sqrt(2)))  # long time (.5s)
            sage: surface_to_octahedron == surface_to_octahedron  # long time (see above)
            True
        """
        if not isinstance(other, ConeSurfaceToPolyhedronMap):
            return False
        return (
            self._s == other._s and self._p == other._p and self._md == other._md
        )

    def __ne__(self, other):
        r"""
        Return whether this map is distinguishable from ``other``.

        EXAMPLES::

            sage: from flatsurf.geometry.polyhedra import Polyhedron, polyhedron_to_cone_surface
            sage: vertices=[]
            sage: for i in range(3):
            ....:     temp=vector([1 if k==i else 0 for k in range(3)])
            ....:     for j in range(-1,3,2):
            ....:         vertices.append(j*temp)
            sage: octahedron=Polyhedron(vertices=vertices)
            sage: surface, surface_to_octahedron = polyhedron_to_cone_surface(octahedron,scaling_factor=AA(1/sqrt(2)))  # long time (.3s)
            sage: surface_to_octahedron != surface_to_octahedron  # long time (see above)
            False
        """
        return not (self == other)
def polyhedron_to_cone_surface(polyhedron, use_AA=False, scaling_factor=ZZ(1)):
    r"""
    Construct the Euclidean Cone Surface associated to the surface of a polyhedron and a map
    from the cone surface to the polyhedron.

    INPUT:

    - ``polyhedron`` -- A 3-dimensional polyhedron, which should be defined over something that coerces into AA

    - ``use_AA`` -- If True, the surface returned will be defined over AA. If false, the algorithm will find the smallest NumberField and write the field there.

    - ``scaling_factor`` -- The surface returned will have a metric scaled by multiplication by this factor (compared with the original polyhedron). This can be used to produce a surface defined over a smaller NumberField.

    OUTPUT:

    A pair consisting of a ConeSurface and a ConeSurfaceToPolyhedronMap.

    EXAMPLES::

        sage: from flatsurf.geometry.polyhedra import Polyhedron, polyhedron_to_cone_surface
        sage: vertices=[]
        sage: for i in range(3):
        ....:     temp=vector([1 if k==i else 0 for k in range(3)])
        ....:     for j in range(-1,3,2):
        ....:         vertices.append(j*temp)
        sage: octahedron=Polyhedron(vertices=vertices)
        sage: surface,surface_to_octahedron = \
        ....:     polyhedron_to_cone_surface(octahedron,scaling_factor=AA(1/sqrt(2)))
        sage: TestSuite(surface).run()
        sage: TestSuite(surface_to_octahedron).run()  # long time (.4s)
        sage: len(surface.polygons())
        8
        sage: surface.base_ring()
        Number Field in a with defining polynomial y^2 - 3 with a = 1.732050807568878?
        sage: sqrt3=surface.base_ring().gen()
        sage: tangent_bundle=surface.tangent_bundle()
        sage: v=tangent_bundle(0,(0,0),(sqrt3,2))
        sage: traj=v.straight_line_trajectory()
        sage: traj.flow(10)
        sage: traj.is_saddle_connection()
        True
        sage: traj.combinatorial_length()
        8
        sage: path3d = surface_to_octahedron(traj)
        sage: len(path3d)
        9
        sage: # We will show that the length of the path is sqrt(42):
        sage: total_length = 0
        sage: for i in range(8):
        ....:     start = path3d[i]
        ....:     end = path3d[i+1]
        ....:     total_length += (vector(end)-vector(start)).norm()
        sage: ZZ(total_length**2)
        42
    """
    if polyhedron.dim() != 3:
        raise ValueError("polyhedron must be 3-dimensional")
    c = polyhedron.center()
    faces = polyhedron.faces(2)
    face_edges = []  # per face: dict, frozenset of two vertices -> edge index
    face_vertices = []  # per face: vertices in counterclockwise cyclic order
    face_map_data = []  # per face: data for the affine maps to/from 3-space
    for f in faces:
        # Collect the edges of the face as frozensets of immutable vertex vectors.
        face_edges_temp = set()
        for edge in f.as_polyhedron().faces(1):
            edge_temp = set()
            for vertex in edge.vertices():
                v = vertex.vector()
                v.set_immutable()
                edge_temp.add(v)
            face_edges_temp.add(frozenset(edge_temp))
        # Walk along the edges to recover a cyclic ordering of the vertices.
        last_edge = next(iter(face_edges_temp))
        v = next(iter(last_edge))
        face_vertices_temp = [v]
        for _ in range(len(face_edges_temp) - 1):
            for edge in face_edges_temp:
                if v in edge and edge != last_edge:
                    # Found the next edge in the cycle.
                    last_edge = edge
                    for vv in edge:
                        if vv != v:
                            v = vv
                            face_vertices_temp.append(vv)
                            break
                    break
        v0 = face_vertices_temp[0]
        v1 = face_vertices_temp[1]
        v2 = face_vertices_temp[2]
        n = (v1 - v0).cross_product(v2 - v0)
        if (v0 - c).dot_product(n) < 0:
            # The cycle is clockwise as seen from outside; reverse it so the
            # normal points away from the center.
            n = -n
            face_vertices_temp.reverse()
            v0 = face_vertices_temp[0]
            v1 = face_vertices_temp[1]
            v2 = face_vertices_temp[2]
        face_vertices.append(face_vertices_temp)
        # Build an orthonormal frame of the face's plane and the maps between
        # the plane (2d polygon coordinates) and 3-space.
        n = n / AA(n.norm())
        w = v1 - v0
        w = w / AA(w.norm())
        m = 1 / scaling_factor * matrix(AA, [w, n.cross_product(w), n]).transpose()
        mi = ~m
        mi_submatrix = mi.submatrix(0, 0, 2, 3)
        face_map_data.append(
            (
                v0,  # translation to bring origin in plane to v0
                m.submatrix(0, 0, 3, 2),
                -mi_submatrix * v0,
                mi_submatrix,
            )
        )
        # Index the edges of this face by their position in the vertex cycle.
        it = iter(face_vertices_temp)
        v_last = next(it)
        face_edge_dict = {}
        j = 0
        for v in it:
            face_edge_dict[frozenset([v_last, v])] = j
            j += 1
            v_last = v
        face_edge_dict[frozenset([v_last, face_vertices_temp[0]])] = j
        face_edges.append(face_edge_dict)
    # Glue each edge of each face to the matching edge of the unique other
    # face containing the same pair of vertices.
    gluings = {}
    for p1, face_edge_dict1 in enumerate(face_edges):
        for edge, e1 in face_edge_dict1.items():
            found = False
            for p2, face_edge_dict2 in enumerate(face_edges):
                if p1 != p2 and edge in face_edge_dict2:
                    gluings[(p1, e1)] = (p2, face_edge_dict2[edge])
                    found = True
                    break
            if not found:
                # Report the diagnostic data in the error message instead of
                # printing it to stdout.
                raise RuntimeError(
                    f"Failed to find glued edge: polygon {p1}, edge {e1}, "
                    f"vertices {set(edge)}"
                )
    # Flatten each face to a polygon in the plane, with coordinates in AA.
    polygon_vertices_AA = []
    for p, vs in enumerate(face_vertices):
        trans = face_map_data[p][2]
        m = face_map_data[p][3]
        polygon_vertices_AA.append([trans + m * v for v in vs])

    from flatsurf import MutableOrientedSimilaritySurface, Polygon

    def build_surface(base_ring, polygon_vertices):
        # Assemble the glued surface over the given ring and pair it with the
        # map back to the polyhedron.
        S = MutableOrientedSimilaritySurface(base_ring)
        for vs in polygon_vertices:
            S.add_polygon(Polygon(vertices=vs, base_ring=base_ring))
        for x, y in gluings.items():
            S.glue(x, y)
        S.set_immutable()
        return S, ConeSurfaceToPolyhedronMap(S, polyhedron, face_map_data)

    if use_AA is True:
        return build_surface(AA, polygon_vertices_AA)
    # Collect all coordinates and find the smallest number field containing them.
    elts = []
    for vs in polygon_vertices_AA:
        for v in vs:
            elts.append(v[0])
            elts.append(v[1])
    field, elts2, hom = number_field_elements_from_algebraics(elts, minimal=True)
    if field == QQ:
        # Defined over the rationals!
        polygon_vertices_field = []
        j = 0
        for vs in polygon_vertices_AA:
            vs2 = []
            for _ in vs:
                vs2.append(vector(field, [elts2[j], elts2[j + 1]]))
                j += 2
            polygon_vertices_field.append(vs2)
        return build_surface(field, polygon_vertices_field)
    # Unfortunately field doesn't come with a real embedding (which is given
    # by hom!), so we make a copy of the field and add the embedding.
    field2 = NumberField(field.polynomial(), name="a", embedding=hom(field.gen()))
    # The following converts from field to field2:
    hom2 = field.hom(im_gens=[field2.gen()])
    polygon_vertices_field = []
    j = 0
    for vs in polygon_vertices_AA:
        vs2 = []
        for _ in vs:
            vs2.append(vector(field2, [hom2(elts2[j]), hom2(elts2[j + 1])]))
            j += 2
        polygon_vertices_field.append(vs2)
    return build_surface(field2, polygon_vertices_field)
def platonic_tetrahedron():
    r"""Produce a triple consisting of a polyhedral version of the platonic tetrahedron,
    the associated cone surface, and a ConeSurfaceToPolyhedronMap from the surface
    to the polyhedron.

    EXAMPLES::

        sage: from flatsurf.geometry.polyhedra import platonic_tetrahedron
        sage: polyhedron,surface,surface_to_polyhedron = platonic_tetrahedron()
        sage: TestSuite(surface).run()
    """
    signs = (-1, 1)
    vertices = [vector(QQ, (x, y, x * y)) for x in signs for y in signs]
    polyhedron = Polyhedron(vertices=vertices)
    surface, mapping = polyhedron_to_cone_surface(
        polyhedron, scaling_factor=AA(1 / sqrt(2))
    )
    return polyhedron, surface, mapping
def platonic_cube():
    r"""Produce a triple consisting of a polyhedral version of the platonic cube,
    the associated cone surface, and a ConeSurfaceToPolyhedronMap from the surface
    to the polyhedron.

    EXAMPLES::

        sage: from flatsurf.geometry.polyhedra import platonic_cube
        sage: polyhedron,surface,surface_to_polyhedron = platonic_cube()
        sage: TestSuite(surface).run()
    """
    signs = (-1, 1)
    vertices = [
        vector(QQ, (x, y, z)) for x in signs for y in signs for z in signs
    ]
    polyhedron = Polyhedron(vertices=vertices)
    surface, mapping = polyhedron_to_cone_surface(
        polyhedron, scaling_factor=QQ(1) / 2
    )
    return polyhedron, surface, mapping
def platonic_octahedron():
    r"""Produce a triple consisting of a polyhedral version of the platonic octahedron,
    the associated cone surface, and a ConeSurfaceToPolyhedronMap from the surface
    to the polyhedron.

    EXAMPLES::

        sage: from flatsurf.geometry.polyhedra import platonic_octahedron
        sage: polyhedron,surface,surface_to_polyhedron = platonic_octahedron()  # long time (.3s)
        sage: TestSuite(surface).run()  # long time (see above)
    """
    vertices = []
    for axis in range(3):
        unit = vector(QQ, [1 if k == axis else 0 for k in range(3)])
        vertices.append(-unit)
        vertices.append(unit)
    octahedron = Polyhedron(vertices=vertices)
    surface, surface_to_octahedron = polyhedron_to_cone_surface(
        octahedron, scaling_factor=AA(sqrt(2))
    )
    return octahedron, surface, surface_to_octahedron
def platonic_dodecahedron():
    r"""Produce a triple consisting of a polyhedral version of the platonic dodecahedron,
    the associated cone surface, and a ConeSurfaceToPolyhedronMap from the surface
    to the polyhedron.

    EXAMPLES::

        sage: from flatsurf.geometry.polyhedra import platonic_dodecahedron
        sage: polyhedron, surface, surface_to_polyhedron = platonic_dodecahedron()  # long time (1s)
        sage: TestSuite(surface).run()  # long time (.8s)
    """
    # Work over Q(phi) where phi is the golden ratio.
    phi = AA(1 + sqrt(5)) / 2
    F = NumberField(phi.minpoly(), "phi", embedding=phi)
    phi = F.gen()
    signs = (-1, 1)
    # The eight cube vertices ...
    vertices = [vector(F, (x, y, z)) for x in signs for y in signs for z in signs]
    # ... together with the twelve golden-rectangle vertices.
    for x in signs:
        for y in signs:
            vertices.append(vector(F, (0, x * phi, y / phi)))
            vertices.append(vector(F, (y / phi, 0, x * phi)))
            vertices.append(vector(F, (x * phi, y / phi, 0)))
    scale = AA(2 / sqrt(1 + (phi - 1) ** 2 + (1 / phi - 1) ** 2))
    p = Polyhedron(vertices=vertices)
    s, m = polyhedron_to_cone_surface(p, scaling_factor=scale)
    return p, s, m
def platonic_icosahedron():
    r"""Produce a triple consisting of a polyhedral version of the platonic icosahedron,
    the associated cone surface, and a ConeSurfaceToPolyhedronMap from the surface
    to the polyhedron.

    EXAMPLES::

        sage: from flatsurf.geometry.polyhedra import platonic_icosahedron
        sage: polyhedron,surface,surface_to_polyhedron = platonic_icosahedron()  # long time (.9s)
        sage: TestSuite(surface).run()  # long time (see above)
    """
    # Work over Q(phi) where phi is the golden ratio.
    phi = AA(1 + sqrt(5)) / 2
    F = NumberField(phi.minpoly(), "phi", embedding=phi)
    phi = F.gen()
    vertices = []
    for i in range(3):
        for s1 in (-1, 1):
            for s2 in (-1, 1):
                coords = [0, 0, 0]
                coords[i] = s1 * phi
                coords[(i + 1) % 3] = s2
                vertices.append(vector(F, coords))
    p = Polyhedron(vertices=vertices)
    s, m = polyhedron_to_cone_surface(p)
    return p, s, m
from flatsurf.geometry.surface import OrientedSimilaritySurface
from flatsurf.geometry.mappings import SurfaceMapping
from sage.misc.cachefunc import cached_method
class GL2RImageSurface(OrientedSimilaritySurface):
    r"""
    The GL(2,R) image of an oriented similarity surface.

    EXAMPLE::

        sage: from flatsurf import translation_surfaces
        sage: S = translation_surfaces.octagon_and_squares()
        sage: r = matrix(ZZ,[[0, 1], [1, 0]])
        sage: SS = r * S
        sage: S.canonicalize() == SS.canonicalize()
        True

    TESTS::

        sage: TestSuite(SS).run()
        sage: from flatsurf.geometry.half_dilation_surface import GL2RImageSurface
        sage: isinstance(SS, GL2RImageSurface)
        True
    """

    def __init__(self, surface, m, ring=None, category=None):
        # This image surface is immutable, so it must be backed by an
        # immutable surface; for a mutable finite surface, take a snapshot.
        if surface.is_mutable():
            if surface.is_finite_type():
                from flatsurf.geometry.surface import MutableOrientedSimilaritySurface

                self._s = MutableOrientedSimilaritySurface.from_surface(surface)
            else:
                raise ValueError("Can not apply matrix to mutable infinite surface.")
        else:
            self._s = surface

        # Only the sign of the determinant matters structurally: a negative
        # determinant reverses orientation, so polygons and gluings must be
        # rebuilt with reversed edge order (see polygon() and opposite_edge()).
        det = m.determinant()

        if det > 0:
            self._det_sign = 1
        elif det < 0:
            self._det_sign = -1
        else:
            raise ValueError("Can not apply matrix with zero determinant to surface.")

        # Store an immutable copy of the matrix (it is hashed in __hash__).
        if m.is_mutable():
            from sage.all import matrix

            m = matrix(m, immutable=True)

        self._m = m

        # The base ring must contain both the matrix entries and the
        # coordinates of the underlying surface; take a common parent when
        # they differ and no explicit ring was requested.
        if ring is None:
            if m.base_ring() == self._s.base_ring():
                base_ring = self._s.base_ring()
            else:
                from sage.structure.element import get_coercion_model

                cm = get_coercion_model()
                base_ring = cm.common_parent(m.base_ring(), self._s.base_ring())
        else:
            base_ring = ring

        if category is None:
            category = surface.category()

        super().__init__(base_ring, category=category)

    def roots(self):
        r"""
        Return root labels for the polygons forming the connected
        components of this surface.

        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.roots`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: S = r * S
            sage: S.roots()
            (0,)
        """
        # Labels are unchanged by the linear action, so delegate.
        return self._s.roots()

    def is_compact(self):
        r"""
        Return whether this surface is compact as a topological space.

        This implements
        :meth:`flatsurf.geometry.categories.topological_surfaces.TopologicalSurfaces.ParentMethods.is_compact`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: S = r * S
            sage: S.is_compact()
            True
        """
        # Compactness is preserved by the linear action.
        return self._s.is_compact()

    def is_mutable(self):
        r"""
        Return whether this surface is mutable, i.e., return ``False``.

        This implements
        :meth:`flatsurf.geometry.categories.topological_surfaces.TopologicalSurfaces.ParentMethods.is_mutable`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: S = r * S
            sage: S.is_mutable()
            False
        """
        return False

    def is_translation_surface(self, positive=True):
        r"""
        Return whether this surface is a translation surface, i.e., glued
        edges can be transformed into each other by translations.

        This implements
        :meth:`flatsurf.geometry.categories.similarity_surfaces.SimilaritySurfaces.ParentMethods.is_translation_surface`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: S = r * S
            sage: S.is_translation_surface()
            True
        """
        return self._s.is_translation_surface(positive=positive)

    @cached_method
    def polygon(self, lab):
        r"""
        Return the polygon with ``label``.

        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.polygon`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: S = r * S
            sage: S.polygon(0)
            Polygon(vertices=[(0, 0), (a, -a), (a + 2, -a), (2*a + 2, 0), (2*a + 2, 2), (a + 2, a + 2), (a, a + 2), (0, 2)])
        """
        if self._det_sign == 1:
            # Orientation preserved: just map each edge vector by the matrix.
            p = self._s.polygon(lab)
            edges = [self._m * p.edge(e) for e in range(len(p.vertices()))]

            from flatsurf import Polygon

            return Polygon(edges=edges, base_ring=self.base_ring())
        else:
            # Orientation reversed: traverse the edges backwards and negate
            # them so the image polygon is again counterclockwise.
            p = self._s.polygon(lab)
            edges = [
                self._m * (-p.edge(e)) for e in range(len(p.vertices()) - 1, -1, -1)
            ]

            from flatsurf import Polygon

            return Polygon(edges=edges, base_ring=self.base_ring())

    def labels(self):
        r"""
        Return the labels of this surface.

        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.labels`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: S = r * S
            sage: S.labels()
            (0, 1, 2)
        """
        return self._s.labels()

    def opposite_edge(self, p, e):
        r"""
        Return the polygon label and edge index when crossing over the ``edge``
        of the polygon ``label``.

        This implements
        :meth:`flatsurf.geometry.categories.polygonal_surfaces.PolygonalSurfaces.ParentMethods.opposite_edge`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: S = r * S
            sage: S.opposite_edge(0, 0)
            (2, 0)
        """
        if self._det_sign == 1:
            return self._s.opposite_edge(p, e)
        else:
            # With reversed orientation, edge e of an n-gon corresponds to
            # edge n-1-e of the original polygon; apply the same relabeling
            # to the result on the other side of the gluing.
            polygon = self._s.polygon(p)
            pp, ee = self._s.opposite_edge(p, len(polygon.vertices()) - 1 - e)
            polygon2 = self._s.polygon(pp)
            return pp, len(polygon2.vertices()) - 1 - ee

    def __repr__(self):
        r"""
        Return a printable representation of this surface.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: matrix([[0, 1], [1, 0]]) * S
            Translation Surface in H_3(4) built from 2 squares and a regular octagon

            sage: matrix([[0, 2], [1, 0]]) * S
            Translation Surface in H_3(4) built from a rhombus, a rectangle and an octagon
        """
        if self.is_finite_type():
            # Materialize a finite surface so that its repr (which inspects
            # the polygons) can describe this image surface accurately.
            from flatsurf.geometry.surface import MutableOrientedSimilaritySurface

            S = MutableOrientedSimilaritySurface.from_surface(self)
            S.set_immutable()
            return repr(S)

        return f"GL2RImageSurface of {self._s!r}"

    def __hash__(self):
        r"""
        Return a hash value for this surface that is compatible with
        :meth:`__eq__`.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: r = matrix(ZZ,[[0, 1], [1, 0]])
            sage: hash(r * S) == hash(r * S)
            True
        """
        # The matrix was made immutable in __init__ precisely so it can be
        # hashed here together with the underlying surface.
        return hash((self._s, self._m))

    def __eq__(self, other):
        r"""
        Return whether this image is indistinguishable from ``other``.

        See :meth:`SimilaritySurfaces.FiniteType._test_eq_surface` for details
        on this notion of equality.

        EXAMPLES::

            sage: from flatsurf import translation_surfaces
            sage: S = translation_surfaces.octagon_and_squares()
            sage: m = matrix(ZZ,[[0, 1], [1, 0]])
            sage: m * S == m * S
            True
        """
        if not isinstance(other, GL2RImageSurface):
            return False

        return (
            self._s == other._s
            and self._m == other._m
            and self.base_ring() == other.base_ring()
        )
class GL2RMapping(SurfaceMapping):
    r"""
    A mapping that pushes a surface forward under a 2x2 matrix.

    For matrices of negative determinant the edges of each polygon must be
    relabeled (edges have to keep a counterclockwise cyclic order): in an
    n-gon, edge `e` is renamed according to the involution `e \mapsto n-1-e`.

    EXAMPLE::

        sage: from flatsurf import translation_surfaces
        sage: s=translation_surfaces.veech_2n_gon(4)
        sage: from flatsurf.geometry.half_dilation_surface import GL2RMapping
        sage: mat=Matrix([[2,1],[1,1]])
        sage: m=GL2RMapping(s,mat)
        sage: TestSuite(m.codomain()).run()
    """

    def __init__(self, s, m, ring=None, category=None):
        r"""
        Construct the map obtained by hitting the surface ``s`` with the 2x2
        matrix ``m``, which should have positive determinant.
        """
        # Keep the matrix and its inverse around for pushing and pulling
        # tangent vectors.
        self._m = m
        self._im = ~m
        image = GL2RImageSurface(s, m, ring=ring, category=category or s.category())
        SurfaceMapping.__init__(self, s, image)

    def push_vector_forward(self, tangent_vector):
        r"""Return the image of the provided tangent vector under this mapping."""
        label = tangent_vector.polygon_label()
        return self.codomain().tangent_vector(
            label,
            self._m * tangent_vector.point(),
            self._m * tangent_vector.vector(),
        )

    def pull_vector_back(self, tangent_vector):
        r"""Return the preimage of the provided tangent vector under this mapping."""
        label = tangent_vector.polygon_label()
        return self.domain().tangent_vector(
            label,
            self._im * tangent_vector.point(),
            self._im * tangent_vector.vector(),
        )
import numpy as np
from sage import utils, core
from tqdm.auto import tqdm
from scipy.stats import norm
def estimate_total(imputer, X, Y, batch_size, loss_fn):
    '''Estimate sum of SAGE values.

    Computes (mean loss with no features) - (mean loss with all features)
    over the full dataset, processed in minibatches.
    '''
    num_features = imputer.num_groups
    num_batches = int(np.ceil(len(X) / batch_size))

    samples_seen = 0
    full_loss = 0      # running mean loss with every feature included
    null_loss = 0      # running mean loss with every feature excluded

    for batch in range(num_batches):
        x = X[batch * batch_size:(batch + 1) * batch_size]
        y = Y[batch * batch_size:(batch + 1) * batch_size]
        samples_seen += len(x)

        # Incremental mean update over all samples seen so far.
        ones = np.ones((len(x), num_features), dtype=bool)
        full_loss += np.sum(loss_fn(imputer(x, ones), y) - full_loss) / samples_seen

        zeros = np.zeros((len(x), num_features), dtype=bool)
        null_loss += np.sum(loss_fn(imputer(x, zeros), y) - null_loss) / samples_seen

    return null_loss - full_loss
def estimate_holdout_importance(imputer, X, Y, batch_size, loss_fn, batches, rng):
    '''Estimate the impact of holding out features individually.

    Args:
      imputer: model that accommodates held out features (exposes num_groups).
      X: input data.
      Y: target data.
      batch_size: number of samples per minibatch.
      loss_fn: per-sample loss function.
      batches: number of minibatches to average over.
      rng: numpy random generator used to sample minibatches.

    Returns:
      Array of length num_features where entry i is (mean loss with feature i
      held out) - (mean loss with all features included).
    '''
    N, _ = X.shape
    num_features = imputer.num_groups
    all_loss = 0
    holdout_importance = np.zeros(num_features)
    S = np.ones((batch_size, num_features), dtype=bool)

    # Sample the same batches for all features.
    for it in range(batches):
        # Sample minibatch.
        mb = rng.choice(N, batch_size)
        x = X[mb]
        y = Y[mb]

        # Update running mean of the loss with all features included.
        # BUG FIX: this previously used np.sum, which adds the whole batch
        # but divides only by the batch count (it + 1). For batch_size > 1
        # that makes the accumulator a batch *sum* rather than a mean and
        # breaks the incremental-mean recurrence (cf. estimate_total, which
        # correctly divides a sum by the cumulative sample count). np.mean
        # gives the standard streaming mean over batch means.
        all_loss += np.mean(loss_fn(imputer(x, S), y) - all_loss) / (it + 1)

        # Loss with each feature held out in turn (S is restored after each).
        for i in range(num_features):
            S[:, i] = 0
            holdout_importance[i] += (
                np.mean(loss_fn(imputer(x, S), y) - holdout_importance[i])
                / (it + 1))
            S[:, i] = 1

    return holdout_importance - all_loss
class SignEstimator:
    '''
    Estimate SAGE values to a lower precision, focusing on the sign. Based on
    the IteratedEstimator strategy of calculating values one at a time.

    Args:
      imputer: model that accommodates held out features.
      loss: loss function ('mse', 'cross entropy').
      random_state: random seed, enables reproducibility.
    '''

    def __init__(self,
                 imputer,
                 loss='cross entropy',
                 random_state=None):
        self.imputer = imputer
        # Per-sample losses are needed for the running-variance tracker, so
        # reduction is disabled.
        self.loss_fn = utils.get_loss(loss, reduction='none')
        self.random_state = random_state

    def __call__(self,
                 X,
                 Y=None,
                 batch_size=512,
                 sign_confidence=0.99,
                 narrow_thresh=0.025,
                 optimize_ordering=True,
                 ordering_batches=1,
                 verbose=False,
                 bar=True):
        '''
        Estimate SAGE values.

        Args:
          X: input data.
          Y: target data. If None, model output will be used.
          batch_size: number of examples to be processed in parallel, should be
            set to a large value.
          sign_confidence: confidence level on sign.
          narrow_thresh: threshold for detecting that the standard deviation is
            small enough
          optimize_ordering: whether to guess an ordering of features based on
            importance. May accelerate convergence.
          ordering_batches: number of minibatches while determining ordering.
          verbose: print progress messages.
          bar: display progress bar.

        Convergence for each SAGE value is detected when one of two conditions
        holds: (1) the sign is known with high confidence (given by
        sign_confidence), or (2) the standard deviation of the Gaussian
        confidence interval is sufficiently narrow (given by narrow_thresh).

        Returns: Explanation object.
        '''
        # Set random state.
        self.rng = np.random.default_rng(seed=self.random_state)

        # Determine explanation type.
        if Y is not None:
            explanation_type = 'SAGE'
        else:
            explanation_type = 'Shapley Effects'

        # Verify model.
        N, _ = X.shape
        num_features = self.imputer.num_groups
        X, Y = utils.verify_model_data(self.imputer, X, Y, self.loss_fn,
                                       batch_size)

        # Verify thresholds.
        assert 0 < narrow_thresh < 1
        assert 0.9 <= sign_confidence < 1
        # The sign is "known" when |value| / std exceeds the z-score for
        # sign_confidence, i.e. when std / |value| < sign_thresh.
        sign_thresh = 1 / norm.ppf(sign_confidence)

        # For detecting convergence. upper_val/lower_val bound the plausible
        # range of SAGE values; the gap between them normalizes the std in
        # the "narrow interval" convergence criterion.
        total = estimate_total(self.imputer, X, Y, batch_size, self.loss_fn)
        upper_val = max(total / num_features, 0)
        lower_val = min(total / num_features, 0)

        # Feature ordering.
        if optimize_ordering:
            if verbose:
                print('Determining feature ordering...')
            holdout_importance = estimate_holdout_importance(
                self.imputer, X, Y, batch_size, self.loss_fn, ordering_batches, self.rng
            )
            if verbose:
                print('Done')
            # Use np.abs in case there are large negative contributors.
            ordering = list(np.argsort(np.abs(holdout_importance))[::-1])
        else:
            ordering = list(range(num_features))

        # Set up bar.
        if bar:
            bar = tqdm(total=1)

        # Iterated sampling: estimate one feature's value at a time, in the
        # chosen ordering, until its convergence criterion fires.
        tracker_list = []
        for i, ind in enumerate(ordering):
            tracker = utils.ImportanceTracker()
            it = 0
            converged = False
            while not converged:
                # Sample data.
                mb = self.rng.choice(N, batch_size)
                x = X[mb]
                y = Y[mb]

                # Sample subset of features.
                S = utils.sample_subset_feature(num_features, batch_size, ind)

                # Loss with feature excluded.
                y_hat = self.imputer(x, S)
                loss_discluded = self.loss_fn(y_hat, y)

                # Loss with feature included.
                S[:, ind] = 1
                y_hat = self.imputer(x, S)
                loss_included = self.loss_fn(y_hat, y)

                # Calculate delta sample.
                tracker.update(loss_discluded - loss_included)

                # Calculate progress. The 1e-12 floors guard against division
                # by zero when the value or the gap is (near) zero.
                val = tracker.values.item()
                std = tracker.std.item()
                gap = max(max(upper_val, val) - min(lower_val, val), 1e-12)
                converged_sign = (std / max(np.abs(val), 1e-12)) < sign_thresh
                converged_narrow = (std / gap) < narrow_thresh

                # Print progress message.
                if verbose:
                    print('Sign Ratio = {:.4f} (Converge at {:.4f}), '
                          'Narrow Ratio = {:.4f} (Converge at {:.4f})'.format(
                            std / np.abs(val), sign_thresh,
                            std / gap, narrow_thresh))

                # Check for convergence.
                converged = converged_sign or converged_narrow
                if converged:
                    if verbose:
                        print('Detected feature convergence')
                    # Skip bar ahead.
                    if bar:
                        bar.n = np.around(bar.total * (i+1) / num_features, 4)
                        bar.refresh()

                # Update convergence estimation: extrapolate how many
                # iterations each criterion still needs (std shrinks like
                # 1/sqrt(iterations)) and report progress toward the sooner.
                elif bar:
                    N_sign = (it+1) * ((std / np.abs(val)) / sign_thresh) ** 2
                    N_narrow = (it+1) * ((std / gap) / narrow_thresh) ** 2
                    N_est = min(N_sign, N_narrow)
                    bar.n = np.around((i + (it+1) / N_est) / num_features, 4)
                    bar.refresh()

                # Increment iteration variable.
                it += 1

            if verbose:
                print('Done with feature {}'.format(i))
            tracker_list.append(tracker)

            # Adjust min max value.
            upper_val = max(upper_val, tracker.values.item())
            lower_val = min(lower_val, tracker.values.item())

        if bar:
            bar.close()

        # Extract SAGE values, undoing the estimation ordering so results are
        # reported in the original feature order.
        reverse_ordering = [ordering.index(ind) for ind in range(num_features)]
        values = np.array(
            [tracker_list[ind].values.item() for ind in reverse_ordering])
        std = np.array(
            [tracker_list[ind].std.item() for ind in reverse_ordering])

        return core.Explanation(values, std, explanation_type)
import numpy as np
import warnings
from sage import utils
def verify_nonoverlapping(groups):
    '''Verify that no index is present in more than one group.

    Args:
      groups: sequence of index sequences, one per group.

    Returns:
      True if every index occurs in at most one group, else False.
    '''
    # An empty list of groups trivially has no overlaps. Handle it explicitly
    # because np.concatenate raises ValueError on an empty sequence.
    if len(groups) == 0:
        return True
    used_inds = np.concatenate(groups)
    return bool(np.all(np.unique(used_inds, return_counts=True)[1] == 1))
class GroupedImputer:
    '''GroupedImputer base class.

    Subclasses evaluate a model while holding out entire groups of features.

    Args:
      model: prediction model, converted to a callable via
        utils.model_conversion.
      groups: list of non-overlapping feature index groups.
      total: total number of input features.
      remaining_features: inclusion flag (0 or 1) applied to features that do
        not belong to any group.
    '''

    def __init__(self, model, groups, total, remaining_features):
        self.model = utils.model_conversion(model)

        # Verify that groups are non-overlapping.
        if not verify_nonoverlapping(groups):
            raise ValueError('groups must be non-overlapping')

        # Groups matrix: row i is a boolean mask over features for group i.
        # The extra final row marks the features not covered by any group.
        self.groups_mat = np.zeros((len(groups) + 1, total), dtype=bool)
        for i, group in enumerate(groups):
            self.groups_mat[i, group] = 1
        self.groups_mat[-1, :] = 1 - np.sum(self.groups_mat, axis=0)

        # For features that are not specified in any group.
        self.remaining = remaining_features

    def __call__(self, x, S):
        '''Calling a GroupedImputer should evaluate the model with the
        specified subset of features.'''
        raise NotImplementedError

    def inclusion_matrix(self, S):
        # Expand the per-group mask S (batch, num_groups) to a per-feature
        # mask (batch, total): append a column for the "remaining" pseudo-group
        # and multiply by the groups matrix. hstack copies S, so the caller's
        # array is not modified.
        S = np.hstack((S, np.zeros((len(S), 1), dtype=bool)))
        S[:, -1] = self.remaining
        return np.matmul(S, self.groups_mat)
class GroupedDefaultImputer(GroupedImputer):
    '''Replace features with default values.

    Args:
      model: prediction model.
      values: default feature values, shape (dim,) or (1, dim).
      groups: list of non-overlapping feature index groups.
      remaining_features: inclusion flag (0 or 1) for features not in any
        group.
    '''

    def __init__(self, model, values, groups, remaining_features=0):
        super().__init__(model, groups, values.shape[-1], remaining_features)

        # Normalize values to shape (1, dim).
        if values.ndim == 1:
            values = values[np.newaxis]
        elif values.shape[0] != 1:
            # BUG FIX: this previously tested ``values[0] != 1`` — an
            # element-wise comparison of the first *row* against 1 — instead
            # of checking that the leading dimension is 1. That rejected
            # valid (1, dim) inputs (and also made the truth value ambiguous
            # for dim > 1) rather than validating the shape.
            raise ValueError('values shape must be (dim,) or (1, dim)')
        self.values = values
        self.values_repeat = values
        self.num_groups = len(groups)

    def __call__(self, x, S):
        # Prepare x: cache a batch-sized tiling of the default values.
        if len(x) != len(self.values_repeat):
            self.values_repeat = self.values.repeat(len(x), 0)

        # Expand the per-group mask S to a per-feature mask.
        S = self.inclusion_matrix(S)

        # Replace held-out entries with their default values.
        x_ = x.copy()
        x_[~S] = self.values_repeat[~S]

        # Make predictions.
        return self.model(x_)
class GroupedMarginalImputer(GroupedImputer):
    '''Marginalizing out removed features with their marginal distribution.

    Args:
      model: prediction model.
      data: background dataset whose rows supply samples for held-out
        features.
      groups: list of non-overlapping feature index groups.
      remaining_features: inclusion flag (0 or 1) for features not in any
        group.
    '''

    def __init__(self, model, data, groups, remaining_features=0):
        super().__init__(model, groups, data.shape[1], remaining_features)
        self.data = data
        self.data_repeat = data
        self.samples = len(data)
        self.num_groups = len(groups)

        # Each prediction averages over all background rows, so a large
        # background set multiplies the cost of every call.
        if len(data) > 1024:
            warnings.warn('using {} background samples may lead to slow '
                          'runtime, consider using <= 1024'.format(
                            len(data)), RuntimeWarning)

    def __call__(self, x, S):
        # Prepare x: repeat() duplicates each input row `samples` times
        # consecutively, so copies of input row k form a contiguous block.
        n = len(x)
        x = x.repeat(self.samples, 0)

        # Prepare S (expanded to a per-feature mask, then aligned with x).
        S = self.inclusion_matrix(S)
        S = S.repeat(self.samples, 0)

        # Prepare samples: tile() stacks the whole background set once per
        # input row, so each input-row block lines up with the full
        # background. The tiling is cached between calls of the same size.
        if len(self.data_repeat) != self.samples * n:
            self.data_repeat = np.tile(self.data, (n, 1))

        # Replace specified indices.
        x_ = x.copy()
        x_[~S] = self.data_repeat[~S]

        # Make predictions, then average over the background samples for
        # each original input row.
        pred = self.model(x_)
        pred = pred.reshape(-1, self.samples, *pred.shape[1:])
        return np.mean(pred, axis=1)
import pickle
import numpy as np
from sage import plotting
class Explanation:
    '''
    For storing and plotting Explanations.

    Args:
      values: explanation values.
      std: standard deviation confidence intervals for explanation values.
      explanation_type: 'SAGE' or 'Shapley Effects' (used only for plotting).
    '''

    def __init__(self, values, std, explanation_type='SAGE'):
        self.values = values
        self.std = std
        self.explanation_type = explanation_type

    def plot(self,
             feature_names=None,
             sort_features=True,
             max_features=np.inf,
             orientation='horizontal',
             error_bars=True,
             confidence_level=0.95,
             capsize=5,
             color='tab:green',
             title='Feature Importance',
             title_size=20,
             tick_size=16,
             tick_rotation=None,
             label_size=16,
             figsize=(10, 7),
             return_fig=False):
        '''
        Plot SAGE values.

        Args:
          feature_names: list of feature names.
          sort_features: whether to sort features by their SAGE values.
          max_features: number of features to display.
          orientation: horizontal (default) or vertical.
          error_bars: whether to include standard deviation error bars.
          confidence_level: confidence interval coverage (e.g., 95%).
          capsize: error bar cap width.
          color: bar chart color.
          title: plot title.
          title_size: font size for title.
          tick_size: font size for feature names and numerical values.
          tick_rotation: tick rotation for feature names (vertical plots only).
          label_size: font size for label.
          figsize: figure size (if fig is None).
          return_fig: whether to return matplotlib figure object.
        '''
        # Delegate to the plotting module; argument order must match
        # plotting.plot's signature.
        return plotting.plot(
            self, feature_names, sort_features, max_features, orientation,
            error_bars, confidence_level, capsize, color, title, title_size,
            tick_size, tick_rotation, label_size, figsize, return_fig)

    def comparison(self,
                   other_values,
                   comparison_names=None,
                   feature_names=None,
                   sort_features=True,
                   max_features=np.inf,
                   orientation='vertical',
                   error_bars=True,
                   confidence_level=0.95,
                   capsize=5,
                   colors=None,
                   title='Feature Importance Comparison',
                   title_size=20,
                   tick_size=16,
                   tick_rotation=None,
                   label_size=16,
                   legend_loc=None,
                   figsize=(10, 7),
                   return_fig=False):
        '''
        Plot comparison with another set of SAGE values.

        Args:
          other_values: another SAGE values object.
          comparison_names: tuple of names for each SAGE value object.
          feature_names: list of feature names.
          sort_features: whether to sort features by their SAGE values.
          max_features: number of features to display.
          orientation: horizontal (default) or vertical.
          error_bars: whether to include standard deviation error bars.
          confidence_level: confidence interval coverage (e.g., 95%).
          capsize: error bar cap width.
          colors: colors for each set of SAGE values.
          title: plot title.
          title_size: font size for title.
          tick_size: font size for feature names and numerical values.
          tick_rotation: tick rotation for feature names (vertical plots only).
          label_size: font size for label.
          legend_loc: legend location.
          figsize: figure size (if fig is None).
          return_fig: whether to return matplotlib figure object.
        '''
        return plotting.comparison_plot(
            (self, other_values), comparison_names, feature_names,
            sort_features, max_features, orientation, error_bars,
            confidence_level, capsize, colors, title, title_size, tick_size,
            tick_rotation, label_size, legend_loc, figsize, return_fig)

    def plot_sign(self,
                  feature_names=None,
                  sort_features=True,
                  max_features=np.inf,
                  orientation='horizontal',
                  confidence_level=0.95,
                  capsize=5,
                  title='Feature Importance Sign',
                  title_size=20,
                  tick_size=16,
                  tick_rotation=None,
                  label_size=16,
                  figsize=(10, 7),
                  return_fig=False):
        '''
        Plot SAGE values, focusing on their sign.

        Args:
          feature_names: list of feature names. If None, generic names are
            generated (consistent with plot() and comparison(); the
            underlying plotting.plot_sign already handles None).
          sort_features: whether to sort features by their SAGE values.
          max_features: number of features to display.
          orientation: horizontal (default) or vertical.
          confidence_level: confidence interval coverage (e.g., 95%).
          capsize: error bar cap width.
          title: plot title.
          title_size: font size for title.
          tick_size: font size for feature names and numerical values.
          tick_rotation: tick rotation for feature names (vertical plots only).
          label_size: font size for label.
          figsize: figure size (if fig is None).
          return_fig: whether to return matplotlib figure object.
        '''
        return plotting.plot_sign(
            self, feature_names, sort_features, max_features, orientation,
            confidence_level, capsize, title, title_size, tick_size,
            tick_rotation, label_size, figsize, return_fig)

    def save(self, filename):
        '''Save Explanation object to ``filename`` via pickle.'''
        if isinstance(filename, str):
            with open(filename, 'wb') as f:
                pickle.dump(self, f)
        else:
            raise TypeError('filename must be str')

    def __repr__(self):
        # Compact fixed-precision rendering of the value and std arrays.
        with np.printoptions(precision=2, threshold=12, floatmode='fixed'):
            return '{} Explanation(\n  (Mean): {}\n  (Std):  {}\n)'.format(
                self.explanation_type, self.values, self.std)
def load(filename):
    '''Load Explanation object.'''
    with open(filename, 'rb') as f:
        loaded = pickle.load(f)
    if not isinstance(loaded, Explanation):
        raise ValueError('object is not instance of Explanation class')
    return loaded
import warnings
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
def plot(explanation,
feature_names=None,
sort_features=True,
max_features=np.inf,
orientation='horizontal',
error_bars=True,
confidence_level=0.95,
capsize=5,
color='tab:green',
title='Feature Importance',
title_size=20,
tick_size=16,
tick_rotation=None,
label_size=16,
figsize=(10, 7),
return_fig=False):
'''
Plot SAGE values.
Args:
explanation: Explanation object.
feature_names: list of feature names.
sort_features: whether to sort features by their values.
max_features: number of features to display.
orientation: horizontal (default) or vertical.
error_bars: whether to include standard deviation error bars.
confidence_level: confidence interval coverage (e.g., 95%).
capsize: error bar cap width.
color: bar chart color.
title: plot title.
title_size: font size for title.
tick_size: font size for feature names and numerical values.
tick_rotation: tick rotation for feature names (vertical plots only).
label_size: font size for label.
figsize: figure size (if fig is None).
return_fig: whether to return matplotlib figure object.
'''
# Default feature names.
if feature_names is None:
feature_names = ['Feature {}'.format(i) for i in
range(len(explanation.values))]
else:
if isinstance(feature_names, list):
feature_names = np.array(feature_names)
# Sort features if necessary.
if len(feature_names) > max_features:
sort_features = True
# Perform sorting.
values = explanation.values
std = explanation.std
if sort_features:
argsort = np.argsort(values)[::-1]
values = values[argsort]
std = std[argsort]
feature_names = feature_names[argsort]
# Remove extra features if necessary.
if len(feature_names) > max_features:
feature_names = (list(feature_names[:max_features])
+ ['Remaining Features'])
values = (list(values[:max_features])
+ [np.sum(values[max_features:])])
std = (list(std[:max_features])
+ [np.sum(std[max_features:] ** 2) ** 0.5])
# Warn if too many features.
if len(feature_names) > 50:
warnings.warn('Plotting {} features may make figure too crowded, '
'consider using max_features'.format(
len(feature_names)), Warning)
# Discard std if necessary.
if not error_bars:
std = None
else:
assert 0 < confidence_level < 1
std = std * norm.ppf(0.5 + confidence_level / 2)
# Make plot.
fig = plt.figure(figsize=figsize)
ax = fig.gca()
if orientation == 'horizontal':
# Bar chart.
ax.barh(np.arange(len(feature_names))[::-1], values,
color=color, xerr=std, capsize=capsize)
# Feature labels.
if tick_rotation is not None:
raise ValueError('rotation not supported for horizontal charts')
ax.set_yticks(np.arange(len(feature_names))[::-1])
ax.set_yticklabels(feature_names, fontsize=label_size)
# Axis labels and ticks.
ax.set_ylabel('')
ax.set_xlabel('{} value'.format(explanation.explanation_type),
fontsize=label_size)
ax.tick_params(axis='x', labelsize=tick_size)
elif orientation == 'vertical':
# Bar chart.
ax.bar(np.arange(len(feature_names)), values, color=color,
yerr=std, capsize=capsize)
# Feature labels.
if tick_rotation is None:
tick_rotation = 45
if tick_rotation < 90:
ha = 'right'
rotation_mode = 'anchor'
else:
ha = 'center'
rotation_mode = 'default'
ax.set_xticks(np.arange(len(feature_names)))
ax.set_xticklabels(feature_names, rotation=tick_rotation, ha=ha,
rotation_mode=rotation_mode,
fontsize=label_size)
# Axis labels and ticks.
ax.set_ylabel('{} value'.format(explanation.explanation_type),
fontsize=label_size)
ax.set_xlabel('')
ax.tick_params(axis='y', labelsize=tick_size)
else:
raise ValueError('orientation must be horizontal or vertical')
# Remove spines.
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.set_title(title, fontsize=title_size)
plt.tight_layout()
if return_fig:
return fig
else:
return
def comparison_plot(comparison_explanations,
comparison_names=None,
feature_names=None,
sort_features=True,
max_features=np.inf,
orientation='vertical',
error_bars=True,
confidence_level=0.95,
capsize=5,
colors=('tab:green', 'tab:blue'),
title='Feature Importance Comparison',
title_size=20,
tick_size=16,
tick_rotation=None,
label_size=16,
legend_loc=None,
figsize=(10, 7),
return_fig=False):
'''
Plot comparison between two different Explanation objects.
Args:
comparison_explanations: tuple of Explanation objects to be compared.
comparison_names: tuple of names for each Explanation object.
feature_names: list of feature names.
sort_features: whether to sort features by their SAGE values.
max_features: number of features to display.
orientation: horizontal (default) or vertical.
error_bars: whether to include standard deviation error bars.
confidence_level: confidence interval coverage (e.g., 95%).
capsize: error bar cap width.
colors: colors for each set of SAGE values.
title: plot title.
title_size: font size for title.
tick_size: font size for feature names and numerical values.
tick_rotation: tick rotation for feature names (vertical plots only).
label_size: font size for label.
legend_loc: legend location.
figsize: figure size (if fig is None).
return_fig: whether to return matplotlib figure object.
'''
# Default feature names.
if feature_names is None:
feature_names = ['Feature {}'.format(i) for i in
range(len(comparison_explanations[0].values))]
else:
if isinstance(feature_names, list):
feature_names = np.array(feature_names)
# Default comparison names.
num_comps = len(comparison_explanations)
if num_comps not in (2, 3, 4, 5):
raise ValueError('only support comparisons for 2-5 explanations')
if comparison_names is None:
comparison_names = ['Explanation {}'.format(i) for i in
range(num_comps)]
# Default colors.
if colors is None:
colors = ['tab:green', 'tab:blue', 'tab:purple',
'tab:orange', 'tab:pink'][:num_comps]
# Determine explanation type.
unique_types = np.unique([explanation.explanation_type
for explanation in comparison_explanations])
if len(unique_types) == 1:
explanation_type = unique_types[0]
else:
explanation_type = 'Importance'
# Sort features if necessary.
if len(feature_names) > max_features:
sort_features = True
# Extract values.
values = [sage_values.values for sage_values in comparison_explanations]
std = [sage_values.std for sage_values in comparison_explanations]
# Perform sorting.
if sort_features:
argsort = np.argsort(values[0])[::-1]
values = [sage_values[argsort] for sage_values in values]
std = [stddev[argsort] for stddev in std]
feature_names = feature_names[argsort]
# Remove extra features if necessary.
if len(feature_names) > max_features:
feature_names = (list(feature_names[:max_features])
+ ['Remaining Features'])
values = [
list(sage_values[:max_features])
+ [np.sum(sage_values[max_features:])]
for sage_values in values]
std = [list(stddev[:max_features])
+ [np.sum(stddev[max_features:] ** 2) ** 0.5]
for stddev in std]
# Warn if too many features.
if len(feature_names) > 50:
warnings.warn('Plotting {} features may make figure too crowded, '
'consider using max_features'.format(
len(feature_names)), Warning)
# Discard std if necessary.
if not error_bars:
std = [None for _ in std]
else:
assert 0 < confidence_level < 1
std = [stddev * norm.ppf(0.5 + confidence_level / 2) for stddev in std]
# Make plot.
width = 0.8 / num_comps
fig = plt.figure(figsize=figsize)
ax = fig.gca()
if orientation == 'horizontal':
# Bar chart.
enumeration = enumerate(zip(values, std, comparison_names, colors))
for i, (sage_values, stddev, name, color) in enumeration:
pos = - 0.4 + width / 2 + width * i
ax.barh(np.arange(len(feature_names))[::-1] - pos,
sage_values, height=width, color=color, xerr=stddev,
capsize=capsize, label=name)
# Feature labels.
if tick_rotation is not None:
raise ValueError('rotation not supported for horizontal charts')
ax.set_yticks(np.arange(len(feature_names))[::-1])
ax.set_yticklabels(feature_names, fontsize=label_size)
# Axis labels and ticks.
ax.set_ylabel('')
ax.set_xlabel('{} value'.format(explanation_type), fontsize=label_size)
ax.tick_params(axis='x', labelsize=tick_size)
elif orientation == 'vertical':
# Bar chart.
enumeration = enumerate(zip(values, std, comparison_names, colors))
for i, (sage_values, stddev, name, color) in enumeration:
pos = - 0.4 + width / 2 + width * i
ax.bar(np.arange(len(feature_names)) + pos,
sage_values, width=width, color=color, yerr=stddev,
capsize=capsize, label=name)
# Feature labels.
if tick_rotation is None:
tick_rotation = 45
if tick_rotation < 90:
ha = 'right'
rotation_mode = 'anchor'
else:
ha = 'center'
rotation_mode = 'default'
ax.set_xticks(np.arange(len(feature_names)))
ax.set_xticklabels(feature_names, rotation=tick_rotation, ha=ha,
rotation_mode=rotation_mode,
fontsize=label_size)
# Axis labels and ticks.
ax.set_ylabel('{} value'.format(explanation_type), fontsize=label_size)
ax.set_xlabel('')
ax.tick_params(axis='y', labelsize=tick_size)
# Remove spines.
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
plt.legend(loc=legend_loc, fontsize=label_size)
ax.set_title(title, fontsize=title_size)
plt.tight_layout()
if return_fig:
return fig
else:
return
def plot_sign(explanation,
              feature_names,
              sort_features=True,
              max_features=np.inf,
              orientation='horizontal',
              confidence_level=0.95,
              capsize=5,
              title='Feature Importance Sign',
              title_size=20,
              tick_size=16,
              tick_rotation=None,
              label_size=16,
              figsize=(10, 7),
              return_fig=False):
    '''
    Plot SAGE values, focusing on their sign.

    Each bar spans the confidence interval around a feature's value. Bars are
    colored green when the interval is entirely positive, red when entirely
    negative, and blue when the interval crosses zero (sign uncertain).

    Args:
      explanation: Explanation object.
      feature_names: list of feature names.
      sort_features: whether to sort features by their SAGE values.
      max_features: number of features to display.
      orientation: horizontal (default) or vertical.
      confidence_level: confidence interval coverage (e.g., 95%).
      capsize: error bar cap width (not used by this plot; kept for API
        consistency with the other plotting functions).
      title: plot title.
      title_size: font size for title.
      tick_size: font size for feature names and numerical values.
      tick_rotation: tick rotation for feature names (vertical plots only).
      label_size: font size for label.
      figsize: figure size (if fig is None).
      return_fig: whether to return matplotlib figure object.
    '''
    # Default feature names.
    if feature_names is None:
        feature_names = ['Feature {}'.format(i) for i in
                         range(len(explanation.values))]
    else:
        if isinstance(feature_names, list):
            feature_names = np.array(feature_names)

    # Find confidence interval width (half-width at the requested coverage).
    values = explanation.values
    std = explanation.std
    assert 0 < confidence_level < 1
    std = std * norm.ppf(0.5 + confidence_level / 2)

    # Set colors based on the sign of each confidence interval.
    colors = []
    for val, width in zip(values, std):
        if val > 0 and val - width > 0:
            colors.append('tab:green')
        elif val < 0 and val + width < 0:
            colors.append('tab:red')
        else:
            colors.append('tab:blue')
    colors = np.array(colors)

    # Sort features if necessary (trimming requires magnitude sorting).
    if len(feature_names) > max_features:
        sort_features = True

    # Remove extra features if necessary.
    if len(feature_names) > max_features:
        # Sort by magnitude.
        argsort = np.argsort(np.abs(values))[::-1]
        values = values[argsort]
        std = std[argsort]
        feature_names = feature_names[argsort]
        colors = colors[argsort]

        # Keep highest magnitude features; fold the rest into a single bar
        # (uncertainties combine in quadrature).
        new_value = np.sum(values[max_features:])
        new_std = np.sum(std[max_features:] ** 2) ** 0.5
        values = np.array(list(values[:max_features]) + [new_value])
        std = np.array(list(std[:max_features]) + [new_std])
        colors = np.array(list(colors[:max_features]) + ['tab:purple'])
        feature_names = np.array(list(feature_names[:max_features])
                                 + ['Remaining Features'])

    # Perform sorting.
    if sort_features:
        argsort = np.argsort(values)[::-1]
        values = values[argsort]
        std = std[argsort]
        feature_names = feature_names[argsort]
        colors = colors[argsort]

    # Warn if too many features.
    if len(feature_names) > 50:
        warnings.warn('Plotting {} features may make figure too crowded, '
                      'consider using max_features'.format(
                        len(feature_names)), Warning)

    # Make plot.
    fig = plt.figure(figsize=figsize)
    ax = fig.gca()

    if orientation == 'horizontal':
        # Bar chart: two half-bars covering [value - std, value + std].
        ax.barh(np.arange(len(feature_names))[::-1], std,
                left=values - std, color=colors, edgecolor='black',
                linewidth=0.5)
        ax.barh(np.arange(len(feature_names))[::-1], std,
                left=values, color=colors, edgecolor='black',
                linewidth=0.5)
        ax.axvline(0, color='black', linewidth=0.5)

        # Feature labels.
        if tick_rotation is not None:
            raise ValueError('rotation not supported for horizontal charts')
        ax.set_yticks(np.arange(len(feature_names))[::-1])
        ax.set_yticklabels(feature_names, fontsize=label_size)

        # Axis labels and ticks.
        ax.set_ylabel('')
        ax.set_xlabel('{} value'.format(explanation.explanation_type),
                      fontsize=label_size)
        ax.tick_params(axis='x', labelsize=tick_size)

    elif orientation == 'vertical':
        # Bar chart.
        ax.bar(np.arange(len(feature_names)), std, bottom=values - std,
               color=colors, edgecolor='black', linewidth=0.5)
        ax.bar(np.arange(len(feature_names)), std, bottom=values,
               color=colors, edgecolor='black', linewidth=0.5)
        ax.axhline(0, color='black', linewidth=0.5)

        # Feature labels.
        if tick_rotation is None:
            tick_rotation = 45
        if tick_rotation < 90:
            ha = 'right'
            rotation_mode = 'anchor'
        else:
            ha = 'center'
            rotation_mode = 'default'
        ax.set_xticks(np.arange(len(feature_names)))
        ax.set_xticklabels(feature_names, rotation=tick_rotation, ha=ha,
                           rotation_mode=rotation_mode,
                           fontsize=label_size)

        # Axis labels and ticks.
        ax.set_ylabel('{} value'.format(explanation.explanation_type),
                      fontsize=label_size)
        ax.set_xlabel('')
        ax.tick_params(axis='y', labelsize=tick_size)

    else:
        raise ValueError('orientation must be horizontal or vertical')

    # Remove spines.
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)

    ax.set_title(title, fontsize=title_size)
    plt.tight_layout()

    if return_fig:
        return fig
    else:
        return
import joblib
import numpy as np
from sage import utils, core
from tqdm.auto import tqdm
class PermutationEstimator:
    '''
    Estimate SAGE values by unrolling permutations of feature indices.

    Args:
      imputer: model that accommodates held out features.
      loss: loss function ('mse', 'cross entropy').
      n_jobs: number of jobs for parallel processing.
      random_state: random seed, enables reproducibility.
    '''

    def __init__(self,
                 imputer,
                 loss='cross entropy',
                 n_jobs=1,
                 random_state=None):
        self.imputer = imputer
        self.loss_fn = utils.get_loss(loss, reduction='none')
        self.random_state = random_state
        self.n_jobs = joblib.effective_n_jobs(n_jobs)
        # Bug fix: report based on the resolved job count (e.g. n_jobs=-1 on
        # a single-core machine resolves to 1 and should not print).
        if self.n_jobs != 1:
            print(f'PermutationEstimator will use {self.n_jobs} jobs')

    def __call__(self,
                 X,
                 Y=None,
                 batch_size=512,
                 detect_convergence=True,
                 thresh=0.025,
                 n_permutations=None,
                 min_coalition=0.0,
                 max_coalition=1.0,
                 verbose=False,
                 bar=True):
        '''
        Estimate SAGE values.

        Args:
          X: input data.
          Y: target data. If None, model output will be used.
          batch_size: number of examples to be processed in parallel, should be
            set to a large value.
          detect_convergence: whether to stop when approximately converged.
          thresh: threshold for determining convergence.
          n_permutations: number of permutations to unroll.
          min_coalition: minimum coalition size (int or float).
          max_coalition: maximum coalition size (int or float).
          verbose: print progress messages.
          bar: display progress bar.

        The default behavior is to detect convergence based on the width of the
        SAGE values' confidence intervals. Convergence is defined by the ratio
        of the maximum standard deviation to the gap between the largest and
        smallest values.

        Returns: Explanation object.
        '''
        # Set random state.
        self.rng = np.random.default_rng(seed=self.random_state)

        # Determine explanation type.
        if Y is not None:
            explanation_type = 'SAGE'
        else:
            explanation_type = 'Shapley Effects'

        # Verify model.
        N, _ = X.shape
        num_features = self.imputer.num_groups
        X, Y = utils.verify_model_data(self.imputer, X, Y, self.loss_fn,
                                       batch_size)

        # Determine min/max coalition sizes (floats are fractions of the total).
        if isinstance(min_coalition, float):
            min_coalition = int(min_coalition * num_features)
        if isinstance(max_coalition, float):
            max_coalition = int(max_coalition * num_features)
        assert min_coalition >= 0
        assert max_coalition <= num_features
        assert min_coalition < max_coalition
        if min_coalition > 0 or max_coalition < num_features:
            explanation_type = 'Relaxed ' + explanation_type

        # Possibly force convergence detection.
        if n_permutations is None:
            n_permutations = 1e20
            if not detect_convergence:
                detect_convergence = True
                if verbose:
                    print('Turning convergence detection on')
        if detect_convergence:
            assert 0 < thresh < 1

        # Set up bar.
        n_loops = int(np.ceil(n_permutations / (batch_size * self.n_jobs)))
        if bar:
            if detect_convergence:
                bar = tqdm(total=1)
            else:
                bar = tqdm(total=n_loops * self.n_jobs * batch_size)

        # Setup.
        tracker = utils.ImportanceTracker()

        for it in range(n_loops):
            # Sample data.
            batches = []
            for _ in range(self.n_jobs):
                idxs = self.rng.choice(N, batch_size)
                batches.append((X[idxs], Y[idxs]))

            # Get results from parallel processing of batches.
            results = joblib.Parallel(n_jobs=self.n_jobs)(
                joblib.delayed(self._process_sample)(
                    x, y, num_features, min_coalition, max_coalition)
                for x, y in batches
            )
            for scores, sample_counts in results:
                tracker.update(scores, sample_counts)

            # Calculate progress (guard against a zero gap).
            std = np.max(tracker.std)
            gap = max(tracker.values.max() - tracker.values.min(), 1e-12)
            ratio = std / gap

            # Print progress message.
            if verbose:
                if detect_convergence:
                    print(f'StdDev Ratio = {ratio:.4f} '
                          f'(Converge at {thresh:.4f})')
                else:
                    print(f'StdDev Ratio = {ratio:.4f}')

            # Check for convergence.
            if detect_convergence:
                if ratio < thresh:
                    if verbose:
                        print('Detected convergence')

                    # Skip bar ahead.
                    if bar:
                        bar.n = bar.total
                        bar.refresh()
                    break

            # Update progress bar.
            if bar and detect_convergence:
                # Update using convergence estimation.
                N_est = (it + 1) * (ratio / thresh) ** 2
                bar.n = np.around((it + 1) / N_est, 4)
                bar.refresh()
            if bar and not detect_convergence:
                # Bug fix: each loop processes n_jobs * batch_size
                # permutations; advancing by n_jobs alone would never reach
                # the bar total of n_loops * n_jobs * batch_size.
                bar.update(self.n_jobs * batch_size)

        if bar:
            bar.close()

        return core.Explanation(tracker.values, tracker.std, explanation_type)

    def _process_sample(self, x, y, num_features, min_coalition, max_coalition):
        '''Unroll one batch of sampled permutations, returning per-feature
        loss deltas and (for relaxed games) per-feature sample counts.'''
        # Setup.
        batch_size = len(x)
        arange = np.arange(batch_size)
        scores = np.zeros((batch_size, num_features))
        S = np.zeros((batch_size, num_features), dtype=bool)
        permutations = np.tile(np.arange(num_features), (batch_size, 1))

        # Sample permutations.
        for i in range(batch_size):
            self.rng.shuffle(permutations[i])

        # Calculate sample counts (only positions inside the coalition window
        # contribute scores).
        if min_coalition > 0 or max_coalition < num_features:
            sample_counts = np.zeros(num_features, dtype=int)
            for i in range(batch_size):
                sample_counts[permutations[i, min_coalition:max_coalition]] += 1
        else:
            sample_counts = None

        # Add necessary features to minimum coalition.
        for i in range(min_coalition):
            # Add next feature.
            inds = permutations[:, i]
            S[arange, inds] = 1

        # Make prediction with minimum coalition.
        y_hat = self.imputer(x, S)
        prev_loss = self.loss_fn(y_hat, y)

        # Add all remaining features.
        for i in range(min_coalition, max_coalition):
            # Add next feature.
            inds = permutations[:, i]
            S[arange, inds] = 1

            # Make prediction with missing features.
            y_hat = self.imputer(x, S)
            loss = self.loss_fn(y_hat, y)

            # Calculate delta sample.
            scores[arange, inds] = prev_loss - loss
            prev_loss = loss

        return scores, sample_counts
import sys
import numpy as np
def model_conversion(model):
    '''Convert model to callable.'''
    # Models whose prediction method can be used directly; checked in order.
    direct_adapters = (
        ('sklearn.base.ClassifierMixin', 'predict_proba'),
        ('sklearn.base.RegressorMixin', 'predict'),
        ('catboost.CatBoostClassifier', 'predict_proba'),
        ('catboost.CatBoostRegressor', 'predict'),
        ('lightgbm.basic.Booster', 'predict'),
    )
    for class_str, method_name in direct_adapters:
        if safe_isinstance(model, class_str):
            return getattr(model, method_name)

    if safe_isinstance(model, 'xgboost.core.Booster'):
        # XGBoost boosters require inputs wrapped in a DMatrix.
        import xgboost
        return lambda x: model.predict(xgboost.DMatrix(x))

    if safe_isinstance(model, 'torch.nn.Module'):
        print('Setting up imputer for PyTorch model, assuming that any '
              'necessary output activations are applied properly. If '
              'not, please set up nn.Sequential with nn.Sigmoid or nn.Softmax')
        import torch
        model.eval()
        device = next(model.parameters()).device
        return lambda x: model(torch.tensor(
            x, dtype=torch.float32, device=device)).cpu().data.numpy()

    if safe_isinstance(model, 'keras.Model'):
        print('Setting up imputer for keras model, assuming that any '
              'necessary output activations are applied properly. If not, '
              'please set up keras.Sequential with keras.layers.Softmax()')
        return lambda x: model(x, training=False).numpy()

    if callable(model):
        # Assume model is compatible function or callable object.
        return model

    raise ValueError('model cannot be converted automatically, '
                     'please convert to a lambda function')
def dataset_output(imputer, X, batch_size):
    '''Get model output for entire dataset.'''
    outputs = []
    num_batches = int(np.ceil(len(X) / batch_size))
    for batch in range(num_batches):
        x = X[batch * batch_size:(batch + 1) * batch_size]
        # Evaluate with every feature present (all-ones mask).
        mask = np.ones((len(x), imputer.num_groups), dtype=bool)
        outputs.append(imputer(x, mask))
    return np.concatenate(outputs)
def verify_model_data(imputer, X, Y, loss, batch_size):
    '''Ensure that model and data are set up properly.

    Validates/normalizes labels and (for cross entropy loss) checks that the
    model outputs valid probabilities. When Y is None, the model's own output
    is used as the target (Shapley Effects mode) and label checks are skipped.

    Returns the (possibly modified) X, Y pair.
    '''
    check_labels = True
    if Y is None:
        print('Calculating model sensitivity (Shapley Effects, not SAGE)')
        check_labels = False
        Y = dataset_output(imputer, X, batch_size)

        # Fix output shape for classification tasks.
        if isinstance(loss, CrossEntropyLoss):
            if Y.shape == (len(X),):
                Y = Y[:, np.newaxis]
            if Y.shape[1] == 1:
                # Single-probability output: expand to [P(class 0), P(class 1)].
                Y = np.concatenate([1 - Y, Y], axis=1)

    if isinstance(loss, CrossEntropyLoss):
        # Probe the model's output on one batch with all features present.
        x = X[:batch_size]
        probs = imputer(x, np.ones((len(x), imputer.num_groups), dtype=bool))

        # Check labels shape.
        if check_labels:
            Y = Y.astype(int)
            if Y.shape == (len(X),):
                # This is the preferred shape.
                pass
            elif Y.shape == (len(X), 1):
                Y = Y[:, 0]
            else:
                raise ValueError('labels shape should be (batch,) or (batch, 1)'
                                 ' for cross entropy loss')

        if (probs.ndim == 1) or (probs.shape[1] == 1):
            # Binary classifier that outputs a single probability.
            # Check label encoding.
            if check_labels:
                unique_labels = np.unique(Y)
                if np.array_equal(unique_labels, np.array([0, 1])):
                    # This is the preferred labeling.
                    pass
                elif np.array_equal(unique_labels, np.array([-1, 1])):
                    # Set -1 to 0.
                    Y = Y.copy()
                    Y[Y == -1] = 0
                else:
                    raise ValueError('labels for binary classification must be '
                                     '[0, 1] or [-1, 1]')

            # Check for valid probability outputs.
            valid_probs = np.all(np.logical_and(probs >= 0, probs <= 1))

        elif probs.ndim == 2:
            # Multiclass output, check for valid probability outputs
            # (each row must also sum to one).
            valid_probs = np.all(np.logical_and(probs >= 0, probs <= 1))
            ones = np.sum(probs, axis=1)
            valid_probs = valid_probs and np.allclose(ones, np.ones(ones.shape))

        else:
            raise ValueError('prediction has too many dimensions')

        if not valid_probs:
            raise ValueError('predictions are not valid probabilities')

    return X, Y
class ImportanceTracker:
    '''For tracking feature importance using a dynamic average.

    Maintains a running mean and sum of squared deviations (Welford's online
    algorithm), with optional support for sparse updates where some feature
    dimensions receive no new samples in a batch.
    '''

    def __init__(self):
        # Running mean, scaled sum of squared deviations, and sample count
        # (scalar until the first update, then per-feature arrays).
        self.mean = 0
        self.sum_squares = 0
        self.N = 0

    def update(self, scores, num_samples=None):
        '''
        Update mean and sum of squares using Welford's algorithm.

        Args:
          scores: array of consisting of n samples with shape (n, dim).
          num_samples: array of size (dim,) representing the number of samples
            for each dimension. For sparse updates, with void samples
            represented by zeros.
        '''
        if num_samples is None:
            # Welford's algorithm.
            self.N += len(scores)
            diff = scores - self.mean
            self.mean += np.sum(diff, axis=0) / self.N
            diff2 = scores - self.mean
            self.sum_squares += np.sum(diff * diff2, axis=0)
        else:
            # Welford's algorithm with correction for void samples.
            assert num_samples.shape == scores.shape[1:]
            self.N = self.N + num_samples
            num_void = len(scores) - num_samples
            orig_mean = np.copy(self.mean)
            diff = scores - self.mean
            self.mean += (
                np.sum(diff, axis=0) +
                self.mean * num_void) / np.maximum(self.N, 1)
            diff2 = scores - self.mean
            self.sum_squares += (
                np.sum(diff * diff2, axis=0) -
                orig_mean * self.mean * num_void)

    @property
    def values(self):
        '''Current running mean of the scores.'''
        return self.mean

    @property
    def var(self):
        '''Variance of the mean estimate (not of the raw samples).'''
        return self.sum_squares / (np.maximum(self.N, 1) ** 2)

    @property
    def std(self):
        '''Standard deviation of the mean estimate.'''
        return self.var ** 0.5
class MSELoss:
    '''MSE loss that sums over non-batch dimensions.'''

    def __init__(self, reduction='mean'):
        assert reduction in ('none', 'mean')
        self.reduction = reduction

    def __call__(self, pred, target):
        # Align a trailing singleton dimension on whichever side is missing it.
        rank_gap = len(target.shape) - len(pred.shape)
        if target.shape[-1] == 1 and rank_gap == 1:
            pred = np.expand_dims(pred, -1)
        elif pred.shape[-1] == 1 and rank_gap == -1:
            target = np.expand_dims(target, -1)
        elif pred.shape != target.shape:
            raise ValueError('shape mismatch, pred has shape {} and target '
                             'has shape {}'.format(pred.shape, target.shape))

        # Sum squared error over all non-batch dimensions.
        squared = (pred - target) ** 2
        loss = squared.reshape(len(pred), -1).sum(axis=1)
        return np.mean(loss) if self.reduction == 'mean' else loss
class CrossEntropyLoss:
    '''Cross entropy loss that expects probabilities.'''

    def __init__(self, reduction='mean'):
        assert reduction in ('none', 'mean')
        self.reduction = reduction

    def __call__(self, pred, target, eps=1e-12):
        # Clip probabilities away from 0 and 1 for numerical stability.
        clipped = np.clip(pred, eps, 1 - eps)

        # Promote single-probability binary predictions to two explicit
        # columns: [P(class 0), P(class 1)].
        if clipped.ndim == 1:
            clipped = clipped[:, np.newaxis]
        if clipped.shape[1] == 1:
            clipped = np.append(1 - clipped, clipped, axis=1)

        if target.ndim == 1:
            # Integer class labels: pick out the predicted class probability.
            loss = - np.log(clipped[np.arange(len(clipped)), target])
        elif target.ndim == 2:
            # Probabilistic (soft) labels.
            loss = - np.sum(target * np.log(clipped), axis=1)
        else:
            raise ValueError('incorrect labels shape for cross entropy loss')

        return np.mean(loss) if self.reduction == 'mean' else loss
def get_loss(loss, reduction='mean'):
    '''Get loss function by name.

    Args:
      loss: loss name, either 'cross entropy' or 'mse'.
      reduction: 'mean' or 'none', forwarded to the loss constructor.
    '''
    if loss == 'cross entropy':
        return CrossEntropyLoss(reduction=reduction)
    if loss == 'mse':
        return MSELoss(reduction=reduction)
    raise ValueError('unsupported loss: {}'.format(loss))
def sample_subset_feature(input_size, n, ind):
    '''
    Sample a subset of features where a given feature index must not be
    included. This helper function is used for estimating Shapley values, so
    the subset is sampled by 1) sampling the number of features to be included
    from a uniform distribution, and 2) sampling the features to be included.
    '''
    # All candidate features except the excluded index.
    candidates = [i for i in range(input_size) if i != ind]
    S = np.zeros((n, input_size), dtype=bool)
    for i in range(n):
        subset_size = np.random.choice(input_size)
        chosen = np.random.choice(candidates, size=subset_size, replace=False)
        S[i, chosen] = 1
    return S
def safe_isinstance(obj, class_str):
    '''Check isinstance against a dotted class path without requiring imports.

    Returns False (rather than raising) when ``class_str`` is not a string,
    has no module component, or names a module/class that has not been
    imported anywhere in the process.

    Args:
      obj: object to check.
      class_str: fully qualified class name, e.g.
        'sklearn.base.ClassifierMixin'.
    '''
    if not isinstance(class_str, str):
        return False
    # Bug fix: a dotless string (e.g. 'int') previously raised ValueError
    # from rsplit unpacking; treat it as "not an instance" instead.
    if '.' not in class_str:
        return False
    module_name, class_name = class_str.rsplit('.', 1)
    if module_name not in sys.modules:
        return False
    module = sys.modules[module_name]
    class_type = getattr(module, class_name, None)
    if class_type is None:
        return False
    return isinstance(obj, class_type)
import numpy as np
from sage import utils, core
from tqdm.auto import tqdm
def estimate_total(imputer, X, Y, batch_size, loss_fn):
    '''Estimate sum of SAGE values.'''
    num_features = imputer.num_groups
    seen = 0
    loss_all = 0
    loss_none = 0
    num_batches = int(np.ceil(len(X) / batch_size))
    for batch in range(num_batches):
        x = X[batch * batch_size:(batch + 1) * batch_size]
        y = Y[batch * batch_size:(batch + 1) * batch_size]
        seen += len(x)

        # Running mean of the loss with all features present.
        full_mask = np.ones((len(x), num_features), dtype=bool)
        loss = loss_fn(imputer(x, full_mask), y)
        loss_all += np.sum(loss - loss_all) / seen

        # Running mean of the loss with no features present.
        empty_mask = np.zeros((len(x), num_features), dtype=bool)
        loss = loss_fn(imputer(x, empty_mask), y)
        loss_none += np.sum(loss - loss_none) / seen

    return loss_none - loss_all
def estimate_holdout_importance(imputer, X, Y, batch_size, loss_fn, batches, rng):
    '''Estimate the impact of holding out features individually.'''
    N = len(X)
    num_features = imputer.num_groups
    full_loss = 0
    holdout_importance = np.zeros(num_features)
    mask = np.ones((batch_size, num_features), dtype=bool)

    # Reuse the same minibatches for every feature (variance reduction).
    for it in range(batches):
        rows = rng.choice(N, batch_size)
        x = X[rows]
        y = Y[rows]

        # Running mean of the loss with all features present.
        full_loss += np.sum(loss_fn(imputer(x, mask), y) - full_loss) / (it + 1)

        # Running mean of the loss with each feature individually removed.
        for j in range(num_features):
            mask[:, j] = 0
            delta = loss_fn(imputer(x, mask), y) - holdout_importance[j]
            holdout_importance[j] += np.sum(delta) / (it + 1)
            mask[:, j] = 1

    return holdout_importance - full_loss
class IteratedEstimator:
    '''
    Estimate SAGE values one at a time by sampling subsets of features.

    Args:
      imputer: model that accommodates held out features.
      loss: loss function ('mse', 'cross entropy').
      random_state: random seed, enables reproducibility.
    '''

    def __init__(self,
                 imputer,
                 loss='cross entropy',
                 random_state=None):
        self.imputer = imputer
        self.loss_fn = utils.get_loss(loss, reduction='none')
        self.random_state = random_state

    def __call__(self,
                 X,
                 Y=None,
                 batch_size=512,
                 detect_convergence=True,
                 thresh=0.025,
                 n_samples=None,
                 optimize_ordering=True,
                 ordering_batches=1,
                 verbose=False,
                 bar=True):
        '''
        Estimate SAGE values.

        Args:
          X: input data.
          Y: target data. If None, model output will be used.
          batch_size: number of examples to be processed in parallel, should be
            set to a large value.
          detect_convergence: whether to stop when approximately converged.
          thresh: threshold for determining convergence
          n_samples: number of samples to take per feature.
          optimize_ordering: whether to guess an ordering of features based on
            importance. May accelerate convergence.
          ordering_batches: number of minibatches while determining ordering.
          verbose: print progress messages.
          bar: display progress bar.

        The default behavior is to detect each feature's convergence based on
        the ratio of its standard deviation to the gap between the largest and
        smallest values. Since neither value is known initially, we begin with
        estimates (upper_val, lower_val) and update them as more features are
        analyzed.

        Returns: Explanation object.
        '''
        # Set random state.
        self.rng = np.random.default_rng(seed=self.random_state)

        # Determine explanation type.
        if Y is not None:
            explanation_type = 'SAGE'
        else:
            explanation_type = 'Shapley Effects'

        # Verify model.
        N, _ = X.shape
        num_features = self.imputer.num_groups
        X, Y = utils.verify_model_data(self.imputer, X, Y, self.loss_fn,
                                       batch_size)

        # Possibly force convergence detection.
        if n_samples is None:
            n_samples = 1e20
            if not detect_convergence:
                detect_convergence = True
                if verbose:
                    print('Turning convergence detection on')
        if detect_convergence:
            assert 0 < thresh < 1

        # For detecting convergence.
        total = estimate_total(self.imputer, X, Y, batch_size, self.loss_fn)
        upper_val = max(total / num_features, 0)
        lower_val = 0

        # Feature ordering.
        if optimize_ordering:
            if verbose:
                print('Determining feature ordering...')
            holdout_importance = estimate_holdout_importance(
                self.imputer, X, Y, batch_size, self.loss_fn, ordering_batches,
                self.rng)
            if verbose:
                print('Done')
            # Use np.abs in case there are large negative contributors.
            ordering = list(np.argsort(np.abs(holdout_importance))[::-1])
        else:
            ordering = list(range(num_features))

        # Set up bar.
        n_loops = int(n_samples / batch_size)
        if bar:
            if detect_convergence:
                bar = tqdm(total=1)
            else:
                bar = tqdm(total=n_loops * batch_size * num_features)

        # Iterated sampling.
        tracker_list = []
        for i, ind in enumerate(ordering):
            tracker = utils.ImportanceTracker()
            for it in range(n_loops):
                # Sample data. Bug fix: use the seeded generator rather than
                # the global np.random so random_state is actually respected.
                mb = self.rng.choice(N, batch_size)
                x = X[mb]
                y = Y[mb]

                # Sample subset of features.
                S = utils.sample_subset_feature(num_features, batch_size, ind)

                # Loss with feature excluded.
                y_hat = self.imputer(x, S)
                loss_discluded = self.loss_fn(y_hat, y)

                # Loss with feature included.
                S[:, ind] = 1
                y_hat = self.imputer(x, S)
                loss_included = self.loss_fn(y_hat, y)

                # Calculate delta sample.
                tracker.update(loss_discluded - loss_included)
                if bar and (not detect_convergence):
                    bar.update(batch_size)

                # Calculate progress.
                std = tracker.std.item()
                gap = (
                    max(upper_val, tracker.values.item()) -
                    min(lower_val, tracker.values.item()))
                gap = max(gap, 1e-12)
                ratio = std / gap

                # Print progress message.
                if verbose:
                    if detect_convergence:
                        print(f'StdDev Ratio = {ratio:.4f} '
                              f'(Converge at {thresh:.4f})')
                    else:
                        print('StdDev Ratio = {:.4f}'.format(ratio))

                # Check for convergence.
                if detect_convergence:
                    if ratio < thresh:
                        if verbose:
                            print('Detected feature convergence')

                        # Skip bar ahead.
                        if bar:
                            bar.n = np.around(
                                bar.total * (i + 1) / num_features, 4)
                            bar.refresh()
                        break

                # Update convergence estimation.
                if bar and detect_convergence:
                    N_est = (it + 1) * (ratio / thresh) ** 2
                    bar.n = np.around((i + (it + 1) / N_est) / num_features, 4)
                    bar.refresh()

            if verbose:
                print(f'Done with feature {i}')
            tracker_list.append(tracker)

            # Adjust min max value.
            upper_val = max(upper_val, tracker.values.item())
            lower_val = min(lower_val, tracker.values.item())

        if bar:
            bar.close()

        # Extract SAGE values, restoring the original feature order.
        reverse_ordering = [ordering.index(ind) for ind in range(num_features)]
        values = np.array(
            [tracker_list[ind].values.item() for ind in reverse_ordering])
        std = np.array(
            [tracker_list[ind].std.item() for ind in reverse_ordering])

        return core.Explanation(values, std, explanation_type)
import numpy as np
from sage import utils, core
from tqdm.auto import tqdm
def calculate_A(num_features):
    '''Calculate A parameter's exact form.'''
    # Probability that two distinct features co-occur in a sampled subset,
    # under the Shapley kernel weighting over subset sizes.
    inner_sizes = np.arange(2, num_features)
    numerator = np.sum(
        (inner_sizes - 1) / (num_features - inner_sizes))
    all_sizes = np.arange(1, num_features)
    normalization = (num_features * (num_features - 1) *
                     np.sum(1 / (all_sizes * (num_features - all_sizes))))
    p_coaccur = numerator / normalization

    # Diagonal entries are the marginal inclusion probability (1/2).
    identity = np.eye(num_features)
    return identity * 0.5 + (1 - identity) * p_coaccur
def estimate_constraints(imputer, X, Y, batch_size, loss_fn):
    '''
    Estimate loss when no features are included and when all features are
    included. This is used to enforce constraints.
    '''
    num_features = imputer.num_groups
    seen = 0
    loss_all = 0
    loss_none = 0
    for batch in range(int(np.ceil(len(X) / batch_size))):
        x = X[batch * batch_size:(batch + 1) * batch_size]
        y = Y[batch * batch_size:(batch + 1) * batch_size]
        seen += len(x)

        # Running mean of the loss with all features (grand coalition).
        ones_mask = np.ones((len(x), num_features), dtype=bool)
        loss = loss_fn(imputer(x, ones_mask), y)
        loss_all += np.sum(loss - loss_all) / seen

        # Running mean of the loss with no features (null coalition).
        zeros_mask = np.zeros((len(x), num_features), dtype=bool)
        loss = loss_fn(imputer(x, zeros_mask), y)
        loss_none += np.sum(loss - loss_none) / seen

    # Game values are negative losses: (null value, grand value).
    return - loss_none, - loss_all
def calculate_result(A, b, total, b_sum_squares, n):
    '''Calculate regression coefficients and uncertainty estimates.'''
    num_features = A.shape[1]

    # Constrained least squares: coefficients must sum to `total`.
    A_inv_one = np.linalg.solve(A, np.ones(num_features))
    A_inv_vec = np.linalg.solve(A, b)
    correction = (np.sum(A_inv_vec) - total) / np.sum(A_inv_one)
    values = A_inv_vec - A_inv_one * correction

    # Propagate the uncertainty in b through to the coefficients.
    try:
        symmetrized = 0.5 * (b_sum_squares + b_sum_squares.T)
        b_cov = symmetrized / (n ** 2)
        # TODO this fails in situations where model is invariant to features.
        cholesky = np.linalg.cholesky(b_cov)
        L = (np.linalg.solve(A, cholesky)
             + np.matmul(np.outer(A_inv_one, A_inv_one), cholesky)
             / np.sum(A_inv_one))
        beta_cov = np.matmul(L, L.T)
        std = np.diag(beta_cov) ** 0.5
    except np.linalg.LinAlgError:
        # b_cov likely is not PSD due to insufficient samples.
        std = np.full(num_features, np.nan)

    return values, std
class KernelEstimator:
    '''
    Estimate SAGE values by fitting weighted linear model.

    This is an unbiased estimator designed for stochastic cooperative games,
    described in https://arxiv.org/abs/2012.01536

    Args:
      imputer: model that accommodates held out features.
      loss: loss function ('mse', 'cross entropy').
      random_state: random seed, enables reproducibility.
    '''

    def __init__(self,
                 imputer,
                 loss='cross entropy',
                 random_state=None):
        self.imputer = imputer
        self.loss_fn = utils.get_loss(loss, reduction='none')
        self.random_state = random_state

    def __call__(self,
                 X,
                 Y=None,
                 batch_size=512,
                 detect_convergence=True,
                 thresh=0.025,
                 n_samples=None,
                 verbose=False,
                 bar=True,
                 check_every=5):
        '''
        Estimate SAGE values by fitting linear regression model.

        Args:
          X: input data.
          Y: target data. If None, model output will be used.
          batch_size: number of examples to be processed in parallel, should be
            set to a large value.
          detect_convergence: whether to stop when approximately converged.
          thresh: threshold for determining convergence.
          n_samples: number of permutations to unroll.
          verbose: print progress messages.
          bar: display progress bar.
          check_every: number of batches between convergence checks.

        The default behavior is to detect convergence based on the width of the
        SAGE values' confidence intervals. Convergence is defined by the ratio
        of the maximum standard deviation to the gap between the largest and
        smallest values.

        Returns: Explanation object.
        '''
        # Set random state.
        self.rng = np.random.default_rng(seed=self.random_state)

        # Determine explanation type.
        if Y is not None:
            explanation_type = 'SAGE'
        else:
            explanation_type = 'Shapley Effects'

        # Verify model.
        N, _ = X.shape
        num_features = self.imputer.num_groups
        X, Y = utils.verify_model_data(
            self.imputer, X, Y, self.loss_fn, batch_size)

        # Possibly force convergence detection.
        if n_samples is None:
            n_samples = 1e20
            if not detect_convergence:
                detect_convergence = True
                if verbose:
                    print('Turning convergence detection on')
        if detect_convergence:
            assert 0 < thresh < 1

        # Weighting kernel (probability of each subset size).
        weights = np.arange(1, num_features)
        weights = 1 / (weights * (num_features - weights))
        weights = weights / np.sum(weights)

        # Estimate null and grand coalition values.
        null, grand = estimate_constraints(
            self.imputer, X, Y, batch_size, self.loss_fn)
        total = grand - null

        # Set up bar.
        n_loops = int(n_samples / batch_size)
        if bar:
            if detect_convergence:
                bar = tqdm(total=1)
            else:
                bar = tqdm(total=n_loops * batch_size)

        # Setup.
        A = calculate_A(num_features)
        n = 0
        b = 0
        b_sum_squares = 0

        # Sample subsets.
        for it in range(n_loops):
            # Sample data.
            mb = self.rng.choice(N, batch_size)
            x = X[mb]
            y = Y[mb]

            # Sample subsets (size drawn from the Shapley kernel weights).
            S = np.zeros((batch_size, num_features), dtype=bool)
            num_included = self.rng.choice(num_features - 1, size=batch_size,
                                           p=weights) + 1
            for row, num in zip(S, num_included):
                inds = self.rng.choice(num_features, size=num, replace=False)
                row[inds] = 1

            # Calculate loss.
            y_hat = self.imputer(x, S)
            loss = - self.loss_fn(y_hat, y) - null
            b_orig = S.astype(float) * loss[:, np.newaxis]

            # Calculate loss with inverted subset (for variance reduction).
            S = np.logical_not(S)
            y_hat = self.imputer(x, S)
            loss = - self.loss_fn(y_hat, y) - null
            b_inv = S.astype(float) * loss[:, np.newaxis]

            # Welford's algorithm for the running mean and covariance of b.
            n += batch_size
            b_sample = 0.5 * (b_orig + b_inv)
            b_diff = b_sample - b
            b += np.sum(b_diff, axis=0) / n
            b_diff2 = b_sample - b
            b_sum_squares += np.sum(
                np.matmul(np.expand_dims(b_diff, 2),
                          np.expand_dims(b_diff2, 1)), axis=0)

            # Update bar (if not detecting convergence).
            if bar and (not detect_convergence):
                bar.update(batch_size)

            if (it + 1) % check_every == 0:
                # Calculate progress (guard against a zero gap).
                values, std = calculate_result(A, b, total, b_sum_squares, n)
                gap = max(values.max() - values.min(), 1e-12)
                ratio = std.max() / gap

                # Print progress message.
                if verbose:
                    if detect_convergence:
                        print(f'StdDev Ratio = {ratio:.4f} '
                              f'(Converge at {thresh:.4f})')
                    else:
                        print(f'StdDev Ratio = {ratio:.4f}')

                # Check for convergence.
                if detect_convergence:
                    if ratio < thresh:
                        if verbose:
                            print('Detected convergence')

                        # Skip bar ahead.
                        if bar:
                            bar.n = bar.total
                            bar.refresh()
                        break

                # Update convergence estimation.
                if bar and detect_convergence:
                    N_est = (it + 1) * (ratio / thresh) ** 2
                    bar.n = np.around((it + 1) / N_est, 4)
                    bar.refresh()

        # Bug fix: close the bar after sampling (the other estimators do this;
        # leaving it open corrupts subsequent terminal/notebook output).
        if bar:
            bar.close()

        # Calculate SAGE values.
        values, std = calculate_result(A, b, total, b_sum_squares, n)

        return core.Explanation(np.squeeze(values), std, explanation_type)
import numpy as np
import warnings
from sage import utils
class Imputer:
    '''Imputer base class.

    Wraps the user-supplied model as a plain prediction callable; subclasses
    implement __call__(x, S) to evaluate the model with the feature subset
    indicated by the boolean mask S.
    '''

    def __init__(self, model):
        # Normalize the model into a callable via utils.model_conversion.
        self.model = utils.model_conversion(model)

    def __call__(self, x, S):
        raise NotImplementedError
class DefaultImputer(Imputer):
    '''Replace features with default values.

    Args:
      model: prediction model (converted by the base class).
      values: default feature values, shape (dim,) or (1, dim).
    '''

    def __init__(self, model, values):
        super().__init__(model)
        if values.ndim == 1:
            values = values[np.newaxis]
        elif values.shape[0] != 1:
            # Bug fix: previously compared `values[0] != 1` (an elementwise
            # array comparison), which raised "truth value ambiguous" even for
            # valid (1, dim) inputs instead of validating the shape.
            raise ValueError('values shape must be (dim,) or (1, dim)')
        self.values = values
        # Cached broadcast of `values`, resized lazily to the batch size.
        self.values_repeat = values
        self.num_groups = values.shape[1]

    def __call__(self, x, S):
        # Prepare x.
        if len(x) != len(self.values_repeat):
            self.values_repeat = self.values.repeat(len(x), 0)

        # Replace specified indices (S == False means "held out").
        x_ = x.copy()
        x_[~S] = self.values_repeat[~S]

        # Make predictions.
        return self.model(x_)
class MarginalImputer(Imputer):
    '''Marginalizing out removed features with their marginal distribution.

    Held-out features are replaced by every row of the background `data` and
    the model's predictions are averaged over those samples.

    Args:
      model: prediction model (converted by the base class).
      data: background dataset of shape (samples, dim).
    '''

    def __init__(self, model, data):
        super().__init__(model)
        self.data = data
        # Cached tiling of `data`, resized lazily to the batch size.
        self.data_repeat = data
        self.samples = len(data)
        self.num_groups = data.shape[1]
        if len(data) > 1024:
            warnings.warn('using {} background samples may lead to slow '
                          'runtime, consider using <= 1024'.format(
                            len(data)), RuntimeWarning)

    def __call__(self, x, S):
        # Prepare x and S: each input row is paired with every background row.
        n = len(x)
        x = x.repeat(self.samples, 0)
        S = S.repeat(self.samples, 0)

        # Prepare samples.
        if len(self.data_repeat) != self.samples * n:
            self.data_repeat = np.tile(self.data, (n, 1))

        # Replace specified indices (S == False means "held out").
        x_ = x.copy()
        x_[~S] = self.data_repeat[~S]

        # Make predictions and average over the background samples.
        pred = self.model(x_)
        pred = pred.reshape(-1, self.samples, *pred.shape[1:])
        return np.mean(pred, axis=1)
import os
import pandas as pd
github_data_url = 'https://github.com/iancovert/sage/raw/master/data/'
def airbnb():
    '''
    Airbnb listing data from Kaggle.

    Located at: https://www.kaggle.com/dgomonov/new-york-city-airbnb-open-data
    '''
    path = os.path.join(github_data_url, 'AB_NYC_2019.csv')
    df = pd.read_csv(path, header=0, index_col=None)
    # Normalize column dtypes.
    for col in ('name', 'host_name'):
        df[col] = df[col].astype(str)
    df['last_review'] = pd.to_datetime(df['last_review'])
    return df
def bank():
    '''
    Bank marketing data from UCI dataset repository.

    Located at: https://archive.ics.uci.edu/ml/datasets/bank+marketing
    '''
    column_names = [
        'Age', 'Job', 'Marital', 'Education', 'Default', 'Balance', 'Housing',
        'Loan', 'Contact', 'Day', 'Month', 'Duration', 'Campaign', 'Prev Days',
        'Prev Contacts', 'Prev Outcome', 'Success']
    path = os.path.join(github_data_url, 'bank-full.csv')
    df = pd.read_csv(path, sep=';', header=None, index_col=None,
                     skiprows=1, names=column_names)
    # Binarize the label column.
    df['Success'] = (df['Success'] == 'yes')
    return df
def bike():
    '''
    Bike sharing dataset from Kaggle competition.

    Located at: https://www.kaggle.com/c/bike-sharing-demand
    '''
    path = os.path.join(github_data_url, 'bike.csv')
    df = pd.read_csv(path, header=0, index_col=None)
    original_columns = df.columns.tolist()
    # Expand the datetime column into year/month/day/hour, then drop it.
    timestamps = pd.to_datetime(df['datetime'])
    df['year'] = timestamps.dt.year
    df['month'] = timestamps.dt.month
    df['day'] = timestamps.dt.day
    df['hour'] = timestamps.dt.hour
    df = df.drop('datetime', axis=1)
    # Put the date parts first, keep the remaining original order.
    df = df[['year', 'month', 'day', 'hour'] + original_columns[1:]]
    df.columns = [name.title() for name in df.columns]
    return df
def credit():
    '''
    German credit quality dataset from UCI dataset repository.

    Located at: https://archive.ics.uci.edu/ml/datasets/South+German+Credit+%28UPDATE%29
    '''
    column_names = [
        'Checking Status', 'Duration', 'Credit History', 'Purpose',
        'Credit Amount', 'Savings Account/Bonds', 'Employment Since',
        'Installment Rate', 'Personal Status', 'Debtors/Guarantors',
        'Residence Duration', 'Property Type', 'Age',
        'Other Installment Plans', 'Housing Ownership',
        'Number Existing Credits', 'Job', 'Number Liable', 'Telephone',
        'Foreign Worker', 'Good Customer'
    ]
    path = os.path.join(github_data_url, 'SouthGermanCredit.asc')
    df = pd.read_csv(path, sep=' ', header=None, index_col=None,
                     names=column_names, skiprows=1)
    return df
# sage-numerical-backends-gurobi: Gurobi mixed integer linear programming backend for SageMath
[](https://pypi.org/project/sage-numerical-backends-gurobi/ "PyPI: sage-numerical-backends-gurobi")
`GurobiBackend` has previously been available as part of the [SageMath](http://www.sagemath.org/) source tree,
from which it is built as an "optional extension" if the proprietary Gurobi library and header files have been symlinked into `$SAGE_LOCAL` manually.
Because of the proprietary nature of the Gurobi software, `GurobiBackend` is not available in any binary distributions of SageMath.
The present standalone Python package `sage-numerical-backends-gurobi` has been created from the SageMath sources, version 9.0.beta10; the in-tree version of `GurobiBackend` has been removed in [Sage ticket #28175](https://trac.sagemath.org/ticket/28175). The present package can be installed on top of various Sage installations using `pip`, including older versions of Sage such as 8.1 (as shipped by Ubuntu bionic 18.04LTS).
## Installation of Gurobi
Install Gurobi according to the instructions on the website,
which includes obtaining a license key.
- On a Linux system, after unpacking the Gurobi archive in the desired location,
such as `/opt`, set the environment variable `GUROBI_HOME` to the directory containing the subdirectories `bin`, `lib`, ...:
$ export GUROBI_HOME=/opt/gurobi900/linux64
Then adjust your `PATH` (or create symbolic links) so that the interactive Gurobi shell `gurobi.sh` can be found from your `PATH`:
$ export PATH="$GUROBI_HOME/bin:$PATH"
- On macOS, the Gurobi installer should make the interactive Gurobi shell ``gurobi.sh`` available in `/usr/local/bin` and therefore from your ``PATH``.
Verify this by typing the shell command ``gurobi.sh``:
$ gurobi.sh
Python 3.7.4 (default, Aug 27 2019, 11:27:39)
...
Gurobi Interactive Shell (mac64), Version 9.0.0
Copyright (c) 2019, Gurobi Optimization, LLC
Type "help()" for help
gurobi>
If this does not work, adjust your ``PATH`` (or create symbolic links) so
that ``gurobi.sh`` is found.
## Installation of this package
This package finds the Gurobi installation using the `GUROBI_HOME` environment variable. (On macOS, it suffices to have `gurobi.sh` in your ``PATH``.)
An alternative method of build configuration is to set compiler/linker flags appropriately.
In [SageMath 9.1 and newer](https://wiki.sagemath.org/ReleaseTours/sage-9.1#Easier_installation_of_optional_linear_and_mixed_integer_linear_optimization_backends), this package is available as an optional SPKG and can be installed using
$ sage -i sage_numerical_backends_gurobi
Alternatively, you can install this package from PyPI using
$ sage -python -m pip install sage-numerical-backends-gurobi
or from a checked out source tree using
$ sage -python -m pip install .
or from GitHub using
$ sage -python -m pip install git+https://github.com/sagemath/sage-numerical-backends-gurobi
(See [`build.yml` in the related package sage-numerical-backends-coin package](https://github.com/sagemath/sage-numerical-backends-coin/blob/master/.github/workflows/build.yml) for details about package prerequisites on various systems.)
## Using this package
To obtain a solver (backend) instance:
sage: from sage_numerical_backends_gurobi.gurobi_backend import GurobiBackend
sage: GurobiBackend()
<sage_numerical_backends_gurobi.gurobi_backend.GurobiBackend object at 0x7fb72c2c7528>
Equivalently:
sage: from sage_numerical_backends_gurobi.gurobi_backend import GurobiBackend
sage: from sage.numerical.backends.generic_backend import get_solver
sage: get_solver(solver=GurobiBackend)
<sage_numerical_backends_gurobi.gurobi_backend.GurobiBackend object at 0x7fe21ffbe2b8>
To use this solver (backend) with [`MixedIntegerLinearProgram`](http://doc.sagemath.org/html/en/reference/numerical/sage/numerical/mip.html):
sage: from sage_numerical_backends_gurobi.gurobi_backend import GurobiBackend
sage: M = MixedIntegerLinearProgram(solver=GurobiBackend)
sage: M.get_backend()
<sage_numerical_backends_gurobi.gurobi_backend.GurobiBackend object at 0x7fb72c2c7868>
To make it available as the solver named `'Gurobi'`, we need to make the new module
known as `sage.numerical.backends.gurobi_backend` (note dots, not underscores), using
the following commands:
sage: import sage_numerical_backends_gurobi.gurobi_backend as gurobi_backend, sage.numerical.backends as backends, sys
sage: sys.modules['sage.numerical.backends.gurobi_backend'] = backends.gurobi_backend = gurobi_backend
If these commands are executed in a Sage session before any `MixedIntegerLinearProgram` is created, then
the new `'Gurobi'` solver wins over the `'GLPK'` solver in the selection of the default MIP backend.
To select the `'Gurobi'` solver explicitly as the default MIP backend, additionally use the following command.
sage: default_mip_solver('Gurobi')
To make these settings permanent, add the above 2 + 1 commands to your `~/.sage/init.sage` file.
Note that this setting will not affect doctesting (`sage -t`) because this file is ignored in doctesting mode.
## Running doctests
To run the (limited) testsuite of this package, use:
$ sage setup.py test
If no Gurobi license is available, the testing is skipped without error.
To run the Sage testsuite with the default MIP solver set to the backend provided by this package, use:
$ sage setup.py check_sage_testsuite
If no Gurobi license is available, the testing is skipped without error.
## Running tests with tox
The doctests can also be invoked using `tox`:
$ tox -e local
$ tox -e local check_sage_testsuite.py
Testing multiple installed Gurobi versions in parallel (see `tox.ini`):
$ tox -p auto
## Overriding the default solver by patching the Sage installation
Another method is to patch the module in permanently to the sage installation (at your own risk).
This method will affect doctesting.
$ sage -c 'import os; import sage.numerical.backends as dm; import sage_numerical_backends_gurobi.gurobi_backend as sm; s = sm.__file__; f = os.path.basename(s); d = os.path.join(dm.__path__[0], f); (os.path.exists(d) or os.path.lexists(d)) and os.remove(d); os.symlink(s, d);'
Or use the script [`patch_into_sage_module.py`](patch_into_sage_module.py) in the source distribution that does the same:
$ sage -c 'load("patch_into_sage_module.py")'
Success: Patched in the module as sage.numerical.backends.gurobi_backend
Verify with [`check_get_solver_with_name.py`](check_get_solver_with_name.py) that the patching script has worked:
$ sage -c 'load("check_get_solver_with_name.py")'
Success: get_solver(solver='gurobi') gives <sage_numerical_backends_gurobi.gurobi_backend.GurobiBackend object at 0x7f8f20218528>
| /sage_numerical_backends_gurobi-9.3.1.tar.gz/sage_numerical_backends_gurobi-9.3.1/README.md | 0.890109 | 0.737087 | README.md | pypi |
import sage.numerical.backends.glpk_backend as backend
from sage.numerical.backends.glpk_backend \
import glp_bs, glp_nl, glp_nu
from sage.modules.all import vector
from sage_numerical_interactive_mip.backends.abstract_backend_dictionary \
import LPAbstractBackendDictionary
from sage.numerical.interactive_simplex_method import variable
class LPGLPKBackendDictionary(LPAbstractBackendDictionary):
r"""
Construct a dictionary for an LP problem from an backend.
INPUT:
- ``backend`` -- the backend where the dictionary is
constructed from
OUTPUT:
- a :class:`backend dictionary for an LP problem
<LPGLPKBackendDictionary>`
EXAMPLES:
One needs an instance of :class:`GLPKBackend` to initialize
this class::
sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
import LPGLPKBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True, solver="GLPK")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(-x[0] + x[1] <= 2)
sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
sage: b = p.get_backend()
sage: d = LPGLPKBackendDictionary(b)
sage: d
LP problem dictionary (use typeset mode to see details)
"""
    def __init__(self, backend):
        r"""
        See :class:`LPGLPKBackendDictionary` for documentation.

        TESTS::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(-x[0] + x[1] <= 2)
            sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
            sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
            sage: b = p.get_backend()
            sage: d = LPGLPKBackendDictionary(b)
            sage: TestSuite(d).run(skip=['_test_pickling'])

        An exception will be raised if the problem is not in standard form
        i.e. with <= constraints and >= 0 variable bounds::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(8 * x[0] + 2 * x[1], min=17)
            sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
            sage: b = p.get_backend()
            sage: d = LPGLPKBackendDictionary(b)
            Traceback (most recent call last):
            ...
            AttributeError: Problem constraints not in standard form.
        """
        # All validation (standard-form check) and state setup is done by
        # the abstract base class; this subclass adds no extra state.
        super(LPGLPKBackendDictionary, self).__init__(backend)
    def basic_variables(self):
        r"""
        Return the basic variables of ``self``.

        OUTPUT:

        - a vector

        EXAMPLES:

        Setting up the problem::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(-x[0] + x[1] <= 2)
            sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
            sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0

        Use function in :class:`LPGLPKBackendDictionary`::

            sage: d = LPGLPKBackendDictionary(b)

        Use function in :class:`InteractiveLPProblem`::

            sage: lp, basis = p.interactive_lp_problem()
            sage: lpd = lp.dictionary(*basis)

        Compare results::

            sage: d.basic_variables()
            (x_0, x_1)
            sage: lpd.basic_variables()
            (x_0, x_1)
        """
        # Structural (column) variables currently in the basis: GLPK status
        # ``glp_bs`` marks a basic variable.
        col_basics = tuple(
            self._x[i]
            for i in range(self._backend.ncols())
            if self._backend.get_col_stat(i) == glp_bs
        )
        # Auxiliary (slack/row) variables in the basis; in ``self._x`` they
        # are stored after the ``ncols()`` structural variables.
        row_basics = tuple(
            self._x[i + self._backend.ncols()]
            for i in range(self._backend.nrows())
            if self._backend.get_row_stat(i) == glp_bs
        )
        return vector(col_basics + row_basics)
    def constant_terms(self):
        r"""
        Return the constant terms of relations of ``self``.

        OUTPUT:

        - a vector.

        EXAMPLES::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(-x[0] + x[1] <= 2)
            sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
            sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0
            sage: d = LPGLPKBackendDictionary(b)
            sage: d.constant_terms()
            (1.3, 3.3)
        """
        # For a basic structural variable the constant term is its primal
        # value in the current solution.
        col_const = tuple(
            self._backend.get_variable_value(i)
            for i in range(self._backend.ncols())
            if self._backend.get_col_stat(i) == glp_bs
        )
        # For a basic auxiliary variable (the slack of a <= row) it is the
        # row's upper bound minus the row activity, i.e. the slack value.
        row_const = tuple(
            self._backend.row_bounds(i)[1] - self._backend.get_row_prim(i)
            for i in range(self._backend.nrows())
            if self._backend.get_row_stat(i) == glp_bs
        )
        return vector(col_const + row_const)
    def column_coefficients(self, v):
        r"""
        Return coefficients of a nonbasic variable.

        INPUT:

        - ``v`` -- a nonbasic variable of ``self``, can be given as a string, an
          actual variable, or an integer interpreted as the index of a variable

        OUTPUT:

        - a vector

        EXAMPLES::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0
            sage: d = LPGLPKBackendDictionary(b)
            sage: vars = d.nonbasic_variables()
            sage: vars
            (x_0, x_1, w_0, w_2)
            sage: d.enter(vars[0])
            sage: d.entering_coefficients()  # indirect doctest
            (5.0, 36.0, 26.0)
            sage: d.enter(vars[1])
            sage: d.entering_coefficients()  # indirect doctest
            (0.0, 1.0, 2.0)
        """
        if v is not None:
            v = variable(self.coordinate_ring(), v)
            if v not in self.nonbasic_variables():
                raise ValueError("variable must be nonbasic")
        index = tuple(self._x).index(v)

        # Reverse signs for auxiliary variables: the dictionary convention
        # and GLPK's tableau convention differ in sign for row variables.
        def reverse_sign_for_auxiliary(i_v):
            i, v = i_v
            return (i, v) if i < self._backend.nrows() else (i, -v)

        # NOTE: the tableau routine indexes auxiliary (row) variables before
        # structural (column) ones, hence the +nrows / -ncols offsets below
        # when converting from ``self._x`` ordering (columns first).
        if index < self._backend.ncols():
            tab_col = map(reverse_sign_for_auxiliary,
                          zip(*self._backend.eval_tab_col(
                              index + self._backend.nrows())))
        else:
            tab_col = map(reverse_sign_for_auxiliary,
                          zip(*self._backend.eval_tab_col(
                              index - self._backend.ncols())))
        # Sort the coefficients so coefficients of
        # problem variables comes first (order of ``basic_variables()``).
        l = [0] * (self._backend.nrows())
        for (i, v) in tab_col:
            if i < self._backend.nrows():
                symbol = self._x[i + self._backend.ncols()]
            else:
                symbol = self._x[i - self._backend.nrows()]
            pos = tuple(self.basic_variables()).index(symbol)
            l[pos] = v
        return vector(l)
    def row_coefficients(self, v):
        r"""
        Return coefficients of the basic variable ``v``.

        These are the coefficients with which nonbasic variables are subtracted
        in the relation for ``v``.

        INPUT:

        - ``v`` -- a basic variable of ``self``, can be given as a string, an
          actual variable, or an integer interpreted as the index of a variable

        OUTPUT:

        - a vector

        EXAMPLES::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0
            sage: d = LPGLPKBackendDictionary(b)
            sage: vars = d.basic_variables()
            sage: vars
            (x_2, x_3, w_1)
            sage: d.leave(vars[0])
            sage: d.leaving_coefficients()  # indirect doctest
            (5.0, 0.0, 0.0, 1.0)
            sage: d.leave(vars[1])
            sage: d.leaving_coefficients()  # indirect doctest
            (36.0, 1.0, 1.0, 7.0)
        """
        if v is not None:
            v = variable(self.coordinate_ring(), v)
            if v not in self.basic_variables():
                raise ValueError("variable must be basic")
        index = tuple(self._x).index(v)

        # Reverse signs for auxiliary variables; note the two helpers flip
        # opposite halves of the index range, matching the two cases below.
        def reverse_sign_for_auxiliary(i_v):
            i, v = i_v
            return (i, v) if i < self._backend.nrows() else (i, -v)

        def reverse_sign_for_nonauxiliary(i_v):
            i, v = i_v
            return (i, -v) if i < self._backend.nrows() else (i, v)

        # NOTE: the tableau routine indexes auxiliary (row) variables before
        # structural ones, hence the +nrows / -ncols offsets when converting
        # from ``self._x`` ordering (columns first).
        if index < self._backend.ncols():
            raw_row = self._backend.eval_tab_row(
                index + self._backend.nrows())
            tab_row = map(reverse_sign_for_auxiliary, zip(*raw_row))
        else:
            raw_row = self._backend.eval_tab_row(
                index - self._backend.ncols())
            tab_row = map(reverse_sign_for_nonauxiliary, zip(*raw_row))
        # Scatter the coefficients into the order of ``nonbasic_variables()``.
        l = [0] * (self._backend.ncols())
        for (i, v) in tab_row:
            if i < self._backend.nrows():
                symbol = self._x[i + self._backend.ncols()]
            else:
                symbol = self._x[i - self._backend.nrows()]
            pos = tuple(self.nonbasic_variables()).index(symbol)
            l[pos] = v
        return vector(l)
    def nonbasic_variables(self):
        r"""
        Return non-basic variables of ``self``.

        OUTPUT:

        - a vector

        EXAMPLES:

        Setting up the problem::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(-x[0] + x[1] <= 2)
            sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
            sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0

        Use function in :class:`LPGLPKBackendDictionary`::

            sage: d = LPGLPKBackendDictionary(b)

        Use function in :class:`InteractiveLPProblem`::

            sage: lp, basis = p.interactive_lp_problem()
            sage: lpd = lp.dictionary(*basis)

        Compare results:

            sage: d.nonbasic_variables()
            (w_0, w_1)
            sage: lpd.nonbasic_variables()
            (w_0, w_1)
        """
        # Complement of ``basic_variables()``: any status other than
        # ``glp_bs`` means the variable is out of the basis.
        col_nonbasics = tuple(
            self._x[i]
            for i in range(self._backend.ncols())
            if self._backend.get_col_stat(i) != glp_bs
        )
        row_nonbasics = tuple(
            self._x[i + self._backend.ncols()]
            for i in range(self._backend.nrows())
            if self._backend.get_row_stat(i) != glp_bs
        )
        return vector(col_nonbasics + row_nonbasics)
    def objective_coefficients(self):
        r"""
        Return coefficients of the objective of ``self``.

        OUTPUT:

        - a vector

        EXAMPLES:

        Setting up the problem::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(-x[0] + x[1] <= 2)
            sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
            sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0

        Use function in :class:`LPGLPKBackendDictionary`::

            sage: d = LPGLPKBackendDictionary(b)

        Use function in :class:`InteractiveLPProblem`::

            sage: lp, basis = p.interactive_lp_problem()
            sage: lpd = lp.dictionary(*basis)

        Compare results::

            sage: d.objective_coefficients()  # rel tol 1e-9
            (-0.58, -0.76)
            sage: lpd.objective_coefficients()  # rel tol 1e-9
            (-0.58, -0.76)
        """
        # Reduced costs of the nonbasic variables.  Row duals are negated
        # relative to column duals to match the dictionary sign convention.
        col_coefs = tuple(
            self._backend.get_col_dual(i)
            for i in range(self._backend.ncols())
            if self._backend.get_col_stat(i) != glp_bs
        )
        row_coefs = tuple(
            -self._backend.get_row_dual(i)
            for i in range(self._backend.nrows())
            if self._backend.get_row_stat(i) != glp_bs
        )
        return vector(col_coefs + row_coefs)
def objective_name(self):
r"""
Return the objective name of ``self``.
OUTPUT:
- a symbolic expression
"""
return SR("obj")
    def update(self):
        r"""
        Update ``self`` using previously set entering and leaving variables.

        EXAMPLES::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0
            sage: d = LPGLPKBackendDictionary(b)
            sage: d.objective_value()
            1331.0
            sage: d.nonbasic_variables()
            (x_0, x_1, w_0, w_2)
            sage: d.enter(d.nonbasic_variables()[0])
            sage: d.basic_variables()
            (x_2, x_3, w_1)
            sage: d.leave(d.basic_variables()[0])
            sage: d.objective_value()
            1331.0
            sage: d.update()
            sage: d.basic_variables()
            (x_0, x_3, w_1)
            sage: d.nonbasic_variables()
            (x_1, x_2, w_0, w_2)
            sage: d.objective_value()
            261.8

        TESTS:

        An error will be raised if the pivot selected is zero::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0
            sage: d = LPGLPKBackendDictionary(b)
            sage: d.leave(d.basic_variables()[0])
            sage: d.leaving_coefficients()
            (5.0, 0.0, 0.0, 1.0)
            sage: d.enter(d.nonbasic_variables()[1])
            sage: d.leave(d.basic_variables()[0])
            sage: d.update()
            Traceback (most recent call last):
            ...
            ValueError: incompatible choice of entering and leaving variables
        """
        # Both pivot variables must have been chosen via enter()/leave().
        entering = self._entering
        if entering is None:
            raise ValueError("entering variable must be set before updating")
        leaving = self._leaving
        if leaving is None:
            raise ValueError("leaving variable must be set before updating")
        # A zero pivot element would make the basis exchange singular.
        matching_index = tuple(self.basic_variables()).index(leaving)
        coef = self.entering_coefficients()[matching_index]
        if coef == 0:
            raise ValueError("incompatible choice of entering and leaving "
                             "variables")
        # Mark the entering variable basic in the backend.
        entering_index = tuple(self._x).index(entering)
        if entering_index < self._backend.ncols():
            self._backend.set_col_stat(entering_index, glp_bs)
        else:
            self._backend.set_row_stat(entering_index - self._backend.ncols(),
                                       glp_bs)
        # Mark the leaving variable nonbasic: structural variables go to
        # their lower bound (glp_nl), auxiliary variables to their upper
        # bound (glp_nu) -- consistent with the <= standard-form rows.
        leaving_index = tuple(self._x).index(leaving)
        if leaving_index < self._backend.ncols():
            self._backend.set_col_stat(leaving_index, glp_nl)
        else:
            self._backend.set_row_stat(leaving_index - self._backend.ncols(),
                                       glp_nu)
        # Refactorize with the new basis so subsequent queries are valid.
        if self._backend.warm_up() != 0:
            raise AttributeError("Warm up failed.")
    def add_row(self, nonbasic_coef, constant, slack_variable,
                integer_slack_variable=False):
        r"""
        Update a dictionary with an additional row based on a given dictionary.

        INPUT:

        - ``nonbasic_coef``-- a list of nonbasic coefficients for the new row

        - ``constant``-- a number of the constant term for the new row

        - ``slack_variable``-- a string of the name for the new slack variable

        - ``integer_slack_variable``-- (default: False) a boolean value
          indicating if the new slack variable is integer or not.

        EXAMPLES::

            sage: from sage_numerical_interactive_mip.backends.glpk_backend_dictionary \
                import LPGLPKBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True, \
                solver="GLPK")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: import sage.numerical.backends.glpk_backend as backend
            sage: b.solver_parameter(\
                backend.glp_simplex_or_intopt, backend.glp_simplex_only)
            sage: b.solve()
            0
            sage: d = LPGLPKBackendDictionary(b)
            sage: d.basic_variables()
            (x_2, x_3, w_1)
            sage: d.nonbasic_variables()
            (x_0, x_1, w_0, w_2)
            sage: d.objective_coefficients()
            (-486.0, -10.0, -13.0, -95.0)
            sage: d.add_row(range(3,7), 2, 'z_0')
            sage: d.objective_coefficients()
            (-486.0, -10.0, -13.0, -95.0)
            sage: d.basic_variables()
            (x_2, x_3, w_1, z_0)
            sage: d.leave(d.basic_variables()[3])
            sage: d.leaving_coefficients()
            (3.0, 4.0, 5.0, 6.0)
            sage: b.solve()
            0
            sage: d.basic_variables()
            (x_2, x_3, w_1, z_0)
            sage: d.nonbasic_variables()
            (x_0, x_1, w_0, w_2)

        Variables have 0 as their coefficient will not show up in the
        tableau:

            sage: d.add_row(range(-2, 2), 5, 'z_1')
            sage: d.get_backend().row(4)
            ([2, 1, 0], [-1.0, -1.0, -7.0])
        """
        # One coefficient per nonbasic variable is required.
        if len(nonbasic_coef) != self._backend.ncols():
            raise ValueError("Length of nonbasic coefficients incompatible")
        # Convert to problem variable coefficients
        coef_pairs, constant = (
            self._nonbasic_coef_to_problem_coef_(nonbasic_coef, constant)
        )
        # Append the constraint as an upper-bounded (<=) row.
        self._backend.add_linear_constraint(
            coef_pairs, None, constant, slack_variable
        )
        # Update buffered variables
        row_index = self._backend.nrows() - 1
        self._load_new_variable_(
            index=row_index,
            name=self._backend.row_name(row_index),
            auxiliary=True
        )
        # Update basis status in the backend: the new slack starts basic.
        self._backend.set_row_stat(self._backend.nrows() - 1, glp_bs)
        if self._backend.warm_up() != 0:
            raise AttributeError("Warm up failed.")
from sage.modules.all import vector
from sage_numerical_interactive_mip.backends.abstract_backend_dictionary \
import LPAbstractBackendDictionary
from sage.numerical.interactive_simplex_method import variable
class LPCoinBackendDictionary(LPAbstractBackendDictionary):
r"""
Construct a dictionary for an LP problem from an backend.
INPUT:
- ``backend`` -- the backend that the dictionary is
constructed from
OUTPUT:
- a :class:`backend dictionary for an LP problem
<LPCoinBackendDictionary>`
EXAMPLES:
One needs an instance of :class:`CoinBackend` to initialize
this class::
sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
import LPCoinBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True,\
solver="Coin")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(-x[0] + x[1] <= 2)
sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
sage: b = p.get_backend()
sage: d = LPCoinBackendDictionary(b)
sage: d
LP problem dictionary (use typeset mode to see details)
"""
def __init__(self, backend):
r"""
See :class:`LPCoinBackendDictionary` for documentation.
TESTS::
sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
import LPCoinBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True,\
solver="Coin")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(-x[0] + x[1] <= 2)
sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
sage: b = p.get_backend()
sage: d = LPCoinBackendDictionary(b)
sage: TestSuite(d).run(skip=['_test_pickling'])
An exception will be raised if the problem is not in standard form
i.e. with <= constraints and >= 0 variable bounds::
sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
import LPCoinBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True,\
solver="Coin")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(8 * x[0] + 2 * x[1], min=17)
sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
sage: b = p.get_backend()
sage: d = LPCoinBackendDictionary(b)
Traceback (most recent call last):
...
AttributeError: Problem constraints not in standard form.
"""
super(LPCoinBackendDictionary, self).__init__(backend)
def basic_variables(self):
r"""
Return the basic variables of ``self``.
OUTPUT:
- a vector
EXAMPLES::
sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
import LPCoinBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True,\
solver="Coin")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(-x[0] + x[1] <= 2)
sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
sage: b = p.get_backend()
sage: b.solve()
0
sage: d = LPCoinBackendDictionary(b)
sage: d.basic_variables()
(x_0, x_1)
"""
col_stat, row_stat = self._backend.get_basis_status()
col_basics = tuple(
self._x[i]
for i in range(self._backend.ncols())
if col_stat[i] == 1
)
row_basics = tuple(
self._x[i + self._backend.ncols()]
for i in range(self._backend.nrows())
if row_stat[i] == 1
)
return vector(col_basics + row_basics)
def constant_terms(self):
r"""
Return the constant terms of relations of ``self``.
OUTPUT:
- a vector.
EXAMPLES::
sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
import LPCoinBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True,\
solver="Coin")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(-x[0] + x[1] <= 2)
sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
sage: b = p.get_backend()
sage: b.solve()
0
sage: d = LPCoinBackendDictionary(b)
sage: d.constant_terms()
(1.3, 3.3)
"""
col_stat, row_stat = self._backend.get_basis_status()
col_const = tuple(
self._backend.get_variable_value(i)
for i in range(self._backend.ncols())
if col_stat[i] == 1
)
row_const = tuple(
self._backend.row_bounds(i)[1]
- self._backend.get_variable_value(self._backend.ncols() + i)
for i in range(self._backend.nrows())
if row_stat[i] == 1
)
return vector(col_const + row_const)
    def column_coefficients(self, v):
        r"""
        Return coefficients of a nonbasic variable.
        INPUT:
        - ``v`` -- a nonbasic variable of ``self``, can be given as a string, an
          actual variable, or an integer interpreted as the index of a variable
        OUTPUT:
        - a vector
        EXAMPLES::
            sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
                  import LPCoinBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True,\
                  solver="Coin")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: b.solve()
            0
            sage: d = LPCoinBackendDictionary(b)
            sage: vars = d.nonbasic_variables()
            sage: vars
            (x_0, x_1, w_0, w_2)
            sage: d.enter(vars[0])
            sage: d.entering_coefficients()   # indirect doctest
            (36.0, 26.0, 5.0)
            sage: d.enter(vars[1])
            sage: d.entering_coefficients()   # indirect doctest
            (1.0, 2.0, 0.0)
        """
        if v is not None:
            # Normalize string/integer input to an actual variable first.
            v = variable(self.coordinate_ring(), v)
            if v not in self.nonbasic_variables():
                raise ValueError("variable must be nonbasic")
        # Position of ``v`` in the combined structural+slack variable tuple;
        # this is also the column index expected by the backend.
        index = tuple(self._x).index(v)
        return vector(self._backend.get_binva_col(index))
def row_coefficients(self, v):
r"""
Return coefficients of the basic variable ``v``.
These are the coefficients with which nonbasic variables are subtracted
in the relation for ``v``.
INPUT:
- ``v`` -- a basic variable of ``self``, can be given as a string, an
actual variable, or an integer interpreted as the index of a variable
OUTPUT:
- a vector
EXAMPLES::
sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
import LPCoinBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True,\
solver="Coin")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
sage: p.add_constraint(5*x[0] + x[2] <= 11)
sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
sage: b = p.get_backend()
sage: b.solve()
0
sage: d = LPCoinBackendDictionary(b)
sage: vars = d.basic_variables()
sage: vars
(x_2, x_3, w_1)
sage: d.leave(vars[0])
sage: d.leaving_coefficients() # indirect doctest
(5.0, 0.0, 0.0, 1.0)
sage: d.leave(vars[1])
sage: d.leaving_coefficients() # indirect doctest
(36.0, 1.0, 1.0, 7.0)
"""
if v is not None:
v = variable(self.coordinate_ring(), v)
if v not in self.basic_variables():
raise ValueError("variable must be basic")
var_index = tuple(self._x).index(v)
row_indices = self._backend.get_basics()
row_index = tuple(row_indices).index(var_index)
from sage.misc.flatten import flatten
row = flatten(self._backend.get_binva_row(row_index))
nonbasic_indicies = [self._x.index(v)
for v in self.nonbasic_variables()]
return vector([row[i] for i in nonbasic_indicies])
def nonbasic_variables(self):
r"""
Return non-basic variables of ``self``.
OUTPUT:
- a vector
EXAMPLES::
sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
import LPCoinBackendDictionary
sage: p = MixedIntegerLinearProgram(maximization=True,\
solver="Coin")
sage: x = p.new_variable(nonnegative=True)
sage: p.add_constraint(-x[0] + x[1] <= 2)
sage: p.add_constraint(8 * x[0] + 2 * x[1] <= 17)
sage: p.set_objective(5.5 * x[0] + 2.1 * x[1])
sage: b = p.get_backend()
sage: b.solve()
0
sage: d = LPCoinBackendDictionary(b)
sage: d.nonbasic_variables()
(w_0, w_1)
"""
col_stat, row_stat = self._backend.get_basis_status()
col_nonbasics = tuple(
self._x[i]
for i in range(self._backend.ncols())
if col_stat[i] != 1
)
row_nonbasics = tuple(
self._x[i + self._backend.ncols()]
for i in range(self._backend.nrows())
if row_stat[i] != 1
)
return vector(col_nonbasics + row_nonbasics)
    def objective_coefficients(self):
        r"""
        Return coefficients of the objective of ``self``.
        OUTPUT:
        - a vector
        EXAMPLES::
            sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
                  import LPCoinBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True,\
                  solver="Coin")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: b.solve()
            0
            sage: d = LPCoinBackendDictionary(b)
            sage: d.objective_coefficients()
            (-486.0, -10.0, -13.0, -95.0)
        """
        col_stat, row_stat = self._backend.get_basis_status()
        cost = self._backend.get_reduced_cost()
        price = self._backend.get_row_price()
        # Nonbasic structural variables contribute their negated reduced
        # cost; status != 1 filters out the basic ones.
        col_coefs = tuple(
            -cost[i]
            for i in range(self._backend.ncols())
            if col_stat[i] != 1
        )
        # Nonbasic slack variables contribute the dual row price
        # (sign convention differs from the columns -- note the absence
        # of a minus sign here).
        row_coefs = tuple(
            price[i]
            for i in range(self._backend.nrows())
            if row_stat[i] != 1
        )
        return vector(col_coefs + row_coefs)
    def objective_name(self):
        r"""
        Return the objective name of ``self``.
        OUTPUT:
        - a symbolic expression
        """
        # The objective always carries the fixed symbolic name "obj".
        return SR("obj")
    def update(self):
        r"""
        Update ``self`` using previously set entering and leaving variables.
        EXAMPLES::
            sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
                  import LPCoinBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True,\
                  solver="Coin")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: b.solve()
            0
            sage: d = LPCoinBackendDictionary(b)
            sage: d.objective_value()
            1331.0
            sage: d.nonbasic_variables()
            (x_0, x_1, w_0, w_2)
            sage: d.enter(d.nonbasic_variables()[0])
            sage: d.basic_variables()
            (x_2, x_3, w_1)
            sage: d.leave(d.basic_variables()[0])
            sage: d.objective_value()
            1331.0
            sage: d.update()
            sage: d.basic_variables()
            (x_0, x_3, w_1)
            sage: d.nonbasic_variables()
            (x_1, x_2, w_0, w_2)
            sage: d.objective_value()   # rel tol 1e-9
            261.8
        TESTS:
        An error will be raised if the pivot selected is zero::
            sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
                  import LPCoinBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True,\
                  solver="Coin")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: b.solve()
            0
            sage: d = LPCoinBackendDictionary(b)
            sage: d.leave(d.basic_variables()[0])
            sage: d.leaving_coefficients()
            (5.0, 0.0, 0.0, 1.0)
            sage: d.enter(d.nonbasic_variables()[1])
            sage: d.leave(d.basic_variables()[0])
            sage: d.update()
            Traceback (most recent call last):
            ...
            ValueError: incompatible choice of entering and leaving variables
        """
        entering = self._entering
        if entering is None:
            raise ValueError("entering variable must be set before updating")
        leaving = self._leaving
        if leaving is None:
            raise ValueError("leaving variable must be set before updating")
        # The pivot element is the leaving row's coefficient in the
        # entering column; a zero pivot means the pair cannot be swapped.
        matching_index = tuple(self.nonbasic_variables()).index(entering)
        coef = self.leaving_coefficients()[matching_index]
        if coef == 0:
            raise ValueError("incompatible choice of entering and leaving "
                             "variables")
        col_stat, row_stat = self._backend.get_basis_status()
        entering_index = self._x.index(entering)
        leaving_index = self._x.index(leaving)
        # Status codes: 1 = basic, 3 = nonbasic (at lower bound,
        # presumably) in the CLP basis-status encoding -- TODO confirm.
        # Indices below ncols() are structural variables, the rest are
        # slacks indexed per row.
        if entering_index < self._backend.ncols():
            col_stat[entering_index] = 1
        else:
            row_stat[entering_index - self._backend.ncols()] = 1
        if leaving_index < self._backend.ncols():
            col_stat[leaving_index] = 3
        else:
            row_stat[leaving_index - self._backend.ncols()] = 3
        # Push the swapped basis back to the backend; it refactorizes.
        self._backend.set_basis_status(col_stat, row_stat)
    def add_row(self, nonbasic_coef, constant, slack_variable,
                integer_slack_variable=False):
        r"""
        Update a dictionary with an additional row based on a given dictionary.
        INPUT:
        - ``nonbasic_coef``-- a list of nonbasic coefficients for the new row
        - ``constant``-- a number of the constant term for the new row
        - ``slack_variable``-- a string of the name for the new slack variable
        - ``integer_slack_variable``-- (default: False) a boolean value
          indicating if the new slack variable is integer or not.
        EXAMPLES::
            sage: from sage_numerical_interactive_mip.backends.coin_backend_dictionary \
                  import LPCoinBackendDictionary
            sage: p = MixedIntegerLinearProgram(maximization=True,\
                  solver="Coin")
            sage: x = p.new_variable(nonnegative=True)
            sage: p.add_constraint(x[0] + x[1] - 7*x[2] + x[3] <= 22)
            sage: p.add_constraint(x[1] + 2*x[2] - x[3] <= 13)
            sage: p.add_constraint(5*x[0] + x[2] <= 11)
            sage: p.set_objective(2*x[0] + 3*x[1] + 4*x[2] + 13*x[3])
            sage: b = p.get_backend()
            sage: b.solve()
            0
            sage: d = LPCoinBackendDictionary(b)
            sage: d.basic_variables()
            (x_2, x_3, w_1)
            sage: d.nonbasic_variables()
            (x_0, x_1, w_0, w_2)
            sage: d.objective_coefficients()
            (-486.0, -10.0, -13.0, -95.0)
            sage: d.add_row(range(3,7), 2, 'z_0')
            sage: d.objective_coefficients()
            (-486.0, -10.0, -13.0, -95.0)
            sage: d.basic_variables()
            (x_2, x_3, w_1, z_0)
            sage: d.leave(d.basic_variables()[3])
            sage: d.leaving_coefficients()
            (3.0, 4.0, 5.0, 6.0)
            sage: b.solve()
            0
            sage: d.basic_variables()
            (x_2, x_3, w_1, z_0)
            sage: d.nonbasic_variables()
            (x_0, x_1, w_0, w_2)
        Variables have 0 as their coefficient will not show up in the
        tableau:
            sage: d.add_row(range(-2, 2), 5, 'z_1')
            sage: d.get_backend().row(4)
            ([0, 1, 2], [-7.0, -1.0, -1.0])
        """
        # One coefficient per backend column is expected here -- in the
        # examples above the number of nonbasic variables equals ncols().
        if len(nonbasic_coef) != self._backend.ncols():
            raise ValueError("Length of nonbasic coefficients incompatible")
        # Convert to problem variable coefficients
        coef_pairs, constant = (
            self._nonbasic_coef_to_problem_coef_(nonbasic_coef, constant)
        )
        # None as lower bound means the new constraint is "... <= constant".
        self._backend.add_linear_constraint(
            coef_pairs, None, constant, slack_variable
        )
        # Update buffered variables
        row_index = self._backend.nrows() - 1
        self._load_new_variable_(
            index=row_index,
            name=self._backend.row_name(row_index),
            auxiliary=True
        )
        # Update basis status in the backend: mark the new slack as basic
        # (status code 1) so the added row enters the current dictionary.
        curr_basis = self._backend.get_basis_status()
        curr_basis[1].append(1)
self._backend.set_basis_status(*curr_basis) | /sage_numerical_interactive_mip-0.2.1.tar.gz/sage_numerical_interactive_mip-0.2.1/sage_numerical_interactive_mip/backends/coin_backend_dictionary.py | 0.856453 | 0.446434 | coin_backend_dictionary.py | pypi |
# Base URL of the hosted Sage documentation; intersphinx inventories are
# fetched relative to this URL in setup() below.
sage_doc_url = "http://doc.sagemath.org/html/en/"
# Top-level Sage documents that each get their own intersphinx mapping.
sage_documents = [
    "a_tour_of_sage", "constructions", "developer", "faq",
    "installation", "prep", "reference", "thematic_tutorials",
    "tutorial"
]
# Sub-documents of the Sage reference manual, one intersphinx mapping each.
sage_modules = [
    "algebras", "databases", "game_theory", "logic", "monoids", "quadratic_forms",
    "semirings", "arithgroup", "data_structures", "graphs", "manifolds",
    "notebook", "quat_algebras", "stats", "asymptotic",
    "diophantine_approximation", "groups", "matrices", "number_fields",
    "quivers", "structure", "calculus", "discrete_geometry", "hecke", "matroids",
    "numerical", "references", "tensor", "categories", "doctest",
    "history_and_license", "misc", "padics", "repl", "tensor_free_modules",
    "coding", "dynamics", "homology", "modabvar", "parallel",
    "riemannian_geometry", "coercion", "finance", "hyperbolic_geometry",
    "modfrm", "plot3d", "rings", "combinat", "finite_rings", "interfaces",
    "modfrm_hecketriangle", "plotting", "rings_numerical", "constants",
    "function_fields", "knots", "modmisc", "polynomial_rings", "rings_standard",
    "cryptography", "functions", "lfunctions", "modsym", "power_series", "sat",
    "curves", "games", "libs", "modules", "probability", "schemes",
]
import os
import sys
# Version string such as "3.8.10", used to build links into the Python docs.
pythonversion = sys.version.split(' ')[0]
def setup(app):
    """
    Initialize this Sphinx extension

    Called by Sphinx when the extension is loaded: pulls in the todo,
    mathjax, intersphinx and extlinks extensions, registers intersphinx
    inventories for Python and all Sage documents/modules, defines the
    project's custom link roles, and selects the ``sage`` HTML theme.
    """
    app.setup_extension('sphinx.ext.todo')
    app.setup_extension('sphinx.ext.mathjax')
    app.setup_extension("sphinx.ext.intersphinx")
    # Cross-reference the standard Python documentation.
    app.config.intersphinx_mapping.update({
        'https://docs.python.org/': None
    })
    # One inventory per top-level Sage document ...
    app.config.intersphinx_mapping.update({
        sage_doc_url + doc + "/": None
        for doc in sage_documents
    })
    # ... and one per chapter of the Sage reference manual.
    app.config.intersphinx_mapping.update({
        sage_doc_url + "reference/" + module: None
        for module in sage_modules
    })
    app.setup_extension("sphinx.ext.extlinks")
    # Shorthand roles; "%s" in each URL is replaced by the role argument.
    app.config.extlinks.update({
        'python': ('https://docs.python.org/release/'+pythonversion+'/%s', ''),
        # Sage trac ticket shortcuts. For example, :trac:`7549` .
        'trac': ('https://trac.sagemath.org/%s', 'trac ticket #'),
        'wikipedia': ('https://en.wikipedia.org/wiki/%s', 'Wikipedia article '),
        'arxiv': ('http://arxiv.org/abs/%s', 'Arxiv '),
        'oeis': ('https://oeis.org/%s', 'OEIS sequence '),
        'doi': ('https://dx.doi.org/%s', 'doi:'),
        'pari': ('http://pari.math.u-bordeaux.fr/dochtml/help/%s', 'pari:'),
        'mathscinet': ('http://www.ams.org/mathscinet-getitem?mr=%s', 'MathSciNet ')
    })
    app.config.html_theme = 'sage'
def themes_path():
    """
    Retrieve the location of the themes directory from the location of this package

    This is taken from Sphinx's theme documentation
    """
    # Resolve relative to this module so the themes ship with the package.
    package_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(package_dir, 'themes') | /sage-package-0.0.7.tar.gz/sage-package-0.0.7/sage_package/sphinx.py | 0.559531 | 0.495484 | sphinx.py | pypi |
from urllib.request import urlopen, Request
import mimetypes
import string
import random
def id_generator(size=26, chars=string.ascii_uppercase + string.digits) -> str:
    """
    Build a random identifier of ``size`` characters drawn from ``chars``.

    Substitute for the removed ``mimetools.choose_boundary()``.
    """
    return u''.join(random.choices(chars, k=size))
def post_multipart(url, fields, files):
    """
    Post fields and files to an http host as multipart/form-data.

    fields is a sequence of (name, value) elements for regular form
    fields. files is a sequence of (name, filename, value) elements
    for data to be uploaded as files

    Return the server's response page.
    """
    # Body is bytes; Content-Length must therefore be its byte length.
    content_type, body = encode_multipart_formdata(fields, files)
    headers = {'Content-Type': content_type,
               'Content-Length': str(len(body))}
    # Passing ``body`` as data makes this a POST request.
    r = Request(url, body, headers)
    # Response is decoded assuming UTF-8 -- TODO confirm the server
    # always replies in UTF-8.
    return urlopen(r).read().decode('utf-8')
def by(utf_string: str) -> bytes:
    """
    Encode a text string as UTF-8 bytes.

    py2: takes a unicode object and returns a str object;
    py3: takes a str object and returns a bytes object.
    """
    return bytes(utf_string, 'utf8')
def encode_multipart_formdata(fields, files):
    """
    Build a multipart/form-data request payload.

    ``fields`` is a sequence (or dict) of (name, value) pairs for regular
    form fields; ``files`` is a sequence of (name, filename, value)
    triples for data to be uploaded as files (``value`` already bytes).

    Return (content_type, body) ready for an httplib.HTTP instance,
    where ``body`` is a bytes object.
    """
    # Random boundary, kept both as text (for the header) and bytes
    # (for the body).
    utf_boundary = id_generator()
    boundary = by(utf_boundary)
    crlf = by(u'\r\n')
    dashes = by(u'--')
    pieces = []
    if isinstance(fields, dict):
        fields = fields.items()
    for (key, value) in fields:
        pieces.append(dashes + boundary)
        pieces.append(by(u'Content-Disposition: form-data; name="{}"'.format(key)))
        pieces.append(by(u''))
        pieces.append(by(value))
    for (key, filename, value) in files:
        pieces.append(dashes + boundary)
        cont = u'Content-Disposition: form-data; name="{}"; filename="{}"'
        pieces.append(by(cont.format(key, filename)))
        pieces.append(by(u'Content-Type: {}'.format(get_content_type(filename))))
        pieces.append(by(u''))
        pieces.append(value)
    # Closing delimiter plus a trailing CRLF (via the final empty piece).
    pieces.append(dashes + boundary + dashes)
    pieces.append(by(u''))
    body = crlf.join(pieces)
    content_type = 'multipart/form-data; boundary={}'.format(utf_boundary)
    return content_type, body
def get_content_type(filename: str) -> str:
    """Guess the MIME type of *filename*, defaulting to octet-stream."""
    guessed, _encoding = mimetypes.guess_type(filename)
    return guessed if guessed is not None else 'application/octet-stream'
from __future__ import annotations
from typing import Iterator
import textwrap
from datetime import datetime
def format_trac(text: str) -> str:
    """Strip *text* and wrap each of its lines at 78 columns."""
    wrapped = ('\n'.join(textwrap.wrap(raw_line, 78))
               for raw_line in text.strip().splitlines())
    return '\n'.join(wrapped)
def make_time(time) -> datetime:
    """
    Convert xmlrpc DateTime objects to datetime.datetime

    Plain ``datetime`` instances pass through unchanged; anything else is
    expected to carry an ISO-like string in its ``value`` attribute.
    """
    if isinstance(time, datetime):
        return time
    return datetime.strptime(time.value, "%Y%m%dT%H:%M:%S")
def TicketChange(changelog_entry):
    """
    Factory: build the right change object for one trac changelog entry.

    ``changelog_entry`` is the 6-tuple returned by trac's
    ``ticket.changeLog`` (presumably (time, author, field, oldvalue,
    newvalue, permanent) -- confirm against the trac XML-RPC docs).
    Comment entries get the richer :class:`TicketComment_class`.
    """
    time, author, change, data1, data2, data3 = changelog_entry
    # print(time, author, change, data1, data2, data3)
    if change == 'comment':
        return TicketComment_class(time, author, change, data1, data2, data3)
    return TicketChange_class(time, author, change, data=(data1, data2, data3))
class TicketChange_class(object):
    """
    One non-comment entry of a trac ticket changelog.

    Wraps the (time, author, change, data) pieces of a changelog entry
    and exposes them as read-only properties.
    """

    def __init__(self, time, author: str, change, data=None):
        # ``time`` may be a datetime or an xmlrpc DateTime; normalize it.
        self._time = make_time(time)
        self._author = author
        self._change = change
        if data:
            self._data = data
        else:
            # Fallback triple (old value, new value, flag) when no data
            # was supplied -- TODO confirm the meaning of the third slot.
            self._data = ('', '', 1)

    def get_data(self) -> str:
        """Return the change data as ' [...]', or '' when unavailable."""
        try:
            return ' [' + str(self._data) + ']'
        except AttributeError:
            return ''

    @property
    def ctime(self):
        """Change time as a datetime."""
        return self._time

    @property
    def ctime_str(self) -> str:
        """Change time rendered as a string."""
        return str(self.ctime)

    @property
    def author(self) -> str:
        return self._author

    @property
    def change(self):
        """The name of the changed field (e.g. 'status', 'comment')."""
        return self._change

    @property
    def change_capitalized(self):
        return self._change.capitalize()

    @property
    def old(self):
        """The field value before the change."""
        return self._data[0]

    @property
    def new(self):
        """The field value after the change."""
        return self._data[1]

    @property
    def change_action(self) -> str:
        """Human-readable description of what happened to the field."""
        if self.old == '':
            return u'set to {change.new}'.format(change=self)
        elif self.new == '':
            return u'{change.old} deleted'.format(change=self)
        else:
            txt = u'changed from {change.old} to {change.new}'
            return txt.format(change=self)

    def __repr__(self) -> str:
        txt = self._author + u' changed ' + self._change
        txt += self.get_data()
        return txt
class TicketComment_class(TicketChange_class):
    """
    A comment entry of a trac ticket changelog.
    """

    def __init__(self, time, author: str, change, data1, data2, data3):
        TicketChange_class.__init__(self, time, author, change)
        # data1 = comment number, data2 = comment text; data3 is unused
        # (presumably trac's "permanent" flag -- confirm).
        self._number = data1
        self._comment = data2

    @property
    def number(self):
        """The comment number, as a string reported by trac."""
        return self._number

    @property
    def comment(self):
        """The raw comment text."""
        return self._comment

    @property
    def comment_formatted(self) -> str:
        """The comment text, stripped and wrapped at 78 columns."""
        return format_trac(self.comment)

    def __repr__(self) -> str:
        return self.author + ' commented "' + \
            self.comment + '" [' + self.number + ']'
def TracTicket(ticket_number: int, server_proxy) -> TracTicket_class:
    """
    Fetch ticket data over XML-RPC and build a :class:`TracTicket_class`.

    ``server_proxy`` is an xmlrpc ServerProxy for the trac server.
    A malformed changelog (ExpatError) is tolerated: the ticket is then
    returned with an empty change log.
    """
    from xml.parsers.expat import ExpatError
    ticket_number = int(ticket_number)
    try:
        change_log = server_proxy.ticket.changeLog(ticket_number)
    except ExpatError:
        print('Failed to parse the trac changelog, malformed XML!')
        change_log = []
    data = server_proxy.ticket.get(ticket_number)
    ticket_changes = [TicketChange(entry) for entry in change_log]
    # data unpacks as [id, time_created, time_changed, attributes].
    return TracTicket_class(data[0], data[1], data[2], data[3], ticket_changes)
class TracTicket_class(object):
    """
    In-memory representation of a trac ticket.

    Holds the raw attribute dict returned by the trac XML-RPC interface
    together with the parsed change log, and exposes the individual
    fields as read-only properties (with placeholder defaults for
    missing attributes).
    """

    def __init__(self, number: int, ctime, mtime, data, change_log=None):
        self._number = number
        # ctime/mtime may be xmlrpc DateTime objects; normalize them.
        self._ctime = make_time(ctime)
        self._mtime = make_time(mtime)
        self._last_viewed = None
        self._download_time = None
        self._data = data
        self._change_log = change_log

    @property
    def timestamp(self):
        """
        Timestamp for XML-RPC calls

        The timestamp is an integer that must be set in subsequent
        ticket.update() XMLRPC calls to trac.
        """
        return self._data['_ts']

    @property
    def number(self) -> int:
        """The trac ticket number."""
        return self._number

    def __int__(self) -> int:
        # BUGFIX: this used to be ``__int__ = number``, which assigned the
        # property object itself, so ``int(ticket)`` raised
        # ``TypeError: 'property' object is not callable``.
        return self._number

    @property
    def title(self) -> str:
        return self._data.get('summary', '<no summary>')

    @property
    def ctime(self):
        """Creation time as a datetime."""
        return self._ctime

    @property
    def mtime(self):
        """Last-modification time as a datetime."""
        return self._mtime

    @property
    def ctime_str(self) -> str:
        return str(self.ctime)

    @property
    def mtime_str(self) -> str:
        return str(self.mtime)

    @property
    def branch(self) -> str:
        return self._data.get('branch', '').strip()

    @property
    def dependencies(self) -> str:
        return self._data.get('dependencies', '')

    @property
    def description(self) -> str:
        default = '+++ no description +++'
        return self._data.get('description', default)

    @property
    def description_formatted(self):
        """The description, stripped and wrapped at 78 columns."""
        return format_trac(self.description)

    def change_iter(self) -> Iterator:
        """Iterate over all changelog entries."""
        for change in self._change_log:
            yield change

    def comment_iter(self) -> Iterator:
        """Iterate over the comment entries of the changelog only."""
        for change in self._change_log:
            if isinstance(change, TicketComment_class):
                yield change

    def grouped_comment_iter(self):
        """
        Iterate over changelog entries grouped by change time.

        Each yielded item is a tuple of all entries sharing one
        timestamp, with comments sorted first within the group.
        """
        change_iter = iter(self._change_log)
        try:
            change = next(change_iter)
        except StopIteration:
            # BUGFIX: re-raising StopIteration inside a generator turns
            # into RuntimeError since Python 3.7 (PEP 479); an explicit
            # return ends the generator cleanly.
            return

        def sort_key(c):
            # Comments first (negated flag), then alphabetical by field.
            return (-int(c.change == 'comment'), c.change)

        while True:
            stop = False
            time = change.ctime
            accumulator = [(sort_key(change), change)]
            # Collect all subsequent entries with the same timestamp.
            while True:
                try:
                    change = next(change_iter)
                except StopIteration:
                    stop = True
                    break
                if change.ctime == time:
                    accumulator.append((sort_key(change), change))
                else:
                    break
            yield tuple(c[1] for c in sorted(accumulator))
            if stop:
                # BUGFIX: was ``raise StopIteration`` (RuntimeError under
                # PEP 479); return instead.
                return

    @property
    def author(self):
        return self._data.get('author', '<no author>')

    @property
    def cc(self):
        return self._data.get('cc', '')

    @property
    def component(self):
        return self._data.get('component', '')

    @property
    def reviewer(self):
        return self._data.get('reviewer', '<no reviewer>')

    @property
    def reporter(self):
        return self._data.get('reporter', '<no reporter>')

    @property
    def milestone(self):
        return self._data.get('milestone', '<no milestone>')

    @property
    def owner(self):
        return self._data.get('owner', '<no owner>')

    @property
    def priority(self):
        return self._data.get('priority', '<no priority>')

    @property
    def commit(self):
        return self._data.get('commit', '')

    @property
    def keywords(self):
        return self._data.get('keywords', '')

    @property
    def ticket_type(self):
        return self._data.get('type', '<no type>')

    @property
    def upstream(self):
        return self._data.get('upstream', '<no upstream status>')

    @property
    def status(self):
        return self._data.get('status', '<no status>')

    @property
    def resolution(self):
        return self._data.get('resolution', '<no resolution>')

    @property
    def work_issues(self):
        return self._data.get('work_issues', '')
from __future__ import annotations
from sage_patchbot.server.db import tickets, logs
def get_tickets_with_many_reports(N: int) -> list[int]:
    """
    Retrieve the tickets with more than N reports.

    INPUT: N an integer
    OUTPUT: list of ticket numbers
    """
    # Full collection scan; the 'reports' key may be absent on a ticket.
    return [t['id'] for t in tickets.find()
            if 'reports' in t and len(t['reports']) > N]
def purge_tickets_with_many_reports(N: int, n: int):
    """
    For all tickets with more than N reports, keep only the latest n reports.

    INPUT: integers N, n

    .. WARNING:: Use with caution!
    """
    assert n < N
    longs = get_tickets_with_many_reports(N)
    for fi in longs:
        # Reports are stored in chronological order; keep the tail.
        old = tickets.find_one({'id': fi})['reports']
        tickets.update_one({'id': fi}, {'$set': {"reports": old[-n:]}})
def get_pending_logs(year: int):
    """
    Retrieve an iterator over ``Pending`` logs for the given ``year``.

    INPUT: an integer, for example 2019
    OUTPUT: an iterator over database entries
    """
    # Log ids look like "/log/Pending/<ticket>/<machine>/<year>-...".
    return logs.find({'_id': {'$regex': f"/log/Pending/.*/{year}"}})
def count_pending_logs(year: int) -> int:
    """
    Count the number of ``Pending`` logs for the given ``year``.

    INPUT: an integer, for example 2019
    OUTPUT: an integer
    """
    logs_year = get_pending_logs(year)
    # NOTE(review): Cursor.count() was deprecated in PyMongo 3.7 and
    # removed in PyMongo 4 -- confirm the PyMongo version this project
    # pins before upgrading.
    return logs_year.count()
def purge_pending_logs(year: int):
    """
    Delete all ``Pending`` logs for the given ``year``.

    INPUT: an integer, for example 2019

    .. WARNING:: Use with caution!
    """
    year_logs = get_pending_logs(year)
    for ell in year_logs:
        # ``logs.delete`` takes the stored file's id (gridfs-style
        # ``_file`` attribute, presumably) -- confirm against the db layer.
        logs.delete(ell._file['_id'])
def purge_pending_in_tickets(liste: list[int]):
    """
    Delete all ``Pending`` logs for all given tickets.

    INPUT: a list of trac ticket numbers, such as [8954, 22453]

    .. WARNING:: Use with caution!
    """
    for ticket_number in liste:
        pattern = f"/log/Pending/{ticket_number}/"
        for entry in logs.find({'_id': {'$regex': pattern}}):
            logs.delete(entry._file['_id'])
def count_logs(year: int, month: int, day=0) -> int:
    """
    Return the numbers of logs for a given period.

    INPUT: year and month as numbers, such as 2019, 3
    optionally also the day as a number
    OUTPUT: integer
    """
    # Log ids embed a zero-padded ISO-like date; with day=0 (the default)
    # the whole month is matched.
    if not day:
        reg = f"/log/.*/{year}-{month:02d}.*"
    else:
        reg = f"/log/.*/{year}-{month:02d}-{day:02d}.*"
    period_logs = logs.find({'_id': {'$regex': reg}})
    # NOTE(review): Cursor.count() is gone in PyMongo 4 -- see
    # count_pending_logs above.
    return period_logs.count()
def extraction_machine(list_of_logs: list) -> list[str]:
    """
    Extract, from a list of database entries, the full names
    of the machines that sent these reports.

    INPUT: a list or iterator of some ``logs`` database entries
    OUTPUT: a sorted list of short machine names

    The machine name is the next-to-last path component of the log id,
    once any 'Pending' component has been removed.
    """
    machine_names = set()
    for entry in list_of_logs:
        parts = [part for part in entry._file['_id'].split('/')
                 if part != 'Pending']
        machine_names.add(parts[-2])
    return sorted(machine_names)
def machines_actives(year: int, month: int) -> list[str]:
    """
    Return the list of machines that were active during the period.

    INPUT: integers for year and month
    OUTPUT: sorted list of short machine names
    """
    pattern = f"/log/.*/{year}-{month:02d}.*"
    month_logs = logs.find({'_id': {'$regex': pattern}})
    return extraction_machine(month_logs)
# Automated cloud deployment
The tool is used to determine a deployment plan for a series of application using a given amount of Virtual Machine (VM) offers
at its disposal. It was designed to be highly configurable and easy to use.
## Setup
Install poetry:
```shell
pip3 install poetry
```
Install needed libraries:
```shell
poetry install
```
On Windows, you may need to run the following instead, due to an alias problem:
```shell
python3 -m poetry install
```
Furthermore, you need the MiniZinc and CPLEX applications installed, as their binaries are required to use the following solvers:
- Chuffed
- Google OR-Tools
- Gecode
- CPLEX
To install them, please use the following links:
- Minizinc [https://www.minizinc.org/]
- CPLEX [https://www.ibm.com/support/pages/downloading-ibm-ilog-cplex-optimization-studio-v1290]
- Google OR-Tools [https://developers.google.com/optimization/install/download]
Enter the shell:
```shell
poetry shell
```
## Creating your own use-case
Although the tool comes with some model use-cases, you can also add your own by following these steps:
1. Decide the format of the model (MiniZinc / JSON / Both)
2. Create the model of the application (see guide below)
3. (Optional) For MiniZinc models, create a surrogate model so the script can compute the maximum number of VMs to be used.
4. Run the script *add_useCase.py* which will configure the application for your newly created model
5. (Optional) provide a list of Virtual Machine offers, following the same format as those provided as model
Note that the offer list must be in the same format as that of the model.
## Running the tool
There are two simple steps in running the tool:
1. Select the configuration you would like to run with (see the configuration options below)
- **NOTE** Make sure to disable the solvers that aren't compatible with the format of your sources.
2. Run the script *runTests.py*
## Configuration
The configuration of the tool can be found inside the **config** folder, and is split into several JSON files.
### Config.json
This file stores general configuration: from paths to general options for running your tests.
- Source-Config
Stores the configuration regarding models
- **Type** : The type of solver the models work with
- **Path** : The path where the models are found
- **Extension** : The format of the models
- **Formalization** : The way the general constraints are represented.
- Input-Config
Stores data regarding the input fed to solvers when running the tests
- **Type** : The type of solver which accepts the input files
- **Path** : The path of the input files
- **Extension** : The format of the input files
- **Offer Numbers** : The amount of virtual machine offers found in an input file.
- Test-Config
Stores data regarding the way the tests are carried out
- **Repetitions** : How many times a specific use case is tested
- **Symmetry Breaking** : Denotes whether static symmetry breaking techniques should be employed when running the tests
- **Symmetry Breakers** : Denote all methods of static symmetry breaking which should be tested
- **Output path** : Denotes where the test results should be placed. The format is *Output-Path/SymmetryBreaker/Solver/UseCase_Offers.csv*
- **Solver Config File** : Points to where solver-related settings are stored
- **Use Case Config File** : Points where use-case specific settings are stored
### Solvers.json
This file stores solver related configuration.
- MiniZinc-Solvers
Stores information regarding the different SAT solvers used for testing
- **Name** : The name of the solver
- **Keywd** : A unique identifier of the solver
- **Enabled** : Denotes whether tests should be run with this solver
- JSON-Solvers
Stores information regarding the different SMT solvers used for testing
- **Name** : The name of the solver
- **Keywd** : A unique identifier of the solver
- **Enabled** : Denotes whether tests should be run with this solver
### SymmetryBreaking.json
This file stores information required for the usage of symmetry breaking methods with SAT Models.
- SB-Constraints
Stores information about what constraints should be added for different symmetry breakers.
- **Tag** : A unique identifier of the symmetry breaking technique
- **Constraint** : The constraint added to the model. A parameter between brackets will be replaced at runtime with an actual value.
### UseCases.json
This file stores information about each use case.
- Use-Cases
Stores the configuration of each use-case
- **Name** : The name of the use-case
- **Enabled** : Denotes whether the use case should be tested.
- **Model-Name** : The name of the model used when testing the use case.
- **Scaling-Components** : Denotes whether the model has components which can be scaled regardless of other restrictions.
- Components
If the model supports scaling components, then here is stored a list of all components which can be scaled
- **Name** : The name of the component
- **Lower Bound** : The minimum number of instances to be deployed
- **Upper Bound** : The maximum number of instances to be deployed
- **Surrogate-Problem** : Denotes whether the model has an associated surrogate problem which must be run first
- **Surrogate-Model-Name** : The name of the model for the surrogate problem
| /sage-rec-engine-0.2.tar.gz/sage-rec-engine-0.2/README.md | 0.507324 | 0.959573 | README.md | pypi |
from Solvers.Core.Restrictions.RestrictionHardware import RestrictionHardware
from datetime import datetime
class ManuverSolver(object):
    def init_problem(
        self,
        problem,
        solver_type,
        default_offers_encoding=True,
        sb_option=None,
        smt2lib=None,
        smt2libsol=None,
        cplexLPPath=None,
        use_vm_vector_in_encoding=False,
        offers_list_filtered=False,
    ):
        """
        Configure the solver for one deployment problem and encode it.

        :param problem: the parsed use case (components, VM offers,
            restrictions list)
        :param solver_type: "optimize" enables optimization mode; any other
            value leaves the solver in plain/debug mode
        :param default_offers_encoding: use the default encoding of VM offers
        :param sb_option: static symmetry-breaking option to apply, if any
        :param smt2lib: output path for the SMT2-LIB encoding (optional)
        :param smt2libsol: output path for the SMT2-LIB solution (optional)
        :param cplexLPPath: output path for the CPLEX LP encoding (optional)
        :param use_vm_vector_in_encoding: include the VM occupancy vector
            in the encoding
        :param offers_list_filtered: whether the offers list was pre-filtered
        """
        self.__constMap = {}
        self.problem = problem
        # Cache the problem dimensions for the encoding below.
        self.nrVM = self.problem.nrVM
        self.nrOffers = len(self.problem.offers_list)
        self.nrComp = self.problem.nrComp
        self.vm_with_offers = {}
        self.vmIds_for_fixedComponents = set()
        if solver_type == "optimize":
            self.solverTypeOptimize = True  # optimize, debug
        else:
            self.solverTypeOptimize = False
        self.offers_list = self.problem.offers_list
        self.sb_option = sb_option
        self.smt2lib = smt2lib
        self.smt2libsol = smt2libsol
        self.cplexLPPath = cplexLPPath
        self.use_vm_vector_in_encoding = use_vm_vector_in_encoding
        self.offers_list_filtered = offers_list_filtered
        self.default_offers_encoding = default_offers_encoding
        # Encoding order: solver setup, symmetry breaking, the use case's
        # own restrictions, then hardware/offer constraints (scale factor 1).
        self._initSolver()
        self._symmetry_breaking()
        for restriction in self.problem.restrictionsList:
            restriction.generateRestrictions(self)
        self._hardware_and_offers_restrictionns(1)
    def run(self):
        """Solve the encoded problem; stub overridden by solver subclasses."""
        print("Start evaluation")
    def _symmetry_breaking(self):
        """Add symmetry-breaking constraints; stub overridden by subclasses."""
        print("Parent class simetry breaking")
    def _hardware_and_offers_restrictionns(self, scale_factor=1):
        """
        Add hardware/offer constraints; stub overridden by subclasses.

        NOTE(review): the name is misspelled ("restrictionns"), but it is
        part of the subclass interface, so renaming would break overrides.
        """
        print("Parent class offers restrictions")
    def _initSolver(self):
        """Initialize the underlying solver; stub overridden by subclasses."""
        print("Start solver initialization")
    def RestrictionConflict(self, alphaCompId, conflictCompsIdList):
        """
        Constraint describing the conflict between components. The 2 params. should not be placed on the same VM

        :param alphaCompId: id of the first conflict component
        :param conflictCompsIdList: id of the second conflict component
        :return: None
        """
        # Stub: concrete solver subclasses encode the actual constraint.
        print("Parent class RestrictionConflict")
    def RestrictionOneToOneDependency(self, alphaCompId, betaCompId):
        """
        Contraint describing that alphaCompId and betaCompId should be deployed on the same VM

        :param alphaCompId: id of the first component
        :param betaCompId: id of the second component
        :return: None
        """
        # Stub: concrete solver subclasses encode the actual constraint.
        print("Parent class RestrictionOneToOneDependency")
def RestrictionManyToManyDependency(self, alphaCompId, betaCompId, relation):
"""
The number of instances of component alphaCompId depends on the number of instances of component betaCompId
:param alphaCompId: id of the first component
:param betaCompId: id of the second component
:param relation: one of the strings in the set {"=", "<=", ">="}
"=": sum(instances of alpha component) == sum(instances of beta component)
"<=": sum(instances of alpha component) <= sum(instances of beta component)
">=": sum(instances of alpha component) >= sum(instances of beta component)
:return: None
"""
print("Parent class RestrictionManyToManyDependency")
def RestrictionOneToManyDependency(self, alphaCompId, betaCompId, noInstances):
"""
At each alphaCompId component should be deployed noInstances betaCompId components
:param alphaCompId: id of the first component
:param betaCompId: id of the second component
:param noInstances: depending instances number
:return: None
"""
print("Parent class RestrictionOneToManyDependency")
def RestrictionRangeBound(self, compsIdList, lowerBound, upperBound):
"""
Defines a lower and upper bound of instances that a component must have
:param compsIdList: list of components
:param lowerBound: a positive number
:param upperBound: a positive number
:return:
"""
for i in range(len(compsIdList)):
compsIdList[i] -= 1
print("Parent class RestrictionRangeBound")
def RestrictionFullDeployment(self, alphaCompId, notInConflictCompsIdList):
"""
Adds the fact that the component alphaCompId must be deployed on all machines except the ones that contain
components that alphaCompId alpha is in conflict with
:param alphaCompId: the component which must be fully deployed
:param notInConflictCompsIdList: the list of components that alphaCompId is not in conflict in
:return: None
"""
print("Parent class RestrictionFullDeployment")
def RestrictionRequireProvideDependency(
self, alphaCompId, betaCompId, alphaCompIdInstances, betaCompIdInstances
):
"""
The number of instances of component alpha depends on the number of instances of component beta
:param alphaCompId: id of the first component
:param betaCompId: id of the second component
:param alphaCompIdInstances: number of instances of component alphaCompId
:param betaCompIdInstances: number of instances of component betaCompId
:return: None
"""
# self.problem.logger.debug("RestrictionRequireProvideDependency: alphaCompId={}, betaCompId={}, alphaCompIdInstances={}, "
# "betaCompIdInstances={}".format(alphaCompId, betaCompId, alphaCompIdInstances, betaCompIdInstances))
print("Parent class RestrictionRequireProvideDependency")
def RestrictionAlphaOrBeta(self, alphaCompId, betaCompId):
"""
Describes the fact that alphaCompId or betaCompId not both
:param alphaCompId: id of the first component
:param betaCompId: id of the second component
:return:
"""
print("Parent class RestrictionAlphaOrBeta")
def run(self):
"""
Invokes the solving of the problem (solution and additional effect like creation of special files)
:param smt2lib: string indicating a file name storing the SMT2LIB encoding of the problem
:param smt2libsol: string indicating a file name storing the solution of the problem together with a model (if applicable)
:return:
"""
print("Parent class run")
def RestrictionFixComponentOnVM(self, comp_id, vm_id, value):
"""
Force placing component on a specific VM
:param comp_id: the ID of the component
:param vm_id: the ID of the VM
:return: None
"""
print("Parent RestrictionFixComponentOnVM")
def RestrictionPriceOrder(self, start_vm_id, end_vm_id):
print("Parent RestrictionPriceOrder")
def get_current_time(self):
now = datetime.now()
current_time = now.strftime("%H:%M:%S") | /sage-rec-engine-0.2.tar.gz/sage-rec-engine-0.2/Solvers/Core/ManuverSolver.py | 0.78502 | 0.36659 | ManuverSolver.py | pypi |
import time
from ortools.constraint_solver import pywrapcp
class CP_Solver_Got_Nr_Instances:
    """
    OR-Tools (legacy CP) solver that computes only the number of instances of
    each component -- no component-to-VM placement is modeled -- while
    minimizing the total instance count (the ``cost`` variable).
    """

    def __init__(self, problem, solver_type, nr_of_solution_limit):
        """
        :param problem: parsed problem description (provides ``nrComp``,
            ``restrictionsList`` and a ``logger``)
        :param solver_type: name of the variable/value search strategy,
            e.g. ``"FIRST_UNBOUND_MIN"``
        :param nr_of_solution_limit: maximum number of solutions to explore
        """
        self.problem = problem
        self.nrComp = problem.nrComp
        self.option = solver_type
        # variable used to add logic for price minimization
        self.__optimizePrice = False
        self.__nr_of_solution_limit = nr_of_solution_limit
        self.__defineSolver(self.option)

    def __defineSolver(self, option):
        """Create the CP solver, its search limits and the search strategy."""
        parameters = pywrapcp.Solver.DefaultSolverParameters()
        self.solver = pywrapcp.Solver("maneuver_CP_GOT_ll", parameters)
        self.cost = None
        # Search cut-offs. BUG FIX (comment only): 50000 ms is 50 seconds,
        # not "4 minutes" as the old comment claimed.
        time_limit = 50000  # milliseconds
        branch_limit = 100000000
        failures_limit = 100000000
        solutions_limit = self.__nr_of_solution_limit
        self.limits = self.solver.Limit(
            time_limit, branch_limit, failures_limit, solutions_limit, True
        )
        self.__defineVariables()
        variables = self.components
        # Map the textual option to (variable selection, value selection);
        # replaces the original nine-branch if/elif chain. An unknown option
        # leaves ``decision_builder`` unset, exactly as before.
        strategies = {
            "FIRST_UNBOUND_MIN": (self.solver.CHOOSE_FIRST_UNBOUND, self.solver.ASSIGN_MIN_VALUE),
            "FIRST_UNBOUND_MAX": (self.solver.CHOOSE_FIRST_UNBOUND, self.solver.ASSIGN_MAX_VALUE),
            "FIRST_UNBOUND_RANDOM": (self.solver.CHOOSE_FIRST_UNBOUND, self.solver.ASSIGN_RANDOM_VALUE),
            "LOWEST_MIN_MIN": (self.solver.CHOOSE_LOWEST_MIN, self.solver.ASSIGN_MIN_VALUE),
            "LOWEST_MIN_MAX": (self.solver.CHOOSE_LOWEST_MIN, self.solver.ASSIGN_MAX_VALUE),
            "LOWEST_MIN_RANDOM": (self.solver.CHOOSE_LOWEST_MIN, self.solver.ASSIGN_RANDOM_VALUE),
            "RANDOM_MIN": (self.solver.CHOOSE_RANDOM, self.solver.ASSIGN_MIN_VALUE),
            "RANDOM_MAX": (self.solver.CHOOSE_RANDOM, self.solver.ASSIGN_MAX_VALUE),
            "RANDOM_RANDOM": (self.solver.CHOOSE_RANDOM, self.solver.ASSIGN_RANDOM_VALUE),
        }
        if option in strategies:
            choose_variable, assign_value = strategies[option]
            self.decision_builder = self.solver.Phase(
                variables, choose_variable, assign_value
            )

    def __defineVariables(self):
        """
        Create one instance-count variable per component and the total cost.
        NOTE(review): renamed from the misspelled ``__defineVarinables``
        (name-mangled private, only called from ``__defineSolver``).
        """
        self.components = [
            self.solver.IntVar(0, 100000, "C%i" % j) for j in range(0, self.nrComp)
        ]
        self.cost = self.solver.IntVar(0, 1000000, "cost")
        self.solver.Add(
            self.cost == self.solver.Sum(component for component in self.components)
        )

    def RestrictionUpperLowerEqualBound(self, compsIdList, n1, operation):
        """
        Constraint that bounds the total number of instances of a group.
        :param compsIdList: list of component ids (0-based)
        :param n1: a positive limit for the instance count
        :param operation: one of the strings {"<=", "==", ">="}
            "<=": sum(compsIdList) <= n1
            ">=": sum(compsIdList) >= n1
            "==": sum(compsIdList) == n1
        """
        # Build the shared sum expression once instead of once per branch.
        total = self.solver.Sum([self.components[comp_id] for comp_id in compsIdList])
        if operation == "<=":
            self.solver.Add(total <= n1)
        elif operation == ">=":
            self.solver.Add(total >= n1)
        elif operation == "==":
            self.solver.Add(total == n1)

    def RestrictionRangeBound(self, compsIdList, n1, n2):
        """
        Constraint: n1 <= total instances of the listed components <= n2.
        :param compsIdList: list of component ids (0-based)
        :param n1: a positive lower limit for the instance count
        :param n2: a positive upper limit for the instance count
        """
        total = self.solver.Sum([self.components[comp_id] for comp_id in compsIdList])
        self.solver.Add(total >= n1)
        self.solver.Add(total <= n2)

    def RestrictionManyToManyDependency(self, alphaCompId, betaCompId, operation):
        """
        The number of instances of component alpha depends on the number of
        instances of component beta. Ids are 0-based (the restriction objects
        convert from the user's 1-based ids before calling).
        :param operation: one of the strings {"==", "<=", ">="}
        :return: None
        """
        if operation == "<=":
            self.solver.Add(self.components[alphaCompId] <= self.components[betaCompId])
        elif operation == ">=":
            self.solver.Add(self.components[alphaCompId] >= self.components[betaCompId])
        elif operation == "==":
            self.solver.Add(self.components[alphaCompId] == self.components[betaCompId])

    def RestrictionOneToManyDependency(self, alphaCompId, betaCompId, n):
        """
        For one alpha component, n beta components should be deployed:
        0 < n*alpha - beta <= n.
        (BUG FIX, comment only: the old docstring contained accidentally
        pasted shell-command text.)
        :return: None
        """
        self.solver.Add(
            n * self.components[alphaCompId] - self.components[betaCompId] > 0
        )
        self.solver.Add(
            n * self.components[alphaCompId] - self.components[betaCompId] <= n
        )

    def RestrictionAlphaOrBeta(self, alphaCompId, betaCompId):
        """Exactly one of the two components is deployed (exclusive or)."""
        self.solver.Add(
            self.solver.Sum(
                [self.components[alphaCompId] > 0, self.components[betaCompId] > 0]
            )
            == 1
        )

    def RestrictionRequireProvideDependency(self, alphaCompId, betaCompId, n, m):
        """n * alpha_instances <= m * beta_instances whenever beta is deployed."""
        self.solver.Add(
            n * self.components[alphaCompId] * (self.components[betaCompId] > 0)
            <= m * self.components[betaCompId]
        )

    def __runMinimizationProblem(self):
        """
        Minimize the total number of component instances.
        (BUG FIX, comment only: the old docstring said "virtual machines",
        but ``cost`` is the sum of the component instance counts.)
        :return: (startTime, stopTime) CPU times around the solve call
        """
        self.objective = self.solver.Minimize(self.cost, 1)
        # Create a solution collector keeping only the last (best) solution.
        self.collector = self.solver.LastSolutionCollector()
        self.collector.Add(self.components)
        self.collector.AddObjective(self.cost)
        startTime = time.process_time()
        self.solver.Solve(
            self.decision_builder, [self.limits, self.objective, self.collector]
        )
        stopTime = time.process_time()
        return startTime, stopTime

    def __runCPProblem(self):
        """
        Run the plain satisfaction problem, collecting every solution.
        :return: (startTime, stopTime) CPU times around the solve call
        """
        self.collector = self.solver.AllSolutionCollector()
        self.collector.Add(self.components)
        startTime = time.process_time()
        self.solver.Solve(self.decision_builder, [self.limits, self.collector])
        stopTime = time.process_time()
        return startTime, stopTime

    def __rebuild_solution(self, solutionIndex):
        """Extract the per-component instance counts of one collected solution."""
        return [self.collector.Value(solutionIndex, comp) for comp in self.components]

    def run(self):
        """
        Solve the minimization problem.
        :return: (elapsed CPU time, list with the instance count per component)
        """
        startTime, stopTime = self.__runMinimizationProblem()
        _components = []
        if self.collector.SolutionCount() > 0:
            best_solution = self.collector.SolutionCount() - 1
            _components = self.__rebuild_solution(best_solution)
        self.problem.logger.debug(
            "Nr of instances for each component: {}".format(_components)
        )
        return (stopTime - startTime), _components

    # The following restrictions are no-ops here -- presumably because this
    # solver models only instance counts, not component placement.
    def RestrictionConflict(self, comp_id, conflict_comps_id_list):
        return

    def constraintsHardware(self, components_values, available_configurations):
        return

    def RestrictionOneToOneDependency(self, compId, relatedCompId):
        return

    def RestrictionFullDeployment(self, compId, compsIdList):
        return

    def RestrictionManyToManyDependencyNew(self, alphaCompId, betaCompId, n, m):
        """Proportional dependency: m * beta_instances >= n * alpha_instances."""
        self.solver.Add(
            m * self.components[betaCompId] - n * self.components[alphaCompId] >= 0
        )
class Component:
    """
    Static description of one application component: hardware and network
    demands plus the bookkeeping fields filled in while the problem's
    restrictions are parsed.
    """

    def __init__(
        self,
        id,
        name,
        cpus,
        gpu,
        memory,
        storageSize,
        storageType,
        storageValue,
        netIn,
        netOut,
        netConnections,
        keywords,
        operatingSystem,
    ):
        self.id = id
        self.name = name
        # hardware description
        self.HC = cpus            # required CPU cores
        self.HCType = gpu         # GPU/CPU type requirement
        self.HM = memory          # required memory
        self.HS = storageSize     # required storage size
        self.HSType = storageType
        self.HSValue = storageValue
        # network description
        self.NIn = netIn
        self.NOut = netOut
        self.NConnections = netConnections
        # other information
        self.keywords = keywords
        self.operatingSystem = operatingSystem
        # bookkeeping filled in later by the restriction parser
        self.minimumNumberOfInstances = 0
        self.numberOfConflictComponents = 0
        self.orComponentsList = []
        self.dependenceComponentsList = set()
        self.conflictComponentsList = set()
        self.fullDeployedComponent = False
        self.numberOfInstancesDependences = set()

    def getInstances(self):
        """Return the minimum number of instances required for this component."""
        return self.minimumNumberOfInstances

    def getMinimumPossibleNumberOfInstances(self, comps_set):
        """
        Get the minimum number of instances for fixed components.

        If the number of instances of this component depends on a set of other
        components, the result is the smallest minimum over this component and
        all components it depends on.

        :param comps_set: mapping from component id to Component
        :return: the minimum possible number of instances
        """
        if not self.numberOfInstancesDependences:
            return self.minimumNumberOfInstances
        return min(
            self.minimumNumberOfInstances,
            min(
                comps_set[comp_id].minimumNumberOfInstances
                for comp_id in self.numberOfInstancesDependences
            ),
        )

    def __repr__(self):
        return (
            "ID: {} Name: {} Hardware [CPUs: {} GPU: {} Memory: {} StorageSize: {} StorageType: {} StorageValue: {}]"
            " Network[ In: {} Out: {} Connections: {} ] Keywords: {} OperatingSystem: {}".format(
                self.id,
                self.name,
                self.HC,
                self.HCType,
                self.HM,
                self.HS,
                self.HSType,
                self.HSValue,
                self.NIn,
                self.NOut,
                self.NConnections,
                self.keywords,
                self.operatingSystem,
            )
        )

    def getComponentHardWareResources(self):
        """
        Used by the CP solver to add restrictions regarding hardware requirements.
        :return: a list [cpu, memory, storage] of hardware restrictions
        """
        return [self.HC, self.HM, self.HS]

    def getResorces(self):
        """
        Used by the EA solver to obtain an aggregate of the component's
        hardware resources (product of cpu, memory and storage).
        NOTE(review): the name is misspelled but kept for existing callers;
        prefer the ``getResources`` alias in new code.
        """
        return self.HC * self.HM * self.HS

    # Correctly spelled, backward-compatible alias.
    getResources = getResorces
import numpy
class RestrictionAlphaOrBeta:
def __init__(self, alphaCompId, betaCompId, problem):
self.alphaCompId = alphaCompId - 1
self.betaCompId = betaCompId - 1
problem.componentsList[self.alphaCompId].orComponentsList.append(
self.betaCompId
)
problem.componentsList[self.betaCompId].orComponentsList.append(
self.alphaCompId
)
problem.logger.info(self)
def generateRestrictions(self, solver):
solver.RestrictionAlphaOrBeta(self.alphaCompId, self.betaCompId)
def __repr__(self):
return "RestrictionAlphaOrBeta: Component with id {} or component with id {} id deployed".format(
self.alphaCompId, self.betaCompId
)
def eval(self, solutionMatrix):
"""
Counts how many components conflicts are not respected into current solution
:param solutionMatrix:
:return:
"""
if (numpy.sum(solutionMatrix[self.alphaCompId]) > 0) + (
numpy.sum(solutionMatrix[self.betaCompId]) > 0
) == 1:
return 0
return 1
class RestrictionConflict:
    """Conflict restriction: a component must not share a VM with the listed ones."""

    def __init__(self, compId, compsIsList, problem):
        # Convert 1-based ids to 0-based indices and mark every conflict
        # symmetrically in the problem's conflict matrix R.
        self.compId = compId - 1
        self.conflictCompsIdList = [other_id - 1 for other_id in compsIsList]
        for conflict_id in self.conflictCompsIdList:
            problem.R[self.compId][conflict_id] = 1
            problem.R[conflict_id][self.compId] = 1
        problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionConflict(self.compId, self.conflictCompsIdList)

    def __repr__(self):
        return "RestrictionConflict: component with id {} in conflict with components with id {}".format(
            self.compId, self.conflictCompsIdList
        )

    def eval(self, solutionMatrix):
        """
        Count conflict violations: one for every (conflicting component, VM)
        pair where both this component and the conflicting one share the VM.
        :param solutionMatrix: component x VM assignment matrix
        """
        nr_vms = len(solutionMatrix[0])
        violations = 0
        for conflict_id in self.conflictCompsIdList:
            violations += sum(
                1
                for vm in range(nr_vms)
                if solutionMatrix[self.compId][vm] == 1
                and solutionMatrix[conflict_id][vm] == 1
            )
        return violations
import numpy
class RestrictionOneToOneDependency:  # DependencesCorelation:
    """Co-location restriction: two components must sit on exactly the same VMs."""

    def __init__(self, component, dependentComponent, problem):
        # Convert 1-based ids to 0-based indices and mark the dependency
        # symmetrically in the problem's dependency matrix D.
        self.compId = component - 1
        self.relatedCompId = dependentComponent - 1
        for row, col in (
            (self.compId, self.relatedCompId),
            (self.relatedCompId, self.compId),
        ):
            problem.D[row][col] = 1
        problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionOneToOneDependency(self.compId, self.relatedCompId)

    def __repr__(self):
        return "OneToOneDependency restriction: if component with id {} is on a VM component with id {} is also".format(
            self.compId, self.relatedCompId
        )

    def eval(self, solutionMatrix):
        """Count the machines on which exactly one of the two components sits."""
        nr_vms = len(solutionMatrix[0])
        return sum(
            1
            for vm in range(nr_vms)
            if solutionMatrix[self.compId][vm] != solutionMatrix[self.relatedCompId][vm]
        )
class RestrictionManyToManyDependency:  # DependencesOnTotalNumber:
    """Relates the instance totals of two components via ==, <= or >=."""

    def __init__(self, alphaComp, betaComp, sign, problem):
        # 1-based ids from the problem description become 0-based indices.
        self.alphaCompId = alphaComp - 1
        self.betaCompId = betaComp - 1
        self.sign = sign
        self.problem = problem
        self.problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionManyToManyDependency(
            self.alphaCompId, self.betaCompId, self.sign
        )

    def __repr__(self):
        return "RestrictionManyToManyDependency: component id {} number of instances on all VM is {} then component id {} number".format(
            self.alphaCompId, self.sign, self.betaCompId
        )

    def eval(self, solutionMatrix):
        """Return 1 when the relation between the two instance totals is violated."""
        total_alpha = numpy.sum(solutionMatrix[self.alphaCompId])
        total_beta = numpy.sum(solutionMatrix[self.betaCompId])
        satisfied_by_sign = {
            "==": total_alpha == total_beta,
            ">=": total_alpha >= total_beta,
            "<=": total_alpha <= total_beta,
        }
        # An unrecognised sign counts as satisfied, matching the original.
        if self.sign in satisfied_by_sign and not satisfied_by_sign[self.sign]:
            return 1
        return 0
class RestrictionManyToManyDependencyNew:  # DependencesOnTotalNumber:
    """Proportional (n:m) dependency between two components' instance totals."""

    def __init__(self, alphaComp, betaComp, n, m, problem):
        # Store 0-based component ids and the n:m proportionality factors.
        self.alphaCompId = alphaComp - 1
        self.betaCompId = betaComp - 1
        self.n = n
        self.m = m
        self.problem = problem
        self.problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionManyToManyDependencyNew(
            self.alphaCompId, self.betaCompId, self.n, self.m
        )

    def __repr__(self):
        return "RestrictionManyToManyDependencyNew: component id {} number of instances on all VM is {} then component id {} {} number".format(
            self.alphaCompId, self.n, self.m, self.betaCompId
        )

    def eval(self, solutionMatrix):
        # Not checked at evaluation time; always reports no violation.
        return 0
class RestrictionOneToManyDependency:
    """For each instance of one component, n instances of another are required."""

    def __init__(self, component, dependentComponent, instancesNumber, problem):
        self.compAlphaId = component - 1  # provider
        self.compBetaId = dependentComponent - 1  # consumer
        self.n = instancesNumber
        self.problem = problem
        self.problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionOneToManyDependency(self.compAlphaId, self.compBetaId, self.n)

    def __repr__(self):
        return "RestrictionOneToManyDependency: for one component with id {} should be deployed {} components with id {}".format(
            self.compAlphaId, self.n, self.compBetaId
        )

    def eval(self, solutionMatrix):
        """Return 1 when the 1:n ratio is broken (0 <= n*alpha - beta < n must hold)."""
        total_alpha = numpy.sum(solutionMatrix[self.compAlphaId])
        total_beta = numpy.sum(solutionMatrix[self.compBetaId])
        slack = self.n * total_alpha - total_beta
        violated = 0
        if slack < 0 or slack >= self.n:
            violated = 1
            self.problem.logger.debug(
                "RestrictionOneToManyDependency violated: {} {} {}".format(
                    violated, slack, self.n
                )
            )
        return violated
import numpy
class RestrictionFullDeployment:
def __init__(self, component, componentList, problem):
self.compId = component - 1
self.compsIdList = []
for compId in componentList:
self.compsIdList.append(compId - 1)
problem.R[self.compId][compId - 1] = 1
problem.R[compId - 1][self.compId] = 1
self.problem = problem
problem.componentsList[self.compId].fullDeployedComponent = True
problem.logger.info(self)
def generateRestrictions(self, solver):
solver.RestrictionFullDeployment(self.compId, self.compsIdList)
def __repr__(self):
return (
"RestrictionFullDeployment: component with id {} should be deployed on all VM that does not contain "
"components which it is in conflict, conflicted components list {}".format(
self.compId, self.compsIdList
)
)
def eval(self, solutionMatrix):
_viotatedRestrictions = 0
vmsAquired = numpy.sum(
solutionMatrix, axis=0
) # nr componente pe fiecare masina
for j in range(len(vmsAquired)):
if vmsAquired[j] != 0:
conflict = False
for conflictId in self.compsIdList:
if solutionMatrix[conflictId][j] == 1:
conflict = True # a component that is in conflict with full deploy component
break
if conflict and solutionMatrix[self.compId][j] == 1:
_viotatedRestrictions += 1
elif conflict == False and solutionMatrix[self.compId][j] == 0:
_viotatedRestrictions += 1
# print("Full deployment check: ", self)
if _viotatedRestrictions > 0:
self.problem.logger.debug(
"violated full deployment: {}".format(_viotatedRestrictions)
)
return _viotatedRestrictions
class RestrictionUpperLowerEqualBound:
    """Bounds (==, >=, <=) the total instance count of a group of components."""

    def __init__(self, componentList, sign, n, problem):
        # Convert 1-based ids to 0-based indices.
        self.compsIdList = [comp_id - 1 for comp_id in componentList]
        self.sign = sign
        self.n = n
        self.problem = problem
        self.problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionUpperLowerEqualBound(self.compsIdList, self.n, self.sign)

    def __repr__(self):
        return "RestrictionUpperLowerEqualBound: components with ids {} {} {}".format(
            self.compsIdList, self.sign, self.n
        )

    def eval(self, solutionMatrix):
        """
        Return 1 when the bound on the group's total instance count is
        violated, 0 otherwise.
        (Fixes: local variable no longer shadows the ``sum`` builtin and the
        inner loop no longer clobbers the outer loop variable.)
        """
        total = sum(
            numpy.sum(solutionMatrix[comp_id]) for comp_id in self.compsIdList
        )
        violated = 0
        if self.sign == "==" and total != self.n:
            violated = 1
        elif self.sign == ">=" and total < self.n:
            violated = 1
        elif self.sign == "<=" and total > self.n:
            violated = 1
        if violated and total == 0:
            # Rescue: when none of the bounded components is deployed but one
            # of them participates in an alpha-or-beta constraint whose
            # alternative IS deployed, the bound is considered satisfied.
            # NOTE(review): the original (Romanian) comment admitted this is
            # not fully correct for the general case.
            for comp_id in self.compsIdList:
                alternatives = self.problem.componentsList[comp_id].orComponentsList
                if alternatives:
                    deployed_alternatives = sum(
                        numpy.sum(solutionMatrix[alt_id]) for alt_id in alternatives
                    )
                    if deployed_alternatives > 0:
                        violated = 0
        if violated == 1:
            self.problem.logger.debug(
                "RestrictionUpperLowerEqualBound violated:(sum_comp {} = {}) {} {}".format(
                    self.compsIdList, total, self.sign, self.n
                )
            )
        return violated
class RestrictionRangeBound:
    """Range bound: n1 <= total instances of the listed components <= n2."""

    def __init__(self, componentList, n1, n2, problem):
        # Convert 1-based ids to 0-based indices.
        self.compsIdList = [comp_id - 1 for comp_id in componentList]
        self.n1 = n1
        self.n2 = n2
        # BUG FIX: ``eval`` logs through ``self.problem``, but the original
        # __init__ never stored it, so every violation raised AttributeError.
        self.problem = problem
        problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionRangeBound(self.compsIdList, self.n1, self.n2)

    def __repr__(self):
        return "RestrictionRangeBound: {} <= components with id list {} <= {}".format(
            self.n1, self.compsIdList, self.n2
        )

    def eval(self, solutionMatrix):
        """
        Return 1 when the group's total instance count is outside [n1, n2].
        """
        # BUG FIX: the original read ``self.compsIdsList`` (typo), which
        # raised AttributeError on every call.
        total = sum(
            numpy.sum(solutionMatrix[comp_id]) for comp_id in self.compsIdList
        )
        violated = 0
        if total < self.n1 or total > self.n2:
            violated = 1
            self.problem.logger.debug(
                "RestrictionRangeBound violated: {} <= (sum_comp {} = {})<= {}".format(
                    self.n1, self.compsIdList, total, self.n2
                )
            )
        return violated
class RestrictionRequireProvideDependency:
    """n:m require/provide dependency between a consumer and a provider component."""

    def __init__(self, alphaComp, betaComp, nAlphaBeta, nBetaAlpha, problem):
        self.alphaCompId = alphaComp - 1  # consumer
        self.betaCompId = betaComp - 1  # provider
        self.nAlphaBeta = nAlphaBeta  # n
        self.nBetaAlpha = nBetaAlpha  # m
        self.problem = problem
        problem.logger.info(self)

    def generateRestrictions(self, solver):
        """Forward the restriction to the concrete solver."""
        solver.RestrictionRequireProvideDependency(
            self.alphaCompId, self.betaCompId, self.nAlphaBeta, self.nBetaAlpha
        )

    def __repr__(self):
        return (
            "RestrictionRequireProvideDependency: each component with id {} consumes at least {} components with id {}, "
            "each component with id {} serves at least {} components with id {}".format(
                self.alphaCompId,
                self.nAlphaBeta,
                self.betaCompId,
                self.betaCompId,
                self.nBetaAlpha,
                self.alphaCompId,
            )
        )

    def eval(self, solutionMatrix):
        """Return 1 when n*alpha exceeds m*beta while beta is deployed, else 0."""
        total_alpha = numpy.sum(solutionMatrix[self.alphaCompId])
        total_beta = numpy.sum(solutionMatrix[self.betaCompId])
        violated = 0
        if total_beta != 0 and self.nAlphaBeta * total_alpha > self.nBetaAlpha * total_beta:
            violated = 1
            self.problem.logger.debug(
                "RestrictionRequireProvideDependency violated {} {} * {} <= {} * {}".format(
                    violated,
                    self.nAlphaBeta,
                    total_alpha,
                    self.nBetaAlpha,
                    total_beta,
                )
            )
        return violated
from Solvers.Formalization1.CPLEX.CP_CPLEX_Solver import CPlex_Solver_Parent
from Solvers.Core.ManuverSolver_SB import ManuverSolver_SB
class CPlex_Solver_SB_Enc_AllCombinationsOffers(CPlex_Solver_Parent, ManuverSolver_SB):
    """
    CPLEX encoding (formalization 1): each VM carries one integer ``vmType``
    variable; the offer characteristics (CPU, memory, storage, price) are
    linked to the chosen type through indicator constraints.
    """

    def _define_variables(self):
        """
        Creates the variables used in the solver and the constraints on them
        as well as others (offers encoding, usage vector, etc.)
        :return: None
        """
        # VM usage vector vm in {0, 1}, k = 1..M; vm_k = 1 if at least one
        # component is assigned to vm_k.
        self.vm = {
            j: self.model.binary_var(name="vm{0}".format(j + 1))
            for j in range(self.nr_vms)
        }
        # Assignment matrix a_{alpha,k}: 1 if component alpha is on machine k,
        # 0 otherwise.
        self.a = {
            (i, j): self.model.binary_var(name="C{0}_VM{1}".format(i + 1, j + 1))
            for i in range(self.nr_comps)
            for j in range(self.nr_vms)
        }
        # vm_j <=> machine j hosts at least one component.
        for j in range(self.nr_vms):
            self.model.add_equivalence(
                self.vm[j],
                self.model.sum(self.a[i, j] for i in range(self.nr_comps)) >= 1,
                name="c{0}_vm_allocated".format(j),
            )
        # Integer offer type per machine: 0 = no offer, 1..maxType = offer index.
        maxType = len(self.offers_list)
        self.vmType = {
            (j): self.model.integer_var(
                lb=0, ub=maxType, name="vmType{0}".format(j + 1)
            )
            for j in range(self.nr_vms)
        }
        # Per-machine provided resources, bounded by the extremes over all offers.
        minProc = min(self.offers_list[t][1] for t in range(len(self.offers_list)))
        maxProc = max(self.offers_list[t][1] for t in range(len(self.offers_list)))
        self.ProcProv = {
            (j): self.model.integer_var(
                lb=minProc, ub=maxProc, name="ProcProv{0}".format(j + 1)
            )
            for j in range(self.nr_vms)
        }
        minMem = min(self.offers_list[t][2] for t in range(len(self.offers_list)))
        maxMem = max(self.offers_list[t][2] for t in range(len(self.offers_list)))
        self.MemProv = {
            (j): self.model.integer_var(
                lb=minMem, ub=maxMem, name="MemProv{0}".format(j + 1)
            )
            for j in range(self.nr_vms)
        }
        minSto = min(self.offers_list[t][3] for t in range(len(self.offers_list)))
        maxSto = max(self.offers_list[t][3] for t in range(len(self.offers_list)))
        self.StorageProv = {
            (j): self.model.integer_var(
                lb=minSto, ub=maxSto, name="StorageProv{0}".format(j + 1)
            )
            for j in range(self.nr_vms)
        }
        maxPrice = max(
            self.offers_list[t][len(self.offers_list[0]) - 1]
            for t in range(len(self.offers_list))
        )
        self.PriceProv = {
            (j): self.model.integer_var(
                lb=0, ub=maxPrice, name="PriceProv{0}".format(j + 1)
            )
            for j in range(self.nr_vms)
        }
        # If a machine is not leased then its price is 0.
        for j in range(self.nr_vms):
            self.model.add_indicator(
                self.vm[j],
                self.PriceProv[j] == 0,
                active_value=0,
                name="c{0}_vm_free_price_0".format(j),
            )

    def _hardware_and_offers_restrictionns(self, scaleFactor):
        """
        Adds the hardware-capacity constraints and ties each machine's
        resources and price to its selected offer type.
        :param scaleFactor: scaling hook kept for interface compatibility
        :return: None
        """
        # Capacity: the components placed on machine k must fit its resources.
        for k in range(self.nr_vms):
            self.model.add_constraint(
                ct=self.model.sum(
                    self.a[i, k] * (self.problem.componentsList[i].HC)
                    for i in range(self.nr_comps)
                )
                <= self.ProcProv[k],
                # BUG FIX: constraint names now include the VM index; the
                # original reused the same name for every machine.
                ctname="c{0}_hard_cpu".format(k),
            )
            self.model.add_constraint(
                ct=self.model.sum(
                    self.a[i, k] * (self.problem.componentsList[i].HM)
                    for i in range(self.nr_comps)
                )
                <= self.MemProv[k],
                ctname="c{0}_hard_mem".format(k),
            )
            self.model.add_constraint(
                ct=self.model.sum(
                    self.a[i, k] * (self.problem.componentsList[i].HS)
                    for i in range(self.nr_comps)
                )
                <= self.StorageProv[k],
                ctname="c{0}_hard_storage".format(k),
            )
        # Link every possible offer type to the machine's resources and price.
        index_constraint = 0
        for vm_id in range(self.nr_vms):
            cnt = 0
            for offer in self.offers_list:
                cnt += 1
                index_constraint += 1
                var = self.model.binary_var(name="aux_hard{0}".format(index_constraint))
                self.model.add_equivalence(var, self.vmType[vm_id] == cnt)
                # BUG FIX: the four indicator names below previously had no
                # "{0}" placeholder, so .format(vm_id) was a no-op and every
                # constraint shared one name; vm and offer indices added.
                self.model.add_indicator(
                    var,
                    self.PriceProv[vm_id] == int(offer[len(self.offers_list[0]) - 1]),
                    active_value=1,
                    name="c{0}_of{1}_order_vm_price".format(vm_id, cnt),
                )
                self.model.add_indicator(
                    var,
                    (self.ProcProv[vm_id] == int(offer[1])),
                    name="c{0}_of{1}_order_vm_cpu".format(vm_id, cnt),
                )
                self.model.add_indicator(
                    var,
                    (self.MemProv[vm_id] == int(offer[2])),
                    name="c{0}_of{1}_order_vm_memory".format(vm_id, cnt),
                )
                self.model.add_indicator(
                    var,
                    (self.StorageProv[vm_id] == int(offer[3])),
                    name="c{0}_of{1}_order_vm_storage".format(vm_id, cnt),
                )
            # A leased machine must have a real offer type (>= 1).
            # (Unused ``lst`` list and ``ct`` bindings from the original
            # implementation were removed.)
            self.model.add_indicator(self.vm[vm_id], self.vmType[vm_id] >= 1)

    def _same_type(self, var, vm_id):
        """var <=> machines vm_id and vm_id+1 have the same offer type."""
        self.model.add_equivalence(var, self.vmType[vm_id] == self.vmType[vm_id + 1])

    def _get_solution_vm_type(self):
        """Return the solved offer type (0 = unused) for every machine."""
        return [var.solution_value for var in self.vmType.values()]
from Solvers.Formalization2.CPLEX.CP_CPLEX_Solver import CPlex_Solver_Parent
from Solvers.Core.ManuverSolver_SB import ManuverSolver_SB
class CPlex_Solver_SB_Enc_AllCombinationsOffers(CPlex_Solver_Parent, ManuverSolver_SB):
def _define_variables(self):
"""
Creates the variables used in the solver and the constraints on them as well as others (offers encoding,
usage vector, etc.)
:return: None
"""
# Assignment matrix a_{alpha,k}: 1 if component alpha is on machine k, 0 otherwise
self.a = {
(i, j, k): self.model.binary_var(
name="C{0}_VM{1}_OF{2}".format(i + 1, j + 1, k + 1)
)
for i in range(self.nr_comps)
for j in range(self.nr_vms)
for k in range(self.nrOffers)
}
# Variables for offers description
self.vmType = {
(j, k): self.model.binary_var(name="vmType{0}_of{1}".format(j + 1, k + 1))
for j in range(self.nr_vms)
for k in range(self.nrOffers)
}
maxPrice = max(
self.offers_list[t][len(self.offers_list[0]) - 1]
for t in range(len(self.offers_list))
)
self.PriceProv = {
(j): self.model.integer_var(
lb=0, ub=maxPrice, name="PriceProv{0}".format(j + 1)
)
for j in range(self.nr_vms)
}
# A machine can only have one type
for j in range(self.nr_vms):
self.model.add_constraint(
ct=self.model.sum(self.vmType[j, k] for k in range(self.nrOffers)) <= 1
)
# A component can only have one type
for i in range(self.nr_comps):
for j in range(self.nr_vms):
self.model.add_constraint(
ct=self.model.sum(self.a[i, j, k] for k in range(self.nrOffers))
<= 1
)
def _hardware_and_offers_restrictionns(self, scaleFactor):
"""
Describes the hardware requirements for each component
:param componentsRequirements: list of components requirements as given by the user
:return: None
"""
for j in range(self.nr_vms):
for k in range(self.nrOffers):
self.model.add_constraint(
ct=self.model.sum(
self.a[i, j, k] * (self.problem.componentsList[i].HC)
for i in range(self.nr_comps)
)
<= self.offers_list[k][1] * self.vmType[j, k],
ctname="c_hard_cpu",
)
self.model.add_constraint(
ct=self.model.sum(
self.a[i, j, k] * (self.problem.componentsList[i].HM)
for i in range(self.nr_comps)
)
<= self.offers_list[k][2] * self.vmType[j, k],
ctname="c_hard_mem",
)
self.model.add_constraint(
ct=self.model.sum(
self.a[i, j, k] * (self.problem.componentsList[i].HS)
for i in range(self.nr_comps)
)
<= self.offers_list[k][3] * self.vmType[j, k],
ctname="c_hard_storage",
)
self.model.add_if_then(
self.vmType[j, k] == 1, self.PriceProv[j] == self.offers_list[k][-1]
)
    def _same_type(self, var, vm_id):
        # Tie the boolean `var` to the condition "machine vm_id and machine
        # vm_id + 1 have the same type variable" via a docplex equivalence.
        # NOTE(review): elsewhere in this class `self.vmType` is keyed by
        # (vm, offer) tuples, but here it is indexed by a single id — confirm
        # callers pass a compatible key, otherwise this raises KeyError.
        self.model.add_equivalence(var, self.vmType[vm_id] == self.vmType[vm_id + 1])
def _get_solution_vm_type(self):
vm_types = []
for index, var in self.vmType.items():
vm_types.append(var.solution_value)
return vm_types | /sage-rec-engine-0.2.tar.gz/sage-rec-engine-0.2/Solvers/Formalization2/CPLEX/CP_CPLEX_Solver_Enc_AllCombinationsOffers.py | 0.700792 | 0.348202 | CP_CPLEX_Solver_Enc_AllCombinationsOffers.py | pypi |
from src.conflictGraph import getMaxClique
from json import load
import src.init
"""
This script is used when employing symmetry breaking techniques with MiniZinc Models.
The script generates the required constraints which are to be added for the respective
symmetry breaker.
"""
def addBreakerConstraint(symmetry_breaker: str = None, start: int = 0):
    """
    Constructs the constraint for symmetry breakers other than FV.

    The breaker definitions are read from the MiniZinc symmetry-breaking
    configuration file; the entry whose ``Tag`` matches ``symmetry_breaker``
    supplies a constraint template in which the ``[Start]`` placeholder is
    replaced by ``start``.

    Args:
        symmetry_breaker (str, optional): The tag of the symmetry breaker. Defaults to None.
        start (int, optional): The index of the first VM affected by the symmetry breaker. Defaults to 0.

    Returns:
        constraint (str or None): The constraint to be added to the model, or
        None when no entry with the given tag exists.
    """
    with open(
        f"{src.init.settings['MiniZinc']['symmetry_breaking_config']}", "r"
    ) as file:
        settings = load(file)
    # First definition whose tag matches; None when absent.
    constraint = next(
        (
            breaker["Constraint"]
            for breaker in settings["SB-Constraints"]
            if breaker["Tag"] == symmetry_breaker
        ),
        None,
    )
    # PEP 8: compare against None with `is not`, never `!=`.
    if constraint is not None:
        constraint = constraint.replace("[Start]", str(start))
    return constraint
def buildComponentConstraints(
    component: str, inst: int, startVM: int, Clist: list = None
):
    """
    Returns the list of constraints for setting the FV (fixed-values) script
    for a specific component.

    Args:
        component (str): The name of the component.
        inst (int): The number of instances for that component.
        startVM (int): The first machine to be assigned (0-based; the emitted
            MiniZinc indices are 1-based).
        Clist (list, optional): A list of components in conflict with the
            current component. Defaults to an empty list.

    Returns:
        constraints (list): A list of constraints to be added.
        endVM (int): The first machine free of assignment.
    """
    # Avoid the mutable-default-argument pitfall: a literal [] default is
    # shared across calls.
    if Clist is None:
        Clist = []
    endVM = startVM + inst
    # Pin each instance of this component onto its own machine.
    constraints = [
        f"constraint AssignmentMatrix[{component},{startVM+i+1}] = 1;\n"
        for i in range(inst)
    ]
    # Forbid conflicting components on those machines.  All assignment
    # constraints are emitted before any exclusion, preserving the original
    # output ordering.
    for i in range(inst):
        for c in Clist:
            if component != c:
                constraints.append(
                    f"constraint AssignmentMatrix[{c}, {startVM+i+1}] = 0;\n"
                )
    return constraints, endVM
def getFVConstraints(use_case: str, scaling_components: list = None):
    """
    Returns a list of constraints to be inserted in the MiniZinc model.

    Args:
        use_case (str): The name of the use-case.
        scaling_components (list, optional): A list of scaling components and
            their instances. Defaults to an empty list.

    Returns:
        constraints (list): A list of constraints to be added inside the
            MiniZinc model.
        start (int): The first machine index left unassigned.
    """
    # Avoid a shared mutable default argument.
    if scaling_components is None:
        scaling_components = []
    # `conflict_map` maps component names to the instance ids they cover
    # (renamed from `map`, which shadowed the builtin).
    conflict_map, instances = getMaxClique(use_case, scaling_components)
    # Count, per component, how many clique instances belong to it.
    # The clique format is: { "COMP_NAME": INST }
    clique = {}
    for inst in instances:
        for key in conflict_map.keys():
            if inst in conflict_map[key]:
                # dict.get replaces the original try/except KeyError dance.
                clique[key] = clique.get(key, 0) + 1
                break
    Clist = list(clique.keys())
    constraints = []
    start = 0
    for component, count in clique.items():
        component_constraints, start = buildComponentConstraints(
            component, count, start, Clist
        )
        constraints.extend(component_constraints)
    return constraints, start
def getSymmetryBreakerConstraints(
    sym_breaker: str, use_case: str, scaling_components: list = []
):
    """
    Constructs all constraints for a symmetry breaker.

    Args:
        sym_breaker (str): The tag of the symmetry breaking technique.
        use_case (str): The name of the use case.
        scaling_components (list, optional): A list of scaling components and
            their instances. Defaults to []

    Returns:
        constraints (list): A list of all additional constraints.
    """
    all_constraints = []
    first_free_vm = 0
    if sym_breaker.startswith("FV"):
        # Fixed-values breakers are tagged "FV<rest>": emit the FV
        # constraints first, then continue with the remaining tag.
        fv_constraints, first_free_vm = getFVConstraints(use_case, scaling_components)
        all_constraints.extend(fv_constraints)
        sym_breaker = sym_breaker[2:]
    all_constraints.append(addBreakerConstraint(sym_breaker, first_free_vm + 1))
    return all_constraints
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.