text stringlengths 81 112k |
|---|
Constrói uma :class:`RespostaAtivarSAT` a partir do retorno
informado.
:param unicode retorno: Retorno da função ``AtivarSAT``.
def analisar(retorno):
    """Build a :class:`RespostaAtivarSAT` from the given return value.

    :param unicode retorno: Return value of the ``AtivarSAT`` function.
    :raises ExcecaoRespostaSAT: if the response code does not indicate success.
    """
    campos_ativacao = (
            ('numeroSessao', int),
            ('EEEEE', unicode),
            ('mensagem', unicode),
            ('cod', unicode),
            ('mensagemSEFAZ', unicode),
            ('CSR', unicode),
        )
    # if activation fails, the standard response field layout is expected
    campos_alternativos = [RespostaSAT.CAMPOS]
    resposta = analisar_retorno(forcar_unicode(retorno),
            funcao='AtivarSAT',
            classe_resposta=RespostaAtivarSAT,
            campos=campos_ativacao,
            campos_alternativos=campos_alternativos)
    codigos_sucesso = (ATIVADO_CORRETAMENTE, CSR_ICPBRASIL_CRIADO_SUCESSO)
    if resposta.EEEEE not in codigos_sucesso:
        raise ExcecaoRespostaSAT(resposta)
    return resposta
Create a view task for this hardware source.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the view. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the view. Pass None for defaults.
:type channels_enabled: List of booleans.
:param buffer_size: The buffer size if using the grab_earliest method. Default is 1.
:type buffer_size: int
:return: The :py:class:`ViewTask` object.
:rtype: :py:class:`ViewTask`
Callers should call close on the returned task when finished.
See :py:class:`ViewTask` for examples of how to use.
def create_view_task(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, buffer_size: int=1) -> ViewTask:
    """Create a view task for this hardware source.

    .. versionadded:: 1.0

    :param frame_parameters: Frame parameters for the view; pass None to use the defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: Which channels are enabled for the view; pass None to use the defaults.
    :type channels_enabled: List of booleans.
    :param buffer_size: Buffer size used by the grab_earliest method; defaults to 1.
    :type buffer_size: int
    :return: The :py:class:`ViewTask` object.
    :rtype: :py:class:`ViewTask`

    Callers are responsible for calling close on the returned task when finished;
    see :py:class:`ViewTask` for usage examples.
    """
    ...
Load configuration from yaml source(s), cached to only run once
def from_yaml():
    """ Load configuration from yaml source(s), cached to only run once """
    # start from the packaged defaults, then layer user config files on top
    defaults_text = snippets.get_snippet_content('hatchery.yml')
    result = yaml.load(defaults_text, Loader=yaml.RoundTripLoader)
    for location in CONFIG_LOCATIONS:
        location = os.path.expanduser(location)
        if not os.path.isfile(location):
            continue
        with open(location) as stream:
            overrides = yaml.load(stream, Loader=yaml.RoundTripLoader)
        if overrides is None:
            continue
        for key, value in overrides.items():
            # only keys already present in the defaults are legal
            if key not in result.keys():
                raise ConfigError(
                    'found garbage key "{}" in {}'.format(key, location)
                )
            result[key] = value
    return result
Load configuration from .pypirc file, cached to only run once
def from_pypirc(pypi_repository):
    """ Load configuration from .pypirc file, cached to only run once

    :param pypi_repository: name of the index-server entry to look up
    :return: dict of the options found in the repository's pypirc section
    :raises ConfigError: if the repository is not configured in any pypirc file
    """
    ret = {}
    for pypirc_path in PYPIRC_LOCATIONS:
        pypirc_path = os.path.expanduser(pypirc_path)
        if not os.path.isfile(pypirc_path):
            continue
        # SafeConfigParser was deprecated in Python 3.2 and removed in 3.12;
        # ConfigParser is the drop-in replacement
        parser = configparser.ConfigParser()
        parser.read(pypirc_path)
        if 'distutils' not in parser.sections():
            continue
        if 'index-servers' not in parser.options('distutils'):
            continue
        # compare against the whitespace-separated server names instead of a
        # substring test (which would wrongly match e.g. "pypi" in "testpypi")
        index_servers = parser.get('distutils', 'index-servers').split()
        if pypi_repository not in index_servers:
            continue
        if pypi_repository in parser.sections():
            for option in parser.options(pypi_repository):
                ret[option] = parser.get(pypi_repository, option)
    if not ret:
        raise ConfigError(
            'repository does not appear to be configured in pypirc ({})'.format(pypi_repository) +
            ', remember that it needs an entry in [distutils] and its own section'
        )
    return ret
Create a temporary pypirc file for interaction with twine
def pypirc_temp(index_url):
    """ Create a temporary pypirc file for interaction with twine

    :param index_url: url of the index server to substitute into the template
    :return: path of the temporary pypirc file; the caller is responsible for
        deleting it when finished
    """
    # mkstemp instead of NamedTemporaryFile(delete=False): the original left a
    # second handle open while re-opening the file by name, which fails on
    # Windows; it also printed the path, which looked like leftover debugging.
    fd, pypirc_path = tempfile.mkstemp(suffix='.pypirc')
    with os.fdopen(fd, 'w') as fh:
        fh.write(PYPIRC_TEMPLATE.format(index_name=PYPIRC_TEMP_INDEX_NAME, index_url=index_url))
    return pypirc_path
Get a versioned interface matching the given version and ui_version.
version is a string in the form "1.0.2".
def get_api(version: str, ui_version: str=None) -> API_1:
    """Get a versioned interface matching the given version and ui_version.

    version is a string in the form "1.0.2".
    """
    if not ui_version:
        ui_version = "~1.0"
    return _get_api_with_app(version, ui_version, ApplicationModule.app)
Return the mask created by this graphic as extended data.
.. versionadded:: 1.0
Scriptable: Yes
def mask_xdata_with_shape(self, shape: DataAndMetadata.ShapeType) -> DataAndMetadata.DataAndMetadata:
    """Return the mask created by this graphic as extended data.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    mask_data = self._graphic.get_mask(shape)
    xdata = DataAndMetadata.DataAndMetadata.from_data(mask_data)
    return xdata
Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
def end(self, value: typing.Union[float, NormPointType]) -> None:
    """Set the end property in relative coordinates.

    End may be a float when graphic is an Interval or a tuple (y, x) when
    graphic is a Line.
    """
    self.set_property("end", value)
Set the start property in relative coordinates.
Start may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
def start(self, value: typing.Union[float, NormPointType]) -> None:
    """Set the start property in relative coordinates.

    Start may be a float when graphic is an Interval or a tuple (y, x) when
    graphic is a Line.
    """
    # the original docstring said "end" — copy/paste from the sibling setter
    self.set_property("start", value)
Set the data.
:param data: A numpy ndarray.
.. versionadded:: 1.0
Scriptable: Yes
def data(self, data: numpy.ndarray) -> None:
    """Set the data.

    :param data: A numpy ndarray.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # store a copy so later mutation of the caller's array cannot silently
    # change the stored data
    data_copy = numpy.copy(data)
    self.__data_item.set_data(data_copy)
Return the extended data of this data item display.
Display data will always be 1d or 2d and either int, float, or RGB data type.
.. versionadded:: 1.0
Scriptable: Yes
def display_xdata(self) -> DataAndMetadata.DataAndMetadata:
    """Return the extended data of this data item display.

    Display data will always be 1d or 2d and either int, float, or RGB data type.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    channel = self.__display_item.display_data_channel
    display_values = channel.get_calculated_display_values(True)
    return display_values.display_data_and_metadata
Set the dimensional calibrations.
:param dimensional_calibrations: A list of calibrations, must match the dimensions of the data.
.. versionadded:: 1.0
Scriptable: Yes
def set_dimensional_calibrations(self, dimensional_calibrations: typing.List[CalibrationModule.Calibration]) -> None:
    """Set the dimensional calibrations.

    :param dimensional_calibrations: A list of calibrations; must match the dimensions of the data.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # delegate straight to the underlying data item
    self.__data_item.set_dimensional_calibrations(dimensional_calibrations)
Get the metadata value for the given key.
There are a set of predefined keys that, when used, will be type checked and be interoperable with other
applications. Please consult reference documentation for valid keys.
If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed
by the predefined keys. e.g. 'session.instrument' or 'camera.binning'.
Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer
using the ``metadata_value`` methods over directly accessing ``metadata``.
.. versionadded:: 1.0
Scriptable: Yes
def get_metadata_value(self, key: str) -> typing.Any:
    """Return the metadata value stored under *key*.

    A set of predefined keys is type checked and interoperable with other
    applications; consult the reference documentation for the valid keys.
    Custom keys should follow the '<group>.<attribute>' pattern used by the
    predefined keys, e.g. 'session.instrument' or 'camera.binning'.

    Some predefined keys map into the metadata ``dict`` and others do not, so
    prefer the ``metadata_value`` family of methods over reading ``metadata``
    directly.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    data_item = self._data_item
    return data_item.get_metadata_value(key)
Set the metadata value for the given key.
There are a set of predefined keys that, when used, will be type checked and be interoperable with other
applications. Please consult reference documentation for valid keys.
If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed
by the predefined keys. e.g. 'session.instrument' or 'camera.binning'.
Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer
using the ``metadata_value`` methods over directly accessing ``metadata``.
.. versionadded:: 1.0
Scriptable: Yes
def set_metadata_value(self, key: str, value: typing.Any) -> None:
    """Store *value* as the metadata value for *key*.

    A set of predefined keys is type checked and interoperable with other
    applications; consult the reference documentation for the valid keys.
    Custom keys should follow the '<group>.<attribute>' pattern used by the
    predefined keys, e.g. 'session.instrument' or 'camera.binning'.

    Some predefined keys map into the metadata ``dict`` and others do not, so
    prefer the ``metadata_value`` family of methods over writing ``metadata``
    directly.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    data_item = self._data_item
    data_item.set_metadata_value(key, value)
Return the graphics attached to this data item.
.. versionadded:: 1.0
Scriptable: Yes
def graphics(self) -> typing.List[Graphic]:
    """Return the graphics attached to this data item.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # wrap each low-level graphic in its facade type
    return list(map(Graphic, self.__display_item.graphics))
Add a point graphic to the data item.
:param x: The x coordinate, in relative units [0.0, 1.0]
:param y: The y coordinate, in relative units [0.0, 1.0]
:return: The :py:class:`nion.swift.Facade.Graphic` object that was added.
.. versionadded:: 1.0
Scriptable: Yes
def add_point_region(self, y: float, x: float) -> Graphic:
    """Add a point graphic to the data item.

    :param x: The x coordinate, in relative units [0.0, 1.0]
    :param y: The y coordinate, in relative units [0.0, 1.0]
    :return: The :py:class:`nion.swift.Facade.Graphic` object that was added.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    point_graphic = Graphics.PointGraphic()
    # note the (y, x) coordinate order of FloatPoint
    point_graphic.position = Geometry.FloatPoint(y, x)
    self.__display_item.add_graphic(point_graphic)
    return Graphic(point_graphic)
Return the mask by combining any mask graphics on this data item as extended data.
.. versionadded:: 1.0
Scriptable: Yes
def mask_xdata(self) -> DataAndMetadata.DataAndMetadata:
    """Return the mask by combining any mask graphics on this data item as extended data.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    channel = self.__display_item.display_data_channel
    shape = channel.display_data_shape
    mask_graphic_types = (Graphics.SpotGraphic, Graphics.WedgeGraphic,
                          Graphics.RingGraphic, Graphics.LatticeGraphic)
    # OR together the masks of all mask-producing graphics
    combined = numpy.zeros(shape)
    for graphic in self.__display_item.graphics:
        if isinstance(graphic, mask_graphic_types):
            combined = numpy.logical_or(combined, graphic.get_mask(shape))
    return DataAndMetadata.DataAndMetadata.from_data(combined)
Return the data item associated with this display panel.
.. versionadded:: 1.0
Scriptable: Yes
def data_item(self) -> DataItem:
    """Return the data item associated with this display panel.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    panel = self.__display_panel
    if panel:
        item = panel.data_item
        if item:
            return DataItem(item)
    # no panel, or panel without a data item
    return None
Set the data item associated with this display panel.
:param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.
This will replace whatever data item, browser, or controller is currently in the display panel with the single
data item.
.. versionadded:: 1.0
Scriptable: Yes
def set_data_item(self, data_item: DataItem) -> None:
    """Set the data item associated with this display panel.

    :param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.

    This replaces whatever data item, browser, or controller is currently in
    the display panel with the single data item.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    panel = self.__display_panel
    if not panel:
        return
    raw_item = data_item._data_item
    container = raw_item.container
    display_item = container.get_display_item_for_data_item(raw_item) if container else None
    panel.set_display_panel_display_item(display_item)
Add a data item to the group.
:param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.
.. versionadded:: 1.0
Scriptable: Yes
def add_data_item(self, data_item: DataItem) -> None:
    """Add a data item to the group.

    :param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    raw_item = data_item._data_item
    container = raw_item.container
    display_item = container.get_display_item_for_data_item(raw_item) if container else None
    if display_item:
        self.__data_group.append_display_item(display_item)
Close the task.
.. versionadded:: 1.0
This method must be called when the task is no longer needed.
def close(self) -> None:
    """Close the task.

    .. versionadded:: 1.0

    This method must be called when the task is no longer needed.
    """
    channel_buffer = self.__data_channel_buffer
    channel_buffer.stop()
    channel_buffer.close()
    self.__data_channel_buffer = None
    # if playback was started only for this task, shut it down again
    if not self.__was_playing:
        self.__hardware_source.stop_playing()
Record data and return a list of data_and_metadata objects.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the record. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the record. Pass None for defaults.
:type channels_enabled: List of booleans.
:param timeout: The timeout in seconds. Pass None to use default.
:return: The list of data and metadata items that were read.
:rtype: list of :py:class:`DataAndMetadata`
def record(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, timeout: float=None) -> typing.List[DataAndMetadata.DataAndMetadata]:
    """Record data and return a list of data_and_metadata objects.

    .. versionadded:: 1.0

    :param frame_parameters: Frame parameters for the record; pass None to use the defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: Which channels are enabled for the record; pass None to use the defaults.
    :type channels_enabled: List of booleans.
    :param timeout: The timeout in seconds; pass None to use the default.
    :return: The list of data and metadata items that were read.
    :rtype: list of :py:class:`DataAndMetadata`
    """
    hardware_source = self.__hardware_source
    if frame_parameters:
        record_parameters = hardware_source.get_frame_parameters_from_dict(frame_parameters)
        hardware_source.set_record_frame_parameters(record_parameters)
    if channels_enabled is not None:
        for channel_index, enabled in enumerate(channels_enabled):
            hardware_source.set_channel_enabled(channel_index, enabled)
    hardware_source.start_recording()
    # block until the recorded frames finish (or the timeout elapses)
    return hardware_source.get_next_xdatas_to_finish(timeout)
Create a record task for this hardware source.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the record. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the record. Pass None for defaults.
:type channels_enabled: List of booleans.
:return: The :py:class:`RecordTask` object.
:rtype: :py:class:`RecordTask`
Callers should call close on the returned task when finished.
See :py:class:`RecordTask` for examples of how to use.
def create_record_task(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None) -> RecordTask:
    """Create a record task for this hardware source.

    .. versionadded:: 1.0

    :param frame_parameters: Frame parameters for the record; pass None to use the defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: Which channels are enabled for the record; pass None to use the defaults.
    :type channels_enabled: List of booleans.
    :return: The :py:class:`RecordTask` object.
    :rtype: :py:class:`RecordTask`

    Callers are responsible for calling close on the returned task when
    finished; see :py:class:`RecordTask` for usage examples.
    """
    task = RecordTask(self.__hardware_source, frame_parameters, channels_enabled)
    return task
Create a view task for this hardware source.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the view. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the view. Pass None for defaults.
:type channels_enabled: List of booleans.
:param buffer_size: The buffer size if using the grab_earliest method. Default is 1.
:type buffer_size: int
:return: The :py:class:`ViewTask` object.
:rtype: :py:class:`ViewTask`
Callers should call close on the returned task when finished.
See :py:class:`ViewTask` for examples of how to use.
def create_view_task(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, buffer_size: int=1) -> ViewTask:
    """Create a view task for this hardware source.

    .. versionadded:: 1.0

    :param frame_parameters: Frame parameters for the view; pass None to use the defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: Which channels are enabled for the view; pass None to use the defaults.
    :type channels_enabled: List of booleans.
    :param buffer_size: Buffer size used by the grab_earliest method; defaults to 1.
    :type buffer_size: int
    :return: The :py:class:`ViewTask` object.
    :rtype: :py:class:`ViewTask`

    Callers are responsible for calling close on the returned task when
    finished; see :py:class:`ViewTask` for usage examples.
    """
    task = ViewTask(self.__hardware_source, frame_parameters, channels_enabled, buffer_size)
    return task
Grabs the next frame to finish and returns it as data and metadata.
.. versionadded:: 1.0
:param timeout: The timeout in seconds. Pass None to use default.
:return: The list of data and metadata items that were read.
:rtype: list of :py:class:`DataAndMetadata`
If the view is not already started, it will be started automatically.
Scriptable: Yes
def grab_next_to_finish(self, timeout: float=None) -> typing.List[DataAndMetadata.DataAndMetadata]:
    """Grabs the next frame to finish and returns it as data and metadata.

    .. versionadded:: 1.0

    :param timeout: The timeout in seconds; pass None to use the default.
    :return: The list of data and metadata items that were read.
    :rtype: list of :py:class:`DataAndMetadata`

    If the view is not already started, it will be started automatically.

    Scriptable: Yes
    """
    # ensure the view is running before waiting on data
    self.start_playing()
    return self.__hardware_source.get_next_xdatas_to_finish(timeout)
Set the value of a control asynchronously.
:param name: The name of the control (string).
:param value: The control value (float).
:param options: A dict of custom options to pass to the instrument for setting the value.
Options are:
value_type: local, delta, output. output is default.
confirm, confirm_tolerance_factor, confirm_timeout: confirm value gets set.
inform: True to keep dependent control outputs constant by adjusting their internal values. False is
default.
Default value of confirm is False.
Default confirm_tolerance_factor is 1.0. A value of 1.0 is the nominal tolerance for that control. Passing a
higher tolerance factor (for example 1.5) will increase the permitted error margin and passing lower tolerance
factor (for example 0.5) will decrease the permitted error margin and consequently make a timeout more likely.
The tolerance factor value 0.0 is a special value which removes all checking and only waits for any change at
all and then returns.
Default confirm_timeout is 16.0 (seconds).
Raises exception if control with name doesn't exist.
Raises TimeoutException if confirm is True and timeout occurs.
.. versionadded:: 1.0
Scriptable: Yes
def set_control_output(self, name: str, value: float, *, options: dict=None) -> None:
    """Set the value of a control asynchronously.

    :param name: The name of the control (string).
    :param value: The control value (float).
    :param options: A dict of custom options to pass to the instrument for setting the value.

    Options are:
        value_type: local, delta, output. output is default.
        confirm, confirm_tolerance_factor, confirm_timeout: confirm value gets set.
        inform: True to keep dependent control outputs constant by adjusting their internal values. False is
        default.

    Default value of confirm is False.

    Default confirm_tolerance_factor is 1.0. A value of 1.0 is the nominal tolerance for that control. Passing a
    higher tolerance factor (for example 1.5) will increase the permitted error margin and passing lower tolerance
    factor (for example 0.5) will decrease the permitted error margin and consequently make a timeout more likely.
    The tolerance factor value 0.0 is a special value which removes all checking and only waits for any change at
    all and then returns.

    Default confirm_timeout is 16.0 (seconds).

    Raises exception if control with name doesn't exist.

    Raises TimeoutException if confirm is True and timeout occurs.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # forward to the low-level instrument; options passes through unchanged
    instrument = self.__instrument
    instrument.set_control_output(name, value, options)
Return the value of a float property.
:return: The property value (float).
Raises exception if property with name doesn't exist.
.. versionadded:: 1.0
Scriptable: Yes
def get_property_as_float(self, name: str) -> float:
    """Return the value of a float property.

    :return: The property value (float).

    Raises exception if property with name doesn't exist.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    raw_value = self.__instrument.get_property(name)
    # coerce to float regardless of the instrument's native representation
    return float(raw_value)
Set the value of a float property.
:param name: The name of the property (string).
:param value: The property value (float).
Raises exception if property with name doesn't exist.
.. versionadded:: 1.0
Scriptable: Yes
def set_property_as_float(self, name: str, value: float) -> None:
    """Set the value of a float property.

    :param name: The name of the property (string).
    :param value: The property value (float).

    Raises exception if property with name doesn't exist.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # coerce to float before handing off to the instrument
    coerced = float(value)
    self.__instrument.set_property(name, coerced)
Return the list of data items.
:return: The list of :py:class:`nion.swift.Facade.DataItem` objects.
.. versionadded:: 1.0
Scriptable: Yes
def data_items(self) -> typing.List[DataItem]:
    """Return the list of data items.

    :return: The list of :py:class:`nion.swift.Facade.DataItem` objects.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # wrap each low-level data item in its facade type
    return list(map(DataItem, self.__document_model.data_items))
Return the list of display items.
:return: The list of :py:class:`nion.swift.Facade.Display` objects.
.. versionadded:: 1.0
Scriptable: Yes
def display_items(self) -> typing.List[Display]:
    """Return the list of display items.

    :return: The list of :py:class:`nion.swift.Facade.Display` objects.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # wrap each low-level display item in its facade type
    return list(map(Display, self.__document_model.display_items))
Return the list of data items that are data sources for the data item.
:return: The list of :py:class:`nion.swift.Facade.DataItem` objects.
.. versionadded:: 1.0
Scriptable: Yes
def get_source_data_items(self, data_item: DataItem) -> typing.List[DataItem]:
    """Return the list of data items that are data sources for the data item.

    :return: The list of :py:class:`nion.swift.Facade.DataItem` objects, or
        None when no data item is given.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    if not data_item:
        return None
    source_items = self._document_model.get_source_data_items(data_item._data_item)
    return [DataItem(item) for item in source_items]
Return the dependent data items the data item argument.
:return: The list of :py:class:`nion.swift.Facade.DataItem` objects.
.. versionadded:: 1.0
Scriptable: Yes
def get_dependent_data_items(self, data_item: DataItem) -> typing.List[DataItem]:
    """Return the dependent data items the data item argument.

    :return: The list of :py:class:`nion.swift.Facade.DataItem` objects, or
        None when no data item is given.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    if not data_item:
        return None
    dependents = self._document_model.get_dependent_data_items(data_item._data_item)
    return [DataItem(item) for item in dependents]
Create an empty data item in the library.
:param title: The title of the data item (optional).
:return: The new :py:class:`nion.swift.Facade.DataItem` object.
:rtype: :py:class:`nion.swift.Facade.DataItem`
.. versionadded:: 1.0
Scriptable: Yes
def create_data_item(self, title: str=None) -> DataItem:
    """Create an empty data item in the library.

    :param title: The title of the data item (optional).
    :return: The new :py:class:`nion.swift.Facade.DataItem` object.
    :rtype: :py:class:`nion.swift.Facade.DataItem`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    new_item = DataItemModule.DataItem()
    new_item.ensure_data_source()
    if title is not None:
        new_item.title = title
    self.__document_model.append_data_item(new_item)
    return DataItem(new_item)
Create a data item in the library from an ndarray.
The data for the data item will be written to disk immediately and unloaded from memory. If you wish to delay
writing to disk and keep using the data, create an empty data item and use the data item methods to modify
the data.
:param data: The data (ndarray).
:param title: The title of the data item (optional).
:return: The new :py:class:`nion.swift.Facade.DataItem` object.
:rtype: :py:class:`nion.swift.Facade.DataItem`
.. versionadded:: 1.0
Scriptable: Yes
def create_data_item_from_data(self, data: numpy.ndarray, title: str=None) -> DataItem:
    """Create a data item in the library from an ndarray.

    The data for the data item will be written to disk immediately and unloaded
    from memory. If you wish to delay writing to disk and keep using the data,
    create an empty data item and use the data item methods to modify the data.

    :param data: The data (ndarray).
    :param title: The title of the data item (optional).
    :return: The new :py:class:`nion.swift.Facade.DataItem` object.
    :rtype: :py:class:`nion.swift.Facade.DataItem`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # wrap the raw array and reuse the data-and-metadata path
    xdata = DataAndMetadata.DataAndMetadata.from_data(data)
    return self.create_data_item_from_data_and_metadata(xdata, title)
Create a data item in the library from a data and metadata object.
The data for the data item will be written to disk immediately and unloaded from memory. If you wish to delay
writing to disk and keep using the data, create an empty data item and use the data item methods to modify
the data.
:param data_and_metadata: The data and metadata.
:param title: The title of the data item (optional).
:return: The new :py:class:`nion.swift.Facade.DataItem` object.
:rtype: :py:class:`nion.swift.Facade.DataItem`
.. versionadded:: 1.0
Scriptable: Yes
def create_data_item_from_data_and_metadata(self, data_and_metadata: DataAndMetadata.DataAndMetadata, title: str=None) -> DataItem:
    """Create a data item in the library from a data and metadata object.

    The data for the data item will be written to disk immediately and unloaded
    from memory. If you wish to delay writing to disk and keep using the data,
    create an empty data item and use the data item methods to modify the data.

    :param data_and_metadata: The data and metadata.
    :param title: The title of the data item (optional).
    :return: The new :py:class:`nion.swift.Facade.DataItem` object.
    :rtype: :py:class:`nion.swift.Facade.DataItem`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    new_item = DataItemModule.new_data_item(data_and_metadata)
    if title is not None:
        new_item.title = title
    self.__document_model.append_data_item(new_item)
    return DataItem(new_item)
Copy a data item.
.. versionadded:: 1.0
Scriptable: No
def copy_data_item(self, data_item: DataItem) -> DataItem:
    """Copy a data item.

    .. versionadded:: 1.0

    Scriptable: No
    """
    copied_item = copy.deepcopy(data_item._data_item)
    self.__document_model.append_data_item(copied_item)
    return DataItem(copied_item)
Snapshot a data item. Similar to copy but with a data snapshot.
.. versionadded:: 1.0
Scriptable: No
def snapshot_data_item(self, data_item: DataItem) -> DataItem:
    """Snapshot a data item. Similar to copy but with a data snapshot.

    .. versionadded:: 1.0

    Scriptable: No
    """
    snapshot = data_item._data_item.snapshot()
    self.__document_model.append_data_item(snapshot)
    return DataItem(snapshot)
Get (or create) a data group.
:param title: The title of the data group.
:return: The new :py:class:`nion.swift.Facade.DataGroup` object.
:rtype: :py:class:`nion.swift.Facade.DataGroup`
.. versionadded:: 1.0
Scriptable: Yes
def get_or_create_data_group(self, title: str) -> DataGroup:
    """Get (or create) a data group.

    :param title: The title of the data group.
    :return: The new :py:class:`nion.swift.Facade.DataGroup` object.
    :rtype: :py:class:`nion.swift.Facade.DataGroup`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    group = self.__document_model.get_or_create_data_group(title)
    return DataGroup(group)
Get the data item associated with hardware source and (optional) channel id and processor_id. Optionally create if missing.
:param hardware_source: The hardware_source.
:param channel_id: The (optional) channel id.
:param processor_id: The (optional) processor id for the channel.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
def get_data_item_for_hardware_source(self, hardware_source, channel_id: str=None, processor_id: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
    """Get the data item associated with hardware source and (optional) channel id and processor_id. Optionally create if missing.

    :param hardware_source: The hardware_source.
    :param channel_id: The (optional) channel id.
    :param processor_id: The (optional) processor id for the channel.
    :param create_if_needed: Whether to create a new data item if none is found.
    :return: The associated data item. May be None.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    assert hardware_source is not None
    source_id = hardware_source._hardware_source.hardware_source_id
    # delegate to the reference-key lookup once the key is derived
    reference_key = self._document_model.make_data_item_reference_key(source_id, channel_id, processor_id)
    return self.get_data_item_for_reference_key(reference_key, create_if_needed=create_if_needed, large_format=large_format)
Get the data item associated with data item reference key. Optionally create if missing.
:param data_item_reference_key: The data item reference key.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
def get_data_item_for_reference_key(self, data_item_reference_key: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
    """Get the data item associated with data item reference key. Optionally create if missing.

    :param data_item_reference_key: The data item reference key.
    :param create_if_needed: Whether to create a new data item if none is found.
    :return: The associated data item. May be None.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    document_model = self._document_model
    data_item = document_model.get_data_item_reference(data_item_reference_key).data_item
    if data_item is None and create_if_needed:
        new_item = DataItemModule.DataItem(large_format=large_format)
        new_item.ensure_data_source()
        document_model.append_data_item(new_item)
        document_model.setup_channel(data_item_reference_key, new_item)
        new_item.session_id = document_model.session_id
        # re-fetch through the reference so the canonical instance is returned
        data_item = document_model.get_data_item_reference(data_item_reference_key).data_item
    return DataItem(data_item) if data_item else None
Get the data item with the given UUID.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
def get_data_item_by_uuid(self, data_item_uuid: uuid_module.UUID) -> DataItem:
    """Get the data item with the given UUID.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    matching_item = self._document_model.get_data_item_by_uuid(data_item_uuid)
    if matching_item:
        return DataItem(matching_item)
    return None
Get the graphic with the given UUID.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
def get_graphic_by_uuid(self, graphic_uuid: uuid_module.UUID) -> Graphic:
    """Get the graphic with the given UUID.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    # Flatten all graphics of all display items and pick the first match.
    all_graphics = (graphic
                    for display_item in self._document_model.display_items
                    for graphic in display_item.graphics)
    match = next((graphic for graphic in all_graphics if graphic.uuid == graphic_uuid), None)
    return Graphic(match) if match is not None else None
Return whether the library value for the given key exists.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
def has_library_value(self, key: str) -> bool:
    """Return whether the library value for the given key exists.

    Please consult the developer documentation for a list of valid keys.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        # Unknown key: nothing to look up.
        return False
    field_id = desc['path'][-1]
    return bool(getattr(ApplicationData.get_session_metadata_model(), field_id, None))
Get the library value for the given key.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
def get_library_value(self, key: str) -> typing.Any:
    """Get the library value for the given key.

    Please consult the developer documentation for a list of valid keys.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        # Unknown key: mirror dict semantics.
        raise KeyError()
    field_id = desc['path'][-1]
    return getattr(ApplicationData.get_session_metadata_model(), field_id)
Set the library value for the given key.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
def set_library_value(self, key: str, value: typing.Any) -> None:
    """Set the library value for the given key.

    Please consult the developer documentation for a list of valid keys.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        # Unknown key: mirror dict semantics.
        raise KeyError()
    field_id = desc['path'][-1]
    setattr(ApplicationData.get_session_metadata_model(), field_id, value)
Delete the library value for the given key.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
def delete_library_value(self, key: str) -> None:
    """Delete the library value for the given key.

    Please consult the developer documentation for a list of valid keys.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        # Unknown key: mirror dict semantics.
        raise KeyError()
    field_id = desc['path'][-1]
    # "Delete" clears the field rather than removing the attribute.
    setattr(ApplicationData.get_session_metadata_model(), field_id, None)
Return the list of display panels currently visible.
.. versionadded:: 1.0
Scriptable: Yes
def all_display_panels(self) -> typing.List[DisplayPanel]:
    """Return the list of display panels currently visible.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    panels = []
    for display_panel in self.__document_controller.workspace_controller.display_panels:
        panels.append(DisplayPanel(display_panel))
    return panels
Return display panel with the identifier.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
def get_display_panel_by_id(self, identifier: str) -> DisplayPanel:
    """Return display panel with the identifier.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    # Identifier comparison is case-insensitive.
    target = identifier.lower()
    for display_panel in self.__document_controller.workspace_controller.display_panels:
        if display_panel.identifier.lower() == target:
            return DisplayPanel(display_panel)
    return None
Display a new data item and gives it keyboard focus. Uses existing display if it is already displayed.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
def display_data_item(self, data_item: DataItem, source_display_panel=None, source_data_item=None):
    """Display a new data item and gives it keyboard focus. Uses existing display if it is already displayed.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    document_controller = self.__document_controller
    # Reuse an existing display panel if the item is already shown.
    for display_panel in document_controller.workspace_controller.display_panels:
        if display_panel.data_item == data_item._data_item:
            display_panel.request_focus()
            return DisplayPanel(display_panel)
    # Otherwise display it in the next available result panel, if any.
    result_display_panel = document_controller.next_result_display_panel()
    if not result_display_panel:
        return None
    display_item = document_controller.document_model.get_display_item_for_data_item(data_item._data_item)
    result_display_panel.set_display_panel_display_item(display_item)
    result_display_panel.request_focus()
    return DisplayPanel(result_display_panel)
Show a dialog box and ask for a string.
Caption describes the user prompt. Text is the initial/default string.
Accepted function must be a function taking one argument which is the resulting text if the user accepts the
message dialog. It will only be called if the user clicks OK.
Rejected function can be a function taking no arguments, called if the user clicks Cancel.
.. versionadded:: 1.0
Scriptable: No
def show_get_string_message_box(self, caption: str, text: str, accepted_fn, rejected_fn=None, accepted_text: str=None, rejected_text: str=None) -> None:
    """Show a dialog box and ask for a string.

    Caption describes the user prompt. Text is the initial/default string.

    Accepted function must be a function taking one argument which is the resulting text if the user accepts the
    message dialog. It will only be called if the user clicks OK.

    Rejected function can be a function taking no arguments, called if the user clicks Cancel.

    .. versionadded:: 1.0

    Scriptable: No
    """
    # Delegate directly to the workspace controller.
    self.__document_controller.workspace_controller.pose_get_string_message_box(
        caption, text, accepted_fn, rejected_fn, accepted_text, rejected_text)
Create a data item in the library from data.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data` instead.
Scriptable: No
def create_data_item_from_data(self, data: numpy.ndarray, title: str=None) -> DataItem:
    """Create a data item in the library from data.

    .. versionadded:: 1.0

    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data` instead.

    Scriptable: No
    """
    new_item = self.__document_controller.add_data(data, title)
    return DataItem(new_item)
Create a data item in the library from the data and metadata.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data_and_metadata` instead.
Scriptable: No
def create_data_item_from_data_and_metadata(self, data_and_metadata: DataAndMetadata.DataAndMetadata, title: str=None) -> DataItem:
    """Create a data item in the library from the data and metadata.

    .. versionadded:: 1.0

    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data_and_metadata` instead.

    Scriptable: No
    """
    new_item = DataItemModule.new_data_item(data_and_metadata)
    if title is not None:
        new_item.title = title
    # Register the item with the document model before wrapping it.
    self.__document_controller.document_model.append_data_item(new_item)
    return DataItem(new_item)
Get (or create) a data group.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data` instead.
Scriptable: No
def get_or_create_data_group(self, title: str) -> DataGroup:
    """Get (or create) a data group.

    .. versionadded:: 1.0

    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data` instead.

    Scriptable: No
    """
    document_model = self.__document_controller.document_model
    return DataGroup(document_model.get_or_create_data_group(title))
Return the document windows.
.. versionadded:: 1.0
Scriptable: Yes
def document_windows(self) -> typing.List[DocumentWindow]:
    """Return the document windows.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    return list(map(DocumentWindow, self.__application.document_controllers))
Create a calibration object with offset, scale, and units.
:param offset: The offset of the calibration.
:param scale: The scale of the calibration.
:param units: The units of the calibration as a string.
:return: The calibration object.
.. versionadded:: 1.0
Scriptable: Yes
Calibrated units and uncalibrated units have the following relationship:
:samp:`calibrated_value = offset + value * scale`
def create_calibration(self, offset: float=None, scale: float=None, units: str=None) -> CalibrationModule.Calibration:
    """Create a calibration object with offset, scale, and units.

    :param offset: The offset of the calibration.
    :param scale: The scale of the calibration.
    :param units: The units of the calibration as a string.
    :return: The calibration object.

    .. versionadded:: 1.0

    Scriptable: Yes

    Calibrated units and uncalibrated units have the following relationship:
        :samp:`calibrated_value = offset + value * scale`
    """
    calibration = CalibrationModule.Calibration(offset, scale, units)
    return calibration
Create a data descriptor.
:param is_sequence: whether the descriptor describes a sequence of data.
:param collection_dimension_count: the number of collection dimensions represented by the descriptor.
:param datum_dimension_count: the number of datum dimensions represented by the descriptor.
.. versionadded:: 1.0
Scriptable: Yes
def create_data_descriptor(self, is_sequence: bool, collection_dimension_count: int, datum_dimension_count: int) -> DataAndMetadata.DataDescriptor:
    """Create a data descriptor.

    :param is_sequence: whether the descriptor describes a sequence of data.
    :param collection_dimension_count: the number of collection dimensions represented by the descriptor.
    :param datum_dimension_count: the number of datum dimensions represented by the descriptor.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    descriptor = DataAndMetadata.DataDescriptor(is_sequence, collection_dimension_count, datum_dimension_count)
    return descriptor
Create a data_and_metadata object from data.
:param data: an ndarray of data.
:param intensity_calibration: An optional calibration object.
:param dimensional_calibrations: An optional list of calibration objects.
:param metadata: A dict of metadata.
:param timestamp: A datetime object.
:param data_descriptor: A data descriptor describing the dimensions.
.. versionadded:: 1.0
Scriptable: Yes
def create_data_and_metadata(self, data: numpy.ndarray, intensity_calibration: CalibrationModule.Calibration = None,
                             dimensional_calibrations: typing.List[CalibrationModule.Calibration] = None, metadata: dict = None,
                             timestamp: str = None, data_descriptor: DataAndMetadata.DataDescriptor = None) -> DataAndMetadata.DataAndMetadata:
    """Create a data_and_metadata object from data.

    :param data: an ndarray of data.
    :param intensity_calibration: An optional calibration object.
    :param dimensional_calibrations: An optional list of calibration objects.
    :param metadata: A dict of metadata.
    :param timestamp: A datetime object.
    :param data_descriptor: A data descriptor describing the dimensions.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    xdata = DataAndMetadata.new_data_and_metadata(data, intensity_calibration, dimensional_calibrations,
                                                  metadata, timestamp, data_descriptor)
    return xdata
Create a data_and_metadata object from data.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead.
Scriptable: No
def create_data_and_metadata_from_data(self, data: numpy.ndarray, intensity_calibration: CalibrationModule.Calibration=None, dimensional_calibrations: typing.List[CalibrationModule.Calibration]=None, metadata: dict=None, timestamp: str=None) -> DataAndMetadata.DataAndMetadata:
    """Create a data_and_metadata object from data.

    .. versionadded:: 1.0

    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead.

    Scriptable: No
    """
    # Copy the data so the caller's array is not shared with the result.
    data_copy = numpy.copy(data)
    return self.create_data_and_metadata(data_copy, intensity_calibration, dimensional_calibrations, metadata, timestamp)
Create an I/O handler that reads and writes a single data_and_metadata.
:param io_handler_delegate: A delegate object :py:class:`DataAndMetadataIOHandlerInterface`
.. versionadded:: 1.0
Scriptable: No
def create_data_and_metadata_io_handler(self, io_handler_delegate):
    """Create an I/O handler that reads and writes a single data_and_metadata.

    :param io_handler_delegate: A delegate object :py:class:`DataAndMetadataIOHandlerInterface`

    .. versionadded:: 1.0

    Scriptable: No
    """

    class DelegateIOHandler(ImportExportManager.ImportExportHandler):
        # Adapter exposing the delegate through the ImportExportHandler
        # interface expected by the ImportExportManager.

        def __init__(self):
            super().__init__(io_handler_delegate.io_handler_id, io_handler_delegate.io_handler_name, io_handler_delegate.io_handler_extensions)

        def read_data_elements(self, ui, extension, file_path):
            # Delegate the read, then wrap the result as a one-element list.
            data_and_metadata = io_handler_delegate.read_data_and_metadata(extension, file_path)
            data_element = ImportExportManager.create_data_element_from_extended_data(data_and_metadata)
            return [data_element]

        def can_write(self, data_and_metadata, extension):
            return io_handler_delegate.can_write_data_and_metadata(data_and_metadata, extension)

        def write_display_item(self, ui, display_item: DisplayItemModule.DisplayItem, file_path: str, extension: str) -> None:
            # Writing a display item means writing its backing data item, if any.
            data_item = display_item.data_item
            if data_item:
                self.write_data_item(ui, data_item, file_path, extension)

        def write_data_item(self, ui, data_item, file_path, extension):
            data_and_metadata = data_item.xdata
            data = data_and_metadata.data
            if data is not None:
                # Prefer the delegate's write_data_item hook when present;
                # otherwise fall back to write_data_and_metadata.
                if hasattr(io_handler_delegate, "write_data_item"):
                    io_handler_delegate.write_data_item(DataItem(data_item), file_path, extension)
                else:
                    assert hasattr(io_handler_delegate, "write_data_and_metadata")
                    io_handler_delegate.write_data_and_metadata(data_and_metadata, file_path, extension)

    class IOHandlerReference:
        # Handle returned to the caller; keeps the handler registered
        # until close() is called (or the object is garbage collected).

        def __init__(self):
            self.__io_handler_delegate = io_handler_delegate
            self.__io_handler = DelegateIOHandler()
            ImportExportManager.ImportExportManager().register_io_handler(self.__io_handler)

        def __del__(self):
            self.close()

        def close(self):
            # Idempotent: the delegate reference is cleared after the first
            # call, so a second close (or __del__ after close) is a no-op.
            if self.__io_handler_delegate:
                io_handler_delegate_close_fn = getattr(self.__io_handler_delegate, "close", None)
                if io_handler_delegate_close_fn:
                    io_handler_delegate_close_fn()
                ImportExportManager.ImportExportManager().unregister_io_handler(self.__io_handler)
                self.__io_handler_delegate = None

    return IOHandlerReference()
Create a utility panel that can be attached to a window.
.. versionadded:: 1.0
Scriptable: No
The panel_delegate should respond to the following:
(property, read-only) panel_id
(property, read-only) panel_name
(property, read-only) panel_positions (a list from "top", "bottom", "left", "right", "all")
(property, read-only) panel_position (from "top", "bottom", "left", "right", "none")
(method, required) create_panel_widget(ui), returns a widget
(method, optional) close()
def create_panel(self, panel_delegate):
    """Create a utility panel that can be attached to a window.

    .. versionadded:: 1.0

    Scriptable: No

    The panel_delegate should respond to the following:
        (property, read-only) panel_id
        (property, read-only) panel_name
        (property, read-only) panel_positions (a list from "top", "bottom", "left", "right", "all")
        (property, read-only) panel_position (from "top", "bottom", "left", "right", "none")
        (method, required) create_panel_widget(ui), returns a widget
        (method, optional) close()
    """

    panel_id = panel_delegate.panel_id
    panel_name = panel_delegate.panel_name
    # Optional delegate attributes fall back to defaults.
    panel_positions = getattr(panel_delegate, "panel_positions", ["left", "right"])
    panel_position = getattr(panel_delegate, "panel_position", "none")
    properties = getattr(panel_delegate, "panel_properties", None)

    workspace_manager = Workspace.WorkspaceManager()

    def create_facade_panel(document_controller, panel_id, properties):
        # Factory called by the workspace manager; wraps the delegate's
        # widget in a Panel hosted by the given document controller.
        panel = Panel(document_controller, panel_id, properties)
        ui = UserInterface(self.__ui_version, document_controller.ui)
        document_controller = DocumentWindow(document_controller)
        panel.widget = panel_delegate.create_panel_widget(ui, document_controller)._widget
        return panel

    class PanelReference:
        # Handle returned to the caller; keeps the panel registered until
        # close() is called (or the object is garbage collected).

        def __init__(self):
            self.__panel_delegate = panel_delegate
            workspace_manager.register_panel(create_facade_panel, panel_id, panel_name, panel_positions, panel_position, properties)

        def __del__(self):
            self.close()

        def close(self):
            # Idempotent: the delegate reference is cleared after the first
            # call, so a second close (or __del__ after close) is a no-op.
            if self.__panel_delegate:
                panel_delegate_close_fn = getattr(self.__panel_delegate, "close", None)
                if panel_delegate_close_fn:
                    panel_delegate_close_fn()
                workspace_manager.unregister_panel(panel_id)
                self.__panel_delegate = None

    return PanelReference()
Return the hardware source API matching the hardware_source_id and version.
.. versionadded:: 1.0
Scriptable: Yes
def get_hardware_source_by_id(self, hardware_source_id: str, version: str):
    """Return the hardware source API matching the hardware_source_id and version.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    actual_version = "1.0.0"
    # Reject callers that request a newer API than this build provides.
    if Utility.compare_versions(version, actual_version) > 0:
        raise NotImplementedError("Hardware API requested version %s is greater than %s." % (version, actual_version))
    source = HardwareSourceModule.HardwareSourceManager().get_hardware_source_for_hardware_source_id(hardware_source_id)
    return HardwareSource(source) if source else None
Return the library object.
.. versionadded:: 1.0
Scriptable: Yes
def library(self) -> Library:
    """Return the library object.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    document_model = self.__app.document_model
    # A document model must exist for the library facade to be valid.
    assert document_model
    return Library(document_model)
Create a cost matrix from a profit matrix by calling
'inversion_function' to invert each value. The inversion
function must take one numeric argument (of any type) and return
another numeric argument which is presumed to be the cost inverse
of the original profit.
This is a static method. Call it like this:
.. python::
cost_matrix = Munkres.make_cost_matrix(matrix, inversion_func)
For example:
.. python::
cost_matrix = Munkres.make_cost_matrix(matrix, lambda x : sys.maxsize - x)
:Parameters:
profit_matrix : list of lists
The matrix to convert from a profit to a cost matrix
inversion_function : function
The function to use to invert each entry in the profit matrix
:rtype: list of lists
:return: The converted matrix
def make_cost_matrix(profit_matrix, inversion_function):
    """
    Create a cost matrix from a profit matrix by calling
    'inversion_function' to invert each value. The inversion
    function must take one numeric argument (of any type) and return
    another numeric argument which is presumed to be the cost inverse
    of the original profit.

    This is a static method. Call it like this:

    .. python::

        cost_matrix = Munkres.make_cost_matrix(matrix, inversion_func)

    For example:

    .. python::

        cost_matrix = Munkres.make_cost_matrix(matrix, lambda x : sys.maxsize - x)

    :Parameters:
        profit_matrix : list of lists
            The matrix to convert from a profit to a cost matrix

        inversion_function : function
            The function to use to invert each entry in the profit matrix

    :rtype: list of lists
    :return: The converted matrix
    """
    # Apply the inversion element-wise, producing a brand-new matrix.
    return [[inversion_function(value) for value in row] for row in profit_matrix]
Convenience function: Displays the contents of a matrix of integers.
:Parameters:
matrix : list of lists
Matrix to print
msg : str
Optional message to print before displaying the matrix
def print_matrix(matrix, msg=None):
"""
Convenience function: Displays the contents of a matrix of integers.
:Parameters:
matrix : list of lists
Matrix to print
msg : str
Optional message to print before displaying the matrix
"""
import math
if msg is not None:
print(msg)
# Calculate the appropriate format width.
width = 0
for row in matrix:
for val in row:
width = max(width, int(math.log10(val)) + 1)
# Make the format string
format = '%%%dd' % width
# Print the matrix
for row in matrix:
sep = '['
for val in row:
sys.stdout.write(sep + format % val)
sep = ', '
sys.stdout.write(']\n') |
Pad a possibly non-square matrix to make it square.
:Parameters:
matrix : list of lists
matrix to pad
pad_value : int
value to use to pad the matrix
:rtype: list of lists
:return: a new, possibly padded, matrix
def pad_matrix(self, matrix, pad_value=0):
    """
    Pad a possibly non-square matrix to make it square.

    :Parameters:
        matrix : list of lists
            matrix to pad

        pad_value : int
            value to use to pad the matrix

    :rtype: list of lists
    :return: a new, possibly padded, matrix
    """
    # Target size is the larger of the row count and the widest row.
    size = len(matrix)
    for row in matrix:
        size = max(size, len(row))
    padded = []
    for row in matrix:
        # Copy each row, extending it to the target width.
        padded.append(row[:] + [pad_value] * (size - len(row)))
    # Append all-pad rows until the matrix is square.
    while len(padded) < size:
        padded.append([pad_value] * size)
    return padded
Compute the indexes for the lowest-cost pairings between rows and
columns in the database. Returns a list of (row, column) tuples
that can be used to traverse the matrix.
:Parameters:
cost_matrix : list of lists
The cost matrix. If this cost matrix is not square, it
will be padded with zeros, via a call to ``pad_matrix()``.
(This method does *not* modify the caller's matrix. It
operates on a copy of the matrix.)
**WARNING**: This code handles square and rectangular
matrices. It does *not* handle irregular matrices.
:rtype: list
:return: A list of ``(row, column)`` tuples that describe the lowest
cost path through the matrix
def compute(self, cost_matrix):
    """
    Compute the indexes for the lowest-cost pairings between rows and
    columns in the database. Returns a list of (row, column) tuples
    that can be used to traverse the matrix.

    :Parameters:
        cost_matrix : list of lists
            The cost matrix. If this cost matrix is not square, it
            will be padded with zeros, via a call to ``pad_matrix()``.
            (This method does *not* modify the caller's matrix. It
            operates on a copy of the matrix.)

            **WARNING**: This code handles square and rectangular
            matrices. It does *not* handle irregular matrices.

    :rtype: list
    :return: A list of ``(row, column)`` tuples that describe the lowest
             cost path through the matrix
    """
    # Work on a padded copy so the caller's matrix is never mutated.
    self.C = self.pad_matrix(cost_matrix)
    self.n = len(self.C)
    # Remember the original (possibly rectangular) extents so results
    # from padded rows/columns can be excluded at the end.
    self.original_length = len(cost_matrix)
    self.original_width = len(cost_matrix[0])
    self.row_covered = [False for i in range(self.n)]
    self.col_covered = [False for i in range(self.n)]
    self.Z0_r = 0
    self.Z0_c = 0
    # path holds up to 2n alternating primed/starred zeros (step 5).
    self.path = self.__make_matrix(self.n * 2, 0)
    # marked[i][j]: 0 = unmarked, 1 = starred zero, 2 = primed zero.
    self.marked = self.__make_matrix(self.n, 0)

    done = False
    step = 1

    # Classic Munkres state machine: each step method returns the next
    # step number; step 7 is absent from the table, so reaching it
    # raises KeyError, which terminates the loop.
    steps = { 1 : self.__step1,
              2 : self.__step2,
              3 : self.__step3,
              4 : self.__step4,
              5 : self.__step5,
              6 : self.__step6 }

    while not done:
        try:
            func = steps[step]
            step = func()
        except KeyError:
            done = True

    # Look for the starred columns
    results = []
    for i in range(self.original_length):
        for j in range(self.original_width):
            if self.marked[i][j] == 1:
                results += [(i, j)]

    return results
Create an *n*x*n* matrix, populating it with the specific value.
def __make_matrix(self, n, val):
    """Create an *n*x*n* matrix, populating it with the specific value."""
    # Each row is a distinct list; all cells reference the same value.
    return [[val] * n for _ in range(n)]
For each row of the matrix, find the smallest element and
subtract it from every element in its row. Go to Step 2.
def __step1(self):
    """
    For each row of the matrix, find the smallest element and
    subtract it from every element in its row. Go to Step 2.
    """
    # Normalize each row so its minimum becomes zero.
    for row in self.C:
        lowest = min(row)
        for j in range(self.n):
            row[j] -= lowest
    return 2
Find a zero (Z) in the resulting matrix. If there is no starred
zero in its row or column, star Z. Repeat for each element in the
matrix. Go to Step 3.
def __step2(self):
    """
    Find a zero (Z) in the resulting matrix. If there is no starred
    zero in its row or column, star Z. Repeat for each element in the
    matrix. Go to Step 3.
    """
    size = self.n
    for i in range(size):
        for j in range(size):
            is_uncovered_zero = (self.C[i][j] == 0
                                 and not self.col_covered[j]
                                 and not self.row_covered[i])
            if is_uncovered_zero:
                # Star this zero and cover its row and column so no
                # other zero in them gets starred in this pass.
                self.marked[i][j] = 1
                self.col_covered[j] = True
                self.row_covered[i] = True
    self.__clear_covers()
    return 3
Cover each column containing a starred zero. If K columns are
covered, the starred zeros describe a complete set of unique
assignments. In this case, Go to DONE, otherwise, Go to Step 4.
def __step3(self):
    """
    Cover each column containing a starred zero. If K columns are
    covered, the starred zeros describe a complete set of unique
    assignments. In this case, Go to DONE, otherwise, Go to Step 4.
    """
    starred = 0
    for i in range(self.n):
        for j in range(self.n):
            if self.marked[i][j] == 1:
                self.col_covered[j] = True
                starred += 1
    # All n columns covered => assignment is complete (7 == DONE).
    return 7 if starred >= self.n else 4
Find a noncovered zero and prime it. If there is no starred zero
in the row containing this primed zero, Go to Step 5. Otherwise,
cover this row and uncover the column containing the starred
zero. Continue in this manner until there are no uncovered zeros
left. Save the smallest uncovered value and Go to Step 6.
def __step4(self):
    """
    Find a noncovered zero and prime it. If there is no starred zero
    in the row containing this primed zero, Go to Step 5. Otherwise,
    cover this row and uncover the column containing the starred
    zero. Continue in this manner until there are no uncovered zeros
    left. Save the smallest uncovered value and Go to Step 6.
    """
    step = 0
    done = False
    row = -1
    col = -1
    star_col = -1
    while not done:
        # Locate an uncovered zero; (-1, -1) means none remain.
        (row, col) = self.__find_a_zero()
        if row < 0:
            # No uncovered zeros left: adjust the matrix in step 6.
            done = True
            step = 6
        else:
            # Prime the zero just found.
            self.marked[row][col] = 2
            star_col = self.__find_star_in_row(row)
            if star_col >= 0:
                # A starred zero shares this row: cover the row,
                # uncover the star's column, and keep searching.
                col = star_col
                self.row_covered[row] = True
                self.col_covered[col] = False
            else:
                # No star in this row: remember this primed zero as Z0
                # and move to the augmenting-path step.
                done = True
                self.Z0_r = row
                self.Z0_c = col
                step = 5
    return step
Construct a series of alternating primed and starred zeros as
follows. Let Z0 represent the uncovered primed zero found in Step 4.
Let Z1 denote the starred zero in the column of Z0 (if any).
Let Z2 denote the primed zero in the row of Z1 (there will always
be one). Continue until the series terminates at a primed zero
that has no starred zero in its column. Unstar each starred zero
of the series, star each primed zero of the series, erase all
primes and uncover every line in the matrix. Return to Step 3
def __step5(self):
    """
    Construct a series of alternating primed and starred zeros as
    follows. Let Z0 represent the uncovered primed zero found in Step 4.
    Let Z1 denote the starred zero in the column of Z0 (if any).
    Let Z2 denote the primed zero in the row of Z1 (there will always
    be one). Continue until the series terminates at a primed zero
    that has no starred zero in its column. Unstar each starred zero
    of the series, star each primed zero of the series, erase all
    primes and uncover every line in the matrix. Return to Step 3
    """
    count = 0
    path = self.path
    # Z0: the uncovered primed zero recorded by step 4.
    path[count][0] = self.Z0_r
    path[count][1] = self.Z0_c
    done = False
    while not done:
        # Next starred zero in the column of the previous primed zero.
        row = self.__find_star_in_col(path[count][1])
        if row >= 0:
            count += 1
            path[count][0] = row
            path[count][1] = path[count-1][1]
        else:
            # No star in that column: the alternating series terminates.
            done = True
        if not done:
            # Primed zero in the row of the starred zero just added.
            col = self.__find_prime_in_row(path[count][0])
            count += 1
            path[count][0] = path[count-1][0]
            path[count][1] = col
    # Flip stars/primes along the path, then reset covers and primes.
    self.__convert_path(path, count)
    self.__clear_covers()
    self.__erase_primes()
    return 3
Add the value found in Step 4 to every element of each covered
row, and subtract it from every element of each uncovered column.
Return to Step 4 without altering any stars, primes, or covered
lines.
def __step6(self):
    """
    Add the value found in Step 4 to every element of each covered
    row, and subtract it from every element of each uncovered column.
    Return to Step 4 without altering any stars, primes, or covered
    lines.
    """
    delta = self.__find_smallest()
    for i in range(self.n):
        row_is_covered = self.row_covered[i]
        for j in range(self.n):
            if row_is_covered:
                self.C[i][j] += delta
            if not self.col_covered[j]:
                self.C[i][j] -= delta
    return 4
Find the smallest uncovered value in the matrix.
def __find_smallest(self):
    """Find the smallest uncovered value in the matrix."""
    minval = sys.maxsize
    for i in range(self.n):
        if self.row_covered[i]:
            # Covered rows contribute no candidates.
            continue
        for j in range(self.n):
            if not self.col_covered[j] and self.C[i][j] < minval:
                minval = self.C[i][j]
    return minval
Find the first uncovered element with value 0
def __find_a_zero(self):
    """Find the first uncovered element with value 0"""
    # NOTE(review): the inner loop does not break when a zero is found —
    # it scans the whole row, so within the first row containing an
    # uncovered zero, the *last* matching column is returned. Any
    # uncovered zero is valid for the algorithm, but a rewrite must
    # preserve this if exact behavior matters.
    row = -1
    col = -1
    i = 0
    n = self.n
    done = False
    while not done:
        j = 0
        while True:
            if (self.C[i][j] == 0) and \
                    (not self.row_covered[i]) and \
                    (not self.col_covered[j]):
                row = i
                col = j
                done = True
            j += 1
            if j >= n:
                break
        i += 1
        if i >= n:
            done = True
    # (-1, -1) signals that no uncovered zero exists.
    return (row, col)
Find the first starred element in the specified row. Returns
the column index, or -1 if no starred element was found.
def __find_star_in_row(self, row):
    """
    Find the first starred element in the specified row. Returns
    the column index, or -1 if no starred element was found.
    """
    for j in range(self.n):
        if self.marked[row][j] == 1:
            return j
    return -1
Find the first starred element in the specified column. Returns
the row index, or -1 if no starred element was found.
def __find_star_in_col(self, col):
    """
    Find the first starred element in the specified column. Returns
    the row index, or -1 if no starred element was found.
    """
    for i in range(self.n):
        if self.marked[i][col] == 1:
            return i
    return -1
Find the first primed element in the specified row. Returns
the column index, or -1 if no primed element was found.
def __find_prime_in_row(self, row):
    """
    Find the first primed element in the specified row. Returns
    the column index, or -1 if no primed element was found.
    """
    for j in range(self.n):
        if self.marked[row][j] == 2:
            return j
    return -1
Clear all covered matrix cells
def __clear_covers(self):
    """Clear all covered matrix cells"""
    # In-place slice assignment preserves the existing list objects.
    self.row_covered[:] = [False] * self.n
    self.col_covered[:] = [False] * self.n
Erase all prime markings
def __erase_primes(self):
    """Erase all prime markings"""
    for marked_row in self.marked:
        for j in range(self.n):
            if marked_row[j] == 2:
                marked_row[j] = 0
Update contingency table with new values without creating a new object.
def update(self, a, b, c, d):
    """
    Update contingency table with new values without creating a new object.
    """
    # ravel() yields a flat view into self.table, so the slice
    # assignment rewrites the four cells in place.
    flat_view = self.table.ravel()
    flat_view[:] = [a, b, c, d]
    self.N = self.table.sum()
Frequency Bias.
Formula: (a+b)/(a+c)
def bias(self):
    """
    Frequency Bias.

    Formula: (a+b)/(a+c)
    """
    a = self.table[0, 0]
    b = self.table[0, 1]
    c = self.table[1, 0]
    return (a + b) / (a + c)
Gilbert's Score or Threat Score or Critical Success Index a/(a+b+c)
def csi(self):
    """Gilbert's Score or Threat Score or Critical Success Index a/(a+b+c)"""
    a = self.table[0, 0]
    b = self.table[0, 1]
    c = self.table[1, 0]
    return a / (a + b + c)
Equitable Threat Score, Gilbert Skill Score, v, (a - R)/(a + b + c - R), R=(a+b)(a+c)/N
def ets(self):
    """Equitable Threat Score, Gilbert Skill Score, v, (a - R)/(a + b + c - R), R=(a+b)(a+c)/N"""
    a = self.table[0, 0]
    b = self.table[0, 1]
    c = self.table[1, 0]
    # R: hits expected by chance given the marginal totals.
    expected_hits = (a + b) * (a + c) / self.N
    return (a - expected_hits) / (a + b + c - expected_hits)
Doolittle (Heidke) Skill Score. 2(ad-bc)/((a+b)(b+d) + (a+c)(c+d))
def hss(self):
    """Doolittle (Heidke) Skill Score. 2(ad-bc)/((a+b)(b+d) + (a+c)(c+d))"""
    a = self.table[0, 0]
    b = self.table[0, 1]
    c = self.table[1, 0]
    d = self.table[1, 1]
    return 2 * (a * d - b * c) / ((a + b) * (b + d) + (a + c) * (c + d))
Peirce (Hansen-Kuipers, True) Skill Score (ad - bc)/((a+c)(b+d))
def pss(self):
    """Peirce (Hansen-Kuipers, True) Skill Score (ad - bc)/((a+c)(b+d))"""
    a = self.table[0, 0]
    b = self.table[0, 1]
    c = self.table[1, 0]
    d = self.table[1, 1]
    return (a * d - b * c) / ((a + c) * (b + d))
Clayton Skill Score (ad - bc)/((a+b)(c+d))
def css(self):
    """Clayton Skill Score (ad - bc)/((a+b)(c+d))"""
    a, b, c, d = self.table.ravel()
    return (a * d - b * c) / ((a + b) * (c + d))
Load scikit-learn decision tree ensemble object from file.
Parameters
----------
filename : str
Name of the pickle file containing the tree object.
Returns
-------
tree ensemble object
def load_tree_object(filename):
    """
    Load scikit-learn decision tree ensemble object from file.

    Parameters
    ----------
    filename : str
        Name of the pickle file containing the tree object.

    Returns
    -------
    tree ensemble object
    """
    # Pickle data is binary; the original opened the file in text mode,
    # which fails with decode/unpickling errors on Python 3.
    with open(filename, "rb") as file_obj:
        tree_ensemble_obj = pickle.load(file_obj)
    return tree_ensemble_obj
Write each decision tree in an ensemble to a file.
Parameters
----------
tree_ensemble_obj : sklearn.ensemble object
Random Forest or Gradient Boosted Regression object
output_filename : str
File where trees are written
attribute_names : list
List of attribute names to be used in place of indices if available.
def output_tree_ensemble(tree_ensemble_obj, output_filename, attribute_names=None):
    """
    Write each decision tree in an ensemble to its own file.

    Parameters
    ----------
    tree_ensemble_obj : sklearn.ensemble object
        Random Forest or Gradient Boosted Regression object
    output_filename : str
        Base name for the per-tree files; ".<tree index>.tree" is appended.
    attribute_names : list
        List of attribute names to be used in place of indices if available.
    """
    for t, tree in enumerate(tree_ensemble_obj.estimators_):
        print("Writing Tree {0:d}".format(t))
        # Bug fix: the template was never formatted with the tree index, so
        # every tree was written to the same literal "....{0:d}.tree" file,
        # each overwriting the previous one.
        tree_filename = (output_filename + ".{0:d}.tree").format(t)
        tree_str = print_tree_recursive(tree.tree_, 0, attribute_names)
        # Use a context manager so the handle is closed even if write() fails.
        with open(tree_filename, "w") as out_file:
            out_file.write(tree_str)
    return
Recursively writes a string representation of a decision tree object.
Parameters
----------
tree_obj : sklearn.tree._tree.Tree object
A base decision tree object
node_index : int
Index of the node being printed
attribute_names : list
List of attribute names
Returns
-------
tree_str : str
String representation of decision tree in the same format as the parf library.
def print_tree_recursive(tree_obj, node_index, attribute_names=None):
    """
    Recursively writes a string representation of a decision tree object.

    Parameters
    ----------
    tree_obj : sklearn.tree._tree.Tree object
        A base decision tree object
    node_index : int
        Index of the node being printed
    attribute_names : list
        List of attribute names
    Returns
    -------
    tree_str : str
        String representation of decision tree in the same format as the parf library.
    """
    tree_str = ""
    # The root call also emits a header line with the total node count.
    if node_index == 0:
        tree_str += "{0:d}\n".format(tree_obj.node_count)
    # A non-negative feature index marks an internal (branch, "b") node;
    # leaves carry a negative sentinel in sklearn's Tree arrays.
    if tree_obj.feature[node_index] >= 0:
        if attribute_names is None:
            attr_val = "{0:d}".format(tree_obj.feature[node_index])
        else:
            attr_val = attribute_names[tree_obj.feature[node_index]]
        # NOTE(review): weighted sample count is formatted with {2:0.4f} and
        # the split threshold with {4:1.5e} -- confirm this field order
        # matches the parf file format consumed downstream.
        tree_str += "b {0:d} {1} {2:0.4f} {3:d} {4:1.5e}\n".format(node_index,
                                                  attr_val,
                                                  tree_obj.weighted_n_node_samples[node_index],
                                                  tree_obj.n_node_samples[node_index],
                                                  tree_obj.threshold[node_index])
    else:
        # Leaf ("l") node: emit the index of the largest class-count entry
        # for classifiers, or the stored regression value otherwise.
        if tree_obj.max_n_classes > 1:
            leaf_value = "{0:d}".format(tree_obj.value[node_index].argmax())
        else:
            leaf_value = "{0}".format(tree_obj.value[node_index][0][0])
        tree_str += "l {0:d} {1} {2:0.4f} {3:d}\n".format(node_index,
                                                  leaf_value,
                                                  tree_obj.weighted_n_node_samples[node_index],
                                                  tree_obj.n_node_samples[node_index])
    # Recurse into children; children_left/right hold -1 for leaves, so the
    # > 0 test skips them (node 0 is always the root, never a child).
    if tree_obj.children_left[node_index] > 0:
        tree_str += print_tree_recursive(tree_obj, tree_obj.children_left[node_index], attribute_names)
    if tree_obj.children_right[node_index] > 0:
        tree_str += print_tree_recursive(tree_obj, tree_obj.children_right[node_index], attribute_names)
    return tree_str
Computes the mask used to create the training and validation set
def set_classifier_mask(self, v, base_mask=True):
    """Computes the mask used to create the training and validation set"""
    base = self._base
    labels = tonparray(v)
    klasses = np.unique(labels)
    if klasses[0] != -1 or klasses[1] != 1:
        raise RuntimeError("The labels must be -1 and 1 (%s)" % klasses)
    mask = np.zeros_like(labels)
    # Balanced sampling: take the same number of training points per class,
    # bounded by the size of the smaller class times the training fraction.
    n_tr = int(round(min([(labels == k).sum() for k in klasses]) *
                     base._tr_fraction))
    for k in klasses:
        candidates = np.where((labels == k) & base_mask)[0]
        np.random.shuffle(candidates)
        mask[candidates[:n_tr]] = True
    base._mask = SparseArray.fromlist(mask)
    return SparseArray.fromlist(labels)
Computes the mask used to create the training and validation set
def set_regression_mask(self, v):
    """Computes the mask used to create the training and validation set"""
    base = self._base
    # Random permutation; the first tr_fraction of points keep mask 1
    # (training), the rest are zeroed out (validation).
    order = np.arange(v.size())
    np.random.shuffle(order)
    mask = np.ones(v.size())
    mask[order[int(base._tr_fraction * v.size()):]] = 0
    base._mask = SparseArray.fromlist(mask)
Fitness function in the training set
def fitness(self, v):
    """Fitness function evaluated on the training set.

    Sets ``v.fitness`` in place (larger is better, so errors are negated);
    for multi-output classifiers it also sets ``v._error`` with the
    validation-side score, used later by :meth:`fitness_vs`.

    :param v: node/individual whose prediction is ``v.hy``.
    :raises RuntimeError: if ``base._fitness_function`` is not recognized.
    """
    base = self._base
    if base._classifier:
        if base._multiple_outputs:
            hy = SparseArray.argmax(v.hy)
            fit_func = base._fitness_function
            # Every accepted alias mapped to the score method computing
            # (training score, validation score) from (y, hy, index).
            # Replaces a long duplicated if/elif chain.
            scorers = {'macro-F1': 'a_F1', 'a_F1': 'a_F1',
                       'DotF1': 'g_F1', 'g_F1': 'g_F1',
                       'DotRecallDotPrecision': 'g_g_recall_precision',
                       'g_g_recall_precision': 'g_g_recall_precision',
                       'BER': 'a_recall', 'a_recall': 'a_recall',
                       'DotRecall': 'g_recall', 'g_recall': 'g_recall',
                       'macro-Precision': 'a_precision',
                       'a_precision': 'a_precision',
                       'DotPrecision': 'g_precision',
                       'g_precision': 'g_precision',
                       'accDotMacroF1': 'accDotMacroF1',
                       'macro-RecallF1': 'macroRecallF1',
                       'ER': 'accuracy', 'accuracy': 'accuracy'}
            if fit_func in scorers:
                method = getattr(self.score, scorers[fit_func])
                mf1, mf1_v = method(base._y_klass, hy, base._mask_ts.index)
            elif fit_func == 'F1':
                # F1 of a single class: an explicit index, or the minority
                # class when the configured index is negative.
                f1_index = base._F1_index
                klass = self.min_class if f1_index < 0 else f1_index
                mf1, mf1_v = self.score.F1(klass, base._y_klass, hy,
                                           base._mask_ts.index)
            elif fit_func in ('RecallDotPrecision', 'g_recall_precision'):
                mf1, mf1_v = self.score.g_recall_precision(self.min_class,
                                                           base._y_klass, hy,
                                                           base._mask_ts.index)
            else:
                raise RuntimeError('Unknown fitness function %s' % base._fitness_function)
            # Shift so that the best possible score maps to 0 (scores are
            # presumably in [0, 1] -- matches the original code's "- 1").
            v._error = mf1_v - 1
            v.fitness = mf1 - 1
        else:
            # Single-output classifier: negative sum of squared errors on
            # the training subset selected by the mask.
            v.fitness = -base._ytr.SSE(v.hy * base._mask)
    else:
        if base._multiple_outputs:
            # Regression, multiple outputs: negative mean of the per-output
            # sums of absolute errors on the masked training points.
            v.fitness = -np.mean([y.SAE(hy.mul(m)) for y, hy, m in
                                  zip(base._ytr, v.hy, base._mask)])
        else:
            v.fitness = -base._ytr.SAE(v.hy * base._mask)
Fitness function in the validation set
In classification it uses BER and RSE in regression
def fitness_vs(self, v):
    """Fitness function in the validation set

    In classification it uses BER and RSE in regression"""
    base = self._base
    if base._classifier:
        if base._multiple_outputs:
            # The validation score was already computed by fitness().
            v.fitness_vs = v._error
        else:
            # Negative count of misclassified validation points.
            v.fitness_vs = -((base.y - v.hy.sign()).sign().fabs() *
                             base._mask_vs).sum()
    else:
        masks, targets, predictions = base._mask, base.y, v.hy
        if not isinstance(masks, list):
            masks = [masks]
            targets = [targets]
            predictions = [predictions]
        scores = []
        for tr_mask, y_i, hy_i in zip(masks, targets, predictions):
            # Invert the training mask (1 - m) to select validation points.
            vs_mask = (tr_mask + -1).fabs()
            y_masked = y_i * vs_mask
            hy_masked = hy_i * vs_mask
            sse = (y_masked - hy_masked).sq().sum()
            variance = (y_masked + -y_masked.sum() / y_masked.size()).sq().sum()
            # Negative relative squared error; averaged over outputs below.
            scores.append(-sse / variance)
        v.fitness_vs = np.mean(scores)
Set the fitness to a new node.
Returns false in case fitness is not finite
def set_fitness(self, v):
    """Set the fitness to a new node.
    Returns false in case fitness is not finite"""
    base = self._base
    self.fitness(v)
    if np.isfinite(v.fitness):
        if base._tr_fraction < 1:
            # A validation split exists: its fitness must be finite too.
            self.fitness_vs(v)
            if not np.isfinite(v.fitness_vs):
                self.del_error(v)
                return False
        self.del_error(v)
        return True
    self.del_error(v)
    return False
Constrói uma :class:`RespostaCancelarUltimaVenda` a partir do
retorno informado.
:param unicode retorno: Retorno da função ``CancelarUltimaVenda``.
def analisar(retorno):
    """Builds a :class:`RespostaCancelarUltimaVenda` from the given return
    value.

    :param unicode retorno: Return value of the ``CancelarUltimaVenda``
        function.
    """
    # Bug fix: `funcao` was 'EnviarDadosVenda' (copy/paste from the sibling
    # response class); this parser handles the CancelarUltimaVenda return.
    resposta = analisar_retorno(forcar_unicode(retorno),
            funcao='CancelarUltimaVenda',
            classe_resposta=RespostaCancelarUltimaVenda,
            campos=(
                    ('numeroSessao', int),
                    ('EEEEE', unicode),
                    ('CCCC', unicode),
                    ('mensagem', unicode),
                    ('cod', unicode),
                    ('mensagemSEFAZ', unicode),
                    ('arquivoCFeBase64', unicode),
                    ('timeStamp', as_datetime),
                    ('chaveConsulta', unicode),
                    ('valorTotalCFe', Decimal),
                    ('CPFCNPJValue', unicode),
                    ('assinaturaQRCODE', unicode),
                ),
            campos_alternativos=[
                    # if the cancellation fails, only the first six fields
                    # specified in the technical spec should be returned...
                    (
                            ('numeroSessao', int),
                            ('EEEEE', unicode),
                            ('CCCC', unicode),
                            ('mensagem', unicode),
                            ('cod', unicode),
                            ('mensagemSEFAZ', unicode),
                    ),
                    # just in case, also consider the standard field layout
                    # when nothing else matches...
                    RespostaSAT.CAMPOS,
                ]
        )
    # '07000' is the success code; anything else is raised as an error.
    if resposta.EEEEE not in ('07000',):
        raise ExcecaoRespostaSAT(resposta)
    return resposta
Convert a data element to xdata. No data copying occurs.
The data element can have the following keys:
data (required)
is_sequence, collection_dimension_count, datum_dimension_count (optional description of the data)
spatial_calibrations (optional list of spatial calibration dicts, scale, offset, units)
intensity_calibration (optional intensity calibration dict, scale, offset, units)
metadata (optional)
properties (get stored into metadata.hardware_source)
one of either timestamp or datetime_modified
if datetime_modified (dst, tz) it is converted and used as timestamp
then timezone gets stored into metadata.description.timezone.
def convert_data_element_to_data_and_metadata_1(data_element) -> DataAndMetadata.DataAndMetadata:
    """Convert a data element to xdata. No data copying occurs.

    The data element can have the following keys:
        data (required)
        is_sequence, collection_dimension_count, datum_dimension_count (optional description of the data)
        spatial_calibrations (optional list of spatial calibration dicts, scale, offset, units)
        intensity_calibration (optional intensity calibration dict, scale, offset, units)
        metadata (optional)
        properties (get stored into metadata.hardware_source)
        one of either timestamp or datetime_modified
        if datetime_modified (dst, tz) it is converted and used as timestamp
            then timezone gets stored into metadata.description.timezone.
    """
    # data. takes ownership.
    data = data_element["data"]
    dimensional_shape = Image.dimensional_shape_from_data(data)
    is_sequence = data_element.get("is_sequence", False)
    dimension_count = len(Image.dimensional_shape_from_data(data))
    # The sequence axis (if any) is excluded when inferring how the
    # remaining axes split into collection vs. datum dimensions.
    adjusted_dimension_count = dimension_count - (1 if is_sequence else 0)
    collection_dimension_count = data_element.get("collection_dimension_count", 2 if adjusted_dimension_count in (3, 4) else 0)
    datum_dimension_count = data_element.get("datum_dimension_count", adjusted_dimension_count - collection_dimension_count)
    data_descriptor = DataAndMetadata.DataDescriptor(is_sequence, collection_dimension_count, datum_dimension_count)
    # dimensional calibrations: only applied when one calibration is given
    # per data dimension; otherwise left as None (defaults downstream).
    dimensional_calibrations = None
    if "spatial_calibrations" in data_element:
        dimensional_calibrations_list = data_element.get("spatial_calibrations")
        if len(dimensional_calibrations_list) == len(dimensional_shape):
            dimensional_calibrations = list()
            for dimension_calibration in dimensional_calibrations_list:
                offset = float(dimension_calibration.get("offset", 0.0))
                scale = float(dimension_calibration.get("scale", 1.0))
                units = dimension_calibration.get("units", "")
                units = str(units) if units is not None else str()
                # A zero scale would be a degenerate calibration; fall back
                # to the identity calibration in that case.
                if scale != 0.0:
                    dimensional_calibrations.append(Calibration.Calibration(offset, scale, units))
                else:
                    dimensional_calibrations.append(Calibration.Calibration())
    # intensity calibration (same zero-scale guard as above)
    intensity_calibration = None
    if "intensity_calibration" in data_element:
        intensity_calibration_dict = data_element.get("intensity_calibration")
        offset = float(intensity_calibration_dict.get("offset", 0.0))
        scale = float(intensity_calibration_dict.get("scale", 1.0))
        units = intensity_calibration_dict.get("units", "")
        units = str(units) if units is not None else str()
        if scale != 0.0:
            intensity_calibration = Calibration.Calibration(offset, scale, units)
    # properties (general tags); "properties" are nested under the
    # "hardware_source" key of the resulting metadata.
    metadata = dict()
    if "metadata" in data_element:
        metadata.update(Utility.clean_dict(data_element.get("metadata")))
    if "properties" in data_element and data_element["properties"]:
        hardware_source_metadata = metadata.setdefault("hardware_source", dict())
        hardware_source_metadata.update(Utility.clean_dict(data_element.get("properties")))
    # dates are _local_ time and must use this specific ISO 8601 format. 2013-11-17T08:43:21.389391
    # time zones are offsets (east of UTC) in the following format "+HHMM" or "-HHMM"
    # daylight savings times are time offset (east of UTC) in format "+MM" or "-MM"
    # timezone is for conversion and is the Olson timezone string.
    # datetime.datetime.strptime(datetime.datetime.isoformat(datetime.datetime.now()), "%Y-%m-%dT%H:%M:%S.%f" )
    # datetime_modified, datetime_modified_tz, datetime_modified_dst, datetime_modified_tzname is the time at which this image was modified.
    # datetime_original, datetime_original_tz, datetime_original_dst, datetime_original_tzname is the time at which this image was created.
    timestamp = data_element.get("timestamp", datetime.datetime.utcnow())
    datetime_item = data_element.get("datetime_modified", Utility.get_datetime_item_from_utc_datetime(timestamp))
    local_datetime = Utility.get_datetime_from_datetime_item(datetime_item)
    dst_value = datetime_item.get("dst", "+00")
    tz_value = datetime_item.get("tz", "+0000")
    timezone = datetime_item.get("timezone")
    time_zone = { "dst": dst_value, "tz": tz_value}
    if timezone is not None:
        time_zone["timezone"] = timezone
    # note: dst is informational only; tz already include dst
    # Parse "+HHMM"/"-HHMM" into signed minutes east of UTC.
    tz_adjust = (int(tz_value[1:3]) * 60 + int(tz_value[3:5])) * (-1 if tz_value[0] == '-' else 1)
    utc_datetime = local_datetime - datetime.timedelta(minutes=tz_adjust)  # tz_adjust already contains dst_adjust
    timestamp = utc_datetime
    return DataAndMetadata.new_data_and_metadata(data,
                                                 intensity_calibration=intensity_calibration,
                                                 dimensional_calibrations=dimensional_calibrations,
                                                 metadata=metadata,
                                                 timestamp=timestamp,
                                                 data_descriptor=data_descriptor,
                                                 timezone=timezone,
                                                 timezone_offset=tz_value)
Segment forecast tracks to only output data contined within a
region in the CONUS, as defined by the mapfile.
Args:
csv_path(str): Path to the full CONUS csv file.
file_dict_key(str): Dictionary key for the csv files,
currently either 'track_step' or 'track_total'
out_path (str): Path to output new segmented csv files.
Returns:
Segmented forecast tracks in a csv file.
def output_sector_csv(self, csv_path, file_dict_key, out_path):
    """
    Segment forecast tracks to only output data contined within a
    region in the CONUS, as defined by the mapfile.

    Args:
        csv_path(str): Path to the full CONUS csv file.
        file_dict_key(str): Dictionary key for the csv files,
            currently either 'track_step' or 'track_total'
        out_path (str): Path to output new segmented csv files.
    Returns:
        Segmented forecast tracks in a csv file.
    """
    filename = "{0}_{1}_{2}_{3}.csv".format(
        file_dict_key,
        self.ensemble_name,
        self.member,
        self.run_date.strftime(self.date_format))
    csv_file = csv_path + filename
    if not exists(csv_file):
        print('No {0} {1} csv file found'.format(self.member,
                                                 self.run_date.strftime("%Y%m%d")))
        return
    csv_data = pd.read_csv(csv_file)
    if self.inds is None:
        # Cache the row indices falling inside the sector bounding box.
        lon_obj = csv_data.loc[:, "Centroid_Lon"]
        lat_obj = csv_data.loc[:, "Centroid_Lat"]
        self.inds = np.where((self.ne_lat >= lat_obj) & (self.sw_lat <= lat_obj)
                             & (self.ne_lon >= lon_obj) & (self.sw_lon <= lon_obj))[0]
    if np.shape(self.inds)[0] <= 0:
        print('No {0} {1} sector data found'.format(self.member,
                                                    self.run_date.strftime("%Y%m%d")))
        return
    csv_data = csv_data.reindex(np.array(self.inds))
    sector_csv_filename = out_path + filename
    print("Output sector csv file " + sector_csv_filename)
    csv_data.to_csv(sector_csv_filename,
                    na_rep="nan",
                    float_format="%0.5f",
                    index=False)
    os.chmod(sector_csv_filename, 0o666)
    return
Segment patches of forecast tracks to only output data contined within a
region in the CONUS, as defined by the mapfile.
Args:
netcdf_path (str): Path to the full CONUS netcdf patch file.
out_path (str): Path to output new segmented netcdf files.
patch_radius (int): Size of the patch radius.
config (dict): Dictonary containing information about data and
ML variables
Returns:
Segmented patch netcdf files.
def output_sector_netcdf(self,netcdf_path,out_path,patch_radius,config):
    """
    Segment patches of forecast tracks to only output data contined within a
    region in the CONUS, as defined by the mapfile.

    Args:
        netcdf_path (str): Path to the full CONUS netcdf patch file.
        out_path (str): Path to output new segmented netcdf files.
        patch_radius (int): Size of the patch radius.
        config (dict): Dictonary containing information about data and
            ML variables
    Returns:
        Segmented patch netcdf files.
    """
    nc_data = self.load_netcdf_data(netcdf_path,patch_radius)
    if nc_data is not None:
        out_filename = out_path + "{0}_{1}_{2}_model_patches.nc".format(
            self.ensemble_name,
            self.run_date.strftime(self.date_format),
            self.member)
        out_file = Dataset(out_filename, "w")
        # Dimensions mirror the source file: p patches of row x col cells.
        out_file.createDimension("p", np.shape(nc_data.variables['p'])[0])
        out_file.createDimension("row", np.shape(nc_data.variables['row'])[0])
        out_file.createDimension("col", np.shape(nc_data.variables['col'])[0])
        out_file.createVariable("p", "i4", ("p",))
        out_file.createVariable("row", "i4", ("row",))
        out_file.createVariable("col", "i4", ("col",))
        out_file.variables["p"][:] = nc_data.variables['p'][:]
        out_file.variables["row"][:] = nc_data.variables['row'][:]
        out_file.variables["col"][:] = nc_data.variables['col'][:]
        out_file.Conventions = "CF-1.6"
        out_file.title = "{0} Storm Patches for run {1} member {2}".format(self.ensemble_name,
                                                                          self.run_date.strftime(self.date_format),
                                                                          self.member)
        out_file.object_variable = config.watershed_variable
        # Per-cell metadata variables (p, row, col) and per-patch center
        # variables (p,), each with its units string declared in parallel.
        meta_variables = ["lon", "lat", "i", "j", "x", "y", "masks"]
        meta_units = ["degrees_east", "degrees_north", "", "", "m", "m", ""]
        center_vars = ["time", "centroid_lon", "centroid_lat", "centroid_i", "centroid_j", "track_id", "track_step"]
        center_units = ["hours since {0}".format(self.run_date.strftime("%Y-%m-%d %H:%M:%S")),
                        "degrees_east",
                        "degrees_north",
                        "",
                        "",
                        "",
                        ""]
        label_columns = ["Matched", "Max_Hail_Size", "Num_Matches", "Shape", "Location", "Scale"]
        for m, meta_variable in enumerate(meta_variables):
            # Integer cell indices and masks get i4; coordinates get f4.
            if meta_variable in ["i", "j", "masks"]:
                dtype = "i4"
            else:
                dtype = "f4"
            m_var = out_file.createVariable(meta_variable, dtype, ("p", "row", "col"), complevel=1, zlib=True)
            m_var.long_name = meta_variable
            m_var.units = meta_units[m]
        for c, center_var in enumerate(center_vars):
            if center_var in ["time", "track_id", "track_step"]:
                dtype = "i4"
            else:
                dtype = "f4"
            c_var = out_file.createVariable(center_var, dtype, ("p",), zlib=True, complevel=1)
            c_var.long_name = center_var
            c_var.units =center_units[c]
        # Storm variables are current-step fields, potential variables are
        # previous-step fields; both are per-cell patches.
        for storm_variable in config.storm_variables:
            s_var = out_file.createVariable(storm_variable + "_curr", "f4", ("p", "row", "col"), complevel=1, zlib=True)
            s_var.long_name = storm_variable
            s_var.units = ""
        for potential_variable in config.potential_variables:
            p_var = out_file.createVariable(potential_variable + "_prev", "f4", ("p", "row", "col"),
                                            complevel=1, zlib=True)
            p_var.long_name = potential_variable
            p_var.units = ""
        # Label columns only exist for training runs.
        if config.train:
            for label_column in label_columns:
                if label_column in ["Matched", "Num_Matches"]:
                    dtype = "i4"
                else:
                    dtype = "f4"
                l_var = out_file.createVariable(label_column, dtype, ("p",), zlib=True, complevel=1)
                l_var.long_name = label_column
                l_var.units = ""
        out_file.variables["time"][:] = nc_data.variables['time'][:]
        for c_var in ["lon", "lat"]:
            out_file.variables["centroid_" + c_var][:] = nc_data.variables['centroid_' + c_var][:]
        for c_var in ["i", "j"]:
            out_file.variables["centroid_" + c_var][:] = nc_data.variables["centroid_" + c_var][:]
        out_file.variables["track_id"][:] = nc_data.variables['track_id'][:]
        out_file.variables["track_step"][:] = nc_data.variables['track_step'][:]
        for meta_var in meta_variables:
            # NOTE(review): both branches below are identical; the lon/lat
            # special case appears vestigial.
            if meta_var in ["lon", "lat"]:
                out_file.variables[meta_var][:] = nc_data.variables[meta_var][:]
            else:
                out_file.variables[meta_var][:] = nc_data.variables[meta_var][:]
        for storm_variable in config.storm_variables:
            out_file.variables[storm_variable + "_curr"][:] = nc_data.variables[storm_variable + '_curr'][:]
        for p_variable in config.potential_variables:
            out_file.variables[p_variable + "_prev"][:] = nc_data.variables[p_variable + '_prev'][:]
        if config.train:
            for label_column in label_columns:
                # NOTE(review): bare except silently zero-fills any failure,
                # not just a missing variable -- consider `except KeyError:`.
                try:
                    out_file.variables[label_column][:] = nc_data.variables[label_column][:]
                except:
                    out_file.variables[label_column][:] = 0
        out_file.close()
        print("Output sector nc file " + out_filename)
    else:
        print('No {0} {1} netcdf file/sector data found'.format(self.member,
                                                                self.run_date.strftime("%Y%m%d")))
    return
Return a json-clean dict. Will log info message for failures.
def clean_dict(d0, clean_item_fn=None):
    """
    Return a json-clean dict. Will log info message for failures.
    """
    # Default to the module-level clean_item when no cleaner is supplied.
    fn = clean_item_fn if clean_item_fn else clean_item
    result = dict()
    for key in d0:
        value = fn(d0[key])
        if value is not None:
            result[key] = value
    return result
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.