| repo | file | code | file_length | avg_line_length | max_line_length | extension_type |
|---|---|---|---|---|---|---|
| enpheeph | enpheeph-main/src/enpheeph/abc/modelsummaryabc.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
class ModelSummaryABC(abc.ABC):
pass
| 1,594 | 39.897436 | 77 | py |
| enpheeph | enpheeph-main/src/enpheeph/abc/__init__.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'handlers',
'helpers',
'injections',
'integrations',
'utils',
},
submod_attrs={
'handlers': [
'InjectionHandler',
'LibraryHandlerPluginABC',
'PyTorchHandlerPlugin',
'injectionhandler',
'libraryhandlerpluginabc',
'plugins',
'pytorchhandlerplugin',
],
'helpers': [
'FaultModelABC',
'ModelSummaryABC',
'ModelSummaryTorchinfo',
'abc',
'faultmodel',
'faultmodelabc',
'faultmodels',
'layersummaryabc',
'modelsummaryabc',
'modelsummarytorchinfo',
'plugins',
'sensitivityanalysis',
'summaries',
],
'injections': [
'AutoPyTorchMaskPlugin',
'CSVStoragePluginABC',
'CuPyPyTorchMaskPlugin',
'CustomBase',
'CustomBaseClass',
'DenseSparseOutputPyTorchFault',
'ExperimentRun',
'ExperimentRunBaseMixin',
'ExperimentRunProtocol',
'FPQuantizedOutputPyTorchFault',
'Fault',
'FaultABC',
'FaultBaseMixin',
'FaultProtocol',
'IndexingPlugin',
'IndexingPluginABC',
'Injection',
'InjectionABC',
'InjectionProtocol',
'LowLevelTorchMaskPluginABC',
'Monitor',
'MonitorABC',
'MonitorBaseMixin',
'MonitorProtocol',
'NumPyPyTorchMaskPlugin',
'OutputPyTorchFault',
'OutputPyTorchMonitor',
'PandasCSVStoragePlugin',
'PolymorphicMixin',
'PrunedDenseToSparseWeightPyTorchFault',
'PyTorchInjectionABC',
'PyTorchMaskMixin',
'PyTorchMonitorPostProcessorMixin',
'PyTorchSparseInterfaceMixin',
'PyTorchSparseInterfacePluginABC',
'PyTorchTensorObjectValidatorMixin',
'QuantizedOutputPyTorchFault',
'SNNOutputNorseFault',
'SQLStoragePluginABC',
'SQLiteStoragePlugin',
'Session',
'SessionBaseMixin',
'SessionProtocol',
'StoragePluginABC',
'WeightPyTorchFault',
'abc',
'autopytorchmaskplugin',
'csv',
'csvdataclasses',
'csvstorageplugin',
'csvstoragepluginabc',
'cupypytorchmaskplugin',
'densesparseoutputpytorchfault',
'faultabc',
'fix_pysqlite',
'fpquantizedoutputpytorchfault',
'indexing',
'indexingplugin',
'indexingpluginabc',
'injectionabc',
'lowleveltorchmaskpluginabc',
'mask',
'mixins',
'monitorabc',
'numpypytorchmaskplugin',
'outputpytorchfault',
'outputpytorchmonitor',
'plugins',
'pruneddensetosparseactivationpytorchfault',
'pruneddensetosparseweightpytorchfault',
'pysqlite_begin_emission_fix_on_connect',
'pytorchinjectionabc',
'pytorchmaskmixin',
'pytorchmonitorpostprocessormixin',
'pytorchquantizationmixin',
'pytorchsparseinterfacemixin',
'pytorchsparseinterfacepluginabc',
'pytorchtensorobjectvalidatormixin',
'quantizedoutputpytorchfault',
'set_sqlite_pragma',
'snnoutputnorsefault',
'sparse',
'sql',
'sqlalchemy_begin_emission_pysqlite',
'sqldataclasses',
'sqlitestorageplugin',
'sqlstoragepluginabc',
'sqlutils',
'storage',
'storagepluginabc',
'storagetypings',
'torch_geometric_mean',
'utils',
'weightpytorchfault',
],
'integrations': [
'InjectionCallback',
'injectioncallback',
'pytorchlightning',
],
'utils': [
'ActiveDimensionIndexType',
'AnyIndexType',
'AnyMaskType',
'ArrayType',
'BaseInjectionLocation',
'BitFaultMaskInfo',
'BitFaultValue',
'BitIndexInfo',
'BitWidth',
'DimensionDictType',
'DimensionIndexType',
'DimensionLocationIndexType',
'DimensionLocationMaskType',
'DimensionType',
'Endianness',
'FaultLocation',
'FaultLocationMixin',
'FaultMaskOperation',
'FaultMaskValue',
'HandlerStatus',
'IDGenerator',
'IDGeneratorSubclass',
'Index1DType',
'IndexMultiDType',
'IndexTimeType',
'InjectionLocationABC',
'LocationMixin',
'LocationModuleNameMixin',
'LocationOptionalMixin',
'LowLevelMaskArrayType',
'Mask1DType',
'MaskMultiDType',
'ModelType',
'MonitorLocation',
'MonitorMetric',
'ParameterType',
'PathType',
'ShapeType',
'SkipIfErrorContextManager',
'TensorType',
'camel_to_snake',
'classes',
'compare_version',
'constants',
'dataclasses',
'enums',
'functions',
'get_object_library',
'imports',
'is_module_available',
'typings',
],
},
)
def __dir__():
return __all__
__all__ = ['ActiveDimensionIndexType', 'AnyIndexType', 'AnyMaskType',
'ArrayType', 'AutoPyTorchMaskPlugin', 'BaseInjectionLocation',
'BitFaultMaskInfo', 'BitFaultValue', 'BitIndexInfo', 'BitWidth',
'CSVStoragePluginABC', 'CuPyPyTorchMaskPlugin', 'CustomBase',
'CustomBaseClass', 'DenseSparseOutputPyTorchFault',
'DimensionDictType', 'DimensionIndexType',
'DimensionLocationIndexType', 'DimensionLocationMaskType',
'DimensionType', 'Endianness', 'ExperimentRun',
'ExperimentRunBaseMixin', 'ExperimentRunProtocol',
'FPQuantizedOutputPyTorchFault', 'Fault', 'FaultABC',
'FaultBaseMixin', 'FaultLocation', 'FaultLocationMixin',
'FaultMaskOperation', 'FaultMaskValue', 'FaultModelABC',
'FaultProtocol', 'HandlerStatus', 'IDGenerator',
'IDGeneratorSubclass', 'Index1DType', 'IndexMultiDType',
'IndexTimeType', 'IndexingPlugin', 'IndexingPluginABC', 'Injection',
'InjectionABC', 'InjectionCallback', 'InjectionHandler',
'InjectionLocationABC', 'InjectionProtocol',
'LibraryHandlerPluginABC', 'LocationMixin',
'LocationModuleNameMixin', 'LocationOptionalMixin',
'LowLevelMaskArrayType', 'LowLevelTorchMaskPluginABC', 'Mask1DType',
'MaskMultiDType', 'ModelSummaryABC', 'ModelSummaryTorchinfo',
'ModelType', 'Monitor', 'MonitorABC', 'MonitorBaseMixin',
'MonitorLocation', 'MonitorMetric', 'MonitorProtocol',
'NumPyPyTorchMaskPlugin', 'OutputPyTorchFault',
'OutputPyTorchMonitor', 'PandasCSVStoragePlugin', 'ParameterType',
'PathType', 'PolymorphicMixin',
'PrunedDenseToSparseWeightPyTorchFault', 'PyTorchHandlerPlugin',
'PyTorchInjectionABC', 'PyTorchMaskMixin',
'PyTorchMonitorPostProcessorMixin', 'PyTorchSparseInterfaceMixin',
'PyTorchSparseInterfacePluginABC',
'PyTorchTensorObjectValidatorMixin', 'QuantizedOutputPyTorchFault',
'SNNOutputNorseFault', 'SQLStoragePluginABC', 'SQLiteStoragePlugin',
'Session', 'SessionBaseMixin', 'SessionProtocol', 'ShapeType',
'SkipIfErrorContextManager', 'StoragePluginABC', 'TensorType',
'WeightPyTorchFault', 'abc', 'autopytorchmaskplugin',
'camel_to_snake', 'classes', 'compare_version', 'constants', 'csv',
'csvdataclasses', 'csvstorageplugin', 'csvstoragepluginabc',
'cupypytorchmaskplugin', 'dataclasses',
'densesparseoutputpytorchfault', 'enums', 'faultabc', 'faultmodel',
'faultmodelabc', 'faultmodels', 'fix_pysqlite',
'fpquantizedoutputpytorchfault', 'functions', 'get_object_library',
'handlers', 'helpers', 'imports', 'indexing', 'indexingplugin',
'indexingpluginabc', 'injectionabc', 'injectioncallback',
'injectionhandler', 'injections', 'integrations',
'is_module_available', 'layersummaryabc', 'libraryhandlerpluginabc',
'lowleveltorchmaskpluginabc', 'mask', 'mixins', 'modelsummaryabc',
'modelsummarytorchinfo', 'monitorabc', 'numpypytorchmaskplugin',
'outputpytorchfault', 'outputpytorchmonitor', 'plugins',
'pruneddensetosparseactivationpytorchfault',
'pruneddensetosparseweightpytorchfault',
'pysqlite_begin_emission_fix_on_connect', 'pytorchhandlerplugin',
'pytorchinjectionabc', 'pytorchlightning', 'pytorchmaskmixin',
'pytorchmonitorpostprocessormixin', 'pytorchquantizationmixin',
'pytorchsparseinterfacemixin', 'pytorchsparseinterfacepluginabc',
'pytorchtensorobjectvalidatormixin', 'quantizedoutputpytorchfault',
'sensitivityanalysis', 'set_sqlite_pragma', 'snnoutputnorsefault',
'sparse', 'sql', 'sqlalchemy_begin_emission_pysqlite',
'sqldataclasses', 'sqlitestorageplugin', 'sqlstoragepluginabc',
'sqlutils', 'storage', 'storagepluginabc', 'storagetypings',
'summaries', 'torch_geometric_mean', 'typings', 'utils',
'weightpytorchfault']
# </AUTOGEN_INIT>
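# Usage sketch (not part of the original file): with the lazy __getattr__ above, a
# plain attribute access on the package imports the defining submodule on first use
# and caches the result in globals(); setting the EAGER_IMPORT environment variable
# before importing forces everything to load up front instead. Assuming the package
# that owns this __init__ is importable as `enpheeph`:
#
#     import os
#     os.environ["EAGER_IMPORT"] = "1"         # optional: disable the lazy behaviour
#     import enpheeph
#     fault_cls = enpheeph.WeightPyTorchFault  # resolved via __getattr__, then cached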
| 12,581 | 37.477064 | 88 | py |
| enpheeph | enpheeph-main/src/enpheeph/abc/monitorabc.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import enpheeph.injections.abc.injectionabc
class MonitorABC(enpheeph.injections.abc.injectionabc.InjectionABC):
pass
| 1,664 | 41.692308 | 77 | py |
| enpheeph | enpheeph-main/src/enpheeph/abc/storagepluginabc.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
import datetime
import typing
import enpheeph.injections.plugins.storage.utils.storagetypings
import enpheeph.utils.dataclasses
class StoragePluginABC(abc.ABC):
# the id of the current experiment
experiment_id: typing.Optional[int]
session_id: typing.Optional[int]
@abc.abstractmethod
def get_experiments(
self,
id_: typing.Optional[int] = None,
running: typing.Optional[bool] = None,
completed: typing.Optional[bool] = None,
start_time: typing.Optional[datetime.datetime] = None,
total_duration: typing.Optional[datetime.timedelta] = None,
golden_run_flag: typing.Optional[bool] = None,
injection_locations: typing.Optional[
typing.Sequence[enpheeph.utils.dataclasses.InjectionLocationABC]
] = None,
# in the future we will add also model_info
) -> typing.List[
enpheeph.injections.plugins.storage.utils.storagetypings.ExperimentRunProtocol,
]:
pass
@abc.abstractmethod
def create_experiment(
self,
injection_locations: typing.Sequence[
enpheeph.utils.dataclasses.InjectionLocationABC
],
# in the future also model_info
running: bool = True,
golden_run_flag: bool = False,
# the id for the golden run
# if None we skip this part
golden_run_id: typing.Optional[int] = None,
start_time: typing.Optional[datetime.datetime] = None,
extra_experiment_info: typing.Optional[
typing.Dict[typing.Any, typing.Any]
] = None,
) -> int:
pass
@abc.abstractmethod
def create_session(
self,
extra_session_info: typing.Optional[typing.Dict[typing.Any, typing.Any]] = None,
) -> int:
pass
@abc.abstractmethod
def complete_experiment(
self,
total_duration: typing.Optional[datetime.timedelta] = None,
) -> None:
pass
@abc.abstractmethod
def complete_session(
self,
) -> None:
pass
@abc.abstractmethod
def add_experiment_metrics(
self, metrics: typing.Dict[typing.Any, typing.Any]
) -> None:
pass
@abc.abstractmethod
def add_experiment_golden_run(self, golden_run_id: int) -> None:
pass
@abc.abstractmethod
def add_payload(
self,
location: enpheeph.utils.dataclasses.InjectionLocationABC,
payload: typing.Dict[typing.Any, typing.Any],
) -> None:
pass
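# A minimal in-memory sketch of a concrete plugin (illustrative only, not part of the
# original module), mainly to show the lifecycle implied by the ABC above:
# create_session -> create_experiment -> add_* -> complete_experiment -> complete_session.
# The dict-based record layout is an assumption made purely for the example.
class _InMemoryStoragePluginSketch(StoragePluginABC):
    def __init__(self) -> None:
        self.experiment_id = None
        self.session_id = None
        self._experiments: typing.Dict[int, typing.Dict[typing.Any, typing.Any]] = {}

    def get_experiments(self, id_=None, running=None, completed=None, start_time=None,
                        total_duration=None, golden_run_flag=None,
                        injection_locations=None):
        return [e for i, e in self._experiments.items() if id_ is None or i == id_]

    def create_experiment(self, injection_locations, running=True,
                          golden_run_flag=False, golden_run_id=None,
                          start_time=None, extra_experiment_info=None):
        self.experiment_id = len(self._experiments) + 1
        self._experiments[self.experiment_id] = {
            "injection_locations": list(injection_locations),
            "running": running,
            "golden_run_flag": golden_run_flag,
            "golden_run_id": golden_run_id,
            "start_time": start_time,
            "metrics": {},
            "payloads": [],
        }
        return self.experiment_id

    def create_session(self, extra_session_info=None):
        self.session_id = (self.session_id or 0) + 1
        return self.session_id

    def complete_experiment(self, total_duration=None):
        self._experiments[self.experiment_id]["running"] = False
        self._experiments[self.experiment_id]["total_duration"] = total_duration

    def complete_session(self):
        self.session_id = None

    def add_experiment_metrics(self, metrics):
        self._experiments[self.experiment_id]["metrics"].update(metrics)

    def add_experiment_golden_run(self, golden_run_id):
        self._experiments[self.experiment_id]["golden_run_id"] = golden_run_id

    def add_payload(self, location, payload):
        self._experiments[self.experiment_id]["payloads"].append((location, payload))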
| 4,058 | 32.545455 | 88 | py |
| enpheeph | enpheeph-main/src/enpheeph/abc/layersummaryabc.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 1,539 | 45.666667 | 77 | py |
| enpheeph | enpheeph-main/src/enpheeph/abc/faultmodelabc.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
class FaultModelABC(abc.ABC):
@abc.abstractmethod
def test(self):
pass
| 1,640 | 39.02439 | 77 | py |
| enpheeph | enpheeph-main/src/enpheeph/abc/csvstoragepluginabc.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
import datetime
import typing
import enpheeph.injections.plugins.storage.abc.storagepluginabc
import enpheeph.injections.plugins.storage.utils.storagetypings
import enpheeph.utils.dataclasses
import enpheeph.utils.typings
# import enpheeph.injections.plugins.storage.csv.utils.csvdataclasses as csvdataclasses
class CSVStoragePluginABC(
enpheeph.injections.plugins.storage.abc.storagepluginabc.StoragePluginABC,
):
# the id of the current experiment
experiment_id: typing.Optional[int]
session_id: typing.Optional[int]
@abc.abstractmethod
def get_experiments(
self,
id_: typing.Optional[int] = None,
running: typing.Optional[bool] = None,
completed: typing.Optional[bool] = None,
start_time: typing.Optional[datetime.datetime] = None,
total_duration: typing.Optional[datetime.timedelta] = None,
golden_run_flag: typing.Optional[bool] = None,
injection_locations: typing.Optional[
typing.Sequence[enpheeph.utils.dataclasses.InjectionLocationABC]
] = None,
# in the future we will add also model_info
) -> typing.List[
enpheeph.injections.plugins.storage.utils.storagetypings.ExperimentRunProtocol,
]:
pass
@abc.abstractmethod
def create_experiment(
self,
injection_locations: typing.Sequence[
enpheeph.utils.dataclasses.InjectionLocationABC
],
# in the future also model_info
running: bool = True,
golden_run_flag: bool = False,
# the id for the golden run
# if None we skip this part
golden_run_id: typing.Optional[int] = None,
start_time: typing.Optional[datetime.datetime] = None,
extra_experiment_info: typing.Optional[
typing.Dict[typing.Any, typing.Any]
] = None,
) -> int:
pass
@abc.abstractmethod
def create_session(
self,
extra_session_info: typing.Optional[typing.Dict[typing.Any, typing.Any]] = None,
) -> int:
pass
@abc.abstractmethod
def complete_experiment(
self,
total_duration: typing.Optional[datetime.timedelta] = None,
) -> None:
pass
@abc.abstractmethod
def complete_session(
self,
) -> None:
pass
@abc.abstractmethod
def add_experiment_metrics(
self, metrics: typing.Dict[typing.Any, typing.Any]
) -> None:
pass
@abc.abstractmethod
def add_experiment_golden_run(self, golden_run_id: int) -> None:
pass
@abc.abstractmethod
def add_payload(
self,
location: enpheeph.utils.dataclasses.InjectionLocationABC,
payload: typing.Dict[typing.Any, typing.Any],
) -> None:
pass
| 4,317 | 33 | 88 | py |
| enpheeph | enpheeph-main/src/enpheeph/injections/pruneddensetosparseactivationpytorchfault.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.mixins.pytorchsparseinterfacemixin
import enpheeph.injections.mixins.pytorchtensorobjectvalidatormixin
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.utils.dataclasses
import enpheeph.utils.enums
# we move this import down
if typing.TYPE_CHECKING:
import torch
class PrunedDenseToSparseWeightPyTorchFault(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
enpheeph.injections.mixins.pytorchsparseinterfacemixin.PyTorchSparseInterfaceMixin,
(
# fmt: off
enpheeph.injections.mixins.
pytorchtensorobjectvalidatormixin.PyTorchTensorObjectValidatorMixin
# fmt: on
),
):
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
def __init__(
self,
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
),
location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
) -> None:
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.low_level_plugin = low_level_torch_plugin
self.handle = None
self.mask = None
@property
def module_name(self) -> str:
return self.location.module_name
def output_fault_hook(
self,
module: "torch.nn.Module",
input: typing.Union[typing.Tuple["torch.Tensor"], "torch.Tensor"],
output: "torch.Tensor",
    ) -> "torch.Tensor":
target = self.get_sparse_injection_parameter(output)
self.indexing_plugin.select_active_dimensions(
[enpheeph.utils.enums.DimensionType.Tensor],
autoshift_to_boundaries=True,
)
self.generate_mask(target, tensor_only=None, force_recompute=True)
target = self.inject_mask(target, tensor_only=None)
output = self.set_sparse_injection_parameter(output, target).to_dense()
self.indexing_plugin.reset_active_dimensions()
return output
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
self.handle = module.register_forward_hook(self.output_fault_hook)
return module
| 4,498 | 34.148438 | 88 | py |
| enpheeph | enpheeph-main/src/enpheeph/injections/snnoutputnorsefault.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# type: ignore[misc,assignment,name-defined,unreachable,union-attr,attr-defined,operator]
# flake8: noqa
# we ignore mypy/flake8 errors here as this injection needs to be refactored
import typing
import norse
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.utils.dataclasses
# torch is only needed here for the type annotations
if typing.TYPE_CHECKING:
    import torch
class SNNOutputNorseFault(
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
):
def __init__(
self,
fault_location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with very long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
):
super().__init__()
if fault_location.time_index is None:
raise ValueError("time_index must be passed in the injection for SNNs")
self.fault_location = fault_location
self.low_level_plugin = low_level_torch_plugin
self.handle = None
self.mask = None
self.timestep_counter = None
@property
def module_name(self) -> str:
return self.fault_location.module_name
# this hook assumes that for each forward call, the initial state at the
# first execution point is None
# in this way we can count and locate precisely the timesteps, using only
# the forward hook and without modifying the norse code
# NOTE: it would not work if the initial state used as input is different
# from None, so be careful
def snn_output_fault_hook(
self,
module: "torch.nn.Module",
input: typing.Union[typing.Tuple["torch.Tensor"], "torch.Tensor"],
output: "torch.Tensor",
) -> "torch.Tensor":
if input[1] is None:
self.timestep_counter = 0
elif isinstance(input[1], tuple):
self.timestep_counter += 1
else:
raise RuntimeError("Not compatible with this way of calling")
# find a way to check if we are in the index range
# we simply check the different possibilities
time_index = self.fault_location.time_index
if isinstance(time_index, slice):
index = range(time_index.start, time_index.stop, time_index.step)
elif isinstance(time_index, typing.Sequence):
index = time_index
elif isinstance(time_index, type(Ellipsis)):
index = range(self.timestep_counter + 1)
elif isinstance(time_index, int):
index = (time_index,)
else:
raise IndexError("Unsupported time_index for SNN fault injection")
# if the current counter is in the index, then we inject the fault
if self.timestep_counter in index:
self.generate_mask(output)
masked_output = self.inject_mask(output)
return masked_output
else:
return output
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
if not isinstance(module, norse.torch.module.snn.SNNCell):
raise RuntimeError(
"Currently SNN injection supports only SNNCell from norse"
)
        self.handle = module.register_forward_hook(self.snn_output_fault_hook)
return module
def teardown(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
self.handle.remove()
self.handle = None
self.mask = None
return module
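# Standalone sketch (not part of the original file) of how snn_output_fault_hook maps
# the user-provided time_index onto concrete timesteps, i.e. which forward calls of
# the SNNCell actually receive the injected fault:
def _resolve_time_index_sketch(time_index, current_timestep):
    if isinstance(time_index, slice):
        return range(time_index.start, time_index.stop, time_index.step)
    if isinstance(time_index, (list, tuple)):
        return tuple(time_index)
    if time_index is Ellipsis:
        # inject at every timestep observed so far
        return range(current_timestep + 1)
    if isinstance(time_index, int):
        return (time_index,)
    raise IndexError("Unsupported time_index for SNN fault injection")

# e.g. _resolve_time_index_sketch(slice(0, 4, 2), current_timestep=5) -> range(0, 4, 2)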
| 5,250 | 35.72028 | 89 | py |
| enpheeph | enpheeph-main/src/enpheeph/injections/outputpytorchfault.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.mixins.pytorchtensorobjectvalidatormixin
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.utils.dataclasses
# we move this import down
if typing.TYPE_CHECKING:
import torch
class OutputPyTorchFault(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
(
# fmt: off
enpheeph.injections.mixins.
pytorchtensorobjectvalidatormixin.PyTorchTensorObjectValidatorMixin
# fmt: on
),
):
handle: typing.Optional["torch.utils.hooks.RemovableHandle"]
# we need the index plugin to simplify the handling of the indices
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
)
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
def __init__(
self,
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
),
location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
) -> None:
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.low_level_plugin = low_level_torch_plugin
self.handle = None
self.mask = None
@property
def module_name(self) -> str:
return self.location.module_name
def output_fault_hook(
self,
module: "torch.nn.Module",
input: typing.Union[typing.Tuple["torch.Tensor"], "torch.Tensor"],
output: "torch.Tensor",
) -> "torch.Tensor":
self.generate_mask(output, tensor_only=True)
masked_output = self.inject_mask(output, tensor_only=False)
return masked_output
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
self.handle = module.register_forward_hook(self.output_fault_hook)
return module
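# Minimal standalone sketch (not part of the original file) of the PyTorch mechanism
# this fault relies on: a forward hook that returns a tensor replaces the module's
# output, which is how the generated mask ends up in the activations.
if __name__ == "__main__":
    import torch

    def _zero_first_element_hook(module, input, output):
        # stand-in for generate_mask/inject_mask: perturb a single activation value
        output = output.clone()
        output.view(-1)[0] = 0.0
        return output  # returning a tensor replaces the module output

    linear = torch.nn.Linear(4, 4)
    handle = linear.register_forward_hook(_zero_first_element_hook)
    y = linear(torch.randn(1, 4))  # y[0, 0] has been forced to 0 by the hook
    handle.remove()  # mirrors the teardown step of the injection classes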
| 4,218 | 34.158333 | 88 | py |
| enpheeph | enpheeph-main/src/enpheeph/injections/fpquantizedoutputpytorchfault.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.mixins.pytorchsparseinterfacemixin
import enpheeph.injections.mixins.pytorchtensorobjectvalidatormixin
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.utils.dataclasses
# we move this import down
if typing.TYPE_CHECKING:
import torch
class FPQuantizedOutputPyTorchFault(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
(
# fmt: off
enpheeph.injections.mixins.
pytorchtensorobjectvalidatormixin.PyTorchTensorObjectValidatorMixin
# fmt: on
),
):
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
def __init__(
self,
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
),
location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
) -> None:
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.low_level_plugin = low_level_torch_plugin
self.handle = None
self.mask = None
@property
def module_name(self) -> str:
return self.location.module_name
def output_fault_hook(
self,
module: "torch.nn.Module",
input: typing.Union[typing.Tuple["torch.Tensor"], "torch.Tensor"],
output: "torch.Tensor",
    ) -> "torch.Tensor":
import torch
# here we need to generate target with a proper mixin
        # in our case we use torch.int32, and we multiply by 2 ** 24 so as to have a
        # dynamic range of [-128, 127] in fp32 while keeping a precision of
        # 2 ** -24 (~6e-08) in int32, which should be more than enough
shift_factor = 2**24
target_dtype = torch.int32
original_dtype = output.dtype
target = output * shift_factor
target = target.to(target_dtype)
self.generate_mask(output, tensor_only=True)
target = self.inject_mask(target, tensor_only=False)
# we divide the result
target = target.to(dtype=original_dtype)
target /= shift_factor
return target
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
self.handle = module.register_forward_hook(self.output_fault_hook)
return module
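# Worked sketch (not part of the original file) of the fixed-point trick used in
# output_fault_hook above: scaling by 2 ** 24 before casting to int32 keeps a
# [-128, 127] dynamic range with ~6e-08 resolution, so the fault can be injected in
# integer space and the result mapped back to float32.
if __name__ == "__main__":
    import torch

    x = torch.tensor([0.15625, -3.5], dtype=torch.float32)
    shift_factor = 2 ** 24
    as_int = (x * shift_factor).to(torch.int32)  # fixed-point representation
    # ... bit-level fault injection would operate on as_int here ...
    restored = as_int.to(torch.float32) / shift_factor
    assert torch.allclose(x, restored, atol=2 ** -24)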
| 4,619 | 34 | 88 | py |
| enpheeph | enpheeph-main/src/enpheeph/injections/quantizedoutputpytorchfault.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.mixins.pytorchsparseinterfacemixin
import enpheeph.injections.mixins.pytorchtensorobjectvalidatormixin
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.utils.dataclasses
# we move this import down
if typing.TYPE_CHECKING:
import torch
class QuantizedOutputPyTorchFault(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
(
# fmt: off
enpheeph.injections.mixins.
pytorchtensorobjectvalidatormixin.PyTorchTensorObjectValidatorMixin
# fmt: on
),
):
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
def __init__(
self,
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
),
location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
) -> None:
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.low_level_plugin = low_level_torch_plugin
self.handle = None
self.mask = None
@property
def module_name(self) -> str:
return self.location.module_name
def output_fault_hook(
self,
module: "torch.nn.Module",
input: typing.Union[typing.Tuple["torch.Tensor"], "torch.Tensor"],
output: "torch.Tensor",
    ) -> "torch.Tensor":
import torch
# here we need to generate target with a proper mixin
        # in our case we use torch.int32, and we multiply by 2 ** 24 so as to have a
        # dynamic range of [-128, 127] in fp32 while keeping a precision of
        # 2 ** -24 (~6e-08) in int32, which should be more than enough
shift_factor = 2**24
target_dtype = torch.int32
original_dtype = output.dtype
target = output * shift_factor
target = target.to(target_dtype)
self.generate_mask(output, tensor_only=True)
target = self.inject_mask(target, tensor_only=False)
# we divide the result
target = target.to(dtype=original_dtype)
target /= shift_factor
return target
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
self.handle = module.register_forward_hook(self.output_fault_hook)
return module
| 4,617 | 33.984848 | 88 | py |
| enpheeph | enpheeph-main/src/enpheeph/injections/pruneddensetosparseweightpytorchfault.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import copy
import typing
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.mixins.pytorchsparseinterfacemixin
import enpheeph.injections.mixins.pytorchtensorobjectvalidatormixin
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.utils.dataclasses
import enpheeph.utils.enums
# we move this import down
if typing.TYPE_CHECKING:
import torch
class PrunedDenseToSparseWeightPyTorchFault(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
enpheeph.injections.mixins.pytorchsparseinterfacemixin.PyTorchSparseInterfaceMixin,
(
# fmt: off
enpheeph.injections.mixins.
pytorchtensorobjectvalidatormixin.PyTorchTensorObjectValidatorMixin
# fmt: on
),
):
backup: typing.Optional["torch.Tensor"]
# we need the index plugin to simplify the handling of the indices
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
)
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.abc.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
def __init__(
self,
indexing_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.indexing.abc.
indexingpluginabc.IndexingPluginABC
# fmt: on
),
location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.abc.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
) -> None:
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.low_level_plugin = low_level_torch_plugin
self.backup = None
self.handle = None
self.mask = None
@property
def module_name(self) -> str:
return self.location.module_name
def inject_weight(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
if self.backup is not None:
raise ValueError(
"This method must be called only when setting up the injection"
)
# first we get the element to be injected
weight = getattr(
module,
# sometimes type: ignore[arg-type] might be required for the following line
# mypy gives error as parameter_name can be None, but it cannot be since
# the dataclass checks for the validity
# so we simply cast it here
typing.cast(str, self.location.parameter_name),
)
# we back it up to restore it later
self.backup = copy.deepcopy(weight)
# we call the mixin interface to access the specific element, be it index or
# values of the sparse tensor
target_sparse_element = self.get_sparse_injection_parameter(weight)
# we select the dimensions to be accessed, which are all of them since we have
# no batches in the target sparse element
self.indexing_plugin.select_active_dimensions(
dimensions=[enpheeph.utils.enums.DimensionType.Tensor],
autoshift_to_boundaries=True,
)
# we generate the mask specific for this element
self.generate_mask(
target_sparse_element,
tensor_only=True,
batches_exist=False,
)
# we inject the mask
masked_sparse_element = self.inject_mask(
target_sparse_element,
tensor_only=True,
batches_exist=False,
)
# we update the weight with the new sparse element, using the sparse mixin
masked_weight = self.set_sparse_injection_parameter(
weight, masked_sparse_element
)
# we need to convert the masked weight to the proper class
masked_weight_corrected = self.convert_tensor_to_proper_class(
masked_weight, weight
)
# we set the masked weight in the proper location, overwriting the one that was
# backupped
# this is needed as it is impossible to modify the weight in-place, so the
# conversion is dense -> sparse -> sparse element -> injected sparse element ->
# new sparse tensor -> new dense
setattr(
module,
# sometimes type: ignore[arg-type] might be required for the following line
# mypy gives error as parameter_name can be None, but it cannot be since
# the dataclass checks for the validity
# so we simply cast it here
typing.cast(str, self.location.parameter_name),
masked_weight_corrected,
)
# we reset the active plugin dimensions, as they might be different in the next
# run, especially if the plugin is shared across multiple classes
self.indexing_plugin.reset_active_dimensions()
return module
def restore_weight(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
if self.backup is None:
raise ValueError(
"This method must be called only when tearing down the injection"
)
setattr( # type: ignore[unreachable]
module,
typing.cast(str, self.location.parameter_name),
copy.deepcopy(self.backup),
)
self.backup = None
return module
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
module = self.inject_weight(module)
return module
# we need to override the teardown as it is not common to the normal hook
# teardowns
def teardown(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
module = self.restore_weight(module)
return module
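# Simplified standalone sketch (not part of the original file) of the backup /
# inject / restore pattern implemented by inject_weight and restore_weight above,
# shown on a plain dense weight instead of the sparse representation:
if __name__ == "__main__":
    import copy

    import torch

    module = torch.nn.Linear(3, 3)
    backup = copy.deepcopy(module.weight)  # setup: keep a pristine copy

    with torch.no_grad():
        module.weight[0, 0] = 0.0  # stand-in for the injected mask

    # ... faulty forward passes would run here ...

    with torch.no_grad():
        module.weight.copy_(backup)  # teardown: restore the original weight
    assert torch.equal(module.weight, backup)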
| 7,989 | 35.318182 | 87 | py |
| enpheeph | enpheeph-main/src/enpheeph/injections/__init__.py |
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'abc',
'densesparseoutputpytorchfault',
'fpquantizedoutputpytorchfault',
'mixins',
'outputpytorchfault',
'outputpytorchmonitor',
'plugins',
'pruneddensetosparseactivationpytorchfault',
'pruneddensetosparseweightpytorchfault',
'quantizedoutputpytorchfault',
'snnoutputnorsefault',
'weightpytorchfault',
},
submod_attrs={
'abc': [
'FaultABC',
'InjectionABC',
'MonitorABC',
'PyTorchInjectionABC',
'faultabc',
'injectionabc',
'monitorabc',
'pytorchinjectionabc',
],
'densesparseoutputpytorchfault': [
'DenseSparseOutputPyTorchFault',
],
'fpquantizedoutputpytorchfault': [
'FPQuantizedOutputPyTorchFault',
],
'mixins': [
'PyTorchMaskMixin',
'PyTorchMonitorPostProcessorMixin',
'PyTorchSparseInterfaceMixin',
'PyTorchTensorObjectValidatorMixin',
'pytorchmaskmixin',
'pytorchmonitorpostprocessormixin',
'pytorchquantizationmixin',
'pytorchsparseinterfacemixin',
'pytorchtensorobjectvalidatormixin',
'torch_geometric_mean',
],
'outputpytorchfault': [
'OutputPyTorchFault',
],
'outputpytorchmonitor': [
'OutputPyTorchMonitor',
],
'plugins': [
'AutoPyTorchMaskPlugin',
'CSVStoragePluginABC',
'CuPyPyTorchMaskPlugin',
'CustomBase',
'CustomBaseClass',
'ExperimentRun',
'ExperimentRunBaseMixin',
'ExperimentRunProtocol',
'Fault',
'FaultBaseMixin',
'FaultProtocol',
'IndexingPlugin',
'IndexingPluginABC',
'Injection',
'InjectionProtocol',
'LowLevelTorchMaskPluginABC',
'Monitor',
'MonitorBaseMixin',
'MonitorProtocol',
'NumPyPyTorchMaskPlugin',
'PandasCSVStoragePlugin',
'PolymorphicMixin',
'PyTorchSparseInterfacePluginABC',
'SQLStoragePluginABC',
'SQLiteStoragePlugin',
'Session',
'SessionBaseMixin',
'SessionProtocol',
'StoragePluginABC',
'abc',
'autopytorchmaskplugin',
'csv',
'csvdataclasses',
'csvstorageplugin',
'csvstoragepluginabc',
'cupypytorchmaskplugin',
'fix_pysqlite',
'indexing',
'indexingplugin',
'indexingpluginabc',
'lowleveltorchmaskpluginabc',
'mask',
'numpypytorchmaskplugin',
'pysqlite_begin_emission_fix_on_connect',
'pytorchsparseinterfacepluginabc',
'set_sqlite_pragma',
'sparse',
'sql',
'sqlalchemy_begin_emission_pysqlite',
'sqldataclasses',
'sqlitestorageplugin',
'sqlstoragepluginabc',
'sqlutils',
'storage',
'storagepluginabc',
'storagetypings',
'utils',
],
'pruneddensetosparseactivationpytorchfault': [
'PrunedDenseToSparseWeightPyTorchFault',
],
'pruneddensetosparseweightpytorchfault': [
'PrunedDenseToSparseWeightPyTorchFault',
],
'quantizedoutputpytorchfault': [
'QuantizedOutputPyTorchFault',
],
'snnoutputnorsefault': [
'SNNOutputNorseFault',
],
'weightpytorchfault': [
'WeightPyTorchFault',
],
},
)
def __dir__():
return __all__
__all__ = ['AutoPyTorchMaskPlugin', 'CSVStoragePluginABC',
'CuPyPyTorchMaskPlugin', 'CustomBase', 'CustomBaseClass',
'DenseSparseOutputPyTorchFault', 'ExperimentRun',
'ExperimentRunBaseMixin', 'ExperimentRunProtocol',
'FPQuantizedOutputPyTorchFault', 'Fault', 'FaultABC',
'FaultBaseMixin', 'FaultProtocol', 'IndexingPlugin',
'IndexingPluginABC', 'Injection', 'InjectionABC',
'InjectionProtocol', 'LowLevelTorchMaskPluginABC', 'Monitor',
'MonitorABC', 'MonitorBaseMixin', 'MonitorProtocol',
'NumPyPyTorchMaskPlugin', 'OutputPyTorchFault',
'OutputPyTorchMonitor', 'PandasCSVStoragePlugin',
'PolymorphicMixin', 'PrunedDenseToSparseWeightPyTorchFault',
'PyTorchInjectionABC', 'PyTorchMaskMixin',
'PyTorchMonitorPostProcessorMixin', 'PyTorchSparseInterfaceMixin',
'PyTorchSparseInterfacePluginABC',
'PyTorchTensorObjectValidatorMixin', 'QuantizedOutputPyTorchFault',
'SNNOutputNorseFault', 'SQLStoragePluginABC', 'SQLiteStoragePlugin',
'Session', 'SessionBaseMixin', 'SessionProtocol',
'StoragePluginABC', 'WeightPyTorchFault', 'abc',
'autopytorchmaskplugin', 'csv', 'csvdataclasses',
'csvstorageplugin', 'csvstoragepluginabc', 'cupypytorchmaskplugin',
'densesparseoutputpytorchfault', 'faultabc', 'fix_pysqlite',
'fpquantizedoutputpytorchfault', 'indexing', 'indexingplugin',
'indexingpluginabc', 'injectionabc', 'lowleveltorchmaskpluginabc',
'mask', 'mixins', 'monitorabc', 'numpypytorchmaskplugin',
'outputpytorchfault', 'outputpytorchmonitor', 'plugins',
'pruneddensetosparseactivationpytorchfault',
'pruneddensetosparseweightpytorchfault',
'pysqlite_begin_emission_fix_on_connect', 'pytorchinjectionabc',
'pytorchmaskmixin', 'pytorchmonitorpostprocessormixin',
'pytorchquantizationmixin', 'pytorchsparseinterfacemixin',
'pytorchsparseinterfacepluginabc',
'pytorchtensorobjectvalidatormixin', 'quantizedoutputpytorchfault',
'set_sqlite_pragma', 'snnoutputnorsefault', 'sparse', 'sql',
'sqlalchemy_begin_emission_pysqlite', 'sqldataclasses',
'sqlitestorageplugin', 'sqlstoragepluginabc', 'sqlutils', 'storage',
'storagepluginabc', 'storagetypings', 'torch_geometric_mean',
'utils', 'weightpytorchfault']
# </AUTOGEN_INIT>
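# A short illustration of the lazy loader above, kept outside the AUTOGEN markers
# so that mkinit does not overwrite it; the interactive session below is a
# hypothetical sketch, not output captured from this project.
#
#     >>> import enpheeph.injections as injections
#     >>> injections.WeightPyTorchFault  # first access triggers the real import
#     <class 'enpheeph.injections.weightpytorchfault.WeightPyTorchFault'>
#
# setting the EAGER_IMPORT environment variable to any non-empty value makes
# lazy_import resolve every submodule and attribute immediately instead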
| 8,989
| 36.302905
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/densesparseoutputpytorchfault.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.mixins.pytorchsparseinterfacemixin
import enpheeph.injections.mixins.pytorchtensorobjectvalidatormixin
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.utils.dataclasses
import enpheeph.utils.enums
# the torch import is moved down here, under TYPE_CHECKING, as it is needed
# only for the type annotations
if typing.TYPE_CHECKING:
import torch
class DenseSparseOutputPyTorchFault(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
enpheeph.injections.mixins.pytorchsparseinterfacemixin.PyTorchSparseInterfaceMixin,
(
# fmt: off
enpheeph.injections.mixins.
pytorchtensorobjectvalidatormixin.PyTorchTensorObjectValidatorMixin
# fmt: on
),
):
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
def __init__(
self,
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
),
location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
) -> None:
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.low_level_plugin = low_level_torch_plugin
self.handle = None
self.mask = None
@property
def module_name(self) -> str:
return self.location.module_name
def output_fault_hook(
self,
module: "torch.nn.Module",
input: typing.Union[typing.Tuple["torch.Tensor"], "torch.Tensor"],
output: "torch.Tensor",
    ) -> "torch.Tensor":
target = self.get_sparse_injection_parameter(output)
self.indexing_plugin.select_active_dimensions(
[enpheeph.utils.enums.DimensionType.Tensor],
autoshift_to_boundaries=True,
)
self.generate_mask(target, tensor_only=None, force_recompute=True)
target = self.inject_mask(target, tensor_only=None)
output = self.set_sparse_injection_parameter(output, target).to_dense()
self.indexing_plugin.reset_active_dimensions()
return output
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
self.handle = module.register_forward_hook(self.output_fault_hook)
return module
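# A minimal usage sketch, kept as comments so the module stays importable; the
# plugin instances and the FaultLocation contents are illustrative assumptions,
# not values mandated by this class.
#
#     fault = DenseSparseOutputPyTorchFault(
#         indexing_plugin=some_indexing_plugin,        # an IndexingPluginABC instance
#         location=some_fault_location,                # enpheeph.utils.dataclasses.FaultLocation
#         low_level_torch_plugin=some_mask_plugin,     # a LowLevelTorchMaskPluginABC instance
#     )
#     module = fault.setup(model.some_sparse_layer)    # registers output_fault_hook
#     # run inference here: the hook masks the selected sparse values of the output
#     # the base-class teardown (not shown in this file) is expected to remove the hook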
| 4,490
| 34.085938
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/outputpytorchmonitor.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.abc.monitorabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmonitorpostprocessormixin
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.injections.plugins.storage.abc.storagepluginabc
import enpheeph.utils.dataclasses
import enpheeph.utils.enums
# so flake does not complain about the imports being not at the top after the if
if typing.TYPE_CHECKING:
import torch
class OutputPyTorchMonitor(
enpheeph.injections.abc.monitorabc.MonitorABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
(
# black has issues with very long names
# fmt: off
enpheeph.injections.mixins.
pytorchmonitorpostprocessormixin.PyTorchMonitorPostProcessorMixin
# fmt: on
),
):
enabled_metrics: enpheeph.utils.enums.MonitorMetric
# we need the index plugin to simplify the handling of the indices
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
)
location: enpheeph.utils.dataclasses.MonitorLocation
move_to_first: bool
storage_plugin: (
enpheeph.injections.plugins.storage.abc.storagepluginabc.StoragePluginABC
)
def __init__(
self,
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
),
location: enpheeph.utils.dataclasses.MonitorLocation,
enabled_metrics: enpheeph.utils.enums.MonitorMetric,
storage_plugin: (
enpheeph.injections.plugins.storage.abc.storagepluginabc.StoragePluginABC
),
move_to_first: bool = True,
):
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.enabled_metrics = enabled_metrics
self.storage_plugin = storage_plugin
self.move_to_first = move_to_first
self.handle = None
@property
def module_name(self) -> str:
return self.location.module_name
# this is compatible with PyTorch hook arguments and return type
def output_monitor_hook(
self,
module: "torch.nn.Module",
input: typing.Union[typing.Tuple["torch.Tensor"], "torch.Tensor"],
output: "torch.Tensor",
) -> None:
self.indexing_plugin.select_active_dimensions(
[
enpheeph.utils.enums.DimensionType.Batch,
enpheeph.utils.enums.DimensionType.Tensor,
],
autoshift_to_boundaries=True,
fill_empty_index=True,
filler=slice(None, None),
)
# NOTE: no support for bit_index yet
postprocess = self.postprocess(
output[
self.indexing_plugin.join_indices(
dimension_indices=self.location.dimension_index,
)
]
)
self.storage_plugin.add_payload(location=self.location, payload=postprocess)
def setup(self, module: "torch.nn.Module") -> "torch.nn.Module":
self.handle = module.register_forward_hook(self.output_monitor_hook)
if self.move_to_first:
            # we push the current hook to the beginning of the queue,
            # as this hook belongs to a monitor and must therefore run before
            # any fault-injection hook registered on the same module
            # we use move_to_end with last=False to move it to the beginning
            # of the OrderedDict
            # mypy has issues with Optional being set before, as it does not check them:
            # if the following 2 lines are flagged, use type: ignore[union-attr]
            # on both
self.handle.hooks_dict_ref().move_to_end(
self.handle.id,
last=False,
)
return module
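# A minimal usage sketch as comments; the location, metric flags, and storage
# plugin below are placeholders chosen for illustration, not a prescribed setup.
#
#     monitor = OutputPyTorchMonitor(
#         indexing_plugin=some_indexing_plugin,    # an IndexingPluginABC instance
#         location=some_monitor_location,          # enpheeph.utils.dataclasses.MonitorLocation
#         enabled_metrics=some_metric_flags,       # enpheeph.utils.enums.MonitorMetric value(s)
#         storage_plugin=some_storage_plugin,      # a StoragePluginABC instance
#         move_to_first=True,                      # run before any fault hook on the module
#     )
#     module = monitor.setup(model.layer_to_observe)
#     # every forward pass now calls output_monitor_hook, which post-processes the
#     # selected slice of the output and stores it via storage_plugin.add_payload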
| 5,405
| 37.070423
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/weightpytorchfault.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import copy
import typing
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.injections.mixins.pytorchmaskmixin
import enpheeph.injections.mixins.pytorchtensorobjectvalidatormixin
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.utils.dataclasses
# the torch import is moved down here, under TYPE_CHECKING, as it is needed
# only for the type annotations
if typing.TYPE_CHECKING:
import torch
# no need to use hook handles here as the change is applied directly
# when the injection is set up
class WeightPyTorchFault(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.pytorchinjectionabc.PyTorchInjectionABC,
enpheeph.injections.mixins.pytorchmaskmixin.PyTorchMaskMixin,
(
# fmt: off
enpheeph.injections.mixins.
pytorchtensorobjectvalidatormixin.PyTorchTensorObjectValidatorMixin
# fmt: on
),
):
backup: typing.Optional["torch.Tensor"]
# we need the index plugin to simplify the handling of the indices
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
)
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
def __init__(
self,
        indexing_plugin: (
            enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
        ),
location: enpheeph.utils.dataclasses.FaultLocation,
low_level_torch_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
),
) -> None:
super().__init__()
self.indexing_plugin = indexing_plugin
self.location = location
self.low_level_plugin = low_level_torch_plugin
self.backup = None
self.handle = None
self.mask = None
@property
def module_name(self) -> str:
return self.location.module_name
def inject_weight(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
if self.backup is not None:
raise ValueError(
"This method must be called only when setting up the injection"
)
weight = getattr(
module,
# sometimes type: ignore[arg-type] might be required for the following line
# mypy gives error as parameter_name can be None, but it cannot be since
# the dataclass checks for the validity
# so we simply cast it here
typing.cast(str, self.location.parameter_name),
)
self.backup = copy.deepcopy(weight)
self.generate_mask(
weight,
tensor_only=True,
batches_exist=False,
)
masked_weight = self.inject_mask(
weight,
tensor_only=True,
batches_exist=False,
)
setattr(
module,
# sometimes type: ignore[arg-type] might be required for the following line
# mypy gives error as parameter_name can be None, but it cannot be since
# the dataclass checks for the validity
# so we simply cast it here
typing.cast(str, self.location.parameter_name),
masked_weight,
)
return module
def restore_weight(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
if self.backup is None:
raise ValueError(
"This method must be called only when tearing down the injection"
)
setattr( # type: ignore[unreachable]
module,
typing.cast(str, self.location.parameter_name),
copy.deepcopy(self.backup),
)
self.backup = None
return module
def setup(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
module = self.inject_weight(module)
return module
    # we need to override teardown since this injection does not rely on hooks,
    # so the default hook-removal teardown does not apply
def teardown(
self,
module: "torch.nn.Module",
) -> "torch.nn.Module":
module = self.restore_weight(module)
return module
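# A minimal usage sketch as comments; the plugin and location objects are
# placeholders, the point being the setup/teardown symmetry of this class.
#
#     fault = WeightPyTorchFault(
#         indexing_plugin=some_indexing_plugin,
#         location=some_fault_location,            # must carry a valid parameter_name
#         low_level_torch_plugin=some_mask_plugin,
#     )
#     module = fault.setup(model.layer)     # backs up the weight and writes the masked copy
#     # run inference here with the corrupted weight in place
#     module = fault.teardown(module)       # restores the original weight from the backup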
| 5,994
| 32.49162
| 87
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'indexing',
'mask',
'sparse',
'storage',
},
submod_attrs={
'indexing': [
'IndexingPlugin',
'IndexingPluginABC',
'abc',
'indexingplugin',
'indexingpluginabc',
],
'mask': [
'AutoPyTorchMaskPlugin',
'CuPyPyTorchMaskPlugin',
'LowLevelTorchMaskPluginABC',
'NumPyPyTorchMaskPlugin',
'abc',
'autopytorchmaskplugin',
'cupypytorchmaskplugin',
'lowleveltorchmaskpluginabc',
'numpypytorchmaskplugin',
],
'sparse': [
'PyTorchSparseInterfacePluginABC',
'abc',
'pytorchsparseinterfacepluginabc',
],
'storage': [
'CSVStoragePluginABC',
'CustomBase',
'CustomBaseClass',
'ExperimentRun',
'ExperimentRunBaseMixin',
'ExperimentRunProtocol',
'Fault',
'FaultBaseMixin',
'FaultProtocol',
'Injection',
'InjectionProtocol',
'Monitor',
'MonitorBaseMixin',
'MonitorProtocol',
'PandasCSVStoragePlugin',
'PolymorphicMixin',
'SQLStoragePluginABC',
'SQLiteStoragePlugin',
'Session',
'SessionBaseMixin',
'SessionProtocol',
'StoragePluginABC',
'abc',
'csv',
'csvdataclasses',
'csvstorageplugin',
'csvstoragepluginabc',
'fix_pysqlite',
'pysqlite_begin_emission_fix_on_connect',
'set_sqlite_pragma',
'sql',
'sqlalchemy_begin_emission_pysqlite',
'sqldataclasses',
'sqlitestorageplugin',
'sqlstoragepluginabc',
'sqlutils',
'storagepluginabc',
'storagetypings',
'utils',
],
},
)
def __dir__():
return __all__
__all__ = ['AutoPyTorchMaskPlugin', 'CSVStoragePluginABC',
'CuPyPyTorchMaskPlugin', 'CustomBase', 'CustomBaseClass',
'ExperimentRun', 'ExperimentRunBaseMixin', 'ExperimentRunProtocol',
'Fault', 'FaultBaseMixin', 'FaultProtocol', 'IndexingPlugin',
'IndexingPluginABC', 'Injection', 'InjectionProtocol',
'LowLevelTorchMaskPluginABC', 'Monitor', 'MonitorBaseMixin',
'MonitorProtocol', 'NumPyPyTorchMaskPlugin',
'PandasCSVStoragePlugin', 'PolymorphicMixin',
'PyTorchSparseInterfacePluginABC', 'SQLStoragePluginABC',
'SQLiteStoragePlugin', 'Session', 'SessionBaseMixin',
'SessionProtocol', 'StoragePluginABC', 'abc',
'autopytorchmaskplugin', 'csv', 'csvdataclasses',
'csvstorageplugin', 'csvstoragepluginabc', 'cupypytorchmaskplugin',
'fix_pysqlite', 'indexing', 'indexingplugin', 'indexingpluginabc',
'lowleveltorchmaskpluginabc', 'mask', 'numpypytorchmaskplugin',
'pysqlite_begin_emission_fix_on_connect',
'pytorchsparseinterfacepluginabc', 'set_sqlite_pragma', 'sparse',
'sql', 'sqlalchemy_begin_emission_pysqlite', 'sqldataclasses',
'sqlitestorageplugin', 'sqlstoragepluginabc', 'sqlutils', 'storage',
'storagepluginabc', 'storagetypings', 'utils']
# </AUTOGEN_INIT>
| 6,027
| 34.251462
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/sparse/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'abc',
},
submod_attrs={
'abc': [
'PyTorchSparseInterfacePluginABC',
'pytorchsparseinterfacepluginabc',
],
},
)
def __dir__():
return __all__
__all__ = ['PyTorchSparseInterfacePluginABC', 'abc',
'pytorchsparseinterfacepluginabc']
# </AUTOGEN_INIT>
| 3,677
| 33.698113
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/indexing/indexingplugin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import collections.abc
import copy
import typing
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.utils.constants
import enpheeph.utils.dataclasses
import enpheeph.utils.enums
import enpheeph.utils.typings
class IndexingPlugin(
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
):
# it is Optional so that we can use None
active_dimension_index: typing.Optional[
typing.List[enpheeph.utils.typings.ActiveDimensionIndexType]
]
dimension_dict: enpheeph.utils.typings.DimensionDictType
def __init__(
self, dimension_dict: enpheeph.utils.typings.DimensionDictType
) -> None:
self.dimension_dict = dimension_dict
self.reset_active_dimensions()
# to select a set of dimensions to be used as active when selecting tensor indices
# by default no dimension is considered active
def select_active_dimensions(
self,
dimensions: collections.abc.Container[enpheeph.utils.enums.DimensionType],
# if True, we will move all the indices so that the first index is 0
# and the last is -1
autoshift_to_boundaries: bool = False,
# if True we fill the empty indices with the filler
# if False we will skip them
fill_empty_index: bool = True,
# the filler to use, defaults to : for a single dimension,
# which is slice(None, None)
filler: typing.Any = slice(None, None),
) -> typing.List[enpheeph.utils.typings.ActiveDimensionIndexType]:
# we invert the dimension dict to easily look it up
# as we will be using the indices to look it up instead of the names
inverted_dimension_dict = {v: k for k, v in self.dimension_dict.items()}
# we get the highest index for both the positive and the negative indices
# in terms of absolute value
# we filter the Ellipsis to avoid mypy errors
# **NOTE**: improve the typing here
no_ellipsis_dimension_dict_values: typing.List[int] = typing.cast(
typing.List[int,],
[x for x in self.dimension_dict.values() if x != Ellipsis],
)
longest_positive_range: int = max(
(x for x in no_ellipsis_dimension_dict_values if x >= 0),
# we use -1 default so that range(-1 + 1) = []
default=-1,
)
longest_negative_range: int = min(
(x for x in no_ellipsis_dimension_dict_values if x < 0),
# we use the number right outside the range to get an empty list
default=0,
)
# this list contains all the possible indices including Ellipsis
total_indices: typing.List[enpheeph.utils.typings.DimensionIndexType] = list(
# we cover all the indices to the maximum,
# including the maximum itself,
# hence the + 1
range(longest_positive_range + 1),
)
# we need to split the list creation otherwise mypy complains of different types
total_indices += [Ellipsis]
total_indices += list(
# we create the list going from the most negative index to 0
# 0 is excluded
range(
longest_negative_range,
0,
),
)
# we save the filling and the valid indices in the following list
dimension_index: typing.List[
enpheeph.utils.typings.ActiveDimensionIndexType,
] = []
for index in total_indices:
# the index is saved if it is present in the dimensions to be selected
# here we still don't consider the autoshift
if (
index in inverted_dimension_dict
and inverted_dimension_dict[index] in dimensions
):
dimension_index.append(inverted_dimension_dict[index])
# if the index is not included, we then check if we need to fill it
# due to fill_empty_index
elif fill_empty_index:
dimension_index.append(filler)
if autoshift_to_boundaries:
# we remove all the elements at the beginning/end of the list
# that are fillers
i = 0
# infinite loop, but there is a break
# **NOTE**: probably it can be optimized further
while 1:
# we start from 0, and for each filler we match we remove it
if dimension_index[i] == filler:
del dimension_index[i]
                # if the element is not a filler, then the start is done and we
                # check the end using -1
elif i == 0:
i = -1
                # if the element is not a filler and we are already checking the
                # end, it means we are done
else:
break
# we copy the dimensions and we return them
self.active_dimension_index = copy.deepcopy(dimension_index)
return copy.deepcopy(self.active_dimension_index)
# to reset the active dimensions to the empty dimension dict
def reset_active_dimensions(self) -> None:
self.active_dimension_index = None
# to join indices following the order provided by the active_dimension dict
def join_indices(
self,
dimension_indices: enpheeph.utils.typings.DimensionLocationIndexType,
) -> enpheeph.utils.typings.AnyIndexType:
if self.active_dimension_index is None:
raise ValueError(
"First select the active dimensions with select_active_dimensions"
)
index: typing.List[enpheeph.utils.typings.Index1DType] = []
for i in self.active_dimension_index:
# if we have an enum as index we check it from the given dimensions
if isinstance(i, enpheeph.utils.enums.DimensionType):
# to check if we have a sequence of sequence we want each element
# to be a sequence and have no elements which are integers, as
# the other allowed values represent sequences
sequence_of_sequence = isinstance(
dimension_indices[i], collections.abc.Sequence
) and not any(
isinstance(j, int)
# we use typing.cast to avoid mypy complaining
for j in typing.cast(
typing.Sequence[typing.Any],
dimension_indices[i],
)
)
# if it is a sequence of sequences we extend the index with all the
# sub-sequences, as it will cover multiple dimensions
if sequence_of_sequence:
index.extend(
typing.cast(
typing.Tuple[enpheeph.utils.typings.Index1DType, ...],
dimension_indices[i],
),
)
# otherwise it covers only 1 dimension so we append the element directly
else:
index.append(
typing.cast(
enpheeph.utils.typings.Index1DType,
dimension_indices[i],
),
)
# if the element is not an enum it will be a filler,
# so we append it directly
else:
index.append(i)
return copy.deepcopy(tuple(index))
# to filter a size/shape array depending on the active dimension index
# by selecting only the dimensions with the enum
def filter_dimensions(
self,
# a normal size/shape array
dimensions: typing.Sequence[int],
) -> typing.Tuple[int, ...]:
if self.active_dimension_index is None:
raise ValueError(
"First select the active dimensions with select_active_dimensions"
)
enum_types = [
e
for e in self.active_dimension_index
if isinstance(e, enpheeph.utils.enums.DimensionType)
]
active_dimension_index: typing.List[
enpheeph.utils.typings.ActiveDimensionIndexType
] = copy.deepcopy(self.active_dimension_index)
for e in enum_types:
if self.dimension_dict[e] == Ellipsis:
while len(dimensions) > len(active_dimension_index):
active_dimension_index.insert(active_dimension_index.index(e), e)
# this is executed if the loop exits normally
else:
if len(dimensions) != len(active_dimension_index):
raise ValueError(
"dimensions must be the same length of active_dimension_index "
"if no Ellipsis are used"
)
return_dimensions = []
for d, ind in zip(dimensions, active_dimension_index):
if isinstance(ind, enpheeph.utils.enums.DimensionType):
return_dimensions.append(d)
return tuple(return_dimensions)
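# A small worked example as comments; the dimension layout is an assumption
# chosen for illustration, with Batch on axis 0 and Tensor spanning the rest.
#
#     import enpheeph.utils.enums as enums
#
#     plugin = IndexingPlugin(
#         dimension_dict={
#             enums.DimensionType.Batch: 0,
#             enums.DimensionType.Tensor: Ellipsis,
#         },
#     )
#     plugin.select_active_dimensions(
#         [enums.DimensionType.Batch, enums.DimensionType.Tensor],
#         autoshift_to_boundaries=True,
#     )
#     # -> [DimensionType.Batch, DimensionType.Tensor]
#     plugin.join_indices(
#         dimension_indices={
#             enums.DimensionType.Batch: 2,
#             enums.DimensionType.Tensor: slice(None, None),
#         },
#     )
#     # -> (2, slice(None, None)), usable directly to index a tensor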
| 10,662
| 42.880658
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/indexing/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'abc',
'indexingplugin',
},
submod_attrs={
'abc': [
'IndexingPluginABC',
'indexingpluginabc',
],
'indexingplugin': [
'IndexingPlugin',
],
},
)
def __dir__():
return __all__
__all__ = ['IndexingPlugin', 'IndexingPluginABC', 'abc', 'indexingplugin',
'indexingpluginabc']
# </AUTOGEN_INIT>
| 2,994
| 30.861702
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/mask/numpypytorchmaskplugin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.utils.functions
import enpheeph.utils.imports
import enpheeph.utils.typings
if typing.TYPE_CHECKING or (
enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.NUMPY_NAME]
and enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.TORCH_NAME]
):
import numpy
import torch
class NumPyPyTorchMaskPlugin(
# we disable black to avoid too long line issue in flake8
# fmt: off
(
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
),
# fmt: on
):
def to_torch(self, array: "numpy.ndarray") -> "torch.Tensor":
return torch.from_numpy(array)
def from_torch(self, tensor: "torch.Tensor") -> "numpy.ndarray":
return tensor.numpy()
def to_bitwise_type(self, array: "numpy.ndarray") -> "numpy.ndarray":
return array.view(numpy.dtype(f"u{array.dtype.itemsize}"))
def to_target_type(
self, array: "numpy.ndarray", target: "numpy.ndarray"
) -> "numpy.ndarray":
return array.view(target.dtype)
def make_mask_array_from_index(
self,
int_mask: int,
mask_index: enpheeph.utils.typings.AnyIndexType,
int_fill_value: int,
shape: typing.Sequence[int],
torch_placeholder: "torch.Tensor",
) -> "numpy.ndarray":
# we convert the placeholder
placeholder = self.from_torch(torch_placeholder)
# we convert the integer value representing the fill value into
# an element with unsigned type and correct size
fill_value = numpy.array(
int_fill_value,
dtype=numpy.dtype(f"u{str(placeholder.dtype.itemsize)}"),
)
# we broadcast it onto the correct shape
# NOTE: broadcast_to creates a view, so the view is not writeable
# we have to make a copy of it to be able to write the mask in it
mask = numpy.broadcast_to(fill_value, shape).copy()
# we set the indices to the mask value
mask[mask_index] = int_mask
# we convert the mask to the right dtype
mask = mask.view(dtype=placeholder.dtype)
# we return the mask
return mask
def make_mask_array_from_mask(
self,
int_mask: int,
mask: enpheeph.utils.typings.AnyMaskType,
int_fill_value: int,
shape: typing.Sequence[int],
torch_placeholder: "torch.Tensor",
) -> "numpy.ndarray":
# we convert the placeholder
placeholder = self.from_torch(torch_placeholder)
# we convert the integer value representing the fill value into
# an element with unsigned type and correct size
fill_value = numpy.array(
int_fill_value,
dtype=numpy.dtype(f"u{str(placeholder.dtype.itemsize)}"),
)
# we broadcast it onto the correct shape
# NOTE: broadcast_to creates a view, so the view is not writeable
# we have to make a copy of it to be able to write the mask in it
fill_value_array = numpy.broadcast_to(fill_value, shape).copy()
# we create an array with the same shape as the input for the int_mask
# as then we will choose the correct element using numpy.where
# since our mask is a boolean array
int_mask_array: "numpy.ndarray" = (
numpy.ones(
shape,
dtype=numpy.dtype(f"u{str(placeholder.dtype.itemsize)}"),
)
* int_mask
)
# we set the indices to the mask value
# mask must become an array
final_mask = numpy.where(numpy.asarray(mask), int_mask_array, fill_value_array)
# we convert the mask to the right dtype
final_mask = final_mask.view(dtype=placeholder.dtype)
# we return the mask
return final_mask
def make_mask_array(
self,
int_mask: int,
int_fill_value: int,
shape: typing.Sequence[int],
torch_placeholder: "torch.Tensor",
mask: typing.Optional[enpheeph.utils.typings.AnyMaskType] = None,
mask_index: typing.Optional[enpheeph.utils.typings.AnyIndexType] = None,
) -> "numpy.ndarray":
if mask is None and mask_index is None:
raise ValueError("only one between mask and mask_index can be None")
elif mask is not None and mask_index is not None:
raise ValueError(
"at most one between mask and mask_index can be different from None"
)
elif mask is None:
return self.make_mask_array_from_index(
int_mask=int_mask,
mask_index=mask_index,
int_fill_value=int_fill_value,
shape=shape,
torch_placeholder=torch_placeholder,
)
elif mask_index is None:
return self.make_mask_array_from_mask(
int_mask=int_mask,
mask=mask,
int_fill_value=int_fill_value,
shape=shape,
torch_placeholder=torch_placeholder,
)
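# A minimal sketch of driving this plugin directly, as comments; the mask
# values, shape, and index below are illustrative assumptions.
#
#     import torch
#
#     plugin = NumPyPyTorchMaskPlugin()
#     placeholder = torch.zeros(2, 3, dtype=torch.float32)  # dtype/shape reference
#     mask = plugin.make_mask_array(
#         int_mask=0xFFFFFFFF,          # bit pattern written at the selected index
#         int_fill_value=0x00000000,    # bit pattern used everywhere else
#         shape=(2, 3),
#         torch_placeholder=placeholder,
#         mask_index=(0, slice(None, None)),
#     )
#     # mask is a numpy.ndarray viewed with the placeholder dtype (float32 here),
#     # ready to be moved back to PyTorch with plugin.to_torch(mask)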
| 6,674
| 38.97006
| 87
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/mask/cupypytorchmaskplugin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.utils.functions
import enpheeph.utils.imports
import enpheeph.utils.typings
if typing.TYPE_CHECKING or (
enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.CUPY_NAME]
and enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.TORCH_NAME]
):
import cupy
import torch
import torch.utils.dlpack
class CuPyPyTorchMaskPlugin(
# we disable black to avoid too long line issue in flake8
# fmt: off
(
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
),
# fmt: on
):
def to_torch(self, array: "cupy.ndarray") -> "torch.Tensor":
return torch.utils.dlpack.from_dlpack(array.toDlpack())
def from_torch(self, tensor: "torch.Tensor") -> "cupy.ndarray":
return cupy.fromDlpack(torch.utils.dlpack.to_dlpack(tensor))
def to_bitwise_type(self, array: "cupy.ndarray") -> "cupy.ndarray":
return array.view(cupy.dtype(f"u{array.dtype.itemsize}"))
def to_target_type(
self, array: "cupy.ndarray", target: "cupy.ndarray"
) -> "cupy.ndarray":
return array.view(target.dtype)
def make_mask_array_from_index(
self,
int_mask: int,
mask_index: enpheeph.utils.typings.AnyIndexType,
int_fill_value: int,
shape: typing.Sequence[int],
torch_placeholder: "torch.Tensor",
) -> "cupy.ndarray":
# we convert the placeholder
placeholder = self.from_torch(torch_placeholder)
# we convert the integer value representing the fill value into
# an element with unsigned type and correct size, as well as correct
# device for cupy
with placeholder.device:
fill_value = cupy.array(
int_fill_value,
dtype=cupy.dtype(f"u{str(placeholder.dtype.itemsize)}"),
)
# we broadcast it onto the correct shape
# we need to copy it to avoid issues with broadcasting
mask = cupy.broadcast_to(fill_value, shape).copy()
# we set the indices to the mask value
mask[mask_index] = int_mask
# we convert the mask to the right dtype
mask = mask.view(dtype=placeholder.dtype)
# we return the mask
return mask
def make_mask_array_from_mask(
self,
int_mask: int,
mask: enpheeph.utils.typings.AnyMaskType,
int_fill_value: int,
shape: typing.Sequence[int],
torch_placeholder: "torch.Tensor",
) -> "cupy.ndarray":
# we convert the placeholder
placeholder = self.from_torch(torch_placeholder)
# we convert the integer value representing the fill value into
# an element with unsigned type and correct size, as well as correct
# device for cupy
with placeholder.device:
fill_value = cupy.array(
int_fill_value,
dtype=cupy.dtype(f"u{str(placeholder.dtype.itemsize)}"),
)
# we broadcast it onto the correct shape
# we need to copy it to avoid issues with broadcasting
fill_value_array = cupy.broadcast_to(fill_value, shape).copy()
# we create an array with the same shape as the input for the int_mask
# as then we will choose the correct element using cupy.where
# since our mask is a boolean array
int_mask_array: "cupy.ndarray" = (
cupy.ones(
shape,
dtype=cupy.dtype(f"u{str(placeholder.dtype.itemsize)}"),
)
* int_mask
)
# we set the indices to the mask value
# mask must become an array
final_mask = cupy.where(
cupy.asarray(mask), int_mask_array, fill_value_array
)
# we convert the mask to the right dtype
final_mask = final_mask.view(dtype=placeholder.dtype)
# we return the mask
return final_mask
def make_mask_array(
self,
int_mask: int,
int_fill_value: int,
shape: typing.Sequence[int],
torch_placeholder: "torch.Tensor",
mask: typing.Optional[enpheeph.utils.typings.AnyMaskType] = None,
mask_index: typing.Optional[enpheeph.utils.typings.AnyIndexType] = None,
) -> "cupy.ndarray":
if mask is None and mask_index is None:
raise ValueError("only one between mask and mask_index can be None")
elif mask is not None and mask_index is not None:
raise ValueError(
"at most one between mask and mask_index can be different from None"
)
elif mask is None:
return self.make_mask_array_from_index(
int_mask=int_mask,
mask_index=mask_index,
int_fill_value=int_fill_value,
shape=shape,
torch_placeholder=torch_placeholder,
)
elif mask_index is None:
return self.make_mask_array_from_mask(
int_mask=int_mask,
mask=mask,
int_fill_value=int_fill_value,
shape=shape,
torch_placeholder=torch_placeholder,
)
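# The CuPy variant mirrors the NumPy plugin above; the main differences are the
# DLPack round trip in to_torch/from_torch (zero-copy on the GPU) and the
# `with placeholder.device:` blocks, which keep the mask on the same CUDA device
# as the injected tensor. A hypothetical round trip as comments:
#
#     plugin = CuPyPyTorchMaskPlugin()
#     gpu_array = plugin.from_torch(some_cuda_tensor)   # cupy.ndarray on the same device
#     back = plugin.to_torch(gpu_array)                 # torch.Tensor sharing the same memory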
| 6,908
| 39.168605
| 85
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/mask/autopytorchmaskplugin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.injections.plugins.mask
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.utils.functions
import enpheeph.utils.imports
import enpheeph.utils.typings
if typing.TYPE_CHECKING:
import torch
import enpheeph.injections.plugins.mask.numpypytorchmaskplugin
import enpheeph.injections.plugins.mask.cupypytorchmaskplugin
else:
if enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.TORCH_NAME]:
import torch
if enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.CUPY_NAME]:
import enpheeph.injections.plugins.mask.cupypytorchmaskplugin
if enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.NUMPY_NAME]:
import enpheeph.injections.plugins.mask.numpypytorchmaskplugin
class AutoPyTorchMaskPlugin(
# we disable black to avoid too long line issue in flake8
# fmt: off
(
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
),
# fmt: on
):
CPU_TORCH_DEVICE = "cpu"
GPU_TORCH_DEVICE = "cuda"
FROM_TORCH = {
CPU_TORCH_DEVICE: enpheeph.injections.plugins.mask.NumPyPyTorchMaskPlugin()
if enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.NUMPY_NAME]
else None,
GPU_TORCH_DEVICE: enpheeph.injections.plugins.mask.CuPyPyTorchMaskPlugin()
if enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.CUPY_NAME]
else None,
}
TO_TORCH = {
enpheeph.utils.imports.CUPY_NAME: FROM_TORCH[GPU_TORCH_DEVICE],
enpheeph.utils.imports.NUMPY_NAME: FROM_TORCH[CPU_TORCH_DEVICE],
}
def _get_from_torch_plugin_instance(
self, tensor: "torch.Tensor"
) -> (
enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
):
plugin_instance = self.FROM_TORCH[tensor.device.type]
if plugin_instance is None:
raise ValueError(
"Check the requirements as the current plugin is " "not available"
)
return plugin_instance
def _get_to_torch_plugin_instance(
self,
array: enpheeph.utils.typings.ArrayType,
) -> (
enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
):
plugin_instance = self.TO_TORCH[
typing.cast(
str,
enpheeph.utils.functions.get_object_library(array),
)
]
if plugin_instance is None:
raise ValueError(
"Check the requirements as the current plugin is " "not available"
)
return plugin_instance
def to_torch(self, array: enpheeph.utils.typings.ArrayType) -> "torch.Tensor":
plugin_instance = self._get_to_torch_plugin_instance(array)
return typing.cast("torch.Tensor", plugin_instance.to_torch(array))
def from_torch(self, tensor: "torch.Tensor") -> enpheeph.utils.typings.ArrayType:
plugin_instance = self._get_from_torch_plugin_instance(tensor)
return plugin_instance.from_torch(tensor)
def to_bitwise_type(
self, array: enpheeph.utils.typings.ArrayType
) -> enpheeph.utils.typings.ArrayType:
plugin_instance = self._get_to_torch_plugin_instance(array)
return plugin_instance.to_bitwise_type(array)
def to_target_type(
self,
array: enpheeph.utils.typings.ArrayType,
target: enpheeph.utils.typings.ArrayType,
) -> enpheeph.utils.typings.ArrayType:
plugin_instance = self._get_to_torch_plugin_instance(array)
return plugin_instance.to_target_type(array, target)
def make_mask_array(
self,
int_mask: int,
int_fill_value: int,
shape: typing.Sequence[int],
torch_placeholder: "torch.Tensor",
mask: typing.Optional[enpheeph.utils.typings.AnyMaskType] = None,
mask_index: typing.Optional[enpheeph.utils.typings.AnyIndexType] = None,
) -> enpheeph.utils.typings.ArrayType:
return self._get_from_torch_plugin_instance(torch_placeholder).make_mask_array(
int_mask=int_mask,
mask_index=mask_index,
mask=mask,
int_fill_value=int_fill_value,
shape=shape,
torch_placeholder=torch_placeholder,
)
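# A minimal dispatch sketch as comments; it assumes torch plus NumPy (for CPU
# tensors) or CuPy (for CUDA tensors) are installed, otherwise the lookup above
# raises the ValueError about missing requirements.
#
#     import torch
#
#     plugin = AutoPyTorchMaskPlugin()
#     cpu_array = plugin.from_torch(torch.zeros(4))  # handled by NumPyPyTorchMaskPlugin
#     back = plugin.to_torch(cpu_array)              # routed back via get_object_library
#     # the same calls on a CUDA tensor go through CuPyPyTorchMaskPlugin instead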
| 5,947
| 38.653333
| 98
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/mask/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'abc',
'autopytorchmaskplugin',
'cupypytorchmaskplugin',
'numpypytorchmaskplugin',
},
submod_attrs={
'abc': [
'LowLevelTorchMaskPluginABC',
'lowleveltorchmaskpluginabc',
],
'autopytorchmaskplugin': [
'AutoPyTorchMaskPlugin',
],
'cupypytorchmaskplugin': [
'CuPyPyTorchMaskPlugin',
],
'numpypytorchmaskplugin': [
'NumPyPyTorchMaskPlugin',
],
},
)
def __dir__():
return __all__
__all__ = ['AutoPyTorchMaskPlugin', 'CuPyPyTorchMaskPlugin',
'LowLevelTorchMaskPluginABC', 'NumPyPyTorchMaskPlugin', 'abc',
'autopytorchmaskplugin', 'cupypytorchmaskplugin',
'lowleveltorchmaskpluginabc', 'numpypytorchmaskplugin']
# </AUTOGEN_INIT>
| 3,424
| 31.932692
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/storage/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'abc',
'csv',
'sql',
'utils',
},
submod_attrs={
'abc': [
'StoragePluginABC',
'storagepluginabc',
],
'csv': [
'CSVStoragePluginABC',
'ExperimentRun',
'Fault',
'Injection',
'Monitor',
'PandasCSVStoragePlugin',
'abc',
'csvdataclasses',
'csvstorageplugin',
'csvstoragepluginabc',
'utils',
],
'sql': [
'CustomBase',
'CustomBaseClass',
'ExperimentRun',
'ExperimentRunBaseMixin',
'Fault',
'FaultBaseMixin',
'Injection',
'Monitor',
'MonitorBaseMixin',
'PolymorphicMixin',
'SQLStoragePluginABC',
'SQLiteStoragePlugin',
'Session',
'SessionBaseMixin',
'abc',
'fix_pysqlite',
'pysqlite_begin_emission_fix_on_connect',
'set_sqlite_pragma',
'sqlalchemy_begin_emission_pysqlite',
'sqldataclasses',
'sqlitestorageplugin',
'sqlstoragepluginabc',
'sqlutils',
'utils',
],
'utils': [
'ExperimentRunProtocol',
'FaultProtocol',
'InjectionProtocol',
'MonitorProtocol',
'SessionProtocol',
'storagetypings',
],
},
)
def __dir__():
return __all__
__all__ = ['CSVStoragePluginABC', 'CustomBase', 'CustomBaseClass',
'ExperimentRun', 'ExperimentRunBaseMixin', 'ExperimentRunProtocol',
'Fault', 'FaultBaseMixin', 'FaultProtocol', 'Injection',
'InjectionProtocol', 'Monitor', 'MonitorBaseMixin',
'MonitorProtocol', 'PandasCSVStoragePlugin', 'PolymorphicMixin',
'SQLStoragePluginABC', 'SQLiteStoragePlugin', 'Session',
'SessionBaseMixin', 'SessionProtocol', 'StoragePluginABC', 'abc',
'csv', 'csvdataclasses', 'csvstorageplugin', 'csvstoragepluginabc',
'fix_pysqlite', 'pysqlite_begin_emission_fix_on_connect',
'set_sqlite_pragma', 'sql', 'sqlalchemy_begin_emission_pysqlite',
'sqldataclasses', 'sqlitestorageplugin', 'sqlstoragepluginabc',
'sqlutils', 'storagepluginabc', 'storagetypings', 'utils']
# </AUTOGEN_INIT>
| 5,031
| 32.546667
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/storage/csv/csvstorageplugin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# import typing
import enpheeph.injections.plugins.storage.csv.abc.csvstoragepluginabc
# import enpheeph.utils.dataclasses
# import enpheeph.utils.typings
# import enpheeph.injections.plugins.storage.csv.utils.csvdataclasses as csvdataclasses
class PandasCSVStoragePlugin(
enpheeph.injections.plugins.storage.csv.abc.csvstoragepluginabc.CSVStoragePluginABC
):
pass
| 1,918
| 38.979167
| 87
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/storage/csv/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'abc',
'csvstorageplugin',
'utils',
},
submod_attrs={
'abc': [
'CSVStoragePluginABC',
'csvstoragepluginabc',
],
'csvstorageplugin': [
'PandasCSVStoragePlugin',
],
'utils': [
'ExperimentRun',
'Fault',
'Injection',
'Monitor',
'csvdataclasses',
],
},
)
def __dir__():
return __all__
__all__ = ['CSVStoragePluginABC', 'ExperimentRun', 'Fault', 'Injection',
'Monitor', 'PandasCSVStoragePlugin', 'abc', 'csvdataclasses',
'csvstorageplugin', 'csvstoragepluginabc', 'utils']
# </AUTOGEN_INIT>
| 4,045
| 33
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/storage/sql/sqlitestorageplugin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import sqlalchemy
import sqlalchemy.dialects.sqlite
import sqlalchemy.engine.url
import sqlalchemy.ext.compiler
import sqlalchemy.sql.expression
import sqlalchemy.types
import enpheeph.injections.plugins.storage.sql.abc.sqlstoragepluginabc
import enpheeph.injections.plugins.storage.sql.utils.sqlutils
import enpheeph.injections.plugins.storage.abc.storagepluginabc
import enpheeph.utils.dataclasses
import enpheeph.utils.typings
import enpheeph.injections.plugins.storage.sql.utils.sqldataclasses as sqldataclasses
class SQLiteStoragePlugin(
    # we disable black to avoid the too-long-line issue in flake8
# fmt: off
(
enpheeph.injections.plugins.storage.sql.abc.
sqlstoragepluginabc.SQLStoragePluginABC
),
# fmt: on
):
DEFAULT_EXTRA_ENGINE_ARGS: typing.Dict[str, typing.Any] = {
"future": True,
}
def __init__(
self,
db_url: str,
        # extra keyword arguments forwarded to sqlalchemy.create_engine; for example
        # setting "echo": True makes the engine print all the emitted SQL queries,
        # which is useful for debugging purposes
extra_engine_args: typing.Dict[str, typing.Any] = DEFAULT_EXTRA_ENGINE_ARGS,
):
# we generate the current engine
# we set the current experiment id to None
# NOTE: we use experiment id so that we can reload the experiment for each
# new Session we create
self.experiment_id: typing.Optional[int] = None
self.session_id = None
self.db_url = db_url
self.extra_engine_args = extra_engine_args
self.engine = self.init_engine(self.db_url, self.extra_engine_args)
@classmethod
def init_engine(
cls,
db_url: str,
extra_engine_args: typing.Dict[str, typing.Any] = DEFAULT_EXTRA_ENGINE_ARGS,
) -> sqlalchemy.engine.Engine:
# we create the engine
engine = sqlalchemy.create_engine(db_url, **extra_engine_args)
# we implement the fix if we are using pysqlite
# to check, we get the dialect class from the url
dialect: typing.Type[
sqlalchemy.engine.Dialect
] = sqlalchemy.engine.url.make_url(db_url).get_dialect()
# if pysqlite is in the dialect class name, we fix the engine for pysqlite
if "pysqlite" in dialect.__qualname__:
sqldataclasses.fix_pysqlite(engine)
# we create all the tables in the engine
sqldataclasses.CustomBase.metadata.create_all(engine)
return engine
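# A minimal usage sketch, assuming the rest of the enpheeph SQL storage stack is
# importable; the in-memory pysqlite URL below is only an example value, any
# SQLAlchemy-compatible SQLite URL would behave the same way.
if __name__ == "__main__":
    # instantiating the plugin creates the engine, applies the pysqlite fix and
    # creates all the mapped tables
    plugin = SQLiteStoragePlugin(db_url="sqlite+pysqlite:///:memory:")
    print(plugin.engine.url)
    # no experiment has been started yet, so the experiment id is still None
    print(plugin.experiment_id)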
| 3,973
| 36.847619
| 85
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/plugins/storage/sql/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'abc',
'sqlitestorageplugin',
'utils',
},
submod_attrs={
'abc': [
'SQLStoragePluginABC',
'sqlstoragepluginabc',
],
'sqlitestorageplugin': [
'SQLiteStoragePlugin',
],
'utils': [
'CustomBase',
'CustomBaseClass',
'ExperimentRun',
'ExperimentRunBaseMixin',
'Fault',
'FaultBaseMixin',
'Injection',
'Monitor',
'MonitorBaseMixin',
'PolymorphicMixin',
'Session',
'SessionBaseMixin',
'fix_pysqlite',
'pysqlite_begin_emission_fix_on_connect',
'set_sqlite_pragma',
'sqlalchemy_begin_emission_pysqlite',
'sqldataclasses',
'sqlutils',
],
},
)
def __dir__():
return __all__
__all__ = ['CustomBase', 'CustomBaseClass', 'ExperimentRun',
'ExperimentRunBaseMixin', 'Fault', 'FaultBaseMixin', 'Injection',
'Monitor', 'MonitorBaseMixin', 'PolymorphicMixin',
'SQLStoragePluginABC', 'SQLiteStoragePlugin', 'Session',
'SessionBaseMixin', 'abc', 'fix_pysqlite',
'pysqlite_begin_emission_fix_on_connect', 'set_sqlite_pragma',
'sqlalchemy_begin_emission_pysqlite', 'sqldataclasses',
'sqlitestorageplugin', 'sqlstoragepluginabc', 'sqlutils', 'utils']
# </AUTOGEN_INIT>
| 4,813
| 34.138686
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/mixins/pytorchtensorobjectvalidatormixin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
import typing
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.utils.dataclasses
import enpheeph.utils.functions
import enpheeph.utils.imports
import enpheeph.utils.typings
if (
typing.TYPE_CHECKING
or enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.TORCH_NAME]
):
import torch
class PyTorchTensorObjectValidatorMixin(abc.ABC):
@staticmethod
def convert_tensor_to_proper_class(
source: "torch.Tensor", target: "torch.Tensor"
) -> "torch.Tensor":
# to avoid issues if we are using sub-classes like torch.nn.Parameter,
# we call tensor.__class__ to create a new object with the proper content
        # however this cannot be done for torch.Tensor itself, as it would require
        # copying the tensor that is passed in
if target.__class__ == torch.Tensor:
return source
elif isinstance(source, torch.Tensor):
return target.__class__(source)
else:
raise TypeError("Wrong type for source")
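# A small illustrative check of the behaviour above, assuming torch is available
# at runtime: plain tensors are returned untouched, while subclasses such as
# torch.nn.Parameter are rebuilt around the new data.
if __name__ == "__main__":
    import torch
    converter = PyTorchTensorObjectValidatorMixin.convert_tensor_to_proper_class
    new_data = torch.ones(3)
    # the target is a plain torch.Tensor, so the source is returned as-is
    print(type(converter(new_data, torch.zeros(3))))
    # the target is a Parameter, so the source is wrapped into the same subclass
    print(type(converter(new_data, torch.nn.Parameter(torch.zeros(3)))))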
| 2,582
| 38.738462
| 84
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/mixins/pytorchquantizationmixin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 1,539
| 45.666667
| 77
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/mixins/pytorchmonitorpostprocessormixin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
import typing
import enpheeph.utils.classes
import enpheeph.utils.dataclasses
import enpheeph.utils.enums
import enpheeph.utils.functions
import enpheeph.utils.imports
if (
typing.TYPE_CHECKING
or enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.TORCH_NAME]
):
import torch
def torch_geometric_mean(tensor: "torch.Tensor", dim: int = -1) -> "torch.Tensor":
log_x: "torch.Tensor" = torch.log(tensor)
result: "torch.Tensor" = torch.exp(torch.mean(log_x, dim=dim))
return result
class PyTorchMonitorPostProcessorMixin(abc.ABC):
enabled_metrics: enpheeph.utils.enums.MonitorMetric
monitor_location: enpheeph.utils.dataclasses.MonitorLocation
def postprocess(self, tensor: "torch.Tensor") -> typing.Dict[str, typing.Any]:
dict_ = {}
skip_if_error = enpheeph.utils.classes.SkipIfErrorContextManager(
NotImplementedError
)
metric_class = self.enabled_metrics.__class__
if metric_class.StandardDeviation in self.enabled_metrics:
with skip_if_error:
dict_[metric_class.StandardDeviation.name] = torch.std(
tensor, unbiased=True
).item()
if metric_class.Maximum in self.enabled_metrics:
with skip_if_error:
dict_[metric_class.Maximum.name] = torch.max(tensor).item()
if metric_class.Minimum in self.enabled_metrics:
with skip_if_error:
dict_[metric_class.Minimum.name] = torch.min(tensor).item()
if metric_class.ArithmeticMean in self.enabled_metrics:
with skip_if_error:
dict_[metric_class.ArithmeticMean.name] = torch.mean(tensor).item()
if metric_class.GeometricMean in self.enabled_metrics:
with skip_if_error:
dict_[metric_class.GeometricMean.name] = torch_geometric_mean(
tensor
).item()
return dict_
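# A quick numerical sanity check of torch_geometric_mean, assuming torch is
# available: for the values [1, 2, 4, 8] the geometric mean is
# (1 * 2 * 4 * 8) ** (1 / 4) = 64 ** 0.25, roughly 2.8284.
if __name__ == "__main__":
    import torch
    values = torch.tensor([1.0, 2.0, 4.0, 8.0])
    expected = float(values.prod() ** (1.0 / values.numel()))
    computed = torch_geometric_mean(values).item()
    # both should print roughly 2.8284, up to floating point error
    print(expected, computed)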
| 3,515
| 38.505618
| 84
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/mixins/pytorchmaskmixin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
import typing
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.utils.dataclasses
import enpheeph.utils.functions
import enpheeph.utils.imports
import enpheeph.utils.typings
if (
typing.TYPE_CHECKING
or enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.TORCH_NAME]
):
import torch
class PyTorchMaskMixin(abc.ABC):
# we need the index plugin to simplify the handling of the indices
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
)
# the used variables in the functions, must be initialized properly
location: enpheeph.utils.dataclasses.FaultLocation
low_level_plugin: (
# black has issues with long names
# fmt: off
enpheeph.injections.plugins.mask.
lowleveltorchmaskpluginabc.LowLevelTorchMaskPluginABC
# fmt: on
)
mask: typing.Optional["torch.Tensor"]
# Callables
convert_tensor_to_proper_class: typing.Callable[
["torch.Tensor", "torch.Tensor"],
"torch.Tensor",
]
def set_tensor_only_indexing(
self,
        # this flag is used to consider batches as an extra dimension:
        # if enabled, we fill the empty index left by the missing batch/other
        # dimensions; otherwise it is not filled, so the Tensor dimension covers
        # the whole array
batches_exist: bool = True,
) -> None:
self.indexing_plugin.select_active_dimensions(
[
enpheeph.utils.enums.DimensionType.Tensor,
],
autoshift_to_boundaries=False,
fill_empty_index=batches_exist,
filler=slice(None, None),
)
def set_batch_tensor_indexing(self) -> None:
self.indexing_plugin.select_active_dimensions(
[
enpheeph.utils.enums.DimensionType.Batch,
enpheeph.utils.enums.DimensionType.Tensor,
],
autoshift_to_boundaries=False,
fill_empty_index=True,
filler=slice(None, None),
)
# mask is both set in self and returned
def generate_mask(
self,
tensor: "torch.Tensor",
force_recompute: bool = False,
# if True we use set_tensor_only_indexing, if False we use
# set_batch_tensor_indexing
# if explicitly non-boolean, we skip it, to allow for custom configurations
tensor_only: typing.Optional[bool] = True,
# this flag is used to consider batches as an extra dimension when using
# tensor_only, it has no effect if tensor_only is false
batches_exist: bool = True,
) -> "torch.Tensor":
if self.mask is None or force_recompute:
            # NOTE: the following steps process the index based on its bitwidth
            # and type
            # the index may start from a non-compatible form, which is then
            # checked and verified against the PyTorch indexing capabilities
            # we get the element size of the tensor, i.e. the width of its dtype
            # in bytes
bytewidth = tensor.element_size()
# we create the boolean mask in torch, depending on whether we
# use 0 or 1 to fill the non-selected values
bit_mask_info = (
enpheeph.utils.dataclasses.BitFaultMaskInfo.from_bit_fault_value(
self.location.bit_fault_value
)
)
bool_mask: "torch.Tensor" = torch.tensor(
[bit_mask_info.fill_value] * bytewidth * 8, dtype=torch.bool
)
# we set the selected bits to the value provided by the fault
# locator
bool_mask[self.location.bit_index] = bit_mask_info.mask_value
# we get the correct indices from the boolean mask
# we convert it to indices in standard Python to create the final
# integer representation
indices: typing.List[int] = torch.where(bool_mask)[0].tolist()
# we get the final integer representation for the mask
int_mask = sum(2**i for i in indices)
            # placeholder tensor used only to carry the device and dtype needed
            # for the conversion
tensor_placeholder: "torch.Tensor" = torch.zeros(
0,
device=tensor.device,
dtype=tensor.dtype,
requires_grad=False,
)
# we set up the indices depending on the flag
            # if the flag is not a boolean, we leave the existing active
            # dimensions untouched
if tensor_only is True:
self.set_tensor_only_indexing(batches_exist=batches_exist)
elif tensor_only is False:
self.set_batch_tensor_indexing()
tensor_shape = self.indexing_plugin.filter_dimensions(
tensor.shape,
)
            # we get the values for mask and mask_index
            # if the corresponding dict is None we use None, otherwise we look up
            # the Tensor dimension in the dict, defaulting to None
mask = (
self.location.dimension_mask.get(
enpheeph.utils.enums.DimensionType.Tensor, None
)
if self.location.dimension_mask is not None
else None
)
mask_index = (
self.location.dimension_index.get(
enpheeph.utils.enums.DimensionType.Tensor, None
)
if self.location.dimension_index is not None
else None
)
# we create the low-level mask
# using the filtered dimensions
# we only need the tensor_index, as we do not cover the time/batch
# dimensions
mask_array = self.low_level_plugin.make_mask_array(
int_mask=int_mask,
# we give only the tensor dimension as possible mask
mask=mask,
# we use only the tensor index as the mask will be the same even
# across different batches/time-steps
# so it can be expanded/repeated later
mask_index=mask_index,
int_fill_value=(2 ** (bytewidth * 8) - 1) * bit_mask_info.fill_value,
shape=tensor_shape,
torch_placeholder=tensor_placeholder,
)
# we convert the mask back to PyTorch
mask = self.low_level_plugin.to_torch(mask_array)
            # the indices are reset if we have set them up ourselves
if isinstance(tensor_only, bool):
self.indexing_plugin.reset_active_dimensions()
else:
mask = self.mask
self.mask = mask
return self.mask
# we return the injected tensor
def inject_mask(
self,
tensor: "torch.Tensor",
# if True we use set_tensor_only_indexing, if False we use
# set_batch_tensor_indexing
# if explicitly non-boolean, we skip it, to allow for custom configurations
tensor_only: typing.Optional[bool] = True,
# this flag is used to consider batches as an extra dimension when using
# tensor_only, it has no effect if tensor_only is false
batches_exist: bool = True,
) -> "torch.Tensor":
if self.mask is None:
raise RuntimeError("Please call generate_mask before injection")
bit_mask_info = (
enpheeph.utils.dataclasses.BitFaultMaskInfo.from_bit_fault_value(
self.location.bit_fault_value
)
)
# we set up the indices depending on the flag
if tensor_only is True:
self.set_tensor_only_indexing(batches_exist=batches_exist)
elif tensor_only is False:
self.set_batch_tensor_indexing()
selected_batches_tensor = tensor[
self.indexing_plugin.join_indices(
{
**self.location.dimension_index,
**{
enpheeph.utils.enums.DimensionType.Tensor: ...,
},
},
)
]
low_level_tensor = self.low_level_plugin.from_torch(
selected_batches_tensor,
)
# mypy generates an error since self.mask can be None
# however we call self.generate_mask that will set the mask or raise errors
# stopping the execution
low_level_mask = self.low_level_plugin.from_torch(
            # we use expand_as to expand the mask onto the selected batches
# dimension
# expand creates views, so we should not change the elements in place,
# but it is doable as we are working on the mask which will not be modified
# sometimes the following line fails with mypy, use type: ignore[arg-type]
self.mask.expand_as(selected_batches_tensor)
)
bitwise_tensor = self.low_level_plugin.to_bitwise_type(low_level_tensor)
bitwise_mask = self.low_level_plugin.to_bitwise_type(low_level_mask)
bitwise_injected_tensor = bit_mask_info.operation.value(
bitwise_tensor,
bitwise_mask,
)
low_level_injected_tensor = self.low_level_plugin.to_target_type(
bitwise_injected_tensor,
low_level_tensor,
)
injected_tensor = self.low_level_plugin.to_torch(low_level_injected_tensor)
final_injected_tensor = injected_tensor[
self.indexing_plugin.join_indices(
{
**self.location.dimension_index,
**{
enpheeph.utils.enums.DimensionType.Tensor: ...,
},
},
)
]
        # the indices are reset if we have set them up ourselves
if isinstance(tensor_only, bool):
self.indexing_plugin.reset_active_dimensions()
# conversion to proper class
return self.convert_tensor_to_proper_class(final_injected_tensor, tensor)
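# A small standalone sketch of the integer-mask construction used in
# generate_mask above, assuming torch is available: for a 1-byte dtype and a
# fault on bit 3, the boolean mask selects only that bit and the resulting
# integer mask is 2 ** 3 == 8. The fill_value/mask_value pair below is a
# hypothetical stand-in for what BitFaultMaskInfo would provide.
if __name__ == "__main__":
    import torch
    bytewidth = 1  # e.g. an 8-bit dtype
    bit_index = 3  # the faulty bit selected by the fault location
    fill_value = 0  # hypothetical: non-selected bits are left at 0
    mask_value = 1  # hypothetical: the selected bit is raised in the mask
    bool_mask = torch.tensor([fill_value] * bytewidth * 8, dtype=torch.bool)
    bool_mask[bit_index] = mask_value
    indices = torch.where(bool_mask)[0].tolist()
    int_mask = sum(2 ** i for i in indices)
    # prints [3] and 8: only bit 3 is selected, i.e. the mask is 0b00001000
    print(indices, int_mask)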
| 11,796
| 40.10453
| 87
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/mixins/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'pytorchmaskmixin',
'pytorchmonitorpostprocessormixin',
'pytorchquantizationmixin',
'pytorchsparseinterfacemixin',
'pytorchtensorobjectvalidatormixin',
},
submod_attrs={
'pytorchmaskmixin': [
'PyTorchMaskMixin',
],
'pytorchmonitorpostprocessormixin': [
'PyTorchMonitorPostProcessorMixin',
'torch_geometric_mean',
],
'pytorchsparseinterfacemixin': [
'PyTorchSparseInterfaceMixin',
],
'pytorchtensorobjectvalidatormixin': [
'PyTorchTensorObjectValidatorMixin',
],
},
)
def __dir__():
return __all__
__all__ = ['PyTorchMaskMixin', 'PyTorchMonitorPostProcessorMixin',
'PyTorchSparseInterfaceMixin', 'PyTorchTensorObjectValidatorMixin',
'pytorchmaskmixin', 'pytorchmonitorpostprocessormixin',
'pytorchquantizationmixin', 'pytorchsparseinterfacemixin',
'pytorchtensorobjectvalidatormixin', 'torch_geometric_mean']
# </AUTOGEN_INIT>
| 3,646
| 33.40566
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/injections/mixins/pytorchsparseinterfacemixin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
import typing
import enpheeph.injections.plugins.indexing.abc.indexingpluginabc
import enpheeph.injections.plugins.mask.abc.lowleveltorchmaskpluginabc
import enpheeph.injections.abc.pytorchinjectionabc
import enpheeph.utils.dataclasses
import enpheeph.utils.functions
import enpheeph.utils.imports
import enpheeph.utils.typings
if typing.TYPE_CHECKING:
import torch
elif enpheeph.utils.imports.MODULE_AVAILABILITY[enpheeph.utils.imports.TORCH_NAME]:
import torch
class PyTorchSparseInterfaceMixin(abc.ABC):
# we need the index plugin to simplify the handling of the indices
indexing_plugin: (
enpheeph.injections.plugins.indexing.abc.indexingpluginabc.IndexingPluginABC
)
# the used variables in the functions, must be initialized properly
location: enpheeph.utils.dataclasses.BaseInjectionLocation
def _check_sparse_index_flag(self) -> bool:
# mypy has some issues in recognizing the enum names if taken from a name itself
# e.g. A.a.a
# we use separate values to avoid this issue
# however we still require typing from the enum,
# which limits the customizability of the interface, as before it could be any
# compatible enum but now it must be this specific one
        # **NOTE**: a possible alternative would be appending .value at the end,
        # but .value returns the integer representation rather than the enum
        # member, so it is still not a clean trick
sparse_index_flag = (
self.location.parameter_type.Sparse | self.location.parameter_type.Index
)
return sparse_index_flag in self.location.parameter_type
def _check_sparse_value_flag(self) -> bool:
# mypy has some issues in recognizing the enum names if taken from a name itself
# e.g. A.a.a
# we use separate values to avoid this issue
# however we still require typing from the enum,
# which limits the customizability of the interface, as before it could be any
# compatible enum but now it must be this specific one
        # **NOTE**: a possible alternative would be appending .value at the end,
        # but .value returns the integer representation rather than the enum
        # member, so it is still not a clean trick
sparse_value_flag = (
self.location.parameter_type.Sparse | self.location.parameter_type.Value
)
return sparse_value_flag in self.location.parameter_type
def get_sparse_injection_parameter(
self,
tensor: "torch.Tensor",
) -> "torch.Tensor":
sparse_target = tensor.to_sparse()
if self._check_sparse_index_flag():
target = sparse_target.indices()
elif self._check_sparse_value_flag():
target = sparse_target.values()
else:
raise ValueError("This operation is not supported with sparse tensors")
return target
def set_sparse_injection_parameter(
self,
target: "torch.Tensor",
new_value: "torch.Tensor",
) -> "torch.Tensor":
sparse_target = target.to_sparse()
if self._check_sparse_index_flag():
other_sparse_element = sparse_target.values()
new_target = torch.sparse_coo_tensor(
indices=new_value, values=other_sparse_element
)
elif self._check_sparse_value_flag():
other_sparse_element = sparse_target.indices()
new_target = torch.sparse_coo_tensor(
indices=other_sparse_element, values=new_value
)
else:
raise ValueError("This operation is not supported with sparse tensors")
# FIXME: how should we approach the sparse-to-dense conversion? maybe with a
# plugin? so that we can support different sparse representations without
# having to write code in the main code base
return new_target.to_dense()
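# A minimal sketch of the torch sparse COO round trip that the two methods above
# build upon, assuming torch is available: to_sparse() exposes indices() and
# values(), and torch.sparse_coo_tensor() reassembles the (possibly modified)
# pieces before densifying again.
if __name__ == "__main__":
    import torch
    dense = torch.tensor([[0.0, 1.0], [2.0, 0.0]])
    sparse = dense.to_sparse()
    # indices() is a 2 x nnz integer tensor, values() holds the nonzero entries
    print(sparse.indices())
    print(sparse.values())
    rebuilt = torch.sparse_coo_tensor(
        indices=sparse.indices(), values=sparse.values(), size=dense.shape
    ).to_dense()
    # rebuilding with the same pieces and densifying recovers the original tensor
    print(torch.equal(dense, rebuilt))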
| 5,496
| 41.945313
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/helpers/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items() for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
"{module_name}.{name}".format(module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
"{module_name}.{submodname}".format(
module_name=module_name, submodname=submodname
)
)
attr = getattr(module, name)
else:
raise AttributeError(
"No {module_name} attribute {name}".format(
module_name=module_name, name=name
)
)
globals()[name] = attr
return attr
if os.environ.get("EAGER_IMPORT", ""):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
"faultmodels",
"summaries",
},
submod_attrs={
"faultmodels": [
"FaultModelABC",
"abc",
"faultmodel",
"faultmodelabc",
],
"summaries": [
"ModelSummaryABC",
"ModelSummaryTorchinfo",
"abc",
"layersummaryabc",
"modelsummaryabc",
"modelsummarytorchinfo",
"plugins",
"sensitivityanalysis",
],
},
)
def __dir__():
return __all__
__all__ = [
"FaultModelABC",
"ModelSummaryABC",
"ModelSummaryTorchinfo",
"abc",
"faultmodel",
"faultmodelabc",
"faultmodels",
"layersummaryabc",
"modelsummaryabc",
"modelsummarytorchinfo",
"plugins",
"sensitivityanalysis",
"summaries",
]
| 3,640
| 29.090909
| 81
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/helpers/summaries/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items() for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
"{module_name}.{name}".format(module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
"{module_name}.{submodname}".format(
module_name=module_name, submodname=submodname
)
)
attr = getattr(module, name)
else:
raise AttributeError(
"No {module_name} attribute {name}".format(
module_name=module_name, name=name
)
)
globals()[name] = attr
return attr
if os.environ.get("EAGER_IMPORT", ""):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
"abc",
"modelsummarytorchinfo",
"plugins",
},
submod_attrs={
"abc": [
"ModelSummaryABC",
"layersummaryabc",
"modelsummaryabc",
],
"modelsummarytorchinfo": [
"ModelSummaryTorchinfo",
],
"plugins": [
"abc",
"sensitivityanalysis",
],
},
)
def __dir__():
return __all__
__all__ = [
"ModelSummaryABC",
"ModelSummaryTorchinfo",
"abc",
"layersummaryabc",
"modelsummaryabc",
"modelsummarytorchinfo",
"plugins",
"sensitivityanalysis",
]
| 3,440
| 29.451327
| 81
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/helpers/summaries/modelsummarytorchinfo.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import torchinfo
import enpheeph.helpers.summaries.abc.modelsummaryabc
class ModelSummaryTorchinfo(
enpheeph.helpers.summaries.abc.modelsummaryabc.ModelSummaryABC
):
def __init__(self, sensitivity_analysis_plugin=None):
self.sensitivity_analysis_plugin = sensitivity_analysis_plugin
def gather_summary(self, model, input_size):
self.summary = torchinfo.summary(
model=model, input_size=input_size, batch_dim=1, verbose=0
)
def compute_layer_set(self):
pass
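# A minimal usage sketch, assuming torch and torchinfo are installed; the tiny
# Linear model and the (3, 16) input size are arbitrary example values, and the
# batch dimension is added by the batch_dim=1 argument used in gather_summary.
if __name__ == "__main__":
    import torch
    summary_helper = ModelSummaryTorchinfo()
    summary_helper.gather_summary(model=torch.nn.Linear(16, 4), input_size=(3, 16))
    # the resulting torchinfo ModelStatistics object can be printed directly
    print(summary_helper.summary)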
| 2,062
| 38.673077
| 77
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/helpers/summaries/plugins/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items() for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
"{module_name}.{name}".format(module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
"{module_name}.{submodname}".format(
module_name=module_name, submodname=submodname
)
)
attr = getattr(module, name)
else:
raise AttributeError(
"No {module_name} attribute {name}".format(
module_name=module_name, name=name
)
)
globals()[name] = attr
return attr
if os.environ.get("EAGER_IMPORT", ""):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
"sensitivityanalysis",
},
submod_attrs={
"sensitivityanalysis": [
"abc",
],
},
)
def __dir__():
return __all__
__all__ = ["abc", "sensitivityanalysis"]
| 3,024
| 31.526882
| 81
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/helpers/summaries/plugins/sensitivityanalysis/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items() for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
"{module_name}.{name}".format(module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
"{module_name}.{submodname}".format(
module_name=module_name, submodname=submodname
)
)
attr = getattr(module, name)
else:
raise AttributeError(
"No {module_name} attribute {name}".format(
module_name=module_name, name=name
)
)
globals()[name] = attr
return attr
if os.environ.get("EAGER_IMPORT", ""):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
"abc",
},
submod_attrs={},
)
def __dir__():
return __all__
__all__ = ["abc"]
| 2,917
| 31.786517
| 81
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/helpers/faultmodels/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items() for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
"{module_name}.{name}".format(module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
"{module_name}.{submodname}".format(
module_name=module_name, submodname=submodname
)
)
attr = getattr(module, name)
else:
raise AttributeError(
"No {module_name} attribute {name}".format(
module_name=module_name, name=name
)
)
globals()[name] = attr
return attr
if os.environ.get("EAGER_IMPORT", ""):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
"abc",
"faultmodel",
},
submod_attrs={
"abc": [
"FaultModelABC",
"faultmodelabc",
],
},
)
def __dir__():
return __all__
__all__ = ["FaultModelABC", "abc", "faultmodel", "faultmodelabc"]
| 3,078
| 31.410526
| 81
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/helpers/faultmodels/faultmodel.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 1,539
| 45.666667
| 77
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/handlers/injectionhandler.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.handlers.plugins.libraryhandlerpluginabc
import enpheeph.injections.abc.injectionabc
import enpheeph.utils.enums
import enpheeph.utils.typings
class InjectionHandler(object):
active_injections: typing.List[enpheeph.injections.abc.injectionabc.InjectionABC]
injections: typing.List[enpheeph.injections.abc.injectionabc.InjectionABC]
library_handler_plugin: (
enpheeph.handlers.plugins.libraryhandlerpluginabc.LibraryHandlerPluginABC
)
status: enpheeph.utils.enums.HandlerStatus
def __init__(
self,
injections: typing.List[enpheeph.injections.abc.injectionabc.InjectionABC],
library_handler_plugin: (
enpheeph.handlers.plugins.libraryhandlerpluginabc.LibraryHandlerPluginABC
),
):
self.injections = list(injections)
self.library_handler_plugin = library_handler_plugin
self.active_injections = []
self.status = enpheeph.utils.enums.HandlerStatus.Idle
def setup(
self, model: enpheeph.utils.typings.ModelType
) -> enpheeph.utils.typings.ModelType:
self.lock_running_status()
model = self.library_handler_plugin.library_setup(model, self.active_injections)
return model
def teardown(
self, model: enpheeph.utils.typings.ModelType
) -> enpheeph.utils.typings.ModelType:
model = self.library_handler_plugin.library_teardown(
model, self.active_injections
)
self.unlock_running_status()
return model
def check_running_status(self) -> bool:
# mypy has errors with enums, might be fixed using py.typed
return self.status == self.status.Running # type: ignore[comparison-overlap]
def lock_running_status(self) -> bool:
if self.check_running_status():
raise RuntimeError(
"This function shouldn't have been called " "with a running execution"
)
# mypy has errors with enums, might be fixed using py.typed
self.status = self.status.Running # type: ignore[assignment]
# we return True if the operation is successful
return True
def unlock_running_status(self) -> bool:
if not self.check_running_status():
raise RuntimeError("Handler should have been running")
# mypy has errors with enums, might be fixed using py.typed
self.status = self.status.Idle # type: ignore[assignment]
# we return True if the operation is successful
return True
# if None for the arguments, we will activate all the faults
# it returns the active injections
def activate(
self,
injections: typing.Optional[
typing.Sequence[enpheeph.injections.abc.injectionabc.InjectionABC]
] = None,
) -> typing.List[enpheeph.injections.abc.injectionabc.InjectionABC]:
if self.check_running_status():
print("Cannot do anything while running, try after the execution")
return self.active_injections
if injections is None:
injections = self.injections
# we use a dict to filter the duplicates in injections + self.active_injections
# otherwise bad things might happen in the SQL as the same object will be
# processed multiple times
filtered_injections = {
inj: counter
for counter, inj in enumerate(list(injections) + self.active_injections)
}.keys()
self.active_injections = [
inj for inj in filtered_injections if inj in self.injections
]
return self.active_injections
# if None we will deactivate everything
# it returns the active injections
def deactivate(
self,
# here Sequence is fine as we are simply iterating over/checking presence
injections: typing.Optional[
typing.Sequence[enpheeph.injections.abc.injectionabc.InjectionABC]
] = None,
) -> typing.Sequence[enpheeph.injections.abc.injectionabc.InjectionABC]:
if self.check_running_status():
print("Cannot do anything while running, try after the execution")
return self.active_injections
if injections is None:
injections = self.injections
self.active_injections = [
inj
for inj in self.active_injections
if inj not in injections and inj in self.injections
]
return self.active_injections
# to add injections to the current list of injections
def add_injections(
self,
injections: typing.Sequence[enpheeph.injections.abc.injectionabc.InjectionABC],
) -> typing.Sequence[enpheeph.injections.abc.injectionabc.InjectionABC]:
if self.check_running_status():
print("Cannot do anything while running, try after the execution")
return self.injections
        # we use a dict to filter the duplicates in injections + self.injections
# otherwise bad things might happen in the SQL as the same object will be
# processed multiple times
filtered_injections = {
inj: counter
for counter, inj in enumerate(list(injections) + self.injections)
}.keys()
self.injections = list(filtered_injections)
# we call activate with the list of active injections to remove
# the ones not included
self.activate(self.active_injections)
return self.injections
# to remove injections from the current list
# if None we remove all of them
def remove_injections(
self,
injections: typing.Optional[
typing.Sequence[enpheeph.injections.abc.injectionabc.InjectionABC]
] = None,
) -> typing.Sequence[enpheeph.injections.abc.injectionabc.InjectionABC]:
if self.check_running_status():
print("Cannot do anything while running, try after the execution")
return self.injections
if injections is None:
injections = self.injections
self.injections = [inj for inj in self.injections if inj not in injections]
# we call activate with the list of active injections to remove
# the ones not included
self.activate(self.active_injections)
return self.injections
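# --- hedged usage sketch, not part of the original module -------------------
# The helper below only illustrates the intended life cycle of InjectionHandler
# (activate, setup, run, teardown, deactivate); the injections, the plugin and
# the model are assumptions supplied by the caller, nothing new is imported.
def _injection_handler_usage_sketch(
    injections: typing.List[enpheeph.injections.abc.injectionabc.InjectionABC],
    library_handler_plugin: (
        enpheeph.handlers.plugins.libraryhandlerpluginabc.LibraryHandlerPluginABC
    ),
    model: enpheeph.utils.typings.ModelType,
) -> enpheeph.utils.typings.ModelType:
    handler = InjectionHandler(
        injections=injections, library_handler_plugin=library_handler_plugin
    )
    handler.activate()  # enable every configured injection
    model = handler.setup(model)  # instrument the model and lock the handler
    # ... run the instrumented model here ...
    model = handler.teardown(model)  # restore the model and unlock the handler
    handler.deactivate()  # clear the list of active injections
    return model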
| 7,881
| 38.019802
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/handlers/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'injectionhandler',
'plugins',
},
submod_attrs={
'injectionhandler': [
'InjectionHandler',
],
'plugins': [
'LibraryHandlerPluginABC',
'PyTorchHandlerPlugin',
'libraryhandlerpluginabc',
'pytorchhandlerplugin',
],
},
)
def __dir__():
return __all__
__all__ = ['InjectionHandler', 'LibraryHandlerPluginABC',
'PyTorchHandlerPlugin', 'injectionhandler',
'libraryhandlerpluginabc', 'plugins', 'pytorchhandlerplugin']
# </AUTOGEN_INIT>
| 3,171
| 31.701031
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/handlers/plugins/pytorchhandlerplugin.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import enpheeph.handlers.plugins.libraryhandlerpluginabc
import enpheeph.injections.abc.injectionabc
import enpheeph.utils.typings
# we place it after so flake8 does not complain about not-at-the-top imports
if typing.TYPE_CHECKING:
import torch
class PyTorchHandlerPlugin(
(enpheeph.handlers.plugins.libraryhandlerpluginabc.LibraryHandlerPluginABC),
):
def library_setup(
self,
model: enpheeph.utils.typings.ModelType,
active_injections: typing.List[
enpheeph.injections.abc.injectionabc.InjectionABC
],
) -> enpheeph.utils.typings.ModelType:
for inj in active_injections:
module = self.get_module(model, inj.location.module_name)
new_module = inj.setup(module)
self.set_module(model, inj.location.module_name, new_module)
return model
def library_teardown(
self,
model: enpheeph.utils.typings.ModelType,
active_injections: typing.List[
enpheeph.injections.abc.injectionabc.InjectionABC
],
) -> enpheeph.utils.typings.ModelType:
for inj in active_injections:
module = self.get_module(model, inj.location.module_name)
new_module = inj.teardown(module)
self.set_module(model, inj.location.module_name, new_module)
return model
def get_module(
self, model: "torch.nn.Module", full_module_name: str
) -> "torch.nn.Module":
dest_module = model
for submodule in full_module_name.split("."):
dest_module = getattr(dest_module, submodule)
return dest_module
def set_module(
self,
model: "torch.nn.Module",
full_module_name: str,
module: "torch.nn.Module",
) -> None:
dest_module = model
module_names_split = full_module_name.split(".")
module_names = module_names_split[:-1]
target_module_name = module_names_split[-1]
for submodule in module_names:
dest_module = getattr(dest_module, submodule)
setattr(dest_module, target_module_name, module)
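# --- hedged usage sketch, not part of the original module -------------------
# Quick illustration of how get_module/set_module walk a dotted module name on a
# torch.nn.Module; the tiny Sequential model below is an assumption used only
# for the example, and torch is imported locally to mirror the TYPE_CHECKING
# import above.
def _module_access_sketch() -> None:
    import torch

    model = torch.nn.Sequential(torch.nn.Linear(4, 4), torch.nn.ReLU())
    plugin = PyTorchHandlerPlugin()
    # "0" addresses the first child registered inside the Sequential container
    original = plugin.get_module(model, "0")
    assert isinstance(original, torch.nn.Linear)
    # replace it in place, exactly as setup()/teardown() do with injected modules
    plugin.set_module(model, "0", torch.nn.Identity())
    assert isinstance(plugin.get_module(model, "0"), torch.nn.Identity)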
| 3,666
| 37.6
| 80
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/handlers/plugins/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'libraryhandlerpluginabc',
'pytorchhandlerplugin',
},
submod_attrs={
'libraryhandlerpluginabc': [
'LibraryHandlerPluginABC',
],
'pytorchhandlerplugin': [
'PyTorchHandlerPlugin',
],
},
)
def __dir__():
return __all__
__all__ = ['LibraryHandlerPluginABC', 'PyTorchHandlerPlugin',
'libraryhandlerpluginabc', 'pytorchhandlerplugin']
# </AUTOGEN_INIT>
| 3,042
| 31.72043
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/classes.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import collections.abc
import types
import typing
IDGeneratorSubclass = typing.TypeVar("IDGeneratorSubclass", bound="IDGenerator")
# base class for generating sequential IDs for different instances
# the starting value can be configured, and the counter can be shared across
# subclasses through a common base class
class IDGenerator(object):
# these are the defaults for all the options
# this is taken from the root if shared
_INSTANCE_ID_COUNTER: typing.Optional[int] = None
_INSTANCE_ID_COUNTER_RESET_VALUE: int = 0
# this flag is for each class
_INSTANCE_ID_COUNTER_USE_SHARED: bool = False
# we need a flag to know which one is the root
# if none of them have it, we resort to the base class, IDGenerator
_INSTANCE_ID_COUNTER_SHARED_ROOT_FLAG: bool = False
# we define the typing for each class instance, to avoid mypy errors
_unique_instance_id: int
@property
def unique_instance_id(self) -> int:
return self._unique_instance_id
# we override init_subclass, to get the arguments from the class definition
# we can set the reset value for the counter (starting value)
# we can also set on a per-class basis whether the class is to be considered a
# root and whether it should use a shared counter or use its own
@classmethod
def __init_subclass__(
cls: typing.Type[IDGeneratorSubclass],
reset_value: int = _INSTANCE_ID_COUNTER_RESET_VALUE,
use_shared: bool = _INSTANCE_ID_COUNTER_USE_SHARED,
shared_root_flag: bool = _INSTANCE_ID_COUNTER_SHARED_ROOT_FLAG,
**kwargs: typing.Any,
) -> None:
# we set the class defaults overriding the root defaults
cls._INSTANCE_ID_COUNTER_RESET_VALUE = reset_value
cls._INSTANCE_ID_COUNTER_USE_SHARED = use_shared
cls._INSTANCE_ID_COUNTER_SHARED_ROOT_FLAG = shared_root_flag
# this call with reset=True is **FUNDAMENTAL** for not sharing the counter
# otherwise the subclass would receive the setup done on the parent
# and this would cause the child to have a reference to the parent class
# attribute, breaking the independency
cls._setup_id_counter(reset=True)
# we ignore the problem with object.__init_subclass__
# this class is supposed to be sub-classed, so it will handle general kwargs
# for other parent classes
super().__init_subclass__(**kwargs) # type: ignore[call-arg]
# we have to use the shared flag if the flag is set
# we go through the mros (which are from the most specific class backward to object)
# to reach a class which has the root flag enabled
# if this does not happen, we go through the mros from object down until we find
# the deepest root which has an id counter
@classmethod
def _get_root_with_id(
cls: typing.Type[IDGeneratorSubclass],
) -> typing.Type[IDGeneratorSubclass]:
if cls._INSTANCE_ID_COUNTER_USE_SHARED:
for cls_ in cls.mro():
if hasattr(cls_, "_INSTANCE_ID_COUNTER") and getattr(
cls_, "_INSTANCE_ID_COUNTER_SHARED_ROOT_FLAG", False
):
return cls_
for cls_ in reversed(cls.mro()):
if hasattr(cls_, "_INSTANCE_ID_COUNTER"):
return cls_
return cls
# we setup the counter, which is reset to the original value if the counter is
# initially None or it is forced
# **IT IS FUNDAMENTAL** to run it with reset so that each class counter is set
# otherwise it will use the root one in a shared configuration
@classmethod
def _setup_id_counter(
cls: typing.Type[IDGeneratorSubclass], reset: bool = False
) -> None:
cls_ = cls._get_root_with_id()
if reset or cls_._INSTANCE_ID_COUNTER is None:
cls_._INSTANCE_ID_COUNTER = cls_._INSTANCE_ID_COUNTER_RESET_VALUE
# we update the counter in the correct class
@classmethod
def _update_id_counter(cls: typing.Type[IDGeneratorSubclass]) -> None:
cls_ = cls._get_root_with_id()
cls_._setup_id_counter(reset=False)
# we ignore this type error as we setup the id counter in the previous line
cls_._INSTANCE_ID_COUNTER += 1 # type: ignore[operator]
# to return the id counter
@classmethod
def _get_id_counter(cls: typing.Type[IDGeneratorSubclass]) -> typing.Optional[int]:
cls_ = cls._get_root_with_id()
return cls_._INSTANCE_ID_COUNTER
# to set the id in the current instance
# this is supposed to be called during __new__, to set the instance id after the
# reset
def _set_instance_id(self: IDGeneratorSubclass, reset: bool = False) -> None:
self._setup_id_counter(reset=reset)
# frozen instance trick
# no need for the trick in the classmethods
object.__setattr__(self, "_unique_instance_id", self._get_id_counter())
self._update_id_counter()
# we override new to set the instance id
def __new__(
cls: typing.Type[IDGeneratorSubclass], *args: typing.Any, **kwargs: typing.Any
) -> IDGeneratorSubclass:
obj: IDGeneratorSubclass = super().__new__(cls)
obj._set_instance_id()
return obj
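# --- hedged usage sketch, not part of the original module -------------------
# Minimal illustration of the shared-counter semantics, mirroring the pattern
# used by the injection-location dataclasses in enpheeph.utils.dataclasses: one
# root class owns the counter (shared_root_flag=True) and subclasses opting in
# with use_shared=True draw consecutive identifiers from it; the class names
# below are invented for the example.
def _id_generator_usage_sketch() -> None:
    class _Root(IDGenerator, shared_root_flag=True):
        pass

    class _ChildA(_Root, use_shared=True):
        pass

    class _ChildB(_Root, use_shared=True):
        pass

    first = _ChildA()
    second = _ChildB()
    # both children draw their identifiers from the counter owned by _Root
    assert second.unique_instance_id == first.unique_instance_id + 1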
class SkipIfErrorContextManager(object):
def __init__(
self,
# use typing.Type as type is not subscriptable in Python 3.8
error: typing.Union[
typing.Type[BaseException], typing.Sequence[typing.Type[BaseException]]
],
string_to_check: typing.Optional[str] = None,
) -> None:
# we save the error in a tuple if it is a single class
if not isinstance(error, collections.abc.Sequence):
error = (error,)
error = tuple(error)
# we check for each element to be a BaseException subclass
for e in error:
if not issubclass(e, BaseException):
raise TypeError(f"Not a valid BaseException subclass: {e}")
self.error = error
self.string_to_check = string_to_check
def __enter__(self) -> None:
pass
# how to type a context manager
# https://adamj.eu/tech/2021/07/04/python-type-hints-how-to-type-a-context-manager/
def __exit__(
self,
# use typing.Type as type is not subscriptable in Python 3.8
exc_type: typing.Optional[typing.Type[BaseException]],
exc_val: typing.Optional[BaseException],
exc_tb: typing.Optional[types.TracebackType],
) -> typing.Optional[bool]:
# if we have received the error to be caught with its string, we return True
# to avoid the error from propagating
if exc_type is not None and exc_val is not None:
error_presence = exc_type in self.error
string_check = (
self.string_to_check in str(exc_val)
if self.string_to_check is not None
else True
)
return error_presence and string_check
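# --- hedged usage sketch, not part of the original module -------------------
# SkipIfErrorContextManager swallows an exception only when both the type and,
# if given, the substring match; the message used below is just an example.
def _skip_if_error_usage_sketch() -> None:
    with SkipIfErrorContextManager(ValueError, string_to_check="not supported"):
        raise ValueError("this operation is not supported")
    # execution continues here because the ValueError above was suppressed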
| 8,569
| 41.85
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/enums.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import enum
import operator
class BitFaultValue(enum.Enum):
Random = enum.auto()
StuckAtZero = enum.auto()
StuckAtOne = enum.auto()
BitFlip = enum.auto()
class BitWidth(enum.IntEnum):
OneByte = 8
TwoBytes = 16
ThreeBytes = 24
FourBytes = 32
FiveBytes = 40
SixBytes = 48
SevenBytes = 56
EightBytes = 64
FloatingPoint16 = TwoBytes
FloatingPoint32 = FourBytes
FloatingPoint64 = EightBytes
Int32 = FourBytes
Int64 = EightBytes
class DimensionType(enum.Enum):
BitLevel = enum.auto()
Batch = enum.auto()
Tensor = enum.auto()
Time = enum.auto()
# NOTE: this endianness does not represent the actual endianness of the machine,
# only the endianness seen in the Python objects when accessing them
class Endianness(enum.Enum):
Little = "<"
Big = ">"
MSBAtIndexZero = Big
LSBAtIndexZero = Little
class FaultMaskOperation(enum.Enum):
InPlaceXor = operator.ixor
InPlaceAnd = operator.iand
InPlaceOr = operator.ior
Xor = operator.xor
And = operator.and_
Or = operator.or_
class FaultMaskValue(enum.IntEnum):
One = 1
Zero = 0
class HandlerStatus(enum.Enum):
Running = enum.auto()
Idle = enum.auto()
class ImportName(enum.Enum):
Cupy = "cupy"
Norse = "norse"
Numpy = "numpy"
PyTorch = "torch"
PyTorchLightning = "pytorch_lightning"
SQLAlchemy = "sqlalchemy"
# we use flag so that different metrics can be composed together
class MonitorMetric(enum.Flag):
StandardDeviation = enum.auto()
Maximum = enum.auto()
Minimum = enum.auto()
ArithmeticMean = enum.auto()
GeometricMean = enum.auto()
class ParameterType(enum.Flag):
# network type
DNN = enum.auto()
SNN = enum.auto()
# sub-network type, as we need special care for RNN
RNN = enum.auto()
# parameter type
Weight = enum.auto()
Activation = enum.auto()
State = enum.auto()
# state types
LIF = enum.auto()
# variables saved in state
Voltage = enum.auto()
Current = enum.auto()
# tensor type
Dense = enum.auto()
PrunedDense = enum.auto()
Sparse = enum.auto()
# sparse coordinates type
COO = enum.auto()
CSR = enum.auto()
# sparse coordinates
Index = enum.auto()
Value = enum.auto()
# complex types
DNNWeightDense = DNN | Weight | Dense
DNNActivationDense = DNN | Activation | Dense
SNNLIFStateVoltageDense = SNN | State | LIF | Voltage | Dense
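# --- hedged usage sketch, not part of the original module -------------------
# The composite ParameterType members decompose with the usual enum.Flag
# operators; the checks below follow directly from the definitions above.
def _parameter_type_sketch() -> None:
    ptype = ParameterType.DNNWeightDense
    assert ParameterType.Weight in ptype
    assert ParameterType.Activation not in ptype
    assert ptype == ParameterType.DNN | ParameterType.Weight | ParameterType.Dense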
| 4,043
| 25.431373
| 80
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/storagetypings.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import datetime
import typing
# NOTE: we use typing.Protocol as it is quite difficult to make abc.ABC work with
# SQLAlchemy, so in this way it is easier to use for the different storage plugins
@typing.runtime_checkable
class ExperimentRunProtocol(typing.Protocol):
id_: int
running: bool
completed: bool
start_time: typing.Optional[datetime.datetime]
total_duration: typing.Optional[datetime.timedelta]
golden_run_flag: bool
metrics: typing.Optional[typing.Dict[str, typing.Any]]
polymorphic_discriminator: typing.Optional[str]
injections: typing.Optional[typing.Sequence["InjectionProtocol"]]
golden_run: typing.Optional["ExperimentRunProtocol"]
golden_run_id: typing.Optional[int]
injected_runs: typing.Optional[typing.Sequence["ExperimentRunProtocol"]]
@typing.runtime_checkable
class InjectionProtocol(typing.Protocol):
location: typing.Any
internal_id: int
experiment_run_id: typing.Optional[int]
experiment_run: typing.Optional["ExperimentRunProtocol"]
@typing.runtime_checkable
class FaultProtocol(InjectionProtocol, typing.Protocol):
pass
@typing.runtime_checkable
class MonitorProtocol(InjectionProtocol, typing.Protocol):
payload: typing.Optional[typing.Dict[str, typing.Any]]
@typing.runtime_checkable
class SessionProtocol(typing.Protocol):
experiment_runs: typing.Optional[typing.List["ExperimentRunProtocol"]]
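# --- hedged usage sketch, not part of the original module -------------------
# The protocols are runtime_checkable, so isinstance() only verifies that the
# expected attributes exist; the stand-in dataclass below is invented for the
# illustration and does not belong to any storage plugin.
def _protocol_usage_sketch() -> None:
    import dataclasses

    @dataclasses.dataclass
    class _FakeInjection:
        location: typing.Any = None
        internal_id: int = 0
        experiment_run_id: typing.Optional[int] = None
        experiment_run: typing.Optional["ExperimentRunProtocol"] = None

    assert isinstance(_FakeInjection(), InjectionProtocol)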
| 2,942
| 34.457831
| 81
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/sqlutils.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# import sqlalchemy
# import sqlalchemy.dialects.postgresql
# import sqlalchemy.ext.compiler
# import sqlalchemy.sql.functions
# import sqlalchemy.types
# to have timestamps in UTC for the database
# the default function func.time() returns the local time
# UTC TIMESTAMP SQL
# class utcnow(sqlalchemy.sql.functions.FunctionElement):
# type = sqlalchemy.types.DateTime()
# @sqlalchemy.ext.compiler.compiles(utcnow, "postgresql")
# def pg_utcnow(element, compiler, **kw):
# return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
# END UTC TIMESTAMP SQL
| 2,099
| 37.888889
| 77
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/constants.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import enpheeph.utils.enums
import enpheeph.utils.typings
NORSE_DIMENSION_DICT: enpheeph.utils.typings.DimensionDictType = {
enpheeph.utils.enums.DimensionType.Time: 0,
enpheeph.utils.enums.DimensionType.Batch: 1,
enpheeph.utils.enums.DimensionType.Tensor: ...,
}
PYTORCH_DIMENSION_DICT: enpheeph.utils.typings.DimensionDictType = {
enpheeph.utils.enums.DimensionType.Batch: 0,
enpheeph.utils.enums.DimensionType.Tensor: ...,
}
| 1,232
| 38.774194
| 77
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/functions.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import re
import typing
CAMEL_TO_SNAKE_REGEX: re.Pattern[str] = re.compile(
"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))"
)
# this function is required to convert CamelCase to snake_case
def camel_to_snake(camel: str) -> str:
# from https://stackoverflow.com/a/12867228
return CAMEL_TO_SNAKE_REGEX.sub(r"_\1", camel).lower()
def get_object_library(obj: typing.Any) -> str | None:
module = getattr(obj.__class__, "__module__", None)
# to be safe we return None if the module is not a string
return module.split(".")[0] if isinstance(module, str) else None
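# --- hedged usage sketch, not part of the original module -------------------
# camel_to_snake is what turns class names into snake_case identifiers (for
# instance the SQL table names), while get_object_library reports the top-level
# package of the class defining an object; both checks are plain illustrations.
def _functions_usage_sketch() -> None:
    assert camel_to_snake("ExperimentRun") == "experiment_run"
    # built-in objects such as plain integers belong to the "builtins" package
    assert get_object_library(42) == "builtins"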
| 2,122
| 39.056604
| 77
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/typings.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import types
import typing
# we fake import cupy, numpy and torch to silence mypy
if typing.TYPE_CHECKING:
import cupy
import numpy
import torch
import enpheeph.utils.enums
# for the active_dimension_index
ActiveDimensionIndexType = typing.Union[
enpheeph.utils.enums.DimensionType,
types.EllipsisType,
]
# we could even add bit and other parameters in here
AnyIndexType = typing.Union[
"Index1DType",
"IndexMultiDType",
]
AnyMaskType = typing.Union[
"Mask1DType",
"MaskMultiDType",
]
ArrayType = typing.Union[
"cupy.ndarray",
"numpy.ndarray",
]
DimensionDictType = typing.Dict[
enpheeph.utils.enums.DimensionType,
"DimensionIndexType",
]
DimensionIndexType = typing.Union[
int,
types.EllipsisType,
# **NOTE**: we do not support tuples yet, one can duplicate enum values to have
# multiple indices with similar names
# typing.Tuple[int, ...],
]
DimensionLocationIndexType = typing.Dict[
enpheeph.utils.enums.DimensionType,
AnyIndexType,
]
DimensionLocationMaskType = typing.Dict[
enpheeph.utils.enums.DimensionType,
AnyMaskType,
]
# we use Tuple and not Sequence to allow hashability
# mypy reports error if one of the types is not valid
Index1DType = typing.Union[
int,
slice,
types.EllipsisType,
# we need List as Tuple is seen as multiple dimensions when indexing
# **NOTE**: this might give problems with hashing in the dataclasses
list[int],
]
IndexMultiDType = typing.Union[
int,
slice,
types.EllipsisType,
# we use Tuple as in this case we need to cover multiple dimensions
tuple[Index1DType, ...],
]
IndexTimeType = Index1DType
Mask1DType = typing.Sequence[bool]
MaskMultiDType = typing.Union[
Mask1DType,
typing.Sequence[Mask1DType],
]
LowLevelMaskArrayType = typing.Union[
"cupy.ndarray",
"numpy.ndarray",
]
ModelType = "torch.nn.Module"
ShapeType = tuple[int, ...]
TensorType = typing.Union[
ArrayType,
"torch.Tensor",
]
| 3,537
| 28.239669
| 83
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/csvdataclasses.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import datetime
import dataclasses
import typing
@dataclasses.dataclass(init=True, repr=True, eq=True, order=True)
class ExperimentRun(object):
id_: int
running: bool = False
completed: bool = False
start_time: typing.Optional[datetime.datetime] = None
total_duration: typing.Optional[datetime.timedelta] = None
golden_run_flag: bool = False
metrics: typing.Optional[typing.Dict[str, typing.Any]] = None
polymorphic_discriminator = None
injections: typing.Optional[typing.Sequence["Injection"]] = None
golden_run: typing.Optional["ExperimentRun"] = None
golden_run_id: typing.Optional[int] = None
injected_runs: typing.Optional[typing.Sequence["ExperimentRun"]] = None
@dataclasses.dataclass(init=True, repr=True, eq=True, order=True)
class Injection(object):
location: typing.Any
internal_id: int
experiment_run_id: typing.Optional[int] = None
experiment_run: typing.Optional["ExperimentRun"] = None
@dataclasses.dataclass(init=True, repr=True, eq=True, order=True)
class Fault(Injection):
pass
@dataclasses.dataclass(init=True, repr=True, eq=True, order=True)
class Monitor(Injection):
payload: typing.Optional[typing.Dict[str, typing.Any]] = None
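# --- hedged usage sketch, not part of the original module -------------------
# These dataclasses mirror the storage protocols, so a golden run and an
# injected run can be linked directly through golden_run/golden_run_id; the
# identifiers and the metric value below are placeholders.
def _csv_dataclasses_usage_sketch() -> Fault:
    golden = ExperimentRun(id_=1, golden_run_flag=True, metrics={"accuracy": 0.91})
    injected = ExperimentRun(id_=2, golden_run=golden, golden_run_id=golden.id_)
    fault = Fault(location=None, internal_id=0, experiment_run=injected)
    assert fault.experiment_run.golden_run is golden
    return fault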
| 2,776
| 35.064935
| 77
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/dataclasses.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import abc
import dataclasses
import typing
import typing_extensions
import enpheeph.utils.classes
import enpheeph.utils.enums
import enpheeph.utils.typings
# all the following dataclasses are frozen as their arguments should not change
# this also simplifies the handling of PickleType for the SQL storage plugin
#
# here are all the info required for injecting faults in a bit
# we need a dataclass so that we can convert the BitFaultValue type into
# a mask with fill values
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class BitFaultMaskInfo(object):
# to convert bit faults into arguments for the fault mask
BIT_FAULT_VALUE_TO_BIT_FAULT_MASK_INFO_ARGS = {
enpheeph.utils.enums.BitFaultValue.StuckAtZero: {
"operation": enpheeph.utils.enums.FaultMaskOperation.And,
"mask_value": enpheeph.utils.enums.FaultMaskValue.Zero,
"fill_value": enpheeph.utils.enums.FaultMaskValue.One,
},
enpheeph.utils.enums.BitFaultValue.StuckAtOne: {
"operation": enpheeph.utils.enums.FaultMaskOperation.Or,
"mask_value": enpheeph.utils.enums.FaultMaskValue.One,
"fill_value": enpheeph.utils.enums.FaultMaskValue.Zero,
},
enpheeph.utils.enums.BitFaultValue.BitFlip: {
"operation": enpheeph.utils.enums.FaultMaskOperation.Xor,
"mask_value": enpheeph.utils.enums.FaultMaskValue.One,
"fill_value": enpheeph.utils.enums.FaultMaskValue.Zero,
},
}
operation: enpheeph.utils.enums.FaultMaskOperation
mask_value: enpheeph.utils.enums.FaultMaskValue
fill_value: enpheeph.utils.enums.FaultMaskValue
@classmethod
def from_bit_fault_value(
cls,
bit_fault_value: enpheeph.utils.enums.BitFaultValue,
) -> typing_extensions.Self:
dict_: typing.Dict[
str, typing.Any
] = cls.BIT_FAULT_VALUE_TO_BIT_FAULT_MASK_INFO_ARGS[bit_fault_value]
return cls(**dict_)
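# --- hedged usage sketch, not part of the original module -------------------
# from_bit_fault_value is a plain lookup into the table above: a bit flip maps
# to an XOR operation with mask value One and fill value Zero.
def _bit_fault_mask_info_sketch() -> BitFaultMaskInfo:
    info = BitFaultMaskInfo.from_bit_fault_value(
        enpheeph.utils.enums.BitFaultValue.BitFlip
    )
    assert info.operation is enpheeph.utils.enums.FaultMaskOperation.Xor
    assert info.mask_value is enpheeph.utils.enums.FaultMaskValue.One
    assert info.fill_value is enpheeph.utils.enums.FaultMaskValue.Zero
    return info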
# we can safely assume that the dimension will be 1 only, as this is supposed
# to be used internally from a linear array of bits
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class BitIndexInfo(object):
bit_index: enpheeph.utils.typings.Index1DType
# we can use an enum if only a set of bitwidths is allowed
# bitwidth: enpheeph.utils.enums.BitWidth
bitwidth: int
# this is equivalent for big endian
# NOTE: endianness is not required when we are working at Python level
# this is because all LSBs are positioned at bit 0 when accessing an
# integer, while the corresponding string has MSB at 0
endianness: enpheeph.utils.enums.Endianness = (
enpheeph.utils.enums.Endianness.MSBAtIndexZero
)
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class LocationModuleNameMixin(object):
# name of the module to be targeted
module_name: str
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class LocationMixin(object):
# parameter, activation or weight type
parameter_type: enpheeph.utils.enums.ParameterType
# same for the bit injection info
bit_index: enpheeph.utils.typings.Index1DType
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class LocationOptionalMixin(object):
# name of parameters to get, default is None as it is required if it is not
# an activation injection
parameter_name: typing.Optional[str] = None
# batch/tensor/time indices are now inside the dimension_index
dimension_index: typing.Optional[
enpheeph.utils.typings.DimensionLocationIndexType
] = None
# mask for batch/tensor/time
dimension_mask: typing.Optional[
enpheeph.utils.typings.DimensionLocationMaskType
] = None
def __post_init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
# not needed, it should be done in sub-classes
# super().__post_init__(*args, **kwargs)
not_activation_type = (
self.parameter_type.Activation # type: ignore[attr-defined]
not in self.parameter_type # type: ignore[attr-defined]
)
at_least_one_dimension = (
self.dimension_index is not None or self.dimension_mask is not None
)
if not_activation_type and self.parameter_name is None:
raise ValueError(
"'parameter_name' must be provided "
"if the type of parameter is not an activation"
)
if not at_least_one_dimension:
raise ValueError(
"at least one between 'dimension_index' and "
"'dimension_mask' must be given"
)
else:
dim_index = self.dimension_index if self.dimension_index is not None else {}
dim_mask = self.dimension_mask if self.dimension_mask is not None else {}
overlap_dimension = set(dim_index.keys()).intersection(dim_mask.keys())
if overlap_dimension:
raise ValueError("dimensions overlap some indices")
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class FaultLocationMixin(object):
# value of fault to be injected
bit_fault_value: enpheeph.utils.enums.BitFaultValue
# the order of the parameters is from last to first
# so the ones with defaults should be at the beginning
# NOTE: we define post-init to generate the id for each class
# if overriding post-init in the subclasses, call it with super() for id generation
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class InjectionLocationABC(
LocationModuleNameMixin,
enpheeph.utils.classes.IDGenerator,
abc.ABC,
object,
shared_root_flag=True,
):
pass
# here we define a common base injection location, to use the basic parameters
# which are in common to Monitor and Fault
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class BaseInjectionLocation(
LocationMixin,
InjectionLocationABC,
use_shared=True,
):
pass
# the order of the parameters is from last to first
# so the ones with defaults should be at the beginning
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class MonitorLocation(
LocationOptionalMixin,
BaseInjectionLocation,
use_shared=True,
):
pass
# the order of the parameters is from last to first
# so the ones with defaults should be at the beginning
@dataclasses.dataclass(init=True, repr=True, eq=True, frozen=True, unsafe_hash=True)
class FaultLocation(
LocationOptionalMixin,
FaultLocationMixin,
BaseInjectionLocation,
use_shared=True,
):
pass
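# --- hedged usage sketch, not part of the original module -------------------
# Example of building a FaultLocation; the module and parameter names used here
# ("classifier.0", "weight") are placeholders rather than a real model, and a
# weight injection must name the parameter and give at least one dimension
# index or mask, as enforced by LocationOptionalMixin.__post_init__ above.
def _fault_location_sketch() -> FaultLocation:
    return FaultLocation(
        module_name="classifier.0",
        parameter_type=enpheeph.utils.enums.ParameterType.DNNWeightDense,
        bit_index=0,
        bit_fault_value=enpheeph.utils.enums.BitFaultValue.BitFlip,
        parameter_name="weight",
        dimension_index={enpheeph.utils.enums.DimensionType.Tensor: ...},
    )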
| 8,368
| 37.925581
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/sqldataclasses.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# there is a bug in mypy with sqlalchemy
# when using __mapper_args__ in declared_attr
# https://github.com/sqlalchemy/sqlalchemy/issues/7321
# a possible solution is to add @classmethod before the declared_attr to avoid crashing
# the other is to skip the file, creating errors in the dependencies
# the solution might be skipping the whole folder
# it will be enabled again once fixed
import dataclasses
import datetime
import typing
import sqlalchemy
import sqlalchemy.dialects.postgresql
import sqlalchemy.ext.compiler
import sqlalchemy.ext.mutable
import sqlalchemy.inspection
import sqlalchemy.orm
import sqlalchemy.orm.decl_api
import sqlalchemy.sql.expression
import sqlalchemy.types
import enpheeph.injections.plugins.storage.sql.utils.sqlutils
import enpheeph.utils.dataclasses
import enpheeph.utils.enums
import enpheeph.utils.functions
# this string is used to identify
# the SQLAlchemy metadata in each field of each dataclass
SQLALCHEMY_METADATA_KEY: str = "sqlalchemy"
# we define the metadata with the registry and the base class to identify
# rows in tables
# mapper_registry = sqlalchemy.orm.registry()
# we don't need it if we use only dataclasses
# or sqlalchemy.orm.declarative_base() if we don't use the mapper_registry
# Base: sqlalchemy.orm.decl_api.DeclarativeMeta = mapper_registry.generate_base()
#
# defining our custom base class,
# we can define attributes which are common across the different mapped classes
# NOTE: the whole assumption here is that we can have inheritance, but subclasses
# **must** be connected through joined table inheritance
@sqlalchemy.orm.declarative_mixin
class CustomBaseClass(object):
# ClassVar to avoid the field to be considerate in dataclasses
ID_NAME: typing.ClassVar[str] = "id_"
# PARENT_CLASS: typing.Type['CustomBaseClass']
@classmethod
@property
def snake_case_class_name(cls) -> str:
snake_case_name: str = enpheeph.utils.functions.camel_to_snake(cls.__name__)
return snake_case_name
# cascading is not applicable to __magic__ attributes
# however this is called by all classes, even children, unless overwritten
@sqlalchemy.orm.declared_attr
def __tablename__(cls) -> sqlalchemy.orm.Mapped[typing.Optional[str]]:
if sqlalchemy.orm.has_inherited_table(cls):
# if it is a inherited class, we don't need the tablename as we are using
# directly the joined table inheritance
return None
else:
return cls.snake_case_class_name
# NOTE: id is created after an object is committed to the SQL DB
# we are using no table for the subclasses so we cannot have a primary id
# for each subclasses
@sqlalchemy.orm.declared_attr.cascading
def id_(cls) -> sqlalchemy.orm.Mapped[typing.Optional[int]]:
# not required, it gives out
# sqlalchemy.exc.ArgumentError:
# Can't place primary key columns on an inherited class with no table.
# ^ this error if __tablename__ is None
if sqlalchemy.orm.has_inherited_table(cls):
return None
# return sqlalchemy.Column(
# cls.ID_NAME,
# sqlalchemy.ForeignKey(
# f"{cls.PARENT_CLASS.__tablename__}.{cls.PARENT_CLASS.ID_NAME}"
# ),
# primary_key=True
# )
else:
# ID_NAME changes only the column name inside the SQL, at ORM-level is
# always id_
return sqlalchemy.Column(cls.ID_NAME, sqlalchemy.Integer, primary_key=True)
# look for specific args for PostgreSQL
# __table_args__ = {'mysql_engine': 'InnoDB'}
CustomBase = sqlalchemy.orm.declarative_base(cls=CustomBaseClass)
# relationships are better in the base class unless you need multiple inheritance,
# see Injection for polymorphism while ExperimentRunBaseMixin has only single inheritance
@sqlalchemy.orm.declarative_mixin
class SessionBaseMixin(object):
@sqlalchemy.orm.declared_attr
def extra_session_info(
self,
) -> sqlalchemy.orm.Mapped[typing.Optional[typing.Dict[typing.Any, typing.Any]]]:
return sqlalchemy.Column(
sqlalchemy.ext.mutable.MutableDict.as_mutable(sqlalchemy.PickleType)
)
# no need for the dataclass if we are instantiating everything normally and we
# don't need other __magic__ methods from dataclass
@dataclasses.dataclass(init=True, repr=True, eq=True)
class Session(SessionBaseMixin, CustomBase):
def EXPERIMENT_RUN_CLASS_ID_LAMBDA():
return ExperimentRun.id_
def EXPERIMENT_RUN_CLASS_LAMBDA():
return ExperimentRun
    # we use backref since it is a one-to-many from Session to ExperimentRun
EXPERIMENT_RUN_BACKREF_NAME: typing.ClassVar[str] = "session"
@sqlalchemy.orm.declared_attr
def experiment_runs(
cls,
) -> sqlalchemy.orm.Mapped[typing.Optional[typing.List["ExperimentRun"]]]:
return sqlalchemy.orm.relationship(
cls.EXPERIMENT_RUN_CLASS_LAMBDA,
backref=cls.EXPERIMENT_RUN_BACKREF_NAME,
)
# NOTE: declarative mixin is only useful for MyPy,
# it does not provide any extra functionality
@sqlalchemy.orm.declarative_mixin
class ExperimentRunBaseMixin(object):
# NOTE: all of these declared_attr need to be mapped using mapped_registry.mapped
# or inherit from a Base class
# then these attributes will become settable in the init of the corresponding
# class, much like a dataclass
# NOTE: we use cascading so that the definition propagates also to the children
@sqlalchemy.orm.declared_attr.cascading
def running(cls) -> sqlalchemy.orm.Mapped[bool]:
return sqlalchemy.Column(sqlalchemy.Boolean, nullable=False)
@sqlalchemy.orm.declared_attr.cascading
def completed(cls) -> sqlalchemy.orm.Mapped[bool]:
return sqlalchemy.Column(sqlalchemy.Boolean, nullable=False)
@sqlalchemy.orm.declared_attr.cascading
def start_time(cls) -> sqlalchemy.orm.Mapped[typing.Optional[datetime.datetime]]:
return sqlalchemy.Column(sqlalchemy.DateTime)
@sqlalchemy.orm.declared_attr.cascading
def total_duration(
cls,
) -> sqlalchemy.orm.Mapped[typing.Optional[datetime.timedelta]]:
return sqlalchemy.Column(sqlalchemy.Interval)
@sqlalchemy.orm.declared_attr.cascading
def golden_run_flag(cls) -> sqlalchemy.orm.Mapped[bool]:
return sqlalchemy.Column(sqlalchemy.Boolean, nullable=False)
@sqlalchemy.orm.declared_attr.cascading
def metrics(
self,
) -> sqlalchemy.orm.Mapped[typing.Optional[typing.Dict[typing.Any, typing.Any]]]:
return sqlalchemy.Column(
sqlalchemy.ext.mutable.MutableDict.as_mutable(sqlalchemy.PickleType)
)
# this column contains an extra dict payload containing extra info for the
# experiment
@sqlalchemy.orm.declared_attr.cascading
def extra_experiment_info(
self,
) -> sqlalchemy.orm.Mapped[typing.Optional[typing.Dict[typing.Any, typing.Any]]]:
return sqlalchemy.Column(
sqlalchemy.ext.mutable.MutableDict.as_mutable(sqlalchemy.PickleType)
)
@sqlalchemy.orm.declarative_mixin
class PolymorphicMixin(object):
POLYMORPHIC_DISCRIMINATOR_NAME: typing.ClassVar[str] = "polymorphic_discriminator"
@sqlalchemy.orm.declared_attr
def __mapper_args__(
cls: typing.Type[CustomBaseClass],
) -> sqlalchemy.orm.Mapped[typing.Dict[str, str]]:
if sqlalchemy.orm.has_inherited_table(cls):
# the name is the snake_case name of the class since __tablename__ is not
            # defined for the child classes
return {
"polymorphic_identity": cls.snake_case_class_name,
}
else:
# for the parent class we use the tablename as identity
return {
"polymorphic_identity": cls.__tablename__,
"polymorphic_on": cls.POLYMORPHIC_DISCRIMINATOR_NAME,
}
# this is defined only for the main class
@sqlalchemy.orm.declared_attr
def polymorphic_discriminator(cls) -> sqlalchemy.orm.Mapped[typing.Optional[str]]:
if sqlalchemy.orm.has_inherited_table(cls):
return None
else:
return sqlalchemy.Column(sqlalchemy.String)
# no need for the dataclass if we are instantiating everything normally and we
# don't need other __magic__ methods from dataclass
@dataclasses.dataclass(init=True, repr=True, eq=True)
class ExperimentRun(ExperimentRunBaseMixin, PolymorphicMixin, CustomBase):
# FIXME: add support for ModelInfo, which might be a one-to-many from the ModelInfo
# side
def INJECTION_CLASS_LAMBDA():
return Injection
def INJECTION_FOREIGN_KEY_LAMBDA():
return Injection.experiment_run_id
INJECTION_BACKPOPULATES_NAME: typing.ClassVar[str] = "experiment_run"
def SESSION_CLASS_ID_LAMBDA():
return Session.id_
    # relationship giving the list of injected runs associated with this golden run
    # we also create golden_run as a reference back to the golden run
    # foreign_keys is golden_run_id, which contains the ID of the golden run,
    # so that the many (remote) side of the injected runs is connected
    # back to the one (local) side of the golden run
@sqlalchemy.orm.declared_attr
def injected_runs(cls) -> sqlalchemy.orm.Mapped[typing.Sequence["ExperimentRun"]]:
return sqlalchemy.orm.relationship(
cls.__name__,
backref=sqlalchemy.orm.backref("golden_run", remote_side=[cls.id_]),
foreign_keys=f"{cls.__name__}.golden_run_id",
cascade="all, delete-orphan",
)
@sqlalchemy.orm.declared_attr
def golden_run_id(cls) -> sqlalchemy.orm.Mapped[typing.Optional[int]]:
return sqlalchemy.Column(
sqlalchemy.ForeignKey(f"{cls.__tablename__}.{cls.ID_NAME}")
)
@sqlalchemy.orm.declared_attr
def session_id(cls) -> sqlalchemy.orm.Mapped[int]:
return sqlalchemy.Column(sqlalchemy.ForeignKey(cls.SESSION_CLASS_ID_LAMBDA()))
# a list of all the injections in this experiment
@sqlalchemy.orm.declared_attr
def injections(cls) -> sqlalchemy.orm.Mapped[typing.Sequence["Injection"]]:
return sqlalchemy.orm.relationship(
cls.INJECTION_CLASS_LAMBDA,
back_populates=cls.INJECTION_BACKPOPULATES_NAME,
foreign_keys=cls.INJECTION_FOREIGN_KEY_LAMBDA,
cascade="all, delete-orphan",
)
# no need for the dataclass if we are instantiating everything normally and we
# don't need other __magic__ methods from dataclass
@dataclasses.dataclass(init=True, repr=True, eq=True)
class Injection(PolymorphicMixin, CustomBase):
def EXPERIMENT_RUN_CLASS_ID_LAMBDA():
return ExperimentRun.id_
def EXPERIMENT_RUN_CLASS_LAMBDA():
return ExperimentRun
EXPERIMENT_RUN_BACKPOPULATES_NAME: typing.ClassVar[str] = "injections"
# NOTE: cascading does not work if it's not a mixin or an abstract class
# @sqlalchemy.orm.declared_attr.cascading
@sqlalchemy.orm.declared_attr
def experiment_run_id(cls) -> sqlalchemy.orm.Mapped[typing.Optional[int]]:
return sqlalchemy.Column(
sqlalchemy.ForeignKey(cls.EXPERIMENT_RUN_CLASS_ID_LAMBDA())
)
@sqlalchemy.orm.declared_attr
def experiment_run(cls) -> sqlalchemy.orm.Mapped[typing.Optional["ExperimentRun"]]:
return sqlalchemy.orm.relationship(
cls.EXPERIMENT_RUN_CLASS_LAMBDA,
back_populates=cls.EXPERIMENT_RUN_BACKPOPULATES_NAME,
)
@sqlalchemy.orm.declared_attr
def location(self) -> sqlalchemy.orm.Mapped[typing.Any]:
return sqlalchemy.Column(sqlalchemy.PickleType, nullable=False)
@sqlalchemy.orm.declared_attr
def internal_id(self) -> sqlalchemy.orm.Mapped[int]:
return sqlalchemy.Column(sqlalchemy.Integer, nullable=False)
@sqlalchemy.orm.declarative_mixin
class FaultBaseMixin(object):
pass
@dataclasses.dataclass(init=True, repr=True, eq=True)
class Fault(FaultBaseMixin, Injection):
# not needed
# PARENT_CLASS: typing.Type[CustomBaseClass] = Injection
# ID_NAME: str = "fault_id"
pass
@sqlalchemy.orm.declarative_mixin
class MonitorBaseMixin(object):
@sqlalchemy.orm.declared_attr
def payload(
self,
) -> sqlalchemy.orm.Mapped[typing.Optional[typing.Dict[typing.Any, typing.Any]]]:
return sqlalchemy.Column(
sqlalchemy.ext.mutable.MutableDict.as_mutable(sqlalchemy.PickleType)
)
@dataclasses.dataclass(init=True, repr=True, eq=True)
class Monitor(MonitorBaseMixin, Injection):
pass
def set_sqlite_pragma(dbapi_connection, connection_record) -> None:
# enable foreign keys
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
def pysqlite_begin_emission_fix_on_connect(dbapi_connection, connection_record) -> None:
# disable pysqlite's emitting of the BEGIN statement entirely.
# also stops it from emitting COMMIT before any DDL.
dbapi_connection.isolation_level = None
def sqlalchemy_begin_emission_pysqlite(conn) -> None:
# emit our own BEGIN
conn.exec_driver_sql("BEGIN")
# we call all the previous functions to attach all the event listeners to the engine
# if the listener already exists, we skip it
def fix_pysqlite(engine) -> None:
if not sqlalchemy.event.contains(engine, "connect", set_sqlite_pragma):
sqlalchemy.event.listen(engine, "connect", set_sqlite_pragma)
if not sqlalchemy.event.contains(
engine, "connect", pysqlite_begin_emission_fix_on_connect
):
sqlalchemy.event.listen(
engine, "connect", pysqlite_begin_emission_fix_on_connect
)
if not sqlalchemy.event.contains(
engine, "begin", sqlalchemy_begin_emission_pysqlite
):
sqlalchemy.event.listen(engine, "begin", sqlalchemy_begin_emission_pysqlite)
| 15,468
| 37.769424
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'classes',
'constants',
'dataclasses',
'enums',
'functions',
'imports',
'typings',
},
submod_attrs={
'classes': [
'IDGenerator',
'IDGeneratorSubclass',
'SkipIfErrorContextManager',
],
'dataclasses': [
'BaseInjectionLocation',
'BitFaultMaskInfo',
'BitIndexInfo',
'FaultLocation',
'FaultLocationMixin',
'InjectionLocationABC',
'LocationMixin',
'LocationModuleNameMixin',
'LocationOptionalMixin',
'MonitorLocation',
],
'enums': [
'BitFaultValue',
'BitWidth',
'DimensionType',
'Endianness',
'FaultMaskOperation',
'FaultMaskValue',
'HandlerStatus',
'MonitorMetric',
'ParameterType',
],
'functions': [
'camel_to_snake',
'get_object_library',
],
'imports': [
'compare_version',
'is_module_available',
],
'typings': [
'ActiveDimensionIndexType',
'AnyIndexType',
'AnyMaskType',
'ArrayType',
'DimensionDictType',
'DimensionIndexType',
'DimensionLocationIndexType',
'DimensionLocationMaskType',
'Index1DType',
'IndexMultiDType',
'IndexTimeType',
'LowLevelMaskArrayType',
'Mask1DType',
'MaskMultiDType',
'ModelType',
'PathType',
'ShapeType',
'TensorType',
],
},
)
def __dir__():
return __all__
__all__ = ['ActiveDimensionIndexType', 'AnyIndexType', 'AnyMaskType',
'ArrayType', 'BaseInjectionLocation', 'BitFaultMaskInfo',
'BitFaultValue', 'BitIndexInfo', 'BitWidth', 'DimensionDictType',
'DimensionIndexType', 'DimensionLocationIndexType',
'DimensionLocationMaskType', 'DimensionType', 'Endianness',
'FaultLocation', 'FaultLocationMixin', 'FaultMaskOperation',
'FaultMaskValue', 'HandlerStatus', 'IDGenerator',
'IDGeneratorSubclass', 'Index1DType', 'IndexMultiDType',
'IndexTimeType', 'InjectionLocationABC', 'LocationMixin',
'LocationModuleNameMixin', 'LocationOptionalMixin',
'LowLevelMaskArrayType', 'Mask1DType', 'MaskMultiDType',
'ModelType', 'MonitorLocation', 'MonitorMetric', 'ParameterType',
'PathType', 'ShapeType', 'SkipIfErrorContextManager', 'TensorType',
'camel_to_snake', 'classes', 'compare_version', 'constants',
'dataclasses', 'enums', 'functions', 'get_object_library',
'imports', 'is_module_available', 'typings']
# </AUTOGEN_INIT>
| 5,497
| 32.938272
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/utils/imports.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# here we could use importlib.resources but it does not provide the get_distribution
# method, so we keep using pkg_resources for now
# we can use importlib.metadata.distribution, as we only need the version from
# pkg_resources, or even importlib.metadata.version
import importlib.metadata
import importlib.util
import packaging.requirements
import packaging.specifiers
import packaging.version
import enpheeph.utils.enums
# we use the spec from importlib to check the availability of a library
# if it is not None it exists
def is_module_available(module_name: str) -> bool:
# we check the spec for the presence of a library
try:
return importlib.util.find_spec(name=module_name) is not None
except ModuleNotFoundError:
return False
# to compare version we use the packaging specifier which checks
# if the found version from the installed package is compatible with the given
# specifier
def compare_version(
module_name: str,
version_specifier: packaging.specifiers.SpecifierSet,
) -> bool:
if not is_module_available(module_name=module_name):
return False
version = packaging.version.parse(importlib.metadata.version(module_name))
return version_specifier.contains(version)
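# Minimal usage sketch added for illustration (not part of the original module):
# it checks that a hypothetical "torch>=1.10" requirement is satisfied by the
# installed environment; the function name and the specifier are assumptions.
def _example_version_check() -> bool:
    specifier = packaging.specifiers.SpecifierSet(">=1.10")
    return is_module_available(module_name="torch") and compare_version(
        module_name="torch", version_specifier=specifier
    )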
# for checking the availability we simply compare with the requirements
# for extra flags it is as easy as parsing a custom requirements and
# getting the specifier
_enpheeph_raw_requirements = importlib.metadata.requires("enpheeph")
ENPHEEPH_REQUIREMENTS: tuple[packaging.requirements.Requirement, ...] = tuple(
packaging.requirements.Requirement(_req)
for _req in (
_enpheeph_raw_requirements if _enpheeph_raw_requirements is not None else ()
)
)
MODULE_AVAILABILITY: dict[enpheeph.utils.enums.ImportName, bool] = {}
for _mod_enum in enpheeph.utils.enums.ImportName.__members__.values():
# we use next on filter as filter is a generator so using next we get the first
# value, which supposedly should also be the only one
_version_specifier: packaging.specifiers.SpecifierSet = next(
filter(lambda x: x.name == _mod_enum.value, ENPHEEPH_REQUIREMENTS)
).specifier
MODULE_AVAILABILITY[_mod_enum] = compare_version(
module_name=_mod_enum.value, version_specifier=_version_specifier
)
| 3,827
| 40.608696
| 84
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/integrations/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'pytorchlightning',
},
submod_attrs={
'pytorchlightning': [
'InjectionCallback',
'injectioncallback',
],
},
)
def __dir__():
return __all__
__all__ = ['InjectionCallback', 'injectioncallback', 'pytorchlightning']
# </AUTOGEN_INIT>
| 2,891
| 31.494382
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/integrations/pytorchlightning/injectioncallback.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import collections
import copy
import datetime
import typing
import warnings
import enpheeph.handlers.injectionhandler
import enpheeph.injections.plugins.storage.abc.storagepluginabc
import enpheeph.utils.imports
if (
enpheeph.utils.imports.MODULE_AVAILABILITY[
enpheeph.utils.imports.PYTORCH_LIGHTNING_NAME
]
or typing.TYPE_CHECKING
):
import pytorch_lightning
import pytorch_lightning.callbacks
# to suppress all warnings
warnings.filterwarnings("ignore")
class InjectionCallback(pytorch_lightning.callbacks.Callback):
experiment_time_start: typing.Optional[datetime.datetime]
first_golden_run: typing.Union[bool, int]
injection_handler: enpheeph.handlers.injectionhandler.InjectionHandler
metrics: typing.DefaultDict[
int, typing.DefaultDict[int, typing.DefaultDict[typing.Any, typing.Any]]
]
metrics_save_frequency: typing.Optional[int]
storage_plugin: typing.Optional[
(enpheeph.injections.plugins.storage.abc.storagepluginabc.StoragePluginABC)
]
test_epoch: int
def __init__(
self,
injection_handler: (enpheeph.handlers.injectionhandler.InjectionHandler),
storage_plugin: typing.Optional[
(enpheeph.injections.plugins.storage.abc.storagepluginabc.StoragePluginABC)
] = None,
        # number of batches between consecutive metric saves
# additionally we save at the end of each epoch
metrics_save_frequency: typing.Optional[int] = None,
# if True, we use the first test run as golden run
# otherwise, we expect it to be a valid id for the golden run reference
first_golden_run: typing.Union[bool, int] = True,
# extra session info
extra_session_info: typing.Optional[typing.Dict[typing.Any, typing.Any]] = None,
# extra experiment info which can be used to identify experiments
extra_experiment_info: typing.Optional[
typing.Dict[typing.Any, typing.Any]
] = None,
):
self.experiment_time_start = None
self.injection_handler = injection_handler
self.storage_plugin = storage_plugin
# this number is used to indicate how often to save the results
# in terms of batch index
self.metrics_save_frequency = metrics_save_frequency
self.first_golden_run = first_golden_run
self.extra_experiment_info = extra_experiment_info
self.extra_session_info = extra_session_info
self.test_epoch: int = 0
# we use a defaultdict inside a defaultdict, so that when we access epoch, batch
# we generate an empty dict
# when we save this metric in the storage, it becomes a normal dict with
# default_factory being reset to None
self.metrics: typing.DefaultDict[
int, typing.DefaultDict[int, typing.DefaultDict[typing.Any, typing.Any]]
] = collections.defaultdict(
# mypy has issues with nested defaultdict
lambda: collections.defaultdict(dict) # type: ignore[arg-type]
)
        # we create a new Session which will be closed on __del__
        # the storage plugin is optional, so we guard against it being None
        if self.storage_plugin is not None:
            self.storage_plugin.create_session(extra_session_info=extra_session_info)
    def __del__(self, *args, **kwargs):
        if self.storage_plugin is not None:
            self.storage_plugin.complete_session()
        # not needed
        # super().__del__(*args, **kwargs)
def on_test_start(
self,
trainer: pytorch_lightning.Trainer,
pl_module: pytorch_lightning.LightningModule,
) -> None:
self.test_epoch = 0
self.metrics = collections.defaultdict(
# mypy has issues with nested defaultdict
lambda: collections.defaultdict(dict) # type: ignore[arg-type]
)
self.injection_handler.setup(pl_module)
# FIXME: use a MockStorage implementation
# to allow this without checking for None
if self.storage_plugin is not None:
self.experiment_time_start = datetime.datetime.utcnow()
self.storage_plugin.create_experiment(
# we create an experiment with the active injections
injection_locations=[
inj.location for inj in self.injection_handler.active_injections
],
running=True,
# we enable the golden run for the first execution only if the flag is
# True
golden_run_flag=self.first_golden_run is True,
# we pass the id if the first_golden_run is an integer for the
# experiment id
# otherwise None to disable it
golden_run_id=self.first_golden_run
if isinstance(self.first_golden_run, int)
else None,
# we use UTC for dates as it is generic
start_time=self.experiment_time_start,
extra_experiment_info=self.extra_experiment_info,
)
            # this is True at most for the first test run, as afterwards we replace
            # it with the experiment id (an int)
if self.first_golden_run is True:
# casting as experiment_id is set, so it cannot be None
experiment_id = typing.cast(int, self.storage_plugin.experiment_id)
# we set the first_golden_run to the golden run id if the first test is
# a golden run
self.first_golden_run = experiment_id
def on_test_end(
self,
trainer: pytorch_lightning.Trainer,
pl_module: pytorch_lightning.LightningModule,
) -> None:
self.save_metrics(trainer, test_epoch=-1, batch_idx=-1)
self.test_epoch = 0
if self.storage_plugin is not None:
duration = (
datetime.datetime.utcnow() - self.experiment_time_start
if self.experiment_time_start is not None
else None
)
self.storage_plugin.complete_experiment(
total_duration=duration,
)
# we reset the start time
self.experiment_time_start = None
self.injection_handler.teardown(pl_module)
def on_test_epoch_start(
self,
trainer: pytorch_lightning.Trainer,
pl_module: pytorch_lightning.LightningModule,
) -> None:
pass
def on_test_epoch_end(
self,
trainer: pytorch_lightning.Trainer,
pl_module: pytorch_lightning.LightningModule,
) -> None:
self.save_metrics(trainer, test_epoch=self.test_epoch, batch_idx=-1)
self.test_epoch += 1
def on_test_batch_end(
self,
trainer: pytorch_lightning.Trainer,
pl_module: pytorch_lightning.LightningModule,
outputs: typing.Optional[pytorch_lightning.utilities.types.STEP_OUTPUT],
batch: typing.Any,
batch_idx: int,
dataloader_idx: int,
) -> None:
if (
self.metrics_save_frequency is not None
and not batch_idx % self.metrics_save_frequency
):
self.save_metrics(trainer, test_epoch=self.test_epoch, batch_idx=batch_idx)
def save_metrics(
self,
trainer: pytorch_lightning.Trainer,
# we use -1 for the final result, can be substituted by globally
# defined constant
test_epoch: int,
# we use -1 for the complete results at the end of the test
# it could be substituted by a fixed constant in the future
batch_idx: int,
) -> None:
# if the storage_plugin is None, we skip all the computations
if self.storage_plugin is not None:
# we save the metrics only if the storage is available
self.metrics[test_epoch][batch_idx] = copy.deepcopy(
# mypy has issues with nested defaultdict
# we need to save all the metrics, with progress bar < callback < logged
{
**trainer.progress_bar_metrics,
**trainer.callback_metrics,
**trainer.logged_metrics,
}
)
self.metrics[test_epoch][batch_idx] = {
k: v.item() for k, v in self.metrics[test_epoch][batch_idx].items()
}
# we copy the metrics, so we can change the defaultdict behaviour without
# changing the original
metrics = copy.deepcopy(self.metrics)
# we remove all the default factories so that a missing key gives KeyError
metrics.default_factory = None
for el in metrics.values():
el.default_factory = None
self.storage_plugin.add_experiment_metrics(metrics)
| 10,215
| 38.444015
| 88
|
py
|
enpheeph
|
enpheeph-main/src/enpheeph/integrations/pytorchlightning/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# we ignore mypy/flake8/black as this file is autogenerated
# we ignore this specific error because of AUTOGEN_INIT
# mypy: ignore-errors
# the following flake8 syntax is wrong, as it will be read as generic noqa, but we use
# it to remember the errors appearing in the __init__.py
# additionally this is not caught by pygrep-hooks as it counts only "type: ignore" and
# "noqa", both with starting #
# flake8: noqa: E302,E305
# fmt: off
# this is required so that the mkinit script will generate the init imports only in this
# section
# <AUTOGEN_INIT>
def lazy_import(module_name, submodules, submod_attrs):
import importlib
import os
name_to_submod = {
func: mod for mod, funcs in submod_attrs.items()
for func in funcs
}
def __getattr__(name):
if name in submodules:
attr = importlib.import_module(
'{module_name}.{name}'.format(
module_name=module_name, name=name)
)
elif name in name_to_submod:
submodname = name_to_submod[name]
module = importlib.import_module(
'{module_name}.{submodname}'.format(
module_name=module_name, submodname=submodname)
)
attr = getattr(module, name)
else:
raise AttributeError(
'No {module_name} attribute {name}'.format(
module_name=module_name, name=name))
globals()[name] = attr
return attr
if os.environ.get('EAGER_IMPORT', ''):
for name in name_to_submod.values():
__getattr__(name)
for attrs in submod_attrs.values():
for attr in attrs:
__getattr__(attr)
return __getattr__
__getattr__ = lazy_import(
__name__,
submodules={
'injectioncallback',
},
submod_attrs={
'injectioncallback': [
'InjectionCallback',
],
},
)
def __dir__():
return __all__
__all__ = ['InjectionCallback', 'injectioncallback']
# </AUTOGEN_INIT>
| 2,840
| 31.284091
| 88
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/comparisons/pytorchfi/pytorchfi_results/script.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
from pytorchfi.core import fault_injection as pfi_core
import datetime
import random
class AlexNet(nn.Module):
"""
AlexNet for CIFAR10. FC layers are removed. Paddings are adjusted.
Without BN, the start learning rate should be 0.01
(c) YANG, Wei
"""
def __init__(self, num_classes=10):
super(AlexNet, self).__init__()
self.features = nn.Sequential(
nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=5),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2, stride=2),
nn.Conv2d(64, 192, kernel_size=5, padding=2),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2, stride=2),
nn.Conv2d(192, 384, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(384, 256, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(256, 256, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2, stride=2),
)
self.classifier = nn.Linear(256, num_classes)
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
def alexnet(**kwargs):
"""
AlexNet model architecture from the
`"One weird trick..." <https://arxiv.org/abs/1404.5997>`_ paper.
"""
model = AlexNet(**kwargs)
return model
class Custom_Sampler(torch.utils.data.Sampler):
def __init__(self, data):
self.data = data
def __iter__(self):
return iter(self.data)
def __len__(self):
return len(self.data)
def _get_custom_sampler(singleIndex, total):
indices = random.choices([singleIndex], k=total)
return Custom_Sampler(indices)
def main(reps=100):
torch.manual_seed(0)
batchsize = 10000
workers = 1
channels = 3
img_size = 32
transform = transforms.Compose(
[
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
]
)
testset = torchvision.datasets.CIFAR10(
root="/shared/ml/datasets/vision/CIFAR10/",
train=False,
download=True,
transform=transform,
)
custom_sampler = _get_custom_sampler(0, batchsize)
val_loader = torch.utils.data.DataLoader(
testset,
batch_size=batchsize,
shuffle=False,
num_workers=workers,
sampler=custom_sampler,
)
model = alexnet(num_classes=10)
golden_times = []
for _i in range(reps):
model.eval().cuda()
golden_outputs = []
time_now = datetime.datetime.utcnow()
with torch.no_grad():
for imgs, _label in iter(val_loader):
imgs = imgs.cuda()
golden_outputs.append(model(imgs))
print(f"Golden Time Execution: {datetime.datetime.utcnow() - time_now}")
# print(len(golden_outputs))
# print(golden_outputs[0].shape)
golden_times.append(str(datetime.datetime.utcnow() - time_now))
batch_i = list(range(batchsize))
layer_i = [0] * batchsize
c_i = [0] * batchsize
h_i = [1] * batchsize
w_i = [1] * batchsize
inj_value_i = [10000.0] * batchsize
inj = pfi_core(
model,
batchsize,
input_shape=[channels, img_size, img_size],
use_cuda=True,
)
corrupt_times = []
for _i in range(reps):
corrupt_outputs = []
time_now = datetime.datetime.utcnow()
with torch.no_grad():
for imgs, _label in iter(val_loader):
corrupt_model = inj.declare_neuron_fi(
batch=batch_i,
layer_num=layer_i,
dim1=c_i,
dim2=h_i,
dim3=w_i,
value=inj_value_i,
)
corrupt_model.eval().cuda()
imgs = imgs.cuda()
corrupt_outputs.append(corrupt_model(imgs))
print(f"Corrupt Time Execution: {datetime.datetime.utcnow() - time_now}")
# print(len(corrupt_outputs))
# print(corrupt_outputs[0].shape)
corrupt_times.append(str(datetime.datetime.utcnow() - time_now))
counter = 0
for g_out, c_out in zip(golden_outputs, corrupt_outputs):
if torch.all(c_out.eq(g_out)):
counter += 1
# print(f"Correct: {counter / len(golden_outputs)}")
print("golden," + ",".join(golden_times))
print("corrupt," + ",".join(corrupt_times))
if __name__ == "__main__":
main(reps=100)
| 6,245
| 29.617647
| 85
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/comparisons/tensorfi2/alexnet-cifar10.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import sys
import typing
import flash
import flash.image
import pytorch_lightning
import torch
import torchmetrics
import torchvision
import enpheeph
import enpheeph.injections.plugins.indexing.indexingplugin
CURRENT_DIR = pathlib.Path(__file__).absolute().parent
RESULTS_DIRECTORY = CURRENT_DIR / "results" / "alexnet-cifar10"
WEIGHTS_FILE = RESULTS_DIRECTORY / "weights" / "alexnet-cifar10.pt"
LOG_DIRECTORY = RESULTS_DIRECTORY / "injection_results"
WEIGHTS_FILE.parent.mkdir(parents=True, exist_ok=True)
LOG_DIRECTORY.mkdir(parents=True, exist_ok=True)
CIFAR_DIRECTORY = pathlib.Path("/shared/ml/datasets/vision/") / "CIFAR10"
class AlexNetLightningModule(pytorch_lightning.LightningModule):
def __init__(self, pretrained: bool = True, num_classes: int = 1000) -> None:
super().__init__()
self.num_classes = num_classes
self.pretrained = pretrained
self.model = torchvision.models.AlexNet(num_classes=num_classes)
if self.pretrained:
# must be accessed with sys.modules otherwise it uses the function
# which is imported from the sub-module
# we use type: ignore as mypy cannot check torchvision typings
# we have to split it otherwise black creates problems
mod = sys.modules["torchvision.models.alexnet"]
state_dict = torch.hub.load_state_dict_from_url(
mod.model_urls["alexnet"], # type: ignore[attr-defined]
progress=True,
)
# we must filter the mismatching keys in the state dict
# we generate the current model state dict
model_state_dict = self.model.state_dict()
filtered_state_dict = {
k: v_new
# we select the new value if the dimension is the same as with the old
# one
if v_new.size() == v_old.size()
# otherwise we use the initialized one from the model
else v_old
for (k, v_old), v_new in zip(
model_state_dict.items(),
state_dict.values(),
)
}
self.model.load_state_dict(filtered_state_dict, strict=False)
self.normalizer_fn = torch.nn.Softmax(dim=-1)
self.accuracy_fn = torchmetrics.Accuracy()
self.loss_fn = torch.nn.CrossEntropyLoss()
self.save_hyperparameters()
# we initialize the weights
self.init_weights()
def init_weights(self) -> None:
# this initialization is similar to the ResNet one
# taken from https://github.com/Lornatang/AlexNet-PyTorch/
# @ alexnet_pytorch/model.py#L63
for m in self.modules():
if isinstance(m, torch.nn.Conv2d):
torch.nn.init.kaiming_normal_(
m.weight, mode="fan_out", nonlinearity="relu"
)
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.BatchNorm2d):
torch.nn.init.constant_(m.weight, 1)
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.Linear):
torch.nn.init.normal_(m.weight, 0, 0.01)
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
def forward(self, inpt: torch.Tensor) -> torch.Tensor:
return self.model(inpt)
def configure_optimizers(self) -> torch.optim.Optimizer:
optimizer = torch.optim.SGD(self.parameters(), lr=1e-2)
return optimizer
def inference(
self,
batch: typing.Union[
torch.Tensor,
typing.Dict[flash.core.data.data_source.DefaultDataKeys, torch.Tensor],
],
batch_idx: int,
) -> typing.Dict[str, torch.Tensor]:
        # we need to check whether the batch is a flash batch (a dict) or a standard
        # tuple, as otherwise it may not be compatible
if isinstance(batch, dict):
x = batch.get(flash.core.data.data_source.DefaultDataKeys.INPUT, None)
y = batch.get(flash.core.data.data_source.DefaultDataKeys.TARGET, None)
if x is None or y is None:
raise ValueError("Incompatible input for the batch")
else:
x, y = batch
output = self.forward(x)
return {
"loss": self.loss_fn(output, y),
"accuracy": self.accuracy_fn(self.normalizer_fn(output), y),
}
def training_step(
self,
batch: typing.Union[
torch.Tensor,
typing.Dict[flash.core.data.data_source.DefaultDataKeys, torch.Tensor],
],
batch_idx: int,
) -> torch.Tensor:
res = self.inference(batch, batch_idx)
self.log_dict(
{"train_loss": res["loss"], "train_accuracy": res["accuracy"]},
prog_bar=True,
on_step=True,
on_epoch=True,
logger=True,
)
return res["loss"]
def validation_step(
self,
batch: typing.Union[
torch.Tensor,
typing.Dict[flash.core.data.data_source.DefaultDataKeys, torch.Tensor],
],
batch_idx: int,
) -> None:
res = self.inference(batch, batch_idx)
self.log_dict(
{"val_loss": res["loss"], "val_accuracy": res["accuracy"]},
prog_bar=True,
on_step=True,
on_epoch=True,
logger=True,
)
def test_step(
self,
batch: typing.Union[
torch.Tensor,
typing.Dict[flash.core.data.data_source.DefaultDataKeys, torch.Tensor],
],
batch_idx: int,
) -> None:
res = self.inference(batch, batch_idx)
self.log_dict(
{"test_loss": res["loss"], "test_accuracy": res["accuracy"]},
prog_bar=True,
on_step=True,
on_epoch=True,
logger=True,
)
pytorch_lightning.seed_everything(seed=41, workers=True)
storage_plugin = enpheeph.injections.plugins.storage.SQLiteStoragePlugin(
db_url="sqlite:///" + str(LOG_DIRECTORY / "database.sqlite")
)
pytorch_mask_plugin = enpheeph.injections.plugins.NumPyPyTorchMaskPlugin()
pytorch_handler_plugin = enpheeph.handlers.plugins.PyTorchHandlerPlugin()
monitor_1 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
module_name="model.features.0",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: ...,
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=None,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.indexingplugin.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
fault_1 = enpheeph.injections.OutputPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
module_name="model.features.0",
parameter_type=enpheeph.utils.enums.ParameterType.Weight,
parameter_name="weight",
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: (
...,
0,
0,
),
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=[10, 16, 31],
bit_fault_value=enpheeph.utils.enums.BitFaultValue.StuckAtOne,
),
low_level_torch_plugin=pytorch_mask_plugin,
indexing_plugin=enpheeph.injections.plugins.indexing.indexingplugin.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
monitor_2 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
module_name="model.features.0",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: ...,
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=None,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.indexingplugin.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
monitor_3 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
module_name="model.classifier.1",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: (slice(10, 100),),
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=None,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.indexingplugin.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
fault_2 = enpheeph.injections.OutputPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
module_name="model.classifier.1",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: (slice(10, 100),),
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=...,
bit_fault_value=enpheeph.utils.enums.BitFaultValue.StuckAtOne,
),
low_level_torch_plugin=pytorch_mask_plugin,
indexing_plugin=enpheeph.injections.plugins.indexing.indexingplugin.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
monitor_4 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
module_name="model.classifier.1",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: (slice(10, 100),),
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=None,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.indexingplugin.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
injection_handler = enpheeph.handlers.InjectionHandler(
injections=[monitor_1, fault_1, monitor_2, monitor_3, fault_2, monitor_4],
library_handler_plugin=pytorch_handler_plugin,
)
callback = enpheeph.integrations.pytorchlightning.InjectionCallback(
injection_handler=injection_handler,
storage_plugin=storage_plugin,
)
trainer = pytorch_lightning.Trainer(
callbacks=[callback],
deterministic=True,
enable_checkpointing=False,
max_epochs=10,
# one can use gpu but some functions will not be deterministic, so deterministic
# must be set to False
accelerator="cpu",
devices=1,
# if one uses spawn or dp it will fail as sqlite connector is not picklable
# strategy="ddp",
)
model = AlexNetLightningModule(num_classes=10, pretrained=False)
# transform = torchvision.transforms.Compose(
# [
# #torchvision.transforms.ToTensor(),
# torchvision.transforms.Normalize(
# (0.5, 0.5, 0.5),
# (0.5, 0.5, 0.5),
# ),
# torchvision.transforms.RandomHorizontalFlip(),
# ]
# )
cifar_train = torchvision.datasets.CIFAR10(
str(CIFAR_DIRECTORY),
train=True,
download=True,
)
cifar_test = torchvision.datasets.CIFAR10(
str(CIFAR_DIRECTORY),
train=False,
download=True,
)
datamodule = flash.image.ImageClassificationData.from_datasets(
train_dataset=cifar_train,
test_dataset=cifar_test,
val_split=0.2,
num_workers=64,
batch_size=32,
)
if not WEIGHTS_FILE.exists():
trainer.fit(
model,
train_dataloaders=datamodule.train_dataloader(),
val_dataloaders=datamodule.val_dataloader(),
)
trainer.save_checkpoint(str(WEIGHTS_FILE))
model = model.load_from_checkpoint(str(WEIGHTS_FILE))
# no injections/monitors
print("\n\nBaseline, no injection or monitors\n")
trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)
# we enable only the monitors
# we use this as baseline, no injections
callback.injection_handler.activate([monitor_1, monitor_2, monitor_3, monitor_4])
print("\n\nBaseline, no injection, only monitors\n")
trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)
# we enable the faults
callback.injection_handler.activate([fault_1, fault_2])
print("\n\nWeight + activation injection\n")
trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)
# we disable the faults
callback.injection_handler.deactivate([fault_1, fault_2])
print("\n\nBaseline again, no injection, only monitors\n")
# we test again to reach same results as before injection
trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)
| 14,253
| 34.108374
| 87
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/injector_script.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import argparse
import collections.abc
import datetime
import functools
import importlib
import operator
import os
import pathlib
import random
import sys
import typing
import flash
import pytorch_lightning
import torch
import torch.quantization
import torchinfo
import enpheeph.injections.fpquantizedoutputpytorchfault
import enpheeph.injections.monitorabc
# for pickle to avoid explosion
if str(pathlib.Path(__file__).parent / "results/configs/snn_training") not in sys.path:
sys.path.append(str(pathlib.Path(__file__).parent / "results/configs/snn_training"))
sys.path.pop()
CURRENT_DIR = pathlib.Path(__file__).absolute().parent
RESULTS_DIRECTORY = CURRENT_DIR / "results"
DATASET_DIRECTORY = pathlib.Path("/shared/ml/datasets/vision/")
# it overwrites the keys with the new value
def recursive_dict_update(original: typing.Dict, mergee: typing.Dict) -> typing.Dict:
for k, v in mergee.items():
if k in original and isinstance(original[k], collections.abc.Mapping):
original[k] = recursive_dict_update(original[k], v)
else:
original[k] = v
return original
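# Small illustration added for clarity (not part of the original script) of the
# in-place merge semantics: nested mappings are merged recursively, while any
# other value is simply overwritten by the mergee; the function name is assumed.
def _example_recursive_dict_update() -> None:
    merged = recursive_dict_update(
        {"a": {"x": 1, "y": 2}, "b": 3},
        {"a": {"y": 20}, "c": 4},
    )
    assert merged == {"a": {"x": 1, "y": 20}, "b": 3, "c": 4}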
def safe_recursive_instantiate_dict(config: typing.Any) -> typing.Any:
# if we have a mapping-like, e.g. dict, we check whether it must be directly
# instantiated
# if yes we return the final object, otherwise we call the function on each
# value in the dict
if isinstance(config, collections.abc.Mapping):
        # we require exactly the two keys "callable" and "callable_args";
        # any other mapping is traversed recursively instead of being instantiated
        if set(config.keys()) == {"callable", "callable_args"}:
# we need to pass the instantiated version of the config dict
return config["callable"](
**safe_recursive_instantiate_dict(config["callable_args"])
)
# otherwise we create a copy and we instantiate each value with the
# corresponding key
# copy.deepcopy does not work, we skip it
new_config = config
for key, value in config.items():
new_config[key] = safe_recursive_instantiate_dict(value)
return new_config
# if we have a sequence-like, e.g. list, we create the same class
# where each element is instantiated
elif isinstance(config, (list, tuple, set)):
new_config = config.__class__(
[safe_recursive_instantiate_dict(v) for v in config]
)
return new_config
# if we have a generic element, e.g. str, we return it as-is
else:
return config
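# Small illustration added for clarity (not part of the original script): a dict
# with exactly the keys "callable" and "callable_args" is instantiated directly,
# while other containers are traversed; datetime.timedelta is a stand-in callable
# used only for this sketch.
def _example_safe_recursive_instantiate() -> None:
    config = {
        "duration": {
            "callable": datetime.timedelta,
            "callable_args": {"seconds": 30},
        },
        "values": [1, 2, 3],
    }
    instantiated = safe_recursive_instantiate_dict(config)
    assert instantiated["duration"] == datetime.timedelta(seconds=30)
    assert instantiated["values"] == [1, 2, 3]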
def compute_layer_module_name(
layer: torchinfo.layer_info.LayerInfo,
) -> str:
# with this while loop we compute the layer name from the layer itself
# we simply join all the parent variable names until we reach the main model
module_name = layer.var_name
p = layer.parent_info
# we need to skip the main model as it would add an extra dot
# we can find it as its depth is 0
while p is not None and p.depth > 0:
module_name = p.var_name + "." + module_name
p = p.parent_info
return module_name
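# Hedged sketch added for illustration (not part of the original script): given a
# torchinfo summary of a model, the helper above recovers the dotted attribute
# path used as module_name by the injections below; the function name and the
# input size are assumptions.
def _example_layer_names(model: torch.nn.Module) -> typing.List[str]:
    summary = torchinfo.summary(model, input_size=(1, 3, 224, 224), verbose=0)
    return [
        compute_layer_module_name(layer)
        for layer in summary.summary_list
        if layer.is_leaf_layer
    ]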
# we can create the injections
def create_injections_for_layer_with_randomness_value(
config: typing.Dict[str, typing.Any],
layer: torchinfo.layer_info.LayerInfo,
randomness: float,
) -> typing.Generator[enpheeph.utils.data_classes.InjectionLocationABC, None, None]:
module_name = compute_layer_module_name(layer=layer)
# we check if the layer is ok to run a fault injection on
if not layer.is_leaf_layer or not layer.executed:
return []
print(f"Layer: {module_name}\nRandomness: {randomness}\n\n")
injections = []
# inj_type = "activation"
# inj_type = "quantized_activation"
inj_type = "sparse_activation"
# inj_type = "weight"
if inj_type == "activation":
# we multiply by a very small number > 1 to increase the range and cover also 1
# we skip the batch size as the first dimension
shape = layer.output_size[1:]
if (
config.get("injection_config", {}).get(
"indexing_dimension_dict",
enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
)
!= enpheeph.utils.constants.PYTORCH_DIMENSION_DICT
):
# we remove the extra time dimension if it is an SNN
shape = shape[1:]
mask = torch.rand(*shape, device="cpu") * 1.00000001 <= randomness
inj = enpheeph.injections.OutputPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
module_name=module_name,
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Batch: ...,
enpheeph.utils.enums.DimensionType.Time: ...,
},
dimension_mask={
enpheeph.utils.enums.DimensionType.Tensor: mask.tolist(),
},
bit_index=random.sample(
list(range(config.get("injection_config", {}).get("bitwidth", 32))),
1,
),
bit_fault_value=enpheeph.utils.enums.BitFaultValue.BitFlip,
),
low_level_torch_plugin=enpheeph.injections.plugins.mask.autopytorchmaskplugin.AutoPyTorchMaskPlugin(),
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=config.get("injection_config", {}).get(
"indexing_dimension_dict",
enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
),
)
elif inj_type == "sparse_activation":
shape = layer.output_size[1:]
approx_n_elements = functools.reduce(operator.mul, shape)
inj = enpheeph.injections.DenseSparseOutputPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
module_name=module_name,
parameter_type=enpheeph.utils.enums.ParameterType.Activation
| enpheeph.utils.enums.ParameterType.Sparse
| enpheeph.utils.enums.ParameterType.Value,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: random.sample(
list(range(approx_n_elements)),
abs(int((random.random() - randomness) * approx_n_elements)),
),
},
dimension_mask=None,
bit_index=random.sample(
list(range(config.get("injection_config", {}).get("bitwidth", 32))),
1,
),
bit_fault_value=enpheeph.utils.enums.BitFaultValue.BitFlip,
),
low_level_torch_plugin=enpheeph.injections.plugins.mask.autopytorchmaskplugin.AutoPyTorchMaskPlugin(),
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=config.get("injection_config", {}).get(
"indexing_dimension_dict",
enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
),
)
elif inj_type == "quantized_activation":
# we multiply by a very small number > 1 to increase the range and cover also 1
# we skip the batch size as the first dimension
shape = layer.output_size[1:]
if (
config.get("injection_config", {}).get(
"indexing_dimension_dict",
enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
)
!= enpheeph.utils.constants.PYTORCH_DIMENSION_DICT
):
# we remove the extra time dimension if it is an SNN
shape = shape[1:]
mask = torch.rand(*shape, device="cpu") * 1.00000001 <= randomness
inj = enpheeph.injections.fpquantizedoutputpytorchfault.FPQuantizedOutputPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
module_name=module_name,
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Batch: ...,
enpheeph.utils.enums.DimensionType.Time: ...,
},
dimension_mask={
enpheeph.utils.enums.DimensionType.Tensor: mask.tolist(),
},
bit_index=random.sample(
list(range(config.get("injection_config", {}).get("bitwidth", 32))),
1,
),
bit_fault_value=enpheeph.utils.enums.BitFaultValue.BitFlip,
),
low_level_torch_plugin=enpheeph.injections.plugins.mask.autopytorchmaskplugin.AutoPyTorchMaskPlugin(),
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=config.get("injection_config", {}).get(
"indexing_dimension_dict",
enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
),
)
elif inj_type == "weight":
# we multiply by a very small number > 1 to increase the range and cover also 1
# we skip the batch size as the first dimension
mask = (
torch.rand(*layer.module.weight.shape, device="cpu") * 1.00000001
<= randomness
)
inj = enpheeph.injections.WeightPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
module_name=module_name,
parameter_type=enpheeph.utils.enums.ParameterType.Weight,
parameter_name="weight",
dimension_index={
enpheeph.utils.enums.DimensionType.Batch: ...,
enpheeph.utils.enums.DimensionType.Time: ...,
},
dimension_mask={
enpheeph.utils.enums.DimensionType.Tensor: mask.tolist(),
},
bit_index=random.sample(
list(range(config.get("injection_config", {}).get("bitwidth", 32))),
1,
),
bit_fault_value=enpheeph.utils.enums.BitFaultValue.BitFlip,
),
low_level_torch_plugin=enpheeph.injections.plugins.mask.autopytorchmaskplugin.AutoPyTorchMaskPlugin(),
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=config.get("injection_config", {}).get(
"indexing_dimension_dict",
enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
),
)
injections.append(inj)
return injections
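# A usage sketch (illustrative; it mirrors the loop in ``main`` below, where
# the ``layer`` entries come from ``torchinfo.summary(...).summary_list``):
#
#     layer = summary.summary_list[0]
#     injections = create_injections_for_layer_with_randomness_value(
#         config=config, layer=layer, randomness=0.01
#     )
#     config["injection_handler"].add_injections(injections)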
def setup_argument_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
"--config",
type=pathlib.Path,
required=True,
)
parser.add_argument(
"--model-weight-file",
type=pathlib.Path,
required=True,
)
parser.add_argument(
"--storage-file",
type=pathlib.Path,
required=True,
)
parser.add_argument(
"--csv-results",
type=pathlib.Path,
default=pathlib.Path(os.devnull),
)
mutex_quantize_group = parser.add_mutually_exclusive_group()
mutex_quantize_group.add_argument(
"--static-quantize",
action="store_true",
)
mutex_quantize_group.add_argument(
"--dynamic-quantize",
action="store_true",
)
mutex_device_group = parser.add_mutually_exclusive_group()
mutex_device_group.add_argument(
"--cpu",
action="store_true",
)
mutex_device_group.add_argument(
"--gpu",
action="store_true",
)
injection_type_group = parser.add_mutually_exclusive_group()
injection_type_group.add_argument(
"--random",
action="store_true",
)
injection_type_group.add_argument(
"--custom",
action="store_true",
)
return parser
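# Example invocation (a sketch: the script name and all paths are placeholders,
# and at most one option per mutually exclusive group can be passed):
#
#     python run_injections.py \
#         --config configs/image_classification_config.py \
#         --model-weight-file weights/model.ckpt \
#         --storage-file results/injections.sqlite \
#         --csv-results results/results.csv \
#         --gpu --random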
def main(args=None):
parser = setup_argument_parser()
namespace = parser.parse_args(args=args)
# here we append the path of the configuration to sys.path so that it can
# be easily imported
sys.path.append(str(namespace.config.parent))
# we import the module by taking its name
config_module = importlib.import_module(namespace.config.with_suffix("").name)
# we select the devices on which we run the simulation
if namespace.gpu:
gpu_config = importlib.import_module("gpu_config")
device_config = gpu_config.config()
elif namespace.cpu:
cpu_config = importlib.import_module("cpu_config")
device_config = cpu_config.config()
else:
device_config = {}
if namespace.random:
random_config = importlib.import_module("random_multi_config")
injection_config = random_config.config()
else:
injection_config = {}
# we remove the previously appended path to leave it as is
sys.path.pop()
# we instantiate the config from the imported module
initial_config = config_module.config(
dataset_directory=DATASET_DIRECTORY,
model_weight_file=namespace.model_weight_file,
storage_file=namespace.storage_file,
)
config = recursive_dict_update(initial_config, device_config)
config = recursive_dict_update(initial_config, injection_config)
config = safe_recursive_instantiate_dict(config)
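    # note: the config modules below describe objects with the
    # {"callable": ..., "callable_args": {...}} convention; the instantiation
    # step above is assumed to walk the dict and replace each such entry with
    # callable(**callable_args), e.g. (sketch):
    #
    #     {"callable": flash.Trainer, "callable_args": {"max_epochs": 1}}
    #     # -> flash.Trainer(max_epochs=1)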
pytorch_lightning.seed_everything(**config.get("seed_everything", {}))
trainer = config["trainer"]
model = config["model"]
# model = config["model_post_init"](model)
datamodule = config["datamodule"]
# if the static quantization was selected
# we train the model for an additional epoch (set in the default trainer config)
# to be able to create the proper static quantization weights + activations
# **NOTE**: static quantization is not supported on GPU
if namespace.static_quantize:
config["injection_handler"].deactivate()
trainer.callbacks.append(
pytorch_lightning.callbacks.QuantizationAwareTraining()
)
trainer.fit(
model,
datamodule=datamodule,
)
# with the dynamic quantization we quantize only the weights by a fixed
# configuration
# **NOTE**: dynamic quantization does not work on GPU
elif namespace.dynamic_quantize:
model = torch.quantization.quantize_dynamic(
model,
qconfig_spec=config.get("dynamic_quantization_config", {}).get(
"qconfig",
{
torch.nn.Linear,
torch.nn.LSTM,
torch.nn.GRU,
torch.nn.LSTMCell,
torch.nn.RNNCell,
torch.nn.GRUCell,
torch.nn.EmbeddingBag,
},
),
dtype=config.get("dynamic_quantization_config", {}).get(
"qdtype",
torch.qint8,
),
# we need to force in-place otherwise Flash Models cannot be deep-copied
inplace=True,
)
print("\n\nNo injections at all\n\n")
config["injection_handler"].deactivate()
time = datetime.datetime.utcnow()
res = trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)[
0
] # we have only one test dataloader
execution_time = datetime.datetime.utcnow() - time
namespace.csv_results.parent.mkdir(parents=True, exist_ok=True)
with namespace.csv_results.open("a") as f:
f.write(
f"randomness,layer_name,execution_time,{','.join(str(x) for x in res.keys())}\n"
)
f.write(
f"0,-,{str(execution_time.total_seconds())},{','.join(str(x) for x in res.values())}\n"
)
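    # the appended CSV therefore looks roughly like this (illustrative values;
    # the metric columns depend on what trainer.test reports for the model):
    #
    #     randomness,layer_name,execution_time,<metric_1>,<metric_2>,...
    #     0,-,12.34,0.91,0.35,...
    #     custom,custom,13.02,0.74,0.81,...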
if config.get("injection_config", {}).get("custom", True):
        # run a monitors-only test pass, but only if there is at least one monitor to activate
if config["injection_handler"].activate(
[
monitor
for monitor in config["injection_handler"].injections
if isinstance(monitor, enpheeph.injections.monitorabc.MonitorABC)
]
):
print("\n\nOnly monitors\n\n")
time = datetime.datetime.utcnow()
res = trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)[
0
] # we have only one test dataloader
execution_time = datetime.datetime.utcnow() - time
with namespace.csv_results.open("a") as f:
f.write(
f"0,-,{str(execution_time.total_seconds())},{','.join(str(x) for x in res.values())}\n"
)
print("\n\nAll injections\n\n")
config["injection_handler"].activate()
time = datetime.datetime.utcnow()
res = trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)[
0
] # we have only one test dataloader
execution_time = datetime.datetime.utcnow() - time
with namespace.csv_results.open("a") as f:
f.write(
f"custom,custom,{str(execution_time.total_seconds())},{','.join(str(x) for x in res.values())}\n"
)
else:
inp = next(iter(datamodule.test_dataloader()))
if isinstance(inp, dict):
inp = inp[flash.core.data.data_source.DefaultDataKeys.INPUT]
shape = list(inp.shape)
else:
inp = inp[0]
shape = list(inp.shape)
shape[1] = 1
# otherwise it does not work for SNNs
shape[0] = 1
# we take the shape from the datamodule
summary = torchinfo.summary(model=model, input_size=shape, device="cpu")
#
allowed_layers = config.get("injection_config", {}).get("layers", None)
for r in config.get("injection_config", {}).get("randomness", []):
for layer in summary.summary_list:
if (
allowed_layers is not None
and compute_layer_module_name(layer) not in allowed_layers
):
continue
config["injection_handler"].remove_injections()
injections = create_injections_for_layer_with_randomness_value(
config=config, layer=layer, randomness=r
)
config["injection_handler"].add_injections(injections)
config["injection_handler"].deactivate()
            # run a monitors-only test pass, but only if there is at least one monitor to activate
if config["injection_handler"].activate(
[
monitor
for monitor in config["injection_handler"].injections
if isinstance(
monitor, enpheeph.injections.monitorabc.MonitorABC
)
]
):
print("\n\nOnly monitors\n\n")
time = datetime.datetime.utcnow()
res = trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)[
0
] # we have only one test dataloader
execution_time = datetime.datetime.utcnow() - time
with namespace.csv_results.open("a") as f:
f.write(
f"0,-,{str(execution_time.total_seconds())},{','.join(str(x) for x in res.values())}\n"
)
if config["injection_handler"].activate():
print("\n\nAll injections\n\n")
time = datetime.datetime.utcnow()
res = trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)[
0
] # we have only one test dataloader
execution_time = datetime.datetime.utcnow() - time
with namespace.csv_results.open("a") as f:
f.write(
f"{str(r)},{compute_layer_module_name(layer)},{str(execution_time.total_seconds())},{','.join(str(x) for x in res.values())}\n"
)
print("\n\nAgain no injections at all\n\n")
config["injection_handler"].deactivate()
time = datetime.datetime.utcnow()
res = trainer.test(
model,
dataloaders=datamodule.test_dataloader(),
)[
0
] # we have only one test dataloader
execution_time = datetime.datetime.utcnow() - time
with namespace.csv_results.open("a") as f:
f.write(
f"0,-,{str(execution_time.total_seconds())},{','.join(str(x) for x in res.values())}\n"
)
if __name__ == "__main__":
main()
| 22,693
| 38.605585
| 155
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/injection_results/csv_min_randomness_parser.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import sys
import pandas
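# Usage sketch (positional arguments; paths are placeholders):
#
#     python csv_min_randomness_parser.py results.csv parsed/
#
# For every distinct value of the "randomness" column, the row with the lowest
# "test_accuracy" is kept; the selected rows are written to parsed/randomness.csv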
csv_path = pathlib.Path(sys.argv[1])
dest_folder = pathlib.Path(sys.argv[2])
dest_folder.mkdir(parents=True, exist_ok=True)
csv = pandas.read_csv(csv_path)
split_csv = {}
for r in csv["randomness"].unique().tolist():
split_csv[r] = csv[csv["randomness"] == r]
randomness_csv = {}
for r, c in split_csv.items():
randomness_csv[r] = c.iloc[[c["test_accuracy"].argmin()]]
randomness = pandas.concat(randomness_csv.values())
randomness.to_csv(dest_folder / "randomness.csv")
| 1,307
| 32.538462
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/injection_results/csv_parser.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import sys
import pandas
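# Usage sketch (positional arguments; paths are placeholders):
#
#     python csv_parser.py results.csv per_layer/
#
# The input CSV is split by the "layer_name" column and each group is written
# to its own file, per_layer/<layer_name>.csv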
csv_path = pathlib.Path(sys.argv[1])
dest_folder = pathlib.Path(sys.argv[2])
dest_folder.mkdir(parents=True, exist_ok=True)
csv = pandas.read_csv(csv_path)
split_csv = {}
for l in csv["layer_name"].unique().tolist():
split_csv[l] = csv[csv["layer_name"] == l]
for l, c in split_csv.items():
c.to_csv(dest_folder / (l + ".csv"))
| 1,162
| 33.205882
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/snn_random_multi_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import enpheeph.injections
import enpheeph.injections.plugins
import random_multi_config
import snn_dvsgesture_config
def config(
*,
dataset_directory: pathlib.Path,
model_weight_file: pathlib.Path,
storage_file: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
config = snn_dvsgesture_config.config(
dataset_directory=dataset_directory,
model_weight_file=model_weight_file,
storage_file=storage_file,
)
config.update(random_multi_config.config())
# custom is used to avoid the random injections
config["injection_config"]["layers"] = [
        # only the Conv2d layers; Linear does not work yet
"sequential.2",
"sequential.6",
# linear does not work yet
# "sequential.11",
# "sequential.13"
]
config["injection_config"][
"indexing_dimension_dict"
] = enpheeph.utils.constants.NORSE_DIMENSION_DICT
return config
| 1,765
| 30.535714
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/carla_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import flash.image
def config(
*,
dataset_directory: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"datamodule": {
"callable": flash.image.SemanticSegmentationData.from_folders,
"callable_args": {
"batch_size": 32,
"image_size": [256, 256],
"num_classes": 101,
"num_workers": 64,
"test_folder": str(
dataset_directory
/ "carla-data-capture/20180528-100vehicles-100pedestrians/CameraRGB/"
),
"test_target_folder": str(
dataset_directory
/ "carla-data-capture/20180528-100vehicles-100pedestrians/CameraSeg/"
),
},
},
}
| 1,645
| 33.291667
| 89
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/base_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import pytorch_lightning
def config(
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"seed_everything": {
"seed": 42,
"workers": True,
},
"model": {},
"datamodule": {},
"injection_handler": {},
"trainer": {
"callable": pytorch_lightning.Trainer,
"callable_args": {
"callbacks": [
pytorch_lightning.callbacks.TQDMProgressBar(
refresh_rate=10,
)
],
"deterministic": True,
"enable_checkpointing": False,
"max_epochs": 1,
                # one can use the gpu, but some functions will not be
                # deterministic, so "deterministic" must be set to False
"accelerator": "gpu",
"devices": 1,
# if one uses spawn or dp it will fail
# as sqlite connector is not picklable
# "strategy": "ddp",
},
},
}
| 1,884
| 32.660714
| 79
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/image_classification_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import flash
import flash.image
import enpheeph
import enpheeph.injections.plugins.mask.autopytorchmaskplugin
import base_config
import cifar10_config
import quantization_config
def config(
*,
dataset_directory: pathlib.Path,
model_weight_file: pathlib.Path,
storage_file: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
pytorch_handler_plugin = enpheeph.handlers.plugins.PyTorchHandlerPlugin()
storage_plugin = enpheeph.injections.plugins.storage.SQLiteStoragePlugin(
db_url="sqlite:///" + str(storage_file)
)
injection_handler = enpheeph.handlers.InjectionHandler(
injections=[],
library_handler_plugin=pytorch_handler_plugin,
)
model = {
"callable": flash.image.ImageClassifier.load_from_checkpoint,
"callable_args": {
"checkpoint_path": str(model_weight_file),
            # issues with loading a GPU-trained model on CPU:
            # plain PyTorch should handle it, but PyTorch Lightning/Flash
            # seems to allocate some GPU memory unless map_location is set
"map_location": "cpu",
},
}
config = base_config.config()
# datamodule update
config.update(cifar10_config.config(dataset_directory=dataset_directory))
# dynamic quantization update
config.update(quantization_config.config())
config["model"] = model
# update the Trainer with flash as we are using flash models, to avoid
# compatibility issues such as CUDA out of memory on CPU-only
config["trainer"]["callable"] = flash.Trainer
# we delay the instantiation of the callback to allow the saving of the
# current configuration
callback = enpheeph.integrations.pytorchlightning.InjectionCallback(
injection_handler=injection_handler,
storage_plugin=storage_plugin,
extra_session_info=config,
)
config["trainer"]["callable_args"]["callbacks"].append(callback)
# to save the injection handler to enable/disable faults
config["injection_handler"] = injection_handler
    # to save the callback to access the same storage plugin
config["injection_callback"] = callback
# custom is used to avoid the random injections
config["injection_config"] = {}
return config
| 3,094
| 33.775281
| 78
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/dvs128gesture_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import snn_training.dvs128gesturedatamodule
def config(
*,
dataset_directory: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"datamodule": {
"callable": snn_training.dvs128gesturedatamodule.DVS128GestureDataModule,
"callable_args": {
"batch_size": 4,
"data_dir": str(dataset_directory / "snn/DVS128Gesture"),
"drop_last": False,
"num_workers": 64,
"pin_memory": False,
"shuffle": False,
"val_split": 0.2,
},
},
}
| 2,203
| 35.733333
| 85
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/cifar10_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import flash
import torchvision
def config(
*,
dataset_directory: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"datamodule": {
"callable": flash.image.ImageClassificationData.from_datasets,
"callable_args": {
"train_dataset": torchvision.datasets.CIFAR10(
str(dataset_directory / "CIFAR10"),
train=True,
download=True,
),
"test_dataset": torchvision.datasets.CIFAR10(
str(dataset_directory / "CIFAR10"),
train=False,
download=True,
),
"val_split": 0.2,
"num_workers": 64,
"batch_size": 32,
},
},
}
| 1,651
| 32.04
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/semantic_segmentantion_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import flash
import flash.image
import enpheeph
import enpheeph.injections.plugins.mask.autopytorchmaskplugin
import base_config
import carla_config
import quantization_config
def config(
*,
dataset_directory: pathlib.Path,
model_weight_file: pathlib.Path,
storage_file: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
pytorch_handler_plugin = enpheeph.handlers.plugins.PyTorchHandlerPlugin()
storage_plugin = enpheeph.injections.plugins.storage.SQLiteStoragePlugin(
db_url="sqlite:///" + str(storage_file)
)
injection_handler = enpheeph.handlers.InjectionHandler(
injections=[],
library_handler_plugin=pytorch_handler_plugin,
)
model = {
"callable": flash.image.SemanticSegmentation.load_from_checkpoint,
"callable_args": {
"checkpoint_path": str(model_weight_file),
            # issues with loading a GPU-trained model on CPU:
            # plain PyTorch should handle it, but PyTorch Lightning/Flash
            # seems to allocate some GPU memory unless map_location is set
"map_location": "cpu",
},
}
config = base_config.config()
# datamodule update
config.update(carla_config.config(dataset_directory=dataset_directory))
# dynamic quantization update
config.update(quantization_config.config())
config["model"] = model
# update the Trainer with flash as we are using flash models, to avoid
# compatibility issues such as CUDA out of memory on CPU-only
config["trainer"]["callable"] = flash.Trainer
# semantic segmentation must use deterministic=False
config["trainer"]["callable_args"]["deterministic"] = False
# we delay the instantiation of the callback to allow the saving of the
# current configuration
callback = enpheeph.integrations.pytorchlightning.InjectionCallback(
injection_handler=injection_handler,
storage_plugin=storage_plugin,
extra_session_info=config,
)
config["trainer"]["callable_args"]["callbacks"].append(callback)
# to save the injection handler to enable/disable faults
config["injection_handler"] = injection_handler
    # to save the callback to access the same storage plugin
config["injection_callback"] = callback
# custom is used to avoid the random injections
config["injection_config"] = {}
return config
| 3,217
| 33.978261
| 78
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/snn_dvsgesture_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import flash
import flash.image
import enpheeph
import enpheeph.injections.plugins.mask.autopytorchmaskplugin
import base_config
import dvs128gesture_config
import quantization_config
import snn_training.dvs128gesturesnnmodule
def config(
*,
dataset_directory: pathlib.Path,
model_weight_file: pathlib.Path,
storage_file: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
pytorch_handler_plugin = enpheeph.handlers.plugins.PyTorchHandlerPlugin()
storage_plugin = enpheeph.injections.plugins.storage.SQLiteStoragePlugin(
db_url="sqlite:///" + str(storage_file)
)
injection_handler = enpheeph.handlers.InjectionHandler(
injections=[],
library_handler_plugin=pytorch_handler_plugin,
)
model = {
"callable": snn_training.dvs128gesturesnnmodule.DVS128GestureSNNModule.load_from_checkpoint,
"callable_args": {
"checkpoint_path": str(model_weight_file),
            # issues with loading a GPU-trained model on CPU:
            # plain PyTorch should handle it, but PyTorch Lightning/Flash
            # seems to allocate some GPU memory unless map_location is set
"map_location": "cpu",
},
}
config = base_config.config()
# datamodule update
config.update(dvs128gesture_config.config(dataset_directory=dataset_directory))
# dynamic quantization update
config.update(quantization_config.config())
config["model"] = model
# update the Trainer with flash as we are using flash models, to avoid
# compatibility issues such as CUDA out of memory on CPU-only
config["trainer"]["callable"] = flash.Trainer
# we delay the instantiation of the callback to allow the saving of the
# current configuration
callback = enpheeph.integrations.pytorchlightning.InjectionCallback(
injection_handler=injection_handler,
storage_plugin=storage_plugin,
extra_session_info=config,
)
config["trainer"]["callable_args"]["callbacks"].append(callback)
# to save the injection handler to enable/disable faults
config["injection_handler"] = injection_handler
    # to save the callback to access the same storage plugin
config["injection_callback"] = callback
# custom is used to avoid the random injections
config["injection_config"] = {
"indexing_dimension_dict": enpheeph.utils.constants.NORSE_DIMENSION_DICT,
}
return config
| 3,267
| 34.521739
| 100
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/snn_dvsgesture_config_single.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import enpheeph.injections
import enpheeph.injections.plugins
import snn_dvsgesture_config
def config(
*,
dataset_directory: pathlib.Path,
model_weight_file: pathlib.Path,
storage_file: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
config = snn_dvsgesture_config.config(
dataset_directory=dataset_directory,
model_weight_file=model_weight_file,
storage_file=storage_file,
)
config["injection_callback"].storage_plugin
pytorch_mask_plugin = (
enpheeph.injections.plugins.mask.autopytorchmaskplugin.AutoPyTorchMaskPlugin()
)
fault_2 = enpheeph.injections.OutputPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
# 2/6 is conv, 11/13 is linear
# 3 is lif
module_name="sequential.2",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: (slice(10, 15), ...),
enpheeph.utils.enums.DimensionType.Batch: ...,
enpheeph.utils.enums.DimensionType.Time: ...,
},
bit_index=[31],
bit_fault_value=enpheeph.utils.enums.BitFaultValue.StuckAtOne,
),
low_level_torch_plugin=pytorch_mask_plugin,
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.NORSE_DIMENSION_DICT,
),
)
config["injection_handler"].add_injections(
injections=[fault_2],
)
# custom is used to avoid the random injections
config["injection_config"] = {
"custom": True,
}
return config
| 3,295
| 35.622222
| 86
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/gpu_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
def config(
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"model": {
"callable_args": {
"map": "cuda",
}
},
"trainer": {
"callable_args": {
"accelerator": "gpu",
"devices": 1,
},
},
}
| 1,142
| 29.891892
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/random_multi_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
def config(
*args: typing.Any,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"injection_config": {
# the list contains the percentage of injections
# we cover 10 elements per decade, and add 1 at the end and 0 at the start
# we start from 0.000001
# "randomness": [0] + sum((list(x * 10 ** y for x in range(1, 10)) for y in range(-7, 0)), start=[]) + [1],
"randomness": [
0.003,
0.004,
0.005,
0.006,
0.007,
0.008,
0.009000000000000001,
0.01,
0.02,
0.03,
0.04,
0.05,
0.06,
0.07,
0.08,
0.09,
0.1,
0.2,
0.30000000000000004,
0.4,
0.5,
0.6000000000000001,
0.7000000000000001,
0.8,
0.9,
1,
],
# custom set to false to allow the random injections to be instantiated
"custom": False,
}
}
| 2,042
| 31.428571
| 119
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/cpu_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
def config(
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"model": {
"callable_args": {
"map": "cpu",
}
},
"trainer": {
"callable_args": {
"accelerator": "cpu",
"devices": 1,
},
},
}
| 1,141
| 29.864865
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/quantization_config.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import torch
import torch.quantization
def config(
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
return {
"dynamic_quantization_config": {
"qconfig": {
torch.nn.Linear,
torch.nn.LSTM,
torch.nn.GRU,
torch.nn.LSTMCell,
torch.nn.RNNCell,
torch.nn.GRUCell,
torch.nn.EmbeddingBag,
},
"dtype": torch.qint8,
}
}
| 1,291
| 30.512195
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/image_classification_config_single.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import typing
import enpheeph.injections
import enpheeph.injections.plugins
import image_classification_config
def config(
*,
dataset_directory: pathlib.Path,
model_weight_file: pathlib.Path,
storage_file: pathlib.Path,
**kwargs: typing.Any,
) -> typing.Dict[str, typing.Any]:
config = image_classification_config.config(
dataset_directory=dataset_directory,
model_weight_file=model_weight_file,
storage_file=storage_file,
)
storage_plugin = config["injection_callback"].storage_plugin
pytorch_mask_plugin = (
enpheeph.injections.plugins.mask.autopytorchmaskplugin.AutoPyTorchMaskPlugin()
)
monitor_1 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
# resnet18
# module_name="adapter.backbone.conv1",
# vgg11
module_name="adapter.backbone.0",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: ...,
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=...,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
fault_1 = enpheeph.injections.WeightPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
# resnet18
# module_name="adapter.backbone.conv1",
# vgg11
module_name="adapter.backbone.0",
parameter_type=enpheeph.utils.enums.ParameterType.Weight,
parameter_name="weight",
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: (
...,
0,
0,
),
},
bit_index=[10, 16, 31],
bit_fault_value=enpheeph.utils.enums.BitFaultValue.StuckAtOne,
),
low_level_torch_plugin=pytorch_mask_plugin,
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
monitor_2 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
# resnet18
# module_name="adapter.backbone.conv1",
# vgg11
module_name="adapter.backbone.0",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: ...,
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=None,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
monitor_3 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
module_name="adapter.backbone",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: ...,
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=None,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
fault_2 = enpheeph.injections.OutputPyTorchFault(
location=enpheeph.utils.data_classes.FaultLocation(
module_name="adapter.backbone",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: (slice(10, 15),),
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=[31],
bit_fault_value=enpheeph.utils.enums.BitFaultValue.StuckAtOne,
),
low_level_torch_plugin=pytorch_mask_plugin,
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
monitor_4 = enpheeph.injections.OutputPyTorchMonitor(
location=enpheeph.utils.data_classes.MonitorLocation(
module_name="adapter.backbone",
parameter_type=enpheeph.utils.enums.ParameterType.Activation,
dimension_index={
enpheeph.utils.enums.DimensionType.Tensor: ...,
enpheeph.utils.enums.DimensionType.Batch: ...,
},
bit_index=None,
),
enabled_metrics=enpheeph.utils.enums.MonitorMetric.StandardDeviation,
storage_plugin=storage_plugin,
move_to_first=False,
indexing_plugin=enpheeph.injections.plugins.indexing.IndexingPlugin(
dimension_dict=enpheeph.utils.constants.PYTORCH_DIMENSION_DICT,
),
)
config["injection_handler"].add_injections(
injections=[monitor_1, fault_1, monitor_2, monitor_3, fault_2, monitor_4],
)
# custom is used to avoid the random injections
config["injection_config"] = {
"custom": True,
}
return config
| 6,648
| 38.577381
| 86
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/snn_training/dvs128gesturesnnmodule.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import functools
import typing
import norse
import pytorch_lightning
import pytorch_lightning.utilities.cli
import torch
import torchmetrics
import torchvision
class SNNReturnTuple(typing.NamedTuple):
output: torch.Tensor
state: torch.Tensor
# decorator to be used for running the proper time-step loop around the
# forward of the main model
def snn_module_forward_decorator(model_forward):
@functools.wraps(model_forward)
def inner_forward(
self,
inputs: torch.Tensor,
*,
state: typing.Optional[typing.Sequence[typing.Tuple[torch.Tensor]]] = None,
) -> typing.Union[torch.Tensor, SNNReturnTuple]:
# we encode the inputs, if enabled
if self.encoding_flag:
encoded_inputs = self.encoder(inputs)
else:
encoded_inputs = inputs
# we save the sequence length from the shape of the inputs
seq_length = encoded_inputs.size()[0]
# states will contain the states at each time step, and the second
# dimension will be the one covering the number of stateful layers
# which returns states, which are named tuple
# we initialize the states with the given ones, and then we add
# new ones for covering the evolution of the system
# this is done only if we will return the state at the end
if self.return_state:
states = [state] + [None] * seq_length
# we need a list to save the output at each time step
out = []
# we iterate over the timesteps
for ts in range(seq_length):
# we load the correct state depending on whether we are saving
# them all or we only need it for execution
if self.return_state:
state = states[ts]
# we need to use self explicitly as this function is not
# bound to an instance since it's wrapped
output, state = model_forward(self, encoded_inputs[ts], state=state)
# we append the output at the current timestep to
# the output list
out.append(output)
# also here we save the state in a list for returning it,
# otherwise we save it just for the following execution
if self.return_state:
states[ts + 1] = state
# we stack the output to a torch tensor
torch_out = torch.stack(out)
# we decode the outputs, if enabled
if self.decoding_flag:
decoded_output = self.decoder(torch_out)
else:
decoded_output = output
if self.return_state:
return SNNReturnTuple(output=decoded_output, state=states)
else:
return decoded_output
return inner_forward
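# A minimal sketch of the intended usage (illustrative only; the real use is
# the decorated ``forward`` of DVS128GestureSNNModule below). The wrapped
# function handles a single time step and returns ``(output, state)``, while
# the decorator adds the loop over the leading time dimension plus the
# optional encoding/decoding and state handling:
#
#     class ToySNN(torch.nn.Module):
#         encoder = torch.nn.Identity()
#         decoder = torch.nn.Identity()
#         encoding_flag = True
#         decoding_flag = True
#         return_state = False
#
#         def __init__(self):
#             super().__init__()
#             self.cell = norse.torch.LIFCell()
#
#         @snn_module_forward_decorator
#         def forward(self, x_t, state=None):
#             return self.cell(x_t, state)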
class DVS128GestureSNNModule(pytorch_lightning.LightningModule):
DEFAULT_ENCODER = torch.nn.Identity()
DEFAULT_DECODER = torch.nn.Identity()
DEFAULT_OPTIMIZER_CLASS = torch.optim.Adam
DEFAULT_LEARNING_RATE = 1e-3
DEFAULT_RETURN_STATE = False
DEFAULT_ENCODING_FLAG = True
DEFAULT_DECODING_FLAG = True
DEFAULT_TRAINABLE_NEURON_PARAMETERS = True
DEFAULT_EXAMPLE_INPUT_ARRAY_SIZE = (1, 1, 1, 128, 128)
DEFAULT_DIMS = None
DEFAULT_NUM_CLASSES = None
DIMS = (1, 128, 128)
NUM_CLASSES = 11
def __init__(
self,
*args: typing.Any,
encoder: typing.Callable[[torch.Tensor], torch.Tensor] = DEFAULT_ENCODER,
decoder: typing.Callable[[torch.Tensor], torch.Tensor] = DEFAULT_DECODER,
return_state: bool = DEFAULT_RETURN_STATE,
encoding_flag: bool = DEFAULT_ENCODING_FLAG,
decoding_flag: bool = DEFAULT_DECODING_FLAG,
trainable_neuron_parameters: bool = DEFAULT_TRAINABLE_NEURON_PARAMETERS,
dims: typing.Optional[typing.Sequence[int]] = DIMS,
num_classes: typing.Optional[int] = NUM_CLASSES,
example_input_array_size: typing.Optional[
typing.Sequence[int]
] = DEFAULT_EXAMPLE_INPUT_ARRAY_SIZE,
        optimizer_class: typing.Type[torch.optim.Optimizer] = DEFAULT_OPTIMIZER_CLASS,
learning_rate: float = DEFAULT_LEARNING_RATE,
map: typing.Optional[torch.device] = None,
**kwargs: typing.Any,
):
super().__init__(*args, **kwargs)
self.save_hyperparameters()
self.encoder = encoder
self.decoder = decoder
self.encoding_flag = self.hparams.encoding_flag
self.decoding_flag = self.hparams.decoding_flag
self.return_state = self.hparams.return_state
self.trainable_neuron_parameters = self.hparams.trainable_neuron_parameters
self.optimizer_classes = optimizer_class
self.learning_rates = learning_rate
self.normalize_prob_func = torch.nn.Identity()
self.pre_accuracy_func = torch.nn.Identity()
self.loss_func = torch.nn.CrossEntropyLoss()
self.accuracy_func = self.custom_argmax_accuracy
# we save the input size
self.dims = dims
if self.dims is None and hasattr(self, "DIMS"):
self.dims = self.DIMS
# we save the number of classes
self.num_classes = num_classes
if self.num_classes is None and hasattr(self, "NUM_CLASSES"):
self.num_classes = self.NUM_CLASSES
self.example_input_array_size = example_input_array_size
if self.example_input_array_size is not None:
self.example_input_array = torch.randn(*self.example_input_array_size)
self._check_encoder_decoder()
self.model_definition()
if map is not None:
self.to(map)
def _check_encoder_decoder(self):
callable_ = callable(self.encoder) and callable(self.decoder)
if not callable_:
raise ValueError("The encoder/decoder should be callable")
# this method is used to register possible hidden parameters inside the
# SNN configurations
def register_snn_parameters(self):
# we get all the Parameter elements from the modules
# some Parameters have nested Parameters, like LIFRefrac has
# a nested LIFParameters in it
p_list = []
# we need a counter as many parameters may have the same name
counter = 0
# we populate the list with direct children to the modules,
# using 'p' as variable name
# only if it is a namedtuple, with _asdict, or if it is a
# torch.nn.Module
for module in self.modules():
if hasattr(module, "p"):
p = module.p
if hasattr(p, "_asdict"):
p_list.extend(list(p._asdict().items()))
elif isinstance(p, torch.nn.Module):
p_list.extend(list(p.named_modules()))
# we iterate over the list until it's empty
while len(p_list) > 0:
p_name, p_value = p_list.pop()
# if the value is a namedtuple or a torch.nn.Module we extend the
# list
if hasattr(p_value, "_asdict"):
p_list.extend(list(p_value._asdict().items()))
elif isinstance(p_value, torch.nn.Module):
p_list.extend(list(p_value.named_modules()))
            # we check whether it is a tensor which requires gradient and
# it is not already registered
tensor_flag = isinstance(p_value, torch.Tensor)
grad_flag = getattr(p_value, "requires_grad", False)
id_param_list = [id(param) for param in self.parameters()]
parameter_flag = id(p_value) not in id_param_list
# if True we increase the counter and register the new parameter
if tensor_flag and grad_flag and parameter_flag:
counter += 1
module.register_parameter("p/" + p_name + "/" + str(counter), p_value)
# we delegate the weight initialization to each component
# decoder, model, encoder
def init_weights(self):
for mod in (self.decoder, self.encoder):
if (init_weights := getattr(mod, "init_weights", None)) is not None:
init_weights()
# this initialization is similar to the ResNet one
# taken from https://github.com/Lornatang/AlexNet-PyTorch/
# @ alexnet_pytorch/model.py#L63
for m in self.modules():
if isinstance(m, torch.nn.Conv2d):
torch.nn.init.kaiming_normal_(
m.weight, mode="fan_out", nonlinearity="relu"
)
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.BatchNorm2d):
torch.nn.init.constant_(m.weight, 1)
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.Linear):
torch.nn.init.normal_(m.weight, 0, 0.01)
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
# implemented by us for compatibility between forward and validation/test
# steps
def inference_step(self, batch, batch_idx):
x, y = batch
y_hat = self.normalize_prob_func(self.forward(x))
loss = self.loss_func(y_hat, y)
acc = self.accuracy_func(self.pre_accuracy_func(y_hat), y)
return {"acc": acc, "loss": loss}
def training_step(self, batch, batch_idx):
m = self.inference_step(batch, batch_idx)
metrics = {
"train_acc": m["acc"],
"train_loss": m["loss"],
}
self.log_dict(metrics, prog_bar=True, on_step=True, on_epoch=True, logger=True)
# here we need to return the loss to be able to properly train
return m["loss"]
def validation_step(self, batch, batch_idx):
m = self.inference_step(batch, batch_idx)
metrics = {
"val_acc": m["acc"],
"val_loss": m["loss"],
}
self.log_dict(metrics, prog_bar=True, on_step=True, on_epoch=True, logger=True)
# this may not be needed, as for logging we already use self.log_dict
# return metrics
def test_step(self, batch, batch_idx):
m = self.inference_step(batch, batch_idx)
metrics = {
"test_acc": m["acc"],
"test_loss": m["loss"],
}
self.log_dict(metrics, prog_bar=True, on_step=True, on_epoch=True, logger=True)
# this may not be needed, as for logging we already use self.log_dict
# return metrics
def configure_optimizers(self):
optimizer = self.optimizer_classes(self.parameters(), self.learning_rates)
return optimizer
def model_definition(self):
if self.trainable_neuron_parameters:
lif1 = norse.torch.LIFCell(
p=norse.torch.LIFParameters(
tau_syn_inv=torch.nn.Parameter(
torch.full(
size=[32, 32, 32],
fill_value=(
norse.torch.LIFParameters._field_defaults.get(
"tau_syn_inv"
)
),
),
),
tau_mem_inv=torch.nn.Parameter(
torch.full(
size=[32, 32, 32],
fill_value=(
norse.torch.LIFParameters._field_defaults.get(
"tau_mem_inv"
)
),
),
),
v_leak=torch.nn.Parameter(
norse.torch.LIFParameters._field_defaults.get("v_leak")
),
v_th=torch.nn.Parameter(
torch.full(
size=[32, 32, 32],
fill_value=(
0.4
# norse.torch.LIFParameters.
# _field_defaults.get(
# "v_th"
# )
),
),
),
v_reset=torch.nn.Parameter(
torch.full(
size=[32, 32, 32],
fill_value=(
norse.torch.LIFParameters._field_defaults.get("v_reset")
),
),
),
alpha=norse.torch.LIFParameters._field_defaults.get("alpha"),
method="super",
),
dt=0.01,
)
lif2 = norse.torch.LIFCell(
p=norse.torch.LIFParameters(
tau_syn_inv=torch.nn.Parameter(
torch.full(
size=[32, 16, 16],
fill_value=(
norse.torch.LIFParameters._field_defaults.get(
"tau_syn_inv"
)
),
),
),
tau_mem_inv=torch.nn.Parameter(
torch.full(
size=[32, 16, 16],
fill_value=(
norse.torch.LIFParameters._field_defaults.get(
"tau_mem_inv"
)
),
),
),
v_leak=torch.nn.Parameter(
norse.torch.LIFParameters._field_defaults.get("v_leak")
),
v_th=torch.nn.Parameter(
torch.full(
size=[32, 16, 16],
fill_value=(
0.4
# norse.torch.LIFParameters.
# _field_defaults.get(
# "v_th"
# )
),
),
),
v_reset=torch.nn.Parameter(
torch.full(
size=[32, 16, 16],
fill_value=(
norse.torch.LIFParameters._field_defaults.get("v_reset")
),
),
),
alpha=norse.torch.LIFParameters._field_defaults.get("alpha"),
method="super",
),
dt=0.01,
)
li = norse.torch.LICell(
p=norse.torch.LIParameters(
tau_syn_inv=torch.nn.Parameter(
torch.full(
size=[11],
fill_value=(
norse.torch.LIParameters._field_defaults.get(
"tau_syn_inv"
)
),
),
),
tau_mem_inv=torch.nn.Parameter(
torch.full(
size=[11],
fill_value=(
norse.torch.LIParameters._field_defaults.get(
"tau_mem_inv"
)
),
),
),
v_leak=torch.nn.Parameter(
norse.torch.LIParameters._field_defaults.get("v_leak")
),
),
dt=torch.nn.Parameter(
torch.full(
size=[11],
fill_value=0.01,
),
),
)
else:
lif1 = norse.torch.LIFCell()
lif2 = norse.torch.LIFCell()
li = norse.torch.LICell()
self.sequential = norse.torch.SequentialState(
torch.nn.AvgPool2d(
kernel_size=4,
stride=4,
padding=0,
ceil_mode=False,
),
torch.nn.Dropout(
p=0.1,
inplace=False,
),
# 2
torch.nn.Conv2d(
in_channels=1,
out_channels=32,
kernel_size=3,
padding=1,
dilation=1,
stride=1,
groups=1,
),
lif1,
torch.nn.AvgPool2d(
kernel_size=2,
stride=2,
padding=0,
ceil_mode=False,
),
torch.nn.Dropout(
p=0.1,
inplace=False,
),
# 6
torch.nn.Conv2d(
in_channels=32,
out_channels=32,
kernel_size=3,
padding=1,
dilation=1,
stride=1,
groups=1,
),
lif2,
torch.nn.AvgPool2d(
kernel_size=2,
stride=2,
padding=0,
ceil_mode=False,
),
torch.nn.Dropout(
p=0.2,
inplace=False,
),
torch.nn.Flatten(
start_dim=1,
end_dim=-1,
),
# 11
torch.nn.Linear(
in_features=2048,
out_features=500,
bias=True,
),
torch.nn.ReLU(),
# 13
torch.nn.Linear(
in_features=500,
out_features=11,
bias=True,
),
li,
)
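        # note: the "# 2", "# 6", "# 11", "# 13" markers above are the indices
        # of those layers inside the SequentialState container; the same
        # indices are used as module names (e.g. "sequential.2") by the
        # fault-injection configs in this folder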
# this must be called after setting up the SNN module
self.register_snn_parameters()
@snn_module_forward_decorator
def forward(self, x, state=None):
return self.sequential.forward(x, state=state)
    # NOTE: this is a temporary solution, as it is difficult to express
    # ad-hoc helper functions in a JSON configuration
@staticmethod
def random_noise_max_membrane_voltage_log_softmax_decoder(inputs):
# we add some random noise
temp = inputs + 0.001 * torch.randn(*inputs.size(), device=inputs.device)
# we get the maximum for each membrane voltage over the time steps,
# dim=0
max_inputs, _ = torch.max(temp, dim=0)
return max_inputs
    # NOTE: this is a temporary solution, as it is difficult to express
    # ad-hoc helper functions in a JSON configuration
@staticmethod
def label_smoothing_loss(y_hat, y, alpha=0.2):
log_probs = torch.nn.functional.log_softmax(y_hat, dim=-1)
xent = torch.nn.functional.nll_loss(log_probs, y, reduction="none")
KL = -log_probs.mean(dim=-1)
loss = (1 - alpha) * xent + alpha * KL
return loss.sum()
@staticmethod
def custom_softmax_accuracy(y_hat, y):
return torchmetrics.Accuracy().to(y_hat.device)(
torch.nn.functional.softmax(y_hat, dim=-1), y
)
# the following functions are for MNIST SNN training, from the norse
# tutorial
@staticmethod
def custom_argmax_accuracy(y_hat, y):
return torchmetrics.Accuracy().to(y_hat.device)(torch.argmax(y_hat, dim=-1), y)
# must be used if the target is one-hot encoded
@staticmethod
def custom_one_hot_argmax_accuracy(y_hat, y):
return torchmetrics.Accuracy().to(y_hat.device)(
torch.argmax(y_hat, dim=-1),
torch.max(y, dim=-1)[1],
)
@staticmethod
def max_log_softmax_probability(x):
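        # take the maximum value over the time dimension (dim=0) and turn it
        # into class log-probabilities with a log-softmax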
x, _ = torch.max(x, 0)
log_p_y = torch.nn.functional.log_softmax(x, dim=-1)
return log_p_y
@staticmethod
def decoder_dvs128gesture(x):
return DVS128GestureSNNModule.max_log_softmax_probability(x)
@classmethod
def encoder_dvs128gesture(cls, input_):
encoder_name = "_encoder_dvs128gesture"
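        # the encoder is built lazily once and cached as a class attribute; it
        # densifies sparse inputs, keeps a single polarity channel, casts to
        # float32 and swaps the first two dimensions (time-major layout)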
if (encoder := getattr(cls, encoder_name, None)) is None:
encoder = torchvision.transforms.Compose(
[
lambda x: x.to_dense() if x.is_sparse else x,
lambda x: x[:, :, 0:1, :, :],
functools.partial(
lambda x, dtype: x.to(dtype=dtype) if x.dtype != dtype else x,
dtype=torch.float32,
),
lambda x: x.permute(1, 0, 2, 3, 4),
]
)
setattr(cls, encoder_name, encoder)
return encoder(input_)
| 22,558
| 36.978114
| 88
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/snn_training/snn_training.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import pathlib
import sys
import pytorch_lightning
try:
import dvs128gesturesnnmodule
import dvs128gesturedatamodule
except ImportError:
sys.path.append(str(pathlib.Path(__file__).absolute().parent))
import dvs128gesturesnnmodule
import dvs128gesturedatamodule
sys.path.pop()
BATCH_SIZE = 10
DVS128GESTURE_DATASET_PATH = pathlib.Path(
"/shared/ml/datasets/vision/snn/DVS128Gesture/"
)
MONITOR_METRIC_ACCURACY = "val_acc_epoch"
MONITOR_METRIC_ACCURACY_MODE = "max"
MONITOR_METRIC_LOSS = "val_loss_epoch"
MONITOR_METRIC_LOSS_MODE = "min"
# MONITOR_METRIC_LOSS = "val_acc_epoch"
# MONITOR_METRIC_LOSS_MODE = "max"
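# early stopping below monitors the validation loss, while model checkpointing
# keeps the epochs with the best validation accuracy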
SEED = 42
TRAINING_DIR = pathlib.Path(__file__).parent / "checkpoints" / "dvs128gesture_snn"
def main():
pytorch_lightning.seed_everything(SEED)
model = dvs128gesturesnnmodule.DVS128GestureSNNModule(
encoder=dvs128gesturesnnmodule.DVS128GestureSNNModule.encoder_dvs128gesture,
decoder=dvs128gesturesnnmodule.DVS128GestureSNNModule.decoder_dvs128gesture,
return_state=False,
encoding_flag=True,
decoding_flag=True,
trainable_neuron_parameters=False,
learning_rate=1e-3,
)
datamodule = dvs128gesturedatamodule.DVS128GestureDataModule(
data_dir=DVS128GESTURE_DATASET_PATH,
num_workers=64,
drop_last=False,
shuffle=False,
batch_size=BATCH_SIZE,
seed=SEED,
pin_memory=False,
)
trainer = pytorch_lightning.Trainer(
accelerator="gpu",
callbacks=[
pytorch_lightning.callbacks.DeviceStatsMonitor(),
pytorch_lightning.callbacks.EarlyStopping(
check_finite=True,
min_delta=0.001,
mode=MONITOR_METRIC_LOSS_MODE,
                # name of the monitored metric
                # (the pytorch_lightning default is early_stop_on)
monitor=MONITOR_METRIC_LOSS,
patience=5,
verbose=True,
),
pytorch_lightning.callbacks.ModelCheckpoint(
dirpath=None,
every_n_epochs=1,
every_n_train_steps=None,
filename=None,
mode=MONITOR_METRIC_ACCURACY_MODE,
monitor=MONITOR_METRIC_ACCURACY,
save_last=True,
save_top_k=3,
save_weights_only=False,
verbose=True,
),
pytorch_lightning.callbacks.TQDMProgressBar(),
],
default_root_dir=str(TRAINING_DIR),
deterministic=True,
devices="auto",
logger=[
pytorch_lightning.loggers.TensorBoardLogger(
save_dir=str(TRAINING_DIR),
# experiment name, in this custom configuration it is default
name="default",
version=None,
# this enables the saving of the computational graph
# it requires example_input_array in the model
log_graph=True,
default_hp_metric=True,
prefix="",
)
],
log_every_n_steps=10,
replace_sampler_ddp=True,
strategy=pytorch_lightning.plugins.DDPPlugin(find_unused_parameters=False),
)
trainer.fit(model, datamodule=datamodule)
if __name__ == "__main__":
main()
| 4,122
| 32.520325
| 84
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/snn_training/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 781
| 45
| 77
|
py
|
enpheeph
|
enpheeph-main/papers/iros2022/experiments/results/configs/snn_training/dvs128gesturedatamodule.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import typing
import pl_bolts
import tonic
import torch
import torchvision
class DVS128GestureDataModule(
pl_bolts.datamodules.vision_datamodule.VisionDataModule,
):
DEFAULT_TRAIN_TRANSFORMS = tonic.transforms.Compose(
[
# torch.tensor,
# tonic.transforms.Downsample(time_factor=0.0001),
            # the average recording length is roughly 7185841 time units, so a
            # time window of 100000 would split it into about 72 frames; the
            # smaller window used below yields proportionally more frames
tonic.transforms.MergePolarities(),
tonic.transforms.ToFrame(
tonic.datasets.dvsgesture.DVSGesture.sensor_size,
time_window=25_000,
),
]
)
DEFAULT_VAL_TRANSFORMS = DEFAULT_TRAIN_TRANSFORMS
DEFAULT_TEST_TRANSFORMS = DEFAULT_TRAIN_TRANSFORMS
DEFAULT_TARGET_TRANSFORM = None
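    # the collate function pads all samples in a batch to the same number of
    # frames, keeping the batch dimension first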
DEFAULT_COLLATE_FN = torchvision.transforms.Compose(
[
tonic.collation.PadTensors(batch_first=True),
]
)
EXTRA_ARGS = {"target_transform": None}
    # trick: dataset_cls is expected to have a signature that also accepts
    # download, which tonic does not require
# see the corresponding property
# dataset_cls = tonic.datasets.dvsgesture.DVSGesture
name = "DVSGesture"
dims = tonic.datasets.dvsgesture.DVSGesture.sensor_size
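    # the DVS128 Gesture dataset provides 11 gesture classes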
num_classes = 11
    # trick: dataset_cls must expose the signature of dataset_cls_interface,
    # including download, which tonic does not use
@property
def dataset_cls(self):
def dataset_cls_interface(
data_dir, train=True, download=True, transform=None, *args, **kwargs
):
return tonic.datasets.dvsgesture.DVSGesture(
save_to=data_dir, train=train, transform=transform
)
return dataset_cls_interface
def __init__(
self,
# generic VisionDataModule arguments
data_dir: typing.Optional[str] = None,
val_split: typing.Union[int, float] = 0.2,
num_workers: int = 16,
normalize: bool = False,
batch_size: int = 32,
seed: int = 42,
shuffle: bool = False,
pin_memory: bool = False,
drop_last: bool = False,
# generic transforms
train_transforms: typing.Optional[
typing.Callable[[typing.Any], torch.Tensor]
] = None,
val_transforms: typing.Optional[
typing.Callable[[typing.Any], torch.Tensor]
] = None,
test_transforms: typing.Optional[
typing.Callable[[typing.Any], torch.Tensor]
] = None,
# tonic specific arguments for collate_fn and target transform
target_transform: typing.Optional[
typing.Callable[[typing.Any], torch.Tensor]
] = None,
collate_fn: typing.Optional[
typing.Callable[[torch.Tensor], torch.Tensor]
] = None,
# extra argument
*args: typing.Any,
**kwargs: typing.Any,
):
super().__init__(
*args,
data_dir=data_dir,
val_split=val_split,
num_workers=num_workers,
normalize=normalize,
batch_size=batch_size,
seed=seed,
shuffle=shuffle,
pin_memory=pin_memory,
drop_last=drop_last,
**kwargs,
)
if train_transforms is None:
self.train_transforms = self.DEFAULT_TRAIN_TRANSFORMS
else:
self.train_transforms = train_transforms
if val_transforms is None:
self.val_transforms = self.DEFAULT_VAL_TRANSFORMS
else:
self.val_transforms = val_transforms
if test_transforms is None:
self.test_transforms = self.DEFAULT_TEST_TRANSFORMS
else:
self.test_transforms = test_transforms
if target_transform is None:
self.target_transform = self.DEFAULT_TARGET_TRANSFORM
else:
self.target_transform = target_transform
if collate_fn is None:
self.collate_fn = self.DEFAULT_COLLATE_FN
else:
self.collate_fn = collate_fn
# this is automatically passed in the dataset class
self.EXTRA_ARGS["target_transform"] = self.target_transform
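        # note: EXTRA_ARGS is a class-level dictionary, so this assignment is
        # shared across every instance of the datamodule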
        # we call setup here to initialize the datasets, since it is not called
        # automatically when the *_dataloader methods are used directly
self.setup()
def default_transforms(self) -> typing.Callable[[typing.Any], torch.Tensor]:
return tonic.transforms.Compose([])
def _data_loader(
self, dataset: torch.utils.data.Dataset, shuffle: bool = False
) -> torch.utils.data.DataLoader:
return torch.utils.data.DataLoader(
dataset,
batch_size=self.batch_size,
collate_fn=self.collate_fn,
shuffle=shuffle,
num_workers=self.num_workers,
drop_last=self.drop_last,
pin_memory=self.pin_memory,
)
| 6,508
| 34.763736
| 86
|
py
|
enpheeph
|
enpheeph-main/tests/conftest.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import collections
import torchvision
import pytest
# with params we can parametrize the fixture
@pytest.fixture(
scope="function",
params=[
[None, object(), None],
["test.module", 2, "test"],
["foobar", "a", "foobar"],
["second_test", 2, "second_test"],
[False, [1, 2, 3], None],
],
ids=[
"None",
"test.module",
"foobar",
"second_test",
"deletion",
],
)
# we need to use request.param to access the parameter
def mock_object_with_library(monkeypatch, request):
# we get the name of the library to be tested and the object
library_name, obj, expected_library_name = request.param
if library_name is not False:
monkeypatch.setattr(obj.__class__, "__module__", library_name)
else:
monkeypatch.delattr(obj.__class__, "__module__")
return TestWithTarget(test_input=obj, target=expected_library_name)
# move everything to pytest_cases https://smarie.github.io/python-pytest-cases/
@pytest.fixture(
scope="class",
)
def trained_model_1epoch():
pass
@pytest.fixture(
scope="session",
params=[
[torchvision.datasets.CIFAR10],
],
ids=[
"CIFAR10",
],
)
def datamodule(tmp_path, request):
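    # this fixture is still a stub: it selects the parametrized dataset class
    # but does not build or return a datamodule yet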
request.param[0]
TestWithTarget = collections.namedtuple("TestWithTarget", "test_input target")
| 2,909
| 30.290323
| 79
|
py
|
enpheeph
|
enpheeph-main/tests/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 781
| 45
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 781
| 45
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/integration_test/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 781
| 45
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/integration_test/test_injections/test_faultabc.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import enpheeph.injections.abc.faultabc
import enpheeph.injections.abc.injectionabc
class TestFaultABC(object):
def test_subclass_injection(self):
assert issubclass(
enpheeph.injections.abc.faultabc.FaultABC,
enpheeph.injections.abc.injectionabc.InjectionABC,
)
| 1,849
| 40.111111
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/integration_test/test_injections/test_monitorabc.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import enpheeph.injections.abc.injectionabc
import enpheeph.injections.abc.monitorabc
class TestMonitorABC(object):
def test_subclass_injection(self):
assert issubclass(
enpheeph.injections.abc.monitorabc.MonitorABC,
enpheeph.injections.abc.injectionabc.InjectionABC,
)
| 1,857
| 40.288889
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/integration_test/test_injections/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 1,539
| 45.666667
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/e2e_test/test_pytorch_sparse_injection.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
class TestPyTorchSparseInjection(object):
pass
| 1,592
| 42.054054
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/e2e_test/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 781
| 45
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/unit_test/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 781
| 45
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/unit_test/test_handlers/test_injection_handler.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import collections
import pytest
import enpheeph.handlers.injectionhandler
class TestInjectionHandler(object):
@pytest.mark.skip(reason="InjectionHandler tests are not ready")
@pytest.mark.parametrize(
argnames=("injections",),
argvalues=[
pytest.param(
collections.defaultdict(),
id="injections",
),
],
)
def test_add_injections(self, injections):
assert enpheeph.handlers.injectionhandler
| 2,039
| 36.777778
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/unit_test/test_handlers/__init__.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
| 1,539
| 45.666667
| 77
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/unit_test/test_utils/test_functions.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import collections
import pytest
import pytest_cases
import enpheeph.utils.functions
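# the Cases* classes below provide (input, expected) pairs that pytest_cases
# binds to the tests through parametrize_with_cases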
class CasesCamelCaseToSnakeCaseFunction(object):
def case_CamelSnake(self):
return ("CamelSnake", "camel_snake")
def case_camelSnake(self):
return ("camelSnake", "camel_snake")
def case_camel_snake(self):
return ("camel_snake", "camel_snake")
class CasesGetObjectLibraryFunction(object):
def case_int_from_builtins(self):
return (1, "builtins")
def case_pluggy_from_pytest(self):
return (pytest.hookspec, "pluggy")
def case_defaultdict_from_collections(self):
return (collections.defaultdict(), "collections")
class TestCamelCaseToSnakeCaseFunction(object):
@pytest_cases.parametrize_with_cases(
argnames=("camel", "snake"),
cases=CasesCamelCaseToSnakeCaseFunction,
)
def test_camel_to_snake(self, camel, snake):
assert enpheeph.utils.functions.camel_to_snake(camel) == snake
class TestGetObjectLibraryFunction(object):
@pytest.mark.skip(
reason=(
"PyTest/unittest do not support mocking __module__ in __class__ "
"of an object, however this code is left here as memorandum"
),
)
def test_get_object_library_with_mocks(self, mock_object_with_library):
obj, library_name = mock_object_with_library
assert enpheeph.utils.functions.get_object_library(obj) == library_name
@pytest_cases.parametrize_with_cases(
argnames="obj,library_name",
cases=CasesGetObjectLibraryFunction,
)
def test_get_object_library_with_real_objs(self, obj, library_name):
assert enpheeph.utils.functions.get_object_library(obj) == library_name
| 3,268
| 35.322222
| 79
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/unit_test/test_utils/test_enums.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import enum
import operator
import pytest
import enpheeph.utils.enums
class TestEnums(object):
def test_bit_fault_value(self):
assert issubclass(enpheeph.utils.enums.BitFaultValue, enum.Enum)
assert {"BitFlip", "StuckAtZero", "StuckAtOne"} == set(
enpheeph.utils.enums.BitFaultValue.__members__.keys()
)
def test_bit_width_int_enum(self):
assert issubclass(enpheeph.utils.enums.BitWidth, enum.IntEnum)
@pytest.mark.parametrize(
argnames=("width_name", "bit_width_value"),
argvalues=[
pytest.param(
width_name,
bit_width_value,
id=str(width_name) + "_" + str(bit_width_value),
)
for width_name, bit_width_value in {
"OneByte": 8,
"TwoBytes": 16,
"ThreeBytes": 24,
"FourBytes": 32,
"FiveBytes": 40,
"SixBytes": 48,
"SevenBytes": 56,
"EightBytes": 64,
"FloatingPoint16": 16,
"FloatingPoint32": 32,
"FloatingPoint64": 64,
"Int32": 32,
"Int64": 64,
}.items()
],
)
def test_bit_width_values(self, width_name, bit_width_value):
assert (
enpheeph.utils.enums.BitWidth.__members__[width_name].value
== bit_width_value
)
def test_dimension_type(self):
assert issubclass(enpheeph.utils.enums.DimensionType, enum.Enum)
assert {"BitLevel", "Batch", "Tensor", "Time"} == set(
enpheeph.utils.enums.DimensionType.__members__.keys()
)
def test_endianness_enum(self):
assert issubclass(enpheeph.utils.enums.Endianness, enum.Enum)
@pytest.mark.parametrize(
argnames=("endianness_name", "endianness_symbol"),
argvalues=[
pytest.param(
endianness_name,
endianness_symbol,
id=str(endianness_name) + "_" + str(endianness_symbol),
)
for endianness_name, endianness_symbol in {
"Little": "<",
"Big": ">",
"MSBAtIndexZero": ">",
"LSBAtIndexZero": "<",
}.items()
],
)
def test_endianness_values(self, endianness_name, endianness_symbol):
assert (
enpheeph.utils.enums.Endianness.__members__[endianness_name].value
== endianness_symbol
)
def test_fault_mask_operation_enum(self):
assert issubclass(enpheeph.utils.enums.FaultMaskOperation, enum.Enum)
@pytest.mark.parametrize(
argnames=("fault_mask_operation_name", "fault_mask_operation_symbol"),
argvalues=[
pytest.param(
fault_mask_operation_name,
fault_mask_operation_symbol,
id=str(fault_mask_operation_name)
+ "_"
+ str(fault_mask_operation_symbol),
)
for fault_mask_operation_name, fault_mask_operation_symbol in {
"InPlaceXor": operator.ixor,
"InPlaceAnd": operator.iand,
"InPlaceOr": operator.ior,
"Xor": operator.xor,
"And": operator.and_,
"Or": operator.or_,
}.items()
],
)
def test_fault_mask_operation_values(
self, fault_mask_operation_name, fault_mask_operation_symbol
):
assert (
enpheeph.utils.enums.FaultMaskOperation.__members__[
fault_mask_operation_name
].value
== fault_mask_operation_symbol
)
def test_fault_mask_value_enum(self):
assert issubclass(enpheeph.utils.enums.FaultMaskValue, enum.IntEnum)
@pytest.mark.parametrize(
argnames=("fault_mask_value_name", "fault_mask_value_symbol"),
argvalues=[
pytest.param(
fault_mask_value_name,
fault_mask_value_symbol,
id=str(fault_mask_value_name) + "_" + str(fault_mask_value_symbol),
)
for fault_mask_value_name, fault_mask_value_symbol in {
"One": 1,
"Zero": 0,
}.items()
],
)
def test_fault_mask_value_values(
self, fault_mask_value_name, fault_mask_value_symbol
):
assert (
enpheeph.utils.enums.FaultMaskValue.__members__[fault_mask_value_name].value
== fault_mask_value_symbol
)
def test_handler_status(self):
assert issubclass(enpheeph.utils.enums.HandlerStatus, enum.Enum)
assert {"Running", "Idle"} == set(
enpheeph.utils.enums.HandlerStatus.__members__.keys()
)
def test_monitor_metric(self):
assert issubclass(enpheeph.utils.enums.MonitorMetric, enum.Flag)
assert {
"StandardDeviation",
"Maximum",
"Minimum",
"ArithmeticMean",
"GeometricMean",
} == set(enpheeph.utils.enums.MonitorMetric.__members__.keys())
def test_parameter_type(self):
assert issubclass(enpheeph.utils.enums.ParameterType, enum.Flag)
        # we compare against the full set of member names; the composite
        # flag values are verified in the parametrized test below
assert {
"DNN",
"SNN",
"RNN",
"Weight",
"Activation",
"State",
"LIF",
"Voltage",
"Current",
"Dense",
"PrunedDense",
"Sparse",
"COO",
"CSR",
"Index",
"Value",
"DNNWeightDense",
"DNNActivationDense",
"SNNLIFStateVoltageDense",
} == set(enpheeph.utils.enums.ParameterType.__members__.keys())
@pytest.mark.parametrize(
argnames=("parameter_type_composite", "parameter_type_equivalence"),
argvalues=[
pytest.param(
parameter_type_composite,
parameter_type_equivalence,
id=str(parameter_type_composite)
+ "_"
+ str(parameter_type_equivalence),
)
for parameter_type_composite, parameter_type_equivalence in {
enpheeph.utils.enums.ParameterType.DNNWeightDense: (
enpheeph.utils.enums.ParameterType.DNN
| enpheeph.utils.enums.ParameterType.Weight
| enpheeph.utils.enums.ParameterType.Dense
),
enpheeph.utils.enums.ParameterType.DNNActivationDense: (
enpheeph.utils.enums.ParameterType.DNN
| enpheeph.utils.enums.ParameterType.Activation
| enpheeph.utils.enums.ParameterType.Dense
),
enpheeph.utils.enums.ParameterType.SNNLIFStateVoltageDense: (
enpheeph.utils.enums.ParameterType.SNN
| enpheeph.utils.enums.ParameterType.LIF
| enpheeph.utils.enums.ParameterType.State
| enpheeph.utils.enums.ParameterType.Dense
| enpheeph.utils.enums.ParameterType.Voltage
),
}.items()
],
)
def test_parameter_type_composite_values(
self, parameter_type_composite, parameter_type_equivalence
):
assert parameter_type_composite == parameter_type_equivalence
| 9,052
| 34.641732
| 88
|
py
|
enpheeph
|
enpheeph-main/tests/test_enpheeph/unit_test/test_utils/test_data_classes.py
|
# -*- coding: utf-8 -*-
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2023 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# enpheeph - Neural Fault Injection Framework
# Copyright (C) 2020-2022 Alessio "Alexei95" Colucci
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# import enpheeph.utils.dataclasses
class Test(object):
pass
| 1,608
| 39.225
| 77
|
py
|