body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
53937b8ac6e0ecfae2ab6697d03e7a748a337a5d4a42f2ffa5cc235b822fd53a
def mimeTypes(self): 'Inherited method to redefine draggable mime types.' return [Hdf5DatasetMimeData.MIME_TYPE]
Inherited method to redefine draggable mime types.
src/silx/app/view/CustomNxdataWidget.py
mimeTypes
tifuchs/silx
94
python
def mimeTypes(self): return [Hdf5DatasetMimeData.MIME_TYPE]
def mimeTypes(self): return [Hdf5DatasetMimeData.MIME_TYPE]<|docstring|>Inherited method to redefine draggable mime types.<|endoftext|>
afaf60ff9dacb3870d12d8d149f7ccb4f7dcd72a0c11f91b013b21321c9a74d0
def mimeData(self, indexes): '\n Returns an object that contains serialized items of data corresponding\n to the list of indexes specified.\n\n :param List[qt.QModelIndex] indexes: List of indexes\n :rtype: qt.QMimeData\n ' if (len(indexes) > 1): return None if (len(indexes) == 0): return None qindex = indexes[0] qindex = self.index(qindex.row(), 0, parent=qindex.parent()) item = self.itemFromIndex(qindex) if isinstance(item, _DatasetItemRow): dataset = item.getDataset() if (dataset is None): return None else: mimeData = Hdf5DatasetMimeData(dataset=item.getDataset()) else: mimeData = None return mimeData
Returns an object that contains serialized items of data corresponding to the list of indexes specified. :param List[qt.QModelIndex] indexes: List of indexes :rtype: qt.QMimeData
src/silx/app/view/CustomNxdataWidget.py
mimeData
tifuchs/silx
94
python
def mimeData(self, indexes): '\n Returns an object that contains serialized items of data corresponding\n to the list of indexes specified.\n\n :param List[qt.QModelIndex] indexes: List of indexes\n :rtype: qt.QMimeData\n ' if (len(indexes) > 1): return None if (len(indexes) == 0): return None qindex = indexes[0] qindex = self.index(qindex.row(), 0, parent=qindex.parent()) item = self.itemFromIndex(qindex) if isinstance(item, _DatasetItemRow): dataset = item.getDataset() if (dataset is None): return None else: mimeData = Hdf5DatasetMimeData(dataset=item.getDataset()) else: mimeData = None return mimeData
def mimeData(self, indexes): '\n Returns an object that contains serialized items of data corresponding\n to the list of indexes specified.\n\n :param List[qt.QModelIndex] indexes: List of indexes\n :rtype: qt.QMimeData\n ' if (len(indexes) > 1): return None if (len(indexes) == 0): return None qindex = indexes[0] qindex = self.index(qindex.row(), 0, parent=qindex.parent()) item = self.itemFromIndex(qindex) if isinstance(item, _DatasetItemRow): dataset = item.getDataset() if (dataset is None): return None else: mimeData = Hdf5DatasetMimeData(dataset=item.getDataset()) else: mimeData = None return mimeData<|docstring|>Returns an object that contains serialized items of data corresponding to the list of indexes specified. :param List[qt.QModelIndex] indexes: List of indexes :rtype: qt.QMimeData<|endoftext|>
65d2f9dfbc7f04dfd73305ba13512e57749b2eeb4fd05ca2e3123993576320a7
def dropMimeData(self, mimedata, action, row, column, parentIndex): 'Inherited method to handle a drop operation to this model.' if (action == qt.Qt.IgnoreAction): return True if mimedata.hasFormat(Hdf5DatasetMimeData.MIME_TYPE): if ((row != (- 1)) or (column != (- 1))): return False item = self.itemFromIndex(parentIndex) if ((item is None) or (item is self.invisibleRootItem())): dataset = mimedata.dataset() if silx.io.is_dataset(dataset): self.createFromSignal(dataset) elif silx.io.is_group(dataset): nxdata = dataset try: self.createFromNxdata(nxdata) except ValueError: _logger.error('Error while dropping a group as an NXdata') _logger.debug('Backtrace', exc_info=True) return False else: _logger.error('Dropping a wrong object') return False else: item = item.parent().child(item.row(), 0) if (not isinstance(item, _DatasetItemRow)): return False dataset = mimedata.dataset() if silx.io.is_dataset(dataset): item.setDataset(dataset) else: _logger.error('Dropping a wrong object') return False return True return False
Inherited method to handle a drop operation to this model.
src/silx/app/view/CustomNxdataWidget.py
dropMimeData
tifuchs/silx
94
python
def dropMimeData(self, mimedata, action, row, column, parentIndex): if (action == qt.Qt.IgnoreAction): return True if mimedata.hasFormat(Hdf5DatasetMimeData.MIME_TYPE): if ((row != (- 1)) or (column != (- 1))): return False item = self.itemFromIndex(parentIndex) if ((item is None) or (item is self.invisibleRootItem())): dataset = mimedata.dataset() if silx.io.is_dataset(dataset): self.createFromSignal(dataset) elif silx.io.is_group(dataset): nxdata = dataset try: self.createFromNxdata(nxdata) except ValueError: _logger.error('Error while dropping a group as an NXdata') _logger.debug('Backtrace', exc_info=True) return False else: _logger.error('Dropping a wrong object') return False else: item = item.parent().child(item.row(), 0) if (not isinstance(item, _DatasetItemRow)): return False dataset = mimedata.dataset() if silx.io.is_dataset(dataset): item.setDataset(dataset) else: _logger.error('Dropping a wrong object') return False return True return False
def dropMimeData(self, mimedata, action, row, column, parentIndex): if (action == qt.Qt.IgnoreAction): return True if mimedata.hasFormat(Hdf5DatasetMimeData.MIME_TYPE): if ((row != (- 1)) or (column != (- 1))): return False item = self.itemFromIndex(parentIndex) if ((item is None) or (item is self.invisibleRootItem())): dataset = mimedata.dataset() if silx.io.is_dataset(dataset): self.createFromSignal(dataset) elif silx.io.is_group(dataset): nxdata = dataset try: self.createFromNxdata(nxdata) except ValueError: _logger.error('Error while dropping a group as an NXdata') _logger.debug('Backtrace', exc_info=True) return False else: _logger.error('Dropping a wrong object') return False else: item = item.parent().child(item.row(), 0) if (not isinstance(item, _DatasetItemRow)): return False dataset = mimedata.dataset() if silx.io.is_dataset(dataset): item.setDataset(dataset) else: _logger.error('Dropping a wrong object') return False return True return False<|docstring|>Inherited method to handle a drop operation to this model.<|endoftext|>
8a274582ac63596a5ba5ac14ec3706738928c0979c471711dad0ee49518e30b5
def __getNxdataByTitle(self, title): 'Returns an NXdata item by its title, else None.\n\n :rtype: Union[_NxDataItem,None]\n ' for row in range(self.rowCount()): qindex = self.index(row, 0) item = self.itemFromIndex(qindex) if (item.getTitle() == title): return item return None
Returns an NXdata item by its title, else None. :rtype: Union[_NxDataItem,None]
src/silx/app/view/CustomNxdataWidget.py
__getNxdataByTitle
tifuchs/silx
94
python
def __getNxdataByTitle(self, title): 'Returns an NXdata item by its title, else None.\n\n :rtype: Union[_NxDataItem,None]\n ' for row in range(self.rowCount()): qindex = self.index(row, 0) item = self.itemFromIndex(qindex) if (item.getTitle() == title): return item return None
def __getNxdataByTitle(self, title): 'Returns an NXdata item by its title, else None.\n\n :rtype: Union[_NxDataItem,None]\n ' for row in range(self.rowCount()): qindex = self.index(row, 0) item = self.itemFromIndex(qindex) if (item.getTitle() == title): return item return None<|docstring|>Returns an NXdata item by its title, else None. :rtype: Union[_NxDataItem,None]<|endoftext|>
300aaa0fe686a94cf93c546bfd18d8148cf05b75af6bbe43f68a293be0264448
def findFreeNxdataTitle(self): 'Returns an NXdata title which is not yet used.\n\n :rtype: str\n ' for i in range((self.rowCount() + 1)): name = ('NXData #%d' % (i + 1)) group = self.__getNxdataByTitle(name) if (group is None): break return name
Returns an NXdata title which is not yet used. :rtype: str
src/silx/app/view/CustomNxdataWidget.py
findFreeNxdataTitle
tifuchs/silx
94
python
def findFreeNxdataTitle(self): 'Returns an NXdata title which is not yet used.\n\n :rtype: str\n ' for i in range((self.rowCount() + 1)): name = ('NXData #%d' % (i + 1)) group = self.__getNxdataByTitle(name) if (group is None): break return name
def findFreeNxdataTitle(self): 'Returns an NXdata title which is not yet used.\n\n :rtype: str\n ' for i in range((self.rowCount() + 1)): name = ('NXData #%d' % (i + 1)) group = self.__getNxdataByTitle(name) if (group is None): break return name<|docstring|>Returns an NXdata title which is not yet used. :rtype: str<|endoftext|>
58cbcf7a87d537f1ec3a852e54643a75c7718fac585c8da4cb3ba7e1b76299b9
def createNewNxdata(self, name=None): 'Create a new NXdata item.\n\n :param Union[str,None] name: A title for the new NXdata\n ' item = _NxDataItem() if (name is None): name = self.findFreeNxdataTitle() item.setTitle(name) self.appendRow(item.getRowItems())
Create a new NXdata item. :param Union[str,None] name: A title for the new NXdata
src/silx/app/view/CustomNxdataWidget.py
createNewNxdata
tifuchs/silx
94
python
def createNewNxdata(self, name=None): 'Create a new NXdata item.\n\n :param Union[str,None] name: A title for the new NXdata\n ' item = _NxDataItem() if (name is None): name = self.findFreeNxdataTitle() item.setTitle(name) self.appendRow(item.getRowItems())
def createNewNxdata(self, name=None): 'Create a new NXdata item.\n\n :param Union[str,None] name: A title for the new NXdata\n ' item = _NxDataItem() if (name is None): name = self.findFreeNxdataTitle() item.setTitle(name) self.appendRow(item.getRowItems())<|docstring|>Create a new NXdata item. :param Union[str,None] name: A title for the new NXdata<|endoftext|>
bd7cc63348a47ca0689bd0837f9865c1672ba14d2512746b80e2c64431ddd160
def createFromSignal(self, dataset): 'Create a new NXdata item from a signal dataset.\n\n This signal will also define an amount of axes according to its number\n of dimensions.\n\n :param Union[numpy.ndarray,h5py.Dataset,silx.io.commonh5.Dataset] dataset:\n A dataset uses as signal.\n ' item = _NxDataItem() name = self.findFreeNxdataTitle() item.setTitle(name) item.setSignalDataset(dataset) item.setAxesDatasets(([None] * len(dataset.shape))) self.appendRow(item.getRowItems())
Create a new NXdata item from a signal dataset. This signal will also define an amount of axes according to its number of dimensions. :param Union[numpy.ndarray,h5py.Dataset,silx.io.commonh5.Dataset] dataset: A dataset uses as signal.
src/silx/app/view/CustomNxdataWidget.py
createFromSignal
tifuchs/silx
94
python
def createFromSignal(self, dataset): 'Create a new NXdata item from a signal dataset.\n\n This signal will also define an amount of axes according to its number\n of dimensions.\n\n :param Union[numpy.ndarray,h5py.Dataset,silx.io.commonh5.Dataset] dataset:\n A dataset uses as signal.\n ' item = _NxDataItem() name = self.findFreeNxdataTitle() item.setTitle(name) item.setSignalDataset(dataset) item.setAxesDatasets(([None] * len(dataset.shape))) self.appendRow(item.getRowItems())
def createFromSignal(self, dataset): 'Create a new NXdata item from a signal dataset.\n\n This signal will also define an amount of axes according to its number\n of dimensions.\n\n :param Union[numpy.ndarray,h5py.Dataset,silx.io.commonh5.Dataset] dataset:\n A dataset uses as signal.\n ' item = _NxDataItem() name = self.findFreeNxdataTitle() item.setTitle(name) item.setSignalDataset(dataset) item.setAxesDatasets(([None] * len(dataset.shape))) self.appendRow(item.getRowItems())<|docstring|>Create a new NXdata item from a signal dataset. This signal will also define an amount of axes according to its number of dimensions. :param Union[numpy.ndarray,h5py.Dataset,silx.io.commonh5.Dataset] dataset: A dataset uses as signal.<|endoftext|>
471b26ecb58545c3cbde9895e861977ad53c42f758eb3fbd1dfcd5defb1c352a
def createFromNxdata(self, nxdata): 'Create a new custom NXdata item from an existing NXdata group.\n\n If the NXdata is not valid, nothing is created, and an exception is\n returned.\n\n :param Union[h5py.Group,silx.io.commonh5.Group] nxdata: An h5py group\n following the NXData specification.\n :raise ValueError:If `nxdata` is not valid.\n ' validator = silx.io.nxdata.NXdata(nxdata) if validator.is_valid: item = _NxDataItem() title = validator.title if (title in [(None or '')]): title = self.findFreeNxdataTitle() item.setTitle(title) item.setSignalDataset(validator.signal) item.setAxesDatasets(validator.axes) self.appendRow(item.getRowItems()) else: raise ValueError('Not a valid NXdata')
Create a new custom NXdata item from an existing NXdata group. If the NXdata is not valid, nothing is created, and an exception is returned. :param Union[h5py.Group,silx.io.commonh5.Group] nxdata: An h5py group following the NXData specification. :raise ValueError:If `nxdata` is not valid.
src/silx/app/view/CustomNxdataWidget.py
createFromNxdata
tifuchs/silx
94
python
def createFromNxdata(self, nxdata): 'Create a new custom NXdata item from an existing NXdata group.\n\n If the NXdata is not valid, nothing is created, and an exception is\n returned.\n\n :param Union[h5py.Group,silx.io.commonh5.Group] nxdata: An h5py group\n following the NXData specification.\n :raise ValueError:If `nxdata` is not valid.\n ' validator = silx.io.nxdata.NXdata(nxdata) if validator.is_valid: item = _NxDataItem() title = validator.title if (title in [(None or )]): title = self.findFreeNxdataTitle() item.setTitle(title) item.setSignalDataset(validator.signal) item.setAxesDatasets(validator.axes) self.appendRow(item.getRowItems()) else: raise ValueError('Not a valid NXdata')
def createFromNxdata(self, nxdata): 'Create a new custom NXdata item from an existing NXdata group.\n\n If the NXdata is not valid, nothing is created, and an exception is\n returned.\n\n :param Union[h5py.Group,silx.io.commonh5.Group] nxdata: An h5py group\n following the NXData specification.\n :raise ValueError:If `nxdata` is not valid.\n ' validator = silx.io.nxdata.NXdata(nxdata) if validator.is_valid: item = _NxDataItem() title = validator.title if (title in [(None or )]): title = self.findFreeNxdataTitle() item.setTitle(title) item.setSignalDataset(validator.signal) item.setAxesDatasets(validator.axes) self.appendRow(item.getRowItems()) else: raise ValueError('Not a valid NXdata')<|docstring|>Create a new custom NXdata item from an existing NXdata group. If the NXdata is not valid, nothing is created, and an exception is returned. :param Union[h5py.Group,silx.io.commonh5.Group] nxdata: An h5py group following the NXData specification. :raise ValueError:If `nxdata` is not valid.<|endoftext|>
938f621ec00952016d03cbb7a500a76dfcb270f3b9c6ba544367ae2e21d08762
def removeNxdataItem(self, item): 'Remove an NXdata item from this model.\n\n :param _NxDataItem item: An item\n ' if isinstance(item, _NxDataItem): parent = item.parent() assert (parent is None) model = item.model() model.removeRow(item.row()) else: _logger.error('Unexpected item')
Remove an NXdata item from this model. :param _NxDataItem item: An item
src/silx/app/view/CustomNxdataWidget.py
removeNxdataItem
tifuchs/silx
94
python
def removeNxdataItem(self, item): 'Remove an NXdata item from this model.\n\n :param _NxDataItem item: An item\n ' if isinstance(item, _NxDataItem): parent = item.parent() assert (parent is None) model = item.model() model.removeRow(item.row()) else: _logger.error('Unexpected item')
def removeNxdataItem(self, item): 'Remove an NXdata item from this model.\n\n :param _NxDataItem item: An item\n ' if isinstance(item, _NxDataItem): parent = item.parent() assert (parent is None) model = item.model() model.removeRow(item.row()) else: _logger.error('Unexpected item')<|docstring|>Remove an NXdata item from this model. :param _NxDataItem item: An item<|endoftext|>
68e98fc8809b534de170f4218c10c6e48baf9b5f81511868bf07123ad81763f9
def appendAxisToNxdataItem(self, item): 'Append a new axes to this item (or the NXdata item own by this item).\n\n :param Union[_NxDataItem,qt.QStandardItem] item: An item\n ' if ((item is not None) and (not isinstance(item, _NxDataItem))): item = item.parent() nxdataItem = item if isinstance(item, _NxDataItem): datasets = nxdataItem.getAxesDatasets() datasets.append(None) nxdataItem.setAxesDatasets(datasets) else: _logger.error('Unexpected item')
Append a new axes to this item (or the NXdata item own by this item). :param Union[_NxDataItem,qt.QStandardItem] item: An item
src/silx/app/view/CustomNxdataWidget.py
appendAxisToNxdataItem
tifuchs/silx
94
python
def appendAxisToNxdataItem(self, item): 'Append a new axes to this item (or the NXdata item own by this item).\n\n :param Union[_NxDataItem,qt.QStandardItem] item: An item\n ' if ((item is not None) and (not isinstance(item, _NxDataItem))): item = item.parent() nxdataItem = item if isinstance(item, _NxDataItem): datasets = nxdataItem.getAxesDatasets() datasets.append(None) nxdataItem.setAxesDatasets(datasets) else: _logger.error('Unexpected item')
def appendAxisToNxdataItem(self, item): 'Append a new axes to this item (or the NXdata item own by this item).\n\n :param Union[_NxDataItem,qt.QStandardItem] item: An item\n ' if ((item is not None) and (not isinstance(item, _NxDataItem))): item = item.parent() nxdataItem = item if isinstance(item, _NxDataItem): datasets = nxdataItem.getAxesDatasets() datasets.append(None) nxdataItem.setAxesDatasets(datasets) else: _logger.error('Unexpected item')<|docstring|>Append a new axes to this item (or the NXdata item own by this item). :param Union[_NxDataItem,qt.QStandardItem] item: An item<|endoftext|>
ed99d577a48630d7865dd1bd1dc203422bdebd42226c3dda40b2daeea4157f38
def removeAxisItem(self, item): 'Remove an axis item from this model.\n\n :param _DatasetAxisItemRow item: An axis item\n ' if isinstance(item, _DatasetAxisItemRow): axisId = item.getAxisId() nxdataItem = item.parent() datasets = nxdataItem.getAxesDatasets() del datasets[axisId] nxdataItem.setAxesDatasets(datasets) else: _logger.error('Unexpected item')
Remove an axis item from this model. :param _DatasetAxisItemRow item: An axis item
src/silx/app/view/CustomNxdataWidget.py
removeAxisItem
tifuchs/silx
94
python
def removeAxisItem(self, item): 'Remove an axis item from this model.\n\n :param _DatasetAxisItemRow item: An axis item\n ' if isinstance(item, _DatasetAxisItemRow): axisId = item.getAxisId() nxdataItem = item.parent() datasets = nxdataItem.getAxesDatasets() del datasets[axisId] nxdataItem.setAxesDatasets(datasets) else: _logger.error('Unexpected item')
def removeAxisItem(self, item): 'Remove an axis item from this model.\n\n :param _DatasetAxisItemRow item: An axis item\n ' if isinstance(item, _DatasetAxisItemRow): axisId = item.getAxisId() nxdataItem = item.parent() datasets = nxdataItem.getAxesDatasets() del datasets[axisId] nxdataItem.setAxesDatasets(datasets) else: _logger.error('Unexpected item')<|docstring|>Remove an axis item from this model. :param _DatasetAxisItemRow item: An axis item<|endoftext|>
c352ae630dcfe281af0927b36f74490a864aef57a19b77b63333421990a48f53
def __init__(self, parent=None): 'Constructor' super(CustomNxDataToolBar, self).__init__(parent=parent) self.__nxdataWidget = None self.__initContent() self.__currentSelectionChanged(qt.QModelIndex(), qt.QModelIndex())
Constructor
src/silx/app/view/CustomNxdataWidget.py
__init__
tifuchs/silx
94
python
def __init__(self, parent=None): super(CustomNxDataToolBar, self).__init__(parent=parent) self.__nxdataWidget = None self.__initContent() self.__currentSelectionChanged(qt.QModelIndex(), qt.QModelIndex())
def __init__(self, parent=None): super(CustomNxDataToolBar, self).__init__(parent=parent) self.__nxdataWidget = None self.__initContent() self.__currentSelectionChanged(qt.QModelIndex(), qt.QModelIndex())<|docstring|>Constructor<|endoftext|>
1f864967607080df44755290fe06d98c879e2c9b7d0955870f68752d7fdad015
def __initContent(self): 'Create all expected actions and set the content of this toolbar.' action = qt.QAction('Create a new custom NXdata', self) action.setIcon(icons.getQIcon('nxdata-create')) action.triggered.connect(self.__createNewNxdata) self.addAction(action) self.__addNxDataAction = action action = qt.QAction('Remove the selected NXdata', self) action.setIcon(icons.getQIcon('nxdata-remove')) action.triggered.connect(self.__removeSelectedNxdata) self.addAction(action) self.__removeNxDataAction = action self.addSeparator() action = qt.QAction('Create a new axis to the selected NXdata', self) action.setIcon(icons.getQIcon('nxdata-axis-add')) action.triggered.connect(self.__appendNewAxisToSelectedNxdata) self.addAction(action) self.__addNxDataAxisAction = action action = qt.QAction('Remove the selected NXdata axis', self) action.setIcon(icons.getQIcon('nxdata-axis-remove')) action.triggered.connect(self.__removeSelectedAxis) self.addAction(action) self.__removeNxDataAxisAction = action
Create all expected actions and set the content of this toolbar.
src/silx/app/view/CustomNxdataWidget.py
__initContent
tifuchs/silx
94
python
def __initContent(self): action = qt.QAction('Create a new custom NXdata', self) action.setIcon(icons.getQIcon('nxdata-create')) action.triggered.connect(self.__createNewNxdata) self.addAction(action) self.__addNxDataAction = action action = qt.QAction('Remove the selected NXdata', self) action.setIcon(icons.getQIcon('nxdata-remove')) action.triggered.connect(self.__removeSelectedNxdata) self.addAction(action) self.__removeNxDataAction = action self.addSeparator() action = qt.QAction('Create a new axis to the selected NXdata', self) action.setIcon(icons.getQIcon('nxdata-axis-add')) action.triggered.connect(self.__appendNewAxisToSelectedNxdata) self.addAction(action) self.__addNxDataAxisAction = action action = qt.QAction('Remove the selected NXdata axis', self) action.setIcon(icons.getQIcon('nxdata-axis-remove')) action.triggered.connect(self.__removeSelectedAxis) self.addAction(action) self.__removeNxDataAxisAction = action
def __initContent(self): action = qt.QAction('Create a new custom NXdata', self) action.setIcon(icons.getQIcon('nxdata-create')) action.triggered.connect(self.__createNewNxdata) self.addAction(action) self.__addNxDataAction = action action = qt.QAction('Remove the selected NXdata', self) action.setIcon(icons.getQIcon('nxdata-remove')) action.triggered.connect(self.__removeSelectedNxdata) self.addAction(action) self.__removeNxDataAction = action self.addSeparator() action = qt.QAction('Create a new axis to the selected NXdata', self) action.setIcon(icons.getQIcon('nxdata-axis-add')) action.triggered.connect(self.__appendNewAxisToSelectedNxdata) self.addAction(action) self.__addNxDataAxisAction = action action = qt.QAction('Remove the selected NXdata axis', self) action.setIcon(icons.getQIcon('nxdata-axis-remove')) action.triggered.connect(self.__removeSelectedAxis) self.addAction(action) self.__removeNxDataAxisAction = action<|docstring|>Create all expected actions and set the content of this toolbar.<|endoftext|>
334fbe772a1be4c6db21f1e491540a5e22b534ccc4086c327da30b4361b258aa
def __getSelectedItem(self): 'Get the selected item from the linked CustomNxdataWidget.\n\n :rtype: qt.QStandardItem\n ' selectionModel = self.__nxdataWidget.selectionModel() index = selectionModel.currentIndex() if (not index.isValid()): return model = self.__nxdataWidget.model() index = model.index(index.row(), 0, index.parent()) item = model.itemFromIndex(index) return item
Get the selected item from the linked CustomNxdataWidget. :rtype: qt.QStandardItem
src/silx/app/view/CustomNxdataWidget.py
__getSelectedItem
tifuchs/silx
94
python
def __getSelectedItem(self): 'Get the selected item from the linked CustomNxdataWidget.\n\n :rtype: qt.QStandardItem\n ' selectionModel = self.__nxdataWidget.selectionModel() index = selectionModel.currentIndex() if (not index.isValid()): return model = self.__nxdataWidget.model() index = model.index(index.row(), 0, index.parent()) item = model.itemFromIndex(index) return item
def __getSelectedItem(self): 'Get the selected item from the linked CustomNxdataWidget.\n\n :rtype: qt.QStandardItem\n ' selectionModel = self.__nxdataWidget.selectionModel() index = selectionModel.currentIndex() if (not index.isValid()): return model = self.__nxdataWidget.model() index = model.index(index.row(), 0, index.parent()) item = model.itemFromIndex(index) return item<|docstring|>Get the selected item from the linked CustomNxdataWidget. :rtype: qt.QStandardItem<|endoftext|>
8a4ecc616b3161cb11a916bda1c7f5e163cb17a89545973cf33166c8a16eab79
def __createNewNxdata(self): 'Create a new NXdata item to the linked CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() model.createNewNxdata()
Create a new NXdata item to the linked CustomNxdataWidget.
src/silx/app/view/CustomNxdataWidget.py
__createNewNxdata
tifuchs/silx
94
python
def __createNewNxdata(self): if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() model.createNewNxdata()
def __createNewNxdata(self): if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() model.createNewNxdata()<|docstring|>Create a new NXdata item to the linked CustomNxdataWidget.<|endoftext|>
694496991652d0df9765a5807f672d1ef04674a8f251dbe86e6f0c890d64f8a6
def __removeSelectedNxdata(self): 'Remove the NXdata item currently selected in the linked\n CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.removeNxdataItem(item)
Remove the NXdata item currently selected in the linked CustomNxdataWidget.
src/silx/app/view/CustomNxdataWidget.py
__removeSelectedNxdata
tifuchs/silx
94
python
def __removeSelectedNxdata(self): 'Remove the NXdata item currently selected in the linked\n CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.removeNxdataItem(item)
def __removeSelectedNxdata(self): 'Remove the NXdata item currently selected in the linked\n CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.removeNxdataItem(item)<|docstring|>Remove the NXdata item currently selected in the linked CustomNxdataWidget.<|endoftext|>
aff281958728564845867dbc8cebfd580174e62a7fa76f2f3aa453e914c00815
def __appendNewAxisToSelectedNxdata(self): 'Append a new axis to the NXdata item currently selected in the\n linked CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.appendAxisToNxdataItem(item)
Append a new axis to the NXdata item currently selected in the linked CustomNxdataWidget.
src/silx/app/view/CustomNxdataWidget.py
__appendNewAxisToSelectedNxdata
tifuchs/silx
94
python
def __appendNewAxisToSelectedNxdata(self): 'Append a new axis to the NXdata item currently selected in the\n linked CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.appendAxisToNxdataItem(item)
def __appendNewAxisToSelectedNxdata(self): 'Append a new axis to the NXdata item currently selected in the\n linked CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.appendAxisToNxdataItem(item)<|docstring|>Append a new axis to the NXdata item currently selected in the linked CustomNxdataWidget.<|endoftext|>
cd0b6942a53940276141097ec1d072513645512e32aef06dfe2a7e1e759ef30c
def __removeSelectedAxis(self): 'Remove the axis item currently selected in the linked\n CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.removeAxisItem(item)
Remove the axis item currently selected in the linked CustomNxdataWidget.
src/silx/app/view/CustomNxdataWidget.py
__removeSelectedAxis
tifuchs/silx
94
python
def __removeSelectedAxis(self): 'Remove the axis item currently selected in the linked\n CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.removeAxisItem(item)
def __removeSelectedAxis(self): 'Remove the axis item currently selected in the linked\n CustomNxdataWidget.' if (self.__nxdataWidget is None): return model = self.__nxdataWidget.model() item = self.__getSelectedItem() model.removeAxisItem(item)<|docstring|>Remove the axis item currently selected in the linked CustomNxdataWidget.<|endoftext|>
38ed9cc14c7ce96ead9bca0fe040665f1b96f8c9fcf1750eccb8c10904fcc8a2
def setCustomNxDataWidget(self, widget): 'Set the linked CustomNxdataWidget to this toolbar.' assert isinstance(widget, CustomNxdataWidget) if (self.__nxdataWidget is not None): selectionModel = self.__nxdataWidget.selectionModel() selectionModel.currentChanged.disconnect(self.__currentSelectionChanged) self.__nxdataWidget = widget if (self.__nxdataWidget is not None): selectionModel = self.__nxdataWidget.selectionModel() selectionModel.currentChanged.connect(self.__currentSelectionChanged)
Set the linked CustomNxdataWidget to this toolbar.
src/silx/app/view/CustomNxdataWidget.py
setCustomNxDataWidget
tifuchs/silx
94
python
def setCustomNxDataWidget(self, widget): assert isinstance(widget, CustomNxdataWidget) if (self.__nxdataWidget is not None): selectionModel = self.__nxdataWidget.selectionModel() selectionModel.currentChanged.disconnect(self.__currentSelectionChanged) self.__nxdataWidget = widget if (self.__nxdataWidget is not None): selectionModel = self.__nxdataWidget.selectionModel() selectionModel.currentChanged.connect(self.__currentSelectionChanged)
def setCustomNxDataWidget(self, widget): assert isinstance(widget, CustomNxdataWidget) if (self.__nxdataWidget is not None): selectionModel = self.__nxdataWidget.selectionModel() selectionModel.currentChanged.disconnect(self.__currentSelectionChanged) self.__nxdataWidget = widget if (self.__nxdataWidget is not None): selectionModel = self.__nxdataWidget.selectionModel() selectionModel.currentChanged.connect(self.__currentSelectionChanged)<|docstring|>Set the linked CustomNxdataWidget to this toolbar.<|endoftext|>
e6884625a4397f1c78ade93b69beb0c6308623da44eaeec942d378f776abbe68
def __currentSelectionChanged(self, current, previous): 'Update the actions according to the linked CustomNxdataWidget\n item selection' if (not current.isValid()): item = None else: model = self.__nxdataWidget.model() index = model.index(current.row(), 0, current.parent()) item = model.itemFromIndex(index) self.__removeNxDataAction.setEnabled(isinstance(item, _NxDataItem)) self.__removeNxDataAxisAction.setEnabled(isinstance(item, _DatasetAxisItemRow)) self.__addNxDataAxisAction.setEnabled((isinstance(item, _NxDataItem) or isinstance(item, _DatasetItemRow)))
Update the actions according to the linked CustomNxdataWidget item selection
src/silx/app/view/CustomNxdataWidget.py
__currentSelectionChanged
tifuchs/silx
94
python
def __currentSelectionChanged(self, current, previous): 'Update the actions according to the linked CustomNxdataWidget\n item selection' if (not current.isValid()): item = None else: model = self.__nxdataWidget.model() index = model.index(current.row(), 0, current.parent()) item = model.itemFromIndex(index) self.__removeNxDataAction.setEnabled(isinstance(item, _NxDataItem)) self.__removeNxDataAxisAction.setEnabled(isinstance(item, _DatasetAxisItemRow)) self.__addNxDataAxisAction.setEnabled((isinstance(item, _NxDataItem) or isinstance(item, _DatasetItemRow)))
def __currentSelectionChanged(self, current, previous): 'Update the actions according to the linked CustomNxdataWidget\n item selection' if (not current.isValid()): item = None else: model = self.__nxdataWidget.model() index = model.index(current.row(), 0, current.parent()) item = model.itemFromIndex(index) self.__removeNxDataAction.setEnabled(isinstance(item, _NxDataItem)) self.__removeNxDataAxisAction.setEnabled(isinstance(item, _DatasetAxisItemRow)) self.__addNxDataAxisAction.setEnabled((isinstance(item, _NxDataItem) or isinstance(item, _DatasetItemRow)))<|docstring|>Update the actions according to the linked CustomNxdataWidget item selection<|endoftext|>
a64eae4227206b2424517afbc05bc75aa30e133122f288efa42c1eb24ba6e6ae
def __init__(self, parent=None): 'Constructor' super(_HashDropZones, self).__init__(parent) pen = qt.QPen() pen.setColor(qt.QColor('#D0D0D0')) pen.setStyle(qt.Qt.DotLine) pen.setWidth(2) self.__dropPen = pen
Constructor
src/silx/app/view/CustomNxdataWidget.py
__init__
tifuchs/silx
94
python
def __init__(self, parent=None): super(_HashDropZones, self).__init__(parent) pen = qt.QPen() pen.setColor(qt.QColor('#D0D0D0')) pen.setStyle(qt.Qt.DotLine) pen.setWidth(2) self.__dropPen = pen
def __init__(self, parent=None): super(_HashDropZones, self).__init__(parent) pen = qt.QPen() pen.setColor(qt.QColor('#D0D0D0')) pen.setStyle(qt.Qt.DotLine) pen.setWidth(2) self.__dropPen = pen<|docstring|>Constructor<|endoftext|>
fa35990a9438f9d842933083976b08ae7dc79da59202072b4af662c8b73288a8
def paint(self, painter, option, index): '\n Paint the item\n\n :param qt.QPainter painter: A painter\n :param qt.QStyleOptionViewItem option: Options of the item to paint\n :param qt.QModelIndex index: Index of the item to paint\n ' displayDropZone = False if index.isValid(): model = index.model() rowIndex = model.index(index.row(), 0, index.parent()) rowItem = model.itemFromIndex(rowIndex) if isinstance(rowItem, _DatasetItemRow): displayDropZone = (rowItem.getDataset() is None) if displayDropZone: painter.save() if (option.state & qt.QStyle.State_Selected): colorGroup = qt.QPalette.Inactive if (option.state & qt.QStyle.State_Active): colorGroup = qt.QPalette.Active if (not (option.state & qt.QStyle.State_Enabled)): colorGroup = qt.QPalette.Disabled brush = option.palette.brush(colorGroup, qt.QPalette.Highlight) painter.fillRect(option.rect, brush) painter.setPen(self.__dropPen) painter.drawRect(option.rect.adjusted(3, 3, (- 3), (- 3))) painter.restore() else: qt.QStyledItemDelegate.paint(self, painter, option, index)
Paint the item :param qt.QPainter painter: A painter :param qt.QStyleOptionViewItem option: Options of the item to paint :param qt.QModelIndex index: Index of the item to paint
src/silx/app/view/CustomNxdataWidget.py
paint
tifuchs/silx
94
python
def paint(self, painter, option, index): '\n Paint the item\n\n :param qt.QPainter painter: A painter\n :param qt.QStyleOptionViewItem option: Options of the item to paint\n :param qt.QModelIndex index: Index of the item to paint\n ' displayDropZone = False if index.isValid(): model = index.model() rowIndex = model.index(index.row(), 0, index.parent()) rowItem = model.itemFromIndex(rowIndex) if isinstance(rowItem, _DatasetItemRow): displayDropZone = (rowItem.getDataset() is None) if displayDropZone: painter.save() if (option.state & qt.QStyle.State_Selected): colorGroup = qt.QPalette.Inactive if (option.state & qt.QStyle.State_Active): colorGroup = qt.QPalette.Active if (not (option.state & qt.QStyle.State_Enabled)): colorGroup = qt.QPalette.Disabled brush = option.palette.brush(colorGroup, qt.QPalette.Highlight) painter.fillRect(option.rect, brush) painter.setPen(self.__dropPen) painter.drawRect(option.rect.adjusted(3, 3, (- 3), (- 3))) painter.restore() else: qt.QStyledItemDelegate.paint(self, painter, option, index)
def paint(self, painter, option, index): '\n Paint the item\n\n :param qt.QPainter painter: A painter\n :param qt.QStyleOptionViewItem option: Options of the item to paint\n :param qt.QModelIndex index: Index of the item to paint\n ' displayDropZone = False if index.isValid(): model = index.model() rowIndex = model.index(index.row(), 0, index.parent()) rowItem = model.itemFromIndex(rowIndex) if isinstance(rowItem, _DatasetItemRow): displayDropZone = (rowItem.getDataset() is None) if displayDropZone: painter.save() if (option.state & qt.QStyle.State_Selected): colorGroup = qt.QPalette.Inactive if (option.state & qt.QStyle.State_Active): colorGroup = qt.QPalette.Active if (not (option.state & qt.QStyle.State_Enabled)): colorGroup = qt.QPalette.Disabled brush = option.palette.brush(colorGroup, qt.QPalette.Highlight) painter.fillRect(option.rect, brush) painter.setPen(self.__dropPen) painter.drawRect(option.rect.adjusted(3, 3, (- 3), (- 3))) painter.restore() else: qt.QStyledItemDelegate.paint(self, painter, option, index)<|docstring|>Paint the item :param qt.QPainter painter: A painter :param qt.QStyleOptionViewItem option: Options of the item to paint :param qt.QModelIndex index: Index of the item to paint<|endoftext|>
26752adc89935e6f8085a11c5c0de8bbc01b18d27272c4b10eb5a6add133c7e1
def __init__(self, parent=None): 'Constructor' qt.QTreeView.__init__(self, parent=None) self.__model = _Model(self) self.__model.setColumnCount(4) self.__model.setHorizontalHeaderLabels(['Name', 'Dataset', 'Type', 'Shape']) self.setModel(self.__model) self.setItemDelegateForColumn(1, _HashDropZones(self)) self.__model.sigNxdataUpdated.connect(self.__nxdataUpdate) self.__model.rowsAboutToBeRemoved.connect(self.__rowsAboutToBeRemoved) self.__model.rowsAboutToBeInserted.connect(self.__rowsAboutToBeInserted) header = self.header() header.setSectionResizeMode(0, qt.QHeaderView.ResizeToContents) header.setSectionResizeMode(1, qt.QHeaderView.Stretch) header.setSectionResizeMode(2, qt.QHeaderView.ResizeToContents) header.setSectionResizeMode(3, qt.QHeaderView.ResizeToContents) self.setSelectionMode(qt.QAbstractItemView.SingleSelection) self.setDropIndicatorShown(True) self.setDragDropOverwriteMode(True) self.setDragEnabled(True) self.viewport().setAcceptDrops(True) self.setContextMenuPolicy(qt.Qt.CustomContextMenu) self.customContextMenuRequested[qt.QPoint].connect(self.__executeContextMenu)
Constructor
src/silx/app/view/CustomNxdataWidget.py
__init__
tifuchs/silx
94
python
def __init__(self, parent=None): qt.QTreeView.__init__(self, parent=None) self.__model = _Model(self) self.__model.setColumnCount(4) self.__model.setHorizontalHeaderLabels(['Name', 'Dataset', 'Type', 'Shape']) self.setModel(self.__model) self.setItemDelegateForColumn(1, _HashDropZones(self)) self.__model.sigNxdataUpdated.connect(self.__nxdataUpdate) self.__model.rowsAboutToBeRemoved.connect(self.__rowsAboutToBeRemoved) self.__model.rowsAboutToBeInserted.connect(self.__rowsAboutToBeInserted) header = self.header() header.setSectionResizeMode(0, qt.QHeaderView.ResizeToContents) header.setSectionResizeMode(1, qt.QHeaderView.Stretch) header.setSectionResizeMode(2, qt.QHeaderView.ResizeToContents) header.setSectionResizeMode(3, qt.QHeaderView.ResizeToContents) self.setSelectionMode(qt.QAbstractItemView.SingleSelection) self.setDropIndicatorShown(True) self.setDragDropOverwriteMode(True) self.setDragEnabled(True) self.viewport().setAcceptDrops(True) self.setContextMenuPolicy(qt.Qt.CustomContextMenu) self.customContextMenuRequested[qt.QPoint].connect(self.__executeContextMenu)
def __init__(self, parent=None): qt.QTreeView.__init__(self, parent=None) self.__model = _Model(self) self.__model.setColumnCount(4) self.__model.setHorizontalHeaderLabels(['Name', 'Dataset', 'Type', 'Shape']) self.setModel(self.__model) self.setItemDelegateForColumn(1, _HashDropZones(self)) self.__model.sigNxdataUpdated.connect(self.__nxdataUpdate) self.__model.rowsAboutToBeRemoved.connect(self.__rowsAboutToBeRemoved) self.__model.rowsAboutToBeInserted.connect(self.__rowsAboutToBeInserted) header = self.header() header.setSectionResizeMode(0, qt.QHeaderView.ResizeToContents) header.setSectionResizeMode(1, qt.QHeaderView.Stretch) header.setSectionResizeMode(2, qt.QHeaderView.ResizeToContents) header.setSectionResizeMode(3, qt.QHeaderView.ResizeToContents) self.setSelectionMode(qt.QAbstractItemView.SingleSelection) self.setDropIndicatorShown(True) self.setDragDropOverwriteMode(True) self.setDragEnabled(True) self.viewport().setAcceptDrops(True) self.setContextMenuPolicy(qt.Qt.CustomContextMenu) self.customContextMenuRequested[qt.QPoint].connect(self.__executeContextMenu)<|docstring|>Constructor<|endoftext|>
7da30a287a7a9c9eccbaca179dc6e8c5c1892294f3e2cd376426bf2ecc5edff4
def __rowsAboutToBeRemoved(self, parentIndex, start, end): 'Called when an item was removed from the model.' items = [] model = self.model() for index in range(start, end): qindex = model.index(index, 0, parent=parentIndex) item = self.__model.itemFromIndex(qindex) if isinstance(item, _NxDataItem): items.append(item) for item in items: self.sigNxdataItemRemoved.emit(item) self.repaint()
Called when an item was removed from the model.
src/silx/app/view/CustomNxdataWidget.py
__rowsAboutToBeRemoved
tifuchs/silx
94
python
def __rowsAboutToBeRemoved(self, parentIndex, start, end): items = [] model = self.model() for index in range(start, end): qindex = model.index(index, 0, parent=parentIndex) item = self.__model.itemFromIndex(qindex) if isinstance(item, _NxDataItem): items.append(item) for item in items: self.sigNxdataItemRemoved.emit(item) self.repaint()
def __rowsAboutToBeRemoved(self, parentIndex, start, end): items = [] model = self.model() for index in range(start, end): qindex = model.index(index, 0, parent=parentIndex) item = self.__model.itemFromIndex(qindex) if isinstance(item, _NxDataItem): items.append(item) for item in items: self.sigNxdataItemRemoved.emit(item) self.repaint()<|docstring|>Called when an item was removed from the model.<|endoftext|>
723a0023231c39d2454d29efdc7e84f8fe8625f7c73a582c6729387d3c0e25e6
def __nxdataUpdate(self, index): 'Called when a virtual NXdata was updated from the model.' model = self.model() item = model.itemFromIndex(index) self.sigNxdataItemUpdated.emit(item)
Called when a virtual NXdata was updated from the model.
src/silx/app/view/CustomNxdataWidget.py
__nxdataUpdate
tifuchs/silx
94
python
def __nxdataUpdate(self, index): model = self.model() item = model.itemFromIndex(index) self.sigNxdataItemUpdated.emit(item)
def __nxdataUpdate(self, index): model = self.model() item = model.itemFromIndex(index) self.sigNxdataItemUpdated.emit(item)<|docstring|>Called when a virtual NXdata was updated from the model.<|endoftext|>
de286b83ff7089b43178687a51a10ee8be4bc47c250163605d2f03a29017e7b9
def createDefaultContextMenu(self, index): 'Create a default context menu at this position.\n\n :param qt.QModelIndex index: Index of the item\n ' index = self.__model.index(index.row(), 0, parent=index.parent()) item = self.__model.itemFromIndex(index) menu = qt.QMenu() weakself = weakref.proxy(self) if isinstance(item, _NxDataItem): action = qt.QAction('Add a new axis', menu) action.triggered.connect((lambda : weakself.model().appendAxisToNxdataItem(item))) action.setIcon(icons.getQIcon('nxdata-axis-add')) action.setIconVisibleInMenu(True) menu.addAction(action) menu.addSeparator() action = qt.QAction('Remove this NXdata', menu) action.triggered.connect((lambda : weakself.model().removeNxdataItem(item))) action.setIcon(icons.getQIcon('remove')) action.setIconVisibleInMenu(True) menu.addAction(action) else: if isinstance(item, _DatasetItemRow): if (item.getDataset() is not None): action = qt.QAction('Remove this dataset', menu) action.triggered.connect((lambda : item.setDataset(None))) menu.addAction(action) if isinstance(item, _DatasetAxisItemRow): menu.addSeparator() action = qt.QAction('Remove this axis', menu) action.triggered.connect((lambda : weakself.model().removeAxisItem(item))) action.setIcon(icons.getQIcon('remove')) action.setIconVisibleInMenu(True) menu.addAction(action) return menu
Create a default context menu at this position. :param qt.QModelIndex index: Index of the item
src/silx/app/view/CustomNxdataWidget.py
createDefaultContextMenu
tifuchs/silx
94
python
def createDefaultContextMenu(self, index): 'Create a default context menu at this position.\n\n :param qt.QModelIndex index: Index of the item\n ' index = self.__model.index(index.row(), 0, parent=index.parent()) item = self.__model.itemFromIndex(index) menu = qt.QMenu() weakself = weakref.proxy(self) if isinstance(item, _NxDataItem): action = qt.QAction('Add a new axis', menu) action.triggered.connect((lambda : weakself.model().appendAxisToNxdataItem(item))) action.setIcon(icons.getQIcon('nxdata-axis-add')) action.setIconVisibleInMenu(True) menu.addAction(action) menu.addSeparator() action = qt.QAction('Remove this NXdata', menu) action.triggered.connect((lambda : weakself.model().removeNxdataItem(item))) action.setIcon(icons.getQIcon('remove')) action.setIconVisibleInMenu(True) menu.addAction(action) else: if isinstance(item, _DatasetItemRow): if (item.getDataset() is not None): action = qt.QAction('Remove this dataset', menu) action.triggered.connect((lambda : item.setDataset(None))) menu.addAction(action) if isinstance(item, _DatasetAxisItemRow): menu.addSeparator() action = qt.QAction('Remove this axis', menu) action.triggered.connect((lambda : weakself.model().removeAxisItem(item))) action.setIcon(icons.getQIcon('remove')) action.setIconVisibleInMenu(True) menu.addAction(action) return menu
def createDefaultContextMenu(self, index): 'Create a default context menu at this position.\n\n :param qt.QModelIndex index: Index of the item\n ' index = self.__model.index(index.row(), 0, parent=index.parent()) item = self.__model.itemFromIndex(index) menu = qt.QMenu() weakself = weakref.proxy(self) if isinstance(item, _NxDataItem): action = qt.QAction('Add a new axis', menu) action.triggered.connect((lambda : weakself.model().appendAxisToNxdataItem(item))) action.setIcon(icons.getQIcon('nxdata-axis-add')) action.setIconVisibleInMenu(True) menu.addAction(action) menu.addSeparator() action = qt.QAction('Remove this NXdata', menu) action.triggered.connect((lambda : weakself.model().removeNxdataItem(item))) action.setIcon(icons.getQIcon('remove')) action.setIconVisibleInMenu(True) menu.addAction(action) else: if isinstance(item, _DatasetItemRow): if (item.getDataset() is not None): action = qt.QAction('Remove this dataset', menu) action.triggered.connect((lambda : item.setDataset(None))) menu.addAction(action) if isinstance(item, _DatasetAxisItemRow): menu.addSeparator() action = qt.QAction('Remove this axis', menu) action.triggered.connect((lambda : weakself.model().removeAxisItem(item))) action.setIcon(icons.getQIcon('remove')) action.setIconVisibleInMenu(True) menu.addAction(action) return menu<|docstring|>Create a default context menu at this position. :param qt.QModelIndex index: Index of the item<|endoftext|>
14d64c176e840fbe451cfe82bbd75ce61a9dcf1220e62d9f1ff058beae0853a8
def __executeContextMenu(self, point): 'Execute the context menu at this position.' index = self.indexAt(point) menu = self.createDefaultContextMenu(index) if ((menu is None) or menu.isEmpty()): return menu.exec(qt.QCursor.pos())
Execute the context menu at this position.
src/silx/app/view/CustomNxdataWidget.py
__executeContextMenu
tifuchs/silx
94
python
def __executeContextMenu(self, point): index = self.indexAt(point) menu = self.createDefaultContextMenu(index) if ((menu is None) or menu.isEmpty()): return menu.exec(qt.QCursor.pos())
def __executeContextMenu(self, point): index = self.indexAt(point) menu = self.createDefaultContextMenu(index) if ((menu is None) or menu.isEmpty()): return menu.exec(qt.QCursor.pos())<|docstring|>Execute the context menu at this position.<|endoftext|>
534f523b4e0f96f43d70eabcdff320094c0055fe695fe22044324ecc7cd398d9
def removeDatasetsFrom(self, root): '\n Remove all datasets provided by this root\n\n :param root: The root file of datasets to remove\n ' for row in range(self.__model.rowCount()): qindex = self.__model.index(row, 0) item = self.model().itemFromIndex(qindex) edited = False datasets = item.getAxesDatasets() for (i, dataset) in enumerate(datasets): if (dataset is not None): if (dataset.file.filename == root.file.filename): datasets[i] = None edited = True if edited: item.setAxesDatasets(datasets) dataset = item.getSignalDataset() if (dataset is not None): if (dataset.file.filename == root.file.filename): item.setSignalDataset(None)
Remove all datasets provided by this root :param root: The root file of datasets to remove
src/silx/app/view/CustomNxdataWidget.py
removeDatasetsFrom
tifuchs/silx
94
python
def removeDatasetsFrom(self, root): '\n Remove all datasets provided by this root\n\n :param root: The root file of datasets to remove\n ' for row in range(self.__model.rowCount()): qindex = self.__model.index(row, 0) item = self.model().itemFromIndex(qindex) edited = False datasets = item.getAxesDatasets() for (i, dataset) in enumerate(datasets): if (dataset is not None): if (dataset.file.filename == root.file.filename): datasets[i] = None edited = True if edited: item.setAxesDatasets(datasets) dataset = item.getSignalDataset() if (dataset is not None): if (dataset.file.filename == root.file.filename): item.setSignalDataset(None)
def removeDatasetsFrom(self, root): '\n Remove all datasets provided by this root\n\n :param root: The root file of datasets to remove\n ' for row in range(self.__model.rowCount()): qindex = self.__model.index(row, 0) item = self.model().itemFromIndex(qindex) edited = False datasets = item.getAxesDatasets() for (i, dataset) in enumerate(datasets): if (dataset is not None): if (dataset.file.filename == root.file.filename): datasets[i] = None edited = True if edited: item.setAxesDatasets(datasets) dataset = item.getSignalDataset() if (dataset is not None): if (dataset.file.filename == root.file.filename): item.setSignalDataset(None)<|docstring|>Remove all datasets provided by this root :param root: The root file of datasets to remove<|endoftext|>
c2329913793b6a9a24f0439b6639cba076087eec8b43d1e2be8a07516d2086c5
def replaceDatasetsFrom(self, removedRoot, loadedRoot): '\n Replace any dataset from any NXdata items using the same dataset name\n from another root.\n\n Usually used when a file was synchronized.\n\n :param removedRoot: The h5py root file which is replaced\n (which have to be removed)\n :param loadedRoot: The new h5py root file which have to be used\n instread.\n ' for row in range(self.__model.rowCount()): qindex = self.__model.index(row, 0) item = self.model().itemFromIndex(qindex) edited = False datasets = item.getAxesDatasets() for (i, dataset) in enumerate(datasets): newDataset = self.__replaceDatasetRoot(dataset, removedRoot, loadedRoot) if (dataset is not newDataset): datasets[i] = newDataset edited = True if edited: item.setAxesDatasets(datasets) dataset = item.getSignalDataset() newDataset = self.__replaceDatasetRoot(dataset, removedRoot, loadedRoot) if (dataset is not newDataset): item.setSignalDataset(newDataset)
Replace any dataset from any NXdata items using the same dataset name from another root. Usually used when a file was synchronized. :param removedRoot: The h5py root file which is replaced (which have to be removed) :param loadedRoot: The new h5py root file which have to be used instread.
src/silx/app/view/CustomNxdataWidget.py
replaceDatasetsFrom
tifuchs/silx
94
python
def replaceDatasetsFrom(self, removedRoot, loadedRoot): '\n Replace any dataset from any NXdata items using the same dataset name\n from another root.\n\n Usually used when a file was synchronized.\n\n :param removedRoot: The h5py root file which is replaced\n (which have to be removed)\n :param loadedRoot: The new h5py root file which have to be used\n instread.\n ' for row in range(self.__model.rowCount()): qindex = self.__model.index(row, 0) item = self.model().itemFromIndex(qindex) edited = False datasets = item.getAxesDatasets() for (i, dataset) in enumerate(datasets): newDataset = self.__replaceDatasetRoot(dataset, removedRoot, loadedRoot) if (dataset is not newDataset): datasets[i] = newDataset edited = True if edited: item.setAxesDatasets(datasets) dataset = item.getSignalDataset() newDataset = self.__replaceDatasetRoot(dataset, removedRoot, loadedRoot) if (dataset is not newDataset): item.setSignalDataset(newDataset)
def replaceDatasetsFrom(self, removedRoot, loadedRoot): '\n Replace any dataset from any NXdata items using the same dataset name\n from another root.\n\n Usually used when a file was synchronized.\n\n :param removedRoot: The h5py root file which is replaced\n (which have to be removed)\n :param loadedRoot: The new h5py root file which have to be used\n instread.\n ' for row in range(self.__model.rowCount()): qindex = self.__model.index(row, 0) item = self.model().itemFromIndex(qindex) edited = False datasets = item.getAxesDatasets() for (i, dataset) in enumerate(datasets): newDataset = self.__replaceDatasetRoot(dataset, removedRoot, loadedRoot) if (dataset is not newDataset): datasets[i] = newDataset edited = True if edited: item.setAxesDatasets(datasets) dataset = item.getSignalDataset() newDataset = self.__replaceDatasetRoot(dataset, removedRoot, loadedRoot) if (dataset is not newDataset): item.setSignalDataset(newDataset)<|docstring|>Replace any dataset from any NXdata items using the same dataset name from another root. Usually used when a file was synchronized. :param removedRoot: The h5py root file which is replaced (which have to be removed) :param loadedRoot: The new h5py root file which have to be used instread.<|endoftext|>
9950fa6450f49425cfaaaf3378eb644c1e7d1b20ec4513710a7ef514e92475eb
def __replaceDatasetRoot(self, dataset, fromRoot, toRoot): '\n Replace the dataset by the same dataset name from another root.\n ' if (dataset is None): return None if (dataset.file is None): return dataset if (dataset.file.filename == fromRoot.file.filename): try: return toRoot[dataset.name] except Exception: _logger.debug('Backtrace', exc_info=True) return None else: return dataset
Replace the dataset by the same dataset name from another root.
src/silx/app/view/CustomNxdataWidget.py
__replaceDatasetRoot
tifuchs/silx
94
python
def __replaceDatasetRoot(self, dataset, fromRoot, toRoot): '\n \n ' if (dataset is None): return None if (dataset.file is None): return dataset if (dataset.file.filename == fromRoot.file.filename): try: return toRoot[dataset.name] except Exception: _logger.debug('Backtrace', exc_info=True) return None else: return dataset
def __replaceDatasetRoot(self, dataset, fromRoot, toRoot): '\n \n ' if (dataset is None): return None if (dataset.file is None): return dataset if (dataset.file.filename == fromRoot.file.filename): try: return toRoot[dataset.name] except Exception: _logger.debug('Backtrace', exc_info=True) return None else: return dataset<|docstring|>Replace the dataset by the same dataset name from another root.<|endoftext|>
87ee1baeb69f2382c59cb49ad2af3a428493097aa277e12f85ef2ed4134f9630
def selectedItems(self): 'Returns the list of selected items containing NXdata\n\n :rtype: List[qt.QStandardItem]\n ' result = [] for qindex in self.selectedIndexes(): if (qindex.column() != 0): continue if (not qindex.isValid()): continue item = self.__model.itemFromIndex(qindex) if (not isinstance(item, _NxDataItem)): continue result.append(item) return result
Returns the list of selected items containing NXdata :rtype: List[qt.QStandardItem]
src/silx/app/view/CustomNxdataWidget.py
selectedItems
tifuchs/silx
94
python
def selectedItems(self): 'Returns the list of selected items containing NXdata\n\n :rtype: List[qt.QStandardItem]\n ' result = [] for qindex in self.selectedIndexes(): if (qindex.column() != 0): continue if (not qindex.isValid()): continue item = self.__model.itemFromIndex(qindex) if (not isinstance(item, _NxDataItem)): continue result.append(item) return result
def selectedItems(self): 'Returns the list of selected items containing NXdata\n\n :rtype: List[qt.QStandardItem]\n ' result = [] for qindex in self.selectedIndexes(): if (qindex.column() != 0): continue if (not qindex.isValid()): continue item = self.__model.itemFromIndex(qindex) if (not isinstance(item, _NxDataItem)): continue result.append(item) return result<|docstring|>Returns the list of selected items containing NXdata :rtype: List[qt.QStandardItem]<|endoftext|>
b9b1bda246f7db86261ed5d55a78ddca8c87e82e83cc35af72f3dae3709cd49d
def selectedNxdata(self): 'Returns the list of selected NXdata\n\n :rtype: List[silx.io.commonh5.Group]\n ' result = [] for qindex in self.selectedIndexes(): if (qindex.column() != 0): continue if (not qindex.isValid()): continue item = self.__model.itemFromIndex(qindex) if (not isinstance(item, _NxDataItem)): continue result.append(item.getVirtualGroup()) return result
Returns the list of selected NXdata :rtype: List[silx.io.commonh5.Group]
src/silx/app/view/CustomNxdataWidget.py
selectedNxdata
tifuchs/silx
94
python
def selectedNxdata(self): 'Returns the list of selected NXdata\n\n :rtype: List[silx.io.commonh5.Group]\n ' result = [] for qindex in self.selectedIndexes(): if (qindex.column() != 0): continue if (not qindex.isValid()): continue item = self.__model.itemFromIndex(qindex) if (not isinstance(item, _NxDataItem)): continue result.append(item.getVirtualGroup()) return result
def selectedNxdata(self): 'Returns the list of selected NXdata\n\n :rtype: List[silx.io.commonh5.Group]\n ' result = [] for qindex in self.selectedIndexes(): if (qindex.column() != 0): continue if (not qindex.isValid()): continue item = self.__model.itemFromIndex(qindex) if (not isinstance(item, _NxDataItem)): continue result.append(item.getVirtualGroup()) return result<|docstring|>Returns the list of selected NXdata :rtype: List[silx.io.commonh5.Group]<|endoftext|>
c9d013ee0c575b9fd263187f45386cbcdb496ead8d3a2498df63a1d043aafc49
def init_pinyin_alpha_map(): ' initialize map\n\n map pinyin and alphabet to specific number\n \n :return none\n ' global map_cnt pinyin_alpha_map['*'] = 0 map_cnt = 1 for letter in ALPHABET_LIST: pinyin_alpha_map[letter] = map_cnt map_cnt += 1 for pinyin in PINYIN_LIST: pinyin_alpha_map[pinyin] = map_cnt map_cnt += 1
initialize map map pinyin and alphabet to specific number :return none
111900828/main.py
init_pinyin_alpha_map
zsiothsu/FZU2021SE
0
python
def init_pinyin_alpha_map(): ' initialize map\n\n map pinyin and alphabet to specific number\n \n :return none\n ' global map_cnt pinyin_alpha_map['*'] = 0 map_cnt = 1 for letter in ALPHABET_LIST: pinyin_alpha_map[letter] = map_cnt map_cnt += 1 for pinyin in PINYIN_LIST: pinyin_alpha_map[pinyin] = map_cnt map_cnt += 1
def init_pinyin_alpha_map(): ' initialize map\n\n map pinyin and alphabet to specific number\n \n :return none\n ' global map_cnt pinyin_alpha_map['*'] = 0 map_cnt = 1 for letter in ALPHABET_LIST: pinyin_alpha_map[letter] = map_cnt map_cnt += 1 for pinyin in PINYIN_LIST: pinyin_alpha_map[pinyin] = map_cnt map_cnt += 1<|docstring|>initialize map map pinyin and alphabet to specific number :return none<|endoftext|>
747ff9dd065051b40bdb67f5eb539c04090eeb558f7505511f6d81d6a0f3f0f6
def __init__(self, word): ' create a Word object\n :arg\n word[string]: word to be processed\n ' self.original_word = word
create a Word object :arg word[string]: word to be processed
111900828/main.py
__init__
zsiothsu/FZU2021SE
0
python
def __init__(self, word): ' create a Word object\n :arg\n word[string]: word to be processed\n ' self.original_word = word
def __init__(self, word): ' create a Word object\n :arg\n word[string]: word to be processed\n ' self.original_word = word<|docstring|>create a Word object :arg word[string]: word to be processed<|endoftext|>
a6176999c615bd863e8a855890f0fce90f535225b303d5269988520357fd54d4
def confuse(self): " enumerate various confusing of words\n\n for chinese, confusing can be: single Chinese character,\n full spelling pinyin, initial pinyin and dismantling of\n Chinese characters\n There is no confusing on English, a word will be processed\n in letter\n\n :arg\n self.original_word[string]: word to be processed\n :return -> list\n a list of all confusing\n for example, '你好':\n [['ni', 'hao'], ['n', 'i', 'hao'], ['n', 'hao'],\n ['亻', '尔', 'hao'], ['ni', 'h', 'a', 'o'], ['n', 'i', 'h', 'a', 'o'],\n ['n', 'h', 'a', 'o'], ['亻', '尔', 'h', 'a', 'o'], ['ni', 'h'],\n ['n', 'i', 'h'], ['n', 'h'], ['亻', '尔', 'h'],\n ['ni', '女', '子'], ['n', 'i', '女', '子'], ['n', '女', '子'],\n ['亻', '尔', '女', '子']]\n " global map_cnt confuse_enum = [] word = list(self.original_word) for i in range(len(word)): c = word[i] if ((u'一' <= c <= u'龥') or (u'㐀' <= c <= u'䶵')): li = [] pin = lazy_pinyin(c) pin = pin[0] li.append(pin) li.append(list(pin)) li.append(pin[0]) if hanziBreaker.is_breakable(c): hanzi_part = hanziBreaker.get(c) glyph = [] for part in hanzi_part: if (part not in glyph_code_map): glyph_code_map[part] = map_cnt map_cnt = (map_cnt + 1) glyph.append(part) li.append(glyph) word[i] = li else: pass for c in word: if (not isinstance(c, list)): if (len(confuse_enum) == 0): confuse_enum.append([c]) else: for li in confuse_enum: li.append(c) elif (len(confuse_enum) == 0): for one_confuse in c: if (not isinstance(one_confuse, list)): confuse_enum.append([one_confuse]) else: confuse_enum.append(one_confuse) else: pre = confuse_enum new_confuse_enum = [] for one_confuse in c: new_confuse = copy.deepcopy(pre) if (not isinstance(one_confuse, list)): for existed_confuse in new_confuse: existed_confuse.append(one_confuse) else: for existed_confuse in new_confuse: for x in one_confuse: existed_confuse.append(x) new_confuse_enum = (new_confuse_enum + new_confuse) confuse_enum = new_confuse_enum return confuse_enum
enumerate various confusing of words for chinese, confusing can be: single Chinese character, full spelling pinyin, initial pinyin and dismantling of Chinese characters There is no confusing on English, a word will be processed in letter :arg self.original_word[string]: word to be processed :return -> list a list of all confusing for example, '你好': [['ni', 'hao'], ['n', 'i', 'hao'], ['n', 'hao'], ['亻', '尔', 'hao'], ['ni', 'h', 'a', 'o'], ['n', 'i', 'h', 'a', 'o'], ['n', 'h', 'a', 'o'], ['亻', '尔', 'h', 'a', 'o'], ['ni', 'h'], ['n', 'i', 'h'], ['n', 'h'], ['亻', '尔', 'h'], ['ni', '女', '子'], ['n', 'i', '女', '子'], ['n', '女', '子'], ['亻', '尔', '女', '子']]
111900828/main.py
confuse
zsiothsu/FZU2021SE
0
python
def confuse(self): " enumerate various confusing of words\n\n for chinese, confusing can be: single Chinese character,\n full spelling pinyin, initial pinyin and dismantling of\n Chinese characters\n There is no confusing on English, a word will be processed\n in letter\n\n :arg\n self.original_word[string]: word to be processed\n :return -> list\n a list of all confusing\n for example, '你好':\n [['ni', 'hao'], ['n', 'i', 'hao'], ['n', 'hao'],\n ['亻', '尔', 'hao'], ['ni', 'h', 'a', 'o'], ['n', 'i', 'h', 'a', 'o'],\n ['n', 'h', 'a', 'o'], ['亻', '尔', 'h', 'a', 'o'], ['ni', 'h'],\n ['n', 'i', 'h'], ['n', 'h'], ['亻', '尔', 'h'],\n ['ni', '女', '子'], ['n', 'i', '女', '子'], ['n', '女', '子'],\n ['亻', '尔', '女', '子']]\n " global map_cnt confuse_enum = [] word = list(self.original_word) for i in range(len(word)): c = word[i] if ((u'一' <= c <= u'龥') or (u'㐀' <= c <= u'䶵')): li = [] pin = lazy_pinyin(c) pin = pin[0] li.append(pin) li.append(list(pin)) li.append(pin[0]) if hanziBreaker.is_breakable(c): hanzi_part = hanziBreaker.get(c) glyph = [] for part in hanzi_part: if (part not in glyph_code_map): glyph_code_map[part] = map_cnt map_cnt = (map_cnt + 1) glyph.append(part) li.append(glyph) word[i] = li else: pass for c in word: if (not isinstance(c, list)): if (len(confuse_enum) == 0): confuse_enum.append([c]) else: for li in confuse_enum: li.append(c) elif (len(confuse_enum) == 0): for one_confuse in c: if (not isinstance(one_confuse, list)): confuse_enum.append([one_confuse]) else: confuse_enum.append(one_confuse) else: pre = confuse_enum new_confuse_enum = [] for one_confuse in c: new_confuse = copy.deepcopy(pre) if (not isinstance(one_confuse, list)): for existed_confuse in new_confuse: existed_confuse.append(one_confuse) else: for existed_confuse in new_confuse: for x in one_confuse: existed_confuse.append(x) new_confuse_enum = (new_confuse_enum + new_confuse) confuse_enum = new_confuse_enum return confuse_enum
def confuse(self): " enumerate various confusing of words\n\n for chinese, confusing can be: single Chinese character,\n full spelling pinyin, initial pinyin and dismantling of\n Chinese characters\n There is no confusing on English, a word will be processed\n in letter\n\n :arg\n self.original_word[string]: word to be processed\n :return -> list\n a list of all confusing\n for example, '你好':\n [['ni', 'hao'], ['n', 'i', 'hao'], ['n', 'hao'],\n ['亻', '尔', 'hao'], ['ni', 'h', 'a', 'o'], ['n', 'i', 'h', 'a', 'o'],\n ['n', 'h', 'a', 'o'], ['亻', '尔', 'h', 'a', 'o'], ['ni', 'h'],\n ['n', 'i', 'h'], ['n', 'h'], ['亻', '尔', 'h'],\n ['ni', '女', '子'], ['n', 'i', '女', '子'], ['n', '女', '子'],\n ['亻', '尔', '女', '子']]\n " global map_cnt confuse_enum = [] word = list(self.original_word) for i in range(len(word)): c = word[i] if ((u'一' <= c <= u'龥') or (u'㐀' <= c <= u'䶵')): li = [] pin = lazy_pinyin(c) pin = pin[0] li.append(pin) li.append(list(pin)) li.append(pin[0]) if hanziBreaker.is_breakable(c): hanzi_part = hanziBreaker.get(c) glyph = [] for part in hanzi_part: if (part not in glyph_code_map): glyph_code_map[part] = map_cnt map_cnt = (map_cnt + 1) glyph.append(part) li.append(glyph) word[i] = li else: pass for c in word: if (not isinstance(c, list)): if (len(confuse_enum) == 0): confuse_enum.append([c]) else: for li in confuse_enum: li.append(c) elif (len(confuse_enum) == 0): for one_confuse in c: if (not isinstance(one_confuse, list)): confuse_enum.append([one_confuse]) else: confuse_enum.append(one_confuse) else: pre = confuse_enum new_confuse_enum = [] for one_confuse in c: new_confuse = copy.deepcopy(pre) if (not isinstance(one_confuse, list)): for existed_confuse in new_confuse: existed_confuse.append(one_confuse) else: for existed_confuse in new_confuse: for x in one_confuse: existed_confuse.append(x) new_confuse_enum = (new_confuse_enum + new_confuse) confuse_enum = new_confuse_enum return confuse_enum<|docstring|>enumerate various confusing of words for chinese, 
confusing can be: single Chinese character, full spelling pinyin, initial pinyin and dismantling of Chinese characters There is no confusing on English, a word will be processed in letter :arg self.original_word[string]: word to be processed :return -> list a list of all confusing for example, '你好': [['ni', 'hao'], ['n', 'i', 'hao'], ['n', 'hao'], ['亻', '尔', 'hao'], ['ni', 'h', 'a', 'o'], ['n', 'i', 'h', 'a', 'o'], ['n', 'h', 'a', 'o'], ['亻', '尔', 'h', 'a', 'o'], ['ni', 'h'], ['n', 'i', 'h'], ['n', 'h'], ['亻', '尔', 'h'], ['ni', '女', '子'], ['n', 'i', '女', '子'], ['n', '女', '子'], ['亻', '尔', '女', '子']]<|endoftext|>
999ddf5c858e4ae400f33f613a1eb1396875bc766fffb096749b7379c8664b6f
def read_sensitive_words(self, filename): ' sensitive words reader\n\n read sensitive words from given file and put into sensitive_word_list\n\n :arg\n filename[string]: file name\n\n :return none\n\n :exception\n IOError: unable to open the given file\n ' try: with open(filename, 'r+', encoding='utf-8') as words: lines = words.readlines() word_count = 0 for line in lines: line = line.replace('\r', '').replace('\n', '') self.original_sensitive_word_list.append(line) line = line.lower() confuse = Word(line) confused_word_list = confuse.confuse() for confused_word in confused_word_list: word = [] for i in range(len(confused_word)): if (confused_word[i] == ''): continue if (confused_word[i] in pinyin_alpha_map): word.append(pinyin_alpha_map[confused_word[i]]) elif (confused_word[i] in glyph_code_map): word.append(glyph_code_map[confused_word[i]]) self.sensitive_word_list.append((word, word_count)) word_count += 1 except IOError: raise IOError('[word reader] Unable to open the word file') else: self.build_sensitive_word_tree()
sensitive words reader read sensitive words from given file and put into sensitive_word_list :arg filename[string]: file name :return none :exception IOError: unable to open the given file
111900828/main.py
read_sensitive_words
zsiothsu/FZU2021SE
0
python
def read_sensitive_words(self, filename): ' sensitive words reader\n\n read sensitive words from given file and put into sensitive_word_list\n\n :arg\n filename[string]: file name\n\n :return none\n\n :exception\n IOError: unable to open the given file\n ' try: with open(filename, 'r+', encoding='utf-8') as words: lines = words.readlines() word_count = 0 for line in lines: line = line.replace('\r', ).replace('\n', ) self.original_sensitive_word_list.append(line) line = line.lower() confuse = Word(line) confused_word_list = confuse.confuse() for confused_word in confused_word_list: word = [] for i in range(len(confused_word)): if (confused_word[i] == ): continue if (confused_word[i] in pinyin_alpha_map): word.append(pinyin_alpha_map[confused_word[i]]) elif (confused_word[i] in glyph_code_map): word.append(glyph_code_map[confused_word[i]]) self.sensitive_word_list.append((word, word_count)) word_count += 1 except IOError: raise IOError('[word reader] Unable to open the word file') else: self.build_sensitive_word_tree()
def read_sensitive_words(self, filename): ' sensitive words reader\n\n read sensitive words from given file and put into sensitive_word_list\n\n :arg\n filename[string]: file name\n\n :return none\n\n :exception\n IOError: unable to open the given file\n ' try: with open(filename, 'r+', encoding='utf-8') as words: lines = words.readlines() word_count = 0 for line in lines: line = line.replace('\r', ).replace('\n', ) self.original_sensitive_word_list.append(line) line = line.lower() confuse = Word(line) confused_word_list = confuse.confuse() for confused_word in confused_word_list: word = [] for i in range(len(confused_word)): if (confused_word[i] == ): continue if (confused_word[i] in pinyin_alpha_map): word.append(pinyin_alpha_map[confused_word[i]]) elif (confused_word[i] in glyph_code_map): word.append(glyph_code_map[confused_word[i]]) self.sensitive_word_list.append((word, word_count)) word_count += 1 except IOError: raise IOError('[word reader] Unable to open the word file') else: self.build_sensitive_word_tree()<|docstring|>sensitive words reader read sensitive words from given file and put into sensitive_word_list :arg filename[string]: file name :return none :exception IOError: unable to open the given file<|endoftext|>
79d592233d9d09515141698a6020b9984a24342152c06519cd618bdcdd57557d
def build_sensitive_word_tree(self): ' build words tree\n\n using a trie tree as main structure storing sensitive words\n\n :return none\n ' for (index, word_count_tuple) in enumerate(self.sensitive_word_list): word = word_count_tuple[0] current = self.sensitive_dict for (i, c) in enumerate(word): if (c not in current): child = {'end': False} current[c] = child current = child else: child = current[c] current = child if (i == (len(word) - 1)): current['end'] = True current['word'] = word_count_tuple[1]
build words tree using a trie tree as main structure storing sensitive words :return none
111900828/main.py
build_sensitive_word_tree
zsiothsu/FZU2021SE
0
python
def build_sensitive_word_tree(self): ' build words tree\n\n using a trie tree as main structure storing sensitive words\n\n :return none\n ' for (index, word_count_tuple) in enumerate(self.sensitive_word_list): word = word_count_tuple[0] current = self.sensitive_dict for (i, c) in enumerate(word): if (c not in current): child = {'end': False} current[c] = child current = child else: child = current[c] current = child if (i == (len(word) - 1)): current['end'] = True current['word'] = word_count_tuple[1]
def build_sensitive_word_tree(self): ' build words tree\n\n using a trie tree as main structure storing sensitive words\n\n :return none\n ' for (index, word_count_tuple) in enumerate(self.sensitive_word_list): word = word_count_tuple[0] current = self.sensitive_dict for (i, c) in enumerate(word): if (c not in current): child = {'end': False} current[c] = child current = child else: child = current[c] current = child if (i == (len(word) - 1)): current['end'] = True current['word'] = word_count_tuple[1]<|docstring|>build words tree using a trie tree as main structure storing sensitive words :return none<|endoftext|>
83d402817fb7ed253dd6ffce3dc828cda20fa78c755cac222109a30cb742692e
def logger(self, begin, end, index): ' logger\n\n Record results\n\n :arg\n begin: begin index of the word at original text\n end: end index of the word at original text\n index: the order of word in sensitive words list\n\n :return none\n ' if (len(self.result) != 0): if ((begin == self.result[(- 1)][3]) and (self.__lineno == self.result[(- 1)][0])): self.result.pop() self.total -= 1 self.result.append((self.__lineno, self.original_sensitive_word_list[index], self.__cline_org[begin:end], begin)) self.total += 1
logger Record results :arg begin: begin index of the word at original text end: end index of the word at original text index: the order of word in sensitive words list :return none
111900828/main.py
logger
zsiothsu/FZU2021SE
0
python
def logger(self, begin, end, index): ' logger\n\n Record results\n\n :arg\n begin: begin index of the word at original text\n end: end index of the word at original text\n index: the order of word in sensitive words list\n\n :return none\n ' if (len(self.result) != 0): if ((begin == self.result[(- 1)][3]) and (self.__lineno == self.result[(- 1)][0])): self.result.pop() self.total -= 1 self.result.append((self.__lineno, self.original_sensitive_word_list[index], self.__cline_org[begin:end], begin)) self.total += 1
def logger(self, begin, end, index): ' logger\n\n Record results\n\n :arg\n begin: begin index of the word at original text\n end: end index of the word at original text\n index: the order of word in sensitive words list\n\n :return none\n ' if (len(self.result) != 0): if ((begin == self.result[(- 1)][3]) and (self.__lineno == self.result[(- 1)][0])): self.result.pop() self.total -= 1 self.result.append((self.__lineno, self.original_sensitive_word_list[index], self.__cline_org[begin:end], begin)) self.total += 1<|docstring|>logger Record results :arg begin: begin index of the word at original text end: end index of the word at original text index: the order of word in sensitive words list :return none<|endoftext|>
fdd58bb22423cf305b558beae1b7f6a22931b434aebaffdbcde4982734453139
def output(self, filename): ' answer export\n\n export answer\n\n :arg\n filename[string]: output file\n\n :return none\n ' try: with open(filename, 'w+', encoding='utf-8') as ans: print('Total: {}'.format(self.total), file=ans) for i in self.result: print('Line{}: <{}> {}'.format(i[0], i[1], i[2]), file=ans) except IOError: raise IOError('[answer export] Unable to open ans file')
answer export export answer :arg filename[string]: output file :return none
111900828/main.py
output
zsiothsu/FZU2021SE
0
python
def output(self, filename): ' answer export\n\n export answer\n\n :arg\n filename[string]: output file\n\n :return none\n ' try: with open(filename, 'w+', encoding='utf-8') as ans: print('Total: {}'.format(self.total), file=ans) for i in self.result: print('Line{}: <{}> {}'.format(i[0], i[1], i[2]), file=ans) except IOError: raise IOError('[answer export] Unable to open ans file')
def output(self, filename): ' answer export\n\n export answer\n\n :arg\n filename[string]: output file\n\n :return none\n ' try: with open(filename, 'w+', encoding='utf-8') as ans: print('Total: {}'.format(self.total), file=ans) for i in self.result: print('Line{}: <{}> {}'.format(i[0], i[1], i[2]), file=ans) except IOError: raise IOError('[answer export] Unable to open ans file')<|docstring|>answer export export answer :arg filename[string]: output file :return none<|endoftext|>
bf593df06c0a267c30359a1e493972603ec1292c92cae2640722d1fedd736e76
def filter_line(self, sentence): ' filter a single line\n\n filter a single line. cannot detect sensitive words in\n two different lines at the same time\n\n :arg\n sentence[string]: text to be detected\n\n :return -> set\n the starting index of the answer\n ' current = self.sensitive_dict word_begin_index = 0 ans_set = set() fail_pointer_stack: (int, dict, int) = [] i = 0 while (i < len(sentence)): c = sentence[i] if (c == '*'): i = (i + 1) continue if (c in glyph_code_map): pinyin_code = pinyin_alpha_map[lazy_pinyin(c)[0]] glyph_code = glyph_code_map[c] is_pinyin_code_in_current = (pinyin_code in current) is_glyph_code_in_current = (glyph_code in current) if ((not is_pinyin_code_in_current) and (not is_glyph_code_in_current)): current = self.sensitive_dict word_begin_index = 0 if is_pinyin_code_in_current: if (current == self.sensitive_dict): word_begin_index = i if is_glyph_code_in_current: fail_pointer_stack.append(((i + 1), current[glyph_code], word_begin_index)) current = current[pinyin_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) elif is_glyph_code_in_current: if (current == self.sensitive_dict): word_begin_index = i current = current[glyph_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) elif (len(fail_pointer_stack) != 0): i = fail_pointer_stack[(- 1)][0] current = fail_pointer_stack[(- 1)][1] word_begin_index = fail_pointer_stack[(- 1)][2] fail_pointer_stack.pop() continue else: current = self.sensitive_dict word_begin_index = 0 else: pinyin_code = pinyin_alpha_map[lazy_pinyin(c)[0]] if (pinyin_code not in current): current = self.sensitive_dict word_begin_index = 0 if (pinyin_code in current): if (current == self.sensitive_dict): word_begin_index = i current = current[pinyin_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) i = (i + 1) continue if 
(len(fail_pointer_stack) != 0): i = fail_pointer_stack[(- 1)][0] current = fail_pointer_stack[(- 1)][1] word_begin_index = fail_pointer_stack[(- 1)][2] fail_pointer_stack.pop() else: current = self.sensitive_dict word_begin_index = 0 i += 1 return ans_set
filter a single line filter a single line. cannot detect sensitive words in two different lines at the same time :arg sentence[string]: text to be detected :return -> set the starting index of the answer
111900828/main.py
filter_line
zsiothsu/FZU2021SE
0
python
def filter_line(self, sentence): ' filter a single line\n\n filter a single line. cannot detect sensitive words in\n two different lines at the same time\n\n :arg\n sentence[string]: text to be detected\n\n :return -> set\n the starting index of the answer\n ' current = self.sensitive_dict word_begin_index = 0 ans_set = set() fail_pointer_stack: (int, dict, int) = [] i = 0 while (i < len(sentence)): c = sentence[i] if (c == '*'): i = (i + 1) continue if (c in glyph_code_map): pinyin_code = pinyin_alpha_map[lazy_pinyin(c)[0]] glyph_code = glyph_code_map[c] is_pinyin_code_in_current = (pinyin_code in current) is_glyph_code_in_current = (glyph_code in current) if ((not is_pinyin_code_in_current) and (not is_glyph_code_in_current)): current = self.sensitive_dict word_begin_index = 0 if is_pinyin_code_in_current: if (current == self.sensitive_dict): word_begin_index = i if is_glyph_code_in_current: fail_pointer_stack.append(((i + 1), current[glyph_code], word_begin_index)) current = current[pinyin_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) elif is_glyph_code_in_current: if (current == self.sensitive_dict): word_begin_index = i current = current[glyph_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) elif (len(fail_pointer_stack) != 0): i = fail_pointer_stack[(- 1)][0] current = fail_pointer_stack[(- 1)][1] word_begin_index = fail_pointer_stack[(- 1)][2] fail_pointer_stack.pop() continue else: current = self.sensitive_dict word_begin_index = 0 else: pinyin_code = pinyin_alpha_map[lazy_pinyin(c)[0]] if (pinyin_code not in current): current = self.sensitive_dict word_begin_index = 0 if (pinyin_code in current): if (current == self.sensitive_dict): word_begin_index = i current = current[pinyin_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) i = (i + 1) continue if 
(len(fail_pointer_stack) != 0): i = fail_pointer_stack[(- 1)][0] current = fail_pointer_stack[(- 1)][1] word_begin_index = fail_pointer_stack[(- 1)][2] fail_pointer_stack.pop() else: current = self.sensitive_dict word_begin_index = 0 i += 1 return ans_set
def filter_line(self, sentence): ' filter a single line\n\n filter a single line. cannot detect sensitive words in\n two different lines at the same time\n\n :arg\n sentence[string]: text to be detected\n\n :return -> set\n the starting index of the answer\n ' current = self.sensitive_dict word_begin_index = 0 ans_set = set() fail_pointer_stack: (int, dict, int) = [] i = 0 while (i < len(sentence)): c = sentence[i] if (c == '*'): i = (i + 1) continue if (c in glyph_code_map): pinyin_code = pinyin_alpha_map[lazy_pinyin(c)[0]] glyph_code = glyph_code_map[c] is_pinyin_code_in_current = (pinyin_code in current) is_glyph_code_in_current = (glyph_code in current) if ((not is_pinyin_code_in_current) and (not is_glyph_code_in_current)): current = self.sensitive_dict word_begin_index = 0 if is_pinyin_code_in_current: if (current == self.sensitive_dict): word_begin_index = i if is_glyph_code_in_current: fail_pointer_stack.append(((i + 1), current[glyph_code], word_begin_index)) current = current[pinyin_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) elif is_glyph_code_in_current: if (current == self.sensitive_dict): word_begin_index = i current = current[glyph_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) elif (len(fail_pointer_stack) != 0): i = fail_pointer_stack[(- 1)][0] current = fail_pointer_stack[(- 1)][1] word_begin_index = fail_pointer_stack[(- 1)][2] fail_pointer_stack.pop() continue else: current = self.sensitive_dict word_begin_index = 0 else: pinyin_code = pinyin_alpha_map[lazy_pinyin(c)[0]] if (pinyin_code not in current): current = self.sensitive_dict word_begin_index = 0 if (pinyin_code in current): if (current == self.sensitive_dict): word_begin_index = i current = current[pinyin_code] if current['end']: self.logger(word_begin_index, (i + 1), current['word']) ans_set.add(word_begin_index) i = (i + 1) continue if 
(len(fail_pointer_stack) != 0): i = fail_pointer_stack[(- 1)][0] current = fail_pointer_stack[(- 1)][1] word_begin_index = fail_pointer_stack[(- 1)][2] fail_pointer_stack.pop() else: current = self.sensitive_dict word_begin_index = 0 i += 1 return ans_set<|docstring|>filter a single line filter a single line. cannot detect sensitive words in two different lines at the same time :arg sentence[string]: text to be detected :return -> set the starting index of the answer<|endoftext|>
e53ee852434c29dda3d3bc6ce019317190d68d4dce9d421801563100523bf4ca
def _wrap_children(self, children): ' Wrap any Widgets of a list of child layouts in a WidgetBox.\n This allows for the convenience of just spelling Row(button1, button2).\n ' from .widgets.widget import Widget wrapped_children = [] for child in children: if isinstance(child, Widget): child = WidgetBox(children=[child], responsive=child.responsive, width=child.width, height=child.height, disabled=child.disabled) wrapped_children.append(child) return wrapped_children
Wrap any Widgets of a list of child layouts in a WidgetBox. This allows for the convenience of just spelling Row(button1, button2).
bokeh/models/layouts.py
_wrap_children
andreagrant/bokehDev
0
python
def _wrap_children(self, children): ' Wrap any Widgets of a list of child layouts in a WidgetBox.\n This allows for the convenience of just spelling Row(button1, button2).\n ' from .widgets.widget import Widget wrapped_children = [] for child in children: if isinstance(child, Widget): child = WidgetBox(children=[child], responsive=child.responsive, width=child.width, height=child.height, disabled=child.disabled) wrapped_children.append(child) return wrapped_children
def _wrap_children(self, children): ' Wrap any Widgets of a list of child layouts in a WidgetBox.\n This allows for the convenience of just spelling Row(button1, button2).\n ' from .widgets.widget import Widget wrapped_children = [] for child in children: if isinstance(child, Widget): child = WidgetBox(children=[child], responsive=child.responsive, width=child.width, height=child.height, disabled=child.disabled) wrapped_children.append(child) return wrapped_children<|docstring|>Wrap any Widgets of a list of child layouts in a WidgetBox. This allows for the convenience of just spelling Row(button1, button2).<|endoftext|>
9e872e50563ece6ca0a78fb4baf2710b3acb29bd423c97732525ec4638bd1827
@task def build(ctx, rebuild=False, race=False, precompile_only=False, use_embedded_libs=False, build_include=None, build_exclude=None, puppy=False, use_venv=False): '\n Build the trace agent.\n ' if (sys.platform == 'win32'): ver = get_version_numeric_only(ctx) (maj_ver, min_ver, patch_ver) = ver.split('.') ctx.run('windmc --target pe-x86-64 -r cmd/trace-agent/windows_resources cmd/trace-agent/windows_resources/trace-agent-msg.mc') ctx.run('windres --define MAJ_VER={maj_ver} --define MIN_VER={min_ver} --define PATCH_VER={patch_ver} -i cmd/trace-agent/windows_resources/trace-agent.rc --target=pe-x86-64 -O coff -o cmd/trace-agent/rsrc.syso'.format(maj_ver=maj_ver, min_ver=min_ver, patch_ver=patch_ver)) (ldflags, gcflags, env) = get_build_flags(ctx, use_embedded_libs=use_embedded_libs, use_venv=use_venv) build_include = (DEFAULT_BUILD_TAGS if (build_include is None) else build_include.split(',')) build_exclude = ([] if (build_exclude is None) else build_exclude.split(',')) if puppy: build_tags = get_default_build_tags(puppy=True) else: build_tags = get_build_tags(build_include, build_exclude) cmd = 'go build {race_opt} {build_type} -tags "{go_build_tags}" ' cmd += '-o {agent_bin} -gcflags="{gcflags}" -ldflags="{ldflags}" {REPO_PATH}/cmd/trace-agent' args = {'race_opt': ('-race' if race else ''), 'build_type': ('-a' if rebuild else ('-i' if precompile_only else '')), 'go_build_tags': ' '.join(build_tags), 'agent_bin': os.path.join(BIN_PATH, bin_name('trace-agent', android=False)), 'gcflags': gcflags, 'ldflags': ldflags, 'REPO_PATH': REPO_PATH} ctx.run('go generate {REPO_PATH}/pkg/trace/info'.format(**args), env=env) ctx.run(cmd.format(**args), env=env)
Build the trace agent.
tasks/trace_agent.py
build
RyanGordon/datadog-agent
1
python
@task def build(ctx, rebuild=False, race=False, precompile_only=False, use_embedded_libs=False, build_include=None, build_exclude=None, puppy=False, use_venv=False): '\n \n ' if (sys.platform == 'win32'): ver = get_version_numeric_only(ctx) (maj_ver, min_ver, patch_ver) = ver.split('.') ctx.run('windmc --target pe-x86-64 -r cmd/trace-agent/windows_resources cmd/trace-agent/windows_resources/trace-agent-msg.mc') ctx.run('windres --define MAJ_VER={maj_ver} --define MIN_VER={min_ver} --define PATCH_VER={patch_ver} -i cmd/trace-agent/windows_resources/trace-agent.rc --target=pe-x86-64 -O coff -o cmd/trace-agent/rsrc.syso'.format(maj_ver=maj_ver, min_ver=min_ver, patch_ver=patch_ver)) (ldflags, gcflags, env) = get_build_flags(ctx, use_embedded_libs=use_embedded_libs, use_venv=use_venv) build_include = (DEFAULT_BUILD_TAGS if (build_include is None) else build_include.split(',')) build_exclude = ([] if (build_exclude is None) else build_exclude.split(',')) if puppy: build_tags = get_default_build_tags(puppy=True) else: build_tags = get_build_tags(build_include, build_exclude) cmd = 'go build {race_opt} {build_type} -tags "{go_build_tags}" ' cmd += '-o {agent_bin} -gcflags="{gcflags}" -ldflags="{ldflags}" {REPO_PATH}/cmd/trace-agent' args = {'race_opt': ('-race' if race else ), 'build_type': ('-a' if rebuild else ('-i' if precompile_only else )), 'go_build_tags': ' '.join(build_tags), 'agent_bin': os.path.join(BIN_PATH, bin_name('trace-agent', android=False)), 'gcflags': gcflags, 'ldflags': ldflags, 'REPO_PATH': REPO_PATH} ctx.run('go generate {REPO_PATH}/pkg/trace/info'.format(**args), env=env) ctx.run(cmd.format(**args), env=env)
@task def build(ctx, rebuild=False, race=False, precompile_only=False, use_embedded_libs=False, build_include=None, build_exclude=None, puppy=False, use_venv=False): '\n \n ' if (sys.platform == 'win32'): ver = get_version_numeric_only(ctx) (maj_ver, min_ver, patch_ver) = ver.split('.') ctx.run('windmc --target pe-x86-64 -r cmd/trace-agent/windows_resources cmd/trace-agent/windows_resources/trace-agent-msg.mc') ctx.run('windres --define MAJ_VER={maj_ver} --define MIN_VER={min_ver} --define PATCH_VER={patch_ver} -i cmd/trace-agent/windows_resources/trace-agent.rc --target=pe-x86-64 -O coff -o cmd/trace-agent/rsrc.syso'.format(maj_ver=maj_ver, min_ver=min_ver, patch_ver=patch_ver)) (ldflags, gcflags, env) = get_build_flags(ctx, use_embedded_libs=use_embedded_libs, use_venv=use_venv) build_include = (DEFAULT_BUILD_TAGS if (build_include is None) else build_include.split(',')) build_exclude = ([] if (build_exclude is None) else build_exclude.split(',')) if puppy: build_tags = get_default_build_tags(puppy=True) else: build_tags = get_build_tags(build_include, build_exclude) cmd = 'go build {race_opt} {build_type} -tags "{go_build_tags}" ' cmd += '-o {agent_bin} -gcflags="{gcflags}" -ldflags="{ldflags}" {REPO_PATH}/cmd/trace-agent' args = {'race_opt': ('-race' if race else ), 'build_type': ('-a' if rebuild else ('-i' if precompile_only else )), 'go_build_tags': ' '.join(build_tags), 'agent_bin': os.path.join(BIN_PATH, bin_name('trace-agent', android=False)), 'gcflags': gcflags, 'ldflags': ldflags, 'REPO_PATH': REPO_PATH} ctx.run('go generate {REPO_PATH}/pkg/trace/info'.format(**args), env=env) ctx.run(cmd.format(**args), env=env)<|docstring|>Build the trace agent.<|endoftext|>
701aa08d10f27bf03b183e41234c005adf25e5372aa1a0fa8eb4e07f9bedaa45
@task def integration_tests(ctx, install_deps=False, race=False, remote_docker=False): '\n Run integration tests for trace agent\n ' if install_deps: deps(ctx) test_args = {'go_build_tags': ' '.join(get_default_build_tags()), 'race_opt': ('-race' if race else ''), 'exec_opts': ''} if remote_docker: test_args['exec_opts'] = '-exec "inv docker.dockerize-test"' go_cmd = 'INTEGRATION=yes go test {race_opt} -tags "{go_build_tags}" {exec_opts}'.format(**test_args) prefixes = ['./pkg/trace/test/testsuite/...'] for prefix in prefixes: ctx.run('{} {}'.format(go_cmd, prefix))
Run integration tests for trace agent
tasks/trace_agent.py
integration_tests
RyanGordon/datadog-agent
1
python
@task def integration_tests(ctx, install_deps=False, race=False, remote_docker=False): '\n \n ' if install_deps: deps(ctx) test_args = {'go_build_tags': ' '.join(get_default_build_tags()), 'race_opt': ('-race' if race else ), 'exec_opts': } if remote_docker: test_args['exec_opts'] = '-exec "inv docker.dockerize-test"' go_cmd = 'INTEGRATION=yes go test {race_opt} -tags "{go_build_tags}" {exec_opts}'.format(**test_args) prefixes = ['./pkg/trace/test/testsuite/...'] for prefix in prefixes: ctx.run('{} {}'.format(go_cmd, prefix))
@task def integration_tests(ctx, install_deps=False, race=False, remote_docker=False): '\n \n ' if install_deps: deps(ctx) test_args = {'go_build_tags': ' '.join(get_default_build_tags()), 'race_opt': ('-race' if race else ), 'exec_opts': } if remote_docker: test_args['exec_opts'] = '-exec "inv docker.dockerize-test"' go_cmd = 'INTEGRATION=yes go test {race_opt} -tags "{go_build_tags}" {exec_opts}'.format(**test_args) prefixes = ['./pkg/trace/test/testsuite/...'] for prefix in prefixes: ctx.run('{} {}'.format(go_cmd, prefix))<|docstring|>Run integration tests for trace agent<|endoftext|>
3080fa745cab1d2b018cbfc1202f62999461994bbcb6306e451a51a70df973e4
def putCoords() -> None: 'Obtiene, pone y numera en el grafico los puntos con las coordenadas a recorrer' fig = plt.gcf() fig.canvas.manager.set_window_title('Visualizador de trayectoria para soluciones TSP') for p in coords: x.append(p.x) y.append(p.y) ax.scatter(x, y, color='red', s=70, edgecolors='black', label='Punto Normal') ax.set_title('Visualizacion del Tour') ax1.grid(color='black', linestyle='-', linewidth=0.1) ax1.set_title('Variacion por iteracion') ax1.set_ylabel('Costo') ax1.set_xlabel('Iteraciones\n\n') ax.grid(color='black', linestyle='-', linewidth=0.1) for i in range(len(coords)): ax.annotate(i, xy=(x[i], y[i]), xytext=(x[i], (y[i] + 0.05))) plt.tight_layout()
Obtiene, pone y numera en el grafico los puntos con las coordenadas a recorrer
src/tspf/plot.py
putCoords
Javernaver/ProyectoTitulo
2
python
def putCoords() -> None: fig = plt.gcf() fig.canvas.manager.set_window_title('Visualizador de trayectoria para soluciones TSP') for p in coords: x.append(p.x) y.append(p.y) ax.scatter(x, y, color='red', s=70, edgecolors='black', label='Punto Normal') ax.set_title('Visualizacion del Tour') ax1.grid(color='black', linestyle='-', linewidth=0.1) ax1.set_title('Variacion por iteracion') ax1.set_ylabel('Costo') ax1.set_xlabel('Iteraciones\n\n') ax.grid(color='black', linestyle='-', linewidth=0.1) for i in range(len(coords)): ax.annotate(i, xy=(x[i], y[i]), xytext=(x[i], (y[i] + 0.05))) plt.tight_layout()
def putCoords() -> None: fig = plt.gcf() fig.canvas.manager.set_window_title('Visualizador de trayectoria para soluciones TSP') for p in coords: x.append(p.x) y.append(p.y) ax.scatter(x, y, color='red', s=70, edgecolors='black', label='Punto Normal') ax.set_title('Visualizacion del Tour') ax1.grid(color='black', linestyle='-', linewidth=0.1) ax1.set_title('Variacion por iteracion') ax1.set_ylabel('Costo') ax1.set_xlabel('Iteraciones\n\n') ax.grid(color='black', linestyle='-', linewidth=0.1) for i in range(len(coords)): ax.annotate(i, xy=(x[i], y[i]), xytext=(x[i], (y[i] + 0.05))) plt.tight_layout()<|docstring|>Obtiene, pone y numera en el grafico los puntos con las coordenadas a recorrer<|endoftext|>
b24c045f3e55c4fee8a65f40c1bf83aa6d666c20fe8319161b47c54a99d9110c
def generateMap(i: int) -> None: 'Genera la visualizacion uniendo los puntos con las coordenadas en cada tour de la trayectoria' if (i >= len(trajectory)): return tour = trajectory[i].tour clearAnnotations() textstr = '\n'.join((f'Tour: {trajectory[i].tour}', f'Costo: {trajectory[i].cost}', f'Iteraciones: {trajectory[i].iterations}', f'Evaluaciones: {trajectory[i].evaluations}')) if (trajectory[i].temperature >= 0): textstr += f''' Temperatura: {trajectory[i].temperature:.2f}''' if ((trajectory[i].average > 0) and (trajectory[i].deviation > 0)): textstr += f''' Promedio Poblacion: {trajectory[i].average:.2f} Desviacion Estandar Poblacion: {trajectory[i].deviation:.2f}''' props = dict(boxstyle='round', facecolor='lightblue', alpha=0.77) a = ax.text(0.01, 0.98, textstr, transform=ax.transAxes, fontsize=12, verticalalignment='top', bbox=props) annotations.append(a) a = ax.scatter(x[tour[0]], y[tour[0]], s=70, edgecolors='black', color='lime', label='Punto de Partida') annotations.append(a) a = ax.legend(loc='upper right') annotations.append(a) drawFig(tour) drawStats(i)
Genera la visualizacion uniendo los puntos con las coordenadas en cada tour de la trayectoria
src/tspf/plot.py
generateMap
Javernaver/ProyectoTitulo
2
python
def generateMap(i: int) -> None: if (i >= len(trajectory)): return tour = trajectory[i].tour clearAnnotations() textstr = '\n'.join((f'Tour: {trajectory[i].tour}', f'Costo: {trajectory[i].cost}', f'Iteraciones: {trajectory[i].iterations}', f'Evaluaciones: {trajectory[i].evaluations}')) if (trajectory[i].temperature >= 0): textstr += f' Temperatura: {trajectory[i].temperature:.2f}' if ((trajectory[i].average > 0) and (trajectory[i].deviation > 0)): textstr += f' Promedio Poblacion: {trajectory[i].average:.2f} Desviacion Estandar Poblacion: {trajectory[i].deviation:.2f}' props = dict(boxstyle='round', facecolor='lightblue', alpha=0.77) a = ax.text(0.01, 0.98, textstr, transform=ax.transAxes, fontsize=12, verticalalignment='top', bbox=props) annotations.append(a) a = ax.scatter(x[tour[0]], y[tour[0]], s=70, edgecolors='black', color='lime', label='Punto de Partida') annotations.append(a) a = ax.legend(loc='upper right') annotations.append(a) drawFig(tour) drawStats(i)
def generateMap(i: int) -> None: if (i >= len(trajectory)): return tour = trajectory[i].tour clearAnnotations() textstr = '\n'.join((f'Tour: {trajectory[i].tour}', f'Costo: {trajectory[i].cost}', f'Iteraciones: {trajectory[i].iterations}', f'Evaluaciones: {trajectory[i].evaluations}')) if (trajectory[i].temperature >= 0): textstr += f' Temperatura: {trajectory[i].temperature:.2f}' if ((trajectory[i].average > 0) and (trajectory[i].deviation > 0)): textstr += f' Promedio Poblacion: {trajectory[i].average:.2f} Desviacion Estandar Poblacion: {trajectory[i].deviation:.2f}' props = dict(boxstyle='round', facecolor='lightblue', alpha=0.77) a = ax.text(0.01, 0.98, textstr, transform=ax.transAxes, fontsize=12, verticalalignment='top', bbox=props) annotations.append(a) a = ax.scatter(x[tour[0]], y[tour[0]], s=70, edgecolors='black', color='lime', label='Punto de Partida') annotations.append(a) a = ax.legend(loc='upper right') annotations.append(a) drawFig(tour) drawStats(i)<|docstring|>Genera la visualizacion uniendo los puntos con las coordenadas en cada tour de la trayectoria<|endoftext|>
f23fa119c54584c096cb0a22234a430c0ca7ea98db2de3caef2829e11a023c7f
def drawStats(i: int) -> None: 'Grafica los cambios en la calidad de los tour a lo largo de la iteraciones' if (len(coords) > MAXLEN): ax1.plot([tra.iterations for tra in trajectory], [tra.cost for tra in trajectory], label='Mejor Actual', linestyle='-', marker='', color='green') return ax1.cla() ax1.grid(color='black', linestyle='-', linewidth=0.1) ax1.set_title('Variacion por iteracion') ax1.set_ylabel('Costo') ax1.set_xlabel('Iteraciones') iterations.append(trajectory[i].iterations) cost.append(trajectory[i].cost) ax1.plot(iterations, cost, label='Mejor', linestyle='-', marker='', color='green') if ((trajectory[i].average > 0) and (trajectory[i].worst > 0)): avg.append(trajectory[i].average) worst.append(trajectory[i].worst) ax1.plot(iterations, avg, label='Promedio', linestyle='-', marker='', color='blue') ax1.plot(iterations, worst, label='Peor', linestyle='-', marker='', color='red') ax1.legend(loc='upper right')
Grafica los cambios en la calidad de los tour a lo largo de la iteraciones
src/tspf/plot.py
drawStats
Javernaver/ProyectoTitulo
2
python
def drawStats(i: int) -> None: if (len(coords) > MAXLEN): ax1.plot([tra.iterations for tra in trajectory], [tra.cost for tra in trajectory], label='Mejor Actual', linestyle='-', marker=, color='green') return ax1.cla() ax1.grid(color='black', linestyle='-', linewidth=0.1) ax1.set_title('Variacion por iteracion') ax1.set_ylabel('Costo') ax1.set_xlabel('Iteraciones') iterations.append(trajectory[i].iterations) cost.append(trajectory[i].cost) ax1.plot(iterations, cost, label='Mejor', linestyle='-', marker=, color='green') if ((trajectory[i].average > 0) and (trajectory[i].worst > 0)): avg.append(trajectory[i].average) worst.append(trajectory[i].worst) ax1.plot(iterations, avg, label='Promedio', linestyle='-', marker=, color='blue') ax1.plot(iterations, worst, label='Peor', linestyle='-', marker=, color='red') ax1.legend(loc='upper right')
def drawStats(i: int) -> None: if (len(coords) > MAXLEN): ax1.plot([tra.iterations for tra in trajectory], [tra.cost for tra in trajectory], label='Mejor Actual', linestyle='-', marker=, color='green') return ax1.cla() ax1.grid(color='black', linestyle='-', linewidth=0.1) ax1.set_title('Variacion por iteracion') ax1.set_ylabel('Costo') ax1.set_xlabel('Iteraciones') iterations.append(trajectory[i].iterations) cost.append(trajectory[i].cost) ax1.plot(iterations, cost, label='Mejor', linestyle='-', marker=, color='green') if ((trajectory[i].average > 0) and (trajectory[i].worst > 0)): avg.append(trajectory[i].average) worst.append(trajectory[i].worst) ax1.plot(iterations, avg, label='Promedio', linestyle='-', marker=, color='blue') ax1.plot(iterations, worst, label='Peor', linestyle='-', marker=, color='red') ax1.legend(loc='upper right')<|docstring|>Grafica los cambios en la calidad de los tour a lo largo de la iteraciones<|endoftext|>
94dd2747a5287ced28ad9bff212ac4ad88d9ae3a5702f2cc677d1f98de02d086
def clearAnnotations() -> None: 'Elimina todas las anotaciones de la figura' for an in annotations: an.remove() annotations[:] = []
Elimina todas las anotaciones de la figura
src/tspf/plot.py
clearAnnotations
Javernaver/ProyectoTitulo
2
python
def clearAnnotations() -> None: for an in annotations: an.remove() annotations[:] = []
def clearAnnotations() -> None: for an in annotations: an.remove() annotations[:] = []<|docstring|>Elimina todas las anotaciones de la figura<|endoftext|>
2775f9af2558176a02e8eec6fcf077f45e4ee9647f68fca91c746a9c15728847
def drawFig(tour: list) -> None: 'Grafica el tour en el grafico conectando los puntos a traves de flechas' for i in range((len(tour) - 1)): a = ax.annotate('', xy=(x[tour[(i + 1)]], y[tour[(i + 1)]]), xytext=(x[tour[i]], y[tour[i]]), arrowprops=dict(arrowstyle='->', connectionstyle='arc3', color='royalblue')) annotations.append(a)
Grafica el tour en el grafico conectando los puntos a traves de flechas
src/tspf/plot.py
drawFig
Javernaver/ProyectoTitulo
2
python
def drawFig(tour: list) -> None: for i in range((len(tour) - 1)): a = ax.annotate(, xy=(x[tour[(i + 1)]], y[tour[(i + 1)]]), xytext=(x[tour[i]], y[tour[i]]), arrowprops=dict(arrowstyle='->', connectionstyle='arc3', color='royalblue')) annotations.append(a)
def drawFig(tour: list) -> None: for i in range((len(tour) - 1)): a = ax.annotate(, xy=(x[tour[(i + 1)]], y[tour[(i + 1)]]), xytext=(x[tour[i]], y[tour[i]]), arrowprops=dict(arrowstyle='->', connectionstyle='arc3', color='royalblue')) annotations.append(a)<|docstring|>Grafica el tour en el grafico conectando los puntos a traves de flechas<|endoftext|>
23fc54d35c0ca22e3a9af9ceeb450b46fa632547f5d229b290a15330a25c4770
def show() -> None: 'Mostrar la figura y el grafico' global ani putCoords() if replit: if (len(coords) > MAXLEN): generateMap((len(trajectory) - 1)) else: ani = FuncAnimation(fig, generateMap, interval=300, blit=False) elif (len(coords) > MAXLEN): generateMap((len(trajectory) - 1)) else: ani = Player(fig, generateMap, maxi=(len(trajectory) - 1), interval=300, blit=False) plt_set_fullscreen() plt.show()
Mostrar la figura y el grafico
src/tspf/plot.py
show
Javernaver/ProyectoTitulo
2
python
def show() -> None: global ani putCoords() if replit: if (len(coords) > MAXLEN): generateMap((len(trajectory) - 1)) else: ani = FuncAnimation(fig, generateMap, interval=300, blit=False) elif (len(coords) > MAXLEN): generateMap((len(trajectory) - 1)) else: ani = Player(fig, generateMap, maxi=(len(trajectory) - 1), interval=300, blit=False) plt_set_fullscreen() plt.show()
def show() -> None: global ani putCoords() if replit: if (len(coords) > MAXLEN): generateMap((len(trajectory) - 1)) else: ani = FuncAnimation(fig, generateMap, interval=300, blit=False) elif (len(coords) > MAXLEN): generateMap((len(trajectory) - 1)) else: ani = Player(fig, generateMap, maxi=(len(trajectory) - 1), interval=300, blit=False) plt_set_fullscreen() plt.show()<|docstring|>Mostrar la figura y el grafico<|endoftext|>
2d47b2aebf7d448b00c0313ed9ca40be0a576b6024b6069faabe06ecb36bc81d
def plt_set_fullscreen() -> None: 'Poner el grafico en pantalla maximizada para evitar conflictos con los distitos tipos de pantallas ' backend = str(plt.get_backend()) mgr = plt.get_current_fig_manager() if (backend == 'TkAgg'): if (os.name == 'nt'): mgr.window.state('zoomed') else: mgr.resize(*mgr.window.maxsize()) elif (backend == 'wxAgg'): mgr.frame.Maximize(True) elif (backend == 'Qt4Agg'): mgr.window.showMaximized()
Poner el grafico en pantalla maximizada para evitar conflictos con los distitos tipos de pantallas
src/tspf/plot.py
plt_set_fullscreen
Javernaver/ProyectoTitulo
2
python
def plt_set_fullscreen() -> None: ' ' backend = str(plt.get_backend()) mgr = plt.get_current_fig_manager() if (backend == 'TkAgg'): if (os.name == 'nt'): mgr.window.state('zoomed') else: mgr.resize(*mgr.window.maxsize()) elif (backend == 'wxAgg'): mgr.frame.Maximize(True) elif (backend == 'Qt4Agg'): mgr.window.showMaximized()
def plt_set_fullscreen() -> None: ' ' backend = str(plt.get_backend()) mgr = plt.get_current_fig_manager() if (backend == 'TkAgg'): if (os.name == 'nt'): mgr.window.state('zoomed') else: mgr.resize(*mgr.window.maxsize()) elif (backend == 'wxAgg'): mgr.frame.Maximize(True) elif (backend == 'Qt4Agg'): mgr.window.showMaximized()<|docstring|>Poner el grafico en pantalla maximizada para evitar conflictos con los distitos tipos de pantallas<|endoftext|>
e3ab7749ca27e3ad572c9f77e3f65b5798785e3eddef2d49d5cb649ac38565ce
def play(self) -> None: 'Definir frames para el reproductor' while self.runs: self.i = ((self.i + self.forwards) - (not self.forwards)) if ((self.i > self.min) and (self.i < self.max)): (yield self.i) else: self.stop() (yield self.i)
Definir frames para el reproductor
src/tspf/plot.py
play
Javernaver/ProyectoTitulo
2
python
def play(self) -> None: while self.runs: self.i = ((self.i + self.forwards) - (not self.forwards)) if ((self.i > self.min) and (self.i < self.max)): (yield self.i) else: self.stop() (yield self.i)
def play(self) -> None: while self.runs: self.i = ((self.i + self.forwards) - (not self.forwards)) if ((self.i > self.min) and (self.i < self.max)): (yield self.i) else: self.stop() (yield self.i)<|docstring|>Definir frames para el reproductor<|endoftext|>
0055890168d56ecd83a6b0e0b7e0751bcf6b68b0956539efd28620fb98248627
def start(self) -> None: 'Iniciar' self.runs = True self.event_source.start()
Iniciar
src/tspf/plot.py
start
Javernaver/ProyectoTitulo
2
python
def start(self) -> None: self.runs = True self.event_source.start()
def start(self) -> None: self.runs = True self.event_source.start()<|docstring|>Iniciar<|endoftext|>
05b7718334ba061b2b16417dd80b2fb278dcffd6d358053ccd23a514cc80cf07
def stop(self, event=None) -> None: 'Parar' self.runs = False self.event_source.stop()
Parar
src/tspf/plot.py
stop
Javernaver/ProyectoTitulo
2
python
def stop(self, event=None) -> None: self.runs = False self.event_source.stop()
def stop(self, event=None) -> None: self.runs = False self.event_source.stop()<|docstring|>Parar<|endoftext|>
2a5a4d7e53efe2bf1d2885e4d6807401c89514ef0f5839d2b4877ed921944aa4
def oneforward(self, event=None) -> None: 'Avanzar' self.forwards = True self.onestep()
Avanzar
src/tspf/plot.py
oneforward
Javernaver/ProyectoTitulo
2
python
def oneforward(self, event=None) -> None: self.forwards = True self.onestep()
def oneforward(self, event=None) -> None: self.forwards = True self.onestep()<|docstring|>Avanzar<|endoftext|>
7e42e2211945b608da8ab93f0b0c907ff2602f5927594f57282ee676d3412ee7
def onebackward(self, event=None) -> None: 'Retroceder' self.forwards = False self.onestep()
Retroceder
src/tspf/plot.py
onebackward
Javernaver/ProyectoTitulo
2
python
def onebackward(self, event=None) -> None: self.forwards = False self.onestep()
def onebackward(self, event=None) -> None: self.forwards = False self.onestep()<|docstring|>Retroceder<|endoftext|>
ad6e1dedecc5f835b09fd19a354a2b6e1a29107c459eeb14fdf61680f39877ac
def onestep(self) -> None: 'Mover un paso en la graficacion de la trayectoria' if ((self.i > self.min) and (self.i < self.max)): self.i = ((self.i + self.forwards) - (not self.forwards)) elif ((self.i == self.min) and self.forwards): self.i += 1 elif ((self.i == self.max) and (not self.forwards)): self.i -= 1 self.func(self.i) self.fig.canvas.draw_idle() self.stop()
Mover un paso en la graficacion de la trayectoria
src/tspf/plot.py
onestep
Javernaver/ProyectoTitulo
2
python
def onestep(self) -> None: if ((self.i > self.min) and (self.i < self.max)): self.i = ((self.i + self.forwards) - (not self.forwards)) elif ((self.i == self.min) and self.forwards): self.i += 1 elif ((self.i == self.max) and (not self.forwards)): self.i -= 1 self.func(self.i) self.fig.canvas.draw_idle() self.stop()
def onestep(self) -> None: if ((self.i > self.min) and (self.i < self.max)): self.i = ((self.i + self.forwards) - (not self.forwards)) elif ((self.i == self.min) and self.forwards): self.i += 1 elif ((self.i == self.max) and (not self.forwards)): self.i -= 1 self.func(self.i) self.fig.canvas.draw_idle() self.stop()<|docstring|>Mover un paso en la graficacion de la trayectoria<|endoftext|>
f46bf3acda6526e17654f1bd8bad8481994e9890c45ad4f8ecdf8d3ae72a5ba1
def setup(self, pos) -> None: 'Configurar los botones y sus acciones' playerax = self.fig.add_axes([pos[0], pos[1], 0.22, 0.04]) divider = mpl_toolkits.axes_grid1.make_axes_locatable(playerax) sax = divider.append_axes('right', size='80%', pad=0.05) ofax = divider.append_axes('right', size='100%', pad=0.05) self.button_oneback = Button(playerax, label=u'$◀$') self.button_stop = Button(sax, label=u'$■$') self.button_oneforward = Button(ofax, label=u'$▶$') self.button_oneback.on_clicked(self.onebackward) self.button_oneforward.on_clicked(self.oneforward) self.button_stop.on_clicked(self.stop)
Configurar los botones y sus acciones
src/tspf/plot.py
setup
Javernaver/ProyectoTitulo
2
python
def setup(self, pos) -> None: playerax = self.fig.add_axes([pos[0], pos[1], 0.22, 0.04]) divider = mpl_toolkits.axes_grid1.make_axes_locatable(playerax) sax = divider.append_axes('right', size='80%', pad=0.05) ofax = divider.append_axes('right', size='100%', pad=0.05) self.button_oneback = Button(playerax, label=u'$◀$') self.button_stop = Button(sax, label=u'$■$') self.button_oneforward = Button(ofax, label=u'$▶$') self.button_oneback.on_clicked(self.onebackward) self.button_oneforward.on_clicked(self.oneforward) self.button_stop.on_clicked(self.stop)
def setup(self, pos) -> None: playerax = self.fig.add_axes([pos[0], pos[1], 0.22, 0.04]) divider = mpl_toolkits.axes_grid1.make_axes_locatable(playerax) sax = divider.append_axes('right', size='80%', pad=0.05) ofax = divider.append_axes('right', size='100%', pad=0.05) self.button_oneback = Button(playerax, label=u'$◀$') self.button_stop = Button(sax, label=u'$■$') self.button_oneforward = Button(ofax, label=u'$▶$') self.button_oneback.on_clicked(self.onebackward) self.button_oneforward.on_clicked(self.oneforward) self.button_stop.on_clicked(self.stop)<|docstring|>Configurar los botones y sus acciones<|endoftext|>
e0e4f67713d03952e9d1652fce10e506f1b846a65b6a3a013d7bc35b075c9837
def cifar10(self): "\n keys: dict_keys([b'data', b'labels', b'filenames', b'batch_label'])\n\n data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image.\n labels -- a list of 10000 numbers in the range 0-9. The number at index i indicates the label of the ith image in the array data.\n\n 1024 R + 1024 G + 1024 B (RGB)\n\n :return:\n " path = osp.join(base_path, 'cifar10') def train(): '\n read dats from every train_batch 1~5\n eacth train_batch contains 10,000 samples\n\n :return:\n ' anchor = self.train_data_counter for i in range(1, 6): fname = osp.join(path, ('data_batch_%d' % i)) dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar10', anchor, self.train_data_counter, isTrain=True) def test(): '\n read datas from single test_batch file.\n\n same as train()\n :return:\n ' anchor = self.test_data_counter fname = osp.join(path, 'test_batch') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + 
self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += label_size self.print_stat_test('cifar10', anchor, self.test_data_counter, label_size, new_label) train() test()
keys: dict_keys([b'data', b'labels', b'filenames', b'batch_label']) data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image. labels -- a list of 10000 numbers in the range 0-9. The number at index i indicates the label of the ith image in the array data. 1024 R + 1024 G + 1024 B (RGB) :return:
allinone-master/main.py
cifar10
andreYoo/EBPC-dataset
1
python
def cifar10(self): "\n keys: dict_keys([b'data', b'labels', b'filenames', b'batch_label'])\n\n data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image.\n labels -- a list of 10000 numbers in the range 0-9. The number at index i indicates the label of the ith image in the array data.\n\n 1024 R + 1024 G + 1024 B (RGB)\n\n :return:\n " path = osp.join(base_path, 'cifar10') def train(): '\n read dats from every train_batch 1~5\n eacth train_batch contains 10,000 samples\n\n :return:\n ' anchor = self.train_data_counter for i in range(1, 6): fname = osp.join(path, ('data_batch_%d' % i)) dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar10', anchor, self.train_data_counter, isTrain=True) def test(): '\n read datas from single test_batch file.\n\n same as train()\n :return:\n ' anchor = self.test_data_counter fname = osp.join(path, 'test_batch') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + 
self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += label_size self.print_stat_test('cifar10', anchor, self.test_data_counter, label_size, new_label) train() test()
def cifar10(self): "\n keys: dict_keys([b'data', b'labels', b'filenames', b'batch_label'])\n\n data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image.\n labels -- a list of 10000 numbers in the range 0-9. The number at index i indicates the label of the ith image in the array data.\n\n 1024 R + 1024 G + 1024 B (RGB)\n\n :return:\n " path = osp.join(base_path, 'cifar10') def train(): '\n read dats from every train_batch 1~5\n eacth train_batch contains 10,000 samples\n\n :return:\n ' anchor = self.train_data_counter for i in range(1, 6): fname = osp.join(path, ('data_batch_%d' % i)) dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar10', anchor, self.train_data_counter, isTrain=True) def test(): '\n read datas from single test_batch file.\n\n same as train()\n :return:\n ' anchor = self.test_data_counter fname = osp.join(path, 'test_batch') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + 
self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += label_size self.print_stat_test('cifar10', anchor, self.test_data_counter, label_size, new_label) train() test()<|docstring|>keys: dict_keys([b'data', b'labels', b'filenames', b'batch_label']) data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image. labels -- a list of 10000 numbers in the range 0-9. The number at index i indicates the label of the ith image in the array data. 1024 R + 1024 G + 1024 B (RGB) :return:<|endoftext|>
266785fed31060aa1012585c06136a986c9652a37edc02971587f5f5c7b468ed
def cifar100(self): "\n keys: dict_keys([b'fine_labels', b'coarse_labels', b'filenames', b'data', b'batch_label'])\n\n data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image.\n labels -- a list of 10000 numbers in the range 0-99. The number at index i indicates the label of the ith image in the array data.\n\n RAW DATA IS RGB!!\n\n coarse_labels: super-class in the range 0-19\n fine_labels: sub-class in the range 0-99\n\n :return:\n " path = osp.join(base_path, 'cifar100') def train(): anchor = self.train_data_counter fname = osp.join(path, 'train') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'fine_labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar100', anchor, self.train_data_counter, isTrain=True) def test(): anchor = self.test_data_counter fname = osp.join(path, 'test') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'fine_labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += 
label_size self.print_stat_test('cifar100', anchor, self.test_data_counter, label_size, new_label) train() test()
keys: dict_keys([b'fine_labels', b'coarse_labels', b'filenames', b'data', b'batch_label']) data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image. labels -- a list of 10000 numbers in the range 0-99. The number at index i indicates the label of the ith image in the array data. RAW DATA IS RGB!! coarse_labels: super-class in the range 0-19 fine_labels: sub-class in the range 0-99 :return:
allinone-master/main.py
cifar100
andreYoo/EBPC-dataset
1
python
def cifar100(self): "\n keys: dict_keys([b'fine_labels', b'coarse_labels', b'filenames', b'data', b'batch_label'])\n\n data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image.\n labels -- a list of 10000 numbers in the range 0-99. The number at index i indicates the label of the ith image in the array data.\n\n RAW DATA IS RGB!!\n\n coarse_labels: super-class in the range 0-19\n fine_labels: sub-class in the range 0-99\n\n :return:\n " path = osp.join(base_path, 'cifar100') def train(): anchor = self.train_data_counter fname = osp.join(path, 'train') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'fine_labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar100', anchor, self.train_data_counter, isTrain=True) def test(): anchor = self.test_data_counter fname = osp.join(path, 'test') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'fine_labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += 
label_size self.print_stat_test('cifar100', anchor, self.test_data_counter, label_size, new_label) train() test()
def cifar100(self): "\n keys: dict_keys([b'fine_labels', b'coarse_labels', b'filenames', b'data', b'batch_label'])\n\n data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image.\n labels -- a list of 10000 numbers in the range 0-99. The number at index i indicates the label of the ith image in the array data.\n\n RAW DATA IS RGB!!\n\n coarse_labels: super-class in the range 0-19\n fine_labels: sub-class in the range 0-99\n\n :return:\n " path = osp.join(base_path, 'cifar100') def train(): anchor = self.train_data_counter fname = osp.join(path, 'train') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'fine_labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar100', anchor, self.train_data_counter, isTrain=True) def test(): anchor = self.test_data_counter fname = osp.join(path, 'test') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'fine_labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += 
label_size self.print_stat_test('cifar100', anchor, self.test_data_counter, label_size, new_label) train() test()<|docstring|>keys: dict_keys([b'fine_labels', b'coarse_labels', b'filenames', b'data', b'batch_label']) data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32 colour image. The first 1024 entries contain the red channel values, the next 1024 the green, and the final 1024 the blue. The image is stored in row-major order, so that the first 32 entries of the array are the red channel values of the first row of the image. labels -- a list of 10000 numbers in the range 0-99. The number at index i indicates the label of the ith image in the array data. RAW DATA IS RGB!! coarse_labels: super-class in the range 0-19 fine_labels: sub-class in the range 0-99 :return:<|endoftext|>
527df92b65a06e54c94eccb0d9b6aeb4d716dec0d7c07593ab403c549e4c55a9
def train(): '\n read dats from every train_batch 1~5\n eacth train_batch contains 10,000 samples\n\n :return:\n ' anchor = self.train_data_counter for i in range(1, 6): fname = osp.join(path, ('data_batch_%d' % i)) dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar10', anchor, self.train_data_counter, isTrain=True)
read dats from every train_batch 1~5 eacth train_batch contains 10,000 samples :return:
allinone-master/main.py
train
andreYoo/EBPC-dataset
1
python
def train(): '\n read dats from every train_batch 1~5\n eacth train_batch contains 10,000 samples\n\n :return:\n ' anchor = self.train_data_counter for i in range(1, 6): fname = osp.join(path, ('data_batch_%d' % i)) dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar10', anchor, self.train_data_counter, isTrain=True)
def train(): '\n read dats from every train_batch 1~5\n eacth train_batch contains 10,000 samples\n\n :return:\n ' anchor = self.train_data_counter for i in range(1, 6): fname = osp.join(path, ('data_batch_%d' % i)) dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.trnx[self.train_data_counter] = img self.trny[self.train_data_counter] = (y + self.label_counter) self.train_data_counter += 1 self.print_stat('cifar10', anchor, self.train_data_counter, isTrain=True)<|docstring|>read dats from every train_batch 1~5 eacth train_batch contains 10,000 samples :return:<|endoftext|>
576afa264a8690b5bd32617459e53cbe4d60edf39ab18ea5d886f02a5eaffb63
def test(): '\n read datas from single test_batch file.\n\n same as train()\n :return:\n ' anchor = self.test_data_counter fname = osp.join(path, 'test_batch') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += label_size self.print_stat_test('cifar10', anchor, self.test_data_counter, label_size, new_label)
read datas from single test_batch file. same as train() :return:
allinone-master/main.py
test
andreYoo/EBPC-dataset
1
python
def test(): '\n read datas from single test_batch file.\n\n same as train()\n :return:\n ' anchor = self.test_data_counter fname = osp.join(path, 'test_batch') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += label_size self.print_stat_test('cifar10', anchor, self.test_data_counter, label_size, new_label)
def test(): '\n read datas from single test_batch file.\n\n same as train()\n :return:\n ' anchor = self.test_data_counter fname = osp.join(path, 'test_batch') dict = pickle.load(open(fname, 'rb'), encoding='bytes') data = dict[b'data'] labels = dict[b'labels'] images = data.reshape([(- 1), 3, 32, 32]) images = images.transpose([0, 2, 3, 1]) new_label = [] for (x, y) in zip(images, labels): if self.do: img = cv2.cvtColor(x, cv2.COLOR_RGB2BGR) img = cv2.resize(img, (64, 64)) self.tstx[self.test_data_counter] = img self.tsty[self.test_data_counter] = (y + self.label_counter) new_label.append((y + self.label_counter)) self.test_data_counter += 1 label_size = len(np.unique(new_label)) self.label_counter += label_size self.print_stat_test('cifar10', anchor, self.test_data_counter, label_size, new_label)<|docstring|>read datas from single test_batch file. same as train() :return:<|endoftext|>
fb03d7b2c08182b73d32341bd15cb2f6e074fb6e4e6b64ed57eb9478b2b5a4bd
@property def abspath(self): '\n see os.path.abspath()\n ' if (self._parent is None): return self._flist.rootpath if (self._parent.abspath == ''): raise RuntimeError('a file should always have a parent location') return os.path.join(self._parent.abspath, self.name)
see os.path.abspath()
JumpScale9Lib/data/flist/manipulator/path.py
abspath
Jumpscale/lib9
2
python
@property def abspath(self): '\n \n ' if (self._parent is None): return self._flist.rootpath if (self._parent.abspath == ): raise RuntimeError('a file should always have a parent location') return os.path.join(self._parent.abspath, self.name)
@property def abspath(self): '\n \n ' if (self._parent is None): return self._flist.rootpath if (self._parent.abspath == ): raise RuntimeError('a file should always have a parent location') return os.path.join(self._parent.abspath, self.name)<|docstring|>see os.path.abspath()<|endoftext|>
0bc0e48785bc1d9f1124ab80787b279c2ee24a87c07675382c38e5d68baccc91
@property def mtime(self): '\n Last modification time of the file.\n ' return self._obj.modificationTime
Last modification time of the file.
JumpScale9Lib/data/flist/manipulator/path.py
mtime
Jumpscale/lib9
2
python
@property def mtime(self): '\n \n ' return self._obj.modificationTime
@property def mtime(self): '\n \n ' return self._obj.modificationTime<|docstring|>Last modification time of the file.<|endoftext|>
062f45d2cd9de24db60c5b382555edc46798cc28b5da86ab2ec83bc05ab80085
@property def ctime(self): '\n creation time of the file.\n ' return self._obj.creationTime
creation time of the file.
JumpScale9Lib/data/flist/manipulator/path.py
ctime
Jumpscale/lib9
2
python
@property def ctime(self): '\n \n ' return self._obj.creationTime
@property def ctime(self): '\n \n ' return self._obj.creationTime<|docstring|>creation time of the file.<|endoftext|>
75529b21d8eee1e3570471317b9bbfbab5d828c159204699bebf4937e27e1c39
@property def basename(self): '\n see os.path.basename()\n ' parent = getattr(self._obj, 'parent', None) if (parent == ''): return self._flist.rootpath return self._obj.name
see os.path.basename()
JumpScale9Lib/data/flist/manipulator/path.py
basename
Jumpscale/lib9
2
python
@property def basename(self): '\n \n ' parent = getattr(self._obj, 'parent', None) if (parent == ): return self._flist.rootpath return self._obj.name
@property def basename(self): '\n \n ' parent = getattr(self._obj, 'parent', None) if (parent == ): return self._flist.rootpath return self._obj.name<|docstring|>see os.path.basename()<|endoftext|>
da522c232ac1ed4d95a0eae86483c8b12b1802566d272cef22b135d116ab0bd3
@property def name(self): '\n see basename\n ' return self.basename
see basename
JumpScale9Lib/data/flist/manipulator/path.py
name
Jumpscale/lib9
2
python
@property def name(self): '\n \n ' return self.basename
@property def name(self): '\n \n ' return self.basename<|docstring|>see basename<|endoftext|>
f8a1fb53d8720edf9cfc70b17b6a4f4229b2a36d07da5c1c2285258e22a45cbc
@property def size(self): '\n Size of the file, in bytes.\n ' return self._obj.size
Size of the file, in bytes.
JumpScale9Lib/data/flist/manipulator/path.py
size
Jumpscale/lib9
2
python
@property def size(self): '\n \n ' return self._obj.size
@property def size(self): '\n \n ' return self._obj.size<|docstring|>Size of the file, in bytes.<|endoftext|>
14b35565532504f39ce1d1d8bad58d0e7a4245e18e45e01e0c1936365c3325c9
@property def stem(self): '\n The same as name(), but with one file extension stripped off.\n ' return os.path.splitext(self.basename)
The same as name(), but with one file extension stripped off.
JumpScale9Lib/data/flist/manipulator/path.py
stem
Jumpscale/lib9
2
python
@property def stem(self): '\n \n ' return os.path.splitext(self.basename)
@property def stem(self): '\n \n ' return os.path.splitext(self.basename)<|docstring|>The same as name(), but with one file extension stripped off.<|endoftext|>
b205d627d0752c59a586b46ba8118134f34209f7840ec61477c3147383024bef
def chmod(self, mode): '\n see os.chmod()\n ' raise NotImplementedError()
see os.chmod()
JumpScale9Lib/data/flist/manipulator/path.py
chmod
Jumpscale/lib9
2
python
def chmod(self, mode): '\n \n ' raise NotImplementedError()
def chmod(self, mode): '\n \n ' raise NotImplementedError()<|docstring|>see os.chmod()<|endoftext|>
d9c00558b118d3e8f57ffbd93c3db3f451f10c74611c303948de77ddb4080783
def copy(self, src, follow_symlinks=True): '\n copy a file from the local filesystem into the flist\n ' logger.debug('copy file from %s to %s', src, os.path.join(self.abspath, os.path.basename(src))) return self._add_file(src)
copy a file from the local filesystem into the flist
JumpScale9Lib/data/flist/manipulator/path.py
copy
Jumpscale/lib9
2
python
def copy(self, src, follow_symlinks=True): '\n \n ' logger.debug('copy file from %s to %s', src, os.path.join(self.abspath, os.path.basename(src))) return self._add_file(src)
def copy(self, src, follow_symlinks=True): '\n \n ' logger.debug('copy file from %s to %s', src, os.path.join(self.abspath, os.path.basename(src))) return self._add_file(src)<|docstring|>copy a file from the local filesystem into the flist<|endoftext|>
4da5b5cadc62cb349ad5699cf6f1463f80bf7c85a60e0acb575d3541edf1ce00
def copytree(self, src): '\n Recursively copy a directory tree.\n ' if (not os.path.isdir(src)): raise ValueError('src must be a directory') def find_dir(location): current = self for dir_name in location.split(os.path.sep)[2:]: try: current = current.dirs(dir_name)[0] except IndexError: current = current.mkdir(dir_name) return current for (dirpath, dirnames, filenames) in os.walk(src): flit_dir = find_dir(dirpath) for name in dirnames: flit_dir.mkdir(name) for name in filenames: flit_dir.copy(os.path.join(dirpath, name))
Recursively copy a directory tree.
JumpScale9Lib/data/flist/manipulator/path.py
copytree
Jumpscale/lib9
2
python
def copytree(self, src): '\n \n ' if (not os.path.isdir(src)): raise ValueError('src must be a directory') def find_dir(location): current = self for dir_name in location.split(os.path.sep)[2:]: try: current = current.dirs(dir_name)[0] except IndexError: current = current.mkdir(dir_name) return current for (dirpath, dirnames, filenames) in os.walk(src): flit_dir = find_dir(dirpath) for name in dirnames: flit_dir.mkdir(name) for name in filenames: flit_dir.copy(os.path.join(dirpath, name))
def copytree(self, src): '\n \n ' if (not os.path.isdir(src)): raise ValueError('src must be a directory') def find_dir(location): current = self for dir_name in location.split(os.path.sep)[2:]: try: current = current.dirs(dir_name)[0] except IndexError: current = current.mkdir(dir_name) return current for (dirpath, dirnames, filenames) in os.walk(src): flit_dir = find_dir(dirpath) for name in dirnames: flit_dir.mkdir(name) for name in filenames: flit_dir.copy(os.path.join(dirpath, name))<|docstring|>Recursively copy a directory tree.<|endoftext|>
b8448c9d9b3d11d12330cc90b67aa50369c4e3a8dcb4ac397326d73dbf4ead54
def mkdir(self, name, mode='511'): '\n create a new directory\n ' dir = self._add_dir(name) logger.debug('create directory at %s', dir.abspath) return dir
create a new directory
JumpScale9Lib/data/flist/manipulator/path.py
mkdir
Jumpscale/lib9
2
python
def mkdir(self, name, mode='511'): '\n \n ' dir = self._add_dir(name) logger.debug('create directory at %s', dir.abspath) return dir
def mkdir(self, name, mode='511'): '\n \n ' dir = self._add_dir(name) logger.debug('create directory at %s', dir.abspath) return dir<|docstring|>create a new directory<|endoftext|>
64089c8039f5c61a84f35008b1c3c709932d2e1f1e05f240cf957d88ba12bf15
def files(self, pattern=None): "\n list files in this directory\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('file')
list files in this directory The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').
JumpScale9Lib/data/flist/manipulator/path.py
files
Jumpscale/lib9
2
python
def files(self, pattern=None): "\n list files in this directory\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('file')
def files(self, pattern=None): "\n list files in this directory\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('file')<|docstring|>list files in this directory The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').<|endoftext|>
58821033f569c75ef64c57193ee06614e5ec290888248a91093efe03185f952b
def dirs(self, pattern=None): "\n List of this directory's subdirectories.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('dir', pattern)
List of this directory's subdirectories. The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').
JumpScale9Lib/data/flist/manipulator/path.py
dirs
Jumpscale/lib9
2
python
def dirs(self, pattern=None): "\n List of this directory's subdirectories.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('dir', pattern)
def dirs(self, pattern=None): "\n List of this directory's subdirectories.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('dir', pattern)<|docstring|>List of this directory's subdirectories. The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').<|endoftext|>
77e97ddc2a503d329060c4e2b03e67a526b3134e46c52890097d6327d08409d6
def links(self, pattern=None): "\n List of this directory's links.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('link', pattern)
List of this directory's links. The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').
JumpScale9Lib/data/flist/manipulator/path.py
links
Jumpscale/lib9
2
python
def links(self, pattern=None): "\n List of this directory's links.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('link', pattern)
def links(self, pattern=None): "\n List of this directory's links.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('link', pattern)<|docstring|>List of this directory's links. The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').<|endoftext|>
afc39536699f4cdc77b66cc1309e4a94cbbf01120cebb93ff7e8fbfc7441a55a
def specials(self, pattern=None): "\n List of this directory's special files.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('special', pattern)
List of this directory's special files. The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').
JumpScale9Lib/data/flist/manipulator/path.py
specials
Jumpscale/lib9
2
python
def specials(self, pattern=None): "\n List of this directory's special files.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('special', pattern)
def specials(self, pattern=None): "\n List of this directory's special files.\n\n The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()).\n\n With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').\n " return self._filter_content('special', pattern)<|docstring|>List of this directory's special files. The elements of the list are Path objects. This does not walk recursively into subdirectories (but see walkdirs()). With the optional pattern argument, this only lists directories whose names match the given pattern. For example, d.dirs('build-*').<|endoftext|>
68d067eb62413ca3ae786f8de02364357245e97b17250ffa89c94609aad9ee84
def glob(self, pattern): "\n Return a list of Path objects that match the pattern.\n\n pattern - a path relative to this directory, with wildcards.\n\n For example, Path('/users').glob('*/bin/*') returns a list of all the files users have in their bin directories.\n " raise NotImplementedError()
Return a list of Path objects that match the pattern. pattern - a path relative to this directory, with wildcards. For example, Path('/users').glob('*/bin/*') returns a list of all the files users have in their bin directories.
JumpScale9Lib/data/flist/manipulator/path.py
glob
Jumpscale/lib9
2
python
def glob(self, pattern): "\n Return a list of Path objects that match the pattern.\n\n pattern - a path relative to this directory, with wildcards.\n\n For example, Path('/users').glob('*/bin/*') returns a list of all the files users have in their bin directories.\n " raise NotImplementedError()
def glob(self, pattern): "\n Return a list of Path objects that match the pattern.\n\n pattern - a path relative to this directory, with wildcards.\n\n For example, Path('/users').glob('*/bin/*') returns a list of all the files users have in their bin directories.\n " raise NotImplementedError()<|docstring|>Return a list of Path objects that match the pattern. pattern - a path relative to this directory, with wildcards. For example, Path('/users').glob('*/bin/*') returns a list of all the files users have in their bin directories.<|endoftext|>
8eefa06eaa86b58c8371df02dda465b80726ecdba2065244a675792f9c2e753a
def link(self, newpath): '\n Create a hard link at newpath, pointing to this file.\n\n See also\n\n os.link()\n ' raise NotImplementedError()
Create a hard link at newpath, pointing to this file. See also os.link()
JumpScale9Lib/data/flist/manipulator/path.py
link
Jumpscale/lib9
2
python
def link(self, newpath): '\n Create a hard link at newpath, pointing to this file.\n\n See also\n\n os.link()\n ' raise NotImplementedError()
def link(self, newpath): '\n Create a hard link at newpath, pointing to this file.\n\n See also\n\n os.link()\n ' raise NotImplementedError()<|docstring|>Create a hard link at newpath, pointing to this file. See also os.link()<|endoftext|>
916fee307fbdd7b91656f00a64642e56ebf5983db33c62fcdebe522dcbcecd0a
def unlink(self): '\n See also\n\n os.unlink()\n ' raise NotImplementedError()
See also os.unlink()
JumpScale9Lib/data/flist/manipulator/path.py
unlink
Jumpscale/lib9
2
python
def unlink(self): '\n See also\n\n os.unlink()\n ' raise NotImplementedError()
def unlink(self): '\n See also\n\n os.unlink()\n ' raise NotImplementedError()<|docstring|>See also os.unlink()<|endoftext|>
f242e058b4cbd168b5c9a07ab0acb64ff7ca9fb1e32d3477f65f5dabb5f7de57
def move(self, dst): '\n Recursively move a file or directory to another location. This is similar to the Unix “mv” command. Return the file or directory’s destination.\n\n If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist.\n\n If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics.\n\n If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. Symlinks are recreated under the new name if os.rename() fails because of cross filesystem renames.\n\n The optional copy_function argument is a callable that will be used to copy the source or it will be delegated to copytree. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used.\n\n A lot more could be done here… A look at a mv.c shows a lot of the issues this implementation glosses over.\n ' raise NotImplementedError()
Recursively move a file or directory to another location. This is similar to the Unix “mv” command. Return the file or directory’s destination. If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist. If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics. If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. Symlinks are recreated under the new name if os.rename() fails because of cross filesystem renames. The optional copy_function argument is a callable that will be used to copy the source or it will be delegated to copytree. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used. A lot more could be done here… A look at a mv.c shows a lot of the issues this implementation glosses over.
JumpScale9Lib/data/flist/manipulator/path.py
move
Jumpscale/lib9
2
python
def move(self, dst): '\n Recursively move a file or directory to another location. This is similar to the Unix “mv” command. Return the file or directory’s destination.\n\n If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist.\n\n If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics.\n\n If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. Symlinks are recreated under the new name if os.rename() fails because of cross filesystem renames.\n\n The optional copy_function argument is a callable that will be used to copy the source or it will be delegated to copytree. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used.\n\n A lot more could be done here… A look at a mv.c shows a lot of the issues this implementation glosses over.\n ' raise NotImplementedError()
def move(self, dst): '\n Recursively move a file or directory to another location. This is similar to the Unix “mv” command. Return the file or directory’s destination.\n\n If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist.\n\n If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics.\n\n If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. Symlinks are recreated under the new name if os.rename() fails because of cross filesystem renames.\n\n The optional copy_function argument is a callable that will be used to copy the source or it will be delegated to copytree. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used.\n\n A lot more could be done here… A look at a mv.c shows a lot of the issues this implementation glosses over.\n ' raise NotImplementedError()<|docstring|>Recursively move a file or directory to another location. This is similar to the Unix “mv” command. Return the file or directory’s destination. If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist. If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics. If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. Symlinks are recreated under the new name if os.rename() fails because of cross filesystem renames. The optional copy_function argument is a callable that will be used to copy the source or it will be delegated to copytree. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used. 
A lot more could be done here… A look at a mv.c shows a lot of the issues this implementation glosses over.<|endoftext|>
a735f8c856fe9714ee6212f9c513850dc3a681be201b5178212e83a77ecb62ee
@property def parent(self): "\n This path’s parent directory, as a new Path object.\n\n For example, Path('/usr/local/lib/libpython.so').parent == Path('/usr/local/lib')\n " return self._parent
This path’s parent directory, as a new Path object. For example, Path('/usr/local/lib/libpython.so').parent == Path('/usr/local/lib')
JumpScale9Lib/data/flist/manipulator/path.py
parent
Jumpscale/lib9
2
python
@property def parent(self): "\n This path’s parent directory, as a new Path object.\n\n For example, Path('/usr/local/lib/libpython.so').parent == Path('/usr/local/lib')\n " return self._parent
@property def parent(self): "\n This path’s parent directory, as a new Path object.\n\n For example, Path('/usr/local/lib/libpython.so').parent == Path('/usr/local/lib')\n " return self._parent<|docstring|>This path’s parent directory, as a new Path object. For example, Path('/usr/local/lib/libpython.so').parent == Path('/usr/local/lib')<|endoftext|>
4aea5b8e4b20fc9932141bd9b264caf4376b32fe32d1e3f78356d008be9a9225
def remove(self): '\n remove\n ' raise NotImplementedError()
remove
JumpScale9Lib/data/flist/manipulator/path.py
remove
Jumpscale/lib9
2
python
def (self): '\n \n ' raise NotImplementedError()
def (self): '\n \n ' raise NotImplementedError()<|docstring|>remove<|endoftext|>
b12abfadbf9e5a48b8a88adb2c77953795d15a8fef08963b4e782b1628384cba
def rename(self): '\n rename\n ' raise NotImplementedError()
rename
JumpScale9Lib/data/flist/manipulator/path.py
rename
Jumpscale/lib9
2
python
def (self): '\n \n ' raise NotImplementedError()
def (self): '\n \n ' raise NotImplementedError()<|docstring|>rename<|endoftext|>
ca683b3fee79164d07214618caed1912c769df741521215bfdbd08f3d2d2c710
def __init__(self, config): '\n Sets the model config and any other local variables. Here, we initialize\n the most_recent_value to None.\n ' super().__init__(config) self.most_recent_value = None
Sets the model config and any other local variables. Here, we initialize the most_recent_value to None.
merlion/models/forecast/repeat_recent.py
__init__
jimgoo/Merlion
0
python
def __init__(self, config): '\n Sets the model config and any other local variables. Here, we initialize\n the most_recent_value to None.\n ' super().__init__(config) self.most_recent_value = None
def __init__(self, config): '\n Sets the model config and any other local variables. Here, we initialize\n the most_recent_value to None.\n ' super().__init__(config) self.most_recent_value = None<|docstring|>Sets the model config and any other local variables. Here, we initialize the most_recent_value to None.<|endoftext|>
11812b645bd854012e898ebb44a72785e50397cfefdc66a5165949cb9471a174
def LSIGF(h, S, x, b=None):
    """Apply a linear shift-invariant graph filter, then add an optional bias.

    Computes, for every output feature f,

        y_f = sum_e sum_k sum_g [h_{f,g,e}]_k  S_e^k  x_g  (+ b_f)

    Args:
        h: filter taps, shape (out_features, edge_features, taps, in_features).
        S: graph shift operator, shape (edge_features, nodes, nodes).
        x: input signal, shape (batch, in_features, nodes).
        b: optional bias, shape (out_features, nodes); use nodes == 1 to share
           one bias value across all nodes.

    Returns:
        Filtered signal of shape (batch, out_features, nodes).
    """
    F, E, K, G = h.shape
    assert S.shape[0] == E
    N = S.shape[1]
    assert S.shape[2] == N
    B = x.shape[0]
    assert x.shape[1] == G
    assert x.shape[2] == N
    # Broadcast layout: signal (B, 1, G, N) against shift (1, E, N, N).
    signal = x.reshape([B, 1, G, N])
    shift = S.reshape([1, E, N, N])
    # Collect S^k x for k = 0 .. K-1; k = 0 is the raw signal repeated per edge
    # feature so every tap slice has shape (B, E, 1, G, N).
    taps = [signal.reshape([B, 1, 1, G, N]).repeat(1, E, 1, 1, 1)]
    for _ in range(1, K):
        signal = torch.matmul(signal, shift)
        taps.append(signal.reshape([B, E, 1, G, N]))
    z = torch.cat(taps, dim=2)
    # Contract the (E, K, G) axes of the diffused signal with the filter taps.
    z_flat = z.permute(0, 4, 1, 2, 3).reshape([B, N, E * K * G])
    h_flat = h.reshape([F, E * K * G]).permute(1, 0)
    y = torch.matmul(z_flat, h_flat).permute(0, 2, 1)
    return y if b is None else y + b
LSIGF(filter_taps, GSO, input, bias=None) Computes the output of a linear shift-invariant graph filter on input and then adds bias. Denote as G the number of input features, F the number of output features, E the number of edge features, K the number of filter taps, N the number of nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{G x N} the input data where x_{g} in R^{N} is the graph signal representing feature g, and b in R^{F x N} the bias vector, with b_{f} in R^{N} representing the bias for feature f. Then, the LSI-GF is computed as y_{f} = \sum_{e=1}^{E} \sum_{k=0}^{K-1} \sum_{g=1}^{G} [h_{f,g,e}]_{k} S_{e}^{k} x_{g} + b_{f} for f = 1, ..., F. Inputs: filter_taps (torch.tensor): array of filter taps; shape: output_features x edge_features x filter_taps x input_features GSO (torch.tensor): graph shift operator; shape: edge_features x number_nodes x number_nodes input (torch.tensor): input signal; shape: batch_size x input_features x number_nodes bias (torch.tensor): shape: output_features x number_nodes if the same bias is to be applied to all nodes, set number_nodes = 1 so that b_{f} vector becomes b_{f} \mathbf{1}_{N} Outputs: output: filtered signals; shape: batch_size x output_features x number_nodes
utils/graphUtils/GraphMLSimple.py
LSIGF
vtekur/gnn_pathplanning
86
python
def LSIGF(h, S, x, b=None): '\n LSIGF(filter_taps, GSO, input, bias=None) Computes the output of a linear\n shift-invariant graph filter on input and then adds bias.\n\n Denote as G the number of input features, F the number of output features,\n E the number of edge features, K the number of filter taps, N the number of\n nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{G x N} the\n input data where x_{g} in R^{N} is the graph signal representing feature\n g, and b in R^{F x N} the bias vector, with b_{f} in R^{N} representing the\n bias for feature f.\n\n Then, the LSI-GF is computed as\n y_{f} = \\sum_{e=1}^{E}\n \\sum_{k=0}^{K-1}\n \\sum_{g=1}^{G}\n [h_{f,g,e}]_{k} S_{e}^{k} x_{g}\n + b_{f}\n for f = 1, ..., F.\n\n Inputs:\n filter_taps (torch.tensor): array of filter taps; shape:\n output_features x edge_features x filter_taps x input_features\n GSO (torch.tensor): graph shift operator; shape:\n edge_features x number_nodes x number_nodes\n input (torch.tensor): input signal; shape:\n batch_size x input_features x number_nodes\n bias (torch.tensor): shape: output_features x number_nodes\n if the same bias is to be applied to all nodes, set number_nodes = 1\n so that b_{f} vector becomes b_{f} \\mathbf{1}_{N}\n\n Outputs:\n output: filtered signals; shape:\n batch_size x output_features x number_nodes\n ' F = h.shape[0] E = h.shape[1] K = h.shape[2] G = h.shape[3] assert (S.shape[0] == E) N = S.shape[1] assert (S.shape[2] == N) B = x.shape[0] assert (x.shape[1] == G) assert (x.shape[2] == N) x = x.reshape([B, 1, G, N]) S = S.reshape([1, E, N, N]) z = x.reshape([B, 1, 1, G, N]).repeat(1, E, 1, 1, 1) for k in range(1, K): x = torch.matmul(x, S) xS = x.reshape([B, E, 1, G, N]) z = torch.cat((z, xS), dim=2) y = torch.matmul(z.permute(0, 4, 1, 2, 3).reshape([B, N, ((E * K) * G)]), h.reshape([F, ((E * K) * G)]).permute(1, 0)).permute(0, 2, 1) if (b is not None): y = (y + b) return y
def LSIGF(h, S, x, b=None): '\n LSIGF(filter_taps, GSO, input, bias=None) Computes the output of a linear\n shift-invariant graph filter on input and then adds bias.\n\n Denote as G the number of input features, F the number of output features,\n E the number of edge features, K the number of filter taps, N the number of\n nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{G x N} the\n input data where x_{g} in R^{N} is the graph signal representing feature\n g, and b in R^{F x N} the bias vector, with b_{f} in R^{N} representing the\n bias for feature f.\n\n Then, the LSI-GF is computed as\n y_{f} = \\sum_{e=1}^{E}\n \\sum_{k=0}^{K-1}\n \\sum_{g=1}^{G}\n [h_{f,g,e}]_{k} S_{e}^{k} x_{g}\n + b_{f}\n for f = 1, ..., F.\n\n Inputs:\n filter_taps (torch.tensor): array of filter taps; shape:\n output_features x edge_features x filter_taps x input_features\n GSO (torch.tensor): graph shift operator; shape:\n edge_features x number_nodes x number_nodes\n input (torch.tensor): input signal; shape:\n batch_size x input_features x number_nodes\n bias (torch.tensor): shape: output_features x number_nodes\n if the same bias is to be applied to all nodes, set number_nodes = 1\n so that b_{f} vector becomes b_{f} \\mathbf{1}_{N}\n\n Outputs:\n output: filtered signals; shape:\n batch_size x output_features x number_nodes\n ' F = h.shape[0] E = h.shape[1] K = h.shape[2] G = h.shape[3] assert (S.shape[0] == E) N = S.shape[1] assert (S.shape[2] == N) B = x.shape[0] assert (x.shape[1] == G) assert (x.shape[2] == N) x = x.reshape([B, 1, G, N]) S = S.reshape([1, E, N, N]) z = x.reshape([B, 1, 1, G, N]).repeat(1, E, 1, 1, 1) for k in range(1, K): x = torch.matmul(x, S) xS = x.reshape([B, E, 1, G, N]) z = torch.cat((z, xS), dim=2) y = torch.matmul(z.permute(0, 4, 1, 2, 3).reshape([B, N, ((E * K) * G)]), h.reshape([F, ((E * K) * G)]).permute(1, 0)).permute(0, 2, 1) if (b is not None): y = (y + b) return y<|docstring|>LSIGF(filter_taps, GSO, input, bias=None) Computes the 
output of a linear shift-invariant graph filter on input and then adds bias. Denote as G the number of input features, F the number of output features, E the number of edge features, K the number of filter taps, N the number of nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{G x N} the input data where x_{g} in R^{N} is the graph signal representing feature g, and b in R^{F x N} the bias vector, with b_{f} in R^{N} representing the bias for feature f. Then, the LSI-GF is computed as y_{f} = \sum_{e=1}^{E} \sum_{k=0}^{K-1} \sum_{g=1}^{G} [h_{f,g,e}]_{k} S_{e}^{k} x_{g} + b_{f} for f = 1, ..., F. Inputs: filter_taps (torch.tensor): array of filter taps; shape: output_features x edge_features x filter_taps x input_features GSO (torch.tensor): graph shift operator; shape: edge_features x number_nodes x number_nodes input (torch.tensor): input signal; shape: batch_size x input_features x number_nodes bias (torch.tensor): shape: output_features x number_nodes if the same bias is to be applied to all nodes, set number_nodes = 1 so that b_{f} vector becomes b_{f} \mathbf{1}_{N} Outputs: output: filtered signals; shape: batch_size x output_features x number_nodes<|endoftext|>
05acc691864f50af673cd6c9cc80d4615331cdb76ac95d7023f952b9cdba09c2
def BatchLSIGF(h, S, x, b=None):
    """Batched linear shift-invariant graph filter plus optional bias.

    Computes, for every output feature g,

        y_g = sum_e sum_k sum_f [h_{g,f,e}]_k  S_e^k  x_f  (+ b_g)

    Args:
        h: filter taps, shape (out_features G, edge_features E, taps K,
           in_features F).
        S: per-sample graph shift operator, shape (batch B, E, nodes N, N).
        x: input signal, shape (B, F, N).
        b: optional bias, shape (G, N); use N == 1 to share one bias value
           across all nodes.

    Returns:
        Filtered signal of shape (B, G, N).
    """
    G = h.shape[0]  # output features
    E = h.shape[1]
    K = h.shape[2]
    F = h.shape[3]  # input features
    assert S.shape[1] == E
    N = S.shape[2]
    assert S.shape[3] == N
    B = x.shape[0]
    assert x.shape[1] == F
    assert x.shape[2] == N
    x = x.reshape([B, 1, F, N])
    S = S.reshape([B, E, N, N])
    # z stacks S^k x along dim 2 for k = 0 .. K-1, shape (B, E, K, F, N).
    z = x.reshape([B, 1, 1, F, N]).repeat(1, E, 1, 1, 1)
    for k in range(1, K):
        x = torch.matmul(x, S)
        xS = x.reshape([B, E, 1, F, N])
        z = torch.cat((z, xS), dim=2)
    # BUG FIX: the filter tensor has G*E*K*F elements and must be flattened to
    # (G, E*K*F) to match z's (B, N, E*K*F) layout. The previous
    # h.reshape([F, (E*K)*G]) swapped the input/output feature axes and only
    # ran at all when F == G (and even then contracted the wrong entries).
    y = torch.matmul(
        z.permute(0, 4, 1, 2, 3).reshape([B, N, (E * K) * F]),
        h.reshape([G, (E * K) * F]).permute(1, 0),
    ).permute(0, 2, 1)
    if b is not None:
        y = y + b
    return y
LSIGF(filter_taps, GSO, input, bias=None) Computes the output of a linear shift-invariant graph filter on input and then adds bias. Denote as F the number of input features, G the number of output features, E the number of edge features, K the number of filter taps, N the number of nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{f x N} the input data where x_{g} in R^{N} is the graph signal representing feature g, and b in R^{G x N} the bias vector, with b_{g} in R^{N} representing the bias for feature f. Then, the LSI-GF is computed as y_{g} = \sum_{e=1}^{E} \sum_{k=0}^{K-1} \sum_{g=1}^{F} [h_{f,g,e}]_{k} S_{e}^{k} x_{f} + b_{f} for g = 1, ..., G. Inputs: filter_taps (torch.tensor): array of filter taps; shape: output_features x edge_features x filter_taps x input_features GSO (torch.tensor): graph shift operator; shape: edge_features x number_nodes x number_nodes input (torch.tensor): input signal; shape: batch_size x input_features x number_nodes bias (torch.tensor): shape: output_features x number_nodes if the same bias is to be applied to all nodes, set number_nodes = 1 so that b_{f} vector becomes b_{f} \mathbf{1}_{N} Outputs: output: filtered signals; shape: batch_size x output_features x number_nodes
utils/graphUtils/GraphMLSimple.py
BatchLSIGF
vtekur/gnn_pathplanning
86
python
def BatchLSIGF(h, S, x, b=None): '\n LSIGF(filter_taps, GSO, input, bias=None) Computes the output of a linear\n shift-invariant graph filter on input and then adds bias.\n\n Denote as F the number of input features, G the number of output features,\n E the number of edge features, K the number of filter taps, N the number of\n nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{f x N} the\n input data where x_{g} in R^{N} is the graph signal representing feature\n g, and b in R^{G x N} the bias vector, with b_{g} in R^{N} representing the\n bias for feature f.\n\n Then, the LSI-GF is computed as\n y_{g} = \\sum_{e=1}^{E}\n \\sum_{k=0}^{K-1}\n \\sum_{g=1}^{F}\n [h_{f,g,e}]_{k} S_{e}^{k} x_{f}\n + b_{f}\n for g = 1, ..., G.\n\n Inputs:\n filter_taps (torch.tensor): array of filter taps; shape:\n output_features x edge_features x filter_taps x input_features\n GSO (torch.tensor): graph shift operator; shape:\n edge_features x number_nodes x number_nodes\n input (torch.tensor): input signal; shape:\n batch_size x input_features x number_nodes\n bias (torch.tensor): shape: output_features x number_nodes\n if the same bias is to be applied to all nodes, set number_nodes = 1\n so that b_{f} vector becomes b_{f} \\mathbf{1}_{N}\n\n Outputs:\n output: filtered signals; shape:\n batch_size x output_features x number_nodes\n ' G = h.shape[0] E = h.shape[1] K = h.shape[2] F = h.shape[3] assert (S.shape[1] == E) N = S.shape[2] assert (S.shape[3] == N) B = x.shape[0] assert (x.shape[1] == F) assert (x.shape[2] == N) x = x.reshape([B, 1, F, N]) S = S.reshape([B, E, N, N]) z = x.reshape([B, 1, 1, F, N]).repeat(1, E, 1, 1, 1) for k in range(1, K): x = torch.matmul(x, S) xS = x.reshape([B, E, 1, F, N]) z = torch.cat((z, xS), dim=2) y = torch.matmul(z.permute(0, 4, 1, 2, 3).reshape([B, N, ((E * K) * F)]), h.reshape([F, ((E * K) * G)]).permute(1, 0)).permute(0, 2, 1) if (b is not None): y = (y + b) return y
def BatchLSIGF(h, S, x, b=None): '\n LSIGF(filter_taps, GSO, input, bias=None) Computes the output of a linear\n shift-invariant graph filter on input and then adds bias.\n\n Denote as F the number of input features, G the number of output features,\n E the number of edge features, K the number of filter taps, N the number of\n nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{f x N} the\n input data where x_{g} in R^{N} is the graph signal representing feature\n g, and b in R^{G x N} the bias vector, with b_{g} in R^{N} representing the\n bias for feature f.\n\n Then, the LSI-GF is computed as\n y_{g} = \\sum_{e=1}^{E}\n \\sum_{k=0}^{K-1}\n \\sum_{g=1}^{F}\n [h_{f,g,e}]_{k} S_{e}^{k} x_{f}\n + b_{f}\n for g = 1, ..., G.\n\n Inputs:\n filter_taps (torch.tensor): array of filter taps; shape:\n output_features x edge_features x filter_taps x input_features\n GSO (torch.tensor): graph shift operator; shape:\n edge_features x number_nodes x number_nodes\n input (torch.tensor): input signal; shape:\n batch_size x input_features x number_nodes\n bias (torch.tensor): shape: output_features x number_nodes\n if the same bias is to be applied to all nodes, set number_nodes = 1\n so that b_{f} vector becomes b_{f} \\mathbf{1}_{N}\n\n Outputs:\n output: filtered signals; shape:\n batch_size x output_features x number_nodes\n ' G = h.shape[0] E = h.shape[1] K = h.shape[2] F = h.shape[3] assert (S.shape[1] == E) N = S.shape[2] assert (S.shape[3] == N) B = x.shape[0] assert (x.shape[1] == F) assert (x.shape[2] == N) x = x.reshape([B, 1, F, N]) S = S.reshape([B, E, N, N]) z = x.reshape([B, 1, 1, F, N]).repeat(1, E, 1, 1, 1) for k in range(1, K): x = torch.matmul(x, S) xS = x.reshape([B, E, 1, F, N]) z = torch.cat((z, xS), dim=2) y = torch.matmul(z.permute(0, 4, 1, 2, 3).reshape([B, N, ((E * K) * F)]), h.reshape([F, ((E * K) * G)]).permute(1, 0)).permute(0, 2, 1) if (b is not None): y = (y + b) return y<|docstring|>LSIGF(filter_taps, GSO, input, bias=None) Computes the 
output of a linear shift-invariant graph filter on input and then adds bias. Denote as F the number of input features, G the number of output features, E the number of edge features, K the number of filter taps, N the number of nodes, S_{e} in R^{N x N} the GSO for edge feature e, x in R^{f x N} the input data where x_{g} in R^{N} is the graph signal representing feature g, and b in R^{G x N} the bias vector, with b_{g} in R^{N} representing the bias for feature f. Then, the LSI-GF is computed as y_{g} = \sum_{e=1}^{E} \sum_{k=0}^{K-1} \sum_{g=1}^{F} [h_{f,g,e}]_{k} S_{e}^{k} x_{f} + b_{f} for g = 1, ..., G. Inputs: filter_taps (torch.tensor): array of filter taps; shape: output_features x edge_features x filter_taps x input_features GSO (torch.tensor): graph shift operator; shape: edge_features x number_nodes x number_nodes input (torch.tensor): input signal; shape: batch_size x input_features x number_nodes bias (torch.tensor): shape: output_features x number_nodes if the same bias is to be applied to all nodes, set number_nodes = 1 so that b_{f} vector becomes b_{f} \mathbf{1}_{N} Outputs: output: filtered signals; shape: batch_size x output_features x number_nodes<|endoftext|>
ba9b246270d274a54cae315293918a85bb6fd0b9d5c81647414872e1af4b9f32
def gen_label_seq(self, npy_str, input_label_dict_list):
    """Build parallel score/mask sequences for every behavior in self.beh_list.

    Behaviors present in ``input_label_dict_list`` keep their stored (0/1)
    score and get mask 1; missing behaviors get the placeholder score 2.0 and
    mask 0.
    """
    scores = []
    mask = []
    for behavior in self.beh_list:
        known = behavior in input_label_dict_list
        scores.append(input_label_dict_list[behavior] if known else 2.0)
        mask.append(int(known))
    return (scores, mask)
if the beh exists in binary score dict, return the 0/1 value, if it is not in the dict, return the 2 class label based on the split dict
src/TASK_train_E-BP_Emo2beh_seq_model/TASK_couple_sequence_model _with_pretrained_E-BP/Dataloader_couple_cnn_sequence.py
gen_label_seq
haoqi/emotions_as_primitives_towards_behavior_understanding
2
python
def gen_label_seq(self, npy_str, input_label_dict_list): '\n if the beh exists in binary score dict, return the 0/1 value, if it is not in the dict, return the \n 2 class label based on the split dict\n ' return_beh_score = [] return_beh_mask = [] for x in self.beh_list: if (x in input_label_dict_list): return_beh_score.append(input_label_dict_list[x]) return_beh_mask.append(1) else: return_beh_score.append(2.0) return_beh_mask.append(0) return (return_beh_score, return_beh_mask)
def gen_label_seq(self, npy_str, input_label_dict_list): '\n if the beh exists in binary score dict, return the 0/1 value, if it is not in the dict, return the \n 2 class label based on the split dict\n ' return_beh_score = [] return_beh_mask = [] for x in self.beh_list: if (x in input_label_dict_list): return_beh_score.append(input_label_dict_list[x]) return_beh_mask.append(1) else: return_beh_score.append(2.0) return_beh_mask.append(0) return (return_beh_score, return_beh_mask)<|docstring|>if the beh exists in binary score dict, return the 0/1 value, if it is not in the dict, return the 2 class label based on the split dict<|endoftext|>
b5505954cc26e386b1a219074e3c3873b07122246346fb68ae6b64e92f85d826
def test_trim_with_trailing_whitespace(tmpdir):
    """Trimming files with trailing whitespace should remove the whitespace."""
    folder = tmpdir.mkdir('sub')
    file_handle = folder.join('whitespace.txt')
    file_handle.write('a line \na line \n')
    trim(folder, '.txt')
    with open(file_handle, 'r') as f:
        for line in f:
            # Leftover debug `print(repr(line))` removed: it only cluttered
            # test output and asserted nothing.
            assert line == 'a line\n'
Trimming files with trailing whitespace should remove the whitespace.
fixwhitespace/tests/test_fixwhitespace.py
test_trim_with_trailing_whitespace
honzo0481/whitespace
0
python
def test_trim_with_trailing_whitespace(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('whitespace.txt') file_handle.write('a line \na line \n') trim(folder, '.txt') with open(file_handle, 'r') as f: for line in f: print(repr(line)) assert (line == 'a line\n')
def test_trim_with_trailing_whitespace(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('whitespace.txt') file_handle.write('a line \na line \n') trim(folder, '.txt') with open(file_handle, 'r') as f: for line in f: print(repr(line)) assert (line == 'a line\n')<|docstring|>Trimming files with trailing whitespace should remove the whitespace.<|endoftext|>
97720613c1f90c310317225ed20b9eda409800b644e075a283ccaa31b7af13f2
def test_trim_without_trailing_whitespace(tmpdir):
    """Trimming files without trailing whitespace shouldn't change them."""
    folder = tmpdir.mkdir('sub')
    target = folder.join('nowhitespace.txt')
    target.write('a line\na line\n')
    trim(folder, '.txt')
    with open(target, 'r') as handle:
        assert all(line == 'a line\n' for line in handle)
Trimming files without trailing whitespace shouldn't change them.
fixwhitespace/tests/test_fixwhitespace.py
test_trim_without_trailing_whitespace
honzo0481/whitespace
0
python
def test_trim_without_trailing_whitespace(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('nowhitespace.txt') file_handle.write('a line\na line\n') trim(folder, '.txt') with open(file_handle, 'r') as f: for line in f: assert (line == 'a line\n')
def test_trim_without_trailing_whitespace(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('nowhitespace.txt') file_handle.write('a line\na line\n') trim(folder, '.txt') with open(file_handle, 'r') as f: for line in f: assert (line == 'a line\n')<|docstring|>Trimming files without trailing whitespace shouldn't change them.<|endoftext|>
bf28c058155e0795720b8eb34cff7a2904a8ae6fb30355a4d449aaff90c5e260
def test_tabs2spaces_without_tabs(tmpdir):
    """Converting tabs in a file without tabs shouldn't alter the file."""
    folder = tmpdir.mkdir('sub')
    target = folder.join('notabs.txt')
    target.write(' a line\n a line\n')
    tabs2spaces(folder, '.txt')
    with open(target, 'r') as handle:
        assert all(line == ' a line\n' for line in handle)
Converting tabs in a file without tabs shouldn't alter the file.
fixwhitespace/tests/test_fixwhitespace.py
test_tabs2spaces_without_tabs
honzo0481/whitespace
0
python
def test_tabs2spaces_without_tabs(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('notabs.txt') file_handle.write(' a line\n a line\n') tabs2spaces(folder, '.txt') with open(file_handle, 'r') as f: for line in f: assert (line == ' a line\n')
def test_tabs2spaces_without_tabs(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('notabs.txt') file_handle.write(' a line\n a line\n') tabs2spaces(folder, '.txt') with open(file_handle, 'r') as f: for line in f: assert (line == ' a line\n')<|docstring|>Converting tabs in a file without tabs shouldn't alter the file.<|endoftext|>
3b829e3b87251394fbf864305a48a330bcd219597e84ce8b75634223e1f74dbf
def test_tabs2spaces_with_tabs(tmpdir):
    """Converting tabs in a file containing tabs should replace them."""
    folder = tmpdir.mkdir('sub')
    target = folder.join('tabs.txt')
    target.write('\tline 1\n\t\tline 2\n')
    width = 2
    tabs2spaces(folder, '.txt', n=width)
    with open(target, 'r') as handle:
        # Line i starts with i tabs, so it should now start with i*width spaces.
        for lineno, line in enumerate(handle, start=1):
            assert line == f"{' ' * (lineno * width)}line {lineno}\n"
Converting tabs in a file containing tabs should replace them.
fixwhitespace/tests/test_fixwhitespace.py
test_tabs2spaces_with_tabs
honzo0481/whitespace
0
python
def test_tabs2spaces_with_tabs(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('tabs.txt') file_handle.write('\tline 1\n\t\tline 2\n') n = 2 tabs2spaces(folder, '.txt', n=n) with open(file_handle, 'r') as f: for (i, line) in enumerate(f, start=1): assert (line == ('%sline %s\n' % (((' ' * i) * n), i)))
def test_tabs2spaces_with_tabs(tmpdir): folder = tmpdir.mkdir('sub') file_handle = folder.join('tabs.txt') file_handle.write('\tline 1\n\t\tline 2\n') n = 2 tabs2spaces(folder, '.txt', n=n) with open(file_handle, 'r') as f: for (i, line) in enumerate(f, start=1): assert (line == ('%sline %s\n' % (((' ' * i) * n), i)))<|docstring|>Converting tabs in a file containing tabs should replace them.<|endoftext|>
96422e63ec852377f2dc3a9c273c35917c7f1bca170a030646635ee3d98720c4
def test_find_files_returns_only_files_with_correct_extensions(tmpdir):
    """All files with extensions in `exts` should be returned and no others."""
    folder = tmpdir.mkdir('sub')
    # write() returns None, so the previous `txt = ...` / `py = ...` bindings
    # were dead; only the file-creation side effects matter.
    folder.join('file1.txt').write('foo')
    folder.join('file2.py').write('foo')
    exts = '.txt'
    files = find_files(folder, exts=exts)
    assert '.txt' in [f[-4:] for f in files]
    # The docstring promises "and no others": the .py file must be filtered out.
    assert '.py' not in [f[-3:] for f in files]
All files with extensions in `exts` should be returned and no others.
fixwhitespace/tests/test_fixwhitespace.py
test_find_files_returns_only_files_with_correct_extensions
honzo0481/whitespace
0
python
def test_find_files_returns_only_files_with_correct_extensions(tmpdir): folder = tmpdir.mkdir('sub') txt = folder.join('file1.txt').write('foo') py = folder.join('file2.py').write('foo') exts = '.txt' files = find_files(folder, exts=exts) files = [f[(- 4):] for f in files] assert ('.txt' in files)
def test_find_files_returns_only_files_with_correct_extensions(tmpdir): folder = tmpdir.mkdir('sub') txt = folder.join('file1.txt').write('foo') py = folder.join('file2.py').write('foo') exts = '.txt' files = find_files(folder, exts=exts) files = [f[(- 4):] for f in files] assert ('.txt' in files)<|docstring|>All files with extensions in `exts` should be returned and no others.<|endoftext|>
46064ac6faa82b1f83888c701cf9eeca96e6c04bb0fdef046a677fad7231cfc6
def test_find_files_returns_files_in_nested_folders(tmpdir):
    'Matching files in nested folders should be returned.'
    # TODO: unimplemented stub — currently passes vacuously. Build a nested
    # tmpdir tree with matching files and assert find_files recurses into it.
Matching files in nested folders should be returned.
fixwhitespace/tests/test_fixwhitespace.py
test_find_files_returns_files_in_nested_folders
honzo0481/whitespace
0
python
def test_find_files_returns_files_in_nested_folders(tmpdir):
def test_find_files_returns_files_in_nested_folders(tmpdir): <|docstring|>Matching files in nested folders should be returned.<|endoftext|>
c6852c73ecacb8586978968cfbb65c852e69681e26063d404f8faec4221a061d
def number_of_axes(array):
    """Return the number of axes (dimensions) of *array*.

    Parameters
    ----------
    array : array_like
        Any Python sequence (or scalar) convertible to a numpy array.

    Returns
    -------
    int
        Number of axes of the converted array.
    """
    as_ndarray = np.array(array)
    return as_ndarray.ndim
Parameters ---------- array Python array Returns ------- Number of axes (dimensions) of the array.
math_study/numpy_basics/numpy_array/array_information.py
number_of_axes
PitPietro/pascal-triangle
1
python
def number_of_axes(array): '\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Number of axes (dimensions) of the array.\n ' return np.array(array).ndim
def number_of_axes(array): '\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Number of axes (dimensions) of the array.\n ' return np.array(array).ndim<|docstring|>Parameters ---------- array Python array Returns ------- Number of axes (dimensions) of the array.<|endoftext|>
202243c2f3d98c5573a8132d8a19e25161afe27c97ed3fc7feb1acb1cfef2221
def shape(array):
    """Return the dimensions of *array* as a tuple of integers.

    For a matrix with n rows and m columns the result is (n, m); the tuple's
    length therefore equals the number of axes (``ndim``).

    Parameters
    ----------
    array : array_like
        Any Python sequence convertible to a numpy array.

    Returns
    -------
    tuple of int
        Size of the array along each dimension.
    """
    as_ndarray = np.array(array)
    return as_ndarray.shape
The dimension of the array is a tuple of integers indicating the size of the array in each dimension. For a matrix with n rows and m columns, shape will be (n,m). The length of the shape tuple is therefore the number of axes, ndim. Parameters ---------- array Python array Returns ------- Dimensions of the array.
math_study/numpy_basics/numpy_array/array_information.py
shape
PitPietro/pascal-triangle
1
python
def shape(array): '\n The dimension of the array is a tuple of integers indicating the size of the array in each dimension.\n For a matrix with n rows and m columns, shape will be (n,m).\n The length of the shape tuple is therefore the number of axes, ndim.\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Dimensions of the array.\n ' return np.array(array).shape
def shape(array): '\n The dimension of the array is a tuple of integers indicating the size of the array in each dimension.\n For a matrix with n rows and m columns, shape will be (n,m).\n The length of the shape tuple is therefore the number of axes, ndim.\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Dimensions of the array.\n ' return np.array(array).shape<|docstring|>The dimension of the array is a tuple of integers indicating the size of the array in each dimension. For a matrix with n rows and m columns, shape will be (n,m). The length of the shape tuple is therefore the number of axes, ndim. Parameters ---------- array Python array Returns ------- Dimensions of the array.<|endoftext|>
c2bd763bd919bc700971b6e977a9b04bc0f4cc2abf2af98c947433f2e70aaa3e
def size(array):
    """Return the total number of elements of *array*.

    Equal to the product of the elements of its shape.

    Parameters
    ----------
    array : array_like
        Any Python sequence convertible to a numpy array.

    Returns
    -------
    int
        Total element count.
    """
    as_ndarray = np.array(array)
    return as_ndarray.size
Parameters ---------- array Python array Python array Returns ------- Total number of elements of the array. It is equal to the product of the elements of shape.
math_study/numpy_basics/numpy_array/array_information.py
size
PitPietro/pascal-triangle
1
python
def size(array): '\n Parameters\n ----------\n array Python array Python array\n\n Returns\n -------\n Total number of elements of the array. It is equal to the product of the elements of shape.\n ' return np.array(array).size
def size(array): '\n Parameters\n ----------\n array Python array Python array\n\n Returns\n -------\n Total number of elements of the array. It is equal to the product of the elements of shape.\n ' return np.array(array).size<|docstring|>Parameters ---------- array Python array Python array Returns ------- Total number of elements of the array. It is equal to the product of the elements of shape.<|endoftext|>
ac139bcb1e21af6575e451e8ad1f3692ff0885d233252830ce61b4651c295e3b
def dtype(array):
    """Return the object describing the element type of *array*.

    Dtypes can be specified with standard Python types or numpy's own types
    (e.g. ``numpy.int32``, ``numpy.int16``, ``numpy.float64``).

    Parameters
    ----------
    array : array_like
        Any Python sequence convertible to a numpy array.

    Returns
    -------
    numpy.dtype
        Element type of the converted array.
    """
    as_ndarray = np.array(array)
    return as_ndarray.dtype
One can create or specify dtype's using standard Python types. NumPy provides types of its own. numpy.int32, numpy.int16, and numpy.float64 are some examples. Parameters ---------- array Python array Returns ------- Object describing the type of the elements in the array.
math_study/numpy_basics/numpy_array/array_information.py
dtype
PitPietro/pascal-triangle
1
python
def dtype(array): "\n One can create or specify dtype's using standard Python types.\n NumPy provides types of its own. numpy.int32, numpy.int16, and numpy.float64 are some examples.\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Object describing the type of the elements in the array.\n " return np.array(array).dtype
def dtype(array): "\n One can create or specify dtype's using standard Python types.\n NumPy provides types of its own. numpy.int32, numpy.int16, and numpy.float64 are some examples.\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Object describing the type of the elements in the array.\n " return np.array(array).dtype<|docstring|>One can create or specify dtype's using standard Python types. NumPy provides types of its own. numpy.int32, numpy.int16, and numpy.float64 are some examples. Parameters ---------- array Python array Returns ------- Object describing the type of the elements in the array.<|endoftext|>
64db66fbcb0a35aff33cad676cae746628b99f180c402989b0418e6987b9aa4b
def item_size(array):
    """Return the size in bytes of each element of *array*.

    Parameters
    ----------
    array : array_like
        Any Python sequence convertible to a numpy array.

    Returns
    -------
    int
        Per-element size in bytes.
    """
    as_ndarray = np.array(array)
    return as_ndarray.itemsize
Parameters ---------- array Python array Returns ------- Size in bytes of each element of the array.
math_study/numpy_basics/numpy_array/array_information.py
item_size
PitPietro/pascal-triangle
1
python
def item_size(array): '\n\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Size in bytes of each element of the array.\n ' return np.array(array).itemsize
def item_size(array): '\n\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n Size in bytes of each element of the array.\n ' return np.array(array).itemsize<|docstring|>Parameters ---------- array Python array Returns ------- Size in bytes of each element of the array.<|endoftext|>
770f53762e5b1a52777699e1e0a0b0b1c07d5ed4f6f9f27aea14243b5a7a658c
def data(array): '\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n The buffer containing the actual elements of the array.\n You do not usually need to use this attribute: you will access the elements in an array using indexing facilities.\n ' return np.array(array).data
Parameters ---------- array Python array Returns ------- The buffer containing the actual elements of the array. You do not usually need to use this attribute: you will access the elements in an array using indexing facilities.
math_study/numpy_basics/numpy_array/array_information.py
data
PitPietro/pascal-triangle
1
python
def data(array): '\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n The buffer containing the actual elements of the array.\n You do not usually need to use this attribute: you will access the elements in an array using indexing facilities.\n ' return np.array(array).data
def data(array): '\n Parameters\n ----------\n array Python array\n\n Returns\n -------\n The buffer containing the actual elements of the array.\n You do not usually need to use this attribute: you will access the elements in an array using indexing facilities.\n ' return np.array(array).data<|docstring|>Parameters ---------- array Python array Returns ------- The buffer containing the actual elements of the array. You do not usually need to use this attribute: you will access the elements in an array using indexing facilities.<|endoftext|>
92cda50a0db2fe1f5601788dfbae1d63d67ac4f5bae5865380e90030e672e531
def set_state(self, model_name: str, training_idx: int, fit_Kwargs: dict, dataset_name: str, compile_kwargs: dict, number_of_epochs: int, loss_class: Loss, loss_kwargs: dict, optimizer_class: Optimizer, optimizer_kwargs: dict) -> None: '\n ## Function:\n\n Set a new state.\n\n ## Receives:\n\n All states parameters.\n\n ## Returns:\n\n None\n ' self.training_idx: int = training_idx self.model_name: str = model_name self.number_of_epochs: int = number_of_epochs self.fit_Kwargs: dict = fit_Kwargs self.dataset_name: str = dataset_name (self.date, self.time) = get_current_time_and_data() self.sub_dir_0 = 'Relatorios-Dados-etc/Resultados/' self.sub_dir_1 = (self.dataset_name + '/') self.sub_dir_2 = (self.model_name.replace('.json', '') + '/') self.sub_dir_3 = (str(training_idx) + '/') self.data_path = (((self.sub_dir_0 + self.sub_dir_1) + self.sub_dir_2) + self.sub_dir_3) csv_name: str = (('csv-#' + str(self.training_idx)) + '.log') self.csv_pathname: str = (self.data_path + csv_name) self.model_save_pathname: str = (((self.data_path + '#') + str(self.training_idx)) + '-checkp') self.models_dir: str = 'nNet_models/' dataframe_name: str = 'Results_DataFrame' self.dataframe_pathname: str = (self.sub_dir_0 + dataframe_name) self.dataframe_columns: list = ['training idx', 'date', 'model name', 'dataset', 'dataset params', 'regularizer', 'model total params', 'model total layers', 'optimizer', 'optimizer args', 'loss', 'loss args', 'compile args', 'fit args', 'best training loss', 'best training epoch', 'best validation loss', 'best validation epoch', 'last epoch training loss', 'last epoch validation', 'last epoch'] self.last_epoch: int = get_last_epoch(self.csv_pathname) self.loss: Loss = loss_class self.loss_kwargs: dict = loss_kwargs self.optimizer: Optimizer = optimizer_class self.optimizer_kwargs: dict = optimizer_kwargs self.compile_kwargs: dict = compile_kwargs
## Function: Set a new state. ## Receives: All states parameters. ## Returns: None
old/automatic_training.py
set_state
AlanPXD/IC-AutoEncoder
0
python
def set_state(self, model_name: str, training_idx: int, fit_Kwargs: dict, dataset_name: str, compile_kwargs: dict, number_of_epochs: int, loss_class: Loss, loss_kwargs: dict, optimizer_class: Optimizer, optimizer_kwargs: dict) -> None: '\n ## Function:\n\n Set a new state.\n\n ## Receives:\n\n All states parameters.\n\n ## Returns:\n\n None\n ' self.training_idx: int = training_idx self.model_name: str = model_name self.number_of_epochs: int = number_of_epochs self.fit_Kwargs: dict = fit_Kwargs self.dataset_name: str = dataset_name (self.date, self.time) = get_current_time_and_data() self.sub_dir_0 = 'Relatorios-Dados-etc/Resultados/' self.sub_dir_1 = (self.dataset_name + '/') self.sub_dir_2 = (self.model_name.replace('.json', ) + '/') self.sub_dir_3 = (str(training_idx) + '/') self.data_path = (((self.sub_dir_0 + self.sub_dir_1) + self.sub_dir_2) + self.sub_dir_3) csv_name: str = (('csv-#' + str(self.training_idx)) + '.log') self.csv_pathname: str = (self.data_path + csv_name) self.model_save_pathname: str = (((self.data_path + '#') + str(self.training_idx)) + '-checkp') self.models_dir: str = 'nNet_models/' dataframe_name: str = 'Results_DataFrame' self.dataframe_pathname: str = (self.sub_dir_0 + dataframe_name) self.dataframe_columns: list = ['training idx', 'date', 'model name', 'dataset', 'dataset params', 'regularizer', 'model total params', 'model total layers', 'optimizer', 'optimizer args', 'loss', 'loss args', 'compile args', 'fit args', 'best training loss', 'best training epoch', 'best validation loss', 'best validation epoch', 'last epoch training loss', 'last epoch validation', 'last epoch'] self.last_epoch: int = get_last_epoch(self.csv_pathname) self.loss: Loss = loss_class self.loss_kwargs: dict = loss_kwargs self.optimizer: Optimizer = optimizer_class self.optimizer_kwargs: dict = optimizer_kwargs self.compile_kwargs: dict = compile_kwargs
def set_state(self, model_name: str, training_idx: int, fit_Kwargs: dict, dataset_name: str, compile_kwargs: dict, number_of_epochs: int, loss_class: Loss, loss_kwargs: dict, optimizer_class: Optimizer, optimizer_kwargs: dict) -> None: '\n ## Function:\n\n Set a new state.\n\n ## Receives:\n\n All states parameters.\n\n ## Returns:\n\n None\n ' self.training_idx: int = training_idx self.model_name: str = model_name self.number_of_epochs: int = number_of_epochs self.fit_Kwargs: dict = fit_Kwargs self.dataset_name: str = dataset_name (self.date, self.time) = get_current_time_and_data() self.sub_dir_0 = 'Relatorios-Dados-etc/Resultados/' self.sub_dir_1 = (self.dataset_name + '/') self.sub_dir_2 = (self.model_name.replace('.json', ) + '/') self.sub_dir_3 = (str(training_idx) + '/') self.data_path = (((self.sub_dir_0 + self.sub_dir_1) + self.sub_dir_2) + self.sub_dir_3) csv_name: str = (('csv-#' + str(self.training_idx)) + '.log') self.csv_pathname: str = (self.data_path + csv_name) self.model_save_pathname: str = (((self.data_path + '#') + str(self.training_idx)) + '-checkp') self.models_dir: str = 'nNet_models/' dataframe_name: str = 'Results_DataFrame' self.dataframe_pathname: str = (self.sub_dir_0 + dataframe_name) self.dataframe_columns: list = ['training idx', 'date', 'model name', 'dataset', 'dataset params', 'regularizer', 'model total params', 'model total layers', 'optimizer', 'optimizer args', 'loss', 'loss args', 'compile args', 'fit args', 'best training loss', 'best training epoch', 'best validation loss', 'best validation epoch', 'last epoch training loss', 'last epoch validation', 'last epoch'] self.last_epoch: int = get_last_epoch(self.csv_pathname) self.loss: Loss = loss_class self.loss_kwargs: dict = loss_kwargs self.optimizer: Optimizer = optimizer_class self.optimizer_kwargs: dict = optimizer_kwargs self.compile_kwargs: dict = compile_kwargs<|docstring|>## Function: Set a new state. ## Receives: All states parameters. ## Returns: None<|endoftext|>