body
stringlengths 26
98.2k
| body_hash
int64 -9,222,864,604,528,158,000
9,221,803,474B
| docstring
stringlengths 1
16.8k
| path
stringlengths 5
230
| name
stringlengths 1
96
| repository_name
stringlengths 7
89
| lang
stringclasses 1
value | body_without_docstring
stringlengths 20
98.2k
|
|---|---|---|---|---|---|---|---|
@moid.setter
def moid(self, moid):
'\n Sets the moid of this EquipmentChassis.\n A unique identifier of this Managed Object instance. \n\n :param moid: The moid of this EquipmentChassis.\n :type: str\n '
self._moid = moid
| -8,433,131,016,588,678,000
|
Sets the moid of this EquipmentChassis.
A unique identifier of this Managed Object instance.
:param moid: The moid of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
moid
|
fdemello/intersight-python
|
python
|
@moid.setter
def moid(self, moid):
'\n Sets the moid of this EquipmentChassis.\n A unique identifier of this Managed Object instance. \n\n :param moid: The moid of this EquipmentChassis.\n :type: str\n '
self._moid = moid
|
@property
def object_type(self):
'\n Gets the object_type of this EquipmentChassis.\n The fully-qualified type of this managed object, e.g. the class name. \n\n :return: The object_type of this EquipmentChassis.\n :rtype: str\n '
return self._object_type
| 9,146,052,699,057,276,000
|
Gets the object_type of this EquipmentChassis.
The fully-qualified type of this managed object, e.g. the class name.
:return: The object_type of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
object_type
|
fdemello/intersight-python
|
python
|
@property
def object_type(self):
'\n Gets the object_type of this EquipmentChassis.\n The fully-qualified type of this managed object, e.g. the class name. \n\n :return: The object_type of this EquipmentChassis.\n :rtype: str\n '
return self._object_type
|
@object_type.setter
def object_type(self, object_type):
'\n Sets the object_type of this EquipmentChassis.\n The fully-qualified type of this managed object, e.g. the class name. \n\n :param object_type: The object_type of this EquipmentChassis.\n :type: str\n '
self._object_type = object_type
| 5,967,508,551,853,678,000
|
Sets the object_type of this EquipmentChassis.
The fully-qualified type of this managed object, e.g. the class name.
:param object_type: The object_type of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
object_type
|
fdemello/intersight-python
|
python
|
@object_type.setter
def object_type(self, object_type):
'\n Sets the object_type of this EquipmentChassis.\n The fully-qualified type of this managed object, e.g. the class name. \n\n :param object_type: The object_type of this EquipmentChassis.\n :type: str\n '
self._object_type = object_type
|
@property
def owners(self):
'\n Gets the owners of this EquipmentChassis.\n An array of owners which represent effective ownership of this object. \n\n :return: The owners of this EquipmentChassis.\n :rtype: list[str]\n '
return self._owners
| -5,796,262,495,266,642,000
|
Gets the owners of this EquipmentChassis.
An array of owners which represent effective ownership of this object.
:return: The owners of this EquipmentChassis.
:rtype: list[str]
|
intersight/models/equipment_chassis.py
|
owners
|
fdemello/intersight-python
|
python
|
@property
def owners(self):
'\n Gets the owners of this EquipmentChassis.\n An array of owners which represent effective ownership of this object. \n\n :return: The owners of this EquipmentChassis.\n :rtype: list[str]\n '
return self._owners
|
@owners.setter
def owners(self, owners):
'\n Sets the owners of this EquipmentChassis.\n An array of owners which represent effective ownership of this object. \n\n :param owners: The owners of this EquipmentChassis.\n :type: list[str]\n '
self._owners = owners
| -5,756,124,446,422,222,000
|
Sets the owners of this EquipmentChassis.
An array of owners which represent effective ownership of this object.
:param owners: The owners of this EquipmentChassis.
:type: list[str]
|
intersight/models/equipment_chassis.py
|
owners
|
fdemello/intersight-python
|
python
|
@owners.setter
def owners(self, owners):
'\n Sets the owners of this EquipmentChassis.\n An array of owners which represent effective ownership of this object. \n\n :param owners: The owners of this EquipmentChassis.\n :type: list[str]\n '
self._owners = owners
|
@property
def parent(self):
'\n Gets the parent of this EquipmentChassis.\n The direct ancestor of this managed object in the containment hierarchy. \n\n :return: The parent of this EquipmentChassis.\n :rtype: MoBaseMoRef\n '
return self._parent
| 3,423,610,237,586,827,000
|
Gets the parent of this EquipmentChassis.
The direct ancestor of this managed object in the containment hierarchy.
:return: The parent of this EquipmentChassis.
:rtype: MoBaseMoRef
|
intersight/models/equipment_chassis.py
|
parent
|
fdemello/intersight-python
|
python
|
@property
def parent(self):
'\n Gets the parent of this EquipmentChassis.\n The direct ancestor of this managed object in the containment hierarchy. \n\n :return: The parent of this EquipmentChassis.\n :rtype: MoBaseMoRef\n '
return self._parent
|
@parent.setter
def parent(self, parent):
'\n Sets the parent of this EquipmentChassis.\n The direct ancestor of this managed object in the containment hierarchy. \n\n :param parent: The parent of this EquipmentChassis.\n :type: MoBaseMoRef\n '
self._parent = parent
| 510,684,913,294,766,800
|
Sets the parent of this EquipmentChassis.
The direct ancestor of this managed object in the containment hierarchy.
:param parent: The parent of this EquipmentChassis.
:type: MoBaseMoRef
|
intersight/models/equipment_chassis.py
|
parent
|
fdemello/intersight-python
|
python
|
@parent.setter
def parent(self, parent):
'\n Sets the parent of this EquipmentChassis.\n The direct ancestor of this managed object in the containment hierarchy. \n\n :param parent: The parent of this EquipmentChassis.\n :type: MoBaseMoRef\n '
self._parent = parent
|
@property
def tags(self):
'\n Gets the tags of this EquipmentChassis.\n An array of tags, which allow to add key, value meta-data to managed objects. \n\n :return: The tags of this EquipmentChassis.\n :rtype: list[MoTag]\n '
return self._tags
| 5,254,447,752,488,890,000
|
Gets the tags of this EquipmentChassis.
An array of tags, which allow to add key, value meta-data to managed objects.
:return: The tags of this EquipmentChassis.
:rtype: list[MoTag]
|
intersight/models/equipment_chassis.py
|
tags
|
fdemello/intersight-python
|
python
|
@property
def tags(self):
'\n Gets the tags of this EquipmentChassis.\n An array of tags, which allow to add key, value meta-data to managed objects. \n\n :return: The tags of this EquipmentChassis.\n :rtype: list[MoTag]\n '
return self._tags
|
@tags.setter
def tags(self, tags):
'\n Sets the tags of this EquipmentChassis.\n An array of tags, which allow to add key, value meta-data to managed objects. \n\n :param tags: The tags of this EquipmentChassis.\n :type: list[MoTag]\n '
self._tags = tags
| -2,896,345,568,088,920,000
|
Sets the tags of this EquipmentChassis.
An array of tags, which allow to add key, value meta-data to managed objects.
:param tags: The tags of this EquipmentChassis.
:type: list[MoTag]
|
intersight/models/equipment_chassis.py
|
tags
|
fdemello/intersight-python
|
python
|
@tags.setter
def tags(self, tags):
'\n Sets the tags of this EquipmentChassis.\n An array of tags, which allow to add key, value meta-data to managed objects. \n\n :param tags: The tags of this EquipmentChassis.\n :type: list[MoTag]\n '
self._tags = tags
|
@property
def version_context(self):
'\n Gets the version_context of this EquipmentChassis.\n The versioning info for this managed object \n\n :return: The version_context of this EquipmentChassis.\n :rtype: MoVersionContext\n '
return self._version_context
| -2,244,708,150,065,598,200
|
Gets the version_context of this EquipmentChassis.
The versioning info for this managed object
:return: The version_context of this EquipmentChassis.
:rtype: MoVersionContext
|
intersight/models/equipment_chassis.py
|
version_context
|
fdemello/intersight-python
|
python
|
@property
def version_context(self):
'\n Gets the version_context of this EquipmentChassis.\n The versioning info for this managed object \n\n :return: The version_context of this EquipmentChassis.\n :rtype: MoVersionContext\n '
return self._version_context
|
@version_context.setter
def version_context(self, version_context):
'\n Sets the version_context of this EquipmentChassis.\n The versioning info for this managed object \n\n :param version_context: The version_context of this EquipmentChassis.\n :type: MoVersionContext\n '
self._version_context = version_context
| 7,014,728,958,737,755,000
|
Sets the version_context of this EquipmentChassis.
The versioning info for this managed object
:param version_context: The version_context of this EquipmentChassis.
:type: MoVersionContext
|
intersight/models/equipment_chassis.py
|
version_context
|
fdemello/intersight-python
|
python
|
@version_context.setter
def version_context(self, version_context):
'\n Sets the version_context of this EquipmentChassis.\n The versioning info for this managed object \n\n :param version_context: The version_context of this EquipmentChassis.\n :type: MoVersionContext\n '
self._version_context = version_context
|
@property
def device_mo_id(self):
'\n Gets the device_mo_id of this EquipmentChassis.\n\n :return: The device_mo_id of this EquipmentChassis.\n :rtype: str\n '
return self._device_mo_id
| 2,803,210,910,497,360,000
|
Gets the device_mo_id of this EquipmentChassis.
:return: The device_mo_id of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
device_mo_id
|
fdemello/intersight-python
|
python
|
@property
def device_mo_id(self):
'\n Gets the device_mo_id of this EquipmentChassis.\n\n :return: The device_mo_id of this EquipmentChassis.\n :rtype: str\n '
return self._device_mo_id
|
@device_mo_id.setter
def device_mo_id(self, device_mo_id):
'\n Sets the device_mo_id of this EquipmentChassis.\n\n :param device_mo_id: The device_mo_id of this EquipmentChassis.\n :type: str\n '
self._device_mo_id = device_mo_id
| 7,266,247,615,991,828,000
|
Sets the device_mo_id of this EquipmentChassis.
:param device_mo_id: The device_mo_id of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
device_mo_id
|
fdemello/intersight-python
|
python
|
@device_mo_id.setter
def device_mo_id(self, device_mo_id):
'\n Sets the device_mo_id of this EquipmentChassis.\n\n :param device_mo_id: The device_mo_id of this EquipmentChassis.\n :type: str\n '
self._device_mo_id = device_mo_id
|
@property
def dn(self):
'\n Gets the dn of this EquipmentChassis.\n\n :return: The dn of this EquipmentChassis.\n :rtype: str\n '
return self._dn
| -5,242,492,363,572,684,000
|
Gets the dn of this EquipmentChassis.
:return: The dn of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
dn
|
fdemello/intersight-python
|
python
|
@property
def dn(self):
'\n Gets the dn of this EquipmentChassis.\n\n :return: The dn of this EquipmentChassis.\n :rtype: str\n '
return self._dn
|
@dn.setter
def dn(self, dn):
'\n Sets the dn of this EquipmentChassis.\n\n :param dn: The dn of this EquipmentChassis.\n :type: str\n '
self._dn = dn
| -3,085,358,775,880,350,700
|
Sets the dn of this EquipmentChassis.
:param dn: The dn of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
dn
|
fdemello/intersight-python
|
python
|
@dn.setter
def dn(self, dn):
'\n Sets the dn of this EquipmentChassis.\n\n :param dn: The dn of this EquipmentChassis.\n :type: str\n '
self._dn = dn
|
@property
def rn(self):
'\n Gets the rn of this EquipmentChassis.\n\n :return: The rn of this EquipmentChassis.\n :rtype: str\n '
return self._rn
| 4,563,868,324,021,508,000
|
Gets the rn of this EquipmentChassis.
:return: The rn of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
rn
|
fdemello/intersight-python
|
python
|
@property
def rn(self):
'\n Gets the rn of this EquipmentChassis.\n\n :return: The rn of this EquipmentChassis.\n :rtype: str\n '
return self._rn
|
@rn.setter
def rn(self, rn):
'\n Sets the rn of this EquipmentChassis.\n\n :param rn: The rn of this EquipmentChassis.\n :type: str\n '
self._rn = rn
| 3,427,683,795,829,285,400
|
Sets the rn of this EquipmentChassis.
:param rn: The rn of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
rn
|
fdemello/intersight-python
|
python
|
@rn.setter
def rn(self, rn):
'\n Sets the rn of this EquipmentChassis.\n\n :param rn: The rn of this EquipmentChassis.\n :type: str\n '
self._rn = rn
|
@property
def model(self):
'\n Gets the model of this EquipmentChassis.\n\n :return: The model of this EquipmentChassis.\n :rtype: str\n '
return self._model
| -587,197,220,262,295,600
|
Gets the model of this EquipmentChassis.
:return: The model of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
model
|
fdemello/intersight-python
|
python
|
@property
def model(self):
'\n Gets the model of this EquipmentChassis.\n\n :return: The model of this EquipmentChassis.\n :rtype: str\n '
return self._model
|
@model.setter
def model(self, model):
'\n Sets the model of this EquipmentChassis.\n\n :param model: The model of this EquipmentChassis.\n :type: str\n '
self._model = model
| -6,572,406,886,304,264,000
|
Sets the model of this EquipmentChassis.
:param model: The model of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
model
|
fdemello/intersight-python
|
python
|
@model.setter
def model(self, model):
'\n Sets the model of this EquipmentChassis.\n\n :param model: The model of this EquipmentChassis.\n :type: str\n '
self._model = model
|
@property
def revision(self):
'\n Gets the revision of this EquipmentChassis.\n\n :return: The revision of this EquipmentChassis.\n :rtype: str\n '
return self._revision
| 7,361,901,812,003,549,000
|
Gets the revision of this EquipmentChassis.
:return: The revision of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
revision
|
fdemello/intersight-python
|
python
|
@property
def revision(self):
'\n Gets the revision of this EquipmentChassis.\n\n :return: The revision of this EquipmentChassis.\n :rtype: str\n '
return self._revision
|
@revision.setter
def revision(self, revision):
'\n Sets the revision of this EquipmentChassis.\n\n :param revision: The revision of this EquipmentChassis.\n :type: str\n '
self._revision = revision
| -2,461,176,041,929,837,600
|
Sets the revision of this EquipmentChassis.
:param revision: The revision of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
revision
|
fdemello/intersight-python
|
python
|
@revision.setter
def revision(self, revision):
'\n Sets the revision of this EquipmentChassis.\n\n :param revision: The revision of this EquipmentChassis.\n :type: str\n '
self._revision = revision
|
@property
def serial(self):
'\n Gets the serial of this EquipmentChassis.\n\n :return: The serial of this EquipmentChassis.\n :rtype: str\n '
return self._serial
| 6,395,463,785,908,063,000
|
Gets the serial of this EquipmentChassis.
:return: The serial of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
serial
|
fdemello/intersight-python
|
python
|
@property
def serial(self):
'\n Gets the serial of this EquipmentChassis.\n\n :return: The serial of this EquipmentChassis.\n :rtype: str\n '
return self._serial
|
@serial.setter
def serial(self, serial):
'\n Sets the serial of this EquipmentChassis.\n\n :param serial: The serial of this EquipmentChassis.\n :type: str\n '
self._serial = serial
| 110,131,993,948,440,930
|
Sets the serial of this EquipmentChassis.
:param serial: The serial of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
serial
|
fdemello/intersight-python
|
python
|
@serial.setter
def serial(self, serial):
'\n Sets the serial of this EquipmentChassis.\n\n :param serial: The serial of this EquipmentChassis.\n :type: str\n '
self._serial = serial
|
@property
def vendor(self):
'\n Gets the vendor of this EquipmentChassis.\n\n :return: The vendor of this EquipmentChassis.\n :rtype: str\n '
return self._vendor
| -6,223,945,719,104,198,000
|
Gets the vendor of this EquipmentChassis.
:return: The vendor of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
vendor
|
fdemello/intersight-python
|
python
|
@property
def vendor(self):
'\n Gets the vendor of this EquipmentChassis.\n\n :return: The vendor of this EquipmentChassis.\n :rtype: str\n '
return self._vendor
|
@vendor.setter
def vendor(self, vendor):
'\n Sets the vendor of this EquipmentChassis.\n\n :param vendor: The vendor of this EquipmentChassis.\n :type: str\n '
self._vendor = vendor
| -4,499,878,453,311,607,000
|
Sets the vendor of this EquipmentChassis.
:param vendor: The vendor of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
vendor
|
fdemello/intersight-python
|
python
|
@vendor.setter
def vendor(self, vendor):
'\n Sets the vendor of this EquipmentChassis.\n\n :param vendor: The vendor of this EquipmentChassis.\n :type: str\n '
self._vendor = vendor
|
@property
def blades(self):
'\n Gets the blades of this EquipmentChassis.\n\n :return: The blades of this EquipmentChassis.\n :rtype: list[ComputeBladeRef]\n '
return self._blades
| -2,076,659,690,575,531,500
|
Gets the blades of this EquipmentChassis.
:return: The blades of this EquipmentChassis.
:rtype: list[ComputeBladeRef]
|
intersight/models/equipment_chassis.py
|
blades
|
fdemello/intersight-python
|
python
|
@property
def blades(self):
'\n Gets the blades of this EquipmentChassis.\n\n :return: The blades of this EquipmentChassis.\n :rtype: list[ComputeBladeRef]\n '
return self._blades
|
@blades.setter
def blades(self, blades):
'\n Sets the blades of this EquipmentChassis.\n\n :param blades: The blades of this EquipmentChassis.\n :type: list[ComputeBladeRef]\n '
self._blades = blades
| -7,377,899,236,427,372,000
|
Sets the blades of this EquipmentChassis.
:param blades: The blades of this EquipmentChassis.
:type: list[ComputeBladeRef]
|
intersight/models/equipment_chassis.py
|
blades
|
fdemello/intersight-python
|
python
|
@blades.setter
def blades(self, blades):
'\n Sets the blades of this EquipmentChassis.\n\n :param blades: The blades of this EquipmentChassis.\n :type: list[ComputeBladeRef]\n '
self._blades = blades
|
@property
def fanmodules(self):
'\n Gets the fanmodules of this EquipmentChassis.\n\n :return: The fanmodules of this EquipmentChassis.\n :rtype: list[EquipmentFanModuleRef]\n '
return self._fanmodules
| -2,379,875,100,550,353,400
|
Gets the fanmodules of this EquipmentChassis.
:return: The fanmodules of this EquipmentChassis.
:rtype: list[EquipmentFanModuleRef]
|
intersight/models/equipment_chassis.py
|
fanmodules
|
fdemello/intersight-python
|
python
|
@property
def fanmodules(self):
'\n Gets the fanmodules of this EquipmentChassis.\n\n :return: The fanmodules of this EquipmentChassis.\n :rtype: list[EquipmentFanModuleRef]\n '
return self._fanmodules
|
@fanmodules.setter
def fanmodules(self, fanmodules):
'\n Sets the fanmodules of this EquipmentChassis.\n\n :param fanmodules: The fanmodules of this EquipmentChassis.\n :type: list[EquipmentFanModuleRef]\n '
self._fanmodules = fanmodules
| -996,254,179,541,310,600
|
Sets the fanmodules of this EquipmentChassis.
:param fanmodules: The fanmodules of this EquipmentChassis.
:type: list[EquipmentFanModuleRef]
|
intersight/models/equipment_chassis.py
|
fanmodules
|
fdemello/intersight-python
|
python
|
@fanmodules.setter
def fanmodules(self, fanmodules):
'\n Sets the fanmodules of this EquipmentChassis.\n\n :param fanmodules: The fanmodules of this EquipmentChassis.\n :type: list[EquipmentFanModuleRef]\n '
self._fanmodules = fanmodules
|
@property
def ioms(self):
'\n Gets the ioms of this EquipmentChassis.\n\n :return: The ioms of this EquipmentChassis.\n :rtype: list[EquipmentIoCardRef]\n '
return self._ioms
| -283,981,135,645,268,640
|
Gets the ioms of this EquipmentChassis.
:return: The ioms of this EquipmentChassis.
:rtype: list[EquipmentIoCardRef]
|
intersight/models/equipment_chassis.py
|
ioms
|
fdemello/intersight-python
|
python
|
@property
def ioms(self):
'\n Gets the ioms of this EquipmentChassis.\n\n :return: The ioms of this EquipmentChassis.\n :rtype: list[EquipmentIoCardRef]\n '
return self._ioms
|
@ioms.setter
def ioms(self, ioms):
'\n Sets the ioms of this EquipmentChassis.\n\n :param ioms: The ioms of this EquipmentChassis.\n :type: list[EquipmentIoCardRef]\n '
self._ioms = ioms
| 7,464,975,318,010,999,000
|
Sets the ioms of this EquipmentChassis.
:param ioms: The ioms of this EquipmentChassis.
:type: list[EquipmentIoCardRef]
|
intersight/models/equipment_chassis.py
|
ioms
|
fdemello/intersight-python
|
python
|
@ioms.setter
def ioms(self, ioms):
'\n Sets the ioms of this EquipmentChassis.\n\n :param ioms: The ioms of this EquipmentChassis.\n :type: list[EquipmentIoCardRef]\n '
self._ioms = ioms
|
@property
def oper_state(self):
'\n Gets the oper_state of this EquipmentChassis.\n\n :return: The oper_state of this EquipmentChassis.\n :rtype: str\n '
return self._oper_state
| 2,989,402,908,181,816,000
|
Gets the oper_state of this EquipmentChassis.
:return: The oper_state of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
oper_state
|
fdemello/intersight-python
|
python
|
@property
def oper_state(self):
'\n Gets the oper_state of this EquipmentChassis.\n\n :return: The oper_state of this EquipmentChassis.\n :rtype: str\n '
return self._oper_state
|
@oper_state.setter
def oper_state(self, oper_state):
'\n Sets the oper_state of this EquipmentChassis.\n\n :param oper_state: The oper_state of this EquipmentChassis.\n :type: str\n '
self._oper_state = oper_state
| 2,119,176,885,830,340,000
|
Sets the oper_state of this EquipmentChassis.
:param oper_state: The oper_state of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
oper_state
|
fdemello/intersight-python
|
python
|
@oper_state.setter
def oper_state(self, oper_state):
'\n Sets the oper_state of this EquipmentChassis.\n\n :param oper_state: The oper_state of this EquipmentChassis.\n :type: str\n '
self._oper_state = oper_state
|
@property
def psus(self):
'\n Gets the psus of this EquipmentChassis.\n\n :return: The psus of this EquipmentChassis.\n :rtype: list[EquipmentPsuRef]\n '
return self._psus
| 7,552,621,456,630,749,000
|
Gets the psus of this EquipmentChassis.
:return: The psus of this EquipmentChassis.
:rtype: list[EquipmentPsuRef]
|
intersight/models/equipment_chassis.py
|
psus
|
fdemello/intersight-python
|
python
|
@property
def psus(self):
'\n Gets the psus of this EquipmentChassis.\n\n :return: The psus of this EquipmentChassis.\n :rtype: list[EquipmentPsuRef]\n '
return self._psus
|
@psus.setter
def psus(self, psus):
'\n Sets the psus of this EquipmentChassis.\n\n :param psus: The psus of this EquipmentChassis.\n :type: list[EquipmentPsuRef]\n '
self._psus = psus
| 5,481,846,992,534,956,000
|
Sets the psus of this EquipmentChassis.
:param psus: The psus of this EquipmentChassis.
:type: list[EquipmentPsuRef]
|
intersight/models/equipment_chassis.py
|
psus
|
fdemello/intersight-python
|
python
|
@psus.setter
def psus(self, psus):
'\n Sets the psus of this EquipmentChassis.\n\n :param psus: The psus of this EquipmentChassis.\n :type: list[EquipmentPsuRef]\n '
self._psus = psus
|
@property
def registered_device(self):
'\n Gets the registered_device of this EquipmentChassis.\n\n :return: The registered_device of this EquipmentChassis.\n :rtype: AssetDeviceRegistrationRef\n '
return self._registered_device
| -955,344,537,030,405,900
|
Gets the registered_device of this EquipmentChassis.
:return: The registered_device of this EquipmentChassis.
:rtype: AssetDeviceRegistrationRef
|
intersight/models/equipment_chassis.py
|
registered_device
|
fdemello/intersight-python
|
python
|
@property
def registered_device(self):
'\n Gets the registered_device of this EquipmentChassis.\n\n :return: The registered_device of this EquipmentChassis.\n :rtype: AssetDeviceRegistrationRef\n '
return self._registered_device
|
@registered_device.setter
def registered_device(self, registered_device):
'\n Sets the registered_device of this EquipmentChassis.\n\n :param registered_device: The registered_device of this EquipmentChassis.\n :type: AssetDeviceRegistrationRef\n '
self._registered_device = registered_device
| -3,199,838,068,360,467,500
|
Sets the registered_device of this EquipmentChassis.
:param registered_device: The registered_device of this EquipmentChassis.
:type: AssetDeviceRegistrationRef
|
intersight/models/equipment_chassis.py
|
registered_device
|
fdemello/intersight-python
|
python
|
@registered_device.setter
def registered_device(self, registered_device):
'\n Sets the registered_device of this EquipmentChassis.\n\n :param registered_device: The registered_device of this EquipmentChassis.\n :type: AssetDeviceRegistrationRef\n '
self._registered_device = registered_device
|
@property
def sasexpanders(self):
'\n Gets the sasexpanders of this EquipmentChassis.\n\n :return: The sasexpanders of this EquipmentChassis.\n :rtype: list[StorageSasExpanderRef]\n '
return self._sasexpanders
| 4,970,616,966,302,048,000
|
Gets the sasexpanders of this EquipmentChassis.
:return: The sasexpanders of this EquipmentChassis.
:rtype: list[StorageSasExpanderRef]
|
intersight/models/equipment_chassis.py
|
sasexpanders
|
fdemello/intersight-python
|
python
|
@property
def sasexpanders(self):
'\n Gets the sasexpanders of this EquipmentChassis.\n\n :return: The sasexpanders of this EquipmentChassis.\n :rtype: list[StorageSasExpanderRef]\n '
return self._sasexpanders
|
@sasexpanders.setter
def sasexpanders(self, sasexpanders):
'\n Sets the sasexpanders of this EquipmentChassis.\n\n :param sasexpanders: The sasexpanders of this EquipmentChassis.\n :type: list[StorageSasExpanderRef]\n '
self._sasexpanders = sasexpanders
| 6,319,972,870,893,646,000
|
Sets the sasexpanders of this EquipmentChassis.
:param sasexpanders: The sasexpanders of this EquipmentChassis.
:type: list[StorageSasExpanderRef]
|
intersight/models/equipment_chassis.py
|
sasexpanders
|
fdemello/intersight-python
|
python
|
@sasexpanders.setter
def sasexpanders(self, sasexpanders):
'\n Sets the sasexpanders of this EquipmentChassis.\n\n :param sasexpanders: The sasexpanders of this EquipmentChassis.\n :type: list[StorageSasExpanderRef]\n '
self._sasexpanders = sasexpanders
|
@property
def siocs(self):
'\n Gets the siocs of this EquipmentChassis.\n\n :return: The siocs of this EquipmentChassis.\n :rtype: list[EquipmentSystemIoControllerRef]\n '
return self._siocs
| -6,272,941,836,516,304,000
|
Gets the siocs of this EquipmentChassis.
:return: The siocs of this EquipmentChassis.
:rtype: list[EquipmentSystemIoControllerRef]
|
intersight/models/equipment_chassis.py
|
siocs
|
fdemello/intersight-python
|
python
|
@property
def siocs(self):
'\n Gets the siocs of this EquipmentChassis.\n\n :return: The siocs of this EquipmentChassis.\n :rtype: list[EquipmentSystemIoControllerRef]\n '
return self._siocs
|
@siocs.setter
def siocs(self, siocs):
'\n Sets the siocs of this EquipmentChassis.\n\n :param siocs: The siocs of this EquipmentChassis.\n :type: list[EquipmentSystemIoControllerRef]\n '
self._siocs = siocs
| 7,973,869,749,024,284,000
|
Sets the siocs of this EquipmentChassis.
:param siocs: The siocs of this EquipmentChassis.
:type: list[EquipmentSystemIoControllerRef]
|
intersight/models/equipment_chassis.py
|
siocs
|
fdemello/intersight-python
|
python
|
@siocs.setter
def siocs(self, siocs):
'\n Sets the siocs of this EquipmentChassis.\n\n :param siocs: The siocs of this EquipmentChassis.\n :type: list[EquipmentSystemIoControllerRef]\n '
self._siocs = siocs
|
@property
def storage_enclosures(self):
'\n Gets the storage_enclosures of this EquipmentChassis.\n\n :return: The storage_enclosures of this EquipmentChassis.\n :rtype: list[StorageEnclosureRef]\n '
return self._storage_enclosures
| 7,754,907,690,592,987,000
|
Gets the storage_enclosures of this EquipmentChassis.
:return: The storage_enclosures of this EquipmentChassis.
:rtype: list[StorageEnclosureRef]
|
intersight/models/equipment_chassis.py
|
storage_enclosures
|
fdemello/intersight-python
|
python
|
@property
def storage_enclosures(self):
'\n Gets the storage_enclosures of this EquipmentChassis.\n\n :return: The storage_enclosures of this EquipmentChassis.\n :rtype: list[StorageEnclosureRef]\n '
return self._storage_enclosures
|
@storage_enclosures.setter
def storage_enclosures(self, storage_enclosures):
'\n Sets the storage_enclosures of this EquipmentChassis.\n\n :param storage_enclosures: The storage_enclosures of this EquipmentChassis.\n :type: list[StorageEnclosureRef]\n '
self._storage_enclosures = storage_enclosures
| 2,039,931,402,524,658,400
|
Sets the storage_enclosures of this EquipmentChassis.
:param storage_enclosures: The storage_enclosures of this EquipmentChassis.
:type: list[StorageEnclosureRef]
|
intersight/models/equipment_chassis.py
|
storage_enclosures
|
fdemello/intersight-python
|
python
|
@storage_enclosures.setter
def storage_enclosures(self, storage_enclosures):
'\n Sets the storage_enclosures of this EquipmentChassis.\n\n :param storage_enclosures: The storage_enclosures of this EquipmentChassis.\n :type: list[StorageEnclosureRef]\n '
self._storage_enclosures = storage_enclosures
|
def to_dict(self):
'\n Returns the model properties as a dict\n '
result = {}
for (attr, _) in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result
| 2,191,974,537,531,847,000
|
Returns the model properties as a dict
|
intersight/models/equipment_chassis.py
|
to_dict
|
fdemello/intersight-python
|
python
|
def to_dict(self):
'\n \n '
result = {}
for (attr, _) in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result
|
def to_str(self):
'\n Returns the string representation of the model\n '
return pformat(self.to_dict())
| -3,531,024,894,346,511,000
|
Returns the string representation of the model
|
intersight/models/equipment_chassis.py
|
to_str
|
fdemello/intersight-python
|
python
|
def to_str(self):
'\n \n '
return pformat(self.to_dict())
|
def __repr__(self):
'\n For `print` and `pprint`\n '
return self.to_str()
| 5,853,962,500,611,353,000
|
For `print` and `pprint`
|
intersight/models/equipment_chassis.py
|
__repr__
|
fdemello/intersight-python
|
python
|
def __repr__(self):
'\n \n '
return self.to_str()
|
def __eq__(self, other):
'\n Returns true if both objects are equal\n '
if (not isinstance(other, EquipmentChassis)):
return False
return (self.__dict__ == other.__dict__)
| 8,349,878,811,762,179,000
|
Returns true if both objects are equal
|
intersight/models/equipment_chassis.py
|
__eq__
|
fdemello/intersight-python
|
python
|
def __eq__(self, other):
'\n \n '
if (not isinstance(other, EquipmentChassis)):
return False
return (self.__dict__ == other.__dict__)
|
def __ne__(self, other):
'\n Returns true if both objects are not equal\n '
return (not (self == other))
| 3,600,423,175,817,510,400
|
Returns true if both objects are not equal
|
intersight/models/equipment_chassis.py
|
__ne__
|
fdemello/intersight-python
|
python
|
def __ne__(self, other):
    """Return True if both objects are not equal (negation of ``==``)."""
    return (not (self == other))
|
def main():
    """Read temperatures until EXIT; report highest, lowest, average and cold days.

    EXIT is a module-level sentinel constant (not visible in this chunk);
    readings must lie in (-100, 90] or the program stops. A "cold day" is a
    reading strictly below 16.
    """
    print('stanCode "Weather Master 4.0"!')
    maximum = (- 100000000)  # running max; starts below any valid reading
    minimum = 100000000  # running min; starts above any valid reading
    total = 0  # sum of accepted readings
    count = 0  # number of accepted readings
    cold_day = 0  # readings strictly below 16
    while True:
        temperature = int(input((('Next Temperature: (or ' + str(EXIT)) + ' to quit)? ')))
        # EXIT before any data: report and stop without printing stats.
        if ((temperature == EXIT) and (count == 0)):
            print('No temperatures were entered.')
            break
        # Out-of-range reading aborts the whole session, not just this entry.
        if ((temperature > 90) or (temperature < (- 100))):
            print((('>>> The temperature "' + str(temperature)) + '" not exist, so we exclude and stop it.'))
            break
        if (temperature == EXIT):
            break
        else:
            count += 1
            if (temperature < 16):
                cold_day += 1
            total += temperature
            if (temperature > maximum):
                maximum = temperature
            if (temperature < minimum):
                minimum = temperature
            else:
                pass
    # Summary is printed only when at least one reading was accepted.
    if (count != 0):
        avg = (total / count)
        print('')
        print(('Highest temperature = ' + str(maximum)))
        print(('Lowest temperature = ' + str(minimum)))
        print(('Average = ' + str(avg)))
        print((str(cold_day) + ' cold day(s)'))
    '\n    My note:\n    This is the first try, when I debug I found the calculation logic is wrong.\n    The first variable I type will disappear when it enter into the while loop. And the count of \n    total days would include the EXIT constant.\n    '
| -2,519,882,223,176,822,000
|
To find the highest and lowest temperature, cold days and the average.
|
stanCode_Projects/weather_master/weather_master.py
|
main
|
clairejrlin/stanCode_projects
|
python
|
def main():
    """Read temperatures until EXIT; report highest, lowest, average and cold days.

    EXIT is a module-level sentinel constant (not visible in this chunk);
    readings must lie in (-100, 90] or the program stops. A "cold day" is a
    reading strictly below 16.
    """
    print('stanCode "Weather Master 4.0"!')
    maximum = (- 100000000)  # running max; starts below any valid reading
    minimum = 100000000  # running min; starts above any valid reading
    total = 0
    count = 0
    cold_day = 0  # readings strictly below 16
    while True:
        temperature = int(input((('Next Temperature: (or ' + str(EXIT)) + ' to quit)? ')))
        if ((temperature == EXIT) and (count == 0)):
            print('No temperatures were entered.')
            break
        # Out-of-range reading aborts the whole session, not just this entry.
        if ((temperature > 90) or (temperature < (- 100))):
            print((('>>> The temperature "' + str(temperature)) + '" not exist, so we exclude and stop it.'))
            break
        if (temperature == EXIT):
            break
        else:
            count += 1
            if (temperature < 16):
                cold_day += 1
            total += temperature
            if (temperature > maximum):
                maximum = temperature
            if (temperature < minimum):
                minimum = temperature
            else:
                pass
    # Summary is printed only when at least one reading was accepted.
    if (count != 0):
        avg = (total / count)
        print()
        print(('Highest temperature = ' + str(maximum)))
        print(('Lowest temperature = ' + str(minimum)))
        print(('Average = ' + str(avg)))
        print((str(cold_day) + ' cold day(s)'))
    '\n    My note:\n    This is the first try, when I debug I found the calculation logic is wrong.\n    The first variable I type will disappear when it enter into the while loop. And the count of \n    total days would include the EXIT constant.\n    '
|
def ee_table_to_legend(in_table, out_file):
    """Convert an Earth Engine color table to a legend dictionary file.

    Args:
        in_table (str): Path to the input Earth Engine color table (*.txt),
            a tab-separated file with a header row and columns
            (value, color, description).
        out_file (str): Path to the output legend dictionary (*.txt).

    Returns:
        None. Prints a message and returns early if ``in_table`` is missing.
    """
    # Bug fix: the original only printed a warning here and then crashed on
    # open(); now we return early. (Also dropped the unused pkg_resources
    # lookup of the bundled template table.)
    if not os.path.exists(in_table):
        print('The class table does not exist.')
        return

    out_file = os.path.abspath(out_file)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    # Parse the table: skip the header row; key is "<value> <description>".
    legend_dict = {}
    with open(in_table) as f:
        lines = f.readlines()
    for index, line in enumerate(lines):
        if index > 0:
            items = [item.strip() for item in line.split('\t')]
            color = items[1]
            key = items[0] + ' ' + items[2]
            legend_dict[key] = color

    # Render as a Python dict literal; strip the trailing comma only when
    # there is at least one entry (empty tables previously corrupted '{').
    out_lines = ['{\n']
    for key in legend_dict.keys():
        out_lines.append("\t'{}': '{}',\n".format(key, legend_dict[key]))
    if legend_dict:
        out_lines[-1] = out_lines[-1].rstrip()[:-1] + '\n'
    out_lines.append('}\n')

    with open(out_file, 'w') as f:
        f.writelines(out_lines)
| -3,205,447,828,213,331,000
|
Converts an Earth Engine color table to a dictionary
Args:
in_table (str): The input file path (*.txt) to the Earth Engine color table.
out_file (str): The output file path (*.txt) to the legend dictionary.
|
geemap/legends.py
|
ee_table_to_legend
|
GSRS/geemap
|
python
|
def ee_table_to_legend(in_table, out_file):
    """Convert an Earth Engine color table to a legend dictionary file.

    Args:
        in_table (str): Path to the input Earth Engine color table (*.txt),
            a tab-separated file with a header row and columns
            (value, color, description).
        out_file (str): Path to the output legend dictionary (*.txt).

    Returns:
        None. Prints a message and returns early if ``in_table`` is missing.
    """
    # Bug fix: the original only printed a warning here and then crashed on
    # open(); now we return early. (Also dropped the unused pkg_resources
    # lookup of the bundled template table.)
    if not os.path.exists(in_table):
        print('The class table does not exist.')
        return

    out_file = os.path.abspath(out_file)
    if not os.path.exists(os.path.dirname(out_file)):
        os.makedirs(os.path.dirname(out_file))

    # Parse the table: skip the header row; key is "<value> <description>".
    legend_dict = {}
    with open(in_table) as f:
        lines = f.readlines()
    for index, line in enumerate(lines):
        if index > 0:
            items = [item.strip() for item in line.split('\t')]
            color = items[1]
            key = items[0] + ' ' + items[2]
            legend_dict[key] = color

    # Render as a Python dict literal; strip the trailing comma only when
    # there is at least one entry (empty tables previously corrupted '{').
    out_lines = ['{\n']
    for key in legend_dict.keys():
        out_lines.append("\t'{}': '{}',\n".format(key, legend_dict[key]))
    if legend_dict:
        out_lines[-1] = out_lines[-1].rstrip()[:-1] + '\n'
    out_lines.append('}\n')

    with open(out_file, 'w') as f:
        f.writelines(out_lines)
|
def dummy_trace_orbit_func(loc, times=None):
    """Testing stand-in for an orbit tracer.

    Skips real orbit integration; adds a large offset when every provided
    time exceeds 1.0, so that age is weakly constrained rather than a free
    floating parameter.
    """
    if times is None:
        return loc
    if np.all(times > 1.0):
        return loc + 1000.0
    return loc
| 2,413,577,940,947,131,400
|
Purely for testing purposes
Dummy trace orbit func to skip irrelevant computation
A little constraint on age (since otherwise its a free floating
parameter)
|
chronostar/naivefit-bak.py
|
dummy_trace_orbit_func
|
mikeireland/chronostar
|
python
|
def dummy_trace_orbit_func(loc, times=None):
    """Testing stand-in for an orbit tracer: offsets loc by 1000 when all times > 1."""
    if (times is not None):
        if np.all((times > 1.0)):
            return (loc + 1000.0)
    return loc
|
def log_message(msg, symbol='.', surround=False):
    """Log *msg* at INFO level, centred between short runs of *symbol*.

    When *surround* is True the line is additionally sandwiched between two
    full-width (50-char) rules of *symbol*.
    """
    edge = symbol * 5
    line = '{}{:^40}{}'.format(edge, msg, edge)
    if surround:
        rule = symbol * 50
        line = '\n{}\n{}\n{}'.format(rule, line, rule)
    logging.info(line)
| -6,724,881,269,071,199,000
|
Little formatting helper
|
chronostar/naivefit-bak.py
|
log_message
|
mikeireland/chronostar
|
python
|
def log_message(msg, symbol='.', surround=False):
    """Little formatting helper: log msg at INFO, framed by runs of symbol."""
    res = '{}{:^40}{}'.format((5 * symbol), msg, (5 * symbol))
    if surround:
        # Wrap the line between two full-width (50-char) rules.
        res = '\n{}\n{}\n{}'.format((50 * symbol), res, (50 * symbol))
    logging.info(res)
|
def __init__(self, fit_pars):
    """Set up a NaiveFit run from a parameter file or dictionary.

    Parameters
    ----------
    fit_pars : str -or- dict
        If a string, a path to a parameter file parsable by
        readparam.readParam; otherwise an actual dictionary. See README.md
        for a description of parameters.
    """
    # Accept either a parameter-file path or a ready dictionary.
    if (type(fit_pars) is str):
        fit_pars = readparam.readParam(fit_pars, default_pars=self.DEFAULT_FIT_PARS)
    # Layer user-supplied values over the class defaults.
    self.fit_pars = dict(self.DEFAULT_FIT_PARS)
    self.fit_pars.update(fit_pars)
    assert (type(self.fit_pars) is dict)
    self.fit_pars['par_log_file'] = os.path.join(self.fit_pars['results_dir'], self.fit_pars['par_log_file'])
    self.data_dict = tabletool.build_data_dict_from_table(self.fit_pars['data_table'], historical=self.fit_pars['historical_colnames'])
    self.ncomps = 1
    # Choose the component parametrisation class.
    if (self.fit_pars['component'] == 'sphere'):
        self.Component = component.SphereComponent
        self.fit_pars['Component'] = component.SphereComponent
    elif (self.fit_pars['component'] == 'ellip'):
        self.Component = component.EllipComponent
        self.fit_pars['Component'] = component.EllipComponent
    else:
        raise UserWarning('Unknown (or missing) component parametrisation')
    # Avoid clobbering a previous run's results dir unless explicitly allowed:
    # fall back to a randomly-suffixed sibling directory.
    if (os.path.exists(self.fit_pars['results_dir']) and (not self.fit_pars['overwrite_prev_run'])):
        rdir = '{}_{}'.format(self.fit_pars['results_dir'].rstrip('/'), random.randint(0, 1000))
    else:
        rdir = self.fit_pars['results_dir']
    self.rdir = (rdir.rstrip('/') + '/')
    mkpath(self.rdir)
    assert os.access(self.rdir, os.W_OK)
    readparam.log_used_pars(self.fit_pars, default_pars=self.DEFAULT_FIT_PARS)
    logging.basicConfig(filename=(self.rdir + 'log.log'), level=logging.INFO)
    log_message(msg='Component count cap set to {}'.format(self.fit_pars['max_comp_count']), symbol='+', surround=True)
    log_message(msg='Iteration count cap set to {}'.format(self.fit_pars['max_em_iterations']), symbol='+', surround=True)
    print('printed')
    # Leave one CPU free for the master thread.
    if (self.fit_pars['nthreads'] > (cpu_count() - 1)):
        raise UserWarning('Provided nthreads exceeds cpu count on this machine. Rememeber to leave one cpu free for master thread!')
    # NOTE(review): asymmetric — multithreaded path sets fit_pars['pool']
    # while the serial path sets self.pool; confirm which one consumers read.
    if (self.fit_pars['nthreads'] > 1):
        log_message(('pool = Pool(nthreads) = pool(%d)' % self.fit_pars['nthreads']))
        self.fit_pars['pool'] = Pool(self.fit_pars['nthreads'])
    else:
        self.pool = None
    # Resolve the orbit-tracing function from its configured name.
    if (self.fit_pars['trace_orbit_func'] == 'dummy_trace_orbit_func'):
        self.fit_pars['trace_orbit_func'] = dummy_trace_orbit_func
    elif (self.fit_pars['trace_orbit_func'] == 'epicyclic'):
        log_message('trace_orbit: epicyclic')
        self.fit_pars['trace_orbit_func'] = traceorbit.trace_epicyclic_orbit
    else:
        self.fit_pars['trace_orbit_func'] = traceorbit.trace_cartesian_orbit
    # Optionally load initial components from file (path given as a string).
    if (type(self.fit_pars['init_comps']) is str):
        self.fit_pars['init_comps'] = self.Component.load_raw_components(self.fit_pars['init_comps'])
        self.ncomps = len(self.fit_pars['init_comps'])
        print('Managed to load in init_comps from file')
    else:
        self.fit_pars['init_comps'] = None
        print("'Init comps' is initialised as none")
| -1,119,155,281,083,345,400
|
Parameters
----------
fit_pars : str -or- dictionary
If a string, `fit_pars` should be a path to a parameter file which
can be parsed by readparam.readParam, to construct a dictionary.
Alternatively, an actual dictionary can be passed in. See README.md
for a description of parameters.
|
chronostar/naivefit-bak.py
|
__init__
|
mikeireland/chronostar
|
python
|
def __init__(self, fit_pars):
    """Set up a NaiveFit run from a parameter file or dictionary.

    Parameters
    ----------
    fit_pars : str -or- dict
        If a string, a path to a parameter file parsable by
        readparam.readParam; otherwise an actual dictionary. See README.md
        for a description of parameters.
    """
    if (type(fit_pars) is str):
        fit_pars = readparam.readParam(fit_pars, default_pars=self.DEFAULT_FIT_PARS)
    # Layer user-supplied values over the class defaults.
    self.fit_pars = dict(self.DEFAULT_FIT_PARS)
    self.fit_pars.update(fit_pars)
    assert (type(self.fit_pars) is dict)
    self.fit_pars['par_log_file'] = os.path.join(self.fit_pars['results_dir'], self.fit_pars['par_log_file'])
    self.data_dict = tabletool.build_data_dict_from_table(self.fit_pars['data_table'], historical=self.fit_pars['historical_colnames'])
    self.ncomps = 1
    # Choose the component parametrisation class.
    if (self.fit_pars['component'] == 'sphere'):
        self.Component = component.SphereComponent
        self.fit_pars['Component'] = component.SphereComponent
    elif (self.fit_pars['component'] == 'ellip'):
        self.Component = component.EllipComponent
        self.fit_pars['Component'] = component.EllipComponent
    else:
        raise UserWarning('Unknown (or missing) component parametrisation')
    # Avoid clobbering a previous run's results dir unless explicitly allowed.
    if (os.path.exists(self.fit_pars['results_dir']) and (not self.fit_pars['overwrite_prev_run'])):
        rdir = '{}_{}'.format(self.fit_pars['results_dir'].rstrip('/'), random.randint(0, 1000))
    else:
        rdir = self.fit_pars['results_dir']
    self.rdir = (rdir.rstrip('/') + '/')
    mkpath(self.rdir)
    assert os.access(self.rdir, os.W_OK)
    readparam.log_used_pars(self.fit_pars, default_pars=self.DEFAULT_FIT_PARS)
    logging.basicConfig(filename=(self.rdir + 'log.log'), level=logging.INFO)
    log_message(msg='Component count cap set to {}'.format(self.fit_pars['max_comp_count']), symbol='+', surround=True)
    log_message(msg='Iteration count cap set to {}'.format(self.fit_pars['max_em_iterations']), symbol='+', surround=True)
    print('printed')
    if (self.fit_pars['nthreads'] > (cpu_count() - 1)):
        raise UserWarning('Provided nthreads exceeds cpu count on this machine. Rememeber to leave one cpu free for master thread!')
    # NOTE(review): asymmetric — multithreaded path sets fit_pars['pool']
    # while the serial path sets self.pool; confirm which one consumers read.
    if (self.fit_pars['nthreads'] > 1):
        log_message(('pool = Pool(nthreads) = pool(%d)' % self.fit_pars['nthreads']))
        self.fit_pars['pool'] = Pool(self.fit_pars['nthreads'])
    else:
        self.pool = None
    # Resolve the orbit-tracing function from its configured name.
    if (self.fit_pars['trace_orbit_func'] == 'dummy_trace_orbit_func'):
        self.fit_pars['trace_orbit_func'] = dummy_trace_orbit_func
    elif (self.fit_pars['trace_orbit_func'] == 'epicyclic'):
        log_message('trace_orbit: epicyclic')
        self.fit_pars['trace_orbit_func'] = traceorbit.trace_epicyclic_orbit
    else:
        self.fit_pars['trace_orbit_func'] = traceorbit.trace_cartesian_orbit
    # Optionally load initial components from file (path given as a string).
    if (type(self.fit_pars['init_comps']) is str):
        self.fit_pars['init_comps'] = self.Component.load_raw_components(self.fit_pars['init_comps'])
        self.ncomps = len(self.fit_pars['init_comps'])
        print('Managed to load in init_comps from file')
    else:
        self.fit_pars['init_comps'] = None
        print("'Init comps' is initialised as none")
|
def build_comps_from_chains(self, run_dir):
    """Reconstruct best-fit Component objects from stored emcee chains.

    For each of the ``self.ncomps`` components, loads the final chain and
    lnprob arrays from ``<run_dir>final/comp<i>/``, picks the sample with
    the highest lnprob, and builds a Component from those parameters. The
    rebuilt components are also stored back to disk.

    Parameters
    ----------
    run_dir : str
        Directory of an EM fit, e.g. 'myfit/1' or 'myfit/2/A'.

    Returns
    -------
    comps : list of Component
        Components corresponding to the best fit of the run in question.
    """
    logging.info('Component class has been modified, reconstructing from chain')
    n_pars = len(self.Component.PARAMETER_FORMAT)
    comps = []
    for comp_ix in range(self.ncomps):
        comp_dir = run_dir + 'final/comp{}/'.format(comp_ix)
        chain = np.load(comp_dir + 'final_chain.npy')
        lnprob = np.load(comp_dir + 'final_lnprob.npy')
        flat_chain = chain.reshape(-1, n_pars)
        best_pars = flat_chain[np.argmax(lnprob)]
        comps.append(self.Component(emcee_pars=best_pars))
    self.Component.store_raw_components(
        str(run_dir + 'final/' + self.final_comps_file), comps)
    return comps
| 1,173,593,740,890,240,500
|
Build compoennt objects from stored emcee chains and cooresponding
lnprobs.
Parameters
----------
run_dir: str
Directory of an EM fit, which in the context of NaiveFit will be
e.g. 'myfit/1', or 'myfit/2/A'
Returns
-------
comps: [Component]
A list of components that correspond to the best fit from the
run in question.
|
chronostar/naivefit-bak.py
|
build_comps_from_chains
|
mikeireland/chronostar
|
python
|
def build_comps_from_chains(self, run_dir):
    """Build Component objects from stored emcee chains and corresponding lnprobs.

    Parameters
    ----------
    run_dir : str
        Directory of an EM fit, which in the context of NaiveFit will be
        e.g. 'myfit/1', or 'myfit/2/A'.

    Returns
    -------
    comps : list of Component
        Components corresponding to the best fit from the run in question.
    """
    logging.info('Component class has been modified, reconstructing from chain')
    comps = (self.ncomps * [None])
    for i in range(self.ncomps):
        final_cdir = (run_dir + 'final/comp{}/'.format(i))
        chain = np.load((final_cdir + 'final_chain.npy'))
        lnprob = np.load((final_cdir + 'final_lnprob.npy'))
        npars = len(self.Component.PARAMETER_FORMAT)
        # Best sample = the flattened chain row with the highest lnprob.
        best_ix = np.argmax(lnprob)
        best_pars = chain.reshape((- 1), npars)[best_ix]
        comps[i] = self.Component(emcee_pars=best_pars)
    self.Component.store_raw_components(str(((run_dir + 'final/') + self.final_comps_file)), comps)
    return comps
|
def log_score_comparison(self, prev, new):
    """Log a BIC comparison between two runs (purely a logging helper).

    Parameters
    ----------
    prev, new : dict
        Score dictionaries with keys 'bic' (Bayesian Information
        Criterion), 'lnlike' and 'lnpost' for the previous and new runs.
    """
    improved = new['bic'] < prev['bic']
    if improved:
        logging.info('Extra component has improved BIC...')
        logging.info('New BIC: {} < Old BIC: {}'.format(new['bic'], prev['bic']))
    else:
        logging.info('Extra component has worsened BIC...')
        logging.info('New BIC: {} > Old BIC: {}'.format(new['bic'], prev['bic']))
    logging.info('lnlike: {} | {}'.format(new['lnlike'], prev['lnlike']))
    logging.info('lnpost: {} | {}'.format(new['lnpost'], prev['lnpost']))
| 2,617,305,590,642,477,600
|
Purely a logging helper function.
Log BIC comparisons.
Parameters
----------
prev: dict
A dictinoary of scores from the previous run with the following entries
- bic: the Bayesian Information Criterion
- lnlike : the log likelihood
- lnpost : the log posterior
new: dict
A dictinoary of scores from the new run, with identical entries as
`prev`
Result
------
None
|
chronostar/naivefit-bak.py
|
log_score_comparison
|
mikeireland/chronostar
|
python
|
def log_score_comparison(self, prev, new):
    """Purely a logging helper: log BIC comparisons between two runs.

    Parameters
    ----------
    prev : dict
        Scores from the previous run: 'bic' (Bayesian Information
        Criterion), 'lnlike' (log likelihood), 'lnpost' (log posterior).
    new : dict
        Scores from the new run, with identical entries as `prev`.

    Returns
    -------
    None
    """
    if (new['bic'] < prev['bic']):
        logging.info('Extra component has improved BIC...')
        logging.info('New BIC: {} < Old BIC: {}'.format(new['bic'], prev['bic']))
    else:
        logging.info('Extra component has worsened BIC...')
        logging.info('New BIC: {} > Old BIC: {}'.format(new['bic'], prev['bic']))
    logging.info('lnlike: {} | {}'.format(new['lnlike'], prev['lnlike']))
    logging.info('lnpost: {} | {}'.format(new['lnpost'], prev['lnpost']))
|
def build_init_comps(self, prev_comps, split_comp_ix, prev_med_and_spans, memb_probs):
    """Generate N+1 components to initialise an EM run from a converged N-fit.

    The target component ``prev_comps[split_comp_ix]`` is replaced by two
    components: split by age (lower/higher) or spatially, depending on
    ``fit_pars['split_group']``.

    Parameters
    ----------
    prev_comps : [N] list of Component objects
        Components from the N-component fit.
    split_comp_ix : int
        Index of the component to split into two.
    prev_med_and_spans : [ncomps, npars, 3] np.array
        Medians and spans of the previous fit's parameters (used for the
        emcee age-split bounds).
    memb_probs : np.array
        Membership probabilities (used for the spatial split).

    Returns
    -------
    init_comps : [N+1] list of Component objects
    """
    target_comp = prev_comps[split_comp_ix]
    assert isinstance(target_comp, self.Component)
    # NOTE(review): if split_group is neither 'age' nor 'spatial',
    # split_comps is never bound and an UnboundLocalError follows — confirm
    # upstream validation guarantees one of the two values.
    if (self.fit_pars['split_group'] == 'age'):
        if (self.fit_pars['optimisation_method'] == 'emcee'):
            # Age bounds come from the stored median-and-span summary.
            split_comps = target_comp.split_group_age(lo_age=prev_med_and_spans[(split_comp_ix, (- 1), 1)], hi_age=prev_med_and_spans[(split_comp_ix, (- 1), 2)])
        elif (self.fit_pars['optimisation_method'] == 'Nelder-Mead'):
            # No span info available: bracket the current age by +/-20%.
            age = target_comp.get_age()
            split_comps = target_comp.split_group_age(lo_age=(0.8 * age), hi_age=(1.2 * age))
    elif (self.fit_pars['split_group'] == 'spatial'):
        split_comps = target_comp.split_group_spatial(self.data_dict, memb_probs[:, split_comp_ix])
    # Replace the target component with its two halves, preserving order.
    init_comps = list(prev_comps)
    init_comps.pop(split_comp_ix)
    init_comps.insert(split_comp_ix, split_comps[1])
    init_comps.insert(split_comp_ix, split_comps[0])
    return init_comps
| -7,115,485,194,683,713,000
|
Given a list of converged components from a N component fit, generate
a list of N+1 components with which to initialise an EM run.
This is done by taking the target component, `prev_comps[comp_ix]`,
replacing it in the list of comps, by splitting it into two components
with a lower and higher age,
Parameters
----------
prev_comps : [N] list of Component objects
List of components from the N component fit
split_comp_ix : int
The index of component which is to be split into two
prev_med_and_spans : [ncomps,npars,3] np.array
The median and spans of
Return
------
init_comps: [N+1] list of Component objects
Side effects
------------
Updates self.fit_pars['init_comps'] with a [N+1] list of Component
objects
|
chronostar/naivefit-bak.py
|
build_init_comps
|
mikeireland/chronostar
|
python
|
def build_init_comps(self, prev_comps, split_comp_ix, prev_med_and_spans, memb_probs):
    """Generate N+1 components to initialise an EM run from a converged N-fit.

    The target component ``prev_comps[split_comp_ix]`` is replaced by two
    components, split by age or spatially per ``fit_pars['split_group']``.

    Parameters
    ----------
    prev_comps : [N] list of Component objects
    split_comp_ix : int
    prev_med_and_spans : [ncomps, npars, 3] np.array
    memb_probs : np.array

    Returns
    -------
    init_comps : [N+1] list of Component objects
    """
    target_comp = prev_comps[split_comp_ix]
    assert isinstance(target_comp, self.Component)
    # NOTE(review): unknown split_group values leave split_comps unbound
    # (UnboundLocalError below) — confirm upstream validation.
    if (self.fit_pars['split_group'] == 'age'):
        if (self.fit_pars['optimisation_method'] == 'emcee'):
            split_comps = target_comp.split_group_age(lo_age=prev_med_and_spans[(split_comp_ix, (- 1), 1)], hi_age=prev_med_and_spans[(split_comp_ix, (- 1), 2)])
        elif (self.fit_pars['optimisation_method'] == 'Nelder-Mead'):
            # No span info available: bracket the current age by +/-20%.
            age = target_comp.get_age()
            split_comps = target_comp.split_group_age(lo_age=(0.8 * age), hi_age=(1.2 * age))
    elif (self.fit_pars['split_group'] == 'spatial'):
        split_comps = target_comp.split_group_spatial(self.data_dict, memb_probs[:, split_comp_ix])
    # Replace the target component with its two halves, preserving order.
    init_comps = list(prev_comps)
    init_comps.pop(split_comp_ix)
    init_comps.insert(split_comp_ix, split_comps[1])
    init_comps.insert(split_comp_ix, split_comps[0])
    return init_comps
|
def run_em_unless_loadable(self, run_dir):
    """Run an EM fit, but only if its results are not loadable from a previous run.

    Tries to load med_and_spans, memb_probs and comps from
    ``<run_dir>/final/``. Missing files (IOError) trigger a fresh
    ``expectmax.fit_many_comps`` run; an AttributeError while loading comps
    (e.g. a changed Component class) triggers reconstruction from chains.

    Returns
    -------
    dict with keys 'comps', 'med_and_spans', 'memb_probs'.
    """
    try:
        try:
            med_and_spans = np.load(os.path.join(run_dir, 'final/', self.final_med_and_spans_file))
        except ValueError:
            # Gradient-descent runs don't produce med_and_spans arrays.
            logging.info('med_and_spans not read. Presumably you are using gradient descent optimisation procedure?')
            med_and_spans = [None]
        memb_probs = np.load(os.path.join(run_dir, 'final/', self.final_memb_probs_file))
        comps = self.Component.load_raw_components(str(os.path.join(run_dir, 'final/', self.final_comps_file)))
        logging.info('Loaded from previous run')
    except AttributeError:
        # Component class changed since the files were written; rebuild.
        # NOTE(review): med_and_spans/memb_probs from above are reused here.
        comps = self.build_comps_from_chains(run_dir)
    except IOError:
        # Nothing stored yet: run the EM fit from scratch.
        (comps, med_and_spans, memb_probs) = expectmax.fit_many_comps(data=self.data_dict, ncomps=self.ncomps, rdir=run_dir, **self.fit_pars)
    # Initialisation inputs are single-use; clear them for subsequent runs.
    self.fit_pars['init_comps'] = None
    self.fit_pars['init_memb_probs'] = None
    return {'comps': comps, 'med_and_spans': med_and_spans, 'memb_probs': memb_probs}
| -5,796,801,986,742,765,000
|
Run and EM fit, but only if not loadable from a previous run
|
chronostar/naivefit-bak.py
|
run_em_unless_loadable
|
mikeireland/chronostar
|
python
|
def run_em_unless_loadable(self, run_dir):
    """Run an EM fit, but only if its results are not loadable from a previous run.

    Missing files (IOError) trigger a fresh ``expectmax.fit_many_comps``
    run; an AttributeError while loading comps triggers reconstruction
    from stored chains.

    Returns
    -------
    dict with keys 'comps', 'med_and_spans', 'memb_probs'.
    """
    try:
        try:
            med_and_spans = np.load(os.path.join(run_dir, 'final/', self.final_med_and_spans_file))
        except ValueError:
            # Gradient-descent runs don't produce med_and_spans arrays.
            logging.info('med_and_spans not read. Presumably you are using gradient descent optimisation procedure?')
            med_and_spans = [None]
        memb_probs = np.load(os.path.join(run_dir, 'final/', self.final_memb_probs_file))
        comps = self.Component.load_raw_components(str(os.path.join(run_dir, 'final/', self.final_comps_file)))
        logging.info('Loaded from previous run')
    except AttributeError:
        # Component class changed since files were written; rebuild from chains.
        comps = self.build_comps_from_chains(run_dir)
    except IOError:
        # Nothing stored yet: run the EM fit from scratch.
        (comps, med_and_spans, memb_probs) = expectmax.fit_many_comps(data=self.data_dict, ncomps=self.ncomps, rdir=run_dir, **self.fit_pars)
    # Initialisation inputs are single-use; clear them for subsequent runs.
    self.fit_pars['init_comps'] = None
    self.fit_pars['init_memb_probs'] = None
    return {'comps': comps, 'med_and_spans': med_and_spans, 'memb_probs': memb_probs}
|
def calc_score(self, comps, memb_probs):
    """Calculate global fit scores for comparison across component counts.

    Parameters
    ----------
    comps : list of Component
    memb_probs : membership probability array

    Returns
    -------
    dict with keys 'bic', 'lnlike' and 'lnpost'.

    TODO: Establish relevance of bg_ln_ols
    """
    scores = {}
    scores['lnlike'] = expectmax.get_overall_lnlikelihood(
        self.data_dict, comps, old_memb_probs=memb_probs)
    scores['lnpost'] = expectmax.get_overall_lnlikelihood(
        self.data_dict, comps, old_memb_probs=memb_probs, inc_posterior=True)
    scores['bic'] = expectmax.calc_bic(
        self.data_dict, self.ncomps, scores['lnlike'],
        memb_probs=memb_probs, Component=self.Component)
    return scores
| -5,439,076,067,232,939,000
|
Calculate global score of fit for comparison with future fits with different
component counts
Parameters
----------
:param comps:
:param memb_probs:
:return:
TODO: Establish relevance of bg_ln_ols
|
chronostar/naivefit-bak.py
|
calc_score
|
mikeireland/chronostar
|
python
|
def calc_score(self, comps, memb_probs):
    """Calculate global fit scores for comparison across component counts.

    Parameters
    ----------
    comps : list of Component
    memb_probs : membership probability array

    Returns
    -------
    dict with keys 'bic', 'lnlike' and 'lnpost'.

    TODO: Establish relevance of bg_ln_ols
    """
    lnlike = expectmax.get_overall_lnlikelihood(self.data_dict, comps, old_memb_probs=memb_probs)
    lnpost = expectmax.get_overall_lnlikelihood(self.data_dict, comps, old_memb_probs=memb_probs, inc_posterior=True)
    bic = expectmax.calc_bic(self.data_dict, self.ncomps, lnlike, memb_probs=memb_probs, Component=self.Component)
    return {'bic': bic, 'lnlike': lnlike, 'lnpost': lnpost}
|
def run_fit(self):
    """Perform a fit (as described in Paper I) to a set of prepared data.

    Iteratively increases the component count: each N-component best fit is
    split one component at a time into N+1 trial fits; the best (lowest BIC)
    trial is kept while it improves on the N-component BIC.

    Returns
    -------
    (results, scores) :
        results = {'comps': best-fit component list,
                   'med_and_spans': medians/spans of model parameters,
                   'memb_probs': membership probability array}
        scores  = {'bic': BIC, 'lnlike': log likelihood,
                   'lnpost': log posterior}
    """
    log_message('Beginning Chronostar run', symbol='_', surround=True)
    if self.fit_pars['store_burnin_chains']:
        log_message(msg='Storing burnin chains', symbol='-')
    # Default initialisation: everything in one component (+ background col).
    if ((self.fit_pars['init_comps'] is None) and (self.fit_pars['init_memb_probs'] is None)):
        assert (self.ncomps == 1), 'If no initialisation set, can only accept ncomp==1'
        init_memb_probs = np.zeros((len(self.data_dict['means']), (self.ncomps + self.fit_pars['use_background'])))
        init_memb_probs[:, 0] = 1.0
    else:
        log_message(msg=('Initialising with init_comps or init_memb_probs with%i components' % self.ncomps), symbol='*', surround=True)
        pass
    # Baseline fit with the initial component count.
    log_message(msg='FITTING {} COMPONENT'.format(self.ncomps), symbol='*', surround=True)
    run_dir = (self.rdir + '{}/'.format(self.ncomps))
    prev_result = self.run_em_unless_loadable(run_dir)
    prev_score = self.calc_score(prev_result['comps'], prev_result['memb_probs'])
    self.ncomps += 1
    while (self.ncomps <= self.fit_pars['max_comp_count']):
        log_message(msg='FITTING {} COMPONENT'.format(self.ncomps), symbol='*', surround=True)
        all_results = []
        all_scores = []
        # Try splitting each previous component in turn (subruns A, B, ...).
        for (i, target_comp) in enumerate(prev_result['comps']):
            div_label = chr((ord('A') + i))
            run_dir = (self.rdir + '{}/{}/'.format(self.ncomps, div_label))
            log_message(msg='Subdividing stage {}'.format(div_label), symbol='+', surround=True)
            mkpath(run_dir)
            self.fit_pars['init_comps'] = self.build_init_comps(prev_result['comps'], split_comp_ix=i, prev_med_and_spans=prev_result['med_and_spans'], memb_probs=prev_result['memb_probs'])
            result = self.run_em_unless_loadable(run_dir)
            all_results.append(result)
            score = self.calc_score(result['comps'], result['memb_probs'])
            all_scores.append(score)
            logging.info('Decomposition {} finished with \nBIC: {}\nlnlike: {}\nlnpost: {}'.format(div_label, all_scores[(- 1)]['bic'], all_scores[(- 1)]['lnlike'], all_scores[(- 1)]['lnpost']))
        # Keep the split with the lowest BIC (NaNs ignored).
        all_bics = [score['bic'] for score in all_scores]
        best_split_ix = np.nanargmin(all_bics)
        new_result = all_results[best_split_ix]
        new_score = all_scores[best_split_ix]
        self.iter_end_log(best_split_ix, prev_result=prev_result, new_result=new_result)
        self.log_score_comparison(new=new_score, prev=prev_score)
        if (new_score['bic'] < prev_score['bic']):
            # Improvement: accept the split and keep growing.
            prev_score = new_score
            prev_result = new_result
            self.ncomps += 1
            log_message(msg='Commencing {} component fit on {}{}'.format(self.ncomps, (self.ncomps - 1), chr((ord('A') + best_split_ix))), symbol='+')
        else:
            # No improvement: persist the previous fit as the final answer.
            logging.info('... saving previous fit as best fit to data')
            self.Component.store_raw_components((self.rdir + self.final_comps_file), prev_result['comps'])
            np.save((self.rdir + self.final_med_and_spans_file), prev_result['med_and_spans'])
            np.save((self.rdir + self.final_memb_probs_file), prev_result['memb_probs'])
            np.save((self.rdir + 'final_likelihood_post_and_bic'), prev_score)
            tabcomps = self.Component.convert_components_array_into_astropy_table(prev_result['comps'])
            # Either overwrite the canonical fits file or write a uniquely
            # suffixed one to avoid clobbering.
            if self.fit_pars['overwrite_fits']:
                tabcomps.write(os.path.join(self.rdir, ('final_comps_%d.fits' % len(prev_result['comps']))), overwrite=self.fit_pars['overwrite_fits'])
            else:
                filename_comps_fits_random = os.path.join(self.rdir, ('final_comps_%d_%s.fits' % (len(prev_result['comps']), str(uuid.uuid4().hex))))
                tabcomps.write(filename_comps_fits_random, overwrite=self.fit_pars['overwrite_fits'])
            # NOTE(review): bare except below swallows all errors (including
            # KeyboardInterrupt); consider narrowing to Exception.
            try:
                if self.fit_pars['overwrite_fits']:
                    tabletool.construct_an_astropy_table_with_gaia_ids_and_membership_probabilities(self.fit_pars['data_table'], prev_result['memb_probs'], prev_result['comps'], os.path.join(self.rdir, ('final_memberships_%d.fits' % len(prev_result['comps']))), get_background_overlaps=True, stellar_id_colname=self.fit_pars['stellar_id_colname'], overwrite_fits=self.fit_pars['overwrite_fits'])
                else:
                    filename_memb_probs_fits_random = os.path.join(self.rdir, ('final_memberships_%d_%s.fits' % (len(prev_result['comps']), str(uuid.uuid4().hex))))
                    tabletool.construct_an_astropy_table_with_gaia_ids_and_membership_probabilities(self.fit_pars['data_table'], prev_result['memb_probs'], prev_result['comps'], filename_memb_probs_fits_random, get_background_overlaps=True, stellar_id_colname=self.fit_pars['stellar_id_colname'], overwrite_fits=self.fit_pars['overwrite_fits'])
            except:
                logging.info("[WARNING] Couldn't print membership.fits file. Check column id.")
            self.log_final_log(prev_result, prev_score)
            break
        logging.info('Best fit:\n{}'.format([group.get_pars() for group in prev_result['comps']]))
    if (self.ncomps >= self.fit_pars['max_comp_count']):
        log_message(msg='REACHED MAX COMP LIMIT', symbol='+', surround=True)
    return (prev_result, prev_score)
| 7,838,494,438,511,895,000
|
Perform a fit (as described in Paper I) to a set of prepared data.
Results are outputted as two dictionaries
results = {'comps':best_fit, (list of components)
'med_and_spans':median and spans of model parameters,
'memb_probs': membership probability array (the standard one)}
scores = {'bic': the bic,
'lnlike': log likelihood of that run,
'lnpost': log posterior of that run}
|
chronostar/naivefit-bak.py
|
run_fit
|
mikeireland/chronostar
|
python
|
def run_fit(self):
    """Perform a fit (as described in Paper I) to a set of prepared data.

    Results are outputted as two dictionaries
    results = {'comps':best_fit, (list of components)
               'med_and_spans':median and spans of model parameters,
               'memb_probs': membership probability array (the standard one)}
    scores = {'bic': the bic,
              'lnlike': log likelihood of that run,
              'lnpost': log posterior of that run}
    """
    log_message('Beginning Chronostar run', symbol='_', surround=True)
    if self.fit_pars['store_burnin_chains']:
        log_message(msg='Storing burnin chains', symbol='-')
    # With no initialisation provided, start from a single component that
    # owns every star (membership probability 1 in column 0).
    if ((self.fit_pars['init_comps'] is None) and (self.fit_pars['init_memb_probs'] is None)):
        assert (self.ncomps == 1), 'If no initialisation set, can only accept ncomp==1'
        init_memb_probs = np.zeros((len(self.data_dict['means']), (self.ncomps + self.fit_pars['use_background'])))
        init_memb_probs[:, 0] = 1.0
    else:
        log_message(msg=('Initialising with init_comps or init_memb_probs with%i components' % self.ncomps), symbol='*', surround=True)
        pass
    # Fit the initial (typically one-component) model.
    log_message(msg='FITTING {} COMPONENT'.format(self.ncomps), symbol='*', surround=True)
    run_dir = (self.rdir + '{}/'.format(self.ncomps))
    prev_result = self.run_em_unless_loadable(run_dir)
    prev_score = self.calc_score(prev_result['comps'], prev_result['memb_probs'])
    self.ncomps += 1
    # Iteratively grow the model: try splitting each existing component in
    # turn and keep the decomposition with the lowest BIC.
    while (self.ncomps <= self.fit_pars['max_comp_count']):
        log_message(msg='FITTING {} COMPONENT'.format(self.ncomps), symbol='*', surround=True)
        all_results = []
        all_scores = []
        for (i, target_comp) in enumerate(prev_result['comps']):
            # Sub-runs are labelled A, B, C, ... by the index of the split component.
            div_label = chr((ord('A') + i))
            run_dir = (self.rdir + '{}/{}/'.format(self.ncomps, div_label))
            log_message(msg='Subdividing stage {}'.format(div_label), symbol='+', surround=True)
            mkpath(run_dir)
            self.fit_pars['init_comps'] = self.build_init_comps(prev_result['comps'], split_comp_ix=i, prev_med_and_spans=prev_result['med_and_spans'], memb_probs=prev_result['memb_probs'])
            result = self.run_em_unless_loadable(run_dir)
            all_results.append(result)
            score = self.calc_score(result['comps'], result['memb_probs'])
            all_scores.append(score)
            logging.info('Decomposition {} finished with \nBIC: {}\nlnlike: {}\nlnpost: {}'.format(div_label, all_scores[(- 1)]['bic'], all_scores[(- 1)]['lnlike'], all_scores[(- 1)]['lnpost']))
        all_bics = [score['bic'] for score in all_scores]
        # nanargmin skips sub-runs whose BIC came back as NaN.
        best_split_ix = np.nanargmin(all_bics)
        new_result = all_results[best_split_ix]
        new_score = all_scores[best_split_ix]
        self.iter_end_log(best_split_ix, prev_result=prev_result, new_result=new_result)
        self.log_score_comparison(new=new_score, prev=prev_score)
        if (new_score['bic'] < prev_score['bic']):
            # The split improved the BIC: accept it and keep growing.
            prev_score = new_score
            prev_result = new_result
            self.ncomps += 1
            log_message(msg='Commencing {} component fit on {}{}'.format(self.ncomps, (self.ncomps - 1), chr((ord('A') + best_split_ix))), symbol='+')
        else:
            # No improvement: persist the previous (best) fit, then stop.
            logging.info('... saving previous fit as best fit to data')
            self.Component.store_raw_components((self.rdir + self.final_comps_file), prev_result['comps'])
            np.save((self.rdir + self.final_med_and_spans_file), prev_result['med_and_spans'])
            np.save((self.rdir + self.final_memb_probs_file), prev_result['memb_probs'])
            np.save((self.rdir + 'final_likelihood_post_and_bic'), prev_score)
            tabcomps = self.Component.convert_components_array_into_astropy_table(prev_result['comps'])
            if self.fit_pars['overwrite_fits']:
                tabcomps.write(os.path.join(self.rdir, ('final_comps_%d.fits' % len(prev_result['comps']))), overwrite=self.fit_pars['overwrite_fits'])
            else:
                # Avoid clobbering earlier output by appending a random suffix.
                filename_comps_fits_random = os.path.join(self.rdir, ('final_comps_%d_%s.fits' % (len(prev_result['comps']), str(uuid.uuid4().hex))))
                tabcomps.write(filename_comps_fits_random, overwrite=self.fit_pars['overwrite_fits'])
            try:
                if self.fit_pars['overwrite_fits']:
                    tabletool.construct_an_astropy_table_with_gaia_ids_and_membership_probabilities(self.fit_pars['data_table'], prev_result['memb_probs'], prev_result['comps'], os.path.join(self.rdir, ('final_memberships_%d.fits' % len(prev_result['comps']))), get_background_overlaps=True, stellar_id_colname=self.fit_pars['stellar_id_colname'], overwrite_fits=self.fit_pars['overwrite_fits'])
                else:
                    filename_memb_probs_fits_random = os.path.join(self.rdir, ('final_memberships_%d_%s.fits' % (len(prev_result['comps']), str(uuid.uuid4().hex))))
                    tabletool.construct_an_astropy_table_with_gaia_ids_and_membership_probabilities(self.fit_pars['data_table'], prev_result['memb_probs'], prev_result['comps'], filename_memb_probs_fits_random, get_background_overlaps=True, stellar_id_colname=self.fit_pars['stellar_id_colname'], overwrite_fits=self.fit_pars['overwrite_fits'])
            except:
                # NOTE(review): bare except deliberately keeps the run alive
                # when the membership table cannot be written.
                logging.info("[WARNING] Couldn't print membership.fits file. Check column id.")
            self.log_final_log(prev_result, prev_score)
            break
        logging.info('Best fit:\n{}'.format([group.get_pars() for group in prev_result['comps']]))
    if (self.ncomps >= self.fit_pars['max_comp_count']):
        log_message(msg='REACHED MAX COMP LIMIT', symbol='+', surround=True)
    return (prev_result, prev_score)
|
@click.command(name='cache.files', help='List files in state cache diretory')
@default_state_options
def files(state, datefilter=''):
    """List the files currently stored in a state's cache directory.

    A state abbreviation is required; an optional date filter narrows
    the listing. The cache must already be populated for anything to
    show up.
    """
    matched = StateCache(state).list_dir(datefilter)
    if not matched:
        msg = 'No files found'
        if datefilter:
            msg += ' using date filter: %s' % datefilter
        print(msg)
    else:
        print_files(matched)
| 1,475,680,113,284,084,700
|
List files in state cache directory
State is required. Optionally provide a date
filter to limit results.
NOTE: Cache must be populated in order to load data.
|
openelex/tasks/cache.py
|
files
|
ColCarroll/openelections-core
|
python
|
@click.command(name='cache.files', help='List files in state cache diretory')
@default_state_options
# BUG FIX: this copy of the function had a bare `datefilter=` default,
# which is a SyntaxError; restore the empty-string default used by the
# sibling definition of this command.
def files(state, datefilter=''):
    """List files in state cache diretory

    State is required. Optionally provide a date
    filter to limit results.

    NOTE: Cache must be populated in order to load data.
    """
    cache = StateCache(state)
    files = cache.list_dir(datefilter)
    if files:
        print_files(files)
    else:
        # Tell the user why nothing was printed.
        msg = 'No files found'
        if datefilter:
            msg += (' using date filter: %s' % datefilter)
        print(msg)
|
@click.command(name='cache.clear', help='Delete files in state cache diretory')
@default_state_options
def clear(state, datefilter=''):
    """Remove cached files for a state, optionally restricted by a date filter."""
    StateCache(state).clear(datefilter)
| -8,522,599,112,594,594,000
|
Delete files in state cache directory
State is required. Optionally provide a date
filter to limit results.
|
openelex/tasks/cache.py
|
clear
|
ColCarroll/openelections-core
|
python
|
@click.command(name='cache.clear', help='Delete files in state cache diretory')
@default_state_options
# BUG FIX: this copy had a bare `datefilter=` default (SyntaxError);
# restore the empty-string default from the sibling definition.
def clear(state, datefilter=''):
    """Delete files in state cache diretory

    State is required. Optionally provide a date
    filter to limit results.
    """
    cache = StateCache(state)
    cache.clear(datefilter)
|
async def async_setup(hass, yaml_config):
    'Activate the emulated_hue component.'
    config = Config(hass, yaml_config.get(DOMAIN, {}))
    app = web.Application()
    app['hass'] = hass
    # No trusted proxies: resolve the client IP from the socket only.
    real_ip.setup_real_ip(app, False, [])
    # NOTE(review): freezing the private _on_startup signal allows calling
    # app.startup() manually before a runner exists -- this relies on
    # aiohttp internals; confirm against the pinned aiohttp version.
    app._on_startup.freeze()
    (await app.startup())
    runner = None
    site = None
    # Register every emulated-hue HTTP view on the application router.
    DescriptionXmlView(config).register(app, app.router)
    HueUsernameView().register(app, app.router)
    HueAllLightsStateView(config).register(app, app.router)
    HueOneLightStateView(config).register(app, app.router)
    HueOneLightChangeView(config).register(app, app.router)
    HueAllGroupsStateView(config).register(app, app.router)
    HueGroupView(config).register(app, app.router)
    upnp_listener = UPNPResponderThread(config.host_ip_addr, config.listen_port, config.upnp_bind_multicast, config.advertise_ip, config.advertise_port)

    async def stop_emulated_hue_bridge(event):
        'Stop the emulated hue bridge.'
        upnp_listener.stop()
        if site:
            (await site.stop())
        if runner:
            (await runner.cleanup())

    async def start_emulated_hue_bridge(event):
        'Start the emulated hue bridge.'
        upnp_listener.start()
        # Rebind the enclosing-scope handles so the stop handler can
        # clean them up later.
        nonlocal site
        nonlocal runner
        runner = web.AppRunner(app)
        (await runner.setup())
        site = web.TCPSite(runner, config.host_ip_addr, config.listen_port)
        try:
            (await site.start())
        except OSError as error:
            _LOGGER.error('Failed to create HTTP server at port %d: %s', config.listen_port, error)
        else:
            # Only wire up shutdown handling once the bind succeeded.
            hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_emulated_hue_bridge)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_emulated_hue_bridge)
    return True
| 2,128,652,928,823,814,100
|
Activate the emulated_hue component.
|
homeassistant/components/emulated_hue/__init__.py
|
async_setup
|
0x00-0xFF/home-assistant
|
python
|
async def async_setup(hass, yaml_config):
    """Activate the emulated_hue component."""
    config = Config(hass, yaml_config.get(DOMAIN, {}))
    app = web.Application()
    app['hass'] = hass
    real_ip.setup_real_ip(app, False, [])
    # NOTE(review): touches aiohttp's private _on_startup to allow a manual
    # startup() before a runner exists -- confirm against pinned aiohttp.
    app._on_startup.freeze()
    (await app.startup())
    runner = None
    site = None
    # Register the emulated-hue HTTP views.
    DescriptionXmlView(config).register(app, app.router)
    HueUsernameView().register(app, app.router)
    HueAllLightsStateView(config).register(app, app.router)
    HueOneLightStateView(config).register(app, app.router)
    HueOneLightChangeView(config).register(app, app.router)
    HueAllGroupsStateView(config).register(app, app.router)
    HueGroupView(config).register(app, app.router)
    upnp_listener = UPNPResponderThread(config.host_ip_addr, config.listen_port, config.upnp_bind_multicast, config.advertise_ip, config.advertise_port)

    async def stop_emulated_hue_bridge(event):
        'Stop the emulated hue bridge.'
        upnp_listener.stop()
        if site:
            (await site.stop())
        if runner:
            (await runner.cleanup())

    async def start_emulated_hue_bridge(event):
        'Start the emulated hue bridge.'
        upnp_listener.start()
        nonlocal site
        nonlocal runner
        runner = web.AppRunner(app)
        (await runner.setup())
        site = web.TCPSite(runner, config.host_ip_addr, config.listen_port)
        try:
            (await site.start())
        except OSError as error:
            _LOGGER.error('Failed to create HTTP server at port %d: %s', config.listen_port, error)
        else:
            # Shutdown handler is registered only on successful bind.
            hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_emulated_hue_bridge)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_emulated_hue_bridge)
    return True
|
def _load_json(filename):
    'Wrapper, because we actually want to handle invalid json.'
    try:
        return load_json(filename)
    except HomeAssistantError:
        # A corrupt/unreadable file is treated as "no saved data".
        pass
    return {}
| -2,627,731,872,973,515,000
|
Wrapper, because we actually want to handle invalid json.
|
homeassistant/components/emulated_hue/__init__.py
|
_load_json
|
0x00-0xFF/home-assistant
|
python
|
def _load_json(filename):
    """Wrapper, because we actually want to handle invalid json."""
    try:
        return load_json(filename)
    except HomeAssistantError:
        # Invalid JSON on disk falls through to an empty mapping.
        pass
    return {}
|
async def stop_emulated_hue_bridge(event):
    'Stop the emulated hue bridge.'
    # NOTE(review): relies on closure variables `upnp_listener`, `site`
    # and `runner` from the enclosing async_setup.
    upnp_listener.stop()
    if site:
        (await site.stop())
    if runner:
        (await runner.cleanup())
| -2,079,382,594,621,797,000
|
Stop the emulated hue bridge.
|
homeassistant/components/emulated_hue/__init__.py
|
stop_emulated_hue_bridge
|
0x00-0xFF/home-assistant
|
python
|
async def stop_emulated_hue_bridge(event):
    """Stop the emulated hue bridge."""
    # Closure variables `upnp_listener`, `site`, `runner` come from the
    # enclosing async_setup.
    upnp_listener.stop()
    if site:
        (await site.stop())
    if runner:
        (await runner.cleanup())
|
async def start_emulated_hue_bridge(event):
    'Start the emulated hue bridge.'
    upnp_listener.start()
    # NOTE(review): `nonlocal` is only valid when this coroutine is nested
    # inside async_setup; the names rebind the enclosing-scope handles.
    nonlocal site
    nonlocal runner
    runner = web.AppRunner(app)
    (await runner.setup())
    site = web.TCPSite(runner, config.host_ip_addr, config.listen_port)
    try:
        (await site.start())
    except OSError as error:
        _LOGGER.error('Failed to create HTTP server at port %d: %s', config.listen_port, error)
    else:
        # Register shutdown handling only after a successful bind.
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_emulated_hue_bridge)
| -1,601,674,871,553,881,000
|
Start the emulated hue bridge.
|
homeassistant/components/emulated_hue/__init__.py
|
start_emulated_hue_bridge
|
0x00-0xFF/home-assistant
|
python
|
async def start_emulated_hue_bridge(event):
    """Start the emulated hue bridge."""
    upnp_listener.start()
    # Valid only when nested inside async_setup (nonlocal bindings).
    nonlocal site
    nonlocal runner
    runner = web.AppRunner(app)
    (await runner.setup())
    site = web.TCPSite(runner, config.host_ip_addr, config.listen_port)
    try:
        (await site.start())
    except OSError as error:
        _LOGGER.error('Failed to create HTTP server at port %d: %s', config.listen_port, error)
    else:
        # Shutdown handler registered only after a successful bind.
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_emulated_hue_bridge)
|
def __init__(self, hass, conf):
    'Initialize the instance.'
    self.hass = hass
    self.type = conf.get(CONF_TYPE)
    # Entity-id <-> number mapping; loaded lazily from NUMBERS_FILE.
    self.numbers = None
    self.cached_states = {}
    if (self.type == TYPE_ALEXA):
        _LOGGER.warning('Emulated Hue running in legacy mode because type has been specified. More info at https://goo.gl/M6tgz8')
    self.host_ip_addr = conf.get(CONF_HOST_IP)
    if (self.host_ip_addr is None):
        # Fall back to auto-detecting the local address.
        self.host_ip_addr = util.get_local_ip()
        _LOGGER.info('Listen IP address not specified, auto-detected address is %s', self.host_ip_addr)
    self.listen_port = conf.get(CONF_LISTEN_PORT)
    if (not isinstance(self.listen_port, int)):
        self.listen_port = DEFAULT_LISTEN_PORT
        _LOGGER.info('Listen port not specified, defaulting to %s', self.listen_port)
    self.upnp_bind_multicast = conf.get(CONF_UPNP_BIND_MULTICAST, DEFAULT_UPNP_BIND_MULTICAST)
    self.off_maps_to_on_domains = conf.get(CONF_OFF_MAPS_TO_ON_DOMAINS)
    if (not isinstance(self.off_maps_to_on_domains, list)):
        self.off_maps_to_on_domains = DEFAULT_OFF_MAPS_TO_ON_DOMAINS
    self.expose_by_default = conf.get(CONF_EXPOSE_BY_DEFAULT, DEFAULT_EXPOSE_BY_DEFAULT)
    self.exposed_domains = conf.get(CONF_EXPOSED_DOMAINS, DEFAULT_EXPOSED_DOMAINS)
    # Advertised address/port default to the actual listen address/port.
    self.advertise_ip = (conf.get(CONF_ADVERTISE_IP) or self.host_ip_addr)
    self.advertise_port = (conf.get(CONF_ADVERTISE_PORT) or self.listen_port)
    self.entities = conf.get(CONF_ENTITIES, {})
| -4,658,504,909,304,408,000
|
Initialize the instance.
|
homeassistant/components/emulated_hue/__init__.py
|
__init__
|
0x00-0xFF/home-assistant
|
python
|
def __init__(self, hass, conf):
    """Initialize the instance from the emulated_hue config mapping."""
    self.hass = hass
    self.type = conf.get(CONF_TYPE)
    # Entity-id <-> number mapping; loaded lazily on first use.
    self.numbers = None
    self.cached_states = {}
    if (self.type == TYPE_ALEXA):
        _LOGGER.warning('Emulated Hue running in legacy mode because type has been specified. More info at https://goo.gl/M6tgz8')
    self.host_ip_addr = conf.get(CONF_HOST_IP)
    if (self.host_ip_addr is None):
        self.host_ip_addr = util.get_local_ip()
        _LOGGER.info('Listen IP address not specified, auto-detected address is %s', self.host_ip_addr)
    self.listen_port = conf.get(CONF_LISTEN_PORT)
    if (not isinstance(self.listen_port, int)):
        self.listen_port = DEFAULT_LISTEN_PORT
        _LOGGER.info('Listen port not specified, defaulting to %s', self.listen_port)
    self.upnp_bind_multicast = conf.get(CONF_UPNP_BIND_MULTICAST, DEFAULT_UPNP_BIND_MULTICAST)
    self.off_maps_to_on_domains = conf.get(CONF_OFF_MAPS_TO_ON_DOMAINS)
    if (not isinstance(self.off_maps_to_on_domains, list)):
        self.off_maps_to_on_domains = DEFAULT_OFF_MAPS_TO_ON_DOMAINS
    self.expose_by_default = conf.get(CONF_EXPOSE_BY_DEFAULT, DEFAULT_EXPOSE_BY_DEFAULT)
    self.exposed_domains = conf.get(CONF_EXPOSED_DOMAINS, DEFAULT_EXPOSED_DOMAINS)
    # Advertised address/port fall back to the actual listen address/port.
    self.advertise_ip = (conf.get(CONF_ADVERTISE_IP) or self.host_ip_addr)
    self.advertise_port = (conf.get(CONF_ADVERTISE_PORT) or self.listen_port)
    self.entities = conf.get(CONF_ENTITIES, {})
|
def entity_id_to_number(self, entity_id):
    """Get a unique number for the entity id."""
    if self.type == TYPE_ALEXA:
        # Legacy Alexa mode addresses entities by id directly.
        return entity_id
    if self.numbers is None:
        self.numbers = _load_json(self.hass.config.path(NUMBERS_FILE))
    # Reuse an existing assignment when one is on record.
    for known_number, known_entity in self.numbers.items():
        if known_entity == entity_id:
            return known_number
    # Otherwise allocate the next free number and persist the mapping.
    if self.numbers:
        fresh = str(max(int(key) for key in self.numbers) + 1)
    else:
        fresh = '1'
    self.numbers[fresh] = entity_id
    save_json(self.hass.config.path(NUMBERS_FILE), self.numbers)
    return fresh
| -1,633,032,628,719,011,600
|
Get a unique number for the entity id.
|
homeassistant/components/emulated_hue/__init__.py
|
entity_id_to_number
|
0x00-0xFF/home-assistant
|
python
|
def entity_id_to_number(self, entity_id):
    """Get a unique number for the entity id."""
    if (self.type == TYPE_ALEXA):
        return entity_id
    if (self.numbers is None):
        # Lazily load the persisted entity-id <-> number mapping.
        self.numbers = _load_json(self.hass.config.path(NUMBERS_FILE))
    for (number, ent_id) in self.numbers.items():
        if (entity_id == ent_id):
            return number
    number = '1'
    if self.numbers:
        # Next free number is one past the current maximum.
        number = str((max((int(k) for k in self.numbers)) + 1))
    self.numbers[number] = entity_id
    save_json(self.hass.config.path(NUMBERS_FILE), self.numbers)
    return number
|
def number_to_entity_id(self, number):
    'Convert unique number to entity id.'
    if (self.type == TYPE_ALEXA):
        # Legacy Alexa mode: the "number" already is the entity id.
        return number
    if (self.numbers is None):
        self.numbers = _load_json(self.hass.config.path(NUMBERS_FILE))
    # Mapping keys are strings; reject misuse early.
    assert isinstance(number, str)
    return self.numbers.get(number)
| -6,532,583,345,988,421,000
|
Convert unique number to entity id.
|
homeassistant/components/emulated_hue/__init__.py
|
number_to_entity_id
|
0x00-0xFF/home-assistant
|
python
|
def number_to_entity_id(self, number):
    """Convert unique number to entity id."""
    if (self.type == TYPE_ALEXA):
        return number
    if (self.numbers is None):
        # Lazily load the persisted mapping.
        self.numbers = _load_json(self.hass.config.path(NUMBERS_FILE))
    assert isinstance(number, str)
    return self.numbers.get(number)
|
def get_entity_name(self, entity):
    """Get the name of an entity."""
    # A per-entity config override wins over the state attribute.
    overrides = self.entities.get(entity.entity_id, {})
    if CONF_ENTITY_NAME in overrides:
        return overrides[CONF_ENTITY_NAME]
    return entity.attributes.get(ATTR_EMULATED_HUE_NAME, entity.name)
| 7,057,863,674,962,129,000
|
Get the name of an entity.
|
homeassistant/components/emulated_hue/__init__.py
|
get_entity_name
|
0x00-0xFF/home-assistant
|
python
|
def get_entity_name(self, entity):
    """Get the name of an entity."""
    # Per-entity config override wins over the state attribute / default name.
    if ((entity.entity_id in self.entities) and (CONF_ENTITY_NAME in self.entities[entity.entity_id])):
        return self.entities[entity.entity_id][CONF_ENTITY_NAME]
    return entity.attributes.get(ATTR_EMULATED_HUE_NAME, entity.name)
|
def is_entity_exposed(self, entity):
    'Determine if an entity should be exposed on the emulated bridge.\n\n    Async friendly.\n    '
    if (entity.attributes.get('view') is not None):
        # Group views are never exposed.
        return False
    domain = entity.domain.lower()
    explicit_expose = entity.attributes.get(ATTR_EMULATED_HUE, None)
    explicit_hidden = entity.attributes.get(ATTR_EMULATED_HUE_HIDDEN, None)
    if ((entity.entity_id in self.entities) and (CONF_ENTITY_HIDDEN in self.entities[entity.entity_id])):
        # Per-entity config overrides the state attribute.
        explicit_hidden = self.entities[entity.entity_id][CONF_ENTITY_HIDDEN]
    # Tri-state: True = force expose, False = force hide, None = no opinion.
    if ((explicit_expose is True) or (explicit_hidden is False)):
        expose = True
    elif ((explicit_expose is False) or (explicit_hidden is True)):
        expose = False
    else:
        expose = None
    # NOTE(review): return value discarded -- presumably called for its
    # deprecation-warning side effect; confirm get_deprecated's semantics.
    get_deprecated(entity.attributes, ATTR_EMULATED_HUE_HIDDEN, ATTR_EMULATED_HUE, None)
    domain_exposed_by_default = (self.expose_by_default and (domain in self.exposed_domains))
    is_default_exposed = (domain_exposed_by_default and (expose is not False))
    return (is_default_exposed or expose)
| -1,462,506,348,889,897,000
|
Determine if an entity should be exposed on the emulated bridge.
Async friendly.
|
homeassistant/components/emulated_hue/__init__.py
|
is_entity_exposed
|
0x00-0xFF/home-assistant
|
python
|
def is_entity_exposed(self, entity):
    'Determine if an entity should be exposed on the emulated bridge.\n\n    Async friendly.\n    '
    if (entity.attributes.get('view') is not None):
        return False
    domain = entity.domain.lower()
    explicit_expose = entity.attributes.get(ATTR_EMULATED_HUE, None)
    explicit_hidden = entity.attributes.get(ATTR_EMULATED_HUE_HIDDEN, None)
    if ((entity.entity_id in self.entities) and (CONF_ENTITY_HIDDEN in self.entities[entity.entity_id])):
        explicit_hidden = self.entities[entity.entity_id][CONF_ENTITY_HIDDEN]
    # Tri-state: True = force expose, False = force hide, None = no opinion.
    if ((explicit_expose is True) or (explicit_hidden is False)):
        expose = True
    elif ((explicit_expose is False) or (explicit_hidden is True)):
        expose = False
    else:
        expose = None
    # NOTE(review): result discarded -- likely called for a deprecation
    # warning side effect; confirm.
    get_deprecated(entity.attributes, ATTR_EMULATED_HUE_HIDDEN, ATTR_EMULATED_HUE, None)
    domain_exposed_by_default = (self.expose_by_default and (domain in self.exposed_domains))
    is_default_exposed = (domain_exposed_by_default and (expose is not False))
    return (is_default_exposed or expose)
|
def __init__(self):
    """ESMInterfaceTypeData - a model defined in Swagger.

    swagger_types maps attribute name -> attribute type;
    attribute_map maps attribute name -> JSON key in the definition.
    """
    self.swagger_types = {'io_interface_type': 'str', 'port_list': 'PortList'}
    self.attribute_map = {'io_interface_type': 'ioInterfaceType', 'port_list': 'portList'}
    # Backing fields for the generated properties below.
    self._io_interface_type = None
    self._port_list = None
| 2,766,354,809,802,065,000
|
ESMInterfaceTypeData - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
__init__
|
NetApp/santricity-webapi-pythonsdk
|
python
|
def __init__(self):
    '\n        ESMInterfaceTypeData - a model defined in Swagger\n\n        :param dict swaggerTypes: The key is attribute name\n        and the value is attribute type.\n        :param dict attributeMap: The key is attribute name\n        and the value is json key in definition.\n        '
    self.swagger_types = {'io_interface_type': 'str', 'port_list': 'PortList'}
    self.attribute_map = {'io_interface_type': 'ioInterfaceType', 'port_list': 'portList'}
    # Backing fields for the generated properties below.
    self._io_interface_type = None
    self._port_list = None
|
@property
def io_interface_type(self):
    """Gets the io_interface_type of this ESMInterfaceTypeData.

    This enumeration defines the different I/O interface types that may be
    reported as part of the configuration information associated with a
    controller.

    :return: The io_interface_type of this ESMInterfaceTypeData.
    :rtype: str
    :required/optional: required
    """
    # Plain accessor over the backing field set by the validating setter.
    return self._io_interface_type
| -4,076,907,369,107,872,000
|
Gets the io_interface_type of this ESMInterfaceTypeData.
This enumeration defines the different I/O interface types that may be reported as part of the configuration information associated with a controller.
:return: The io_interface_type of this ESMInterfaceTypeData.
:rtype: str
:required/optional: required
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
io_interface_type
|
NetApp/santricity-webapi-pythonsdk
|
python
|
@property
def io_interface_type(self):
    """Gets the io_interface_type of this ESMInterfaceTypeData.

    :return: The io_interface_type of this ESMInterfaceTypeData.
    :rtype: str
    :required/optional: required
    """
    # Simple accessor; validation happens in the setter.
    return self._io_interface_type
|
@io_interface_type.setter
def io_interface_type(self, io_interface_type):
    """Sets the io_interface_type of this ESMInterfaceTypeData.

    The value must be one of the Swagger enumeration members for I/O
    interface types.

    :param io_interface_type: The io_interface_type of this ESMInterfaceTypeData.
    :type: str
    :raises ValueError: if the value is not a member of the enumeration.
    """
    allowed_values = ['notImplemented', 'scsi', 'fc', 'sata', 'sas', 'iscsi', 'ib', 'fcoe', 'nvmeof', '__UNDEFINED']
    if io_interface_type in allowed_values:
        self._io_interface_type = io_interface_type
    else:
        raise ValueError('Invalid value for `io_interface_type`, must be one of {0}'.format(allowed_values))
| 7,809,298,042,549,681,000
|
Sets the io_interface_type of this ESMInterfaceTypeData.
This enumeration defines the different I/O interface types that may be reported as part of the configuration information associated with a controller.
:param io_interface_type: The io_interface_type of this ESMInterfaceTypeData.
:type: str
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
io_interface_type
|
NetApp/santricity-webapi-pythonsdk
|
python
|
@io_interface_type.setter
def io_interface_type(self, io_interface_type):
    """Sets the io_interface_type of this ESMInterfaceTypeData.

    :param io_interface_type: The io_interface_type of this ESMInterfaceTypeData.
    :type: str
    :raises ValueError: if the value is not one of the Swagger enum members.
    """
    allowed_values = ['notImplemented', 'scsi', 'fc', 'sata', 'sas', 'iscsi', 'ib', 'fcoe', 'nvmeof', '__UNDEFINED']
    if (io_interface_type not in allowed_values):
        raise ValueError('Invalid value for `io_interface_type`, must be one of {0}'.format(allowed_values))
    self._io_interface_type = io_interface_type
|
@property
def port_list(self):
    """Gets the port_list of this ESMInterfaceTypeData.

    A list of detailed information for each port.

    :return: The port_list of this ESMInterfaceTypeData.
    :rtype: PortList
    :required/optional: optional
    """
    # Plain accessor over the backing field.
    return self._port_list
| 5,520,007,831,716,388,000
|
Gets the port_list of this ESMInterfaceTypeData.
A list of detailed information for each port.
:return: The port_list of this ESMInterfaceTypeData.
:rtype: PortList
:required/optional: optional
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
port_list
|
NetApp/santricity-webapi-pythonsdk
|
python
|
@property
def port_list(self):
    """Gets the port_list of this ESMInterfaceTypeData.

    :return: The port_list of this ESMInterfaceTypeData.
    :rtype: PortList
    :required/optional: optional
    """
    # Simple accessor; no validation is applied to this field.
    return self._port_list
|
@port_list.setter
def port_list(self, port_list):
    """Sets the port_list of this ESMInterfaceTypeData.

    A list of detailed information for each port.

    :param port_list: The port_list of this ESMInterfaceTypeData.
    :type: PortList
    """
    # No validation is performed for this field.
    self._port_list = port_list
| 5,297,459,435,244,345,000
|
Sets the port_list of this ESMInterfaceTypeData.
A list of detailed information for each port.
:param port_list: The port_list of this ESMInterfaceTypeData.
:type: PortList
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
port_list
|
NetApp/santricity-webapi-pythonsdk
|
python
|
@port_list.setter
def port_list(self, port_list):
    """Sets the port_list of this ESMInterfaceTypeData.

    :param port_list: The port_list of this ESMInterfaceTypeData.
    :type: PortList
    """
    # Unvalidated assignment to the backing field.
    self._port_list = port_list
|
def to_dict(self):
    """Returns the model properties as a dict"""
    result = {}
    for attr, _ in iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            # Recursively serialise model objects found inside lists.
            result[attr] = [item.to_dict() if hasattr(item, 'to_dict') else item
                            for item in value]
        elif hasattr(value, 'to_dict'):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            # Recursively serialise model objects found as dict values.
            result[attr] = {key: (val.to_dict() if hasattr(val, 'to_dict') else val)
                            for key, val in value.items()}
        else:
            result[attr] = value
    return result
| 2,191,974,537,531,847,000
|
Returns the model properties as a dict
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
to_dict
|
NetApp/santricity-webapi-pythonsdk
|
python
|
def to_dict(self):
    """Returns the model properties as a dict."""
    result = {}
    for (attr, _) in iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            # Recursively serialise model items inside lists.
            result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
        elif hasattr(value, 'to_dict'):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            # Recursively serialise model values inside dicts.
            result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
        else:
            result[attr] = value
    return result
|
def to_str(self):
    '\n        Returns the string representation of the model\n        '
    # pprint-formatted dump of the serialised property dict.
    return pformat(self.to_dict())
| -3,531,024,894,346,511,000
|
Returns the string representation of the model
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
to_str
|
NetApp/santricity-webapi-pythonsdk
|
python
|
def to_str(self):
    """Returns the string representation of the model."""
    # pprint-formatted dump of the serialised property dict.
    return pformat(self.to_dict())
|
def __repr__(self):
    '\n        For `print` and `pprint`\n        '
    # NOTE(review): `self is None` can never be true for a bound method;
    # this generated guard is effectively dead code.
    if (self is None):
        return None
    return self.to_str()
| 6,088,887,181,750,408,000
|
For `print` and `pprint`
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
__repr__
|
NetApp/santricity-webapi-pythonsdk
|
python
|
def __repr__(self):
    """For `print` and `pprint`."""
    # NOTE(review): the `self is None` guard is unreachable for a bound
    # method -- generated boilerplate, effectively dead code.
    if (self is None):
        return None
    return self.to_str()
|
def __eq__(self, other):
    '\n        Returns true if both objects are equal\n        '
    # NOTE(review): returning None (rather than False/NotImplemented) when
    # either side is None is unusual; None is falsy so `==` reads as
    # "not equal", but __ne__ inverts it to True.
    if ((self is None) or (other is None)):
        return None
    return (self.__dict__ == other.__dict__)
| 6,708,289,971,086,792,000
|
Returns true if both objects are equal
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
__eq__
|
NetApp/santricity-webapi-pythonsdk
|
python
|
def __eq__(self, other):
    """Returns true if both objects are equal."""
    # NOTE(review): returning None instead of False/NotImplemented is
    # unusual generated-code behaviour; kept as-is.
    if ((self is None) or (other is None)):
        return None
    return (self.__dict__ == other.__dict__)
|
def __ne__(self, other):
    '\n        Returns true if both objects are not equal\n        '
    # Defined as the negation of __eq__.
    return (not (self == other))
| 3,600,423,175,817,510,400
|
Returns true if both objects are not equal
|
netapp/santricity/models/symbol/esm_interface_type_data.py
|
__ne__
|
NetApp/santricity-webapi-pythonsdk
|
python
|
def __ne__(self, other):
    """Returns true if both objects are not equal."""
    # Defined as the negation of __eq__.
    return (not (self == other))
|
def ParseOptions():
    """Parses the command line options.

    In case of command line errors, it will show the usage and exit the
    program.

    @return: the options in a tuple

    """
    opt_parser = optparse.OptionParser()
    # Declare both options from a small table to keep them side by side.
    for short, dest, default, kind, helptext, metavar in (
            ('-t', 'thread_count', 1, 'int', 'Number of threads', 'NUM'),
            ('-d', 'duration', 5, 'float', 'Duration', 'SECS')):
        opt_parser.add_option(short, dest=dest, default=default, type=kind,
                              help=helptext, metavar=metavar)
    options, arguments = opt_parser.parse_args()
    if options.thread_count < 1:
        opt_parser.error('Number of threads must be at least 1')
    return (options, arguments)
| -5,343,402,722,454,307,000
|
Parses the command line options.
In case of command line errors, it will show the usage and exit the
program.
@return: the options in a tuple
|
test/py/lockperf.py
|
ParseOptions
|
RegioHelden/ganeti
|
python
|
def ParseOptions():
    'Parses the command line options.\n\n  In case of command line errors, it will show the usage and exit the\n  program.\n\n  @return: the options in a tuple\n\n  '
    parser = optparse.OptionParser()
    parser.add_option('-t', dest='thread_count', default=1, type='int', help='Number of threads', metavar='NUM')
    parser.add_option('-d', dest='duration', default=5, type='float', help='Duration', metavar='SECS')
    (opts, args) = parser.parse_args()
    if (opts.thread_count < 1):
        # parser.error prints usage and exits the process.
        parser.error('Number of threads must be at least 1')
    return (opts, args)
|
def _Counter(lock, state, me):
    """Thread function for acquiring locks.

    Loops forever: takes `lock`, bumps this thread's counter and the
    shared total, and checks mutual exclusion via the `verify` array.
    """
    counts = state.counts
    verify = state.verify
    while True:
        lock.acquire()
        try:
            # Mark this thread as inside the critical section.
            verify[me] = 1
            counts[me] += 1
            state.total_count += 1
            if ((state.total_count % 1000) == 0):
                # Progress indicator, rewritten in place via carriage return.
                sys.stdout.write((' %8d\r' % state.total_count))
                sys.stdout.flush()
            if (sum(verify) != 1):
                # More than one thread inside the lock: abort hard.
                print('Inconsistent state!')
                os._exit(1)
            verify[me] = 0
        finally:
            lock.release()
| 6,262,570,621,122,128,000
|
Thread function for acquiring locks.
|
test/py/lockperf.py
|
_Counter
|
RegioHelden/ganeti
|
python
|
def _Counter(lock, state, me):
    """Thread function for acquiring locks."""
    counts = state.counts
    verify = state.verify
    while True:
        lock.acquire()
        try:
            # Mark this thread as inside the critical section.
            verify[me] = 1
            counts[me] += 1
            state.total_count += 1
            if ((state.total_count % 1000) == 0):
                sys.stdout.write((' %8d\r' % state.total_count))
                sys.stdout.flush()
            if (sum(verify) != 1):
                # Mutual exclusion violated: abort the whole process.
                print('Inconsistent state!')
                os._exit(1)
            verify[me] = 0
        finally:
            lock.release()
|
def __init__(self, thread_count):
    'Initializes this class.\n\n    '
    # Per-thread in-critical-section flags and acquisition counters.
    self.verify = [0 for _ in range(thread_count)]
    self.counts = [0 for _ in range(thread_count)]
    self.total_count = 0
| 5,832,565,340,899,158,000
|
Initializes this class.
|
test/py/lockperf.py
|
__init__
|
RegioHelden/ganeti
|
python
|
def __init__(self, thread_count):
    """Initializes this class."""
    # Per-thread in-critical-section flags and acquisition counters.
    self.verify = [0 for _ in range(thread_count)]
    self.counts = [0 for _ in range(thread_count)]
    self.total_count = 0
|
def get_network(name, batch_size):
    'Get the symbol definition and random weight of a network'
    # NOTE(review): `dtype` is a free name expected at module level.
    input_shape = (batch_size, 3, 224, 224)
    output_shape = (batch_size, 1000)
    if ('resnet' in name):
        # e.g. 'resnet-18' -> 18 layers
        n_layer = int(name.split('-')[1])
        (mod, params) = relay.testing.resnet.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
    elif ('vgg' in name):
        n_layer = int(name.split('-')[1])
        (mod, params) = relay.testing.vgg.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
    elif (name == 'mobilenet'):
        (mod, params) = relay.testing.mobilenet.get_workload(batch_size=batch_size, dtype=dtype)
    elif (name == 'squeezenet_v1.1'):
        (mod, params) = relay.testing.squeezenet.get_workload(batch_size=batch_size, version='1.1', dtype=dtype)
    elif (name == 'inception_v3'):
        # Inception v3 expects 299x299 inputs.
        input_shape = (batch_size, 3, 299, 299)
        (mod, params) = relay.testing.inception_v3.get_workload(batch_size=batch_size, dtype=dtype)
    elif (name == 'mxnet'):
        # Example of converting a pretrained MXNet model.
        from mxnet.gluon.model_zoo.vision import get_model
        block = get_model('resnet18_v1', pretrained=True)
        (mod, params) = relay.frontend.from_mxnet(block, shape={'data': input_shape}, dtype=dtype)
        net = mod['main']
        # Append a softmax so the network outputs probabilities.
        net = relay.Function(net.params, relay.nn.softmax(net.body), None, net.type_params, net.attrs)
        mod = tvm.IRModule.from_expr(net)
    else:
        raise ValueError(('Unsupported network: ' + name))
    return (mod, params, input_shape, output_shape)
| -8,848,266,499,118,495,000
|
Get the symbol definition and random weight of a network
|
tutorials/autotvm/tune_relay_mobile_gpu.py
|
get_network
|
HemiMin/tvm
|
python
|
def get_network(name, batch_size):
input_shape = (batch_size, 3, 224, 224)
output_shape = (batch_size, 1000)
if ('resnet' in name):
n_layer = int(name.split('-')[1])
(mod, params) = relay.testing.resnet.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
elif ('vgg' in name):
n_layer = int(name.split('-')[1])
(mod, params) = relay.testing.vgg.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
elif (name == 'mobilenet'):
(mod, params) = relay.testing.mobilenet.get_workload(batch_size=batch_size, dtype=dtype)
elif (name == 'squeezenet_v1.1'):
(mod, params) = relay.testing.squeezenet.get_workload(batch_size=batch_size, version='1.1', dtype=dtype)
elif (name == 'inception_v3'):
input_shape = (batch_size, 3, 299, 299)
(mod, params) = relay.testing.inception_v3.get_workload(batch_size=batch_size, dtype=dtype)
elif (name == 'mxnet'):
from mxnet.gluon.model_zoo.vision import get_model
block = get_model('resnet18_v1', pretrained=True)
(mod, params) = relay.frontend.from_mxnet(block, shape={'data': input_shape}, dtype=dtype)
net = mod['main']
net = relay.Function(net.params, relay.nn.softmax(net.body), None, net.type_params, net.attrs)
mod = tvm.IRModule.from_expr(net)
else:
raise ValueError(('Unsupported network: ' + name))
return (mod, params, input_shape, output_shape)
|
def before_fork(self):
'\n Called before the database instance is given to the new process\n '
pass
| 7,063,914,639,212,521,000
|
Called before the database instance is given to the new process
|
dejavu/database.py
|
before_fork
|
KundanGaira/dejavu
|
python
|
def before_fork(self):
'\n \n '
pass
|
def after_fork(self):
'\n Called after the database instance has been given to the new process\n\n This will be called in the new process.\n '
pass
| 1,941,101,492,828,343,000
|
Called after the database instance has been given to the new process
This will be called in the new process.
|
dejavu/database.py
|
after_fork
|
KundanGaira/dejavu
|
python
|
def after_fork(self):
'\n Called after the database instance has been given to the new process\n\n This will be called in the new process.\n '
pass
|
def setup(self):
'\n Called on creation or shortly afterwards.\n '
pass
| 5,840,900,101,847,430,000
|
Called on creation or shortly afterwards.
|
dejavu/database.py
|
setup
|
KundanGaira/dejavu
|
python
|
def setup(self):
'\n \n '
pass
|
@abc.abstractmethod
def empty(self):
'\n Called when the database should be cleared of all data.\n '
pass
| -7,798,511,590,989,507,000
|
Called when the database should be cleared of all data.
|
dejavu/database.py
|
empty
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def empty(self):
'\n \n '
pass
|
@abc.abstractmethod
def delete_unfingerprinted_songs(self):
'\n Called to remove any song entries that do not have any fingerprints\n associated with them.\n '
pass
| 7,479,374,714,272,975,000
|
Called to remove any song entries that do not have any fingerprints
associated with them.
|
dejavu/database.py
|
delete_unfingerprinted_songs
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def delete_unfingerprinted_songs(self):
'\n Called to remove any song entries that do not have any fingerprints\n associated with them.\n '
pass
|
@abc.abstractmethod
def get_num_songs(self):
'\n Returns the amount of songs in the database.\n '
pass
| 2,847,167,348,001,796,000
|
Returns the amount of songs in the database.
|
dejavu/database.py
|
get_num_songs
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def get_num_songs(self):
'\n \n '
pass
|
@abc.abstractmethod
def get_num_fingerprints(self):
'\n Returns the number of fingerprints in the database.\n '
pass
| 1,746,305,380,041,930,200
|
Returns the number of fingerprints in the database.
|
dejavu/database.py
|
get_num_fingerprints
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def get_num_fingerprints(self):
'\n \n '
pass
|
@abc.abstractmethod
def set_song_fingerprinted(self, sid):
'\n Sets a specific song as having all fingerprints in the database.\n\n sid: Song identifier\n '
pass
| -3,826,436,828,348,414,000
|
Sets a specific song as having all fingerprints in the database.
sid: Song identifier
|
dejavu/database.py
|
set_song_fingerprinted
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def set_song_fingerprinted(self, sid):
'\n Sets a specific song as having all fingerprints in the database.\n\n sid: Song identifier\n '
pass
|
@abc.abstractmethod
def get_songs(self):
'\n Returns all fully fingerprinted songs in the database.\n '
pass
| 4,643,965,191,770,006,000
|
Returns all fully fingerprinted songs in the database.
|
dejavu/database.py
|
get_songs
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def get_songs(self):
'\n \n '
pass
|
@abc.abstractmethod
def get_song_by_id(self, sid):
'\n Return a song by its identifier\n\n sid: Song identifier\n '
pass
| 5,484,600,104,413,045,000
|
Return a song by its identifier
sid: Song identifier
|
dejavu/database.py
|
get_song_by_id
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def get_song_by_id(self, sid):
'\n Return a song by its identifier\n\n sid: Song identifier\n '
pass
|
@abc.abstractmethod
def insert(self, hash, sid, offset):
'\n Inserts a single fingerprint into the database.\n\n hash: Part of a sha1 hash, in hexadecimal format\n sid: Song identifier this fingerprint is off\n offset: The offset this hash is from\n '
pass
| -3,168,379,884,000,369,700
|
Inserts a single fingerprint into the database.
hash: Part of a sha1 hash, in hexadecimal format
sid: Song identifier this fingerprint is off
offset: The offset this hash is from
|
dejavu/database.py
|
insert
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def insert(self, hash, sid, offset):
'\n Inserts a single fingerprint into the database.\n\n hash: Part of a sha1 hash, in hexadecimal format\n sid: Song identifier this fingerprint is off\n offset: The offset this hash is from\n '
pass
|
@abc.abstractmethod
def insert_song(self, song_name):
'\n Inserts a song name into the database, returns the new\n identifier of the song.\n\n song_name: The name of the song.\n '
pass
| 7,611,623,995,054,936,000
|
Inserts a song name into the database, returns the new
identifier of the song.
song_name: The name of the song.
|
dejavu/database.py
|
insert_song
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def insert_song(self, song_name):
'\n Inserts a song name into the database, returns the new\n identifier of the song.\n\n song_name: The name of the song.\n '
pass
|
@abc.abstractmethod
def query(self, hash):
'\n Returns all matching fingerprint entries associated with\n the given hash as parameter.\n\n hash: Part of a sha1 hash, in hexadecimal format\n '
pass
| -6,147,768,251,485,444,000
|
Returns all matching fingerprint entries associated with
the given hash as parameter.
hash: Part of a sha1 hash, in hexadecimal format
|
dejavu/database.py
|
query
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def query(self, hash):
'\n Returns all matching fingerprint entries associated with\n the given hash as parameter.\n\n hash: Part of a sha1 hash, in hexadecimal format\n '
pass
|
@abc.abstractmethod
def get_iterable_kv_pairs(self):
'\n Returns all fingerprints in the database.\n '
pass
| -1,993,576,924,303,589,600
|
Returns all fingerprints in the database.
|
dejavu/database.py
|
get_iterable_kv_pairs
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def get_iterable_kv_pairs(self):
'\n \n '
pass
|
@abc.abstractmethod
def insert_hashes(self, sid, hashes):
'\n Insert a multitude of fingerprints.\n\n sid: Song identifier the fingerprints belong to\n hashes: A sequence of tuples in the format (hash, offset)\n - hash: Part of a sha1 hash, in hexadecimal format\n - offset: Offset this hash was created from/at.\n '
pass
| 2,167,338,825,815,032,600
|
Insert a multitude of fingerprints.
sid: Song identifier the fingerprints belong to
hashes: A sequence of tuples in the format (hash, offset)
- hash: Part of a sha1 hash, in hexadecimal format
- offset: Offset this hash was created from/at.
|
dejavu/database.py
|
insert_hashes
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def insert_hashes(self, sid, hashes):
'\n Insert a multitude of fingerprints.\n\n sid: Song identifier the fingerprints belong to\n hashes: A sequence of tuples in the format (hash, offset)\n - hash: Part of a sha1 hash, in hexadecimal format\n - offset: Offset this hash was created from/at.\n '
pass
|
@abc.abstractmethod
def return_matches(self, hashes):
'\n Searches the database for pairs of (hash, offset) values.\n\n hashes: A sequence of tuples in the format (hash, offset)\n - hash: Part of a sha1 hash, in hexadecimal format\n - offset: Offset this hash was created from/at.\n\n Returns a sequence of (sid, offset_difference) tuples.\n\n sid: Song identifier\n offset_difference: (offset - database_offset)\n '
pass
| -780,021,137,811,145,500
|
Searches the database for pairs of (hash, offset) values.
hashes: A sequence of tuples in the format (hash, offset)
- hash: Part of a sha1 hash, in hexadecimal format
- offset: Offset this hash was created from/at.
Returns a sequence of (sid, offset_difference) tuples.
sid: Song identifier
offset_difference: (offset - database_offset)
|
dejavu/database.py
|
return_matches
|
KundanGaira/dejavu
|
python
|
@abc.abstractmethod
def return_matches(self, hashes):
'\n Searches the database for pairs of (hash, offset) values.\n\n hashes: A sequence of tuples in the format (hash, offset)\n - hash: Part of a sha1 hash, in hexadecimal format\n - offset: Offset this hash was created from/at.\n\n Returns a sequence of (sid, offset_difference) tuples.\n\n sid: Song identifier\n offset_difference: (offset - database_offset)\n '
pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.