function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def parse_name(self, name):
    """Extract the store name from the page's <h1> element.

    :param name: scrapy Response/Selector for the store detail page.
    :return: stripped name string, or "" when no <h1> text is present.
    """
    # extract_first() returns None when the XPath matches nothing; guard
    # before .strip() so a missing <h1> cannot raise AttributeError
    # (mirrors the guard already used in parse_phone).
    name = name.xpath('//h1/text()').extract_first()
    if not name:
        return ""
    return name.strip()
def parse_phone(self, phone):
    """Pull the store phone number off the page; empty string when absent."""
    raw = phone.xpath('//div[@class="padding_hf_v sp_padding_qt_v"]/a/text()').extract_first()
    return raw.strip() if raw else ""
def parse_store(self, response):
    """Yield a GeojsonPointItem for one store detail page.

    Name/address/phone/coordinates are extracted by the dedicated parse_*
    helpers; the stable 'ref' identifier arrives via response.meta.
    """
    # NOTE(review): the original bound response.body_as_unicode() to an unused
    # local `data`; dropped, since every field is parsed from `response` itself.
    name = self.parse_name(response)
    address = self.parse_address(response)
    phone = self.parse_phone(response)
    lat, lon = self.parse_latlon(response)
    properties = {
        'ref': response.meta['ref'],
        'phone': phone,
        'lon': lon,
        'lat': lat,
        'name': name,
        'addr_full': address,
    }
    yield GeojsonPointItem(**properties)
def __unicode__(self):
    # Python 2 text representation: prefer the ticker symbol; fall back to the
    # full name when no symbol is set.
    return self.symbol if self.symbol is not None else self.name
def __str__(self):
    # String representation: prefer the ticker symbol; fall back to the full
    # name when no symbol is set (kept in sync with __unicode__ for py2).
    return self.symbol if self.symbol is not None else self.name
def readable_name(self):
    """Human-friendly name; index entries drop their leading marker character."""
    return self.name[1:] if self.is_index else self.name
def year_high(self):
    """Return the 52-week high price as a trimmed decimal string.

    Returns 0.0 (a float, unlike the usual string return -- callers must
    tolerate both) when no quotes exist inside the window.
    """
    today = datetime.now()
    one_year = timedelta(days=52*7)
    # Roll weekend dates back to Friday so the window anchors on a weekday.
    if today.isoweekday() == 6:
        today = today - timedelta(days=1)
    elif today.isoweekday() == 7:
        today = today - timedelta(days=2)
    last_year = today - one_year
    quotes = self.quote_set.filter(quote_date__gt=last_year)
    if quotes.count() == 0:
        return 0.0
    year_high = quotes.aggregate(Max('price_high'))
    # '%f' then strip trailing zeros and a dangling dot: 12.500000 -> '12.5'.
    return ('%f' % year_high['price_high__max']).rstrip('0').rstrip('.')
def year_low(self):
    """Return the 52-week low price as a trimmed decimal string.

    Returns 0.0 (a float, unlike the usual string return -- callers must
    tolerate both) when no quotes exist inside the window.
    """
    today = datetime.now()
    one_year = timedelta(days=52*7)
    # Roll weekend dates back to Friday so the window anchors on a weekday.
    if today.isoweekday() == 6:
        today = today - timedelta(days=1)
    elif today.isoweekday() == 7:
        today = today - timedelta(days=2)
    last_year = today - one_year
    quotes = self.quote_set.filter(quote_date__gt=last_year)
    if quotes.count() == 0:
        return 0.0
    year_low = quotes.aggregate(Min('price_low'))
    # '%f' then strip trailing zeros and a dangling dot: 12.500000 -> '12.5'.
    return ('%f' % year_low['price_low__min']).rstrip('0').rstrip('.')
def __init__(self, client, config, serializer, deserializer):
    # Operations-class plumbing: keep references to the shared pipeline
    # client, client configuration, and the (de)serializers each operation uses.
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
def begin_create_or_update(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
parameters, # type: "_models.DscpConfiguration"
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # LRO callback: deserialize the final DscpConfiguration body; `cls` is an
    # optional custom-deserializer hook captured from the operation's kwargs.
    deserialized = self._deserialize('DscpConfiguration', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
def _delete_initial(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_delete(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Delete LROs have no response body to deserialize; only invoke the
    # optional `cls` hook (captured from the operation's kwargs), else None.
    if cls:
        return cls(pipeline_response, None, {})
def get(
self,
resource_group_name, # type: str
dscp_configuration_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Build the GET request for one page: the first page is built from the
    # operation's URL template and query parameters; later pages reuse the
    # service-supplied next_link verbatim (it already embeds the query string).
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
def get_next(next_link=None):
    # Fetch one page of results synchronously; anything other than HTTP 200 is
    # mapped to a typed error via error_map or raised as HttpResponseError.
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response
def list_all(
self,
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Build the GET request for one page of the subscription-wide listing;
    # follow-up pages reuse next_link verbatim (it already embeds the query).
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list_all.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
def get_next(next_link=None):
    # Fetch one page of results synchronously; anything other than HTTP 200 is
    # mapped to a typed error via error_map or raised as HttpResponseError.
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response
def __init__(self, data: int) -> None:
    # Disjoint-set node. `rank` and `parent` are only declared here; they are
    # assigned later by make_set().
    self.data = data
    self.rank: int
    self.parent: Node
def make_set(x: Node) -> None:
    """Initialise *x* as a singleton set: its own root."""
    # rank is the distance from x to its parent; a root's rank is 0.
    x.parent = x
    x.rank = 0
def union_set(x: Node, y: Node) -> None:
"""
Union of two sets.
set with bigger rank should be parent, so that the
disjoint set tree will be more flat.
"""
x, y = find_set(x), find_set(y)
if x == y:
return | TheAlgorithms/Python | [
154959,
39275,
154959,
147,
1468662241
] |
def find_set(x: Node) -> Node:
    """
    Return the representative (root) of the set containing x.

    Applies path compression: every node on the search path is re-parented
    directly to the root, flattening the tree for future lookups.
    """
    if x != x.parent:
        x.parent = find_set(x.parent)
    return x.parent
def find_python_set(node: Node) -> set:
    """Return whichever hard-coded stdlib set contains node.data."""
    sets = ({0, 1, 2}, {3, 4, 5})
    for candidate in sets:
        if node.data in candidate:
            return candidate
    raise ValueError(f"{node.data} is not in {sets}")
def test_disjoint_set() -> None:
"""
>>> test_disjoint_set()
"""
vertex = [Node(i) for i in range(6)]
for v in vertex:
make_set(v) | TheAlgorithms/Python | [
154959,
39275,
154959,
147,
1468662241
] |
def __init__(self):
    # Accumulated SVG markup; the print_* methods append element strings here.
    self._output = ''
def print_line(self, x1, y1, x2, y2, color=0, width=1):
    """Append an SVG line segment from (x1, y1) to (x2, y2) to the output."""
    self.print_output(_svg_line(x1, y1, x2, y2, color=color, width=width))
def print_square(self, x, y, a, color=0, width=1, border_color=0):
    """Append an axis-aligned square of side *a* with corner at (x, y)."""
    # A square is just a rectangle with equal width and height.
    self.print_output(_svg_rectangle(x, y, a, a, color=color, width=width, border_color=border_color))
def to_file(self, filename):
    """Write the rendered document (str(self)) to *filename*, overwriting it."""
    with open(filename, 'w') as out:
        out.write(str(self))
def _svg_line(x1, y1, x2, y2, color, width):
    """Render one SVG <line/> element string (round caps, trailing newline)."""
    # _svg_color converts the numeric color into an SVG color string.
    color = _svg_color(color)
    return '<line x1="{}" y1="{}" x2="{}" y2="{}" style="stroke-linecap:round;stroke:{};stroke-width:{};" />\n'.format(x1, y1, x2, y2, color, width)
def _svg_rectangle(x, y, a, b, color, width, border_color):
    """Render one SVG <rect/> element string: corner (x, y), width a, height b."""
    # _svg_color converts numeric colors into SVG color strings.
    color = _svg_color(color)
    border_color = _svg_color(border_color)
    return '<rect x="{}" y="{}" width="{}" height="{}" style="fill:{}; stroke:{}; stroke-width:{};" />\n'.format(x, y, a, b, color, border_color, width)
def __init__(self, device, chans=None):
    """When mapping initializes, it immediately grabs the scale and offset for each channel
    specified in chans (or all channels if None). This means that the mapping is only valid
    as long as these values have not changed."""
    self.device = device
    self.scale = {}   # channel name -> scale factor snapshot
    self.offset = {}  # channel name -> offset snapshot
    if chans is None:
        chans = device.listChannels()
    # A single channel name is accepted as well as a list of names.
    if isinstance(chans, six.string_types):
        chans = [chans]
    for ch in chans:
        self.scale[ch] = device.getChanScale(ch)
        self.offset[ch] = device.getChanOffset(ch)
def mapFromDaq(self, chan, data):
    """Convert raw DAQ readings for *chan* into physical units using the
    offset and scale captured when this mapping was created."""
    return (data + self.offset[chan]) * self.scale[chan]
def __init__(self, dev, channel):
    # Lightweight handle binding one named channel to its owning device.
    self.dev = dev
    self.channel = channel
def __init__(self, dm, config, name):
    """Validate the per-channel configuration, fill in scale/offset/holding
    defaults, drive output channels to their holding values, and register
    device interfaces with the device manager *dm*."""
    Device.__init__(self, dm, config, name)
    self._DGLock = Mutex(Qt.QMutex.Recursive)  ## protects access to _DGHolding, _DGConfig
    ## Do some sanity checks here on the configuration
    # 'channels' key is expected; for backward compatibility we just use the top-level config.
    config = config.get('channels', config)
    self._DGConfig = config
    self._DGHolding = {}
    for ch in config:
        # scale/offset only make sense for analog ('a*') channel types.
        if config[ch]['type'][0] != 'a' and ('scale' in config[ch] or 'offset' in config[ch]):
            raise Exception("Scale/offset only allowed for analog channels. (%s.%s)" % (name, ch))
        if 'scale' not in config[ch]:
            config[ch]['scale'] = 1  ## must be int to prevent accidental type conversion on digital data
        if 'offset' not in config[ch]:
            config[ch]['offset'] = 0
        if config[ch].get('invert', False):
            if config[ch]['type'][0] != 'd':
                raise Exception("Inversion only allowed for digital channels. (%s.%s)" % (name, ch))
            # scale=-1/offset=-1 flips 0<->1 under the (data+offset)*scale mapping.
            config[ch]['scale'] = -1
            config[ch]['offset'] = -1
        # print "chan %s scale %f" % (ch, config[ch]['scale'])
        if 'holding' not in config[ch]:
            config[ch]['holding'] = 0.0
        ## It is possible to create virtual channels with no real hardware connection
        if 'device' not in config[ch]:
            # print "Assuming channel %s is virtual:" % ch, config[ch]
            config[ch]['virtual'] = True
        ## set holding value for all output channels now
        if config[ch]['type'][1] == 'o':
            self.setChanHolding(ch, config[ch]['holding'])
            # self._DGHolding[ch] = config[ch]['holding']
    dm.declareInterface(name, ['daqChannelGroup'], self)
    for ch in config:
        dm.declareInterface(name + "." + ch, ['daqChannel'], ChannelHandle(self, ch))
def mapFromDAQ(self, channel, data):
    """Translate raw DAQ data for *channel* into scaled physical values."""
    mapping = self.getMapping(chans=[channel])
    return mapping.mapFromDaq(channel, data)
def createTask(self, cmd, parentTask):
    """Factory hook: wrap *cmd* in a DAQGenericTask bound to this device."""
    return DAQGenericTask(self, cmd, parentTask)
def setChanHolding(self, channel, level=None, block=True, mapping=None):
    """Define and set the holding values for this channel
    If *block* is True, then return only after the value has been set on the DAQ.
    If *block* is False, then simply schedule the change to take place when the DAQ is available.
    *mapping* is a DataMapping object which tells the device how to translate *level* into
    a voltage on the physical DAQ channel. If *mapping* is None, then it will use self.getMapping(*channel*)
    to determine the correct mapping.
    """
    prof = Profiler(disabled=True)
    with self._DGLock:
        prof('lock')
        # print "set holding", channel, level
        ### Set correct holding level here...
        if level is None:
            # level=None means "re-apply the remembered holding value".
            level = self._DGHolding[channel]
            if level is None:
                raise Exception("No remembered holding level for channel %s" % channel)
        else:
            self._DGHolding[channel] = level
        if mapping is None:
            mapping = self.getMapping(channel)
        val = mapping.mapToDaq(channel, self._DGHolding[channel])
        prof('map')
        # print "Set holding for channel %s: %f => %f" % (channel, self._DGHolding[channel], val)
        chConf = self._DGConfig[channel]
        isVirtual = chConf.get('virtual', False)
        if not isVirtual:
            daq = chConf['device']
            chan = chConf['channel']
            daqDev = self.dm.getDevice(daq)
        prof('get dev')
    ## release DGLock before setChannelValue
    if not isVirtual:
        if block:
            daqDev.setChannelValue(chan, val, block=True)
        else:
            daqDev.setChannelValue(chan, val, block=False,
                                   delaySetIfBusy=True)  ## Note: If a task is running, this will not be set until it completes.
    prof('set channel value')
    self.sigHoldingChanged.emit(channel, level)
    prof('emit')
def getChannelValue(self, channel, block=True, raw=False):
    """Read the current value of *channel* from its DAQ device; the value is
    converted to physical units via mapFromDAQ unless raw=True."""
    with self._DGLock:
        daq = self._DGConfig[channel]['device']
        chan = self._DGConfig[channel]['channel']
        mode = self._DGConfig[channel].get('mode', None)
    ## release _DGLock before getChannelValue
    daqDev = self.dm.getDevice(daq)
    val = daqDev.getChannelValue(chan, mode=mode, block=block)
    if not raw:
        return self.mapFromDAQ(channel, val)
    else:
        return val
def deviceInterface(self, win):
    """Return a widget with a UI to put in the device rack"""
    # *win* (the manager window) is part of the device API but unused here.
    return DAQDevGui(self)
def getDAQName(self, channel):
    """Return the name of the DAQ device that backs *channel*."""
    with self._DGLock:
        return self._DGConfig[channel]['device']
def setChanScale(self, ch, scale, update=True, block=True):
    """Change the scale factor for channel *ch*; for output channels the
    holding value is re-applied (unless update=False) so the hardware output
    reflects the new scaling."""
    with self._DGLock:
        self._DGConfig[ch]['scale'] = scale
        if update and self.isOutput(ch):  ## only set Holding for output channels
            self.setChanHolding(ch, block=block)
def getChanScale(self, chan):
    """Return the configured scale factor for *chan*.

    Scale defaults to 1.0 and may be overridden in the configuration.
    """
    with self._DGLock:
        return self._DGConfig[chan].get('scale', 1.0)
def getChanUnits(self, ch):
    """Return the units string configured for channel *ch*, or None if unset."""
    with self._DGLock:
        # dict.get already yields None for a missing key; no explicit
        # membership test needed.
        return self._DGConfig[ch].get('units')
def listChannels(self):
    """Return a snapshot of the channel configuration as {name: config-copy}.

    Shallow copies keep callers from mutating the live _DGConfig entries.
    """
    with self._DGLock:
        # dict comprehension instead of dict([(k, v) ...]) -- same result,
        # no intermediate list of tuples.
        return {ch: conf.copy() for ch, conf in self._DGConfig.items()}
def __init__(self, dev, cmd, parentTask):
    # Per-run task wrapper around a DAQGeneric device.
    DeviceTask.__init__(self, dev, cmd, parentTask)
    self.daqTasks = {}      # channel -> DAQ task acquiring/generating its samples
    self.initialState = {}  # channel -> value recorded before the task ran
    self._DAQCmd = cmd
    ## Stores the list of channels that will generate or acquire buffered samples
    self.bufferedChannels = []
def configure(self):
    ## Record initial state or set initial value
    ## NOTE:
    ## Subclasses should call this function only _after_ making any changes that will affect the mapping between
    ## physical values and channel voltages.
    prof = Profiler('DAQGenericTask.configure', disabled=True)
    # self.daqTasks = {}
    self.mapping = self.dev.getMapping(chans=list(
        self._DAQCmd.keys()))  ## remember the mapping so we can properly translate data after it has been returned
    self.initialState = {}
    self.holdingVals = {}
    for ch in self._DAQCmd:
        # dev = self.dev.dm.getDevice(self.dev._DGConfig[ch]['channel'][0])
        dev = self.dev.dm.getDevice(self.dev.getDAQName(ch))
        prof.mark(ch + ' get dev')
        if 'preset' in self._DAQCmd[ch]:
            # 'preset': drive the channel to a one-off value before the task starts.
            with self.dev._DGLock:
                daqChan = self.dev._DGConfig[ch]['channel']
            # dev.setChannelValue(self.dev._DGConfig[ch]['channel'][1], self._DAQCmd[ch]['preset'])
            preVal = self.mapping.mapToDaq(ch, self._DAQCmd[ch]['preset'])
            dev.setChannelValue(daqChan, preVal)
            prof.mark(ch + ' preset')
        elif 'holding' in self._DAQCmd[ch]:
            # 'holding': update the device's remembered holding level instead.
            self.dev.setChanHolding(ch, self._DAQCmd[ch]['holding'])
            prof.mark(ch + ' set holding')
        if 'recordInit' in self._DAQCmd[ch] and self._DAQCmd[ch]['recordInit']:
            self.initialState[ch] = self.dev.getChannelValue(ch)
            prof.mark(ch + ' record init')
    for ch in self.dev._DGConfig:
        ## record current holding value for all output channels (even those that were not buffered for this task)
        with self.dev._DGLock:
            chanType = self.dev._DGConfig[ch]['type']
        if chanType in ['ao', 'do']:
            self.holdingVals[ch] = self.dev.getChanHolding(ch)
            prof.mark(ch + ' record holding')
    prof.finish()
def getChanUnits(self, chan):
    """Units for *chan*: a per-task override in the command dict takes
    precedence over the device's configured units."""
    override = self._DAQCmd[chan]
    if 'units' in override:
        return override['units']
    return self.dev.getChanUnits(chan)
def isDone(self):
    """Always True: completion is tracked by the underlying DAQ task itself."""
    return True
def getResult(self):
    """Collect buffered data from the DAQ tasks and return it as one MetaArray
    (Channel x Time), or None when no channels were buffered.

    Each channel's raw data is mapped back to physical units and annotated
    with its units; per-channel DAQ state and the stripped protocol command
    are attached as metadata.
    """
    ## Access data recorded from DAQ task
    ## create MetaArray and fill with MC state info
    ## Collect data and info for each channel in the command
    result = {}
    for ch in self.bufferedChannels:
        result[ch] = self.daqTasks[ch].getData(self.dev._DGConfig[ch]['channel'])
        result[ch]['data'] = self.mapping.mapFromDaq(ch, result[ch]['data'])  ## scale/offset/invert
        result[ch]['units'] = self.getChanUnits(ch)
    if len(result) > 0:
        # Timing metadata is identical across channels, so take it from any one.
        meta = result[list(result.keys())[0]]['info']
        rate = meta['rate']
        nPts = meta['numPts']
        ## Create an array of time values
        timeVals = np.linspace(0, float(nPts - 1) / float(rate), nPts)
        ## Concatenate all channels together into a single array, generate MetaArray info
        chanList = [np.atleast_2d(result[x]['data']) for x in result]
        cols = [(x, result[x]['units']) for x in result]
        # print cols
        try:
            arr = np.concatenate(chanList)
        except:
            # Shape mismatch between channels: dump diagnostics, then re-raise.
            print(chanList)
            print([a.shape for a in chanList])
            raise
        daqState = OrderedDict()
        for ch in self.dev._DGConfig:
            if ch in result:
                daqState[ch] = result[ch]['info']
            else:
                daqState[ch] = {}
            ## record current holding value for all output channels (even those that were not buffered for this task)
            if self.dev._DGConfig[ch]['type'] in ['ao', 'do']:
                daqState[ch]['holding'] = self.holdingVals[ch]
        info = [axis(name='Channel', cols=cols), axis(name='Time', units='s', values=timeVals)] + [
            {'DAQ': daqState}]
        protInfo = self._DAQCmd.copy()  ## copy everything but the command arrays and low-level configuration info
        for ch in protInfo:
            protInfo[ch].pop('command', None)
            protInfo[ch].pop('lowLevelConf', None)
        info[-1]['Protocol'] = protInfo
        marr = MetaArray(arr, info=info)
        return marr
    else:
        return None
def __init__(self, dev):
    # Device-rack configuration widget: one row of controls per channel
    # (scale/offset/holding/invert shown as appropriate for the channel type).
    self.dev = dev
    Qt.QWidget.__init__(self)
    self.layout = Qt.QVBoxLayout()
    self.setLayout(self.layout)
    chans = self.dev.listChannels()
    self.widgets = {}
    # self.uis = {}
    self.defaults = {}
    for ch in chans:
        wid = Qt.QWidget()
        ui = Ui_Form()
        ui.setupUi(wid)
        self.layout.addWidget(wid)
        ui.analogCtrls = [ui.scaleDefaultBtn, ui.scaleSpin, ui.offsetDefaultBtn, ui.offsetSpin, ui.scaleLabel,
                          ui.offsetLabel]
        # ui.channel = ch
        # Tag every sub-widget with its channel name so signal handlers can
        # tell which channel fired (see holdingSpinChanged et al.).
        for s in dir(ui):
            i = getattr(ui, s)
            if isinstance(i, Qt.QWidget):
                i.channel = ch
        self.widgets[ch] = ui
        ui.nameLabel.setText(str(ch))
        ui.channelCombo.addItem("%s (%s)" % (ch, chans[ch]['channel']))
        holding = chans[ch].get('holding', 0)
        if chans[ch]['type'] in ['ao', 'ai']:
            # Analog channels: direction is fixed by type; show scale/offset.
            ui.inputRadio.setEnabled(False)
            ui.outputRadio.setEnabled(False)
            ui.invertCheck.hide()
            scale = chans[ch].get('scale', 1)
            units = chans[ch].get('units', 'V')
            offset = chans[ch].get('offset', 0)
            ui.offsetSpin.setOpts(suffix='V', siPrefix=True, dec=True, step=1.0, minStep=1e-4)
            ui.offsetSpin.setValue(offset)
            ui.offsetSpin.sigValueChanged.connect(self.offsetSpinChanged)
            ui.offsetDefaultBtn.setText("Default (%s)" % siFormat(offset, suffix='V'))
            ui.offsetDefaultBtn.clicked.connect(self.offsetDefaultBtnClicked)
            if chans[ch]['type'] == 'ao':
                ui.outputRadio.setChecked(True)
                ui.scaleDefaultBtn.setText("Default (%s)" % siFormat(scale, suffix='V/' + units))
                ui.scaleSpin.setOpts(suffix='V/' + units, siPrefix=True, dec=True, step=1.0, minStep=1e-9)
                ui.holdingSpin.setOpts(suffix=units, siPrefix=True, step=0.01)
                ui.holdingSpin.setValue(holding)
                ui.holdingSpin.sigValueChanged.connect(self.holdingSpinChanged)
            elif chans[ch]['type'] == 'ai':
                ui.inputRadio.setChecked(True)
                ui.holdingLabel.hide()
                ui.holdingSpin.hide()
                ui.scaleDefaultBtn.setText("Default (%s)" % siFormat(scale, suffix=units + '/V'))
                # ui.scaleDefaultBtn.clicked.connect(self.scaleDefaultBtnClicked)
                ui.scaleSpin.setOpts(suffix=units + '/V', siPrefix=True, dec=True)
                ui.scaleSpin.setValue(scale)
                ui.scaleDefaultBtn.clicked.connect(self.scaleDefaultBtnClicked)
                ui.scaleSpin.sigValueChanged.connect(self.scaleSpinChanged)
                self.defaults[ch] = {
                    'scale': scale,
                    'offset': offset}
        elif chans[ch]['type'] in ['do', 'di']:
            # Digital channels: hide the analog-only controls, show invert.
            for item in ui.analogCtrls:
                item.hide()
            if chans[ch].get('invert', False):
                ui.invertCheck.setChecked(True)
            if chans[ch]['type'] == 'do':
                ui.outputRadio.setChecked(True)
                ui.holdingSpin.setOpts(bounds=[0, 1], step=1)
                ui.holdingSpin.setValue(holding)
                ui.holdingSpin.sigValueChanged.connect(self.holdingSpinChanged)
            elif chans[ch]['type'] == 'di':
                ui.inputRadio.setChecked(True)
                ui.holdingLabel.hide()
                ui.holdingSpin.hide()
            ui.invertCheck.toggled.connect(self.invertToggled)
    # Qt.QObject.connect(self.dev, Qt.SIGNAL('holdingChanged'), self.holdingChanged)
    self.dev.sigHoldingChanged.connect(self.holdingChanged)
def holdingSpinChanged(self, spin):
    """Push a holding value edited in the UI spin box to the device (non-blocking)."""
    self.dev.setChanHolding(spin.channel, spin.value(), block=False)
def offsetSpinChanged(self, spin):
    """Push an offset edited in the UI spin box to the device (non-blocking)."""
    self.dev.setChanOffset(spin.channel, spin.value(), block=False)
def scaleDefaultBtnClicked(self):
    # Restore the scale spin box to its configured default for the channel
    # tagged on the button that emitted this signal (see __init__ tagging loop).
    ch = self.sender().channel
    self.widgets[ch].scaleSpin.setValue(self.defaults[ch]['scale'])
def deconstruct_packet(packet):
    """
    Replaces every bytearray in packet with a numbered placeholder.

    :param packet: packet dict whose optional 'data' entry may contain
        bytearrays nested inside lists/dicts.
    :return: dict with keys 'packet' (placeholder-substituted shallow copy,
        with an 'attachments' count added) and 'buffers' (the extracted
        bytearrays, in placeholder-number order).
    """
    buffers = []

    def _deconstruct_packet(data):
        # isinstance() instead of `type(...) is` so subclasses of
        # bytearray/list/dict are handled as well.
        if isinstance(data, bytearray):
            place_holder = {
                '_placeholder': True,
                'num': len(buffers)
            }
            buffers.append(data)
            return place_holder
        if isinstance(data, list):
            return [_deconstruct_packet(d) for d in data]
        if isinstance(data, dict):
            return {k: _deconstruct_packet(v) for k, v in data.items()}
        return data

    # Shallow copy: only 'data'/'attachments' are rewritten on the copy.
    pack = copy.copy(packet)
    pack['data'] = _deconstruct_packet(packet.get('data', None))
    pack['attachments'] = len(buffers)
    return {
        'packet': pack,
        'buffers': buffers
    }
def reconstruct_packet(packet, buffers):
    """Inverse of deconstruct_packet: swap numbered placeholders back to buffers.

    Mutates *packet* in place (nested dicts/lists keep their identity) and
    returns it; the 'attachments' count is removed once resolved.
    """
    def _reconstruct_packet(data):
        if isinstance(data, dict):
            if '_placeholder' in data:
                return buffers[data['num']]
            for k, v in data.items():
                data[k] = _reconstruct_packet(v)
            return data
        if isinstance(data, list):
            # range(), not the py2-only xrange() the original used, which
            # raises NameError on Python 3; list is updated in place.
            for i, item in enumerate(data):
                data[i] = _reconstruct_packet(item)
            return data
        return data

    packet['data'] = _reconstruct_packet(packet['data'])
    del packet['attachments']
    return packet
def pytest_sessionstart(session):
    # Launch the application server in a separate process before any test runs;
    # the handle is kept in a module-level global so sessionfinish can stop it.
    global server_process
    server_process = Process(target=start)
    server_process.start()
def pytest_sessionfinish(session):
    # Stop the server launched in pytest_sessionstart; join() waits for it to
    # exit so no orphan process is left behind.
    if server_process is not None:
        server_process.terminate()
        server_process.join()
def register_type(cls, typename, obj):
    """Adds the new class to the dict of understood types.

    A later registration under the same *typename* overwrites the earlier one.
    """
    cls.types[typename] = obj
def visit_Num(self, node):
    """Emit a numeric literal; json.dumps yields a valid JS number token."""
    literal = json.dumps(node.n)
    self.output(literal)
def visit_List(self, node):
    """Emit a list literal: elements rendered between '[' and ']', comma-separated."""
    self.group(
        node.elts,
        prefix='[',
        infix=', ',
        suffix=']',
    )
def __init__(self, client, config, serializer, deserializer):
    # Operations-class plumbing: keep references to the shared pipeline
    # client, client configuration, and the (de)serializers each operation uses.
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
def begin_delete(
self,
resource_group_name, # type: str
route_filter_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Delete LROs have no response body to deserialize; only invoke the
    # optional `cls` hook (captured from the operation's kwargs), else None.
    if cls:
        return cls(pipeline_response, None, {})
def get(
self,
resource_group_name, # type: str
route_filter_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _create_or_update_initial(
self,
resource_group_name, # type: str
route_filter_name, # type: str
route_filter_parameters, # type: "_models.RouteFilter"
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_create_or_update(
self,
resource_group_name, # type: str
route_filter_name, # type: str
route_filter_parameters, # type: "_models.RouteFilter"
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # LRO callback: deserialize the final RouteFilter body; `cls` is an
    # optional custom-deserializer hook captured from the operation's kwargs.
    deserialized = self._deserialize('RouteFilter', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
def update_tags(
self,
resource_group_name, # type: str
route_filter_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Build the GET request for one page of the resource-group listing;
    # follow-up pages reuse next_link verbatim (it already embeds the query).
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list_by_resource_group.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
def get_next(next_link=None):
    # Fetch one page of results synchronously; anything other than HTTP 200 is
    # mapped to a typed error via error_map or raised as HttpResponseError.
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response
def list(
self,
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Build the GET request for one page of the subscription-wide listing;
    # follow-up pages reuse next_link verbatim (it already embeds the query).
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
def get_next(next_link=None):
    # Fetch one page of results synchronously; anything other than HTTP 200 is
    # mapped to a typed error via error_map or raised as HttpResponseError.
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response
def join_web_url(self):
    """Meeting URL associated to the call. May not be available for a peerToPeer call record type."""
    props = self.properties
    return props.get("joinWebUrl")
def __init__(self, location, *args, **kwargs):
    # Accept a single URI, a list/tuple of URIs, or None. When URIs are given,
    # each is validated/serialized through URI() and joined into a single
    # comma-separated Location header (existing headers are preserved).
    if not isinstance(location, (type(None), list, tuple)):
        location = [location]
    if location is not None:
        kwargs.setdefault('headers', {})['Location'] = ', '.join(str(URI(uri)) for uri in location)
    super(RedirectStatus, self).__init__(*args, **kwargs)
def __init__(self, *args, **kwargs):
    # don't set location: passing None through to the RedirectStatus
    # initializer suppresses the Location header for 304 responses.
    super(NOT_MODIFIED, self).__init__(None, *args, **kwargs)
def __init__(self, client, config, serializer, deserializer) -> None:
    # Operations-class plumbing: keep references to the shared pipeline
    # client, client configuration, and the (de)serializers each operation uses.
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
def prepare_request(next_link=None):
    # Build the GET request for one page of the per-load-balancer listing;
    # follow-up pages reuse next_link verbatim (it already embeds the query).
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
def get_long_running_output(pipeline_response):
    """Invoke the caller-supplied callback, if any; otherwise return None."""
    return cls(pipeline_response, None, {}) if cls else None
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    """Deserialize the final LRO payload into an InboundNatRule model."""
    deserialized = self._deserialize('InboundNatRule', pipeline_response)
    # A caller-supplied callback, when present, wraps the result.
    return cls(pipeline_response, deserialized, {}) if cls else deserialized
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, client, config, serializer, deserializer) -> None:
    """Keep references to the pipeline client, config and (de)serializers."""
    # Assignments are independent; order is irrelevant.
    self._config = config
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
3526,
2256,
3526,
986,
1335285972
] |
def register_object_type(cls=None, vendor_id=0):
    """Register an Object subclass under (objectType, vendor_id).

    May be called directly with a class, or with no class so it can be
    used as a decorator: ``@register_object_type(vendor_id=...)``.

    :param cls: an Object subclass, or None to return a decorator
    :param vendor_id: vendor identifier the registration applies to
    :returns: the class, so this works as a class decorator
    :raises RuntimeError: if cls is not derived from Object
    """
    if _debug: register_object_type._debug("register_object_type %s vendor_id=%s", repr(cls), vendor_id)

    # if cls isn't given, return a decorator
    if not cls:
        def _register(xcls):
            # fix: log the class actually being registered -- 'cls' is
            # always None in this branch, so repr(cls) logged "None"
            if _debug: register_object_type._debug("_register %s (vendor_id=%s)", repr(xcls), vendor_id)
            return register_object_type(xcls, vendor_id)
        if _debug: register_object_type._debug(" - returning decorator")
        return _register

    # make sure it's an Object derived class
    if not issubclass(cls, Object):
        raise RuntimeError("Object derived class required")

    # build a property dictionary by going through the class and all its parents
    _properties = {}
    for c in cls.__mro__:
        if _debug: register_object_type._debug(" - c: %r", c)
        for prop in getattr(c, 'properties', []):
            # the first class in the MRO that defines a property wins
            if prop.identifier not in _properties:
                _properties[prop.identifier] = prop

    # if the object type hasn't been provided, make an immutable one
    if 'objectType' not in _properties:
        _properties['objectType'] = ReadableProperty('objectType', ObjectType, cls.objectType, mutable=False)

    # store this in the class
    cls._properties = _properties

    # now save this in all our types
    registered_object_types[(cls.objectType, vendor_id)] = cls

    # return the class as a decorator
    return cls
243,
121,
243,
107,
1436992431
] |
def get_object_class(object_type, vendor_id=0):
    """Return the class associated with an object type."""
    if _debug: get_object_class._debug("get_object_class %r vendor_id=%r", object_type, vendor_id)

    # try the vendor-specific registration first
    cls = registered_object_types.get((object_type, vendor_id))
    if _debug: get_object_class._debug(" - direct lookup: %s", repr(cls))

    # fall back to the standard (vendor 0) class for the type
    if cls is None and vendor_id:
        cls = registered_object_types.get((object_type, 0))
        if _debug: get_object_class._debug(" - default lookup: %s", repr(cls))

    return cls
243,
121,
243,
107,
1436992431
] |
def get_datatype(object_type, propid, vendor_id=0):
    """Return the datatype for the property of an object."""
    if _debug: get_datatype._debug("get_datatype %r %r vendor_id=%r", object_type, propid, vendor_id)

    # without a registered class there is no property table to consult
    cls = get_object_class(object_type, vendor_id)
    if cls is None:
        return None

    # unknown property identifiers also yield None
    prop = cls._properties.get(propid)
    if prop is None:
        return None

    return prop.datatype
243,
121,
243,
107,
1436992431
] |
def __init__(self, identifier, datatype, default=None, optional=True, mutable=True):
    """Bind a property identifier to a datatype with access flags.

    :raises TypeError: if datatype is not part of the BACpypes type system
    """
    if _debug:
        Property._debug("__init__ %s %s default=%r optional=%r mutable=%r",
            identifier, datatype, default, optional, mutable
            )

    self.identifier = identifier
    self.datatype = datatype

    # the datatype must come from the BACpypes type hierarchy
    if not issubclass(datatype, (Atomic, Sequence, Choice, Array, List, AnyAtomic)):
        raise TypeError("invalid datatype for property: %s" % (identifier,))

    self.optional = optional
    self.mutable = mutable
    self.default = default
243,
121,
243,
107,
1436992431
] |
def WriteProperty(self, obj, value, arrayIndex=None, priority=None, direct=False):
    """Validate *value* against this property's datatype and store it on *obj*.

    When ``direct`` is true all validation is bypassed (used for internal
    initialization).  Otherwise the value is checked for presence,
    mutability, and datatype conformance before being written into
    ``obj._values``.  Registered property monitors are called with the old
    and new values after a successful write.

    :param obj: the Object instance being written to
    :param value: the new value (may be None for optional properties)
    :param arrayIndex: index for array properties; index 0 addresses the
        array length
    :param priority: accepted for signature compatibility; not used here
    :param direct: bypass validation when true
    :raises ValueError: when a required value is missing
    :raises ExecutionError: immutable property, bad array index/usage
    :raises InvalidParameterDatatype: when the value fails type checks
    :raises RuntimeError: when writing an element of an uninitialized array
    """
    if _debug:
        Property._debug("WriteProperty(%s) %s %r arrayIndex=%r priority=%r direct=%r",
            self.identifier, obj, value, arrayIndex, priority, direct
            )

    if direct:
        if _debug: Property._debug(" - direct write")
    else:
        # see if it must be provided
        if not self.optional and value is None:
            raise ValueError("%s value required" % (self.identifier,))

        # see if it can be changed
        if not self.mutable:
            if _debug: Property._debug(" - property is immutable")
            raise ExecutionError(errorClass='property', errorCode='writeAccessDenied')

        # if changing the length of the array, the value is unsigned
        # (arrayIndex 0 is the array-length slot per BACnet semantics)
        if arrayIndex == 0:
            if not Unsigned.is_valid(value):
                raise InvalidParameterDatatype("length of %s must be unsigned" % (
                    self.identifier,
                    ))

        # if it's atomic, make sure it's valid
        elif issubclass(self.datatype, AnyAtomic):
            if _debug: Property._debug(" - property is any atomic, checking value")
            if not isinstance(value, Atomic):
                raise InvalidParameterDatatype("%s must be an atomic instance" % (
                    self.identifier,
                    ))

        elif issubclass(self.datatype, Atomic):
            if _debug: Property._debug(" - property is atomic, checking value")
            if not self.datatype.is_valid(value):
                raise InvalidParameterDatatype("%s must be of type %s" % (
                    self.identifier, self.datatype.__name__,
                    ))

        # if it's an array, make sure it's valid regarding arrayIndex provided
        elif issubclass(self.datatype, Array):
            if _debug: Property._debug(" - property is array, checking subtype and index")

            # changing a single element
            if arrayIndex is not None:
                # if it's atomic, make sure it's valid
                if issubclass(self.datatype.subtype, Atomic):
                    if _debug: Property._debug(" - subtype is atomic, checking value")
                    if not self.datatype.subtype.is_valid(value):
                        raise InvalidParameterDatatype("%s must be of type %s" % (
                            self.identifier, self.datatype.__name__,
                            ))

                # constructed type
                elif not isinstance(value, self.datatype.subtype):
                    raise InvalidParameterDatatype("%s must be of type %s" % (
                        self.identifier, self.datatype.subtype.__name__
                        ))

            # replacing the array
            elif isinstance(value, list):
                # check validity regarding subtype
                for item in value:
                    # if it's atomic, make sure it's valid
                    if issubclass(self.datatype.subtype, Atomic):
                        if _debug: Property._debug(" - subtype is atomic, checking value")
                        if not self.datatype.subtype.is_valid(item):
                            raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                                self.identifier, self.datatype.subtype.__name__,
                                ))

                    # constructed type
                    elif not isinstance(item, self.datatype.subtype):
                        raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                            self.identifier, self.datatype.subtype.__name__
                            ))

                # value is mutated into a new array
                # (the plain list is wrapped in the Array datatype instance)
                value = self.datatype(value)

        # if it's an array, make sure it's valid regarding arrayIndex provided
        elif issubclass(self.datatype, List):
            if _debug: Property._debug(" - property is list, checking subtype")

            # changing a single element
            # (BACnet lists are not indexable like arrays)
            if arrayIndex is not None:
                raise ExecutionError(errorClass='property', errorCode='propertyIsNotAnArray')

            # replacing the array
            if not isinstance(value, list):
                raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                    self.identifier, self.datatype.subtype.__name__
                    ))

            # check validity regarding subtype
            for item in value:
                # if it's atomic, make sure it's valid
                if issubclass(self.datatype.subtype, Atomic):
                    if _debug: Property._debug(" - subtype is atomic, checking value")
                    if not self.datatype.subtype.is_valid(item):
                        raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                            self.identifier, self.datatype.subtype.__name__,
                            ))

                # constructed type
                elif not isinstance(item, self.datatype.subtype):
                    raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                        self.identifier, self.datatype.subtype.__name__
                        ))

            # value is mutated into a new list
            value = self.datatype(value)

        # some kind of constructed data
        elif not isinstance(value, self.datatype):
            if _debug: Property._debug(" - property is not atomic and wrong type")
            raise InvalidParameterDatatype("%s must be of type %s" % (
                self.identifier, self.datatype.__name__,
                ))

    # local check if the property is monitored
    is_monitored = self.identifier in obj._property_monitors

    if arrayIndex is not None:
        # element writes are only valid for Array-typed properties
        if not issubclass(self.datatype, Array):
            raise ExecutionError(errorClass='property', errorCode='propertyIsNotAnArray')

        # check the array
        arry = obj._values[self.identifier]
        if arry is None:
            raise RuntimeError("%s uninitialized array" % (self.identifier,))
        if is_monitored:
            # snapshot the array so monitors can see the pre-write state
            old_value = _copy(arry)

        # seems to be OK, let the array object take over
        if _debug: Property._debug(" - forwarding to array")
        try:
            arry[arrayIndex] = value
        except IndexError:
            raise ExecutionError(errorClass='property', errorCode='invalidArrayIndex')
        except TypeError:
            raise ExecutionError(errorClass='property', errorCode='valueOutOfRange')

        # check for monitors, call each one with the old and new value
        if is_monitored:
            for fn in obj._property_monitors[self.identifier]:
                if _debug: Property._debug(" - monitor: %r", fn)
                fn(old_value, arry)

    else:
        if is_monitored:
            old_value = obj._values.get(self.identifier, None)

        # seems to be OK
        obj._values[self.identifier] = value

        # check for monitors, call each one with the old and new value
        if is_monitored:
            for fn in obj._property_monitors[self.identifier]:
                if _debug: Property._debug(" - monitor: %r", fn)
                fn(old_value, value)
243,
121,
243,
107,
1436992431
] |
def __init__(self, identifier, datatype, default=None, optional=True, mutable=True):
    """Initialize a standard (enumerated) property.

    :raises ConfigurationError: if not instantiated through one of the
        access-level subclasses, or if the identifier is not a standard
        property enumeration
    """
    if _debug:
        StandardProperty._debug("__init__ %s %s default=%r optional=%r mutable=%r",
            identifier, datatype, default, optional, mutable
            )

    # must be built through one of the access-level subclasses
    if not isinstance(self, (OptionalProperty, ReadableProperty, WritableProperty)):
        raise ConfigurationError(self.__class__.__name__ + " must derive from OptionalProperty, ReadableProperty, or WritableProperty")

    # the identifier must name a standard property enumeration
    if identifier not in PropertyIdentifier.enumerations:
        raise ConfigurationError("unknown standard property identifier: %s" % (identifier,))

    # hand off to the base property initializer
    Property.__init__(self, identifier, datatype, default, optional, mutable)
243,
121,
243,
107,
1436992431
] |
def __init__(self, identifier, datatype, default=None, optional=True, mutable=False):
    """An optional standard property: may be absent, read-only by default."""
    if _debug:
        OptionalProperty._debug("__init__ %s %s default=%r optional=%r mutable=%r", identifier, datatype, default, optional, mutable)

    # hand off to the standard property initializer
    StandardProperty.__init__(self, identifier, datatype, default, optional, mutable)
243,
121,
243,
107,
1436992431
] |
def __init__(self, identifier, datatype, default=None, optional=False, mutable=False):
    """A required, read-only standard property."""
    if _debug:
        ReadableProperty._debug("__init__ %s %s default=%r optional=%r mutable=%r", identifier, datatype, default, optional, mutable)

    # hand off to the standard property initializer
    StandardProperty.__init__(self, identifier, datatype, default, optional, mutable)
243,
121,
243,
107,
1436992431
] |
def __init__(self, identifier, datatype, default=None, optional=False, mutable=True):
    """A required, writable standard property."""
    if _debug:
        WritableProperty._debug("__init__ %s %s default=%r optional=%r mutable=%r", identifier, datatype, default, optional, mutable)

    # hand off to the standard property initializer
    StandardProperty.__init__(self, identifier, datatype, default, optional, mutable)
243,
121,
243,
107,
1436992431
] |
def WriteProperty(self, obj, value, arrayIndex=None, priority=None, direct=False):
    """Coerce *value* into an (objectType, instance) pair, then delegate.

    :raises ValueError: when a tuple's type does not match the object's type
    :raises TypeError: when the value is not None, an int, or a 2-tuple
    """
    if _debug: ObjectIdentifierProperty._debug("WriteProperty %r %r arrayIndex=%r priority=%r", obj, value, arrayIndex, priority)

    # None passes straight through; other forms are normalized or rejected
    if value is not None:
        if isinstance(value, int):
            # a bare instance number is paired with the object's own type
            value = (obj.objectType, value)
        elif isinstance(value, tuple) and len(value) == 2:
            if value[0] != obj.objectType:
                raise ValueError("%s required" % (obj.objectType,))
        else:
            raise TypeError("object identifier")

    return Property.WriteProperty(self, obj, value, arrayIndex, priority, direct)
243,
121,
243,
107,
1436992431
] |
def __init__(self, **kwargs):
    """Create an object, with default property values as needed.

    Keyword arguments map property identifiers to initial values; any
    keyword that does not name a property of this object class raises
    PropertyError.

    :raises PropertyError: for an unrecognized keyword argument
    """
    if _debug: Object._debug("__init__(%s) %r", self.__class__.__name__, kwargs)

    # map the python names into property names and make sure they
    # are appropriate for this object
    initargs = {}
    for key, value in kwargs.items():
        if key not in self._properties:
            raise PropertyError(key)
        initargs[key] = value

    # object is detached from an application until it is added
    self._app = None

    # start with a clean dict of values
    self._values = {}

    # empty list of property monitors
    self._property_monitors = defaultdict(list)

    # initialize the object
    for propid, prop in self._properties.items():
        if propid in initargs:
            if _debug: Object._debug(" - setting %s from initargs", propid)
            # defer to the property object for error checking
            prop.WriteProperty(self, initargs[propid], direct=True)
        elif prop.default is not None:
            if _debug: Object._debug(" - setting %s from default", propid)
            # default values bypass property interface
            self._values[propid] = prop.default
        else:
            if not prop.optional:
                if _debug: Object._debug(" - %s value required", propid)
            # fix: every property gets a slot, not just non-optional ones;
            # later property access (e.g. array writes) looks the key up in
            # _values and would KeyError for optional unset properties
            self._values[propid] = None

    if _debug: Object._debug(" - done __init__")
243,
121,
243,
107,
1436992431
] |
def __getattr__(self, attr):
    """Redirect attribute reads to the matching property object."""
    if _debug: Object._debug("__getattr__ %r", attr)

    # private names, capitalized names (functions), and the debug helper
    # use the normal attribute machinery
    if attr.startswith('_') or attr[0].isupper() or (attr == 'debug_contents'):
        return object.__getattribute__(self, attr)

    # everything else is resolved through a property object
    prop = self._attr_to_property(attr)
    if _debug: Object._debug(" - deferring to %r", prop)

    return prop.ReadProperty(self)
243,
121,
243,
107,
1436992431
] |
def add_property(self, prop):
    """Attach a Property instance to this object only.

    Copy-on-write: the shared class-level property table is duplicated
    first, so other instances of this class are unaffected.
    """
    if _debug: Object._debug("add_property %r", prop)

    # detach this instance's property table from the class
    self._properties = _copy(self._properties)

    # record the property and seed its value slot with the default
    self._properties[prop.identifier] = prop
    self._values[prop.identifier] = prop.default
243,
121,
243,
107,
1436992431
] |
def ReadProperty(self, propid, arrayIndex=None):
    """Read a property value, delegating to its Property instance.

    :raises PropertyError: when propid is not a property of this object
    """
    if _debug: Object._debug("ReadProperty %r arrayIndex=%r", propid, arrayIndex)

    prop = self._properties.get(propid)
    if prop is None:
        raise PropertyError(propid)

    # the property object does the actual read
    return prop.ReadProperty(self, arrayIndex)
243,
121,
243,
107,
1436992431
] |
def get_datatype(self, propid):
    """Return the datatype for the property of an object.

    :raises PropertyError: when propid is not a property of this object
    """
    if _debug: Object._debug("get_datatype %r", propid)

    prop = self._properties.get(propid)
    if prop is None:
        raise PropertyError(propid)

    return prop.datatype
243,
121,
243,
107,
1436992431
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.