text stringlengths 81 112k |
|---|
Decode SLIP message.
def decode(raw):
    """Reverse SLIP escaping, restoring the original END and ESC bytes."""
    unescaped = raw.replace(bytes([SLIP_ESC, SLIP_ESC_END]), bytes([SLIP_END]))
    return unescaped.replace(bytes([SLIP_ESC, SLIP_ESC_ESC]), bytes([SLIP_ESC]))
Encode SLIP message.
def encode(raw):
    """Apply SLIP escaping: ESC bytes first, then END bytes."""
    escaped = raw.replace(bytes([SLIP_ESC]), bytes([SLIP_ESC, SLIP_ESC_ESC]))
    return escaped.replace(bytes([SLIP_END]), bytes([SLIP_ESC, SLIP_ESC_END]))
Get the next slip packet from raw data.
Returns the extracted packet plus the raw data with the remaining data stream.
def get_next_slip(raw):
    """
    Extract the first SLIP packet from a raw byte stream.

    Returns a tuple ``(packet, remainder)``; ``packet`` is ``None`` when
    ``raw`` does not start with a SLIP frame.
    """
    if not is_slip(raw):
        return None, raw
    # Position of the terminating END byte, relative to raw[1:].
    end_pos = raw[1:].index(SLIP_END)
    packet = decode(raw[1:end_pos + 1])
    remainder = raw[end_pos + 2:]
    return packet, remainder
Set UTC time on the gateway.
async def set_utc(pyvlx):
    """Set UTC time on the gateway via the SetUTC API call.

    Raises PyVLXException if the gateway does not confirm the request.
    """
    # The original docstring said "Enable house status monitor" — a
    # copy-paste error; this function performs the SetUTC call.
    setutc = SetUTC(pyvlx=pyvlx)
    await setutc.do_api_call()
    if not setutc.success:
        raise PyVLXException("Unable to set utc.")
Handle incoming API frame, return True if this was the expected frame.
async def handle_frame(self, frame):
    """Handle incoming API frame, return True if this was the expected frame."""
    if isinstance(frame, FrameSetUTCConfirmation):
        self.success = True
        return True
    return False
Python implementation of ``calcbinflux``.
This is only used if ``synphot.synphot_utils`` C-extension
import fails.
See docstrings.py
def _slow_calcbinflux(len_binwave, i_beg, i_end, avflux, deltaw):
"""Python implementation of ``calcbinflux``.
This is only used if ``synphot.synphot_utils`` C-extension
import fails.
See docstrings.py
"""
binflux = np.empty(shape=(len_binwave, ), dtype=np.float64)
intwave = np.empty(shape=(len_binwave, ), dtype=np.float64)
# Note that, like all Python striding, the range over which
# we integrate is [first:last).
for i in range(len(i_beg)):
first = i_beg[i]
last = i_end[i]
cur_dw = deltaw[first:last]
intwave[i] = cur_dw.sum()
binflux[i] = np.sum(avflux[first:last] * cur_dw) / intwave[i]
return binflux, intwave |
Calculate the edges of wavelength bins given the centers.
The algorithm calculates bin edges as the midpoints between bin centers
and treats the first and last bins as symmetric about their centers.
Parameters
----------
centers : array-like or `~astropy.units.quantity.Quantity`
Sequence of bin centers. Must be 1D and have at least two values.
If not a Quantity, assumed to be in Angstrom.
Returns
-------
edges : `~astropy.units.quantity.Quantity`
Array of bin edges. Will be 1D, have one more value
than ``centers``, and also the same unit.
Raises
------
synphot.exceptions.SynphotError
Invalid input.
def calculate_bin_edges(centers):
    """Calculate the edges of wavelength bins given the centers.

    Inner edges are the midpoints between neighboring centers; the two
    outermost edges make the first and last bins symmetric about their
    centers.

    Parameters
    ----------
    centers : array-like or `~astropy.units.quantity.Quantity`
        Sequence of bin centers. Must be 1D and have at least two values.
        If not a Quantity, assumed to be in Angstrom.

    Returns
    -------
    edges : `~astropy.units.quantity.Quantity`
        1D array with one more value than ``centers``, in the same unit.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid input.

    """
    if not isinstance(centers, u.Quantity):
        centers = centers * u.AA
    if centers.ndim != 1:
        raise exceptions.SynphotError('Bin centers must be 1D array.')
    if centers.size < 2:
        raise exceptions.SynphotError(
            'Bin centers must have at least two values.')
    cvals = centers.value
    edges = np.empty(cvals.size + 1, dtype=np.float64)
    # Interior edges: midpoints of adjacent centers.
    edges[1:-1] = 0.5 * (cvals[:-1] + cvals[1:])
    # Outer edges: mirror the nearest interior edge about the end centers.
    edges[0] = 2.0 * cvals[0] - edges[1]
    edges[-1] = 2.0 * cvals[-1] - edges[-2]
    return edges * centers.unit
Calculate the widths of wavelengths bins given their edges.
Parameters
----------
edges : array-like or `~astropy.units.quantity.Quantity`
Sequence of bin edges. Must be 1D and have at least two values.
If not a Quantity, assumed to be in Angstrom.
Returns
-------
widths : `~astropy.units.quantity.Quantity`
Array of bin widths. Will be 1D, have one less value
than ``edges``, and also the same unit.
Raises
------
synphot.exceptions.SynphotError
Invalid input.
def calculate_bin_widths(edges):
    """Calculate the widths of wavelength bins given their edges.

    Parameters
    ----------
    edges : array-like or `~astropy.units.quantity.Quantity`
        Sequence of bin edges. Must be 1D and have at least two values.
        If not a Quantity, assumed to be in Angstrom.

    Returns
    -------
    widths : `~astropy.units.quantity.Quantity`
        1D array with one less value than ``edges``, in the same unit.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid input.

    """
    if not isinstance(edges, u.Quantity):
        edges = edges * u.AA
    if edges.ndim != 1:
        raise exceptions.SynphotError('Bin edges must be 1D array.')
    if edges.size < 2:
        raise exceptions.SynphotError(
            'Bin edges must have at least two values.')
    # Absolute value tolerates edges given in descending order.
    deltas = edges[1:] - edges[:-1]
    return np.abs(deltas)
Calculate the centers of wavelengths bins given their edges.
Parameters
----------
edges : array-like or `~astropy.units.quantity.Quantity`
Sequence of bin edges. Must be 1D and have at least two values.
If not a Quantity, assumed to be in Angstrom.
Returns
-------
centers : `~astropy.units.quantity.Quantity`
Array of bin centers. Will be 1D, have one less value
than ``edges``, and also the same unit.
Raises
------
synphot.exceptions.SynphotError
Invalid input.
def calculate_bin_centers(edges):
    """Calculate the centers of wavelength bins given their edges.

    Parameters
    ----------
    edges : array-like or `~astropy.units.quantity.Quantity`
        Sequence of bin edges. Must be 1D and have at least two values.
        If not a Quantity, assumed to be in Angstrom.

    Returns
    -------
    centers : `~astropy.units.quantity.Quantity`
        1D array with one less value than ``edges``, in the same unit.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid input.

    """
    if not isinstance(edges, u.Quantity):
        edges = edges * u.AA
    if edges.ndim != 1:
        raise exceptions.SynphotError('Bin edges must be 1D array.')
    if edges.size < 2:
        raise exceptions.SynphotError(
            'Bin edges must have at least two values.')
    evals = edges.value
    centers = np.empty(evals.size - 1, dtype=np.float64)
    # The first center is the midpoint of the first bin; every later
    # center keeps its bin symmetric about the shared edge.
    centers[0] = evals[:2].mean()
    for i in range(1, centers.size):
        centers[i] = 2.0 * evals[i] - centers[i - 1]
    return centers * edges.unit
Calculate the wavelength range covered by the given number of pixels
centered on the given central wavelength of the given bins.
Parameters
----------
bins : array-like
Wavelengths at bin centers, each centered on a pixel.
Must be 1D array.
cenwave : float
Desired central wavelength, in the same unit as ``bins``.
npix : int
Desired number of pixels, centered on ``cenwave``.
mode : {'round', 'min', 'max', 'none'}
Determines how the pixels at the edges of the wavelength range
are handled. All the options, except 'none', will return
wavelength range edges that correspond to pixel edges:
* 'round' - Wavelength range edges are the pixel edges
and the range spans exactly ``npix`` pixels. An edge
that falls in the center of a bin is rounded to the
nearest pixel edge. This is the default.
* 'min' - Wavelength range is shrunk such that it includes
an integer number of pixels and its edges fall on pixel
edges. It may not span exactly ``npix`` pixels.
* 'max' - Wavelength range is expanded such that it
includes an integer number of pixels and its edges fall
on pixel edges. It may not span exactly ``npix`` pixels.
* 'none' - Exact wavelength range is returned. The edges
may not correspond to pixel edges, but it covers exactly
``npix`` pixels.
Returns
-------
wave1, wave2 : float
Lower and upper limits of the wavelength range.
Raises
------
synphot.exceptions.OverlapError
Given central wavelength is not within the given bins
or the wavelength range would exceed the bin limits.
synphot.exceptions.SynphotError
Invalid inputs or calculation failed.
def wave_range(bins, cenwave, npix, mode='round'):
    """Calculate the wavelength range covered by the given number of pixels
    centered on the given central wavelength of the given bins.

    Parameters
    ----------
    bins : array-like
        Wavelengths at bin centers, each centered on a pixel.
        Must be 1D array.
    cenwave : float
        Desired central wavelength, in the same unit as ``bins``.
    npix : int
        Desired number of pixels, centered on ``cenwave``.
    mode : {'round', 'min', 'max', 'none'}
        Determines how the pixels at the edges of the wavelength range
        are handled. All the options, except 'none', will return
        wavelength range edges that correspond to pixel edges:

        * 'round' - Wavelength range edges are the pixel edges
          and the range spans exactly ``npix`` pixels. An edge
          that falls in the center of a bin is rounded to the
          nearest pixel edge. This is the default.
        * 'min' - Wavelength range is shrunk such that it includes
          an integer number of pixels and its edges fall on pixel
          edges. It may not span exactly ``npix`` pixels.
        * 'max' - Wavelength range is expanded such that it
          includes an integer number of pixels and its edges fall
          on pixel edges. It may not span exactly ``npix`` pixels.
        * 'none' - Exact wavelength range is returned. The edges
          may not correspond to pixel edges, but it covers exactly
          ``npix`` pixels.

    Returns
    -------
    wave1, wave2 : float
        Lower and upper limits of the wavelength range.

    Raises
    ------
    synphot.exceptions.OverlapError
        Given central wavelength is not within the given bins
        or the wavelength range would exceed the bin limits.
    synphot.exceptions.SynphotError
        Invalid inputs or calculation failed.

    """
    mode = mode.lower()
    if mode not in ('round', 'min', 'max', 'none'):
        raise exceptions.SynphotError(
            'mode={0} is invalid, must be "round", "min", "max", '
            'or "none".'.format(mode))
    # NOTE(review): isinstance(npix, int) rejects numpy integer types
    # (np.int64 etc.) — confirm callers always pass a builtin int.
    if not isinstance(npix, int):
        raise exceptions.SynphotError('npix={0} is invalid.'.format(npix))
    # Bin values must be in ascending order.
    if bins[0] > bins[-1]:
        bins = bins[::-1]
    # Central wavelength must be within given bins.
    if cenwave < bins[0] or cenwave > bins[-1]:
        raise exceptions.OverlapError(
            'cenwave={0} is not within binset (min={1}, max={2}).'.format(
                cenwave, bins[0], bins[-1]))
    # Find the index the central wavelength among bins
    diff = cenwave - bins
    ind = np.argmin(np.abs(diff))
    # Calculate fractional index
    frac_ind = float(ind)
    if diff[ind] < 0:
        # cenwave lies below bins[ind]: interpolate toward the previous bin.
        frac_ind += diff[ind] / (bins[ind] - bins[ind - 1])
    elif diff[ind] > 0:
        # cenwave lies above bins[ind]: interpolate toward the next bin.
        frac_ind += diff[ind] / (bins[ind + 1] - bins[ind])
    # Calculate fractional indices of the edges
    half_npix = npix / 2.0
    frac_ind1 = frac_ind - half_npix
    frac_ind2 = frac_ind + half_npix
    # Calculated edges must not exceed bin edges
    if frac_ind1 < -0.5:
        raise exceptions.OverlapError(
            'Lower limit of wavelength range is out of bounds.')
    if frac_ind2 > (bins.size - 0.5):
        raise exceptions.OverlapError(
            'Upper limit of wavelength range is out of bounds.')
    # Split edge indices into fractional and integer parts; frac1 is
    # negative when frac_ind1 is negative (np.modf semantics).
    frac1, int1 = np.modf(frac_ind1)
    frac2, int2 = np.modf(frac_ind2)
    int1 = int(int1)
    int2 = int(int2)
    if mode == 'round':
        # Lower end of wavelength range
        if frac1 >= 0:
            # end is somewhere greater than binset[0] so we can just
            # interpolate between two neighboring values going with upper edge
            wave1 = bins[int1:int1 + 2].mean()
        else:
            # end is below the lowest binset value, but not by enough to
            # trigger an exception
            wave1 = bins[0] - (bins[0:2].mean() - bins[0])
        # Upper end of wavelength range
        if int2 < bins.shape[0] - 1:
            # end is somewhere below binset[-1] so we can just interpolate
            # between two neighboring values, going with the upper edge.
            wave2 = bins[int2:int2 + 2].mean()
        else:
            # end is above highest binset value but not by enough to
            # trigger an exception
            wave2 = bins[-1] + (bins[-1] - bins[-2:].mean())
    elif mode == 'min':
        # Lower end of wavelength range
        if frac1 <= 0.5 and int1 < bins.shape[0] - 1:
            # not at the lowest possible edge and pixel i included
            wave1 = bins[int1:int1 + 2].mean()
        elif frac1 > 0.5 and int1 < bins.shape[0] - 2:
            # not at the lowest possible edge and pixel i not included
            wave1 = bins[int1 + 1:int1 + 3].mean()
        elif frac1 == -0.5:
            # at the lowest possible edge
            wave1 = bins[0] - (bins[0:2].mean() - bins[0])
        else:  # pragma: no cover
            raise exceptions.SynphotError(
                'mode={0} gets unexpected frac1={1}, int1={2}'.format(
                    mode, frac1, int1))
        # Upper end of wavelength range
        if frac2 >= 0.5 and int2 < bins.shape[0] - 1:
            # not out at the end and pixel i included
            wave2 = bins[int2:int2 + 2].mean()
        elif frac2 < 0.5 and int2 < bins.shape[0]:
            # not out at end and pixel i not included
            wave2 = bins[int2 - 1:int2 + 1].mean()
        elif frac2 == 0.5 and int2 == bins.shape[0] - 1:
            # at the very end
            wave2 = bins[-1] + (bins[-1] - bins[-2:].mean())
        else:  # pragma: no cover
            raise exceptions.SynphotError(
                'mode={0} gets unexpected frac2={1}, int2={2}'.format(
                    mode, frac2, int2))
    elif mode == 'max':
        # Lower end of wavelength range
        if frac1 < 0.5 and int1 < bins.shape[0]:
            # not at the lowest possible edge and pixel i included
            wave1 = bins[int1 - 1:int1 + 1].mean()
        elif frac1 >= 0.5 and int1 < bins.shape[0] - 1:
            # not at the lowest possible edge and pixel i not included
            wave1 = bins[int1:int1 + 2].mean()
        elif frac1 == -0.5:
            # at the lowest possible edge
            wave1 = bins[0] - (bins[0:2].mean() - bins[0])
        else:  # pragma: no cover
            raise exceptions.SynphotError(
                'mode={0} gets unexpected frac1={1}, int1={2}'.format(
                    mode, frac1, int1))
        # Upper end of wavelength range
        if frac2 > 0.5 and int2 < bins.shape[0] - 2:
            # not out at the end and pixel i included
            wave2 = bins[int2 + 1:int2 + 3].mean()
        elif frac2 <= 0.5 and int2 < bins.shape[0] - 1:
            # not out at end and pixel i not included
            wave2 = bins[int2:int2 + 2].mean()
        elif frac2 == 0.5 and int2 == bins.shape[0] - 1:
            # at the very end
            wave2 = bins[-1] + (bins[-1] - bins[-2:].mean())
        else:  # pragma: no cover
            raise exceptions.SynphotError(
                'mode={0} gets unexpected frac2={1}, int2={2}'.format(
                    mode, frac2, int2))
    else:  # mode == 'none'
        # Exact (possibly fractional) interpolation between bin centers.
        wave1 = bins[int1] + frac1 * (bins[int1 + 1] - bins[int1])
        wave2 = bins[int2] + frac2 * (bins[int2 + 1] - bins[int2])
    return wave1, wave2
Calculate the number of pixels within the given wavelength range
and the given bins.
Parameters
----------
bins : array-like
Wavelengths at bin centers, each centered on a pixel.
Must be 1D array.
waverange : tuple of float
Lower and upper limits of the desired wavelength range,
in the same unit as ``bins``.
mode : {'round', 'min', 'max', 'none'}
Determines how the pixels at the edges of the wavelength range
are handled. All the options, except 'none', will return
an integer number of pixels:
* 'round' - Wavelength range edges that fall in the middle
of a pixel are counted if more than half of the pixel is
within the given wavelength range. Edges that fall in
the center of a pixel are rounded to the nearest pixel
edge. This is the default.
* 'min' - Only pixels wholly within the given wavelength
range are counted.
* 'max' - Pixels that are within the given wavelength range
by any margin are counted.
* 'none' - The exact number of encompassed pixels,
including fractional pixels, is returned.
Returns
-------
npix : number
Number of pixels.
Raises
------
synphot.exceptions.OverlapError
Given wavelength range exceeds the bounds of given bins.
synphot.exceptions.SynphotError
Invalid mode.
def pixel_range(bins, waverange, mode='round'):
    """Calculate the number of pixels within the given wavelength range
    and the given bins.

    Parameters
    ----------
    bins : array-like
        Wavelengths at bin centers, each centered on a pixel.
        Must be 1D array.
    waverange : tuple of float
        Lower and upper limits of the desired wavelength range,
        in the same unit as ``bins``.
    mode : {'round', 'min', 'max', 'none'}
        Determines how the pixels at the edges of the wavelength range
        are handled. All the options, except 'none', will return
        an integer number of pixels:

        * 'round' - Wavelength range edges that fall in the middle
          of a pixel are counted if more than half of the pixel is
          within the given wavelength range. Edges that fall in
          the center of a pixel are rounded to the nearest pixel
          edge. This is the default.
        * 'min' - Only pixels wholly within the given wavelength
          range are counted.
        * 'max' - Pixels that are within the given wavelength range
          by any margin are counted.
        * 'none' - The exact number of encompassed pixels,
          including fractional pixels, is returned.

    Returns
    -------
    npix : number
        Number of pixels.

    Raises
    ------
    synphot.exceptions.OverlapError
        Given wavelength range exceeds the bounds of given bins.
    synphot.exceptions.SynphotError
        Invalid mode.

    """
    mode = mode.lower()
    if mode not in ('round', 'min', 'max', 'none'):
        raise exceptions.SynphotError(
            'mode={0} is invalid, must be "round", "min", "max", '
            'or "none".'.format(mode))
    # Normalize the wavelength range so that wave1 <= wave2.
    if waverange[0] < waverange[-1]:
        wave1 = waverange[0]
        wave2 = waverange[-1]
    else:
        wave1 = waverange[-1]
        wave2 = waverange[0]
    # Bin values must be in ascending order.
    if bins[0] > bins[-1]:
        bins = bins[::-1]
    # Wavelength range must be within bins
    minwave = bins[0] - (bins[0:2].mean() - bins[0])
    maxwave = bins[-1] + (bins[-1] - bins[-2:].mean())
    if wave1 < minwave or wave2 > maxwave:
        raise exceptions.OverlapError(
            'Wavelength range ({0}, {1}) is out of bounds of bins '
            '(min={2}, max={3}).'.format(wave1, wave2, minwave, maxwave))
    # Degenerate range covers no pixels.
    if wave1 == wave2:
        return 0
    if mode == 'round':
        ind1 = bins.searchsorted(wave1, side='right')
        ind2 = bins.searchsorted(wave2, side='right')
    else:
        ind1 = bins.searchsorted(wave1, side='left')
        ind2 = bins.searchsorted(wave2, side='left')
    if mode == 'round':
        npix = ind2 - ind1
    elif mode == 'min':
        # for ind1, figure out if pixel ind1 is wholly included or not.
        # do this by figuring out where wave1 is between ind1 and ind1-1.
        frac = (bins[ind1] - wave1) / (bins[ind1] - bins[ind1 - 1])
        if frac < 0.5:
            # ind1 is only partially included
            ind1 += 1
        # similar but reversed procedure for ind2
        frac = (wave2 - bins[ind2 - 1]) / (bins[ind2] - bins[ind2 - 1])
        if frac < 0.5:
            # ind2 is only partially included
            ind2 -= 1
        npix = ind2 - ind1
    elif mode == 'max':
        # for ind1, figure out if pixel ind1-1 is partially included or not.
        # do this by figuring out where wave1 is between ind1 and ind1-1.
        frac = (wave1 - bins[ind1 - 1]) / (bins[ind1] - bins[ind1 - 1])
        if frac < 0.5:
            # ind1 is partially included
            ind1 -= 1
        # similar but reversed procedure for ind2
        frac = (bins[ind2] - wave2) / (bins[ind2] - bins[ind2 - 1])
        if frac < 0.5:
            # ind2 is partially included
            ind2 += 1
        npix = ind2 - ind1
    else:  # mode == 'none'
        # calculate fractional indices
        frac1 = ind1 - (bins[ind1] - wave1) / (bins[ind1] - bins[ind1 - 1])
        frac2 = ind2 - (bins[ind2] - wave2) / (bins[ind2] - bins[ind2 - 1])
        npix = frac2 - frac1
    return npix
Connect to KLF 200.
async def connect(self):
    """Open the connection to the KLF 200 gateway and log in."""
    PYVLXLOG.warning("Connecting to KLF 200.")
    await self.connection.connect()
    login_request = Login(pyvlx=self, password=self.config.password)
    await login_request.do_api_call()
    if login_request.success:
        return
    raise PyVLXException("Login to KLF 200 failed, check credentials")
Retrieve version and protocol version from API.
async def update_version(self):
    """Retrieve version and protocol version from API."""
    version_call = GetVersion(pyvlx=self)
    await version_call.do_api_call()
    if not version_call.success:
        raise PyVLXException("Unable to retrieve version")
    self.version = version_call.version
    protocol_call = GetProtocolVersion(pyvlx=self)
    await protocol_call.do_api_call()
    if not protocol_call.success:
        raise PyVLXException("Unable to retrieve protocol version")
    self.protocol_version = protocol_call.version
    PYVLXLOG.warning(
        "Connected to: %s, protocol version: %s",
        self.version, self.protocol_version)
Send frame to API via connection.
async def send_frame(self, frame):
    """Send frame to API via connection."""
    if self.connection.connected:
        self.connection.write(frame)
        return
    # Lazily (re)establish the session before the first write.
    await self.connect()
    await self.update_version()
    await set_utc(pyvlx=self)
    await house_status_monitor_enable(pyvlx=self)
    self.connection.write(frame)
Read scene from configuration.
def from_config(cls, pyvlx, item):
    """Build a scene instance from a configuration dictionary."""
    # Look up 'name' before 'id' so a missing 'name' raises first,
    # matching the original access order.
    scene_name = item['name']
    scene_id = item['id']
    return cls(pyvlx, scene_id, scene_name)
Send api call.
async def api_call(self, verb, action, params=None, add_authorization_token=True, retry=False):
    """Send api call."""
    if add_authorization_token and not self.token:
        await self.refresh_token()
    try:
        return await self._api_call_impl(verb, action, params, add_authorization_token)
    except InvalidToken:
        if retry or not add_authorization_token:
            raise
        # Token may have expired: fetch a fresh one and retry exactly once.
        await self.refresh_token()
        return await self.api_call(verb, action, params, add_authorization_token, True)
Refresh API token from KLF 200.
async def refresh_token(self):
    """Refresh API token from KLF 200."""
    response = await self.api_call(
        'auth', 'login', {'password': self.config.password},
        add_authorization_token=False)
    if 'token' not in response:
        raise PyVLXException('no element token found in response: {0}'.format(json.dumps(response)))
    self.token = response['token']
Create http body for rest request.
def create_body(action, params):
    """Create http body for rest request."""
    body = {'action': action}
    if params is not None:
        body['params'] = params
    return body
Evaluate rest response.
def evaluate_response(json_response):
    """Evaluate rest response."""
    # A non-empty 'errors' list takes precedence over any result.
    if json_response.get('errors'):
        Interface.evaluate_errors(json_response)
    elif 'result' not in json_response:
        raise PyVLXException('no element result found in response: {0}'.format(json.dumps(json_response)))
    elif not json_response['result']:
        raise PyVLXException('Request failed {0}'.format(json.dumps(json_response)))
Evaluate rest errors.
def evaluate_errors(json_response):
    """Evaluate rest errors."""
    errors = json_response.get('errors')
    if not isinstance(errors, list) or not errors or \
            not isinstance(errors[0], int):
        raise PyVLXException('Could not evaluate errors {0}'.format(json.dumps(json_response)))
    # unclear if response may contain more errors than one. Taking the first.
    first_error = errors[0]
    if first_error in (402, 403, 405, 406):
        raise InvalidToken(first_error)
    raise PyVLXException('Unknown error code {0}'.format(first_error))
Return Payload.
def get_payload(self):
    """Return payload: node id followed by a 64-byte padded name."""
    return bytes([self.node_id]) + string_to_bytes(self.name, 64)
Init frame from binary data.
def from_payload(self, payload):
    """Init frame from binary data: node id, then 64-byte name field."""
    self.node_id = payload[0]
    self.name = bytes_to_string(payload[1:65])
Init frame from binary data.
def from_payload(self, payload):
    """Init frame from binary data: status byte, then node id."""
    self.status = SetNodeNameConfirmationStatus(payload[0])
    self.node_id = payload[1]
Convert FrameGet[All]Node[s]InformationNotification into Node object.
def convert_frame_to_node(pyvlx, frame):
    """Convert FrameGet[All]Node[s]InformationNotification into Node object."""
    common = dict(pyvlx=pyvlx, node_id=frame.node_id, name=frame.name)
    node_type = frame.node_type
    if node_type == NodeTypeWithSubtype.WINDOW_OPENER:
        return Window(rain_sensor=False, **common)
    if node_type == NodeTypeWithSubtype.WINDOW_OPENER_WITH_RAIN_SENSOR:
        return Window(rain_sensor=True, **common)
    if node_type in (NodeTypeWithSubtype.ROLLER_SHUTTER,
                     NodeTypeWithSubtype.DUAL_ROLLER_SHUTTER):
        return RollerShutter(**common)
    if node_type in (NodeTypeWithSubtype.INTERIOR_VENETIAN_BLIND,
                     NodeTypeWithSubtype.VERTICAL_INTERIOR_BLINDS,
                     NodeTypeWithSubtype.EXTERIOR_VENETIAN_BLIND,
                     NodeTypeWithSubtype.LOUVER_BLIND):
        return Blind(**common)
    if node_type in (NodeTypeWithSubtype.VERTICAL_EXTERIOR_AWNING,
                     NodeTypeWithSubtype.HORIZONTAL_AWNING):
        return Awning(**common)
    if node_type == NodeTypeWithSubtype.ON_OFF_SWITCH:
        return OnOffSwitch(**common)
    PYVLXLOG.warning("%s not implemented", node_type)
    return None
Set temperature.
def temperature(self, what):
    """Set temperature.

    ``what`` is validated and converted to Kelvin by
    ``units.validate_quantity`` before being stored.
    """
    self._temperature = units.validate_quantity(what, u.K)
Apply emissivity to an existing beam to produce a thermal
source spectrum (without optical counterpart).
Thermal source spectrum is calculated as follows:
#. Create a blackbody spectrum in PHOTLAM per square arcsec
with `temperature`.
#. Multiply the blackbody with `beam_fill_factor` and ``self``.
Returns
-------
sp : `~synphot.spectrum.SourceSpectrum`
Thermal source spectrum.
def thermal_source(self):
    """Apply emissivity to an existing beam to produce a thermal
    source spectrum (without optical counterpart).

    Thermal source spectrum is calculated as follows:

    #. Create a blackbody spectrum in PHOTLAM per square arcsec
       with `temperature`.
    #. Multiply the blackbody with `beam_fill_factor` and ``self``.

    Returns
    -------
    sp : `~synphot.spectrum.SourceSpectrum`
        Thermal source spectrum.

    """
    # SR_PER_ARCSEC2 converts the per-steradian blackbody to per
    # square arcsec before scaling by the fill factor and emissivity.
    sp = (SourceSpectrum(BlackBody1D, temperature=self.temperature) *
          units.SR_PER_ARCSEC2 * self.beam_fill_factor * self)
    # Record inputs in metadata for traceability.
    sp.meta['temperature'] = self.temperature
    sp.meta['beam_fill_factor'] = self.beam_fill_factor
    return sp
Creates a thermal spectral element from file.
.. note::
Only FITS format is supported.
Parameters
----------
filename : str
Thermal spectral element filename.
temperature_key, beamfill_key : str
Keywords in FITS *table extension* that store temperature
(in Kelvin) and beam filling factor values.
Beam filling factor is set to 1 if its keyword is missing.
kwargs : dict
Keywords acceptable by :func:`~synphot.specio.read_fits_spec`.
Returns
-------
th : `ThermalSpectralElement`
Empirical thermal spectral element.
Raises
------
synphot.exceptions.SynphotError
Invalid inputs.
def from_file(cls, filename, temperature_key='DEFT',
              beamfill_key='BEAMFILL', **kwargs):
    """Create a thermal spectral element from file.

    .. note:: Only FITS format is supported.

    Parameters
    ----------
    filename : str
        Thermal spectral element filename.
    temperature_key, beamfill_key : str
        Keywords in FITS *table extension* that store temperature
        (in Kelvin) and beam filling factor values.
        Beam filling factor is set to 1 if its keyword is missing.
    kwargs : dict
        Keywords acceptable by :func:`~synphot.specio.read_fits_spec`.

    Returns
    -------
    th : `ThermalSpectralElement`
        Empirical thermal spectral element.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid inputs.

    """
    if not (filename.endswith('fits') or filename.endswith('fit')):
        raise exceptions.SynphotError('Only FITS format is supported.')
    # Extra info from table header
    ext = kwargs.get('ext', 1)
    tab_hdr = fits.getheader(filename, ext=ext)
    temperature = tab_hdr.get(temperature_key)
    if temperature is None:
        raise exceptions.SynphotError(
            'Missing {0} keyword.'.format(temperature_key))
    # BUG FIX: previously hardcoded 'BEAMFILL', which silently ignored
    # a caller-supplied beamfill_key.
    beam_fill_factor = tab_hdr.get(beamfill_key, 1)
    if 'flux_unit' not in kwargs:
        kwargs['flux_unit'] = cls._internal_flux_unit
    if 'flux_col' not in kwargs:
        kwargs['flux_col'] = 'EMISSIVITY'
    header, wavelengths, em = specio.read_spec(filename, **kwargs)
    return cls(
        Empirical1D, temperature, beam_fill_factor=beam_fill_factor,
        points=wavelengths, lookup_table=em, meta={'header': header})
Init frame from binary data.
def from_payload(self, payload):
    """Init frame from binary data.

    Byte 0 is the status code; byte 1 is the node count.
    """
    self.status = AllNodesInformationStatus(payload[0])
    self.number_of_nodes = payload[1]
Return Payload.
def get_payload(self):
    """Serialize the node information fields in wire order.

    Multi-byte integers are encoded big-endian.
    """
    parts = [
        bytes([self.node_id]),
        bytes([self.order >> 8 & 255, self.order & 255]),
        bytes([self.placement]),
        bytes(string_to_bytes(self.name, 64)),
        bytes([self.velocity.value]),
        bytes([self.node_type.value >> 8 & 255, self.node_type.value & 255]),
        bytes([self.product_group]),
        bytes([self.product_type]),
        bytes([self.node_variation.value]),
        bytes([self.power_mode]),
        bytes([self.build_number]),
        bytes(self._serial_number),
        bytes([self.state]),
        bytes(self.current_position.raw),
        bytes(self.target.raw),
        bytes(self.current_position_fp1.raw),
        bytes(self.current_position_fp2.raw),
        bytes(self.current_position_fp3.raw),
        bytes(self.current_position_fp4.raw),
        bytes([self.remaining_time >> 8 & 255, self.remaining_time & 255]),
        struct.pack(">I", self.timestamp),
        bytes(self.alias_array),
    ]
    return b"".join(parts)
Init frame from binary data.
def from_payload(self, payload):
    """Init frame from binary data.

    Field layout (byte offsets, big-endian multi-byte integers):
    0 node id; 1-2 order; 3 placement; 4-67 name; 68 velocity;
    69-70 node type; 71 product group; 72 product type;
    73 node variation; 74 power mode; 75 build number;
    76-83 serial number; 84 state; 85-96 six 2-byte position
    parameters; 97-98 remaining time; 99-102 timestamp;
    103-124 alias array.
    """
    self.node_id = payload[0]
    self.order = payload[1] * 256 + payload[2]
    self.placement = payload[3]
    self.name = bytes_to_string(payload[4:68])
    self.velocity = Velocity(payload[68])
    self.node_type = NodeTypeWithSubtype(payload[69] * 256 + payload[70])
    self.product_group = payload[71]
    self.product_type = payload[72]
    self.node_variation = NodeVariation(payload[73])
    self.power_mode = payload[74]
    self.build_number = payload[75]
    self._serial_number = payload[76:84]
    self.state = payload[84]
    self.current_position = Parameter(payload[85:87])
    self.target = Parameter(payload[87:89])
    self.current_position_fp1 = Parameter(payload[89:91])
    self.current_position_fp2 = Parameter(payload[91:93])
    self.current_position_fp3 = Parameter(payload[93:95])
    self.current_position_fp4 = Parameter(payload[95:97])
    self.remaining_time = payload[97] * 256 + payload[98]
    # ">I" = big-endian unsigned 32-bit.
    self.timestamp = struct.unpack(">I", payload[99:103])[0]
    self.alias_array = AliasArray(payload[103:125])
Set internal raw state from parameter.
def from_parameter(self, parameter):
    """Set internal raw state from parameter.

    NOTE(review): this raises a bare ``Exception`` while sibling code
    raises ``PyVLXException`` — confirm whether callers rely on the
    generic type before unifying.
    """
    if not isinstance(parameter, Parameter):
        raise Exception("parameter::from_parameter_wrong_object")
    self.raw = parameter.raw
Create raw out of position value.
def from_int(value):
    """Create raw two-byte big-endian representation of a position value.

    Raises PyVLXException if ``value`` is not an int or out of range.
    """
    if not isinstance(value, int):
        raise PyVLXException("value_has_to_be_int")
    if not Parameter.is_valid_int(value):
        raise PyVLXException("value_out_of_range")
    # High byte first (big-endian).
    return bytes([value >> 8 & 255, value & 255])
Test if value can be rendered out of int.
def is_valid_int(value):
    """Test if value can be rendered out of int."""
    # 0..MAX covers ON and OFF as well.
    if 0 <= value <= Parameter.MAX:
        return True
    return value in (Parameter.UNKNOWN_VALUE, Parameter.CURRENT_POSITION)
Test if raw packets are valid for initialization of Position.
def from_raw(raw):
    """Validate raw bytes for initialization of Position and return them."""
    if not isinstance(raw, bytes):
        raise PyVLXException("Position::raw_must_be_bytes")
    if len(raw) != 2:
        raise PyVLXException("Position::raw_must_be_two_bytes")
    # CURRENT_POSITION and UNKNOWN_VALUE are allowed sentinels above MAX.
    is_sentinel = raw in (Position.from_int(Position.CURRENT_POSITION),
                          Position.from_int(Position.UNKNOWN_VALUE))
    if not is_sentinel and Position.to_int(raw) > Position.MAX:
        raise PyVLXException("position::raw_exceed_limit", raw=raw)
    return raw
Create raw value out of percent position.
def from_percent(position_percent):
    """Create raw value out of percent position."""
    if not isinstance(position_percent, int):
        raise PyVLXException("Position::position_percent_has_to_be_int")
    if position_percent < 0:
        raise PyVLXException("Position::position_percent_has_to_be_positive")
    if position_percent > 100:
        raise PyVLXException("Position::position_percent_out_of_range")
    # High byte carries percent*2 (0..200); low byte is always zero,
    # so the raw 16-bit value is percent * 512.
    high_byte = position_percent * 2
    return bytes([high_byte, 0])
Return product as human readable string.
def product(self):
    """Return product as human readable string."""
    if (self.product_group, self.product_type) == (14, 3):
        return "KLF 200"
    return "Unknown Product: {}:{}".format(self.product_group, self.product_type)
Return Payload.
def get_payload(self):
    """Return payload: 6-byte software version, then hardware/group/type."""
    trailer = bytes([self.hardware_version, self.product_group, self.product_type])
    return self._software_version + trailer
Init frame from binary data.
def from_payload(self, payload):
    """Populate version fields from the binary payload."""
    # First six bytes carry the software version.
    self._software_version = payload[0:6]
    self.hardware_version, self.product_group, self.product_type = payload[6:9]
Return Payload.
def get_payload(self):
    """Serialize session id, originator, priority, scene id and velocity."""
    payload = bytearray([self.session_id >> 8 & 255, self.session_id & 255])
    payload.append(self.originator.value)
    payload.append(self.priority.value)
    payload.append(self.scene_id)
    payload.append(self.velocity.value)
    return bytes(payload)
Init frame from binary data.
def from_payload(self, payload):
    """Populate request fields from the binary payload."""
    # Big-endian 16-bit session id followed by enum-coded bytes.
    self.session_id = (payload[0] << 8) | payload[1]
    self.originator = Originator(payload[2])
    self.priority = Priority(payload[3])
    self.scene_id = payload[4]
    self.velocity = Velocity(payload[5])
Return Payload.
def get_payload(self):
    """Serialize confirmation status followed by the session id."""
    session_bytes = bytes([self.session_id >> 8 & 255, self.session_id & 255])
    return bytes([self.status.value]) + session_bytes
Init frame from binary data.
def from_payload(self, payload):
    """Populate status and session id from the binary payload."""
    self.status = ActivateSceneConfirmationStatus(payload[0])
    self.session_id = (payload[1] << 8) | payload[2]
Return Payload.
def get_payload(self):
    """Serialize the command-send request into its binary payload."""
    payload = bytearray()
    # Session id (big endian), then originator and priority.
    payload += bytes([self.session_id >> 8 & 255, self.session_id & 255])
    payload.append(self.originator.value)
    payload.append(self.priority.value)
    payload.append(0)  # ParameterActive pointing to main parameter (MP)
    payload += bytes(2)  # FPI 1+2
    # Main parameter followed by zeroed functional parameters.
    payload += bytes(self.parameter)
    payload += bytes(32)
    # Node index array: count, node ids, zero padding up to 20 entries.
    payload.append(len(self.node_ids))
    payload += bytes(self.node_ids)
    payload += bytes(20 - len(self.node_ids))
    payload.append(0)  # Priority Level Lock
    payload += bytes(2)  # Priority Level information 1+2
    payload.append(0)  # Locktime
    return bytes(payload)
Init frame from binary data.
def from_payload(self, payload):
    """Init frame from binary data.

    The node index array sits at payload[42:42 + count]; count is at
    payload[41].
    """
    self.session_id = payload[0] * 256 + payload[1]
    self.originator = Originator(payload[2])
    self.priority = Priority(payload[3])
    len_node_ids = payload[41]
    if len_node_ids > 20:
        raise PyVLXException("command_send_request_wrong_node_length")
    # Bug fix: read each node id from the array (payload[42 + i]); the
    # previous code added the loop index to the *value* at payload[42],
    # which is only correct for consecutive node ids.
    self.node_ids = [payload[42 + i] for i in range(len_node_ids)]
    self.parameter = Parameter(payload[7:9])
Init frame from binary data.
def from_payload(self, payload):
    """Populate session id and confirmation status from binary data."""
    self.session_id = (payload[0] << 8) | payload[1]
    self.status = CommandSendConfirmationStatus(payload[2])
Return Payload.
def get_payload(self):
    """Serialize session, status, index, node parameter and its value."""
    payload = bytearray([self.session_id >> 8 & 255, self.session_id & 255])
    payload.append(self.status_id)
    payload.append(self.index_id)
    payload.append(self.node_parameter)
    payload += bytes([self.parameter_value >> 8 & 255,
                      self.parameter_value & 255])
    # XXX: Missing implementation of run_status, status_reply and
    # information_code -- padded with zeros for now.
    payload += bytes(6)
    return bytes(payload)
Init frame from binary data.
def from_payload(self, payload):
    """Populate fields from the binary payload."""
    self.session_id = (payload[0] << 8) | payload[1]
    self.status_id = payload[2]
    self.index_id = payload[3]
    self.node_parameter = payload[4]
    self.parameter_value = (payload[5] << 8) | payload[6]
Return Payload.
def get_payload(self):
    """Serialize session, index, node parameter and remaining seconds."""
    payload = bytearray([self.session_id >> 8 & 255, self.session_id & 255])
    payload.append(self.index_id)
    payload.append(self.node_parameter)
    payload += bytes([self.seconds >> 8 & 255, self.seconds & 255])
    return bytes(payload)
Init frame from binary data.
def from_payload(self, payload):
    """Populate fields from the binary payload."""
    self.session_id = (payload[0] << 8) | payload[1]
    self.index_id = payload[2]
    self.node_parameter = payload[3]
    self.seconds = (payload[4] << 8) | payload[5]
Log packets from Bus.
async def main(loop):
    """Log packets from the KLF 200 bus for a fixed period."""
    # Route the library's debug output to the console.
    PYVLXLOG.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    PYVLXLOG.addHandler(handler)
    # Connect and populate scenes/nodes so bus traffic is generated.
    pyvlx = PyVLX('pyvlx.yaml', loop=loop)
    await pyvlx.load_scenes()
    await pyvlx.load_nodes()
    # Keep listening; increase this timeout to log for a longer time. :)
    await asyncio.sleep(90)
    # Always disconnect: the KLF 200 handles lost connections poorly.
    await pyvlx.disconnect()
Change name of node.
async def rename(self, name):
    """Change the name of this node on the gateway and locally."""
    request = SetNodeName(pyvlx=self.pyvlx, node_id=self.node_id, name=name)
    await request.do_api_call()
    if not request.success:
        raise PyVLXException("Unable to rename node")
    # Only update the cached name once the gateway accepted the change.
    self.name = name
Set window to desired position.
Parameters:
* position: Position object containing the target position.
* wait_for_completion: If set, function will return
after device has reached target position.
async def set_position(self, position, wait_for_completion=True):
    """Move the window to the given position.

    Parameters:
        * position: Position object containing the target position.
        * wait_for_completion: If set, function will return
          after device has reached target position.
    """
    command = CommandSend(
        pyvlx=self.pyvlx,
        wait_for_completion=wait_for_completion,
        node_id=self.node_id,
        parameter=position)
    await command.do_api_call()
    if not command.success:
        raise PyVLXException("Unable to send command")
    await self.after_update()
Open window.
Parameters:
* wait_for_completion: If set, function will return
after device has reached target position.
async def open(self, wait_for_completion=True):
    """Open the window completely.

    Parameters:
        * wait_for_completion: If set, function will return
          after device has reached target position.
    """
    fully_open = Position(position_percent=0)
    await self.set_position(fully_open,
                            wait_for_completion=wait_for_completion)
Close window.
Parameters:
* wait_for_completion: If set, function will return
after device has reached target position.
async def close(self, wait_for_completion=True):
    """Close the window completely.

    Parameters:
        * wait_for_completion: If set, function will return
          after device has reached target position.
    """
    fully_closed = Position(position_percent=100)
    await self.set_position(fully_closed,
                            wait_for_completion=wait_for_completion)
Stop window.
Parameters:
* wait_for_completion: If set, function will return
after device has reached target position.
async def stop(self, wait_for_completion=True):
    """Stop the window at its current position.

    Parameters:
        * wait_for_completion: If set, function will return
          after device has reached target position.
    """
    await self.set_position(CurrentPosition(),
                            wait_for_completion=wait_for_completion)
Return sampleset of a model or `None` if undefined.
Model could be a real model or evaluated sampleset.
def _get_sampleset(model):
    """Return sampleset of a model or `None` if undefined.

    Model could be a real model or an already evaluated sampleset."""
    if not isinstance(model, Model):
        return model  # Already a sampleset
    return model.sampleset() if hasattr(model, 'sampleset') else None
Simple merge of samplesets.
def _merge_sampleset(model1, model2):
    """Simple merge of the samplesets of two models (or raw samplesets)."""
    return merge_wavelengths(_get_sampleset(model1), _get_sampleset(model2))
One of the models is either ``RedshiftScaleFactor`` or ``Scale``.
Possible combos::
RedshiftScaleFactor | Model
Scale | Model
Model | Scale
def _shift_wavelengths(model1, model2):
    """One of the models is either ``RedshiftScaleFactor`` or ``Scale``.

    Possible combos::

        RedshiftScaleFactor | Model
        Scale | Model
        Model | Scale
    """
    if isinstance(model1, _models.RedshiftScaleFactor):
        # Shift the other model's sampleset back through the redshift.
        val = _get_sampleset(model2)
        w = val if val is None else model1.inverse(val)
    elif isinstance(model1, _models.Scale):
        # Scaling flux does not move the wavelengths.
        w = _get_sampleset(model2)
    else:
        w = _get_sampleset(model1)
    return w
Get optimal wavelengths for sampling a given model.
Parameters
----------
model : `~astropy.modeling.Model`
Model.
Returns
-------
waveset : array-like or `None`
Optimal wavelengths. `None` if undefined.
Raises
------
synphot.exceptions.SynphotError
Invalid model.
def get_waveset(model):
    """Get optimal wavelengths for sampling a given model.

    Parameters
    ----------
    model : `~astropy.modeling.Model`
        Model.

    Returns
    -------
    waveset : array-like or `None`
        Optimal wavelengths. `None` if undefined.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid model.
    """
    if not isinstance(model, Model):
        raise SynphotError('{0} is not a model.'.format(model))
    if isinstance(model, _CompoundModel):
        # Walk the compound-model tree and combine sub-model samplesets.
        return model._tree.evaluate(WAVESET_OPERATORS, getter=None)
    return _get_sampleset(model)
Return metadata of a model.
Model could be a real model or evaluated metadata.
def _get_meta(model):
    """Return metadata of a model.

    Model could be a real model or already evaluated metadata."""
    return model.meta if isinstance(model, Model) else model
Simple merge of samplesets.
def _merge_meta(model1, model2):
    """Simple merge of metadata from two models.

    (Docstring corrected: this merges *metadata*, not samplesets.)
    """
    w1 = _get_meta(model1)
    w2 = _get_meta(model2)
    return metadata.merge(w1, w2, metadata_conflicts='silent')
Get metadata for a given model.
Parameters
----------
model : `~astropy.modeling.Model`
Model.
Returns
-------
metadata : dict
Metadata for the model.
Raises
------
synphot.exceptions.SynphotError
Invalid model.
def get_metadata(model):
    """Get metadata for a given model.

    Parameters
    ----------
    model : `~astropy.modeling.Model`
        Model.

    Returns
    -------
    metadata : dict
        Metadata for the model.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid model.
    """
    if not isinstance(model, Model):
        raise SynphotError('{0} is not a model.'.format(model))
    # Local renamed from ``metadata`` to avoid shadowing the
    # ``metadata`` module used elsewhere in this file (_merge_meta).
    if isinstance(model, _CompoundModel):
        meta = model._tree.evaluate(METADATA_OPERATORS, getter=None)
    else:
        # Deep copy so callers cannot mutate the model's own metadata.
        meta = deepcopy(model.meta)
    return meta
Peak wavelength in Angstrom when the curve is expressed as
power density.
def lambda_max(self):
    """Peak wavelength in Angstrom when the curve is expressed as
    power density."""
    # Wien's displacement law, converted from meters to Angstrom.
    peak_m = (const.b_wien.value / self.temperature) * u.m
    return peak_m.to(u.AA).value
Tuple defining the default ``bounding_box`` limits,
``(x_low, x_high)``.
.. math::
x_{\\textnormal{low}} = 0
x_{\\textnormal{high}} = \\log(\\lambda_{\\textnormal{max}} \\;\
(1 + \\textnormal{factor}))
Parameters
----------
factor : float
Used to calculate ``x_high``.
def bounding_box(self, factor=10.0):
    """Tuple defining the default ``bounding_box`` limits,
    ``(x_low, x_high)``.

    ``x_low`` is 0 and ``x_high`` is
    ``log10(lambda_max * (1 + factor))``.

    Parameters
    ----------
    factor : float
        Used to calculate ``x_high``.
    """
    peak = self.lambda_max
    # peak * 0 keeps the lower limit zero with the same array/scalar shape.
    return (peak * 0, np.log10(peak + factor * peak))
Return ``x`` array that samples the feature.
Parameters
----------
factor_bbox : float
Factor for ``bounding_box`` calculations.
num : int
Number of points to generate.
def sampleset(self, factor_bbox=10.0, num=1000):
    """Return ``x`` array that samples the feature.

    Parameters
    ----------
    factor_bbox : float
        Factor for ``bounding_box`` calculations.

    num : int
        Number of points to generate.
    """
    log_lo, log_hi = self.bounding_box(factor=factor_bbox)
    if self._n_models == 1:
        samples = np.logspace(log_lo, log_hi, num)
    else:
        # One logspace per model when the instance holds a model set.
        samples = list(map(partial(np.logspace, num=num), log_lo, log_hi))
    return np.asarray(samples)
Evaluate the model.
Parameters
----------
x : number or ndarray
Wavelengths in Angstrom.
temperature : number
Temperature in Kelvin.
Returns
-------
y : number or ndarray
Blackbody radiation in PHOTLAM per steradian.
def evaluate(x, temperature):
    """Evaluate the model.

    Parameters
    ----------
    x : number or ndarray
        Wavelengths in Angstrom.

    temperature : number
        Temperature in Kelvin.

    Returns
    -------
    y : number or ndarray
        Blackbody radiation in PHOTLAM per steradian.
    """
    # blackbody_nu moved modules between astropy releases.
    if ASTROPY_LT_2_0:
        from astropy.analytic_functions.blackbody import blackbody_nu
    else:
        from astropy.modeling.blackbody import blackbody_nu

    # Silence Numpy (e.g. overflow/underflow for extreme inputs).
    old_np_err_cfg = np.seterr(all='ignore')

    wave = np.ascontiguousarray(x) * u.AA
    bbnu_flux = blackbody_nu(wave, temperature)
    # Convert per-frequency flux to PHOTLAM, keeping the per-steradian
    # normalization by multiplying and dividing by u.sr.
    bbflux = (bbnu_flux * u.sr).to(
        units.PHOTLAM, u.spectral_density(wave)) / u.sr  # PHOTLAM/sr

    # Restore Numpy settings
    np.seterr(**old_np_err_cfg)

    return bbflux.value
Evaluate the model.
Parameters
----------
x : number or ndarray
Wavelengths in Angstrom.
temperature : number
Temperature in Kelvin.
Returns
-------
y : number or ndarray
Blackbody radiation in PHOTLAM.
def evaluate(self, x, temperature):
    """Evaluate the model.

    Parameters
    ----------
    x : number or ndarray
        Wavelengths in Angstrom.

    temperature : number
        Temperature in Kelvin.

    Returns
    -------
    y : number or ndarray
        Blackbody radiation in PHOTLAM.
    """
    # Scale the parent's per-steradian result by the solid angle.
    per_sr = super(BlackBodyNorm1D, self).evaluate(x, temperature)
    return per_sr * self._omega
Calculate sampleset for each model.
def _calc_sampleset(w1, w2, step, minimal):
"""Calculate sampleset for each model."""
if minimal:
arr = [w1 - step, w1, w2, w2 + step]
else:
arr = np.arange(w1 - step, w2 + step + step, step)
return arr |
Return ``x`` array that samples the feature.
Parameters
----------
step : float
Distance of first and last points w.r.t. bounding box.
minimal : bool
Only return the minimal points needed to define the box;
i.e., box edges and a point outside on each side.
def sampleset(self, step=0.01, minimal=False):
    """Return ``x`` array that samples the feature.

    Parameters
    ----------
    step : float
        Distance of first and last points w.r.t. bounding box.

    minimal : bool
        Only return the minimal points needed to define the box;
        i.e., box edges and a point outside on each side.
    """
    w1, w2 = self.bounding_box
    if self._n_models == 1:
        samples = self._calc_sampleset(w1, w2, step, minimal)
    else:
        # One sampleset per model when the instance holds a model set.
        calc = partial(self._calc_sampleset, step=step, minimal=minimal)
        samples = list(map(calc, w1, w2))
    return np.asarray(samples)
One dimensional constant flux model function.
Parameters
----------
x : number or ndarray
Wavelengths in Angstrom.
Returns
-------
y : number or ndarray
Flux in PHOTLAM.
def evaluate(self, x, *args):
    """One dimensional constant flux model function.

    Parameters
    ----------
    x : number or ndarray
        Wavelengths in Angstrom.

    Returns
    -------
    y : number or ndarray
        Flux in PHOTLAM.
    """
    # Broadcast the constant amplitude over the input, attach units,
    # then convert to PHOTLAM.
    amp = (self.amplitude * np.ones_like(x)) * self._flux_unit
    return units.convert_flux(x, amp, units.PHOTLAM).value
Remove negative flux.
def _process_neg_flux(self, x, y):
    """Clip negative flux/throughput to zero, warning about each clip."""
    if self._keep_neg:  # Caller wants negative values preserved.
        return y

    clipped_x = clipped_y = None
    n_neg = 0

    if np.isscalar(y):  # pragma: no cover
        if y < 0:
            n_neg = 1
            clipped_x, clipped_y = x, y
            y = 0
    else:
        x = np.asarray(x)  # In case input is just pure list
        y = np.asarray(y)
        idx = np.where(y < 0)
        n_neg = len(idx[0])
        if n_neg > 0:
            clipped_x = x[idx]
            clipped_y = y[idx]
            y[idx] = 0

    if clipped_y is not None:
        warn_str = ('{0} bin(s) contained negative flux or throughput'
                    '; it/they will be set to zero.'.format(n_neg))
        warn_str += '\n points: {0}\n lookup_table: {1}'.format(
            clipped_x, clipped_y)  # Extra info
        self.meta['warnings'].update({'NegativeFlux': warn_str})
        warnings.warn(warn_str, AstropyUserWarning)

    return y
Evaluate the model.
Parameters
----------
inputs : number or ndarray
Wavelengths in same unit as ``points``.
Returns
-------
y : number or ndarray
Flux or throughput in same unit as ``lookup_table``.
def evaluate(self, inputs):
    """Evaluate the model.

    Parameters
    ----------
    inputs : number or ndarray
        Wavelengths in same unit as ``points``.

    Returns
    -------
    y : number or ndarray
        Flux or throughput in same unit as ``lookup_table``.
    """
    y = super(Empirical1D, self).evaluate(inputs)

    # A NaN fill value means out-of-range inputs should be extrapolated
    # flat from the nearest end of the lookup table.
    if self.fill_value is np.nan:
        # Cannot use sampleset() due to ExtinctionModel1D
        x = np.squeeze(self.points)
        if np.isscalar(y):  # pragma: no cover
            if inputs < x[0]:
                y = self.lookup_table[0]
            elif inputs > x[-1]:
                y = self.lookup_table[-1]
        else:
            # Overwrite NaNs outside the table with the edge values.
            y[inputs < x[0]] = self.lookup_table[0]
            y[inputs > x[-1]] = self.lookup_table[-1]

    # Negative flux handling is delegated to the shared helper.
    return self._process_neg_flux(inputs, y)
GaussianAbsorption1D model function.
def evaluate(x, amplitude, mean, stddev):
    """GaussianAbsorption1D model function."""
    # Absorption profile: unity continuum minus an emission Gaussian.
    return 1.0 - Gaussian1D.evaluate(x, amplitude, mean, stddev)
GaussianAbsorption1D model function derivatives.
def fit_deriv(x, amplitude, mean, stddev):
    """GaussianAbsorption1D model function derivatives."""
    # The profile is 1 - Gaussian, so every partial derivative is the
    # negated Gaussian1D derivative.
    return [-deriv for deriv in Gaussian1D.fit_deriv(x, amplitude, mean, stddev)]
Return ``x`` array that samples the feature.
Parameters
----------
factor_step : float
Factor for sample step calculation. The step is calculated
using ``factor_step * self.fwhm``.
kwargs : dict
Keyword(s) for ``bounding_box`` calculation.
def sampleset(self, factor_step=0.05, **kwargs):
    """Return ``x`` array that samples the feature.

    Parameters
    ----------
    factor_step : float
        Factor for sample step calculation. The step is calculated
        using ``factor_step * self.fwhm``.

    kwargs : dict
        Keyword(s) for ``bounding_box`` calculation.
    """
    w1, w2 = self.bounding_box(**kwargs)
    dw = factor_step * self.fwhm
    if self._n_models == 1:
        samples = np.arange(w1, w2, dw)
    else:
        # One range per model when the instance holds a model set.
        samples = list(map(np.arange, w1, w2, dw))
    return np.asarray(samples)
Return flux in PHOTLAM. Assume input wavelength is in Angstrom.
def evaluate(self, x, *args):
    """Return flux in PHOTLAM. Assume input wavelength is in Angstrom."""
    # Dimensionless power law around the pivot wavelength x_0.
    ratio = x / self.x_0
    native_flux = (self.amplitude * ratio ** (-self.alpha)) * self._flux_unit
    return units.convert_flux(x, native_flux, units.PHOTLAM).value
Return ``x`` array that samples the feature.
def sampleset(self):
    """Return ``x`` array that samples the feature."""
    x1, x4 = self.bounding_box
    half_width = self.width * 0.5
    # Inner edges of the box around the center x_0.
    x2 = self.x_0 - half_width
    x3 = self.x_0 + half_width
    if self._n_models == 1:
        samples = [x1, x2, x3, x4]
    else:
        samples = list(zip(x1, x2, x3, x4))
    return np.asarray(samples)
From the given request, add a snippet to the page.
def get_payment_request(self, cart, request):
    """
    Charge immediately and return a JS snippet that redirects the
    customer to the thank-you page.
    """
    try:
        self.charge(cart, request)
        thank_you_url = OrderModel.objects.get_latest_url()
        return 'window.location.href="{}";'.format(thank_you_url)
    except (KeyError, stripe.error.StripeError) as err:
        # Surface payment failures to the checkout form.
        raise ValidationError(err)
Use the Stripe token from the request and charge immediately.
This view is invoked by the Javascript function `scope.charge()` delivered
by `get_payment_request`.
def charge(self, cart, request):
    """
    Use the Stripe token from the request and charge immediately.
    This view is invoked by the Javascript function `scope.charge()` delivered
    by `get_payment_request`.
    """
    # Token was stored on the cart by the checkout frontend.
    token_id = cart.extra['payment_extra_data']['token_id']
    if LooseVersion(SHOP_VERSION) < LooseVersion('0.11'):
        # Legacy django-shop (< 0.11): charge first, then create the
        # order from the cart only on success.
        charge = stripe.Charge.create(
            amount=cart.total.as_integer(),
            currency=cart.total.currency,
            source=token_id,
            description=settings.SHOP_STRIPE['PURCHASE_DESCRIPTION']
        )
        if charge['status'] == 'succeeded':
            order = OrderModel.objects.create_from_cart(cart, request)
            order.add_stripe_payment(charge)
            order.save()
    else:
        # django-shop >= 0.11: create the order first so its number can
        # be used as Stripe's transfer_group, then populate on success.
        order = OrderModel.objects.create_from_cart(cart, request)
        charge = stripe.Charge.create(
            amount=cart.total.as_integer(),
            currency=cart.total.currency,
            source=token_id,
            transfer_group=order.get_number(),
            description=settings.SHOP_STRIPE['PURCHASE_DESCRIPTION'],
        )
        if charge['status'] == 'succeeded':
            order.populate_from_cart(cart, request)
            order.add_stripe_payment(charge)
            order.save()
    if charge['status'] != 'succeeded':
        # Propagate failure; get_payment_request converts StripeError
        # into a form ValidationError.
        msg = "Stripe returned status '{status}' for id: {id}"
        raise stripe.error.InvalidRequestError(msg.format(**charge))
Refund the payment using Stripe's refunding API.
def refund_payment(self):
    """
    Refund the payment using Stripe's refunding API.
    """
    # Money class bound to this order's currency.
    Money = MoneyMaker(self.currency)
    # Only refund charges (ids starting with 'ch_') made through Stripe.
    filter_kwargs = {
        'transaction_id__startswith': 'ch_',
        'payment_method': StripePayment.namespace,
    }
    for payment in self.orderpayment_set.filter(**filter_kwargs):
        refund = stripe.Refund.create(charge=payment.transaction_id)
        if refund['status'] == 'succeeded':
            # Stripe reports amounts in subunits (e.g. cents);
            # record the refund as a negative payment on the order.
            amount = Money(refund['amount']) / Money.subunits
            OrderPayment.objects.create(order=self, amount=-amount, transaction_id=refund['id'],
                                        payment_method=StripePayment.namespace)
    del self.amount_paid  # to invalidate the cache
    if self.amount_paid:
        # proceed with other payment service providers
        super(OrderWorkflowMixin, self).refund_payment()
Create an instance of the US Weather Forecast Service with
typical starting settings.
def create(self):
    """
    Create an instance of the US Weather Forecast Service with
    typical starting settings.
    """
    self.service.create()

    # Expose zone id and uri as environment variables for immediate use.
    zone_key = predix.config.get_env_key(self.use_class, 'zone_id')
    os.environ[zone_key] = \
        self.service.settings.data['zone']['http-header-value']

    uri_key = predix.config.get_env_key(self.use_class, 'uri')
    os.environ[uri_key] = self.service.settings.data['uri']
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Add useful details to the manifest about this service
    so that it can be used in an application.

    :param manifest: An predix.admin.app.Manifest object
        instance that manages reading/writing manifest config
        for a cloud foundry app.
    """
    # Register the service itself.
    manifest.add_service(self.service.name)

    # Publish the environment variables clients will need.
    zone_key = predix.config.get_env_key(self.use_class, 'zone_id')
    manifest.add_env_var(
        zone_key, self.service.settings.data['zone']['http-header-value'])

    uri_key = predix.config.get_env_key(self.use_class, 'uri')
    manifest.add_env_var(uri_key, self.service.settings.data['uri'])

    manifest.write_manifest()
Creating this service is handled asynchronously so this method will
simply check if the create is in progress. If it is not in progress,
we could probably infer it either failed or succeeded.
def _create_in_progress(self):
"""
Creating this service is handled asynchronously so this method will
simply check if the create is in progress. If it is not in progress,
we could probably infer it either failed or succeeded.
"""
instance = self.service.service.get_instance(self.service.name)
if (instance['last_operation']['state'] == 'in progress' and
instance['last_operation']['type'] == 'create'):
return True
return False |
Create an instance of the Predix Cache Service with they typical
starting settings.
:param max_wait: service is created asynchronously, so will only wait
this number of seconds before giving up.
def create(self, max_wait=180, **kwargs):
    """
    Create an instance of the Predix Cache Service with the typical
    starting settings.

    :param max_wait: service is created asynchronously, so will only wait
        this number of seconds before giving up.
    """
    # Will need to wait for the service to be provisioned before we can
    # add service keys and get env details.
    # Bug fix: 'async' is a reserved word in Python 3.7+, so
    # `create(async=True, ...)` is a SyntaxError; pass it via **kwargs.
    self.service.create(create_keys=False, **{'async': True})
    while self._create_in_progress() and max_wait > 0:
        time.sleep(1)
        max_wait -= 1

    # Now get the service env (via service keys)
    cfg = self.service._get_service_config()
    self.service.settings.save(cfg)

    # Expose connection details as environment variables.
    host = predix.config.get_env_key(self.use_class, 'host')
    os.environ[host] = self.service.settings.data['host']

    password = predix.config.get_env_key(self.use_class, 'password')
    os.environ[password] = self.service.settings.data['password']

    port = predix.config.get_env_key(self.use_class, 'port')
    os.environ[port] = str(self.service.settings.data['port'])
Add useful details to the manifest about this service so
that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object instance
that manages reading/writing manifest config for a
cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Add useful details to the manifest about this service so
    that it can be used in an application.

    :param manifest: A predix.admin.app.Manifest object instance
        that manages reading/writing manifest config for a
        cloud foundry app.
    """
    manifest.add_service(self.service.name)

    # Publish connection details as environment variables
    # (same order as before: host, password, port).
    for key in ('host', 'password', 'port'):
        env_key = predix.config.get_env_key(self.use_class, key)
        manifest.add_env_var(env_key, self.service.settings.data[key])

    manifest.write_manifest()
Will return the uri for an existing instance.
def _get_uri(self):
"""
Will return the uri for an existing instance.
"""
if not self.service.exists():
logging.warning("Service does not yet exist.")
return self.service.settings.data['uri'] |
Will return the zone id for an existing instance.
def _get_zone_id(self):
"""
Will return the zone id for an existing instance.
"""
if not self.service.exists():
logging.warning("Service does not yet exist.")
return self.service.settings.data['zone']['http-header-value'] |
Create an instance of the Access Control Service with the typical
starting settings.
def create(self):
    """
    Create an instance of the Access Control Service with the typical
    starting settings.
    """
    self.service.create()

    # Set environment variables for immediate use.
    predix.config.set_env_value(self.use_class, 'uri', self._get_uri())
    predix.config.set_env_value(self.use_class, 'zone_id',
                                self._get_zone_id())
Grant the given client id all the scopes and authorities
needed to work with the access control service.
def grant_client(self, client_id):
    """
    Grant the given client id all the scopes and authorities
    needed to work with the access control service.
    """
    uaac = self.service.uaa.uaac
    zone_scope = self.service.settings.data['zone']['oauth-scope']
    scopes = ['openid', zone_scope,
              'acs.policies.read', 'acs.attributes.read',
              'acs.policies.write', 'acs.attributes.write']
    authorities = ['uaa.resource', zone_scope,
                   'acs.policies.read', 'acs.policies.write',
                   'acs.attributes.read', 'acs.attributes.write']
    uaac.update_client_grants(client_id, scope=scopes,
                              authorities=authorities)
    # Return the updated client record for confirmation.
    return uaac.get_client(client_id)
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Add useful details to the manifest about this service
    so that it can be used in an application.

    :param manifest: An predix.admin.app.Manifest object
        instance that manages reading/writing manifest config
        for a cloud foundry app.
    """
    # Register the service itself.
    manifest.add_service(self.service.name)

    # Publish the environment variables clients will need.
    uri_key = predix.config.get_env_key(self.use_class, 'uri')
    manifest.add_env_var(uri_key, self._get_uri())

    zone_key = predix.config.get_env_key(self.use_class, 'zone_id')
    manifest.add_env_var(zone_key, self._get_zone_id())

    manifest.write_manifest()
Generic GET with headers
def get(self, path):
    """
    Generic GET with headers
    """
    uri = self.config.get_target() + path
    headers = self._get_headers()

    logging.debug("URI=GET " + str(uri))
    logging.debug("HEADERS=" + str(headers))

    response = self.session.get(uri, headers=headers)
    if response.status_code == 200:
        return response.json()
    if response.status_code == 401:
        # Token expired or invalid -- caller should re-authenticate.
        raise predix.admin.cf.config.CloudFoundryLoginError('token invalid')
    response.raise_for_status()
Generic POST with headers
def post(self, path, data):
    """
    Generic POST with headers
    """
    uri = self.config.get_target() + path
    headers = self._post_headers()

    logging.debug("URI=POST " + str(uri))
    logging.debug("HEADERS=" + str(headers))
    logging.debug("BODY=" + str(data))

    response = self.session.post(uri, headers=headers,
                                 data=json.dumps(data))
    # CF returns 200/201/202 depending on the resource and async mode.
    if response.status_code in (200, 201, 202):
        return response.json()
    if response.status_code == 401:
        raise predix.admin.cf.config.CloudFoundryLoginError('token invalid')
    logging.debug("STATUS=" + str(response.status_code))
    logging.debug("CONTENT=" + str(response.content))
    response.raise_for_status()
Generic DELETE with headers
def delete(self, path, data=None, params=None):
    """
    Generic DELETE with headers
    """
    uri = self.config.get_target() + path
    headers = {
        'Authorization': self.config.get_access_token()
    }

    logging.debug("URI=DELETE " + str(uri))
    logging.debug("HEADERS=" + str(headers))

    response = self.session.delete(
        uri, headers=headers, params=params, data=json.dumps(data))
    # 204 No Content indicates a successful delete.
    if response.status_code == 204:
        return response
    logging.debug("STATUS=" + str(response.status_code))
    logging.debug("CONTENT=" + str(response.content))
    response.raise_for_status()
Returns a flat list of the names for the organizations
user belongs.
def get_orgs(self):
    """
    Returns a flat list of the names for the organizations
    user belongs.
    """
    return [resource['entity']['name']
            for resource in self._get_orgs()['resources']]
Returns a flat list of the names for the apps in
the organization.
def get_apps(self):
    """
    Returns a flat list of the names for the apps in
    the organization.
    """
    return [resource['entity']['name']
            for resource in self._get_apps()['resources']]
Calls CF's associate user with org. Valid roles include `user`, `auditor`,
`manager`,`billing_manager`
def add_user(self, user_name, role='user'):
    """
    Associate the user with the org via the CF API. Valid roles include
    `user`, `auditor`, `manager`, `billing_manager`.
    """
    role_uri = self._get_role_uri(role=role)
    return self.api.put(path=role_uri, data={'username': user_name})
Calls CF's remove user with org
def remove_user(self, user_name, role):
    """
    Remove the user's role association from the org via the CF API.
    """
    role_uri = self._get_role_uri(role=role)
    return self.api.delete(path=role_uri, data={'username': user_name})
add messages to the rx_queue
:param id: str message Id
:param body: str the message body
:param tags: dict[string->string] tags to be associated with the message
:return: self
def add_message(self, id, body, tags=False):
    """
    Add a message to the tx_queue to be published.

    (Docstring corrected: messages are appended to the *tx* queue for
    sending; the rx_queue holds acknowledgements.)

    :param id: str message Id
    :param body: str the message body
    :param tags: dict[string->string] tags to be associated with the message
    :return: self
    """
    if not tags:
        tags = {}
    # The tx queue is shared with the publisher; guard it with the lock.
    # `with` is equivalent to the previous acquire/try/finally/release.
    with self._tx_queue_lock:
        self._tx_queue.append(
            EventHub_pb2.Message(id=id, body=body, tags=tags,
                                 zone_id=self.eventhub_client.zone_id))
    return self
Publish all messages that have been added to the queue for configured protocol
:return: None
def publish_queue(self):
    """
    Publish all messages that have been added to the queue for configured protocol
    :return: the rx_queue of received acknowledgements
    """
    # Record when we last attempted a send.
    self.last_send_time = time.time()
    try:
        self._tx_queue_lock.acquire()
        # Snapshot ack count and batch size so the sync path below can
        # wait for exactly this batch's acknowledgements.
        start_length = len(self._rx_queue)
        publish_amount = len(self._tx_queue)
        if self.config.protocol == PublisherConfig.Protocol.GRPC:
            self._publish_queue_grpc()
        else:
            self._publish_queue_wss()
        self._tx_queue = []
    finally:
        self._tx_queue_lock.release()
    if self.config.publish_type == self.config.Type.SYNC:
        # Busy-wait until all acks for this batch arrive or we time out.
        start_time = time.time()
        while time.time() - start_time < self.config.sync_timeout and \
                len(self._rx_queue) - start_length < publish_amount:
            pass
    return self._rx_queue
generator for acks to yield messages to the user in a async configuration
:return: messages as they come in
def ack_generator(self):
    """
    Yield acknowledgement messages to the user as they arrive
    (async publishers only).

    :return: messages as they come in
    """
    if self.config.is_sync():
        logging.warning('cant use generator on a sync publisher')
        return
    while self._run_ack_generator:
        # Drain whatever acknowledgements have queued up so far.
        while self._rx_queue:
            logging.debug('yielding to client')
            yield self._rx_queue.pop(0)
    return
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.