code
stringlengths 75
104k
| docstring
stringlengths 1
46.9k
|
|---|---|
def get_worksheet(self, id_or_name):
    """Return the worksheet identified by *id_or_name*, or ``None`` when the request fails."""
    endpoint = self._endpoints.get('get_worksheet').format(id=quote(id_or_name))
    response = self.session.get(self.build_url(endpoint))
    if response:
        return self.worksheet_constructor(parent=self, **{self._cloud_data_key: response.json()})
    return None
|
Gets a specific worksheet by id or name
|
def remove_record(self, record):
    """Remove an already accepted record from the community.

    :param record: Record object.
    :type record: `invenio_records.api.Record`
    """
    # Nothing to do when the record is not part of this community.
    if not self.has_record(record):
        current_app.logger.warning(
            'Community removal: record {uuid} was not in community '
            '"{comm}"'.format(uuid=record.id, comm=self.id))
        return
    key = current_app.config['COMMUNITIES_RECORD_KEY']
    record[key] = [c for c in record[key] if c != self.id]
    # Keep the OAI set in sync when the OAI integration is enabled.
    if current_app.config['COMMUNITIES_OAI_ENABLED']:
        if self.oaiset.has_record(record):
            self.oaiset.remove_record(record)
|
Remove an already accepted record from the community.
:param record: Record object.
:type record: `invenio_records.api.Record`
|
def fetch_command(self, global_options, subcommand):
    """Look up *subcommand* among the available commands.

    Prints an error message mentioning the program name (usually
    "uliweb") and exits the process when the subcommand is unknown.
    """
    commands = self.get_commands(global_options)
    if subcommand not in commands:
        sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\nMany commands will only run at project directory, maybe the directory is not right.\n" % \
            (subcommand, self.prog_name))
        sys.exit(1)
    return commands[subcommand]
|
Tries to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"uliweb") if it can't be found.
|
def _sanity_check_coerce_type_outside_of_fold(ir_blocks):
    """Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block."""
    inside_fold = False
    for current_block, next_block in pairwise(ir_blocks):
        if isinstance(current_block, Fold):
            inside_fold = True

        # Outside a fold, every CoerceType must be immediately followed
        # by a MarkLocation or Filter block.
        if not inside_fold and isinstance(current_block, CoerceType):
            if not isinstance(next_block, (MarkLocation, Filter)):
                raise AssertionError(u'Expected MarkLocation or Filter after CoerceType, '
                                     u'but none was found: {}'.format(ir_blocks))

        if isinstance(next_block, Unfold):
            inside_fold = False
|
Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block.
|
def main(argv):
    """Command-line entry point for fodtlmon.

    Parses the console arguments and then either starts the web service,
    runs the fuzzer, runs a system loaded from a JSON file, or monitors a
    formula against a trace with the selected monitor.

    :param argv: console arguments (argv[0] is the program name)
    :return: None
    """
    input_file = ""
    output_file = ""
    monitor = None
    formula = None
    trace = None
    iformula = None
    itrace = None
    isys = None
    fuzzer = False
    l2m = False
    debug = False
    rounds = 1
    server_port = 8080
    webservice = False
    help_str_extended = "fodtlmon V 0.1 .\n" + \
                        "For more information see fodtlmon home page\n Usage : mon.py [OPTIONS] formula trace" + \
                        "\n -h \t--help " + "\t display this help and exit" + \
                        "\n -i \t--input= [file] " + "\t the input file" + \
                        "\n -o \t--output= [path]" + "\t the output file" + \
                        "\n -f \t--formula " + "\t the formula" + \
                        "\n \t--iformula " + "\t path to file that contains the formula" + \
                        "\n -t \t--trace " + "\t the trace" + \
                        "\n \t--itrace " + "\t path to file that contains the trace" + \
                        "\n -1 \t--ltl " + "\t use LTL monitor" + \
                        "\n \t--l2m " + "\t call ltl2mon also" + \
                        "\n -2 \t--fotl " + "\t use FOTL monitor" + \
                        "\n -3 \t--dtl " + "\t use DTL monitor" + \
                        "\n -4 \t--fodtl " + "\t use FODTL monitor" + \
                        "\n \t--sys= [file] " + "\t Run a system from json file" + \
                        "\n \t--rounds= int " + "\t Number of rounds to run in the system" + \
                        "\n -z \t--fuzzer " + "\t run fuzzing tester" + \
                        "\n -d \t--debug " + "\t enable debug mode" + \
                        "\n \t--server " + "\t start web service" + \
                        "\n \t--port= int " + "\t server port number" + \
                        "\n\nReport fodtlmon bugs to walid.benghabrit@mines-nantes.fr" + \
                        "\nfodtlmon home page: <https://github.com/hkff/fodtlmon>" + \
                        "\nfodtlmon is a free software released under GPL 3"
    # Checking options
    try:
        # BUG FIX: a missing comma previously fused "formula=" and "ltl" into
        # the single long option "formula=ltl", breaking both --formula and
        # --ltl.
        opts, args = getopt.getopt(argv[1:], "hi:o:f:t:1234zd",
                                   ["help", "input=", "output=", "trace=", "formula=", "ltl", "fotl", "dtl",
                                    "fodtl", "sys=", "fuzzer", "itrace=", "iformula=", "rounds=", "l2m", "debug",
                                    "server", "port="])
    except getopt.GetoptError:
        print(help_str_extended)
        sys.exit(2)
    if len(opts) == 0:
        print(help_str_extended)
    # Handling options
    # BUG FIX: single long options were previously compared with
    # `opt in "--name"`, which is a substring test; use == instead.
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            print(help_str_extended)
            sys.exit()
        elif opt in ("-i", "--input"):
            input_file = arg
        elif opt in ("-o", "--output"):
            output_file = arg
        elif opt in ("-1", "--ltl"):
            monitor = Ltlmon
        elif opt in ("-2", "--fotl"):
            monitor = Fotlmon
        elif opt in ("-3", "--dtl"):
            monitor = Dtlmon
        elif opt in ("-4", "--fodtl"):
            monitor = Fodtlmon
        elif opt in ("-f", "--formula"):
            formula = arg
        elif opt in ("-t", "--trace"):
            trace = arg
        elif opt == "--sys":
            isys = arg
        elif opt == "--rounds":
            rounds = int(arg)
        elif opt in ("-z", "--fuzzer"):
            fuzzer = True
        elif opt == "--iformula":
            iformula = arg
        elif opt == "--itrace":
            itrace = arg
        elif opt == "--l2m":
            l2m = True
        elif opt in ("-d", "--debug"):
            debug = True
        elif opt == "--server":
            webservice = True
        elif opt == "--port":
            server_port = int(arg)
    # Web service mode: start the server and return.
    if webservice:
        Webservice.start(server_port)
        return
    # Fuzzer mode: run the generator-based tester for the chosen monitor.
    if fuzzer:
        if monitor is Ltlmon:
            run_ltl_tests(monitor="ltl", alphabet=["P"], constants=["a", "b", "c"], trace_lenght=10000, formula_depth=5,
                          formula_nbr=10000, debug=debug)
        elif monitor is Dtlmon:
            run_dtl_tests()
        return
    # Load trace / formula from files when requested.
    if itrace is not None:
        with open(itrace, "r") as f:
            trace = f.read()
    if iformula is not None:
        with open(iformula, "r") as f:
            formula = f.read()
    # System mode: parse the JSON description and run it for `rounds` rounds.
    if isys is not None:
        with open(isys, "r") as f:
            js = f.read()
        s = System.parseJSON(js)
        for x in range(rounds):
            s.run()
        return
    # Monitoring mode: requires a monitor class, a trace and a formula.
    if None not in (monitor, trace, formula):
        tr = Trace().parse(trace)
        # A formula starting with ":" is evaluated as a Python expression;
        # otherwise it is parsed by the FODTL parser.
        fl = eval(formula[1:]) if formula.startswith(":") else FodtlParser.parse(formula)
        mon = monitor(fl, tr)
        res = mon.monitor(debug=debug)
        print("")
        print("Trace   : %s" % tr)
        print("Formula : %s" % fl)
        print("Code    : %s" % fl.toCODE())
        print("PPrint  : %s" % fl.prefix_print())
        print("TSPASS  : %s" % fl.toTSPASS())
        print("LTLFO   : %s" % fl.toLTLFO())
        print("Result  : %s" % res)
        if l2m:
            print(fl.toLTLFO())
            res = ltlfo2mon(fl.toLTLFO(), tr.toLTLFO())
            print("ltl2mon : %s" % res)
|
Main mon
:param argv: console arguments
:return:
|
def setData(self, data, setName=None):
    """Assign the dataframe's columns to the AMPL entities of the same names.

    Args:
        data: The dataframe containing the data to be assigned.
        setName: Optional name of the set receiving the index values of
            the DataFrame.

    Raises:
        AMPLException: if the data assignment procedure was not successful.
    """
    # Transparently accept a pandas DataFrame by converting it first.
    if not isinstance(data, DataFrame):
        if pd is not None and isinstance(data, pd.DataFrame):
            data = DataFrame.fromPandas(data)
    if setName is None:
        call_args = (data._impl,)
    else:
        call_args = (data._impl, setName)
    lock_and_call(
        lambda: self._impl.setData(*call_args),
        self._lock
    )
|
Assign the data in the dataframe to the AMPL entities with the names
corresponding to the column names.
Args:
data: The dataframe containing the data to be assigned.
setName: The name of the set to which the indices values of the
DataFrame are to be assigned.
Raises:
AMPLException: if the data assignment procedure was not successful.
|
def getConfig(self, key):
    """Return the configuration value stored under *key*, or ``False`` if absent."""
    return getattr(self, key, False)
|
Get a Config Value
|
def get_instance(self, payload):
    """Build a RoleInstance from an API payload.

    :param dict payload: Payload response from the API
    :returns: twilio.rest.chat.v2.service.role.RoleInstance
    :rtype: twilio.rest.chat.v2.service.role.RoleInstance
    """
    service_sid = self._solution['service_sid']
    return RoleInstance(self._version, payload, service_sid=service_sid)
|
Build an instance of RoleInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v2.service.role.RoleInstance
:rtype: twilio.rest.chat.v2.service.role.RoleInstance
|
def all_sharded_cluster_links(cluster_id, shard_id=None,
                              router_id=None, rel_to=None):
    """Build the full list of links attached to ShardedCluster resources."""
    rels = (
        'get-sharded-clusters', 'get-sharded-cluster-info',
        'sharded-cluster-command', 'delete-sharded-cluster',
        'add-shard', 'get-shards', 'get-configsvrs',
        'get-routers', 'add-router',
    )
    links = []
    for rel in rels:
        # Mark the link matching rel_to as the "self" relation.
        links.append(sharded_cluster_link(rel, cluster_id, shard_id, router_id,
                                          self_rel=(rel == rel_to)))
    return links
|
Get a list of all links to be included with ShardedClusters.
|
def parts(self, *args, **kwargs):
    """Retrieve the parts that belong to this activity.

    Called without arguments it returns only the Instances related to this
    task, restricted to the properties configured in the activity —
    unconfigured properties are absent from the result.

    See :class:`pykechain.Client.parts` for additional available parameters.

    Example
    -------
    >>> task = project.activity('Specify Wheel Diameter')
    >>> parts = task.parts()

    To retrieve the models only.

    >>> parts = task.parts(category=Category.MODEL)
    """
    client = self._client
    return client.parts(*args, activity=self.id, **kwargs)
|
Retrieve parts belonging to this activity.
Without any arguments it retrieves the Instances related to this task only.
This call only returns the configured properties in an activity. So properties that are not configured
are not in the returned parts.
See :class:`pykechain.Client.parts` for additional available parameters.
Example
-------
>>> task = project.activity('Specify Wheel Diameter')
>>> parts = task.parts()
To retrieve the models only.
>>> parts = task.parts(category=Category.MODEL)
|
def _filter_desc(self, indexing):
'''
Private function to filter data, goes with filter_desc
'''
# now filter data
if len(indexing) > 0:
desc_tmp = np.zeros((len(indexing),len(self.header_desc)),dtype='|S1024')
data_tmp = np.zeros((len(indexing),len(self.header_data)))
style_tmp= np.zeros((len(indexing),len(self.header_style)),dtype='|S1024')
for i in range(len(indexing)):
for j in range(len(self.header_desc)):
desc_tmp[i][j] = self.desc[indexing[i]][j]
for k in range(len(self.header_data)):
data_tmp[i][k] = self.data[indexing[i]][k]
for l in range(len(self.header_style)):
style_tmp[i][l]= self.style[indexing[i]][l]
self.desc = desc_tmp
self.data = data_tmp
self.style= style_tmp
else:
print('No filter selected or no data found!')
|
Private function to filter data, goes with filter_desc
|
def update_group_states_for_vifs(self, vifs, ack):
    """Set the security-group ack field for every given VIF."""
    keys = []
    for vif in vifs:
        keys.append(self.vif_key(vif.device_id, vif.mac_address))
    self.set_fields(keys, SECURITY_GROUP_ACK, ack)
|
Updates security groups by setting the ack field
|
def customchain(**kwargsChain):
    """ Decorator granting access to ``ctx.bitshares``, an instance of
    BitShares. Unlike ``@chain``, it accepts keyword parameters that are
    forwarded directly to the ``BitShares()`` constructor.

    .. code-block:: python

        @main.command()
        @click.option("--worker", default=None)
        @click.pass_context
        @customchain(foo="bar")
        @unlock
        def list(ctx, worker):
            print(ctx.obj)
    """
    def wrap(f):
        # @click.pass_context must be outermost so the click context is
        # delivered as the first positional argument of new_func.
        @click.pass_context
        @verbose
        def new_func(ctx, *args, **kwargs):
            # Merge the decorator's kwargs over the options stored on the
            # click context, then build a shared BitShares instance from
            # the combined options.
            newoptions = ctx.obj
            newoptions.update(kwargsChain)
            ctx.bitshares = BitShares(**newoptions)
            ctx.blockchain = ctx.bitshares
            set_shared_bitshares_instance(ctx.bitshares)
            return ctx.invoke(f, *args, **kwargs)
        # Preserve f's metadata (name, docstring) on the wrapper.
        return update_wrapper(new_func, f)
    return wrap
|
This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares. But in contrast to @chain, this is a
decorator that expects parameters that are directed right to
``BitShares()``.
... code-block::python
@main.command()
@click.option("--worker", default=None)
@click.pass_context
@customchain(foo="bar")
@unlock
def list(ctx, worker):
print(ctx.obj)
|
def stop(self):
    """Stop the webcam sensor.

    Returns True on success, False when the sensor was not running.
    """
    if not self._running:
        logging.warning('Webcam not running. Aborting stop')
        return False
    capture = self._cap
    if capture:
        capture.release()
        self._cap = None
    self._running = False
    return True
|
Stop the sensor.
|
def release(input_dict, environment_dict):
    """
    <Purpose>
    Releases the specified vessels.
    <Arguments>
    input_dict: The commanddict representing the user's input.
    environment_dict: The dictionary representing the current seash
    environment.
    <Side Effects>
    Connects to the Clearinghouse and releases vessels.
    Removes the released vessels from the list of valid targets.
    Does not guarantee that all vessels specified are released!
    <Exceptions>
    None
    <Returns>
    None
    """
    # Allow insecure SSL only when the user explicitly passed the 'insecure'
    # keyword (the argument lookup returns non-None in that case).
    allow_ssl_insecure = _get_user_argument(input_dict, 'insecure') is not None
    # The current target group names the vessels to release
    groupname = environment_dict['currenttarget']
    nodelist = seash_global_variables.targets[groupname]
    # Get the Clearinghouse vessel handles for each vessel
    retdict = seash_helper.contact_targets(nodelist, _get_clearinghouse_vessel_handle)
    clearinghouse_vesselhandles = []
    faillist = []
    # Partition results into resolvable handles and failed nodes.
    # NOTE(review): faillist is collected but never reported in this
    # function — confirm it is surfaced elsewhere before relying on it.
    for nodename in retdict:
        if retdict[nodename][0]:
            clearinghouse_vesselhandles.append(retdict[nodename][1])
        else:
            faillist.append(nodename)
    # Release!
    client = _connect_to_clearinghouse(environment_dict['currentkeyname'],
                                       allow_ssl_insecure)
    client.release_resources(clearinghouse_vesselhandles)
    # Remove each released vessel from every target group it appears in.
    # Iterate over a copy, since the underlying lists are mutated below.
    removed_nodehandles = seash_global_variables.targets[groupname][:]
    for handle in removed_nodehandles:
        for target in seash_global_variables.targets:
            if handle in seash_global_variables.targets[target]:
                seash_global_variables.targets[target].remove(handle)
|
<Purpose>
Releases the specified vessels.
<Arguments>
input_dict: The commanddict representing the user's input.
environment_dict: The dictionary representing the current seash
environment.
<Side Effects>
Connects to the Clearinghouse and releases vessels.
Removes the released vessels from the list of valid targets.
Does not guarantee that all vessels specified are released!
<Exceptions>
None
<Returns>
None
|
def _onSize(self, evt):
    """
    Called when wxEventSize is generated.
    In this application we attempt to resize to fit the window, so it
    is better to take the performance hit and redraw the whole window.
    """
    DEBUG_MSG("_onSize()", 2, self)
    # Create a new, correctly sized bitmap
    self._width, self._height = self.GetClientSize()
    self.bitmap =wx.EmptyBitmap(self._width, self._height)
    self._isDrawn = False
    if self._width <= 1 or self._height <= 1: return # Empty figure
    # Resize the figure (in inches) to match the new client area at the
    # figure's current DPI.
    dpival = self.figure.dpi
    winch = self._width/dpival
    hinch = self._height/dpival
    self.figure.set_size_inches(winch, hinch)
    # Rendering will happen on the associated paint event
    # so no need to do anything here except to make sure
    # the whole background is repainted.
    self.Refresh(eraseBackground=False)
    # Notify interested parties (e.g. the toolbar) of the size change.
    FigureCanvasBase.resize_event(self)
|
Called when wxEventSize is generated.
In this application we attempt to resize to fit the window, so it
is better to take the performance hit and redraw the whole window.
|
def new(partname, content_type):
    """Create an ``<Override>`` element carrying *partname* and *content_type*."""
    override = parse_xml('<Override xmlns="%s"/>' % nsmap['ct'])
    override.set('PartName', partname)
    override.set('ContentType', content_type)
    return override
|
Return a new ``<Override>`` element with attributes set to parameter
values.
|
def add_multiple_to_queue(self, items, container=None):
    """Append a sequence of items to the queue.

    Args:
        items (list): A sequence of items to the be added to the queue
        container (DidlObject, optional): A container object which
            includes the items.
    """
    if container is None:
        container_uri = ''  # Sonos seems to accept this as well
        container_metadata = ''  # pylint: disable=redefined-variable-type
    else:
        container_uri = container.resources[0].uri
        container_metadata = to_didl_string(container)
    # Each request can carry at most 16 items.
    chunk_size = 16
    item_list = list(items)  # List for slicing
    for start in range(0, len(item_list), chunk_size):
        chunk = item_list[start:start + chunk_size]
        uris = ' '.join(item.resources[0].uri for item in chunk)
        uri_metadata = ' '.join(to_didl_string(item) for item in chunk)
        self.avTransport.AddMultipleURIsToQueue([
            ('InstanceID', 0),
            ('UpdateID', 0),
            ('NumberOfURIs', len(chunk)),
            ('EnqueuedURIs', uris),
            ('EnqueuedURIsMetaData', uri_metadata),
            ('ContainerURI', container_uri),
            ('ContainerMetaData', container_metadata),
            ('DesiredFirstTrackNumberEnqueued', 0),
            ('EnqueueAsNext', 0)
        ])
|
Add a sequence of items to the queue.
Args:
items (list): A sequence of items to the be added to the queue
container (DidlObject, optional): A container object which
includes the items.
|
def _get_sd(file_descr):
    """
    Find the stream descriptor whose stream matches file_descr's fileno.

    :param file_descr: file object
    :return: StreamDescriptor or None
    """
    matches = (sd for sd in NonBlockingStreamReader._streams
               if sd.stream.fileno() == file_descr)
    return next(matches, None)
|
Get streamdescriptor matching file_descr fileno.
:param file_descr: file object
:return: StreamDescriptor or None
|
def init_cas_a(year):
    """Register a Cas A entry in the table of models.

    The observation year must be given to account for the time variation
    of Cas A's emission.
    """
    obs_year = float(year)
    models['CasA'] = lambda freq: cas_a(freq, obs_year)
|
Insert an entry for Cas A into the table of models. Need to specify the
year of the observations to account for the time variation of Cas A's
emission.
|
def consume(iterator, n):
"Advance the iterator n-steps ahead. If n is none, consume entirely."
# Use functions that consume iterators at C speed.
if n is None:
# feed the entire iterator into a zero-length deque
collections.deque(iterator, maxlen=0)
else:
# advance to the empty slice starting at position n
next(islice(iterator, n, n), None)
|
Advance the iterator n-steps ahead. If n is none, consume entirely.
|
def _legion_state(self, inputs, t, argv):
    """!
    @brief Compute the derivatives of the excitatory, inhibitory and potential variables of one oscillator.

    @param[in] inputs (list): Current state of the oscillator [excitatory, inhibitory, potential].
    @param[in] t (double): Current time of simulation.
    @param[in] argv (uint): Index of the oscillator (unused by the integrator itself).

    @return (list) Derivatives [dx, dy, dp] (not assigned in place).
    """
    index = argv
    x, y, p = inputs[0], inputs[1], inputs[2]

    potential_influence = heaviside(p + math.exp(-self._params.alpha * t) - self._params.teta)

    dx = (3.0 * x - x ** 3.0 + 2.0 - y
          + self._stimulus[index] * potential_influence
          + self._coupling_term[index] + self._noise[index])
    dy = self._params.eps * (self._params.gamma * (1.0 + math.tanh(x / self._params.betta)) - y)

    # Accumulate the potential contribution of every active neighbor.
    potential = 0.0
    for neighbor in self.get_neighbors(index):
        potential += self._params.T * heaviside(self._excitatory[neighbor] - self._params.teta_x)

    dp = self._params.lamda * (1.0 - p) * heaviside(potential - self._params.teta_p) - self._params.mu * p
    return [dx, dy, dp]
|
!
@brief Returns new values of excitatory and inhibitory parts of oscillator and potential of oscillator.
@param[in] inputs (list): Initial values (current) of oscillator [excitatory, inhibitory, potential].
@param[in] t (double): Current time of simulation.
@param[in] argv (uint): Extra arguments that are not used for integration - index of oscillator.
@return (list) New values of excitatoty and inhibitory part of oscillator and new value of potential (not assign).
|
def fix(self):
    """
    Fix the layouts and calculate the locations of all the widgets.

    This function should be called once all Layouts have been added to the Frame and all
    widgets added to the Layouts.

    Raises ``Highlander`` when more than one Layout claims ``fill_frame``.
    """
    # Do up to 2 passes in case we have a variable height Layout.
    fill_layout = None
    fill_height = y = 0
    for _ in range(2):
        # Pick starting point/height - varies for borders.
        if self._has_border:
            x = y = start_y = 1
            height = self._canvas.height - 2
            width = self._canvas.width - 2
        else:
            x = y = start_y = 0
            height = self._canvas.height
            width = self._canvas.width
        # Process each Layout in the Frame - getting required height for
        # each.
        for layout in self._layouts:
            if layout.fill_frame:
                if fill_layout is None:
                    # First pass - remember it for now.
                    fill_layout = layout
                elif fill_layout == layout:
                    # Second pass - pass in max height
                    y = layout.fix(x, y, width, fill_height)
                else:
                    # A second filler - this is a bug in the application.
                    raise Highlander("Too many Layouts filling Frame")
            else:
                y = layout.fix(x, y, width, height)
        # If we hit a variable height Layout - figure out the available
        # space and reset everything to the new values.
        if fill_layout is None:
            break
        else:
            fill_height = max(1, start_y + height - y)
    # Remember the resulting height of the underlying Layouts.
    self._max_height = y
    # Reset text
    # Give focus to the first Layout that can take it, skipping any that
    # raise IndexError because they have no focusable widgets.
    while self._focus < len(self._layouts):
        try:
            self._layouts[self._focus].focus(force_first=True)
            break
        except IndexError:
            self._focus += 1
    self._clear()
|
Fix the layouts and calculate the locations of all the widgets.
This function should be called once all Layouts have been added to the Frame and all
widgets added to the Layouts.
|
def _getgrnam(name, root=None):
    '''
    Pure-file alternative to getgrnam that reads only /etc/group
    '''
    group_file = os.path.join(root or '/', 'etc/group')
    with salt.utils.files.fopen(group_file) as fp_:
        for line in fp_:
            line = salt.utils.stringutils.to_unicode(line)
            comps = line.strip().split(':')
            # A valid group entry has at least name:passwd:gid:members.
            if len(comps) < 4:
                log.debug('Ignoring group line: %s', line)
                continue
            if comps[0] != name:
                continue
            # Generate a getpwnam compatible output: numeric gid and
            # members as a list.
            comps[2] = int(comps[2])
            comps[3] = comps[3].split(',') if comps[3] else []
            return grp.struct_group(comps)
    raise KeyError('getgrnam(): name not found: {}'.format(name))
|
Alternative implementation for getgrnam, that use only /etc/group
|
def hosts(self, **kwargs):
    """List the hosts of this channel via listHosts(...).

    :param **kwargs: keyword arguments to the listHosts RPC.
    :returns: deferred that when fired returns a list of hosts (dicts).
    """
    params = dict(kwargs, channelID=self.id)
    return self.connection.listHosts(**params)
|
Convenience wrapper around listHosts(...) for this channel ID.
:param **kwargs: keyword arguments to the listHosts RPC.
:returns: deferred that when fired returns a list of hosts (dicts).
|
def es_query_proto(path, selects, wheres, schema):
    """
    RETURN TEMPLATE AND PATH-TO-FILTER AS A 2-TUPLE
    :param path: THE NESTED PATH (NOT INCLUDING TABLE NAME)
    :param selects: MAP FROM path TO SELECT CLAUSES
    :param wheres: MAP FROM path TO LIST OF WHERE CONDITIONS
    :param schema: SCHEMA USED TO TRANSLATE WHERE CLAUSES TO ES FILTERS
    :return: (es_query, filters_map) TUPLE
    """
    output = None
    last_where = MATCH_ALL
    # Walk the nested paths from deepest to shallowest, wrapping the query
    # built so far ("output") into each enclosing level.
    for p in reversed(sorted( wheres.keys() | set(selects.keys()))):
        where = wheres.get(p)
        select = selects.get(p)
        if where:
            where = AndOp(where).partial_eval().to_esfilter(schema)
            if output:
                # Combine the inner (deeper) query with this level's filter.
                where = es_or([es_and([output, where]), where])
        else:
            if output:
                if last_where is MATCH_ALL:
                    where = es_or([output, MATCH_ALL])
                else:
                    where = output
            else:
                where = MATCH_ALL
        if p == ".":
            # Top level: emit the full search request body.
            output = set_default(
                {
                    "from": 0,
                    "size": 0,
                    "sort": [],
                    "query": where
                },
                select.to_es()
            )
        else:
            # Deeper level: wrap in a nested query; inner_hits carries the
            # selected fields when a select exists at this path.
            output = {"nested": {
                "path": p,
                "inner_hits": set_default({"size": 100000}, select.to_es()) if select else None,
                "query": where
            }}
        last_where = where
    return output
|
RETURN TEMPLATE AND PATH-TO-FILTER AS A 2-TUPLE
:param path: THE NESTED PATH (NOT INCLUDING TABLE NAME)
:param wheres: MAP FROM path TO LIST OF WHERE CONDITIONS
:return: (es_query, filters_map) TUPLE
|
def retrieveVals(self):
    """Collect container size and count values into the graphs."""
    container_stats = self._fileInfo.getContainerStats()
    size_graph = 'rackspace_cloudfiles_container_size'
    count_graph = 'rackspace_cloudfiles_container_count'
    for contname in self._fileContList:
        stats = container_stats.get(contname)
        if stats is None:
            continue
        if self.hasGraph(size_graph):
            self.setGraphVal(size_graph, contname, stats.get('size'))
        if self.hasGraph(count_graph):
            self.setGraphVal(count_graph, contname, stats.get('count'))
|
Retrieve values for graphs.
|
def _add_access_token_to_response(self, response, access_token):
# type: (oic.message.AccessTokenResponse, se_leg_op.access_token.AccessToken) -> None
"""
Adds the Access Token and the associated parameters to the Token Response.
"""
response['access_token'] = access_token.value
response['token_type'] = access_token.type
response['expires_in'] = access_token.expires_in
|
Adds the Access Token and the associated parameters to the Token Response.
|
def Create(self, *args, **kwargs):
    """Invoke the underlying Driver.Create(), merging creation options.

    Options may be passed as a dict via ``options=``; otherwise the
    driver-specific defaults in ``self.settings`` are used.
    """
    if not self.writable:
        raise IOError('Driver does not support raster creation')
    creation_opts = kwargs.pop('options', {}) or self.settings
    kwargs['options'] = driverdict_tolist(creation_opts)
    return self._driver.Create(*args, **kwargs)
|
Calls Driver.Create() with optionally provided creation options as
dict, or falls back to driver specific defaults.
|
def wrap(self, sock):
    """Wrap and return the given socket, plus WSGI environ entries.

    Returns ``(None, {})`` for connections that should be silently
    dropped: non-SSL engine "pings", known handshake failures, and
    handshake timeouts. Re-raises any other SSL or socket error.
    """
    EMPTY_RESULT = None, {}
    try:
        s = self.context.wrap_socket(
            sock, do_handshake_on_connect=True, server_side=True,
        )
    except ssl.SSLError as ex:
        if ex.errno == ssl.SSL_ERROR_EOF:
            # This is almost certainly due to the cherrypy engine
            # 'pinging' the socket to assert it's connectable;
            # the 'ping' isn't SSL.
            return EMPTY_RESULT
        elif ex.errno == ssl.SSL_ERROR_SSL:
            if _assert_ssl_exc_contains(ex, 'http request'):
                # The client is speaking HTTP to an HTTPS server.
                raise errors.NoSSLError
            # Check if it's one of the known errors
            # Errors that are caught by PyOpenSSL, but thrown by
            # built-in ssl
            _block_errors = (
                'unknown protocol', 'unknown ca', 'unknown_ca',
                'unknown error',
                'https proxy request', 'inappropriate fallback',
                'wrong version number',
                'no shared cipher', 'certificate unknown',
                'ccs received early',
                'certificate verify failed',  # client cert w/o trusted CA
            )
            if _assert_ssl_exc_contains(ex, *_block_errors):
                # Accepted error, let's pass
                return EMPTY_RESULT
            elif _assert_ssl_exc_contains(ex, 'handshake operation timed out'):
                # This error is thrown by builtin SSL after a timeout
                # when client is speaking HTTP to an HTTPS server.
                # The connection can safely be dropped.
                return EMPTY_RESULT
        raise
    except generic_socket_error as exc:
        """It is unclear why exactly this happens.
        It's reproducible only with openssl>1.0 and stdlib ``ssl`` wrapper.
        In CherryPy it's triggered by Checker plugin, which connects
        to the app listening to the socket port in TLS mode via plain
        HTTP during startup (from the same process).
        Ref: https://github.com/cherrypy/cherrypy/issues/1618
        """
        # Only swallow the bare (0, 'Error') produced by modern OpenSSL;
        # anything else is a real socket failure and must propagate.
        is_error0 = exc.args == (0, 'Error')
        if is_error0 and IS_ABOVE_OPENSSL10:
            return EMPTY_RESULT
        raise
    return s, self.get_environ(s)
|
Wrap and return the given socket, plus WSGI environ entries.
|
def _replace_coerce(self, to_replace, value, inplace=True, regex=False,
                    convert=False, mask=None):
    """
    Replace value corresponding to the given boolean array with another
    value.

    Parameters
    ----------
    to_replace : object or pattern
        Scalar to replace or regular expression to match.
    value : object
        Replacement object.
    inplace : bool, default True
        Perform inplace modification.
    regex : bool, default False
        If true, perform regular expression substitution.
    convert : bool, default False
        If true, try to coerce any object types to better types.
    mask : array-like of bool
        True indicate corresponding element is ignored.
        NOTE(review): documented as optional, but ``mask.any()`` is called
        unconditionally below — callers must supply a mask array.

    Returns
    -------
    A new block if there is anything to replace or the original block.
    """
    if mask.any():
        # Delegate the actual replacement to the parent implementation.
        block = super()._replace_coerce(
            to_replace=to_replace, value=value, inplace=inplace,
            regex=regex, convert=convert, mask=mask)
        if convert:
            block = [b.convert(by_item=True, numeric=False, copy=True)
                     for b in block]
        return block
    # Nothing masked in: nothing to replace, keep the original block.
    return self
|
Replace value corresponding to the given boolean array with another
value.
Parameters
----------
to_replace : object or pattern
Scalar to replace or regular expression to match.
value : object
Replacement object.
inplace : bool, default False
Perform inplace modification.
regex : bool, default False
If true, perform regular expression substitution.
convert : bool, default True
If true, try to coerce any object types to better types.
mask : array-like of bool, optional
True indicate corresponding element is ignored.
Returns
-------
A new block if there is anything to replace or the original block.
|
def assume_role_credentials(self, arn):
    """Return the environment variables for an assumed role"""
    log.info("Assuming role as %s", arn)
    # Drop environment entries that are present but empty so they don't
    # shadow the credentials returned by STS.
    for name in ('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN'):
        if name in os.environ and not os.environ[name]:
            del os.environ[name]
    sts = self.amazon.session.client("sts")
    with self.catch_boto_400("Couldn't assume role", arn=arn):
        creds = sts.assume_role(RoleArn=arn, RoleSessionName="aws_syncr")
    credentials = creds["Credentials"]
    return {
        'AWS_ACCESS_KEY_ID': credentials["AccessKeyId"],
        'AWS_SECRET_ACCESS_KEY': credentials["SecretAccessKey"],
        'AWS_SECURITY_TOKEN': credentials["SessionToken"],
        'AWS_SESSION_TOKEN': credentials["SessionToken"],
    }
|
Return the environment variables for an assumed role
|
def _start(self):
    """
    Spawn the underlying send/receive greenlets and their locks.
    """
    # Semaphores start at zero so the threads block until released.
    self._recv_lock = coros.Semaphore(0)
    self._send_lock = coros.Semaphore(0)

    self._recv_thread = gevent.spawn(self._recv)
    self._send_thread = gevent.spawn(self._send)

    # Get notified if either greenlet exits.
    for thread in (self._recv_thread, self._send_thread):
        thread.link(self._thread_error)
|
Starts the underlying send and receive threads.
|
def get_memfree(memory, parallel):
    """Scale a memory spec such as '4G' by *parallel* for the memfree field."""
    amount = int(memory.rstrip(string.ascii_letters))
    # Default to gigabytes when no unit suffix was given.
    unit = memory.lstrip(string.digits) or "G"
    return "%d%s" % (amount * parallel, unit)
|
Computes the memory required for the memfree field.
|
def select_by_index(self, index):
    """Select the option whose "index" attribute equals *index*.

    The match is done on the element's "index" attribute, not by
    position in the option list.

    :Args:
        - index - The option at this index will be selected

    throws NoSuchElementException If there is no option with specified index in SELECT
    """
    wanted = str(index)
    for opt in self.options:
        if opt.get_attribute("index") != wanted:
            continue
        self._setSelected(opt)
        return
    raise NoSuchElementException("Could not locate element with index %d" % index)
|
Select the option at the given index. This is done by examing the "index" attribute of an
element, and not merely by counting.
:Args:
- index - The option at this index will be selected
throws NoSuchElementException If there is no option with specified index in SELECT
|
def clearOldCalibrations(self, date=None):
    '''
    Keep only the most recent calibration of every kind.

    (The *date* argument is accepted for interface compatibility but is
    currently unused.)
    '''
    for kind in ('dark current', 'noise'):
        self.coeffs[kind] = [self.coeffs[kind][-1]]
    for group in ('flat field', 'lens'):
        for light in self.coeffs[group]:
            self.coeffs[group][light] = [self.coeffs[group][light][-1]]
|
if not only a specific date than remove all except of the youngest calibration
|
def buscar_timeout_opcvip(self, id_ambiente_vip):
    """Fetch the ``nome_opcao_txt`` of the VIP options whose
    ``tipo_opcao`` is 'Timeout' for the given environment-vip id.

    :return: Dictionary with the following structure:

    ::

        {‘timeout_opt’: ‘timeout_opt’: <'nome_opcao_txt'>}

    :raise InvalidParameterError: Environment VIP identifier is null and invalid.
    :raise EnvironmentVipNotFoundError: Environment VIP not registered.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    # Validate before hitting the API - the server would reject it anyway.
    if not is_valid_int_param(id_ambiente_vip):
        raise InvalidParameterError(
            u'The identifier of environment-vip is invalid or was not informed.')
    url = 'environment-vip/get/timeout/{0}/'.format(id_ambiente_vip)
    code, xml = self.submit(None, 'GET', url)
    return self.response(code, xml)
|
Buscar nome_opcao_txt das Opcoes VIp quando tipo_opcao = 'Timeout' pelo environmentvip_id
:return: Dictionary with the following structure:
::
{‘timeout_opt’: ‘timeout_opt’: <'nome_opcao_txt'>}
:raise InvalidParameterError: Environment VIP identifier is null and invalid.
:raise EnvironmentVipNotFoundError: Environment VIP not registered.
:raise InvalidParameterError: finalidade_txt and cliente_txt is null and invalid.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
|
def _shrink_list(self, shrink):
    """
    Shrink list down to essentials

    :param shrink: List to shrink
    :type shrink: list
    :return: Shrunk list
    :rtype: list
    """
    # A single-element list collapses into the shrunk element itself.
    if len(shrink) == 1:
        return self.shrink(shrink[0])
    # Otherwise shrink every member and keep only the truthy results.
    shrunk_items = (self.shrink(item) for item in shrink)
    return [item for item in shrunk_items if item]
|
Shrink list down to essentials
:param shrink: List to shrink
:type shrink: list
:return: Shrunk list
:rtype: list
|
def updateTerms(self, data:list, LIMIT:int=20, _print:bool=True, crawl:bool=False,) -> list:
    """ Updates existing entities.

    Args:
        data:
            needs:
                id <str>
                ilx_id <str>
            options:
                definition <str>  # known server bug with quotations
                superclasses [{'id':<int>}]
                type: term, cde, annotation, or relationship <str>
                synonyms {'literal':<str>}
                existing_ids {'iri':<str>,'curie':<str>','change':<bool>, 'delete':<bool>}
        LIMIT:
            limit of concurrent requests
        _print:
            prints label of data presented
        crawl:
            True: Uses linear requests.
            False: Uses concurrent requests from the asyncio and aiohttp modules
    Returns:
        List of filled in data parallel with the input data. If any entity failed with an
        ignorable reason, it will return empty for the item in the list returned.
    """
    url_base = self.base_url + '/api/1/term/edit/{id}'
    merged_data = []
    # The server-side PHP is LOADED with bugs.  Safest approach: fetch the
    # full existing entity, change only what is needed, and re-upsert the
    # whole thing so nothing is accidentally dropped server-side.
    old_data = self.identifierSearches(
        [d['id'] for d in data], # just need the ids
        LIMIT = LIMIT,
        _print = _print,
        crawl = crawl,
    )
    for d in data: # d for dictionary
        url = url_base.format(id=str(d['id']))
        # Sanity check: the local record's ilx id must match what the server
        # returns for the same numeric id; a mismatch usually means the beta
        # and production endpoints were mixed up.  NOTE(review): exit() here
        # aborts the whole process - consider raising instead.
        if d['ilx'] != old_data[int(d['id'])]['ilx']:
            print(d['ilx'], old_data[int(d['id'])]['ilx'])
            exit('You might be using beta insead of production!')
        merged = scicrunch_client_helper.merge(new=d, old=old_data[int(d['id'])])
        # BUG: superclass output format differs from the input format the
        # server expects, so normalize it before posting.
        merged = scicrunch_client_helper.superclasses_bug_fix(merged)
        merged_data.append((url, merged))
    resp = self.post(
        merged_data,
        LIMIT = LIMIT,
        action = 'Updating Terms', # forced input from each function
        _print = _print,
        crawl = crawl,
    )
    return resp
|
Updates existing entities
Args:
data:
needs:
id <str>
ilx_id <str>
options:
definition <str> #bug with qutations
superclasses [{'id':<int>}]
type term, cde, anntation, or relationship <str>
synonyms {'literal':<str>}
existing_ids {'iri':<str>,'curie':<str>','change':<bool>, 'delete':<bool>}
LIMIT:
limit of concurrent
_print:
prints label of data presented
crawl:
True: Uses linear requests.
False: Uses concurrent requests from the asyncio and aiohttp modules
Returns:
List of filled in data parallel with the input data. If any entity failed with an
ignorable reason, it will return empty for the item in the list returned.
|
def read(self):
    """Reads the data stored in the files we have been initialized with. It will
    ignore files that cannot be read, possibly leaving an empty configuration

    :return: Nothing
    :raise IOError: if a file cannot be handled"""
    # Lazy and idempotent: only the first call does any parsing.
    if self._is_initialized:
        return
    self._is_initialized = True
    if not isinstance(self._file_or_files, (tuple, list)):
        files_to_read = [self._file_or_files]
    else:
        files_to_read = list(self._file_or_files)
    # end assure we have a copy of the paths to handle
    # 'seen' doubles as duplicate- and include-cycle protection.
    seen = set(files_to_read)
    num_read_include_files = 0
    while files_to_read:
        file_path = files_to_read.pop(0)
        fp = file_path
        # file_ok is True only when file_path is a filesystem path we
        # successfully opened - relative include paths can then be
        # resolved against its directory.
        file_ok = False
        if hasattr(fp, "seek"):
            # A file-like object was handed in - parse it directly.
            self._read(fp, fp.name)
        else:
            # assume a path if it is not a file-object
            try:
                with open(file_path, 'rb') as fp:
                    file_ok = True
                    self._read(fp, fp.name)
            except IOError:
                # Unreadable files are skipped silently by contract.
                continue
        # Read includes and append those that we didn't handle yet
        # We expect all paths to be normalized and absolute (and will assure that is the case)
        if self._has_includes():
            for _, include_path in self.items('include'):
                if include_path.startswith('~'):
                    include_path = osp.expanduser(include_path)
                if not osp.isabs(include_path):
                    if not file_ok:
                        continue
                    # end ignore relative paths if we don't know the configuration file path
                    assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
                    include_path = osp.join(osp.dirname(file_path), include_path)
                # end make include path absolute
                include_path = osp.normpath(include_path)
                # Skip includes already parsed and those we cannot read.
                if include_path in seen or not os.access(include_path, os.R_OK):
                    continue
                seen.add(include_path)
                # insert included file to the top to be considered first
                files_to_read.insert(0, include_path)
                num_read_include_files += 1
            # each include path in configuration file
        # end handle includes
    # END for each file object to read
    # If there was no file included, we can safely write back (potentially) the configuration file
    # without altering it's meaning
    if num_read_include_files == 0:
        self._merge_includes = False
|
Reads the data stored in the files we have been initialized with. It will
ignore files that cannot be read, possibly leaving an empty configuration
:return: Nothing
:raise IOError: if a file cannot be handled
|
def spaceout_and_resize_panels(self):
    """
    Adjust the spacing between the panels and resize them
    to meet the aspect ratio
    """
    ncol = self.ncol
    nrow = self.nrow
    figure = self.figure
    theme = self.theme
    get_property = theme.themeables.property
    # Current figure margins (matplotlib stores them as fractions of
    # the figure size) and the height taken up by the top strips.
    left = figure.subplotpars.left
    right = figure.subplotpars.right
    top = figure.subplotpars.top
    bottom = figure.subplotpars.bottom
    top_strip_height = self.strip_size('top')
    W, H = figure.get_size_inches()
    # Panel spacing from the theme; fall back to 0.1 when unset.
    try:
        spacing_x = get_property('panel_spacing_x')
    except KeyError:
        spacing_x = 0.1
    try:
        spacing_y = get_property('panel_spacing_y')
    except KeyError:
        spacing_y = 0.1
    try:
        aspect_ratio = get_property('aspect_ratio')
    except KeyError:
        # If the panels have different limits the coordinates
        # cannot compute a common aspect ratio
        if not self.free['x'] and not self.free['y']:
            aspect_ratio = self.coordinates.aspect(
                self.layout.panel_params[0])
        else:
            aspect_ratio = None
    # Strips take no vertical space when blanked out by the theme.
    if theme.themeables.is_blank('strip_text_x'):
        top_strip_height = 0
    # Account for the vertical sliding of the strip if any
    with suppress(KeyError):
        strip_margin_x = get_property('strip_margin_x')
        top_strip_height *= (1 + strip_margin_x)
    # The goal is to have equal spacing along the vertical
    # and the horizontal. We use the wspace and compute
    # the appropriate hspace. It would be a lot easier if
    # MPL had a better layout manager.
    # width of axes and height of axes
    w = ((right-left)*W - spacing_x*(ncol-1)) / ncol
    h = ((top-bottom)*H - (spacing_y+top_strip_height)*(nrow-1)) / nrow
    # aspect ratio changes the size of the figure
    if aspect_ratio is not None:
        # Keep panel width, derive panel height, then grow/shrink the
        # whole figure so the panels honour the requested ratio.
        h = w*aspect_ratio
        H = (h*nrow + (spacing_y+top_strip_height)*(nrow-1)) / \
            (top-bottom)
        figure.set_figheight(H)
    # spacing, expressed relative to the panel size as required by
    # matplotlib's subplots_adjust
    wspace = spacing_x/w
    hspace = (spacing_y + top_strip_height) / h
    figure.subplots_adjust(wspace=wspace, hspace=hspace)
|
Adjust the spacing between the panels and resize them
to meet the aspect ratio
|
def close(self):
    """
    Stop listening for new connections and close all open connections.

    :returns: Deferred that calls back once everything is closed.
    """
    assert self._opened, "RPC System is not opened"
    logger.debug("Closing rpc system. Stopping ping loop")
    # Halt the periodic ping and abort any ping round still in flight.
    self._ping_loop.stop()
    pending_ping = self._ping_current_iteration
    if pending_ping:
        pending_ping.cancel()
    return self._connectionpool.close()
|
Stop listing for new connections and close all open connections.
:returns: Deferred that calls back once everything is closed.
|
def _inferSchemaFromList(self, data, names=None):
    """
    Infer schema from list of Row or tuple.

    :param data: list of Row or tuple
    :param names: list of column names
    :return: :class:`pyspark.sql.types.StructType`
    """
    if not data:
        raise ValueError("can not infer schema from empty dataset")
    if type(data[0]) is dict:
        warnings.warn("inferring schema from dict is deprecated,"
                      "please use pyspark.sql.Row instead")
    # Fold the per-row schemas into one common schema.
    row_schemas = (_infer_schema(row, names) for row in data)
    schema = reduce(_merge_type, row_schemas)
    if _has_nulltype(schema):
        raise ValueError("Some of types cannot be determined after inferring")
    return schema
|
Infer schema from list of Row or tuple.
:param data: list of Row or tuple
:param names: list of column names
:return: :class:`pyspark.sql.types.StructType`
|
def distance_centimeters_continuous(self):
    """
    Distance detected by the sensor, in centimeters.

    The sensor keeps taking measurements, so future reads return
    fresh values.

    Prefer using the equivalent :meth:`UltrasonicSensor.distance_centimeters` property.
    """
    self._ensure_mode(self.MODE_US_DIST_CM)
    raw = self.value(0)
    return raw * self._scale('US_DIST_CM')
|
Measurement of the distance detected by the sensor,
in centimeters.
The sensor will continue to take measurements so
they are available for future reads.
Prefer using the equivalent :meth:`UltrasonicSensor.distance_centimeters` property.
|
def build(self, X, Y, w=None, edges=None):
    """ Assigns data to this object and builds the requested topological
        structure

        @ In, X, an m-by-n array of values specifying m
        n-dimensional samples
        @ In, Y, a m vector of values specifying the output
        responses corresponding to the m samples specified by X
        @ In, w, an optional m vector of values specifying the
        weights associated to each of the m samples used. Default of
        None means all points will be equally weighted
        @ In, edges, an optional list of custom edges to use as a
        starting point for pruning, or in place of a computed graph.
        NOTE(review): *edges* is not forwarded to the graph
        construction below - confirm whether it should be.
    """
    self.reset()

    # Nothing to do without both inputs and responses.
    if X is None or Y is None:
        return

    self.__set_data(X, Y, w)

    if self.debug:
        sys.stdout.write("Graph Preparation: ")
        # BUGFIX: time.clock() was removed in Python 3.8;
        # time.perf_counter() is the supported replacement for
        # measuring elapsed time.
        start = time.perf_counter()

    self.graph_rep = nglpy.Graph(
        self.Xnorm,
        self.graph,
        self.max_neighbors,
        self.beta,
        connect=self.connect,
    )

    if self.debug:
        end = time.perf_counter()
        sys.stdout.write("%f s\n" % (end - start))
|
Assigns data to this object and builds the requested topological
structure
@ In, X, an m-by-n array of values specifying m
n-dimensional samples
@ In, Y, a m vector of values specifying the output
responses corresponding to the m samples specified by X
@ In, w, an optional m vector of values specifying the
weights associated to each of the m samples used. Default of
None means all points will be equally weighted
@ In, edges, an optional list of custom edges to use as a
starting point for pruning, or in place of a computed graph.
|
def adsSyncWriteControlReqEx(
    port, address, ads_state, device_state, data, plc_data_type
):
    # type: (int, AmsAddr, int, int, Any, Type) -> None
    """Change the ADS state and the machine-state of the ADS-server.

    :param int port: local AMS port as returned by adsPortOpenEx()
    :param pyads.structs.AmsAddr address: local or remote AmsAddr
    :param int ads_state: new ADS-state, according to ADSTATE constants
    :param int device_state: new machine-state
    :param data: additional data
    :param int plc_data_type: plc datatype, according to PLCTYPE constants
    :raises ADSError: when the underlying DLL call reports an error code
    """
    ams_address_pointer = ctypes.pointer(address.amsAddrStruct())
    ads_state_c = ctypes.c_ulong(ads_state)
    device_state_c = ctypes.c_ulong(device_state)
    # Strings are marshalled as NUL-terminated UTF-8 byte buffers; every
    # other PLC type is wrapped in its corresponding ctypes object.
    if plc_data_type == PLCTYPE_STRING:
        data_pointer = ctypes.c_char_p(data.encode("utf-8"))
        data_length = len(data_pointer.value) + 1
    else:
        wrapped = plc_data_type(data)
        data_pointer = ctypes.pointer(wrapped)
        data_length = ctypes.sizeof(wrapped)
    error_code = _adsDLL.AdsSyncWriteControlReqEx(
        port,
        ams_address_pointer,
        ads_state_c,
        device_state_c,
        data_length,
        data_pointer,
    )
    if error_code:
        raise ADSError(error_code)
|
Change the ADS state and the machine-state of the ADS-server.
:param int port: local AMS port as returned by adsPortOpenEx()
:param pyads.structs.AmsAddr adr: local or remote AmsAddr
:param int ads_state: new ADS-state, according to ADSTATE constants
:param int device_state: new machine-state
:param data: additional data
:param int plc_data_type: plc datatype, according to PLCTYPE constants
|
def getTableAsCsv(self, networkId, tableType, verbose=None):
    """
    Returns a CSV representation of the table specified by the `networkId` and `tableType` parameters. All column names are included in the first row.

    :param networkId: SUID of the network containing the table
    :param tableType: Table type
    :param verbose: print more

    :returns: 200: successful operation
    """
    endpoint = self.___url + 'networks/' + str(networkId) + '/tables/' + str(tableType) + '.csv'
    return api(url=endpoint, method="GET", verbose=verbose, parse_params=False)
|
Returns a CSV representation of the table specified by the `networkId` and `tableType` parameters. All column names are included in the first row.
:param networkId: SUID of the network containing the table
:param tableType: Table type
:param verbose: print more
:returns: 200: successful operation
|
def start_resolver(finder=None, wheel_cache=None):
    """Context manager to produce a resolver.

    NOTE(review): the body is a generator with a single ``yield``; it is
    presumably decorated with ``contextlib.contextmanager`` at a site
    outside this excerpt - confirm before calling it directly.  What is
    yielded is the constructed resolver, not a 3-tuple.

    :param finder: A package finder to use for searching the index
    :type finder: :class:`~pip._internal.index.PackageFinder`
    :param wheel_cache: An optional pip wheel cache; the module-level
        ``WHEEL_CACHE`` is used when not provided.
    :return: A resolver instance (yielded)
    :rtype: :class:`~pip._internal.resolve.Resolver`
    """
    pip_command = get_pip_command()
    pip_options = get_pip_options(pip_command=pip_command)
    if not finder:
        finder = get_finder(pip_command=pip_command, pip_options=pip_options)
    if not wheel_cache:
        wheel_cache = WHEEL_CACHE
    # Make sure the cache/download directories exist before pip touches
    # them; build and source dirs are tracked temp dirs cleaned elsewhere.
    _ensure_dir(fs_str(os.path.join(wheel_cache.cache_dir, "wheels")))
    download_dir = PKGS_DOWNLOAD_DIR
    _ensure_dir(download_dir)
    _build_dir = create_tracked_tempdir(fs_str("build"))
    _source_dir = create_tracked_tempdir(fs_str("source"))
    # partialclass defers construction so extra keyword arguments
    # (e.g. req_tracker below) can be supplied later.
    preparer = partialclass(
        pip_shims.shims.RequirementPreparer,
        build_dir=_build_dir,
        src_dir=_source_dir,
        download_dir=download_dir,
        wheel_download_dir=WHEEL_DOWNLOAD_DIR,
        progress_bar="off",
        build_isolation=False,
    )
    resolver = partialclass(
        pip_shims.shims.Resolver,
        finder=finder,
        session=finder.session,
        upgrade_strategy="to-satisfy-only",
        force_reinstall=True,
        ignore_dependencies=False,
        ignore_requires_python=True,
        ignore_installed=True,
        isolated=False,
        wheel_cache=wheel_cache,
        use_user_site=False,
    )
    try:
        # pip >= 18 requires an active RequirementTracker while preparing.
        if packaging.version.parse(pip_shims.shims.pip_version) >= packaging.version.parse('18'):
            with pip_shims.shims.RequirementTracker() as req_tracker:
                preparer = preparer(req_tracker=req_tracker)
                yield resolver(preparer=preparer)
        else:
            preparer = preparer()
            yield resolver(preparer=preparer)
    finally:
        # Always release the shared pip session.
        finder.session.close()
|
Context manager to produce a resolver.
:param finder: A package finder to use for searching the index
:type finder: :class:`~pip._internal.index.PackageFinder`
:return: A 3-tuple of finder, preparer, resolver
:rtype: (:class:`~pip._internal.operations.prepare.RequirementPreparer`, :class:`~pip._internal.resolve.Resolver`)
|
def find_by_id(self, organization_export, params=None, **options):
    """Returns details of a previously-requested Organization export.

    Parameters
    ----------
    organization_export : {Id} Globally unique identifier for the Organization export.
    [params] : {Object} Parameters for the request
    """
    # BUGFIX: the original default was a mutable dict literal (params={}),
    # which is shared across all calls and can leak mutations.  Use None
    # as the sentinel and create a fresh dict per call instead.
    if params is None:
        params = {}
    path = "/organization_exports/%s" % (organization_export)
    return self.client.get(path, params, **options)
|
Returns details of a previously-requested Organization export.
Parameters
----------
organization_export : {Id} Globally unique identifier for the Organization export.
[params] : {Object} Parameters for the request
|
def format_modes(modes, full_modes=False, current_mode=None):
    """ Creates a nice readily printable Table for a list of modes.
    Used in `displays list' and the candidates list
    in `displays set'.

    NOTE(review): Python 2 only code - it relies on ``dict.iteritems``,
    ``dict.itervalues`` and the builtin ``reduce``.
    """
    # One row per mode: current marker, width, height, rate, pixel depth.
    t = table.Table(((
        '*' if mode == current_mode else '', # 0
        str(Q.CGDisplayModeGetWidth(mode)), # 1
        str(Q.CGDisplayModeGetHeight(mode)), # 2
        '@'+shorter_float_str(Q.CGDisplayModeGetRefreshRate(mode)), # 3
        format_pixelEncoding(
            Q.CGDisplayModeCopyPixelEncoding(mode))) # 4
        for mode in modes))
    t.set_key(2, 'height')
    t.set_key(3, 'rate')
    t.set_key(4, 'depth')
    t.set_alignment('height', 'l')
    t.set_alignment('rate', 'l')
    t.set_separator('height', ' x ')
    created_flags_col = False
    if full_modes:
        # Verbose listing: always show the IO flags of every mode.
        t.append_col(tuple((' '.join(get_flags_of_mode(mode))
                            for mode in modes)), key='flags')
        created_flags_col = True
    else:
        # Remove refresh rate and bit depth if they are all the same
        if len(frozenset(t.get_col('rate'))) == 1:
            t.del_col('rate')
        if len(frozenset(t.get_col('depth'))) == 1:
            t.del_col('depth')
        # Show distinct IO flags when several modes appear the same
        # lut maps a rendered row to the indices of all modes producing it.
        lut = {}
        for i, row in enumerate(t):
            row = tuple(row)
            if row not in lut:
                lut[row] = []
            elif not created_flags_col:
                # NOTE(review): created_flags_col is never set True here,
                # so append_col may run once per duplicated row - verify
                # table.Table.append_col tolerates repeated keys.
                t.append_col(('',) * len(modes), key='flags')
            lut[row].append(i)
        for rw, indices in lut.iteritems():
            if len(indices) == 1:
                continue
            flags = {}
            for i in indices:
                flags[i] = get_flags_of_mode(modes[i])
            # Flags shared by all duplicates carry no information; only
            # the distinguishing flags are displayed.
            common_flags = reduce(lambda x, y: x.intersection(y),
                                  map(frozenset, flags.itervalues()))
            for i in indices:
                t[i, 'flags'] = ' '.join(frozenset(flags[i])
                                         - common_flags)
    if created_flags_col:
        t.set_alignment('flags', 'l')
    return t
|
Creates a nice readily printable Table for a list of modes.
Used in `displays list' and the candidates list
in `displays set'.
|
def __step1(self):
    """
    For each row of the matrix, find the smallest element and
    subtract it from every element in its row. Go to Step 2.

    Returns the number of the next step (2).
    """
    # Cache the attribute lookups once; the original bound C but then
    # kept re-reading self.C inside the loops, leaving the local unused.
    C = self.C
    n = self.n
    for i in range(n):
        # Find the minimum value for this row and subtract that minimum
        # from every element in the row.
        minval = min(C[i])
        for j in range(n):
            C[i][j] -= minval
    return 2
|
For each row of the matrix, find the smallest element and
subtract it from every element in its row. Go to Step 2.
|
def _handle_double_click(self, event):
    """Focus the element under the cursor on a left-button double click."""
    if event.get_button()[1] != 1:  # only react to the left mouse button
        return
    path_info = self.tree_view.get_path_at_pos(int(event.x), int(event.y))
    if not path_info:  # the click did not land on a valid row
        return
    row_iter = self.list_store.get_iter(path_info[0])
    clicked_model = self.list_store.get_value(row_iter, self.MODEL_STORAGE_ID)
    self.model.get_state_machine_m().selection.focus = clicked_model
|
Double click with left mouse button focuses the element
|
def transform(self, X):
    """
    Apply the configured per-sample reduction to a stack of masks.

    Parameters
    ----------
    X : array-like, shape [n x m]
        The mask in form of n x m array.
    """
    def _flatten(arr):
        # Collapse each mask to a single row so the reduction can be
        # applied sample-wise along axis 1.
        return np.reshape(arr, (arr.shape[0], arr.shape[1] * arr.shape[2]))

    if self.mode_ == 'target':
        return np.apply_along_axis(self._target, 1, _flatten(X))
    if self.mode_ == 'majority':
        return np.apply_along_axis(self._majority, 1, _flatten(X))
    print('Invalid mode! Set mode to majority or target. Returning input.')
    return X
|
Parameters
----------
X : array-like, shape [n x m]
The mask in form of n x m array.
|
async def _async_connect(self):  # pragma: no cover
    """Connect and authenticate to the XMPP server. Async mode.

    :raise AuthenticationFailure: when the credentials are rejected.
    """
    try:
        conn = self.client.connected()
        self.conn_coro = conn
        # Enter the connection's async context manager by hand so the
        # stream stays open beyond this coroutine.
        self.stream = await type(conn).__aenter__(conn)
        logger.info(f"Agent {str(self.jid)} connected and authenticated.")
    except aiosasl.AuthenticationFailure:
        raise AuthenticationFailure(
            "Could not authenticate the agent. Check user and password or use auto_register=True")
|
connect and authenticate to the XMPP server. Async mode.
|
def rytov_sc(radius=5e-6, sphere_index=1.339, medium_index=1.333,
             wavelength=550e-9, pixel_size=1e-7, grid_size=(80, 80),
             center=(39.5, 39.5), radius_sampling=42):
    r"""Field behind a dielectric sphere, systematically corrected Rytov.

    This is a corrected version of :func:`qpsphere.models.rytov`: the
    `radius` :math:`r_\text{Ryt}` and the `sphere_index`
    :math:`n_\text{Ryt}` are adjusted using the approach described in
    :cite:`Mueller2018` (eqns. 3, 4, and 5).

    .. math::

        n_\text{Ryt-SC} &= n_\text{Ryt} + n_\text{med} \cdot
            \left( a_n x^2 + b_n x + c_n \right)

        r_\text{Ryt-SC} &= r_\text{Ryt} \cdot
            \left( a_r x^2 +b_r x + c_r \right)

        &\text{with} x = \frac{n_\text{Ryt}}{n_\text{med}} - 1

    The correction factors are given in
    :data:`qpsphere.models.mod_rytov_sc.RSC_PARAMS`.

    Parameters
    ----------
    radius: float
        Radius of the sphere [m]
    sphere_index: float
        Refractive index of the sphere
    medium_index: float
        Refractive index of the surrounding medium
    wavelength: float
        Vacuum wavelength of the imaging light [m]
    pixel_size: float
        Pixel size [m]
    grid_size: tuple of floats
        Resulting image size in x and y [px]
    center: tuple of floats
        Center position in image coordinates [px]
    radius_sampling: int
        Number of pixels used to sample the sphere radius when
        computing the Rytov field. The default value of 42
        pixels is a reasonable number for single-cell analysis.

    Returns
    -------
    qpi: qpimage.QPImage
        Quantitative phase data set
    """
    # Invert the correction so the plain Rytov model, run with these
    # uncorrected parameters, yields the requested corrected sphere.
    radius_ryt, index_ryt = correct_rytov_sc_input(
        radius_sc=radius,
        sphere_index_sc=sphere_index,
        medium_index=medium_index,
        radius_sampling=radius_sampling)
    qpi = mod_rytov.rytov(radius=radius_ryt,
                          sphere_index=index_ryt,
                          medium_index=medium_index,
                          wavelength=wavelength,
                          pixel_size=pixel_size,
                          grid_size=grid_size,
                          center=center,
                          radius_sampling=radius_sampling)
    # Store the requested (corrected) values as the simulation metadata.
    qpi["sim radius"] = radius
    qpi["sim index"] = sphere_index
    qpi["sim model"] = "rytov-sc"
    return qpi
|
r"""Field behind a dielectric sphere, systematically corrected Rytov
This method implements a correction of
:func:`qpsphere.models.rytov`, where the
`radius` :math:`r_\text{Ryt}` and the `sphere_index`
:math:`n_\text{Ryt}` are corrected using
the approach described in :cite:`Mueller2018` (eqns. 3,4, and 5).
.. math::
n_\text{Ryt-SC} &= n_\text{Ryt} + n_\text{med} \cdot
\left( a_n x^2 + b_n x + c_n \right)
r_\text{Ryt-SC} &= r_\text{Ryt} \cdot
\left( a_r x^2 +b_r x + c_r \right)
&\text{with} x = \frac{n_\text{Ryt}}{n_\text{med}} - 1
The correction factors are given in
:data:`qpsphere.models.mod_rytov_sc.RSC_PARAMS`.
Parameters
----------
radius: float
Radius of the sphere [m]
sphere_index: float
Refractive index of the sphere
medium_index: float
Refractive index of the surrounding medium
wavelength: float
Vacuum wavelength of the imaging light [m]
pixel_size: float
Pixel size [m]
grid_size: tuple of floats
Resulting image size in x and y [px]
center: tuple of floats
Center position in image coordinates [px]
radius_sampling: int
Number of pixels used to sample the sphere radius when
computing the Rytov field. The default value of 42
pixels is a reasonable number for single-cell analysis.
Returns
-------
qpi: qpimage.QPImage
Quantitative phase data set
|
def keys(name, basepath='/etc/pki', **kwargs):
    '''
    Manage libvirt keys.

    name
        The name variable used to track the execution

    basepath
        Defaults to ``/etc/pki``, this is the root location used for libvirt
        keys on the hypervisor

    The following parameters are optional:

    country
        The country that the certificate should use. Defaults to US.

        .. versionadded:: 2018.3.0

    state
        The state that the certificate should use. Defaults to Utah.

        .. versionadded:: 2018.3.0

    locality
        The locality that the certificate should use.
        Defaults to Salt Lake City.

        .. versionadded:: 2018.3.0

    organization
        The organization that the certificate should use.
        Defaults to Salted.

        .. versionadded:: 2018.3.0

    expiration_days
        The number of days that the certificate should be valid for.
        Defaults to 365 days (1 year)

        .. versionadded:: 2018.3.0
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    # Grab all kwargs to make them available as pillar values
    # rename them to something hopefully unique to avoid
    # overriding anything existing
    pillar_kwargs = {}
    for key, value in six.iteritems(kwargs):
        pillar_kwargs['ext_pillar_virt.{0}'.format(key)] = value
    # Ask the libvirt ext_pillar to provide the certificate/key contents.
    pillar = __salt__['pillar.ext']({'libvirt': '_'}, pillar_kwargs)
    # Expected on-disk location for every managed key and certificate.
    paths = {
        'serverkey': os.path.join(basepath, 'libvirt',
                                  'private', 'serverkey.pem'),
        'servercert': os.path.join(basepath, 'libvirt',
                                   'servercert.pem'),
        'clientkey': os.path.join(basepath, 'libvirt',
                                  'private', 'clientkey.pem'),
        'clientcert': os.path.join(basepath, 'libvirt',
                                   'clientcert.pem'),
        'cacert': os.path.join(basepath, 'CA', 'cacert.pem')
    }
    for key in paths:
        p_key = 'libvirt.{0}.pem'.format(key)
        if p_key not in pillar:
            continue
        if not os.path.exists(os.path.dirname(paths[key])):
            os.makedirs(os.path.dirname(paths[key]))
        if os.path.isfile(paths[key]):
            # File already exists: mark for update only when the content
            # on disk differs from what the pillar provides.
            with salt.utils.files.fopen(paths[key], 'r') as fp_:
                if salt.utils.stringutils.to_unicode(fp_.read()) != pillar[p_key]:
                    ret['changes'][key] = 'update'
        else:
            ret['changes'][key] = 'new'
    if not ret['changes']:
        ret['comment'] = 'All keys are correct'
    elif __opts__['test']:
        # Test mode: report the pending update without touching the disk.
        ret['result'] = None
        ret['comment'] = 'Libvirt keys are set to be updated'
        ret['changes'] = {}
    else:
        # Write every new/updated file using the pillar-provided content.
        for key in ret['changes']:
            with salt.utils.files.fopen(paths[key], 'w+') as fp_:
                fp_.write(
                    salt.utils.stringutils.to_str(
                        pillar['libvirt.{0}.pem'.format(key)]
                    )
                )
        ret['comment'] = 'Updated libvirt certs and keys'
    return ret
|
Manage libvirt keys.
name
The name variable used to track the execution
basepath
Defaults to ``/etc/pki``, this is the root location used for libvirt
keys on the hypervisor
The following parameters are optional:
country
The country that the certificate should use. Defaults to US.
.. versionadded:: 2018.3.0
state
The state that the certificate should use. Defaults to Utah.
.. versionadded:: 2018.3.0
locality
The locality that the certificate should use.
Defaults to Salt Lake City.
.. versionadded:: 2018.3.0
organization
The organization that the certificate should use.
Defaults to Salted.
.. versionadded:: 2018.3.0
expiration_days
The number of days that the certificate should be valid for.
Defaults to 365 days (1 year)
.. versionadded:: 2018.3.0
|
def shapely_formatter(_, vertices, codes=None):
    """`Shapely`_ style contour formatter.

    Contours are returned as a list of :class:`shapely.geometry.LineString`,
    :class:`shapely.geometry.LinearRing`, and :class:`shapely.geometry.Point`
    geometry elements.

    Filled contours return a list of :class:`shapely.geometry.Polygon`
    elements instead.

    .. note:: If possible, `Shapely speedups`_ will be enabled.

    .. _Shapely: http://toblerity.org/shapely/manual.html
    .. _Shapely speedups: http://toblerity.org/shapely/manual.html#performance

    See Also
    --------
    `descartes <https://bitbucket.org/sgillies/descartes/>`_ : Use `Shapely`_
    or GeoJSON-like geometric objects as matplotlib paths and patches.
    """
    elements = []
    if codes is None:
        # Unfilled contours: each vertex array is an open or closed line.
        for vertices_ in vertices:
            if np.all(vertices_[0, :] == vertices_[-1, :]):
                # First and last vertex coincide: a point or a closed ring.
                # BUGFIX: test the vertex count of THIS contour
                # (vertices_), not the number of contours (vertices).
                if len(vertices_) < 3:
                    elements.append(Point(vertices_[0, :]))
                else:
                    elements.append(LinearRing(vertices_))
            else:
                # Contour is open.
                elements.append(LineString(vertices_))
    else:
        # Filled contours: each (vertices, codes) pair encodes a polygon
        # whose first ring is the exterior and the rest are holes.
        for vertices_, codes_ in zip(vertices, codes):
            starts = np.nonzero(codes_ == MPLPATHCODE.MOVETO)[0]
            stops = np.nonzero(codes_ == MPLPATHCODE.CLOSEPOLY)[0]
            try:
                rings = [LinearRing(vertices_[start:stop+1, :])
                         for start, stop in zip(starts, stops)]
                elements.append(Polygon(rings[0], rings[1:]))
            except ValueError:
                # A LinearRing needs >= 3 points; verify the error comes
                # from a degenerate (single point) ring.  BUGFIX: use the
                # start/stop index arrays - the comprehension variables
                # `start`/`stop` do not leak out in Python 3.
                if np.any(stops - starts - 1 == 0):
                    if stops[0] < starts[0]+2:
                        # The exterior ring is a single point:
                        # drop the whole polygon.
                        pass
                    else:
                        # A hole is a single point: rebuild without it.
                        rings = [
                            LinearRing(vertices_[start:stop+1, :])
                            for start, stop in zip(starts, stops)
                            if stop >= start+2]
                        elements.append(Polygon(rings[0], rings[1:]))
                else:
                    # Unknown failure - re-raise with original traceback.
                    raise
    return elements
|
`Shapely`_ style contour formatter.
Contours are returned as a list of :class:`shapely.geometry.LineString`,
:class:`shapely.geometry.LinearRing`, and :class:`shapely.geometry.Point`
geometry elements.
Filled contours return a list of :class:`shapely.geometry.Polygon`
elements instead.
.. note:: If possible, `Shapely speedups`_ will be enabled.
.. _Shapely: http://toblerity.org/shapely/manual.html
.. _Shapely speedups: http://toblerity.org/shapely/manual.html#performance
See Also
--------
`descartes <https://bitbucket.org/sgillies/descartes/>`_ : Use `Shapely`_
or GeoJSON-like geometric objects as matplotlib paths and patches.
|
def plot_data():
    '''Plot sample data up with the fancy colormaps.

    NOTE(review): depends on module-level names ``fnames``, ``test``,
    ``cmaps``, ``np``, ``mpl`` and ``plt`` defined elsewhere in this
    file; writes one PNG per input file into ``figures/``.
    '''
    # Properties to plot - one subplot per variable.
    var = ['temp', 'oxygen', 'salinity', 'fluorescence-ECO', 'density', 'PAR', 'turbidity', 'fluorescence-CDOM']
    # colorbar limits for each property
    lims = np.array([[26, 33], [0, 10], [0, 36], [0, 6], [1005, 1025], [0, 0.6], [0, 2], [0, 9]]) # reasonable values
    # lims = np.array([[20,36], [26,33], [1.5,5.6], [0,4], [0,9], [0,1.5]]) # values to show colormaps
    for fname in fnames:
        # 4x2 grid of panels, one figure per data file.
        fig, axes = plt.subplots(nrows=4, ncols=2)
        fig.set_size_inches(20, 10)
        fig.subplots_adjust(top=0.95, bottom=0.01, left=0.2, right=0.99, wspace=0.0, hspace=0.07)
        i = 0
        for ax, Var, cmap in zip(axes.flat, var, cmaps): # loop through data to plot up
            # get variable data
            lat, lon, z, data = test.read(Var, fname)
            # Scatter of latitude vs depth, colored by the variable value.
            map1 = ax.scatter(lat, -z, c=data, cmap=cmap, s=10, linewidths=0., vmin=lims[i, 0], vmax=lims[i, 1])
            # no stupid offset
            y_formatter = mpl.ticker.ScalarFormatter(useOffset=False)
            ax.xaxis.set_major_formatter(y_formatter)
            # Only panel i == 6 keeps axis labels; others are cleared.
            if i == 6:
                ax.set_xlabel('Latitude [degrees]')
                ax.set_ylabel('Depth [m]')
            else:
                ax.set_xticklabels([])
                ax.set_yticklabels([])
            ax.set_ylim(-z.max(), 0)
            ax.set_xlim(lat.min(), lat.max())
            cb = plt.colorbar(map1, ax=ax, pad=0.02)
            cb.set_label(cmap.name + ' [' + '$' + cmap.units + '$]')
            i += 1
        fig.savefig('figures/' + fname.split('.')[0] + '.png', bbox_inches='tight')
|
Plot sample data up with the fancy colormaps.
|
def config_acl(args):
    ''' Retrieve access control list for a method configuration'''
    response = fapi.get_repository_config_acl(args.namespace, args.config,
                                              args.snapshot_id)
    fapi._check_response_code(response, 200)
    # Sort entries by user for stable, readable output.
    acl_entries = sorted(response.json(), key=lambda entry: entry['user'])
    # One "user<TAB>role" line per ACL entry.
    return map(lambda entry: '{0}\t{1}'.format(entry['user'], entry['role']),
               acl_entries)
|
Retrieve access control list for a method configuration
|
def set_current_limit(self, value, channel=1):
    """Set the output current limit for one channel.

    channel: 1=OP1, 2=OP2, AUX is not supported
    """
    self.write("I%d %f" % (channel, value))
|
channel: 1=OP1, 2=OP2, AUX is not supported
|
def get_type_hints(obj, globalns=None, localns=None):
    """Return type hints for an object.
    This is often the same as obj.__annotations__, but it handles
    forward references encoded as string literals, and if necessary
    adds Optional[t] if a default value equal to None is set.
    The argument may be a module, class, method, or function. The annotations
    are returned as a dictionary. For classes, annotations include also
    inherited members.
    TypeError is raised if the argument is not of a type that can contain
    annotations, and an empty dictionary is returned if no annotations are
    present.
    BEWARE -- the behavior of globalns and localns is counterintuitive
    (unless you are familiar with how eval() and exec() work). The
    search order is locals first, then globals.
    - If no dict arguments are passed, an attempt is made to use the
    globals from obj (or the respective module's globals for classes),
    and these are also used as the locals. If the object does not appear
    to have globals, an empty dictionary is used.
    - If one dict argument is passed, it is used for both globals and
    locals.
    - If two dict arguments are passed, they specify globals and
    locals, respectively.
    """
    # Objects that opted out via @no_type_check report no hints at all.
    if getattr(obj, '__no_type_check__', None):
        return {}
    # Classes require a special treatment.
    if isinstance(obj, type):
        hints = {}
        # Walk the MRO from most-base to most-derived so subclass
        # annotations override inherited ones.
        for base in reversed(obj.__mro__):
            if globalns is None:
                # Resolve forward references in each base's own module.
                base_globals = sys.modules[base.__module__].__dict__
            else:
                base_globals = globalns
            ann = base.__dict__.get('__annotations__', {})
            for name, value in ann.items():
                if value is None:
                    # A literal None annotation means NoneType.
                    value = type(None)
                if isinstance(value, str):
                    # String annotations are forward references.
                    value = _ForwardRef(value)
                value = _eval_type(value, base_globals, localns)
                hints[name] = value
        return hints
    if globalns is None:
        if isinstance(obj, types.ModuleType):
            globalns = obj.__dict__
        else:
            globalns = getattr(obj, '__globals__', {})
        if localns is None:
            localns = globalns
    elif localns is None:
        localns = globalns
    hints = getattr(obj, '__annotations__', None)
    if hints is None:
        # Return empty annotations for something that _could_ have them.
        if isinstance(obj, _allowed_types):
            return {}
        else:
            raise TypeError('{!r} is not a module, class, method, '
                            'or function.'.format(obj))
    # Resolve each hint; wrap in Optional when the parameter's default
    # value is None.
    defaults = _get_defaults(obj)
    hints = dict(hints)
    for name, value in hints.items():
        if value is None:
            value = type(None)
        if isinstance(value, str):
            value = _ForwardRef(value)
        value = _eval_type(value, globalns, localns)
        if name in defaults and defaults[name] is None:
            value = Optional[value]
        hints[name] = value
    return hints
|
Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
forward references encoded as string literals, and if necessary
adds Optional[t] if a default value equal to None is set.
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
inherited members.
TypeError is raised if the argument is not of a type that can contain
annotations, and an empty dictionary is returned if no annotations are
present.
BEWARE -- the behavior of globalns and localns is counterintuitive
(unless you are familiar with how eval() and exec() work). The
search order is locals first, then globals.
- If no dict arguments are passed, an attempt is made to use the
globals from obj (or the respective module's globals for classes),
and these are also used as the locals. If the object does not appear
to have globals, an empty dictionary is used.
- If one dict argument is passed, it is used for both globals and
locals.
- If two dict arguments are passed, they specify globals and
locals, respectively.
|
def average_data(self,ranges=[[None,None]],percentile=None):
    """
    Given a list of time ranges (in seconds), return point statistics for
    every sweep. Expects something like::

        ranges=[[1,2],[4,5],[7,7.5]]

    ``None`` bounds are replaced with the start/end of the sweep.
    For baseline subtraction, make a baseline range then subtract it
    yourself.

    Returns ``datas[iSweep][iRange][AVorSD]`` where index 0 is the average
    (or the requested percentile) and index 1 is the standard deviation.

    If a percentile is given, return that percentile rather than average.
    percentile=50 is the median, but requires sorting, and is slower.
    """
    # Work on a copy so the caller's list (and the shared mutable
    # default) is never modified in place.
    ranges=copy.deepcopy(ranges)
    # Convert second-based bounds into sample indexes.
    for i in range(len(ranges)):
        if ranges[i][0] is None:
            ranges[i][0] = 0
        else:
            ranges[i][0] = int(ranges[i][0]*self.rate)
        # A None end bound is left as None so the slice runs through the
        # final sample (the previous -1 sentinel silently dropped it).
        if ranges[i][1] is not None:
            ranges[i][1] = int(ranges[i][1]*self.rate)
    # datas[sweep][range] = [average (or percentile), standard deviation]
    datas=np.empty((self.sweeps,len(ranges),2))
    for iSweep in range(self.sweeps):
        self.setSweep(iSweep)  # presumably updates self.dataY -- TODO confirm
        for iRange in range(len(ranges)):
            I1=ranges[iRange][0]
            I2=ranges[iRange][1]
            if percentile:
                datas[iSweep][iRange][0]=np.percentile(self.dataY[I1:I2],percentile)
            else:
                datas[iSweep][iRange][0]=np.average(self.dataY[I1:I2])
            datas[iSweep][iRange][1]=np.std(self.dataY[I1:I2])
    return datas
|
given a list of ranges, return single point averages for every sweep.
Units are in seconds. Expects something like:
ranges=[[1,2],[4,5],[7,7.5]]
None values will be replaced with maximum/minimum bounds.
For baseline subtraction, make a baseline range then subtract it yourself.
returns datas[iSweep][iRange][AVorSD]
if a percentile is given, return that percentile rather than average.
percentile=50 is the median, but requires sorting, and is slower.
|
def expanding_stdize(obj, **kwargs):
    """Standardize a pandas object column-wise on expanding window.

    **kwargs -> passed to `obj.expanding`

    Each element is centred by the expanding mean and scaled by the
    expanding standard deviation computed up to (and including) it.

    Example
    -------
    df = pd.DataFrame(np.random.randn(10, 3))
    print(expanding_stdize(df, min_periods=5))
    """
    window = obj.expanding(**kwargs)
    return obj.sub(window.mean()).div(window.std())
|
Standardize a pandas object column-wise on expanding window.
**kwargs -> passed to `obj.expanding`
Example
-------
df = pd.DataFrame(np.random.randn(10, 3))
print(expanding_stdize(df, min_periods=5))
0 1 2
0 NaN NaN NaN
1 NaN NaN NaN
2 NaN NaN NaN
3 NaN NaN NaN
4 0.67639 -1.03507 0.96610
5 0.95008 -0.26067 0.27761
6 1.67793 -0.50816 0.19293
7 1.50364 -1.10035 -0.87859
8 -0.64949 0.08028 -0.51354
9 0.15280 -0.73283 -0.84907
|
def _all_escape(self):
    """Toggle between escaped and unescaped display of all URLs."""
    # Toggle all escaped URLs: flip the flag and swap the two URL lists.
    self.unesc = not self.unesc
    self.urls, self.urls_unesc = self.urls_unesc, self.urls
    urls = iter(self.urls)
    # assumes self.items order matches self.urls order -- TODO confirm
    for item in self.items:
        # Each Column has (Text, Button). Update the Button label
        if isinstance(item, urwid.Columns):
            item[1].set_label(shorten_url(next(urls),
                                          self.size[0],
                                          self.shorten))
|
Toggle between escaped and unescaped display of all URLs.
|
def usable_ids(cls, id, accept_multi=True):
    """Resolve an input identifier (numeric id or cn) into id(s)."""
    # A numeric input is taken verbatim; otherwise fall back to a cn lookup.
    try:
        ids = [int(id)]
    except ValueError:
        try:
            ids = cls.from_cn(id)
        except Exception:
            ids = None
    valid = bool(ids) and (accept_multi or len(ids) == 1)
    if not valid:
        cls.error('unknown identifier %s' % id)
    if accept_multi:
        return ids
    return ids[0]
|
Retrieve id from input which can be an id or a cn.
|
def installed(name,
              pkgs=None,
              dir=None,
              user=None,
              force_reinstall=False,
              registry=None,
              env=None):
    '''
    Verify that the given package is installed and is at the correct version
    (if specified).
    .. code-block:: yaml
        coffee-script:
          npm.installed:
            - user: someuser
        coffee-script@1.0.1:
          npm.installed: []
    name
        The package to install
        .. versionchanged:: 2014.7.2
            This parameter is no longer lowercased by salt so that
            case-sensitive NPM package names will work.
    pkgs
        A list of packages to install with a single npm invocation; specifying
        this argument will ignore the ``name`` argument
        .. versionadded:: 2014.7.0
    dir
        The target directory in which to install the package, or None for
        global installation
    user
        The user to run NPM with
        .. versionadded:: 0.17.0
    registry
        The NPM registry from which to install the package
        .. versionadded:: 2014.7.0
    env
        A list of environment variables to be set prior to execution. The
        format is the same as the :py:func:`cmd.run <salt.states.cmd.run>`.
        state function.
        .. versionadded:: 2014.7.0
    force_reinstall
        Install the package even if it is already installed
    '''
    ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
    # Normalise the single-name form into a list so both call styles share
    # one code path.
    pkg_list = pkgs if pkgs else [name]
    try:
        installed_pkgs = __salt__['npm.list'](dir=dir, runas=user, env=env, depth=0)
    except (CommandNotFoundError, CommandExecutionError) as err:
        ret['result'] = False
        ret['comment'] = 'Error looking up \'{0}\': {1}'.format(name, err)
        return ret
    else:
        installed_pkgs = dict((p, info)
                              for p, info in six.iteritems(installed_pkgs))
    pkgs_satisfied = []
    pkgs_to_install = []
    def _pkg_is_installed(pkg, installed_pkgs):
        '''
        Helper function to determine if a package is installed
        This performs more complex comparison than just checking
        keys, such as examining source repos to see if the package
        was installed by a different name from the same repo
        :pkg str: The package to compare
        :installed_pkgs: A dictionary produced by npm list --json

        NOTE(review): this closure actually reads ``pkg_name`` from the
        enclosing loop, not its ``pkg`` parameter, so it must only be
        called after ``pkg_name`` has been assigned.
        '''
        if (pkg_name in installed_pkgs and
            'version' in installed_pkgs[pkg_name]):
            return True
        # Check to see if we are trying to install from a URI
        elif '://' in pkg_name: # TODO Better way?
            for pkg_details in installed_pkgs.values():
                try:
                    pkg_from = pkg_details.get('from', '').split('://')[1]
                    # Catch condition where we may have specified package as
                    # git://github.com/foo/bar but packager describes it as
                    # git://github.com/foo/bar.git in the package
                    if not pkg_from.endswith('.git') and pkg_name.startswith('git://'):
                        pkg_from += '.git'
                    if pkg_name.split('://')[1] == pkg_from:
                        return True
                except IndexError:
                    pass
        return False
    for pkg in pkg_list:
        # Valid:
        #
        # @google-cloud/bigquery@^0.9.6
        # @foobar
        # buffer-equal-constant-time@1.0.1
        # coffee-script
        matches = re.search(r'^(@?[^@\s]+)(?:@(\S+))?', pkg)
        pkg_name, pkg_ver = matches.group(1), matches.group(2) or None
        if force_reinstall is True:
            pkgs_to_install.append(pkg)
            continue
        if not _pkg_is_installed(pkg, installed_pkgs):
            pkgs_to_install.append(pkg)
            continue
        installed_name_ver = '{0}@{1}'.format(pkg_name,
                                              installed_pkgs[pkg_name]['version'])
        # If given an explicit version check the installed version matches.
        if pkg_ver:
            if installed_pkgs[pkg_name].get('version') != pkg_ver:
                pkgs_to_install.append(pkg)
            else:
                pkgs_satisfied.append(installed_name_ver)
            continue
        else:
            pkgs_satisfied.append(installed_name_ver)
            continue
    # Dry-run (test=True) mode: report intended changes without installing.
    if __opts__['test']:
        ret['result'] = None
        comment_msg = []
        if pkgs_to_install:
            comment_msg.append('NPM package(s) \'{0}\' are set to be installed'
                               .format(', '.join(pkgs_to_install)))
            ret['changes'] = {'old': [], 'new': pkgs_to_install}
        if pkgs_satisfied:
            comment_msg.append('Package(s) \'{0}\' satisfied by {1}'
                               .format(', '.join(pkg_list), ', '.join(pkgs_satisfied)))
            ret['result'] = True
        ret['comment'] = '. '.join(comment_msg)
        return ret
    if not pkgs_to_install:
        ret['result'] = True
        ret['comment'] = ('Package(s) \'{0}\' satisfied by {1}'
                          .format(', '.join(pkg_list), ', '.join(pkgs_satisfied)))
        return ret
    try:
        cmd_args = {
            'dir': dir,
            'runas': user,
            'registry': registry,
            'env': env,
            'pkgs': pkg_list,
        }
        call = __salt__['npm.install'](**cmd_args)
    except (CommandNotFoundError, CommandExecutionError) as err:
        ret['result'] = False
        ret['comment'] = 'Error installing \'{0}\': {1}'.format(
            ', '.join(pkg_list), err)
        return ret
    # npm.install returns a list/dict of installed packages on success.
    if call and (isinstance(call, list) or isinstance(call, dict)):
        ret['result'] = True
        ret['changes'] = {'old': [], 'new': pkgs_to_install}
        ret['comment'] = 'Package(s) \'{0}\' successfully installed'.format(
            ', '.join(pkgs_to_install))
    else:
        ret['result'] = False
        ret['comment'] = 'Could not install package(s) \'{0}\''.format(
            ', '.join(pkg_list))
    return ret
|
Verify that the given package is installed and is at the correct version
(if specified).
.. code-block:: yaml
coffee-script:
npm.installed:
- user: someuser
coffee-script@1.0.1:
npm.installed: []
name
The package to install
.. versionchanged:: 2014.7.2
This parameter is no longer lowercased by salt so that
case-sensitive NPM package names will work.
pkgs
A list of packages to install with a single npm invocation; specifying
this argument will ignore the ``name`` argument
.. versionadded:: 2014.7.0
dir
The target directory in which to install the package, or None for
global installation
user
The user to run NPM with
.. versionadded:: 0.17.0
registry
The NPM registry from which to install the package
.. versionadded:: 2014.7.0
env
A list of environment variables to be set prior to execution. The
format is the same as the :py:func:`cmd.run <salt.states.cmd.run>`.
state function.
.. versionadded:: 2014.7.0
force_reinstall
Install the package even if it is already installed
|
def print_summary(self):
    """Print a summary of the form.

    May help finding which fields need to be filled-in.
    """
    # NOTE: ``input`` shadows the builtin of the same name inside this loop.
    for input in self.form.find_all(
            ("input", "textarea", "select", "button")):
        # Work on a copy so the live form tree is not modified.
        input_copy = copy.copy(input)
        # Text between the opening tag and the closing tag often
        # contains a lot of spaces that we don't want here.
        for subtag in input_copy.find_all() + [input_copy]:
            if subtag.string:
                subtag.string = subtag.string.strip()
        print(input_copy)
|
Print a summary of the form.
May help finding which fields need to be filled-in.
|
def preferred_format(incomplete_format, preferred_formats):
    """Return the preferred format for the given extension.

    :param incomplete_format: a (possibly short-form) format description;
        returned unchanged when it already names a format.
    :param preferred_formats: candidate formats matched on extension
        (or the ``.auto`` wildcard), suffix and prefix.
    """
    incomplete_format = long_form_one_format(incomplete_format)
    if 'format_name' in incomplete_format:
        # Already fully specified -- nothing to resolve.
        return incomplete_format
    for fmt in long_form_multiple_formats(preferred_formats):
        # A candidate matches when the extension agrees (or the candidate is
        # the '.auto' wildcard and the extension is not a notebook/markdown
        # one), and suffix/prefix agree whenever the candidate specifies them
        # (the .get(..., incomplete default) makes a missing key match).
        if ((incomplete_format['extension'] == fmt['extension'] or (
                fmt['extension'] == '.auto' and
                incomplete_format['extension'] not in ['.md', '.Rmd', '.ipynb'])) and
                incomplete_format.get('suffix') == fmt.get('suffix', incomplete_format.get('suffix')) and
                incomplete_format.get('prefix') == fmt.get('prefix', incomplete_format.get('prefix'))):
            fmt.update(incomplete_format)
            return fmt
    return incomplete_format
|
Return the preferred format for the given extension
|
def extract_fields(lines, delim, searches, match_lineno=1, **kwargs):
    """Return generator of fields matching `searches`.

    Parameters
    ----------
    lines : iterable
        Provides line number (1-based) and line (str)
    delim : str
        Delimiter to split line by to produce fields
    searches : iterable
        Search terms (str) to match against line fields.
    match_lineno : int
        Line number of line to split and search fields
    Remaining keyword arguments are passed to `match_fields`.

    Raises
    ------
    WcutError
        If `delim` does not occur in the match line.
    """
    keep_idx = []
    for lineno, line in lines:
        if lineno < match_lineno or delim not in line:
            if lineno == match_lineno:
                # The match line must contain the delimiter.
                # (fixed typo: "Delimter" -> "Delimiter")
                raise WcutError('Delimiter not found in line {}'.format(
                    match_lineno))
            # Lines before the match line, and later lines lacking the
            # delimiter, pass through unsplit.
            yield [line]
            continue
        fields = line.split(delim)
        if lineno == match_lineno:
            # Decide once, on the match line, which field indexes to keep.
            keep_idx = list(match_fields(fields, searches, **kwargs))
        keep_fields = [fields[i] for i in keep_idx]
        if keep_fields:
            yield keep_fields
|
Return generator of fields matching `searches`.
Parameters
----------
lines : iterable
Provides line number (1-based) and line (str)
delim : str
Delimiter to split line by to produce fields
searches : iterable
Returns search (str) to match against line fields.
match_lineno : int
Line number of line to split and search fields
Remaining keyword arguments are passed to `match_fields`.
|
def completeness(self, catalogue, config, saveplot=False, filetype='png',
                 timeout=120):
    '''
    Interactively select completeness periods: for each magnitude bin the
    cumulative event count is plotted against time and the user clicks
    (``pylab.ginput``) the year from which the bin is judged complete.

    :param catalogue:
        Earthquake catalogue as instance of
        :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
    :param dict config:
        Configuration parameters of the algorithm, containing the
        following information:
        'magnitude_bin' Size of magnitude bin (non-negative float)
        'time_bin' Size (in dec. years) of the time window (non-negative
        float)
        'increment_lock' Boolean to indicate whether to ensure
        completeness magnitudes always decrease with more
        recent bins
    :param saveplot:
        Base filename (str) for saving each bin's figure, or False.
    :param str filetype:
        Image format passed to ``plt.savefig``.
    :param int timeout:
        Seconds ginput waits for the user's click.
    :returns:
        2-column table indicating year of completeness and corresponding
        magnitude numpy.ndarray
    '''
    if saveplot and not isinstance(saveplot, str):
        raise ValueError('To save the figures enter a filename: ')
    # Get magntitude bins
    magnitude_bins = self._get_magnitudes_from_spacing(
        catalogue.data['magnitude'],
        config['magnitude_bin'])
    dec_time = catalogue.get_decimal_time()
    completeness_table = np.zeros([len(magnitude_bins) - 1, 2],
                                  dtype=float)
    min_year = float(np.min(catalogue.data['year']))
    max_year = float(np.max(catalogue.data['year'])) + 1.0
    has_completeness = np.zeros(len(magnitude_bins) - 1, dtype=bool)
    for iloc in range(0, len(magnitude_bins) - 1):
        lower_mag = magnitude_bins[iloc]
        upper_mag = magnitude_bins[iloc + 1]
        idx = np.logical_and(catalogue.data['magnitude'] >= lower_mag,
                             catalogue.data['magnitude'] < upper_mag)
        # Cumulative number of events in this magnitude bin over time.
        cumvals = np.cumsum(np.ones(np.sum(idx)))
        plt.plot(dec_time[idx], cumvals, '.')
        plt.xlim(min_year, max_year + 5)
        title_string = 'Magnitude %5.2f to %5.2f' % (lower_mag, upper_mag)
        plt.title(title_string)
        # Block for a single user click; pts is its (x, y) coordinate.
        pts = pylab.ginput(1, timeout=timeout)[0]
        if pts[0] <= max_year:
            # Click falls within the catalogue time span: record a
            # completeness year for this bin.  (The previous comment
            # claimed the opposite of what this branch does.)
            has_completeness[iloc] = True
            completeness_table[iloc, 0] = np.floor(pts[0])
            completeness_table[iloc, 1] = magnitude_bins[iloc]
            print(completeness_table[iloc, :], has_completeness[iloc])
            # Optionally clamp so completeness years never increase
            # with magnitude.
            if config['increment_lock'] and (iloc > 0) and \
                (completeness_table[iloc, 0] > completeness_table[iloc - 1, 0]):
                completeness_table[iloc, 0] = \
                    completeness_table[iloc - 1, 0]
            # Add marker line to indicate completeness point
            # NOTE(review): the marker's x values are [0, count] and its y
            # values the completeness year, while the scatter puts time on
            # the x axis -- verify the marker renders as intended.
            marker_line = np.array([
                [0., completeness_table[iloc, 0]],
                [cumvals[-1], completeness_table[iloc, 0]]])
            plt.plot(marker_line[:, 0], marker_line[:, 1], 'r-')
        if saveplot:
            filename = saveplot + '_' + ('%5.2f' % lower_mag) + (
                '%5.2f' % upper_mag) + '.' + filetype
            plt.savefig(filename, format=filetype)
        plt.close()
    return completeness_table[has_completeness, :]
|
:param catalogue:
Earthquake catalogue as instance of
:class:`openquake.hmtk.seismicity.catalogue.Catalogue`
:param dict config:
Configuration parameters of the algorithm, containing the
following information:
'magnitude_bin' Size of magnitude bin (non-negative float)
'time_bin' Size (in dec. years) of the time window (non-negative
float)
'increment_lock' Boolean to indicate whether to ensure
completeness magnitudes always decrease with more
recent bins
:returns:
2-column table indicating year of completeness and corresponding
magnitude numpy.ndarray
|
def get_local_annotations(
        cls, target, exclude=None, ctx=None, select=lambda *p: True
):
    """Get a list of local target annotations in the order of their
    definition.

    :param type cls: type of annotation to get from target.
    :param target: target from where get annotations.
    :param tuple/type exclude: annotation types to exclude from selection.
    :param ctx: target ctx.
    :param select: selection function which takes in parameters a target,
        a ctx and an annotation and returns True if the annotation has to
        be selected. True by default.
    :return: target local annotations.
    :rtype: list
    :raises TypeError: if target is not hashable.
    """
    result = []
    # initialize exclude
    exclude = () if exclude is None else exclude
    try:
        # get local annotations
        local_annotations = get_local_property(
            target, Annotation.__ANNOTATIONS_KEY__, result, ctx=ctx
        )
        # Fall back progressively: bound methods may carry annotations on
        # their underlying function, with or without a ctx.
        if not local_annotations:
            if ismethod(target):
                func = get_method_function(target)
                local_annotations = get_local_property(
                    func, Annotation.__ANNOTATIONS_KEY__,
                    result, ctx=ctx
                )
                if not local_annotations:
                    local_annotations = get_local_property(
                        func, Annotation.__ANNOTATIONS_KEY__,
                        result
                    )
            elif isfunction(target):
                local_annotations = get_local_property(
                    target, Annotation.__ANNOTATIONS_KEY__,
                    result
                )
    except TypeError:
        raise TypeError('target {0} must be hashable'.format(target))
    for local_annotation in local_annotations:
        # check if local annotation inherits from cls
        inherited = isinstance(local_annotation, cls)
        # and if not excluded
        not_excluded = not isinstance(local_annotation, exclude)
        # and if selected
        selected = select(target, ctx, local_annotation)
        # if three conditions, add local annotation to the result
        if inherited and not_excluded and selected:
            result.append(local_annotation)
    return result
|
Get a list of local target annotations in the order of their
definition.
:param type cls: type of annotation to get from target.
:param target: target from where get annotations.
:param tuple/type exclude: annotation types to exclude from selection.
:param ctx: target ctx.
:param select: selection function which takes in parameters a target,
a ctx and an annotation and returns True if the annotation has to
be selected. True by default.
:return: target local annotations.
:rtype: list
|
def _split_tidy(self, string, maxsplit=None):
"""Rstrips string for \n and splits string for \t"""
if maxsplit is None:
return string.rstrip("\n").split("\t")
else:
return string.rstrip("\n").split("\t", maxsplit)
|
Rstrips string for \n and splits string for \t
|
def ae_latent_softmax(latents_pred, latents_discrete, hparams):
  """Latent prediction and loss.

  Projects `latents_pred` onto a vocabulary of size 2**hparams.z_size
  (optionally split across `hparams.num_decode_blocks` smaller softmaxes)
  and, when `latents_discrete` targets are given, computes the
  cross-entropy loss against them.

  Returns:
    (sample, loss): sampled latent codes and the loss tensor (None when
    `latents_discrete` is None).
  """
  vocab_size = 2 ** hparams.z_size
  if hparams.num_decode_blocks < 2:
    # Single softmax over the full latent vocabulary.
    latents_logits = tf.layers.dense(latents_pred, vocab_size,
                                     name="extra_logits")
    if hparams.logit_normalization:
      # Scale logits to unit RMS before the softmax.
      latents_logits *= tf.rsqrt(1e-8 +
                                 tf.reduce_mean(tf.square(latents_logits)))
    loss = None
    if latents_discrete is not None:
      if hparams.soft_em:
        # latents_discrete is actually one-hot of multinomial samples
        assert hparams.num_decode_blocks == 1
        loss = tf.nn.softmax_cross_entropy_with_logits_v2(
            labels=latents_discrete, logits=latents_logits)
      else:
        loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=latents_discrete, logits=latents_logits)
    sample = multinomial_sample(
        latents_logits, vocab_size, hparams.sampling_temp)
    return sample, loss

  # Multi-block case.
  # The vocabulary is factored into num_decode_blocks digits in base
  # block_vocab_size; each digit gets its own softmax.
  vocab_bits = int(math.log(vocab_size, 2))
  assert vocab_size == 2**vocab_bits
  assert vocab_bits % hparams.num_decode_blocks == 0
  block_vocab_size = 2**(vocab_bits // hparams.num_decode_blocks)
  latents_logits = [
      tf.layers.dense(
          latents_pred, block_vocab_size, name="extra_logits_%d" % i)
      for i in range(hparams.num_decode_blocks)
  ]
  loss = None
  if latents_discrete is not None:
    losses = []
    for i in range(hparams.num_decode_blocks):
      # Extract the i-th base-block_vocab_size digit of the target code.
      d = tf.floormod(tf.floordiv(latents_discrete,
                                  block_vocab_size**i), block_vocab_size)
      losses.append(tf.nn.sparse_softmax_cross_entropy_with_logits(
          labels=d, logits=latents_logits[i]))
    loss = sum(losses)
  samples = [multinomial_sample(l, block_vocab_size, hparams.sampling_temp)
             for l in latents_logits]
  # Recombine per-block digit samples into a full vocabulary code.
  sample = sum([s * block_vocab_size**i for i, s in enumerate(samples)])
  return sample, loss
|
Latent prediction and loss.
|
def get_all_groups(region=None, key=None, keyid=None, profile=None):
    '''
    Return all AutoScale Groups visible in the account
    (as a list of boto.ec2.autoscale.group.AutoScalingGroup).
    .. versionadded:: 2016.11.0
    CLI example:
    .. code-block:: bash
        salt-call boto_asg.get_all_groups region=us-east-1 --output yaml
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    retries = 30
    while True:
        try:
            next_token = ''
            asgs = []
            # Page through results until boto stops returning a next_token.
            while next_token is not None:
                ret = conn.get_all_groups(next_token=next_token)
                asgs += [a for a in ret]
                next_token = ret.next_token
            return asgs
        except boto.exception.BotoServerError as e:
            # Back off and retry (at most 30 times) when throttled by AWS.
            if retries and e.code == 'Throttling':
                log.debug('Throttled by AWS API, retrying in 5 seconds...')
                time.sleep(5)
                retries -= 1
                continue
            # Any other API error is logged and reported as no groups.
            log.error(e)
            return []
|
Return all AutoScale Groups visible in the account
(as a list of boto.ec2.autoscale.group.AutoScalingGroup).
.. versionadded:: 2016.11.0
CLI example:
.. code-block:: bash
salt-call boto_asg.get_all_groups region=us-east-1 --output yaml
|
def hessian(self, x, y, grid_interp_x=None, grid_interp_y=None, f_=None, f_x=None, f_y=None, f_xx=None, f_yy=None, f_xy=None):
    """
    returns Hessian components of the function: d^2f/dx^2, d^2f/dy^2, d^2f/dxdy

    Scalar x/y inputs return three scalars; array inputs return three
    arrays.  When gridded evaluation is enabled (``self._grid``) and the
    input is large enough, the interpolators are evaluated on the grid
    axes and flattened, which is cheaper than per-point evaluation.
    """
    #self._check_interp(grid_interp_x, grid_interp_y, f_, f_x, f_y, f_xx, f_yy, f_xy)
    n = len(np.atleast_1d(x))
    if n <= 1 and np.shape(x) == ():
    #if type(x) == float or type(x) == int or type(x) == type(np.float64(1)) or len(x) <= 1:
        # Scalar input: the interpolators return 2-D arrays, so unwrap.
        f_xx_out = self.f_xx_interp(x, y, grid_interp_x, grid_interp_y, f_xx)
        f_yy_out = self.f_yy_interp(x, y, grid_interp_x, grid_interp_y, f_yy)
        f_xy_out = self.f_xy_interp(x, y, grid_interp_x, grid_interp_y, f_xy)
        return f_xx_out[0][0], f_yy_out[0][0], f_xy_out[0][0]
    else:
        if self._grid and n >= self._min_grid_number:
            # Evaluate on the grid axes, then flatten back to 1-D.
            x_, y_ = util.get_axes(x, y)
            f_xx_out = self.f_xx_interp(x_, y_, grid_interp_x, grid_interp_y, f_xx)
            f_yy_out = self.f_yy_interp(x_, y_, grid_interp_x, grid_interp_y, f_yy)
            f_xy_out = self.f_xy_interp(x_, y_, grid_interp_x, grid_interp_y, f_xy)
            f_xx_out = util.image2array(f_xx_out)
            f_yy_out = util.image2array(f_yy_out)
            f_xy_out = util.image2array(f_xy_out)
        else:
            #n = len(x)
            # Point-by-point evaluation for small or irregular inputs.
            f_xx_out, f_yy_out, f_xy_out = np.zeros(n), np.zeros(n), np.zeros(n)
            for i in range(n):
                f_xx_out[i] = self.f_xx_interp(x[i], y[i], grid_interp_x, grid_interp_y, f_xx)
                f_yy_out[i] = self.f_yy_interp(x[i], y[i], grid_interp_x, grid_interp_y, f_yy)
                f_xy_out[i] = self.f_xy_interp(x[i], y[i], grid_interp_x, grid_interp_y, f_xy)
        return f_xx_out, f_yy_out, f_xy_out
|
returns Hessian components of the function: d^2f/dx^2, d^2f/dy^2, d^2f/dxdy
|
def add_interface_to_router(self, segment_id,
                            router_name, gip, router_ip, mask, server):
    """Adds an interface to existing HW router on Arista HW device.

    :param segment_id: VLAN Id associated with interface that is added
    :param router_name: globally unique identifier for router/VRF
    :param gip: Gateway IP associated with the subnet
    :param router_ip: IP address of the router
    :param mask: subnet mask to be used
    :param server: Server endpoint on the Arista switch to be configured
    """
    segment_id = segment_id or DEFAULT_VLAN
    # In VARP (MLAG) config use the router ID; otherwise the gateway
    # address with its subnet mask.
    if self._mlag_configured:
        ip = router_ip
    else:
        ip = gip + '/' + mask
    cmds = [template.format(segment_id, router_name, ip)
            for template in self._interfaceDict['add']]
    if self._mlag_configured:
        cmds.extend(template.format(gip)
                    for template in self._additionalInterfaceCmdsDict['add'])
    self._run_config_cmds(cmds, server)
|
Adds an interface to existing HW router on Arista HW device.
:param segment_id: VLAN Id associated with interface that is added
:param router_name: globally unique identifier for router/VRF
:param gip: Gateway IP associated with the subnet
:param router_ip: IP address of the router
:param mask: subnet mask to be used
:param server: Server endpoint on the Arista switch to be configured
|
def issues(self):
    """
    Yield issue records built from every card of every list on every
    board returned by the remote service.

    Each yielded issue carries the board/list names and the card's
    annotations in its extra data.
    """
    for board in self.get_boards():
        for lst in self.get_lists(board['id']):
            # Context shared by every card in this list.
            listextra = dict(boardname=board['name'], listname=lst['name'])
            for card in self.get_cards(lst['id']):
                issue = self.get_issue_for_record(card, extra=listextra)
                issue.update_extra({"annotations": self.annotations(card)})
                yield issue
|
Returns a list of dicts representing issues from a remote service.
|
def get_analysis_question(hazard, exposure):
    """Construct analysis question based on hazard and exposure.

    :param hazard: A hazard definition.
    :type hazard: dict

    :param exposure: An exposure definition.
    :type exposure: dict

    :returns: Analysis question based on reporting standards.
    :rtype: str
    """
    # First we look for a translated hardcoded question.
    question = specific_analysis_question(hazard, exposure)
    if question:
        return question
    if hazard == hazard_generic:
        # Secondly, if the hazard is generic, we don't need the hazard.
        question = tr(
            'In each of the hazard zones {exposure_measure} {exposure_name} '
            'might be affected?').format(
            exposure_measure=exposure['measure_question'],
            exposure_name=exposure['name'])
        return question
    # Then, we fallback on a generated string on the fly.
    question = tr(
        'In the event of a {hazard_name}, {exposure_measure} {exposure_name} '
        'might be affected?').format(
        hazard_name=hazard['name'],
        exposure_measure=exposure['measure_question'],
        exposure_name=exposure['name'])
    return question
|
Construct analysis question based on hazard and exposure.
:param hazard: A hazard definition.
:type hazard: dict
:param exposure: An exposure definition.
:type exposure: dict
:returns: Analysis question based on reporting standards.
:rtype: str
|
def increment(self, key, value=1):
    """
    Increment the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The increment value
    :type value: int

    :rtype: int or bool
    """
    # Read the stored payload, add the increment, and write it back with
    # the original expiry time.
    payload, expiry = self._get_payload(key)
    new_value = int(payload) + value
    self.put(key, new_value, int(expiry))
    return new_value
|
Increment the value of an item in the cache.
:param key: The cache key
:type key: str
:param value: The increment value
:type value: int
:rtype: int or bool
|
def update(self, response):
    """Update session information from device response.

    Increment sequence number when starting stream, not when playing.
    If device requires authentication resend previous message with auth.

    :param response: raw RTSP response text from the device.
    """
    data = response.splitlines()
    _LOGGER.debug('Received data %s from %s', data, self.host)
    # Consume header lines one by one; each recognised header updates the
    # matching session attribute.  Matching is by substring, so it follows
    # whatever casing/order the device sends.
    while data:
        line = data.pop(0)
        if 'RTSP/1.0' in line:
            # Status line, e.g. "RTSP/1.0 200 OK".
            self.rtsp_version = int(line.split(' ')[0][5])
            self.status_code = int(line.split(' ')[1])
            self.status_text = line.split(' ')[2]
        elif 'CSeq' in line:
            self.sequence_ack = int(line.split(': ')[1])
        elif 'Date' in line:
            self.date = line.split(': ')[1]
        elif 'Public' in line:
            self.methods_ack = line.split(': ')[1].split(', ')
        elif "WWW-Authenticate: Basic" in line:
            self.basic = True
            self.realm = line.split('"')[1]
        elif "WWW-Authenticate: Digest" in line:
            self.digest = True
            self.realm = line.split('"')[1]
            self.nonce = line.split('"')[3]
            self.stale = (line.split('stale=')[1] == 'TRUE')
        elif 'Content-Type' in line:
            self.content_type = line.split(': ')[1]
        elif 'Content-Base' in line:
            self.content_base = line.split(': ')[1]
        elif 'Content-Length' in line:
            self.content_length = int(line.split(': ')[1])
        elif 'Session' in line:
            # "Session: <id>;timeout=<seconds>" -- timeout is optional.
            self.session_id = line.split(': ')[1].split(";")[0]
            if '=' in line:
                self.session_timeout = int(line.split(': ')[1].split('=')[1])
        elif 'Transport' in line:
            self.transport_ack = line.split(': ')[1]
        elif 'Range' in line:
            self.range = line.split(': ')[1]
        elif 'RTP-Info' in line:
            self.rtp_info = line.split(': ')[1]
        elif not line:
            # A blank line separates headers from the optional SDP body.
            if data:
                self.sdp = data
            break
    if self.sdp:
        # Locate the control URL of the application stream in the SDP.
        stream_found = False
        for param in self.sdp:
            if not stream_found and 'm=application' in param:
                stream_found = True
            elif stream_found and 'a=control:rtsp' in param:
                self.control_url = param.split(':', 1)[1]
                break
    if self.status_code == 200:
        if self.state == STATE_STARTING:
            self.sequence += 1
    elif self.status_code == 401:
        # Device requires authorization, do not increment to next method
        pass
    else:
        # If device configuration is correct we should never get here
        _LOGGER.debug(
            "%s RTSP %s %s", self.host, self.status_code, self.status_text)
|
Update session information from device response.
Increment sequence number when starting stream, not when playing.
If device requires authentication resend previous message with auth.
|
def from_string(string, _or=''):
    """Parse a given string and turn it into an input token.

    A truthy ``_or`` marks the token as an 'or' alternative.
    """
    return Input(string, and_or='or' if _or else '')
|
Parse a given string and turn it into an input token.
|
def eval(self, expression):
    """Evaluate `expression` in MATLAB engine.

    Parameters
    ----------
    expression : str
        Expression is passed to MATLAB engine and evaluated.

    Raises
    ------
    RuntimeError
        If MATLAB reported an error (collected via the ``ERRSTR__``
        variable set by the wrapping script).
    """
    # wrap_script presumably captures MATLAB errors into ERRSTR__ -- TODO confirm
    expression_wrapped = wrap_script.format(expression)
    ### Evaluate the expression
    self._libeng.engEvalString(self._ep, expression_wrapped)
    ### Check for exceptions in MATLAB
    mxresult = self._libeng.engGetVariable(self._ep, 'ERRSTR__')
    error_string = self._libmx.mxArrayToString(mxresult)
    # Free the mxArray before possibly raising.
    self._libmx.mxDestroyArray(mxresult)
    if error_string != "":
        raise RuntimeError("Error from MATLAB\n{0}".format(error_string))
|
Evaluate `expression` in MATLAB engine.
Parameters
----------
expression : str
Expression is passed to MATLAB engine and evaluated.
|
def uninstall_pgpm_from_db(self):
    """
    Removes pgpm from db and all related metadata (_pgpm schema). Install packages are left as they are

    :return: 0 if successful and error otherwise
    """
    drop_schema_cascade_script = 'DROP SCHEMA {schema_name} CASCADE;'

    # Reconnect if a previous operation closed the connection.
    if self._conn.closed:
        self._conn = psycopg2.connect(self._connection_string,
                                      connection_factory=pgpm.lib.utils.db.MegaConnection)
    cur = self._conn.cursor()

    # get current user
    cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.current_user_sql)
    current_user = cur.fetchone()[0]

    # check if current user is a super user; only a superuser may drop the pgpm schema
    cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.is_superuser_sql)
    is_cur_superuser = cur.fetchone()[0]
    if not is_cur_superuser:
        # This aborts the whole operation, so log at error level
        # (was debug, which hid the reason for the exit).
        self._logger.error('User {0} is not a superuser. Only superuser can remove pgpm'
                           .format(current_user))
        sys.exit(1)

    self._logger.debug('Removing pgpm from DB by dropping schema {0}'.format(self._pgpm_schema_name))
    cur.execute(drop_schema_cascade_script.format(schema_name=self._pgpm_schema_name))

    # Commit transaction and release DB resources (cursor was previously leaked).
    self._conn.commit()
    cur.close()
    self._conn.close()

    return 0
|
Removes pgpm from db and all related metadata (_pgpm schema). Install packages are left as they are
:return: 0 if successful and error otherwise
|
def binary_gas_search(state: BaseState, transaction: BaseTransaction, tolerance: int=1) -> int:
    """
    Run the transaction with various gas limits, progressively
    approaching the minimum needed to succeed without an OutOfGas exception.

    The starting range of possible estimates is:
    [transaction.intrinsic_gas, state.gas_limit].
    After the first OutOfGas exception, the range is: (largest_limit_out_of_gas, state.gas_limit].
    After the first run not out of gas, the range is: (largest_limit_out_of_gas, smallest_success].

    :param int tolerance: When the range of estimates is less than tolerance,
        return the top of the range.
    :returns int: The smallest confirmed gas to not throw an OutOfGas exception,
        subject to tolerance. If OutOfGas is thrown at block limit, return block limit.
    :raises VMError: if the computation fails even when given the block gas_limit to complete
    """
    if not hasattr(transaction, 'sender'):
        raise TypeError(
            "Transaction is missing attribute sender.",
            "If sending an unsigned transaction, use SpoofTransaction and provide the",
            "sender using the 'from' parameter")

    # Cheapest conceivable run: exactly the intrinsic gas.
    cheapest = SpoofTransaction(
        transaction,
        gas=transaction.intrinsic_gas,
        gas_price=0,
    )
    if _get_computation_error(state, cheapest) is None:
        return transaction.intrinsic_gas

    # Most expensive allowed run: the full block gas limit. A failure
    # here is unrecoverable, so re-raise it.
    most_expensive = SpoofTransaction(
        transaction,
        gas=state.gas_limit,
        gas_price=0,
    )
    failure = _get_computation_error(state, most_expensive)
    if failure is not None:
        raise failure

    # Invariant: gas in (too_little, enough] — too_little failed,
    # enough succeeded. Bisect until the window is within tolerance.
    too_little = transaction.intrinsic_gas
    enough = state.gas_limit
    while enough - too_little > tolerance:
        probe = (enough + too_little) // 2
        probe_txn = SpoofTransaction(transaction, gas=probe)
        if _get_computation_error(state, probe_txn) is None:
            enough = probe
        else:
            too_little = probe
    return enough
|
Run the transaction with various gas limits, progressively
approaching the minimum needed to succeed without an OutOfGas exception.
The starting range of possible estimates is:
[transaction.intrinsic_gas, state.gas_limit].
After the first OutOfGas exception, the range is: (largest_limit_out_of_gas, state.gas_limit].
After the first run not out of gas, the range is: (largest_limit_out_of_gas, smallest_success].
:param int tolerance: When the range of estimates is less than tolerance,
return the top of the range.
:returns int: The smallest confirmed gas to not throw an OutOfGas exception,
subject to tolerance. If OutOfGas is thrown at block limit, return block limit.
:raises VMError: if the computation fails even when given the block gas_limit to complete
|
def product_data_request(self):
    """Request product data from a device.

    Not supported by all devices.
    Required after 01-Feb-2007.
    """
    # Build the standard-length product-data request and queue it for send.
    request = StandardSend(self._address,
                          COMMAND_PRODUCT_DATA_REQUEST_0X03_0X00)
    self._send_msg(request)
|
Request product data from a device.
Not supported by all devices.
Required after 01-Feb-2007.
|
def chunks(seq, chunk_size):
    # type: (Sequence[T], int) -> Iterable[Sequence[T]]
    """ Split seq into chunk_size-sized chunks.

    :param seq: A sequence to chunk.
    :param chunk_size: The size of chunk.
    """
    # Yield consecutive slices lazily; the last chunk may be shorter.
    for start in range(0, len(seq), chunk_size):
        yield seq[start:start + chunk_size]
|
Split seq into chunk_size-sized chunks.
:param seq: A sequence to chunk.
:param chunk_size: The size of chunk.
|
def command(execute=None):  # noqa: E501
    """Execute a Command

    Execute a command # noqa: E501

    :param execute: The data needed to execute this command
    :type execute: dict | bytes

    :rtype: Response
    """
    if connexion.request.is_json:
        execute = Execute.from_dict(connexion.request.get_json())  # noqa: E501
    if not hasAccess():
        return redirectUnauthorized()
    try:
        connector = None
        # Command parameters are optional; fall back to an empty mapping.
        parameters = {}
        if execute.command.parameters:
            parameters = execute.command.parameters
        credentials = Credentials()
        options = Options(debug=execute.command.options['debug'],
                          sensitive=execute.command.options['sensitive'])
        if execute.auth:
            credentials = mapUserAuthToCredentials(execute.auth, credentials)
            # Without an API token the credentials must be treated as sensitive.
            if not execute.auth.api_token:
                options.sensitive = True
        connector = Connector(options=options, credentials=credentials,
                              command=execute.command.command,
                              parameters=parameters)
        commandHandler = connector.execute()
        response = Response(status=commandHandler.getRequest().getResponseStatusCode(),
                            body=json.loads(commandHandler.getRequest().getResponseBody()))
        # Attach the connector's log buffer when debug output was requested.
        if execute.command.options['debug']:
            response.log = connector.logBuffer
        return response
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        State.log.error(traceback.format_exc())
        # `execute` may still be None (non-JSON request) or malformed, in
        # which case the original handler itself crashed; guard the access.
        debug_requested = (execute is not None
                           and getattr(execute, 'command', None) is not None
                           and 'debug' in execute.command.options
                           and execute.command.options['debug'])
        if debug_requested:
            # "occured" -> "occurred" typo fixed in the user-facing message.
            return ErrorResponse(status=500,
                                 message="Uncaught exception occurred during processing. To get a larger stack trace, visit the logs.",
                                 state=traceback.format_exc(3))
        else:
            return ErrorResponse(status=500, message="")
|
Execute a Command
Execute a command # noqa: E501
:param execute: The data needed to execute this command
:type execute: dict | bytes
:rtype: Response
|
def get_pickled_ontology(filename):
    """ try to retrieve a cached ontology """
    pickledfile = os.path.join(ONTOSPY_LOCAL_CACHE, filename + ".pickle")
    if GLOBAL_DISABLE_CACHE:
        printDebug(
            "WARNING: DEMO MODE cache has been disabled in __init__.py ==============",
            "red")
    if os.path.isfile(pickledfile) and not GLOBAL_DISABLE_CACHE:
        try:
            # `with` closes the file handle even on failure (was leaked before).
            with open(pickledfile, "rb") as f:
                return cPickle.load(f)
        # Narrowed from a bare `except:`: a corrupt/incompatible pickle
        # falls through to a cache rebuild, but SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        except Exception:
            print(Style.DIM +
                  "** WARNING: Cache is out of date ** ...recreating it... " +
                  Style.RESET_ALL)
            return None
    # No cache file, or caching disabled.
    return None
|
try to retrieve a cached ontology
|
def health_node(consul_url=None, token=None, node=None, **kwargs):
    '''
    Health information about the registered node.

    :param consul_url: The Consul server URL.
    :param node: The node to request health information about.
    :param dc: By default, the datacenter of the agent is queried;
               however, the dc can be provided using the "dc" parameter.

    :return: Health information about the requested node.

    CLI Example:

    .. code-block:: bash

        salt '*' consul.health_node node='node1'
    '''
    # Fall back to the configured URL when none was supplied.
    if not consul_url:
        consul_url = _get_config()
        if not consul_url:
            log.error('No Consul URL found.')
            return {'message': 'No Consul URL found.',
                    'res': False}

    if not node:
        raise SaltInvocationError('Required argument "node" is missing.')

    # Optional datacenter override.
    query_params = {}
    if 'dc' in kwargs:
        query_params['dc'] = kwargs['dc']

    return _query(consul_url=consul_url,
                  function='health/node/{0}'.format(node),
                  token=token,
                  query_params=query_params)
|
Health information about the registered node.
:param consul_url: The Consul server URL.
:param node: The node to request health information about.
:param dc: By default, the datacenter of the agent is queried;
however, the dc can be provided using the "dc" parameter.
:return: Health information about the requested node.
CLI Example:
.. code-block:: bash
salt '*' consul.health_node node='node1'
|
def _extend_support_with_default_value(self, x, f, default_value):
    """Returns `f(x)` if x is in the support, and `default_value` otherwise.

    Given `f` which is defined on the support of this distribution
    (`x >= loc`), extend the function definition to the real line
    by defining `f(x) = default_value` for `x < loc`.

    Args:
      x: Floating-point `Tensor` to evaluate `f` at.
      f: Callable that takes in a `Tensor` and returns a `Tensor`. This
        represents the function whose domain of definition we want to extend.
      default_value: Python or numpy literal representing the value to use for
        extending the domain.
    Returns:
      `Tensor` representing an extension of `f(x)`.
    """
    with tf.name_scope("extend_support_with_default_value"):
        x = tf.convert_to_tensor(value=x, dtype=self.dtype, name="x")
        # Broadcast loc against scale and x so the tf.where calls below
        # operate on tensors of one common shape.
        loc = self.loc + tf.zeros_like(self.scale) + tf.zeros_like(x)
        x = x + tf.zeros_like(loc)
        # Replace out-of-support entries with an in-support placeholder
        # (self._inv_z(0.5) — presumably the median; TODO confirm) before
        # applying f, so f never sees values outside its domain.
        safe_x = tf.where(x < loc, self._inv_z(0.5) + tf.zeros_like(x), x)
        in_support = f(safe_x)
        # Build the fill tensor; 0. and 1. get the cheap zeros/ones paths.
        if default_value == 0.:
            fill = tf.zeros_like(in_support)
        elif default_value == 1.:
            fill = tf.ones_like(in_support)
        else:
            fill = tf.fill(
                dims=tf.shape(input=in_support),
                value=dtype_util.as_numpy_dtype(self.dtype)(default_value))
        return tf.where(x < loc, fill, in_support)
|
Returns `f(x)` if x is in the support, and `default_value` otherwise.
Given `f` which is defined on the support of this distribution
(`x >= loc`), extend the function definition to the real line
by defining `f(x) = default_value` for `x < loc`.
Args:
x: Floating-point `Tensor` to evaluate `f` at.
f: Callable that takes in a `Tensor` and returns a `Tensor`. This
represents the function whose domain of definition we want to extend.
default_value: Python or numpy literal representing the value to use for
extending the domain.
Returns:
`Tensor` representing an extension of `f(x)`.
|
def fetch_entity_cls_from_registry(entity):
    """Util Method to fetch an Entity class from an entity's name"""
    # Anything other than a string is assumed to already be an Entity
    # class and is passed through untouched.
    if not isinstance(entity, str):
        return entity
    try:
        return repo_factory.get_entity(entity)
    except AssertionError:
        # Entity has not been registered (yet)
        # FIXME print a helpful debug message
        raise
|
Util Method to fetch an Entity class from an entity's name
|
def add_object(self, start, obj, object_size):
    """
    Add/Store an object to this region at the given offset.

    :param start: Offset at which the object is stored.
    :param obj: The object to store.
    :param int object_size: Size of the object
    :return:
    """
    # Delegate to _store, never overwriting an existing object.
    self._store(start, obj, object_size, overwrite=False)
|
Add/Store an object to this region at the given offset.
:param start:
:param obj:
:param int object_size: Size of the object
:return:
|
def text_input(self, window, allow_resize=False):
    """
    Transform a window into a text box that will accept user input and loop
    until an escape sequence is entered.

    If the escape key (27) is pressed, cancel the textbox and return None.
    Otherwise, the textbox will wait until it is full (^j, or a new line is
    entered on the bottom line) or the BEL key (^g) is pressed.
    """
    window.clear()

    # Set cursor mode to 1 because 2 doesn't display on some terminals
    self.curs_set(1)

    # Keep insert_mode off to avoid the recursion error described here
    # http://bugs.python.org/issue13051
    textbox = textpad.Textbox(window)
    # stripspaces=0 preserves the user's trailing whitespace verbatim.
    textbox.stripspaces = 0

    def validate(ch):
        "Filters characters for special key sequences"
        # ESC aborts the edit entirely.
        if ch == self.ESCAPE:
            raise exceptions.EscapeInterrupt()
        # Treat a terminal resize as an abort unless the caller opted in.
        if (not allow_resize) and (ch == curses.KEY_RESIZE):
            raise exceptions.EscapeInterrupt()
        # Fix backspace for iterm
        if ch == curses.ascii.DEL:
            ch = curses.KEY_BACKSPACE
        return ch

    # Wrapping in an exception block so that we can distinguish when the
    # user hits the return character from when the user tries to back out
    # of the input.
    try:
        out = textbox.edit(validate=validate)
        # textpad may hand back bytes on some platforms; normalize to text.
        if isinstance(out, six.binary_type):
            out = out.decode('utf-8')
    except exceptions.EscapeInterrupt:
        out = None

    # Hide the cursor again and strip textpad padding artifacts.
    self.curs_set(0)
    return self.strip_textpad(out)
|
Transform a window into a text box that will accept user input and loop
until an escape sequence is entered.
If the escape key (27) is pressed, cancel the textbox and return None.
Otherwise, the textbox will wait until it is full (^j, or a new line is
entered on the bottom line) or the BEL key (^g) is pressed.
|
def _merge(self, value):
""" Returns a list based on `value`:
* missing required value is converted to an empty list;
* missing required items are never created;
* nested items are merged recursively.
"""
if not value:
return []
if value is not None and not isinstance(value, list):
# bogus value; will not pass validation but should be preserved
return value
item_spec = self._nested_validator
return [x if x is None else item_spec.get_default_for(x) for x in value]
|
Returns a list based on `value`:
* missing required value is converted to an empty list;
* missing required items are never created;
* nested items are merged recursively.
|
def getRegionsByType(self, regionClass):
    """
    Gets all region instances of a given class
    (for example, nupic.regions.sp_region.SPRegion).
    """
    # Exact-class match only (no subclass matching), preserving the
    # iteration order of self.regions.
    return [region for region in self.regions.values()
            if type(region.getSelf()) is regionClass]
|
Gets all region instances of a given class
(for example, nupic.regions.sp_region.SPRegion).
|
def get(self, name, default=None):
    '''
    Retrieves the object with "name", like with SessionManager.get(), but
    removes the object from the database after retrieval, so that it can be
    retrieved only once
    '''
    # Fetch through the parent manager; on a hit, delete the entry so
    # the notification can only ever be read once.
    result = super(NotificationManager, self).get(name, default)
    if result is not None:
        self.delete(name)
    return result
|
Retrieves the object with "name", like with SessionManager.get(), but
removes the object from the database after retrieval, so that it can be
retrieved only once
|
def tag_name(cls, tag):
    """return the name of the tag, with the namespace removed"""
    # Callers may pass an element rather than a tag value; unwrap until
    # we hold the raw tag (a string such as '{namespace}name').
    while isinstance(tag, etree._Element):
        tag = tag.tag
    # Keep only what follows the last '}' (the namespace delimiter).
    return tag.rsplit('}', 1)[-1]
|
return the name of the tag, with the namespace removed
|
def get_levenshtein(first, second):
    """\
    Get the Levenshtein distance between two strings.

    :param first: the first string
    :param second: the second string
    """
    # Distance to/from an empty string is the other string's length.
    if not first:
        return len(second)
    if not second:
        return len(first)

    # prev_distances[j] holds the edit distance between the prefix of
    # `first` processed so far and the first j characters of `second`.
    # Only the previous and current rows are kept in memory.
    prev_distances = list(range(len(second) + 1))
    curr_distances = prev_distances
    for first_idx, first_char in enumerate(first, start=1):
        # First column: distance from the current prefix to "".
        curr_distances = [first_idx]
        for second_idx, second_char in enumerate(second, start=1):
            # BUG FIX: the original took min() of the three neighbors and
            # only then added the substitution cost, which made insertions
            # and deletions free whenever the characters matched (e.g. it
            # returned 0 for ("ab", "aab")). Insertion and deletion always
            # cost 1; only the diagonal uses the substitution cost.
            substitution_cost = 0 if first_char == second_char else 1
            curr_distances.append(min(
                prev_distances[second_idx - 1] + substitution_cost,  # substitute/match
                prev_distances[second_idx] + 1,                      # delete from `first`
                curr_distances[second_idx - 1] + 1,                  # insert into `first`
            ))
        prev_distances = curr_distances
    return curr_distances[-1]
|
\
Get the Levenshtein distance between two strings.
:param first: the first string
:param second: the second string
|
def log(arg, base=None):
    """
    Perform the logarithm using a specified base

    Parameters
    ----------
    base : number, default None
        If None, base e is used

    Returns
    -------
    logarithm : double type
    """
    # Build the Log operation node and convert it to an expression.
    return ops.Log(arg, base).to_expr()
|
Perform the logarithm using a specified base
Parameters
----------
base : number, default None
If None, base e is used
Returns
-------
logarithm : double type
|
def _complete_path(path=None):
    """Perform completion of filesystem path.

    https://stackoverflow.com/questions/5637124/tab-completion-in-pythons-raw-input
    """
    # No input yet: offer everything in the current directory.
    if not path:
        return _listdir('.')
    dirname, rest = os.path.split(path)
    search_dir = dirname or '.'
    matches = [entry for entry in _listdir(search_dir)
               if entry.startswith(rest)]
    # more than one match, or single match which does not exist (typo)
    if len(matches) > 1 or not os.path.exists(path):
        return matches
    # resolved to a single directory, so return list of files below it
    if os.path.isdir(path):
        return list(_listdir(path))
    # exact file match terminates this completion
    return [path + ' ']
|
Perform completion of filesystem path.
https://stackoverflow.com/questions/5637124/tab-completion-in-pythons-raw-input
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.