code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def get_tools(self) -> list:
tools = "flake8,pylint,vulture,pyroma,isort,yapf,safety,dodgy,pytest,pypi".split(
",")
print("Available tools: {0}".format(",".join(tools)))
answer = ask_list("What tools would you like to use?",
["flake8", "pytest"])
if any(tool not in tools for tool in answer):
print("Invalid answer, retry.")
self.get_tools()
return answer | Lets the user enter the tools he want to use |
def createfork(self, project_id):
request = requests.post(
'{0}/fork/{1}'.format(self.projects_url, project_id),
timeout=self.timeout, verify=self.verify_ssl)
if request.status_code == 200:
return True
else:
return False | Forks a project into the user namespace of the authenticated user.
:param project_id: Project ID to fork
:return: True if succeed |
def notification_factory(code, subcode):
notification = BGPNotification(code, subcode)
if not notification.reason:
raise ValueError('Invalid code/sub-code.')
return notification | Returns a `Notification` message corresponding to given codes.
Parameters:
- `code`: (int) BGP error code
- `subcode`: (int) BGP error sub-code |
def ping_connection(dbapi_connection, connection_record, connection_proxy):
cursor = dbapi_connection.cursor()
try:
cursor.execute("SELECT 1")
except Exception:
raise sa.exc.DisconnectionError()
cursor.close() | Ensure connections are valid.
From: `http://docs.sqlalchemy.org/en/rel_0_8/core/pooling.html`
In case db has been restarted pool may return invalid connections. |
def observe(self, path, callback, timeout=None, **kwargs):
request = self.mk_request(defines.Codes.GET, path)
request.observe = 0
for k, v in kwargs.items():
if hasattr(request, k):
setattr(request, k, v)
return self.send_request(request, callback, timeout) | Perform a GET with observe on a certain path.
:param path: the path
:param callback: the callback function to invoke upon notifications
:param timeout: the timeout of the request
:return: the response to the observe request |
def close_database_session(session):
try:
session.close()
except OperationalError as e:
raise DatabaseError(error=e.orig.args[1], code=e.orig.args[0]) | Close connection with the database |
def parse_style_decl(style: str, owner: AbstractNode = None
) -> CSSStyleDeclaration:
_style = CSSStyleDeclaration(style, owner=owner)
return _style | Make CSSStyleDeclaration from style string.
:arg AbstractNode owner: Owner of the style. |
def writeFile(filename, data):
    """Write the string *data* to *filename*, UTF-8 encoded.

    Uses text mode with an explicit encoding instead of manually encoding to
    bytes; ``newline=''`` disables newline translation so the bytes written
    are identical to the original binary-mode implementation.
    """
    with open(filename, 'w', encoding='utf-8', newline='') as f:
        f.write(data)
# | Writes data to a file |
def pipfaster_download_cacher(index_urls):
from pip._internal import download
orig = download._download_http_url
patched_fn = get_patched_download_http_url(orig, index_urls)
return patched(vars(download), {'_download_http_url': patched_fn}) | vanilla pip stores a cache of the http session in its cache and not the
wheel files. We intercept the download and save those files into our
cache |
def send_stats(self, start, environ, response_interception, exception=None):
if response_interception:
key_name = self.get_key_name(environ, response_interception, exception=exception)
timer = self.statsd_client.timer(key_name)
timer._start_time = start
timer.stop() | Send the actual timing stats.
:param start: start time in seconds since the epoch as a floating point number
:type start: float
:param environ: wsgi environment
:type environ: dict
:param response_interception: dictionary in form
{'status': '<response status>', 'response_headers': [<response headers], 'exc_info': <exc_info>}
This is the interception of what was passed to start_response handler.
:type response_interception: dict
:param exception: optional exception happened during the iteration of the response
:type exception: Exception |
def status(name, runas=None):
return prlctl('status', salt.utils.data.decode(name), runas=runas) | Status of a VM
:param str name:
Name/ID of VM whose status will be returned
:param str runas:
The user that the prlctl command will be run as
Example:
.. code-block:: bash
salt '*' parallels.status macvm runas=macdev |
def inner(self, isolated=False):
if isolated:
return Frame(self.eval_ctx, level=self.symbols.level + 1)
return Frame(self.eval_ctx, self) | Return an inner frame. |
def put(self, event):
self.log("Configuration put request ",
event.user)
try:
component = model_factory(Schema).find_one({
'uuid': event.data['uuid']
})
component.update(event.data)
component.save()
response = {
'component': 'hfos.ui.configurator',
'action': 'put',
'data': True
}
self.log('Updated component configuration:',
component.name)
self.fireEvent(reload_configuration(component.name))
except (KeyError, ValueError, ValidationError, PermissionError) as e:
response = {
'component': 'hfos.ui.configurator',
'action': 'put',
'data': False
}
self.log('Storing component configuration failed: ',
type(e), e, exc=True, lvl=error)
self.fireEvent(send(event.client.uuid, response))
return | Store a given configuration |
def update_insight(self, project_key, insight_id, **kwargs):
request = self.__build_insight_obj(
lambda: _swagger.InsightPatchRequest(), kwargs)
project_owner, project_id = parse_dataset_key(project_key)
try:
self._insights_api.update_insight(project_owner,
project_id,
insight_id, body=request)
except _swagger.rest.ApiException as e:
raise RestApiError(cause=e) | Update an insight.
**Note that only elements included in the request will be updated. All
omitted elements will remain untouched.
:param project_key: Project identifier, in the form of
projectOwner/projectid
:type project_key: str
:param insight_id: Insight unique identifier.
:type insight_id: str
:param title: Insight title
:type title: str
:param description: Insight description.
:type description: str, optional
:param image_url: If image-based, the URL of the image
:type image_url: str
:param embed_url: If embed-based, the embeddable URL
:type embed_url: str
:param source_link: Permalink to source code or platform this insight
was generated with. Allows others to replicate the steps originally
used to produce the insight.
:type source_link: str, optional
:param data_source_links: One or more permalinks to the data sources
used to generate this insight. Allows others to access the data
originally used to produce the insight.
:type data_source_links: array
:returns: message object
:rtype: object
:raises RestApiException: If a server error occurs
Examples
--------
>>> import datadotworld as dw
>>> api_client = dw.api_client()
>>> api_client.update_insight(
... 'username/test-project', 'insightid'
... title='demo datadotworld') # doctest: +SKIP |
def _radix_int_handler_factory(radix_indicators, charset, parse_func):
def assertion(c, ctx):
return c in radix_indicators and \
((len(ctx.value) == 1 and ctx.value[0] == _ZERO) or
(len(ctx.value) == 2 and ctx.value[0] == _MINUS and ctx.value[1] == _ZERO)) and \
ctx.ion_type == IonType.INT
return _numeric_handler_factory(charset, lambda prev, c, ctx, trans: _illegal_character(c, ctx),
assertion, radix_indicators, parse_func, illegal_at_end=radix_indicators) | Generates a handler co-routine which tokenizes a integer of a particular radix.
Args:
radix_indicators (sequence): The set of ordinals of characters that indicate the radix of this int.
charset (sequence): Set of ordinals of legal characters for this radix.
parse_func (callable): Called upon ending the numeric value. Accepts the current token value and returns a
thunk that lazily parses the token. |
async def log_transaction(self, **params):
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
coinid = params.get("coinid")
if not coinid in ["QTUM", "PUT"]:
return {"error":400, "reason": "Missed or invalid coinid"}
database = client[settings.TXS]
source_collection = database[coinid]
await source_collection.find_one_and_update({"txid":params.get("txid")},{"$set":{
"blocknumber":params.get("blocknumber"),
"blockhash":params.get("blockhash"),
"gasLimit":params.get("gasLimit"),
"gasPrice":params.get("gasPrice"),
}})
return {"success":True} | Writing transaction to database |
def delete(node_name):
result = {}
node = nago.core.get_node(node_name)
if not node:
result['status'] = 'error'
result['message'] = "node not found."
else:
node.delete()
result['status'] = 'success'
result['message'] = 'node deleted.'
return result | Delete a specific node |
def search_texts(args, parser):
store = utils.get_data_store(args)
corpus = utils.get_corpus(args)
catalogue = utils.get_catalogue(args)
store.validate(corpus, catalogue)
ngrams = []
for ngram_file in args.ngrams:
ngrams.extend(utils.get_ngrams(ngram_file))
store.search(catalogue, ngrams, sys.stdout) | Searches texts for presence of n-grams. |
def alias_proficiency(self, proficiency_id, alias_id):
self._alias_id(primary_id=proficiency_id, equivalent_id=alias_id) | Adds an ``Id`` to a ``Proficiency`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Proficiency`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another proficiency, it is
reassigned to the given proficiency ``Id``.
arg: proficiency_id (osid.id.Id): the ``Id`` of a
``Proficiency``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is already assigned
raise: NotFound - ``proficiency_id`` not found
raise: NullArgument - ``proficiency_id`` or ``alias_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.* |
def ends(self, layer):
ends = []
for data in self[layer]:
ends.append(data[END])
return ends | Retrieve end positions of elements if given layer. |
def apply_defaults(self):
self.emit('will_apply_defaults')
self.schema.apply_defaults(self)
self.emit('did_apply_defaults') | Apply schema defaults to this document. |
def remove(self, field: Field):
self._table = [fld for fld in self._table if fld is not field] | Removes a `Field` from the table by identity. |
def networkproperties(self):
print('Number of nodes: %d' % len(self.nodes))
print('Number of elements: %d' % len(self.elements))
print('Number of end nodes: %d' % len(self.endnodes))
print('Number of distinct networks: %d' % len(self.numberofnetworks))
print('Applied node variables: %s' % ', '.join(self.variables)) | Print out some properties of the network defined by the |Node| and
|Element| objects currently handled by the |HydPy| object. |
def on_unicode_checkbox(self, w=None, state=False):
logging.debug("unicode State is %s", state)
self.controller.smooth_graph_mode = state
if state:
self.hline = urwid.AttrWrap(
urwid.SolidFill(u'\N{LOWER ONE QUARTER BLOCK}'), 'line')
else:
self.hline = urwid.AttrWrap(urwid.SolidFill(u' '), 'line')
for graph in self.graphs.values():
graph.set_smooth_colors(state)
self.show_graphs() | Enable smooth edges if utf-8 is supported |
def AddMethod(self, function, name=None):
method = MethodWrapper(self, function, name)
self.added_methods.append(method) | Adds the specified function as a method of this construction
environment with the specified name. If the name is omitted,
the default name is the name of the function itself. |
def _cmp_by_local_pref(path1, path2):
lp1 = path1.get_pattr(BGP_ATTR_TYPE_LOCAL_PREF)
lp2 = path2.get_pattr(BGP_ATTR_TYPE_LOCAL_PREF)
if not (lp1 and lp2):
return None
lp1 = lp1.value
lp2 = lp2.value
if lp1 > lp2:
return path1
elif lp2 > lp1:
return path2
else:
return None | Selects a path with highest local-preference.
Unlike the weight attribute, which is only relevant to the local
router, local preference is an attribute that routers exchange in the
same AS. Highest local-pref is preferred. If we cannot decide,
we return None. |
def spin_in_system(incl, long_an):
return np.dot(Rz(long_an), np.dot(Rx(-incl), np.array([0.,0.,1.]))) | Spin in the plane of sky of a star given its inclination and "long_an"
incl - inclination of the star in the plane of sky
long_an - longitude of ascending node (equator) of the star in the plane of sky
Return:
spin - in plane of sky |
def next_moments_operating_on(self,
qubits: Iterable[ops.Qid],
start_moment_index: int = 0
) -> Dict[ops.Qid, int]:
next_moments = {}
for q in qubits:
next_moment = self.next_moment_operating_on(
[q], start_moment_index)
next_moments[q] = (len(self._moments) if next_moment is None else
next_moment)
return next_moments | Finds the index of the next moment that touches each qubit.
Args:
qubits: The qubits to find the next moments acting on.
start_moment_index: The starting point of the search.
Returns:
The index of the next moment that touches each qubit. If there
is no such moment, the next moment is specified as the number of
moments in the circuit. Equivalently, can be characterized as one
plus the index of the last moment after start_moment_index
(inclusive) that does *not* act on a given qubit. |
def start_basic_span(self, request):
try:
if request.tracing.trace_id:
context = self.tracer.extract(
format=ZIPKIN_SPAN_FORMAT,
carrier=request.tracing)
self.span = self.tracer.start_span(
operation_name=request.endpoint,
child_of=context,
tags={tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER},
)
except opentracing.UnsupportedFormatException:
pass
except:
log.exception('Cannot extract tracing span from Trace field') | Start tracing span from the protocol's `tracing` fields.
This will only work if the `tracer` supports Zipkin-style span context.
:param request: inbound request
:type request: tchannel.tornado.request.Request |
def _set_cache_(self, attr):
if attr in TagObject.__slots__:
ostream = self.repo.odb.stream(self.binsha)
lines = ostream.read().decode(defenc).splitlines()
obj, hexsha = lines[0].split(" ")
type_token, type_name = lines[1].split(" ")
self.object = \
get_object_type_by_name(type_name.encode('ascii'))(self.repo, hex_to_bin(hexsha))
self.tag = lines[2][4:]
tagger_info = lines[3]
self.tagger, self.tagged_date, self.tagger_tz_offset = parse_actor_and_date(tagger_info)
if len(lines) > 5:
self.message = "\n".join(lines[5:])
else:
self.message = ''
else:
super(TagObject, self)._set_cache_(attr) | Cache all our attributes at once |
def before_insert(mapper, conn, target):
if target.sequence_id is None:
from ambry.orm.exc import DatabaseError
raise DatabaseError('Must have sequence id before insertion')
Table.before_update(mapper, conn, target) | event.listen method for Sqlalchemy to set the sequence_id for this
object and create an ObjectNumber value for the id |
def extract_traits(self, entity):
traits = getattr(entity, self._characteristic)
if traits is not None and isinstance(traits, Hashable):
traits = [traits]
return Trait(
traits,
getattr(entity, self._characteristic + '_match', True)
) | Extract data required to classify entity.
:param object entity:
:return: namedtuple consisting of characteristic traits and match flag
:rtype: matchbox.box.Trait |
def _learnOnNewSegments(connections, rng, newSegmentCells, growthCandidates,
initialPermanence, sampleSize, maxSynapsesPerSegment):
numNewSynapses = len(growthCandidates)
if sampleSize != -1:
numNewSynapses = min(numNewSynapses, sampleSize)
if maxSynapsesPerSegment != -1:
numNewSynapses = min(numNewSynapses, maxSynapsesPerSegment)
newSegments = connections.createSegments(newSegmentCells)
connections.growSynapsesToSample(newSegments, growthCandidates,
numNewSynapses, initialPermanence,
rng) | Create new segments, and grow synapses on them.
@param connections (SparseMatrixConnections)
@param rng (Random)
@param newSegmentCells (numpy array)
@param growthCandidates (numpy array) |
def pst(self):
if self.__pst is None and self.pst_arg is None:
raise Exception("linear_analysis.pst: can't access self.pst:" +
"no pest control argument passed")
elif self.__pst:
return self.__pst
else:
self.__load_pst()
return self.__pst | get the pyemu.Pst attribute
Returns
-------
pst : pyemu.Pst
Note
----
returns a references
If LinearAnalysis.__pst is None, then the pst attribute is
dynamically loaded before returning |
def activate_firmware_and_wait(self, rollback_override=None,
timeout=2, interval=1):
try:
self.activate_firmware(rollback_override)
except CompletionCodeError as e:
if e.cc == CC_LONG_DURATION_CMD_IN_PROGRESS:
self.wait_for_long_duration_command(
constants.CMDID_HPM_ACTIVATE_FIRMWARE,
timeout, interval)
else:
raise HpmError('activate_firmware CC=0x%02x' % e.cc)
except IpmiTimeoutError:
pass | Activate the new uploaded firmware and wait for
long running command. |
def get_declared_enums(metadata, schema, default):
types = set(column.type
for table in metadata.tables.values()
for column in table.columns
if (isinstance(column.type, sqlalchemy.Enum) and
schema == (column.type.schema or default)))
return {t.name: frozenset(t.enums) for t in types} | Return a dict mapping SQLAlchemy enumeration types to the set of their
declared values.
:param metadata:
...
:param str schema:
Schema name (e.g. "public").
:returns dict:
{
"my_enum": frozenset(["a", "b", "c"]),
} |
def write_line(self, message):
self.out.write(message + "\n")
self.out.flush() | Unbuffered printing to stdout. |
def get_default_project_id():
try:
proc = subprocess.Popen(['gcloud', 'config', 'list', '--format', 'value(core.project)'],
stdout=subprocess.PIPE)
stdout, _ = proc.communicate()
value = stdout.strip()
if proc.poll() == 0 and value:
if isinstance(value, six.string_types):
return value
else:
return value.decode()
except:
pass
config_file = os.path.join(get_config_dir(), 'config.json')
if os.path.exists(config_file):
with open(config_file) as f:
config = json.loads(f.read())
if 'project_id' in config and config['project_id']:
return str(config['project_id'])
if os.getenv('PROJECT_ID') is not None:
return os.getenv('PROJECT_ID')
return None | Get default project id from config or environment var.
Returns: the project id if available, or None. |
def get_node(self, node_id):
try:
return self._nodes[node_id]
except KeyError:
raise aiohttp.web.HTTPNotFound(text="Node ID {} doesn't exist".format(node_id)) | Return the node or raise a 404 if the node is unknown |
def scan_and_reimport(mod_type: str) -> List[Tuple[str, str]]:
mod_enabled, mod_disabled = get_modules(mod_type)
errors = []
for mod in mod_enabled + mod_disabled:
if mod in sys.modules:
msg = safe_reload(sys.modules[mod])
else:
msg = safe_load(mod)
if msg is not None:
errors.append((mod, msg))
return errors | Scans folder for modules. |
def close(self, clear=False):
if clear and not self.leave:
self.clear()
else:
self.refresh()
self.manager.remove(self) | Do final refresh and remove from manager
If ``leave`` is True, the default, the effect is the same as :py:meth:`refresh`. |
def _render_dataframe(dataframe):
data = dataframe.to_dict(orient='records')
fields = dataframe.columns.tolist()
return IPython.core.display.HTML(
datalab.utils.commands.HtmlBuilder.render_table(data, fields)) | Helper to render a dataframe as an HTML table. |
def _write_bin(self, stream, byte_order):
for rec in self.data:
for prop in self.properties:
prop._write_bin(rec[prop.name], stream, byte_order) | Save a PLY element to a binary PLY file. The element may
contain list properties. |
def _needs_reindex_multi(self, axes, method, level):
return ((com.count_not_none(*axes.values()) == self._AXIS_LEN) and
method is None and level is None and not self._is_mixed_type) | Check if we do need a multi reindex. |
def list_request_settings(self, service_id, version_number):
content = self._fetch("/service/%s/version/%d/request_settings" % (service_id, version_number))
return map(lambda x: FastlyRequestSetting(self, x), content) | Returns a list of all Request Settings objects for the given service and version. |
def check_theme(theme):
terminal_colors = curses.COLORS if curses.has_colors() else 0
if theme.required_colors > terminal_colors:
return False
elif theme.required_color_pairs > curses.COLOR_PAIRS:
return False
else:
return True | Check if the given theme is compatible with the terminal |
def is_primary(self):
return bool(next(iter(self.selfsig._signature.subpackets['h_PrimaryUserID']), False)) | If the most recent, valid self-signature specifies this as being primary, this will be True. Otherwise, False. |
def is_website(url):
if re.match(r"(http|ftp|https)://([\w\-\.]+)/?", url):
LOGGER.debug("> {0}' is matched as website.".format(url))
return True
else:
LOGGER.debug("> {0}' is not matched as website.".format(url))
return False | Check if given url string is a website.
Usage::
>>> is_website("http://www.domain.com")
True
>>> is_website("domain.com")
False
:param data: Data to check.
:type data: unicode
:return: Is website.
:rtype: bool |
def get_amount_of_tweets(self):
if not self.__response:
raise TwitterSearchException(1013)
return (len(self.__response['content']['statuses'])
if self.__order_is_search
else len(self.__response['content'])) | Returns current amount of tweets available within this instance
:returns: The amount of tweets currently available
:raises: TwitterSearchException |
def _validate_calibration_params(strategy='accuracy', min_rate=None,
beta=1.):
if strategy not in ('accuracy', 'f_beta', 'max_tpr',
'max_tnr'):
raise ValueError('Strategy can either be "accuracy", "f_beta" or '
'"max_tpr" or "max_tnr". Got "{}" instead.'
.format(strategy))
if strategy == 'max_tpr' or strategy == 'max_tnr':
if (min_rate is None or not isinstance(min_rate, (int, float)) or
not min_rate >= 0 or not min_rate <= 1):
raise ValueError('Parameter min_rate must be a number in'
'[0, 1]. '
'Got {} instead.'.format(min_rate))
if strategy == 'f_beta':
if beta is None or not isinstance(beta, (int, float)):
raise ValueError('Parameter beta must be a real number. '
'Got {} instead.'.format(type(beta))) | Ensure that calibration parameters have allowed values |
def heappush(heap, item):
    """Push item onto heap, maintaining the heap invariant."""
    # Delegate to the C-accelerated stdlib implementation instead of the
    # hand-rolled copy, which depended on a private _siftdown helper.
    import heapq
    heapq.heappush(heap, item)
# | Push item onto heap, maintaining the heap invariant. |
def list_downloads():
outfiles = []
for root, subFolder, files in salt.utils.path.os_walk('/Library/Updates'):
for f in files:
outfiles.append(os.path.join(root, f))
dist_files = []
for f in outfiles:
if f.endswith('.dist'):
dist_files.append(f)
ret = []
for update in _get_available():
for f in dist_files:
with salt.utils.files.fopen(f) as fhr:
if update.rsplit('-', 1)[0] in salt.utils.stringutils.to_unicode(fhr.read()):
ret.append(update)
return ret | Return a list of all updates that have been downloaded locally.
:return: A list of updates that have been downloaded
:rtype: list
CLI Example:
.. code-block:: bash
salt '*' softwareupdate.list_downloads |
def delete_view(self, request, object_id, extra_context=None):
if not extra_context:
extra_context = {}
extra_context['is_popup'] = request.REQUEST.get('_popup', 0)
return super(EnhancedAdminMixin, self).delete_view(request, object_id, extra_context) | Sets is_popup context variable to hide admin header. |
def command_builder(self, string, value=None, default=None, disable=None):
    """Build a CLI command string, honoring default > disable > value precedence."""
    if default:
        return 'default %s' % string
    if disable:
        return 'no %s' % string
    if value is True:
        # Boolean True means "emit the bare command".
        return string
    if value:
        return '%s %s' % (string, value)
    # Deprecated path: a falsy value negates the command (see Notes).
    return 'no %s' % string
# | Builds a command with keywords
# Notes:
#     Negating a command string by overriding 'value' with None or an
#     assigned value that evaluates to false has been deprecated.
#     Please use 'disable' to negate a command.
#     Parameters are evaluated in the order 'default', 'disable', 'value'
# Args:
#     string (str): The command string
#     value (str): The configuration setting to substitute into the
#         command string. If value is a boolean and True, just the
#         command string is used
#     default (bool): Specifies the command should use the default
#         keyword argument. Default preempts disable and value.
#     disable (bool): Specifies the command should use the no
#         keyword argument. Disable preempts value.
# Returns:
#     A command string that can be used to configure the node |
def data_to_dict(self, sysbase=False):
assert isinstance(sysbase, bool)
ret = {}
for key in self.data_keys:
if (not sysbase) and (key in self._store):
val = self._store[key]
else:
val = self.__dict__[key]
ret[key] = val
return ret | Return the loaded model parameters as one dictionary.
Each key of the dictionary is a parameter name, and the value is a
list of all the parameter values.
:param sysbase: use system base quantities
:type sysbase: bool |
def cut_sequences_relative(records, slices, record_id):
with _record_buffer(records) as r:
try:
record = next(i for i in r() if i.id == record_id)
except StopIteration:
raise ValueError("Record with id {0} not found.".format(record_id))
new_slices = _update_slices(record, slices)
for record in multi_cut_sequences(r(), new_slices):
yield record | Cuts records to slices, indexed by non-gap positions in record_id |
def checkIPFromAlias(alias=None):
headers = {
"Content-type": "text/html",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Encoding": " gzip, deflate",
"Accept-Language": " es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3",
"Connection": "keep-alive",
"DNT": "1",
"Host": "www.resolvethem.com",
"Referer": "http://www.resolvethem.com/index.php",
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0",
"Content-Length": "26",
"Content-Type": "application/x-www-form-urlencoded",
}
req = requests.post("http://www.resolvethem.com/index.php",headers=headers,data={'skypeUsername': alias,'submit':''})
data = req.content
p = re.compile("class='alert alert-success'>([0-9\.]*)<")
allMatches = p.findall(data)
if len(allMatches)> 0:
jsonData = {}
jsonData["type"]="i3visio.ip"
jsonData["value"]=allMatches[0]
jsonData["attributes"]=[]
return jsonData
return {} | Method that checks if the given alias is currently connected to Skype and returns its IP address.
:param alias: Alias to be searched.
:return: Python structure for the Json received. It has the following structure:
{
"type": "i3visio.ip",
"value": "1.1.1.1",
"attributes" : []
} |
def _optimize_A(self, A):
right_eigenvectors = self.right_eigenvectors_[:, :self.n_macrostates]
flat_map, square_map = get_maps(A)
alpha = to_flat(1.0 * A, flat_map)
def obj(x):
return -1 * self._objective_function(
x, self.transmat_, right_eigenvectors, square_map,
self.populations_
)
alpha = scipy.optimize.basinhopping(
obj, alpha, niter_success=1000,
)['x']
alpha = scipy.optimize.fmin(
obj, alpha, full_output=True, xtol=1E-4, ftol=1E-4,
maxfun=5000, maxiter=100000
)[0]
if np.isneginf(obj(alpha)):
raise ValueError(
"Error: minimization has not located a feasible point.")
A = to_square(alpha, square_map)
return A | Find optimal transformation matrix A by minimization.
Parameters
----------
A : ndarray
The transformation matrix A.
Returns
-------
A : ndarray
The transformation matrix. |
def match_rules(tree, rules, fun=None, multi=False):
if multi:
context = match_rules_context_multi(tree, rules)
else:
context = match_rules_context(tree, rules)
if not context:
return None
if fun:
args = fun.__code__.co_varnames
if multi:
res = []
for c in context:
action_context = {}
for arg in args:
if arg in c:
action_context[arg] = c[arg]
res.append(fun(**action_context))
return res
else:
action_context = {}
for arg in args:
if arg in context:
action_context[arg] = context[arg]
return fun(**action_context)
else:
return context | Matches a Tree structure with the given query rules.
Query rules are represented as a dictionary of template to action.
Action is either a function, or a dictionary of subtemplate parameter to rules::
rules = { 'template' : { 'key': rules } }
| { 'template' : {} }
Args:
tree (Tree): Parsed tree structure
rules (dict): A dictionary of query rules
fun (function): Function to call with context (set to None if you want to return context)
multi (Bool): If True, returns all matched contexts, else returns first matched context
Returns:
Contexts from matched rules |
def wget_files():
for f in lamost_id:
short = (f.split('-')[2]).split('_')[0]
filename = "%s/%s.gz" %(short,f)
DIR = "/Users/annaho/Data/Li_Giants/Spectra_APOKASC"
searchfor = "%s/%s.gz" %(DIR,f)
if glob.glob(searchfor):
print("done")
else:
os.system(
"wget http://dr2.lamost.org/sas/fits/%s" %(filename))
new_filename = filename.split("_")[0] + "_" + filename.split("_")[2]
os.system(
"wget http://dr2.lamost.org/sas/fits/%s" %(new_filename)) | Pull the files from the LAMOST archive |
def extract_alphabet(alphabet, inputdata, fixed_start = False):
if not inputdata:
return []
base_alphabet = alphabet.alphabet
lexer = lexer_factory(alphabet, base_alphabet)
totallen = len(inputdata)
maxl = totallen
minl = 1
if fixed_start:
max_start = 1
else:
max_start = totallen
result = []
for i in range(max_start):
for j in range(i+minl, min(i+maxl, totallen) + 1):
try:
lexed = lexer(inputdata[i:j])
if lexed and len(lexed) == 1:
result.append((i,j, inputdata[i:j], lexed[0].gd))
elif lexed:
raise Exception
except:
continue
result = filter_subsets(result)
return [PositionToken(content, gd, left, right) for (left, right, content, gd) in result] | Receives a sequence and an alphabet,
returns a list of PositionTokens with all of the parts of the sequence that
are a subset of the alphabet |
def _merge_outfile_fname(out_file, bam_files, work_dir, batch):
if out_file is None:
out_file = os.path.join(work_dir, os.path.basename(sorted(bam_files)[0]))
if batch is not None:
base, ext = os.path.splitext(out_file)
out_file = "%s-b%s%s" % (base, batch, ext)
return out_file | Derive correct name of BAM file based on batching. |
def list_directory(self, path):
r
_complain_ifclosed(self.closed)
return self.fs.list_directory(path) | r"""
Get list of files and directories for ``path``\ .
:type path: str
:param path: the path of the directory
:rtype: list
:return: list of files and directories in ``path``
:raises: :exc:`~exceptions.IOError` |
def encode_function_call(self, function_name, args):
if function_name not in self.function_data:
raise ValueError('Unkown function {}'.format(function_name))
description = self.function_data[function_name]
function_selector = zpad(encode_int(description['prefix']), 4)
arguments = encode_abi(description['encode_types'], args)
return function_selector + arguments | Return the encoded function call.
Args:
function_name (str): One of the existing functions described in the
contract interface.
args (List[object]): The function arguments that will be encoded and
used in the contract execution in the vm.
Return:
bin: The encoded function name and arguments so that it can be used
with the evm to execute a funcion call, the binary string follows
the Ethereum Contract ABI. |
def print_variables(self):
print_out = partial(self.print_out, format_options='green')
print_out('===== variables =====')
for var, hint in self.vars.get_descriptions().items():
print_out(' %' + var + ' = ' + var + ' = ' + hint.replace('%', '%%'))
print_out('=====================')
return self | Prints out magic variables available in config files
alongside with their values and descriptions.
May be useful for debugging.
http://uwsgi-docs.readthedocs.io/en/latest/Configuration.html#magic-variables |
async def on_isupport_maxchannels(self, value):
if 'CHANTYPES' in self._isupport and 'CHANLIMIT' not in self._isupport:
self._channel_limits = {}
prefixes = self._isupport['CHANTYPES']
self._channel_limits[frozenset(prefixes)] = int(value)
for prefix in prefixes:
self._channel_limit_groups[prefix] = frozenset(prefixes) | Old version of CHANLIMIT. |
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
if showing_stack.inside:
return
showing_stack.inside = True
try:
if frame is None:
frame = sys._getframe(1)
stack = "".join(traceback.format_stack(frame, limit))
for line in (l[2:] for l in stack.split("\n") if l.strip()):
logger.log(level, line)
finally:
showing_stack.inside = False | Display the current stack on ``logger``.
This function is designed to be used during emission of log messages, so it
won't call itself. |
def stop(self, graceful=False):
    """Set the stop-flag.

    If ``graceful=True``, block until the workers have finished executing
    any tasks they might currently be working on; a ``KeyboardInterrupt``
    while waiting aborts the wait immediately.
    """
    self.stop_flag.set()
    if not graceful:
        self._logger.info('Shutting down')
        return
    self._logger.info('Shutting down gracefully...')
    try:
        for _, worker_process in self.worker_threads:
            worker_process.join()
    except KeyboardInterrupt:
        self._logger.info('Received request to shut down now.')
    else:
        self._logger.info('All workers have stopped.')
def map_pores(self, pores, origin, filtered=True):
    r"""
    Given a list of pores on a target object, finds indices of those
    pores on the calling object.

    Parameters
    ----------
    pores : array_like
        The indices of the pores on the object specified in ``origin``.
    origin : OpenPNM Base object
        The object corresponding to the indices given in ``pores``.
    filtered : boolean (default is ``True``)
        If ``True`` then an ND-array of indices is returned with missing
        indices removed, otherwise a named-tuple containing both the
        ``indices`` and a boolean ``mask`` with ``False`` indicating
        which locations were not found.

    Returns
    -------
    Pore indices on the calling object corresponding to the same pores
    on the ``origin`` object.  Can be an array or a tuple containing an
    array and a mask, depending on the value of ``filtered``.
    """
    # Bug fix: a stray bare `r` expression (leftover from a stripped
    # r"""...""" docstring) raised NameError at runtime; removed.
    # Translate local indices into globally unique IDs, then resolve them
    # on this object via the generic _map helper.
    ids = origin['pore._id'][pores]
    return self._map(element='pore', ids=ids, filtered=filtered)
def remove(self, tag, nth=1):
    """Remove the n-th occurrence of ``tag`` in this message.

    :param tag: FIX field tag number to be removed.
    :param nth: Index of tag if repeating, first is 1.
    :returns: Value of the field if removed, None otherwise.
    """
    tag = fix_tag(tag)
    remaining = int(nth)
    for index, (current_tag, value) in enumerate(self.pairs):
        if current_tag != tag:
            continue
        remaining -= 1
        if remaining == 0:
            # Safe to pop here: we return immediately after mutating.
            self.pairs.pop(index)
            return value
    return None
def insert_list(self, cards, indice=-1):
    """Insert a list of given cards into the stack at a given indice.

    :arg list cards:
        The list of cards to insert into the stack.
    :arg int indice:
        Where to insert the given cards.  ``-1`` appends; ``0`` prepends
        (in reversed order, matching ``deque.extendleft`` semantics).
    """
    self_size = len(self.cards)
    if indice in [0, -1]:
        if indice == -1:
            self.cards += cards
        else:
            self.cards.extendleft(cards)
    elif indice == self_size:
        # Bug fix: inserting at the end index previously fell through the
        # `elif indice != self_size` guard and silently dropped the cards;
        # treat it the same as appending.
        self.cards += cards
    else:
        half_x, half_y = self.split(indice)
        self.cards = list(half_x.cards) + list(cards) + list(half_y.cards)
def debug(self, *args):
    """Log a debug message. Used for debugging."""
    if _canShortcutLogging(self.logCategory, DEBUG):
        return
    formatted = self.logFunction(*args)
    debugObject(self.logObjectName(), self.logCategory, *formatted)
def add_argument(self, parser, bootstrap=False):
    """Add this item as an argument to the given parser.

    Does nothing when the item is not exposed on the CLI.

    Args:
        parser (argparse.ArgumentParser): The parser to add this item to.
        bootstrap: Flag to indicate whether you only want to mark this
            item as required or not.
    """
    if not self.cli_expose:
        return
    names = self._get_argparse_names(parser.prefix_chars)
    options = self._get_argparse_kwargs(bootstrap)
    parser.add_argument(*names, **options)
def experiments_predictions_create(self, experiment_id, model_id, argument_defs, name, arguments=None, properties=None):
    """Create a new model run for the given experiment.

    Parameters
    ----------
    experiment_id : string
        Unique experiment identifier
    model_id : string
        Unique identifier of model to run
    argument_defs : list(attribute.AttributeDefinition)
        Definition of valid arguments for the given model
    name : string
        User-provided name for the model run
    arguments : list(dict('name':...,'value':...)), optional
        List of attribute instances
    properties : Dictionary, optional
        Set of model run properties.

    Returns
    -------
    ModelRunHandle
        Handle for created model run, or None if experiment is unknown.
    """
    # Refuse to create runs for experiments that do not exist.
    if self.experiments_get(experiment_id) is None:
        return None
    return self.predictions.create_object(
        name, experiment_id, model_id, argument_defs,
        arguments=arguments, properties=properties)
def system(command):
    """Convenience wrapper around ``subprocess.check_call`` that logs the
    command before passing it on.  The command can be either a string or a
    sequence of strings; if it is a string, ``shell=True`` is passed to
    ``subprocess.check_call``.

    :type command: str | sequence[string]
    """
    logger.debug('Running: %r', command)
    use_shell = isinstance(command, string_types)
    subprocess.check_call(command, shell=use_shell, bufsize=-1)
def outputSimple(self):
    """Simple output mode: write summary counts followed by the collated
    results (successes first, then errors) to stdout.
    """
    ok_count = sum(1 for rsp in self.results if rsp['success'])
    total = len(self.serverList)
    lines = [
        "INFO QUERIED {0}".format(total),
        "INFO SUCCESS {0}".format(ok_count),
        "INFO ERROR {0}".format(total - ok_count),
    ]
    errors = []
    for rsp in self.resultsColated:
        body = "{0} {1}".format(len(rsp['servers']), "|".join(rsp['results']))
        if rsp['success']:
            lines.append("RESULT " + body)
        else:
            errors.append("ERROR " + body)
    # Errors are appended after all successful results.
    lines += errors
    sys.stdout.write("\n".join(lines))
    sys.stdout.write("\n")
def parse_compound_file(path, format):
    """Open and parse a compound file based on file extension or given
    format.  Path can be given as a string or a context.  Yields the
    parsed compounds.
    """
    context = FilePathContext(path)
    format = resolve_format(format, context.filepath)
    # Dispatch table: label for logging plus a parser callable.  Note the
    # ModelSEED parser takes its arguments in the opposite order.
    dispatch = {
        'yaml': ('YAML', lambda ctx, f: parse_compound_yaml_file(ctx, f)),
        'modelseed': ('ModelSEED TSV',
                      lambda ctx, f: modelseed.parse_compound_file(f, ctx)),
        'tsv': ('TSV', lambda ctx, f: parse_compound_table_file(ctx, f)),
    }
    if format not in dispatch:
        raise ParseError('Unable to detect format of compound file {}'.format(
            context.filepath))
    label, parse = dispatch[format]
    logger.debug('Parsing compound file {} as {}'.format(
        context.filepath, label))
    with context.open('r') as f:
        for compound in parse(context, f):
            yield compound
def get_servo_position(self):
    """Get the current position of the Herkulex servo.

    Args:
        none

    Returns:
        int: position of the servo - 0 to 1023 (13-bit range for the
        0x04/0x06 servo models).

    Raises:
        SerialException: Error occurred while opening serial port.
    """
    data = [0x09, self.servoid, RAM_READ_REQ, CALIBRATED_POSITION_RAM, BYTE2]
    send_data(data)
    try:
        rxdata = SERPORT.read(13)
        # NOTE(review): ord() implies rxdata items are one-character
        # strings (Python 2 pyserial); on Python 3, indexing bytes already
        # yields ints -- confirm the target runtime before relying on this.
        if (self.servomodel == 0x06) or (self.servomodel == 0x04):
            return ((ord(rxdata[10]) & 0xff) << 8) | (ord(rxdata[9]) & 0xFF)
        else:
            return ((ord(rxdata[10]) & 0x03) << 8) | (ord(rxdata[9]) & 0xFF)
    except HerkulexError:
        # Bug fix: this was a Python 2 `print` statement, which is a
        # syntax error under Python 3 (the rest of this file is Python 3).
        print("Could not read from the servos. Check connection")
def show_instance(name, session=None, call=None):
    """Show information about a specific VM or template.

    .. code-block:: bash

        salt-cloud -a show_instance xenvm01

    .. note:: memory is memory_dynamic_max
    """
    if call == 'function':
        raise SaltCloudException(
            # Bug fix: the message previously misspelled 'show_instnce'.
            'The show_instance function must be called with -a or --action.'
        )
    log.debug('show_instance-> name: %s session: %s', name, session)
    if session is None:
        session = _get_session()
    vm = _get_vm(name, session=session)
    record = session.xenapi.VM.get_record(vm)
    if not record['is_a_template'] and not record['is_control_domain']:
        try:
            base_template_name = record['other_config']['base_template_name']
        except KeyError:
            # Narrowed from a bare `except Exception`: only a missing key
            # is expected when looking up the template name.
            base_template_name = None
            log.debug(
                'VM %s does not have a base_template_name attribute',
                record['name_label']
            )
        ret = {'id': record['uuid'],
               'image': base_template_name,
               'name': record['name_label'],
               'size': record['memory_dynamic_max'],
               'state': record['power_state'],
               'private_ips': get_vm_ip(name, session),
               'public_ips': None}
        __utils__['cloud.cache_node'](
            ret,
            __active_provider_name__,
            __opts__
        )
        return ret
def finalize(self):
    """Shutdown the library and clean up the model.

    Note that the Fortran library's cleanup code is not up to snuff yet,
    so the cleanup is not perfect.  Note also that the working directory
    is changed back to the original one.
    """
    self.library.finalize.argtypes = []
    self.library.finalize.restype = c_int
    exit_code = wrap(self.library.finalize)()
    logger.info('cd {}'.format(self.original_dir))
    os.chdir(self.original_dir)
    if exit_code:
        raise RuntimeError(
            "Finalizing model {engine} failed with exit code {code}".format(
                engine=self.engine, code=exit_code))
def _starts_with_drive_letter(self, file_path):
colon = self._matching_string(file_path, ':')
return (self.is_windows_fs and len(file_path) >= 2 and
file_path[:1].isalpha and (file_path[1:2]) == colon) | Return True if file_path starts with a drive letter.
Args:
file_path: the full path to be examined.
Returns:
`True` if drive letter support is enabled in the filesystem and
the path starts with a drive letter. |
def max_rain(self):
    """Return a tuple containing the max value in the rain series
    preceded by its timestamp.

    :returns: a tuple
    :raises: ValueError when the measurement series is empty
    """
    samples = self._purge_none_samples(self.rain_series())
    return max(samples, key=lambda sample: sample[1])
def on_confirmation(self, frame):
    """Invoked by pika when RabbitMQ responds to a Basic.Publish RPC
    command, passing in either a Basic.Ack or Basic.Nack frame with the
    delivery tag of the message that was published.  The delivery tag is
    an integer counter indicating the message number sent on the channel
    via Basic.Publish.

    :param pika.frame.Method frame: Basic.Ack or Basic.Nack frame
    """
    method = frame.method
    delivered = method.NAME.split('.')[1].lower() == 'ack'
    self.logger.debug('Received publisher confirmation (Delivered: %s)',
                      delivered)
    if method.multiple:
        # A "multiple" confirmation covers every tag since the last one.
        for tag in range(self.last_confirmation + 1, method.delivery_tag):
            self.confirm_delivery(tag, delivered)
    self.confirm_delivery(method.delivery_tag, delivered)
    self.last_confirmation = method.delivery_tag
def move_partition_replica(self, under_loaded_rg, eligible_partition):
    """Move partition to the under-loaded replication-group, if an
    eligible source/destination broker pair can be found.
    """
    source, dest = self._get_eligible_broker_pair(
        under_loaded_rg,
        eligible_partition,
    )
    if not (source and dest):
        return
    self.log.debug(
        'Moving partition {p_name} from broker {source_broker} to '
        'replication-group:broker {rg_dest}:{dest_broker}'.format(
            p_name=eligible_partition.name,
            source_broker=source.id,
            dest_broker=dest.id,
            rg_dest=under_loaded_rg.id,
        ),
    )
    source.move_partition(eligible_partition, dest)
def scandir_limited(top, limit, deep=0):
    """Yield only directories below ``top``, with the given depth limit.

    :param top: source path
    :param limit: how deep should be scanned?
    :param deep: internal depth counter
    :return: yields os.DirEntry() instances
    """
    deep += 1
    try:
        entries = Path2(top).scandir()
    except PermissionError as err:
        log.error("scandir error: %s" % err)
        return
    for entry in entries:
        if not entry.is_dir(follow_symlinks=False):
            continue
        if deep < limit:
            # Still within the limit: recurse into the subdirectory.
            yield from scandir_limited(entry.path, limit, deep)
        else:
            yield entry
def assert_keys_exist(self, caller, *keys):
    """Assert that the context contains all of the given keys.

    Args:
        caller: string. Calling function or module name - this is used to
            construct error messages.
        keys: validates that these keys exist in the context.

    Raises:
        KeyNotInContextError: When a key doesn't exist in the context.
    """
    assert keys, ("*keys parameter must be specified.")
    for key in keys:
        self.assert_key_exists(key, caller)
def add_transform_chain(self, tc):
    """Insert the GLSL snippets of a transform chain.

    Clip transforms are handled in the fragment shader; every other GPU
    transform is inlined into the vertex shader.
    """
    for transform in tc.gpu_transforms:
        if isinstance(transform, Clip):
            # Pass the pre-clip position to the fragment shader instead.
            self.insert_vert('v_temp_pos_tr = temp_pos_tr;')
        else:
            self.insert_vert(transform.glsl('temp_pos_tr'))
    clip = tc.get('Clip')
    if clip:
        self.insert_frag(clip.glsl('v_temp_pos_tr'), 'before_transforms')
def pass_data_on(self, data_setters):
    """Write the data from the getters to the setters.

    :param data_setters: a series of functions that can fill a chemical
        data structure
    :type data_setters: DataTransferInterface
    """
    # Allocate the target structure first, sized from this decoder's data.
    data_setters.init_structure(self.num_bonds, len(self.x_coord_list), len(self.group_type_list),
                                len(self.chain_id_list), len(self.chains_per_model), self.structure_id)
    # Delegate each category of data to the decoder utilities in turn,
    # then finalize.  NOTE(review): call order preserved from upstream --
    # whether later steps depend on earlier ones is not visible here.
    decoder_utils.add_entity_info(self, data_setters)
    decoder_utils.add_atomic_information(self, data_setters)
    decoder_utils.add_header_info(self, data_setters)
    decoder_utils.add_xtalographic_info(self, data_setters)
    decoder_utils.generate_bio_assembly(self, data_setters)
    decoder_utils.add_inter_group_bonds(self, data_setters)
    data_setters.finalize_structure()
def from_coordinates(cls, ra=None, dec=None,
                     distance=None,
                     pm_ra_cosdec=None, pm_dec=None,
                     radial_velocity=None,
                     obstime=2000.0*u.year,
                     id=None, mag=None,
                     **kwargs):
    """Initialize a constellation object.

    Parameters
    ----------
    ra, dec, distance, pm_ra_cosdec, pm_dec, radial_velocity
        These must be able to initialize a SkyCoord.
    id : list, array
        Identifications for the entries.
    mag : list, array
        Magnitudes for the entries.
    **kwargs
        NOTE(review): documented upstream as being passed along to
        SkyCoord, but this body never uses ``kwargs`` -- confirm intent.
    """
    N = len(np.atleast_1d(ra))
    # Default IDs are stringified indices; default magnitudes are zero.
    if id is None:
        id = ['{}'.format(i) for i in range(N)]
    if mag is None:
        mag = np.zeros(N)
    standardized = Table(data=[id, mag], names=['object-id', 'filter-mag'])
    # Copy each provided coordinate column by looking the parameter up in
    # locals(); assumes every name in cls.coordinate_keys matches one of
    # this function's parameter names -- fragile, but preserved as-is.
    for k in cls.coordinate_keys:
        if locals()[k] is not None:
            standardized[k] = locals()[k]
    return cls(standardized)
def connection_open(self) -> None:
    """Callback when the WebSocket opening handshake completes.

    Enter the OPEN state and start the data transfer phase.
    """
    assert self.state is State.CONNECTING
    self.state = State.OPEN
    logger.debug("%s - state = OPEN", self.side)
    # Start the long-running tasks that drive the connection: data
    # transfer, keepalive pings, and the eventual close handshake.
    self.transfer_data_task = self.loop.create_task(self.transfer_data())
    self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping())
    self.close_connection_task = self.loop.create_task(self.close_connection())
def query_keys(self, user_devices, timeout=None, token=None):
    """Query the homeserver for public keys by user and optionally device.

    Args:
        user_devices (dict): The devices whose keys to download. Should be
            formatted as <user_id>: [<device_ids>]. No device_ids
            indicates all devices for the corresponding user.
        timeout (int): Optional. The time (in milliseconds) to wait when
            downloading keys from remote servers.
        token (str): Optional. If the client is fetching keys as a result
            of a device update received in a sync request, this should be
            the 'since' token of that sync request, or any later sync
            token.
    """
    content = {"device_keys": user_devices}
    # Only include the optional fields when they carry a truthy value.
    for field, value in (("timeout", timeout), ("token", token)):
        if value:
            content[field] = value
    return self._send("POST", "/keys/query", content=content)
def rnegative_binomial(mu, alpha, size=None):
    """Random negative binomial variates, drawn as a gamma-mixed Poisson."""
    mean = np.asarray(mu, dtype=float)
    lam = np.random.gamma(alpha, mean / alpha, size)
    return np.random.poisson(lam, size)
def teams(self, name=None, id=None, is_hidden=False, **kwargs):
    """Teams of KE-chain.

    Provide a list of :class:`Team`s of KE-chain. You can filter on team
    name or id or any other advanced filter.

    :param name: (optional) team name to filter
    :type name: basestring or None
    :param id: (optional) id of the team to filter
    :type id: basestring or None
    :param is_hidden: (optional) boolean to show non-hidden or hidden
        teams or both (None) (default is non-hidden)
    :type is_hidden: bool or None
    :param kwargs: Additional filtering keyword=value arguments
    :type kwargs: dict or None
    :return: List of :class:`Teams`
    :raises NotFoundError: when a team could not be found
    """
    request_params = {'name': name, 'id': id, 'is_hidden': is_hidden}
    if kwargs:
        # Extra keyword filters override the basic ones.
        request_params.update(**kwargs)
    response = self._request('GET', self._build_url('teams'), params=request_params)
    if response.status_code != requests.codes.ok:
        raise NotFoundError("Could not find teams: '{}'".format(response.json()))
    return [Team(team, client=self) for team in response.json()['results']]
def stop(self):
    """Stop the background thread, persisting sensor state if enabled."""
    self._stop_event.set()
    if not self.persistence:
        return
    # Cancel any scheduled save before performing the final one.
    if self._cancel_save is not None:
        self._cancel_save()
        self._cancel_save = None
    self.persistence.save_sensors()
def GetUrnHashEntry(urn, token=None):
    """Return an `rdf_crypto.Hash` instance for the given URN of an AFF4
    file, reading from the relational DB when it is enabled.
    """
    if not data_store.RelationalDBEnabled():
        # Legacy path: read the hash through the AFF4 factory.
        with aff4.FACTORY.Open(urn, token=token) as fd:
            return GetFileHashEntry(fd)
    client_id, vfs_path = urn.Split(2)
    path_type, components = rdf_objects.ParseCategorizedPath(vfs_path)
    path_info = data_store.REL_DB.ReadPathInfo(client_id, path_type, components)
    return path_info.hash_entry
def send_encoded(self, message, auth_header=None, **kwargs):
    """Given an already serialized message, sign the message and pass the
    payload off to ``send_remote``.
    """
    client_string = 'raven-python/%s' % (raven.VERSION,)
    if not auth_header:
        # No header supplied: build a fresh one with the current timestamp.
        auth_header = get_auth_header(
            protocol=self.protocol_version,
            timestamp=time.time(),
            client=client_string,
            api_key=self.remote.public_key,
            api_secret=self.remote.secret_key,
        )
    headers = {
        'User-Agent': client_string,
        'X-Sentry-Auth': auth_header,
        'Content-Encoding': self.get_content_encoding(),
        'Content-Type': 'application/octet-stream',
    }
    return self.send_remote(
        url=self.remote.store_endpoint,
        data=message,
        headers=headers,
        **kwargs
    )
def readable_time_delta(seconds):
    """Convert a number of seconds into readable days, hours, and minutes.

    Leftover seconds below one minute are discarded.
    """
    days, remainder = divmod(seconds, 86400)
    hours, remainder = divmod(remainder, 3600)
    minutes = remainder // 60

    def plural(n):
        return 's' if n != 1 else ''

    text = u'{0} minute{1}'.format(minutes, plural(minutes))
    if hours != 0:
        text = u'{0} hour{1} and {2}'.format(hours, plural(hours), text)
    if days != 0:
        text = u'{0} day{1}, {2}'.format(days, plural(days), text)
    return text
def get_plugin_spec(self, name):
    """Get the specification attributes for the plugin with name ``name``
    (matched case-insensitively).

    :raises KeyError: if no plugin with that name is registered; the
        KeyError carries the requested name.
    """
    wanted = name.lower()
    for spec in self.plugins:
        spec_name = spec.get('name', spec.get('klass', spec.module))
        if spec_name.lower() == wanted:
            return spec
    # Bug fix: the loop previously rebound the `name` parameter, so on a
    # miss the KeyError reported the last inspected plugin's name instead
    # of the name the caller asked for.
    raise KeyError(name)
def setValidTo(self, value):
    """Custom setter method to calculate a `ValidTo` date based on the
    `ValidFrom` and `ExpirationInterval` field values; otherwise the
    passed value is stored directly.
    """
    valid_from = self.getValidFrom()
    # Always coerce the incoming value, matching the original behaviour
    # even when the derived date ends up being used instead.
    valid_to = DateTime(value)
    interval = self.getExpirationInterval()
    derived = bool(valid_from and interval)
    if derived:
        valid_to = valid_from + int(interval)
    self.getField("ValidTo").set(self, valid_to)
    if derived:
        logger.debug("Set ValidTo Date to: %r" % valid_to)
async def send_script(self, conn_id, data):
    """Send a script to a device.

    See :meth:`AbstractDeviceAdapter.send_script`.
    """
    progress_callback = functools.partial(_on_progress, self, 'script', conn_id)
    resp = await self._execute(self._adapter.send_script_sync, conn_id, data, progress_callback)
    # Bug fix: the error was previously reported under 'send_rpc'
    # (copy-paste from the RPC path); report it as 'send_script'.
    _raise_error(conn_id, 'send_script', resp)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.