code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def channels_kick(self, room_id, user_id, **kwargs):
return self.__call_api_post('channels.kick', roomId=room_id, userId=user_id, kwargs=kwargs) | Removes a user from the channel. |
def _assemble_complex(stmt):
    """Assemble Complex statements into text."""
    agents = [_assemble_agent_str(member) for member in stmt.members]
    first, rest = agents[0], agents[1:]
    # "<A> binds <B and C and ...>"
    sentence = '{0} binds {1}'.format(first, _join_list(rest))
    return _make_sentence(sentence)
def is_micropython_usb_device(port):
if type(port).__name__ == 'Device':
if ('ID_BUS' not in port or port['ID_BUS'] != 'usb' or
'SUBSYSTEM' not in port or port['SUBSYSTEM'] != 'tty'):
return False
usb_id = 'usb vid:pid={}:{}'.format(port['ID_VENDOR_ID'], port['ID_MODEL_ID'])
... | Checks a USB device to see if it looks like a MicroPython device. |
def read_stats(self):
    """Read current port statistics from the chassis.

    :return: dictionary {port name: {group name: {stat name: stat value}}}
    """
    # Rebuild the statistics mapping from scratch on every call.
    self.statistics = TgnObjectsDict()
    for port in self.session.ports.values():
        self.statistics[port] = port.read_port_stats()
    return self.statistics
def generateRandomSymbol(numColumns, sparseCols):
    """Generate a random SDR with sparseCols number of active columns.

    @param numColumns (int) number of columns in the temporal memory
    @param sparseCols (int) number of sparse columns for desired SDR
    @return symbol (list) SDR: unique column indices in [0, numColumns)

    Raises ValueError when sparseCols > numColumns; the original
    rejection-sampling while-loop would spin forever in that case.
    """
    # random.sample draws sparseCols unique indices in one pass,
    # replacing the manual "retry until unseen" loop.
    return random.sample(range(numColumns), sparseCols)
def addParameter(self, k, r):
    """Add a parameter to the experiment's parameter space.

    Scalar values (including strings) are wrapped in a single-element
    list; any other iterable is materialised into a list.

    :param k: parameter name
    :param r: parameter range (scalar or iterable of values)
    """
    # collections.Iterable was removed in Python 3.10; collections.abc
    # is the supported spelling. Strings are iterable, so they are
    # checked first and treated as scalars.
    if isinstance(r, str) or not isinstance(r, collections.abc.Iterable):
        r = [r]
    else:
        r = list(r)
    self._parameters[k] = r
def read_jp2_image(filename):
    """Read data from a JPEG2000 file.

    :param filename: name of JPEG2000 file to be read
    :type filename: str
    :return: data stored in the JPEG2000 file, bit-depth corrected
    """
    image = read_image(filename)
    # The JP2 header carries the true bit depth; re-open the file to
    # read it, then rescale the decoded pixels accordingly.
    with open(filename, 'rb') as jp2_file:
        bit_depth = get_jp2_bit_depth(jp2_file)
    return fix_jp2_image(image, bit_depth)
async def add(client: Client, identity_signed_raw: str) -> ClientResponse:
return await client.post(MODULE + '/add', {'identity': identity_signed_raw}, rtype=RESPONSE_AIOHTTP) | POST identity raw document
:param client: Client to connect to the api
:param identity_signed_raw: Identity raw document
:return: |
def get_states(self, config_ids):
    """Generate state information for the selected containers.

    :param config_ids: List of MapConfigId tuples.
    :type config_ids: list[dockermap.map.input.MapConfigId]
    :return: Iterable of configuration states.
    :rtype: collections.abc.Iterable[dockermap.map.state.ConfigState]
    """
    # Lazily flatten the per-config state iterables into one stream.
    per_config_states = (self.generate_config_states(config_id)
                         for config_id in config_ids)
    return itertools.chain.from_iterable(per_config_states)
def gpg_version():
    """Return the GPG version as a tuple of ints, e.g. ``(2, 2, 19)``.

    Parses the first line of ``gpg --version`` output, whose third
    whitespace-separated token is the dotted version string.
    """
    cmd = flatten([gnupg_bin(), "--version"])
    raw = stderr_output(cmd)
    first_line = raw.split('\n')[0]
    version_token = first_line.split(" ")[2]
    return tuple(int(part) for part in version_token.split('.'))
def close(self):
if not self._process:
return
if self._process.returncode is not None:
return
_logger.debug('Terminate process.')
try:
self._process.terminate()
except OSError as error:
if error.errno != errno.ESRCH:
... | Terminate or kill the subprocess.
This function is blocking. |
async def send_rpc_message(self, message, context):
conn_string = message.get('connection_string')
rpc_id = message.get('rpc_id')
address = message.get('address')
timeout = message.get('timeout')
payload = message.get('payload')
client_id = context.user_data
self.... | Handle a send_rpc message.
See :meth:`AbstractDeviceAdapter.send_rpc`. |
def get_last_modified_timestamp(self):
    """Print and return the path of the most recently modified file
    under the current directory.

    NOTE(review): shells out to ``stat -f``, which is the BSD/macOS
    form (GNU stat uses ``-c``) — not portable to Linux as written.
    """
    cmd = "find . -print0 | xargs -0 stat -f '%T@ %p' | sort -n | tail -1 | cut -f2- -d' '"
    ps = subprocess.Popen(cmd, shell=True,
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    # communicate() returns bytes on Python 3; decode before printing.
    output = ps.communicate()[0].decode('utf-8', errors='replace')
    # Original used the Python 2 print statement and discarded the
    # result; return it as well so callers can use the path.
    print(output)
    return output
def subvolume_deleted(name, device, commit=False, __dest=None):
ret = {
'name': name,
'result': False,
'changes': {},
'comment': [],
}
path = os.path.join(__dest, name)
exists = __salt__['btrfs.subvolume_exists'](path)
if not exists:
ret['comment'].append('Sub... | Makes sure that a btrfs subvolume is removed.
name
Name of the subvolume to remove
device
Device where to remove the subvolume
commit
Wait until the transaction is over |
def dRV(self, dt, band='g'):
    """Return dRV of star A if A outshines B+C in *band*, else of star B.

    The brightness indicators act as weights selecting which
    component's radial-velocity change contributes.
    """
    weight_a = self.A_brighter(band)
    weight_bc = self.BC_brighter(band)
    return self.orbpop.dRV_1(dt) * weight_a + self.orbpop.dRV_2(dt) * weight_bc
def convertforinput(self,filepath, metadata):
assert isinstance(metadata, CLAMMetaData)
if not metadata.__class__ in self.acceptforinput:
raise Exception("Convertor " + self.__class__.__name__ + " can not convert input files to " + metadata.__class__.__name__ + "!")
return False | Convert from target format into one of the source formats. Relevant if converters are used in InputTemplates. Metadata already is metadata for the to-be-generated file. 'filepath' is both the source and the target file, the source file will be erased and overwritten with the conversion result! |
def _set_current(self, new_current):
new_cur_full_path = self.join(new_current)
if not os.path.exists(new_cur_full_path):
raise PrefixNotFound(
'Prefix "%s" does not exist in workdir %s' %
(new_current, self.path)
)
if os.path.lexists(self.... | Change the current default prefix, for internal usage
Args:
new_current(str): Name of the new current prefix, it must already
exist
Returns:
None
Raises:
PrefixNotFound: if the given prefix name does not exist in the
workdir |
def _call(self, x, out=None):
if out is None:
out = self.range.element()
out.lincomb(self.a, x[0], self.b, x[1])
return out | Linearly combine ``x`` and write to ``out`` if given. |
def _call(self, path, method, body=None, headers=None):
try:
resp = self.http.do_call(path, method, body, headers)
except http.HTTPError as err:
if err.status == 401:
raise PermissionError('Insufficient permissions to query ' +
'%s with user %s... | Wrapper around http.do_call that transforms some HTTPError into
our own exceptions |
def convert_instancenorm(node, **kwargs):
name, input_nodes, attrs = get_inputs(node, kwargs)
eps = float(attrs.get("eps", 0.001))
node = onnx.helper.make_node(
'InstanceNormalization',
inputs=input_nodes,
outputs=[name],
name=name,
epsilon=eps)
return [node] | Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator
based on the input node's attributes and return the created node. |
def cmd_devid(self, args):
for p in self.mav_param.keys():
if p.startswith('COMPASS_DEV_ID'):
mp_util.decode_devid(self.mav_param[p], p)
if p.startswith('INS_') and p.endswith('_ID'):
mp_util.decode_devid(self.mav_param[p], p) | decode device IDs from parameters |
def getInfo(sign, lon):
return {
'ruler': ruler(sign),
'exalt': exalt(sign),
'dayTrip': dayTrip(sign),
'nightTrip': nightTrip(sign),
'partTrip': partTrip(sign),
'term': term(sign, lon),
'face': face(sign, lon),
'exile': exile(sign),
'fall': fal... | Returns the complete essential dignities
for a sign and longitude. |
def get_attr_filters(self):
for f in self.data.keys():
if f not in self.multi_attrs:
continue
fv = self.data[f]
if isinstance(fv, dict):
fv['key'] = f
else:
fv = {f: fv}
vf = ValueFilter(fv)
v... | Return an iterator resource attribute filters configured. |
def get_link_text_from_selector(selector):
    """Extract the link text from a ``link=`` / ``link_text=`` selector.

    Returns the selector unchanged when it carries neither prefix.
    """
    # Slice off the prefix instead of str.split: split('link=')[1]
    # would drop everything after a second occurrence of 'link='
    # inside the link text itself (e.g. 'link=a link=b' -> 'a ').
    for prefix in ('link=', 'link_text='):
        if selector.startswith(prefix):
            return selector[len(prefix):]
    return selector
def strip_html(text):
def reply_to(text):
replying_to = []
split_text = text.split()
for index, token in enumerate(split_text):
if token.startswith('@'): replying_to.append(token[1:])
else:
message = split_text[index:]
break
rpl... | Get rid of ugly twitter html |
def select_slice(self, row_slc, col_slc, add_to_selected=False):
if not add_to_selected:
self.grid.ClearSelection()
if row_slc == row_slc == slice(None, None, None):
self.grid.SelectAll()
elif row_slc.stop is None and col_slc.stop is None:
self.grid.SelectBloc... | Selects a slice of cells
Parameters
----------
* row_slc: Integer or Slice
\tRows to be selected
* col_slc: Integer or Slice
\tColumns to be selected
* add_to_selected: Bool, defaults to False
\tOld selections are cleared if False |
def fact(name, puppet=False):
opt_puppet = '--puppet' if puppet else ''
ret = __salt__['cmd.run_all'](
'facter {0} {1}'.format(opt_puppet, name),
python_shell=False)
if ret['retcode'] != 0:
raise CommandExecutionError(ret['stderr'])
if not ret['stdout']:
return ''... | Run facter for a specific fact
CLI Example:
.. code-block:: bash
salt '*' puppet.fact kernel |
def addIDs(self, asfield=False):
ids = vtk.vtkIdFilter()
ids.SetInputData(self.poly)
ids.PointIdsOn()
ids.CellIdsOn()
if asfield:
ids.FieldDataOn()
else:
ids.FieldDataOff()
ids.Update()
return self.updateMesh(ids.GetOutput()) | Generate point and cell ids.
:param bool asfield: flag to control whether to generate scalar or field data. |
def _dump_multilinestring(obj, decimals):
    """Dump a GeoJSON-like MultiLineString object to WKT.

    Input parameters and return value are the MULTILINESTRING
    equivalent of :func:`_dump_point`.
    """
    parts = []
    for linestring in obj['coordinates']:
        # Each point becomes "x y [z]"; points are comma-separated.
        points = ', '.join(
            ' '.join(_round_and_pad(coord, decimals) for coord in point)
            for point in linestring)
        parts.append('(%s)' % points)
    return 'MULTILINESTRING (%s)' % ', '.join(parts)
def _update(self):
self.dataChanged.emit(self.createIndex(0, 0), self.createIndex(
len(self.collection), len(self.header))) | Emit dataChanged signal on all cells |
def open(self, new=False):
    """Init the database, if required.

    :param new: create a fresh database instead of opening the existing one
    """
    if new:
        self._db.new()
    else:
        self._db.open()
    self._run_init_queries()
def interrupt(self):
    """Interrupt the current database from processing."""
    db, thread_id = self._database, self._databaseThreadId
    if db and thread_id:
        try:
            db.interrupt(thread_id)
        except AttributeError:
            # Backend does not support interruption; nothing to do.
            pass
    # Always drop the references, interrupted or not.
    self._database = None
    self._databaseThreadId = 0
def _get_parsers(self, name):
parserlist = BaseParser.__subclasses__()
forced = name is None
if isinstance(name, (six.text_type, six.binary_type)):
parserlist = [p for p in parserlist if p.__name__ == name]
if not parserlist:
raise ValueError("could not fi... | Return the appropriate parser asked by the user.
Todo:
Change `Ontology._get_parsers` behaviour to look for parsers
through a setuptools entrypoint instead of mere subclasses. |
def onPollCreated(
self,
mid=None,
poll=None,
author_id=None,
thread_id=None,
thread_type=None,
ts=None,
metadata=None,
msg=None,
):
log.info(
"{} created poll {} in {} ({})".format(
author_id, poll, thread_i... | Called when the client is listening, and somebody creates a group poll
:param mid: The action ID
:param poll: Created poll
:param author_id: The ID of the person who created the poll
:param thread_id: Thread ID that the action was sent to. See :ref:`intro_threads`
:param thread_... |
def get_trainer(name):
    """Return the unique id for a trainer: the md5 digest of the
    lowercased name, reduced to an 8-digit integer."""
    digest = hashlib.md5(name.lower().encode('utf-8')).hexdigest()
    return int(digest, 16) % 10 ** 8
def state_view_for_block(block_wrapper, state_view_factory):
    """Return the state view for an arbitrary block.

    Args:
        block_wrapper (BlockWrapper): The block for which a state view
            is to be returned, or None (a None root hash is used).
        state_view_factory (StateViewFactory): The state view factory
            used to create the StateView object.

    Returns:
        StateView: view rooted at the block's state root hash.
    """
    if block_wrapper is None:
        state_root_hash = None
    else:
        state_root_hash = block_wrapper.state_root_hash
    return state_view_factory.create_view(state_root_hash)
def sort(expr, field = None, keytype=None, ascending=True):
weld_obj = WeldObject(encoder_, decoder_)
expr_var = weld_obj.update(expr)
if isinstance(expr, WeldObject):
expr_var = expr.obj_id
weld_obj.dependencies[expr_var] = expr
if field is not None:
key_str = "x.$%s" % field
... | Sorts the vector.
If the field parameter is provided then the sort
operators on a vector of structs where the sort key
is the field of the struct.
Args:
expr (WeldObject)
field (Int) |
def _match_item(item, any_all=any, ignore_case=False, normalize_values=False, **kwargs):
it = get_item_tags(item)
return any_all(
_match_field(
get_field(it, field), pattern, ignore_case=ignore_case, normalize_values=normalize_values
) for field, patterns in kwargs.items() for pattern in patterns
) | Match items by metadata.
Note:
Metadata values are lowercased when ``normalized_values`` is ``True``,
so ``ignore_case`` is automatically set to ``True``.
Parameters:
item (~collections.abc.Mapping, str, os.PathLike): Item dict or filepath.
any_all (callable): A callable to determine if any or all filters m... |
def to_json(self):
obj = {
"vertices": [
{
"id": vertex.id,
"annotation": vertex.annotation,
}
for vertex in self.vertices
],
"edges": [
{
"id": edge.id... | Convert to a JSON string. |
def to_wea(self, file_path, hoys=None):
hoys = hoys or xrange(len(self.direct_normal_radiation.datetimes))
if not file_path.lower().endswith('.wea'):
file_path += '.wea'
originally_ip = False
if self.is_ip is True:
self.convert_to_si()
originally_ip = ... | Write an wea file from the epw file.
WEA carries radiation values from epw. Gendaymtx uses these values to
generate the sky. For an annual analysis it is identical to using epw2wea.
args:
file_path: Full file path for output file.
hoys: List of hours of the year. Defaul... |
def get_owner(obj_name, obj_type='file'):
r
try:
obj_type_flag = flags().obj_type[obj_type.lower()]
except KeyError:
raise SaltInvocationError(
'Invalid "obj_type" passed: {0}'.format(obj_type))
if obj_type in ['registry', 'registry32']:
obj_name = dacl().get_reg_name... | r'''
Gets the owner of the passed object
Args:
obj_name (str):
The path for which to obtain owner information. The format of this
parameter is different depending on the ``obj_type``
obj_type (str):
The type of object to query. This value changes the format... |
def install_package(package,
wheels_path,
venv=None,
requirement_files=None,
upgrade=False,
install_args=None):
requirement_files = requirement_files or []
logger.info('Installing %s...', package)
if venv and... | Install a Python package.
Can specify a specific version.
Can specify a prerelease.
Can specify a venv to install in.
Can specify a list of paths or urls to requirement txt files.
Can specify a local wheels_path to use for offline installation.
Can request an upgrade. |
def remove_if_exists(filename):
    """Remove *filename*, ignoring the case where it does not exist.

    Like :func:`os.remove` (or :func:`os.unlink`), except that no error
    is raised if the file is already missing. Any other ``OSError``
    (e.g. permission denied) still propagates.
    """
    try:
        os.unlink(filename)
    except FileNotFoundError:
        # Python 3 maps errno.ENOENT to FileNotFoundError, so the
        # manual errno comparison of the original is unnecessary.
        pass
def get_project_groups_roles(request, project):
groups_roles = collections.defaultdict(list)
project_role_assignments = role_assignments_list(request,
project=project)
for role_assignment in project_role_assignments:
if not hasattr(role_assignment... | Gets the groups roles in a given project.
:param request: the request entity containing the login user information
:param project: the project to filter the groups roles. It accepts both
project object resource or project ID
:returns group_roles: a dictionary mapping the groups and the... |
def _ScanFileSystemForWindowsDirectory(self, path_resolver):
result = False
for windows_path in self._WINDOWS_DIRECTORIES:
windows_path_spec = path_resolver.ResolvePath(windows_path)
result = windows_path_spec is not None
if result:
self._windows_directory = windows_path
break
... | Scans a file system for a known Windows directory.
Args:
path_resolver (WindowsPathResolver): Windows path resolver.
Returns:
bool: True if a known Windows directory was found. |
def get_trust_id(self):
if not bool(self._my_map['trustId']):
raise errors.IllegalState('this Authorization has no trust')
else:
return Id(self._my_map['trustId']) | Gets the ``Trust`` ``Id`` for this authorization.
return: (osid.id.Id) - the trust ``Id``
raise: IllegalState - ``has_trust()`` is ``false``
*compliance: mandatory -- This method must be implemented.* |
def get_postgresql_args(db_config, extra_args=None):
db = db_config['NAME']
mapping = [('--username={0}', db_config.get('USER')),
('--host={0}', db_config.get('HOST')),
('--port={0}', db_config.get('PORT'))]
args = apply_arg_values(mapping)
if extra_args is not None:
... | Returns an array of argument values that will be passed to a `psql` or
`pg_dump` process when it is started based on the given database
configuration. |
def setControl(
self, request_type, request, value, index, buffer_or_len,
callback=None, user_data=None, timeout=0):
if self.__submitted:
raise ValueError('Cannot alter a submitted transfer')
if self.__doomed:
raise DoomedTransferError('Cannot reuse a doom... | Setup transfer for control use.
request_type, request, value, index
See USBDeviceHandle.controlWrite.
request_type defines transfer direction (see
ENDPOINT_OUT and ENDPOINT_IN)).
buffer_or_len
Either a string (when sending data), or expected data length (... |
def _send_solr_command(self, core_url, json_command):
url = _get_url(core_url, "update")
try:
response = self.req_session.post(url, data=json_command, headers={'Content-Type': 'application/json'})
response.raise_for_status()
except requests.RequestException as e:
... | Sends JSON string to Solr instance |
def arch(self):
    """Return a more standard representation of the architecture."""
    machine = self.machine
    if machine in ("x86_64", "AMD64", "i686"):
        # 64-bit-capable CPUs report i386 when running a 32-bit OS.
        return "i386" if self.architecture == "32bit" else "amd64"
    if machine == "x86":
        return "i386"
    # Anything else (e.g. arm64) is passed through unchanged.
    return machine
def to_array(self, *args, **kwargs):
from root_numpy import tree2array
return tree2array(self, *args, **kwargs) | Convert this tree into a NumPy structured array |
def post(self, command, data=None):
now = calendar.timegm(datetime.datetime.now().timetuple())
if now > self.expiration:
auth = self.__open("/oauth/token", data=self.oauth)
self.__sethead(auth['access_token'])
return self.__open("%s%s" % (self.api, command),
... | Post data to API. |
def analyze(problem, X, Y, second_order=False, print_to_console=False,
seed=None):
if seed:
np.random.seed(seed)
problem = extend_bounds(problem)
num_vars = problem['num_vars']
X = generate_contrast(problem)
main_effect = (1. / (2 * num_vars)) * np.dot(Y, X)
Si = ResultDict((... | Perform a fractional factorial analysis
Returns a dictionary with keys 'ME' (main effect) and 'IE' (interaction
effect). The techniques bulks out the number of parameters with dummy
parameters to the nearest 2**n. Any results involving dummy parameters
could indicate a problem with the model runs.
... |
def _buffer(self, event: Message):
    """Buffer an event's payload, if it is a data-bearing message."""
    if isinstance(event, BytesMessage):
        target = self._byte_buffer
    elif isinstance(event, TextMessage):
        target = self._string_buffer
    else:
        # Non-data events (pings, closes, ...) are ignored.
        return
    target.write(event.data)
def included_length(self):
    """Surveyed length, not including "excluded" shots."""
    total = 0
    for shot in self.shots:
        if shot.is_included:
            total += shot.length
    return total
def _maybe_cast_slice_bound(self, label, side, kind):
if isinstance(label, str):
parsed, resolution = _parse_iso8601_with_reso(self.date_type,
label)
start, end = _parsed_string_to_bounds(self.date_type, resolution,
... | Adapted from
pandas.tseries.index.DatetimeIndex._maybe_cast_slice_bound |
def class_parameters(decorator):
def decorate(the_class):
if not isclass(the_class):
raise TypeError(
'class_parameters(the_class=%s) you must pass a class' % (
the_class
)
)
for attr in the_class.__dict__:
if ca... | To wrap all class methods with static_parameters decorator |
def calc_exp(skydir, ltc, event_class, event_types,
egy, cth_bins, npts=None):
if npts is None:
npts = int(np.ceil(np.max(cth_bins[1:] - cth_bins[:-1]) / 0.025))
exp = np.zeros((len(egy), len(cth_bins) - 1))
cth_bins = utils.split_bin_edges(cth_bins, npts)
cth = edge_to_center(cth_b... | Calculate the exposure on a 2D grid of energy and incidence angle.
Parameters
----------
npts : int
Number of points by which to sample the response in each
incidence angle bin. If None then npts will be automatically
set such that incidence angle is sampled on intervals of <
... |
def _is_in_restart(self, x, y):
x1, y1, x2, y2 = self._new_game
return x1 <= x < x2 and y1 <= y < y2 | Checks if the game is to be restarted by request. |
def commit(self, f):
if self._overwrite:
replace_atomic(f.name, self._path)
else:
move_atomic(f.name, self._path) | Move the temporary file to the target location. |
def get_thermostability(self, at_temp):
import ssbio.protein.sequence.properties.thermostability as ts
dG = ts.get_dG_at_T(seq=self, temp=at_temp)
self.annotations['thermostability_{}_C-{}'.format(at_temp, dG[2].lower())] = (dG[0], dG[1]) | Run the thermostability calculator using either the Dill or Oobatake methods.
Stores calculated (dG, Keq) tuple in the ``annotations`` attribute, under the key
`thermostability_<TEMP>-<METHOD_USED>`.
See :func:`ssbio.protein.sequence.properties.thermostability.get_dG_at_T` for instructions and... |
def deploy_snmp(snmp, host=None, admin_username=None,
admin_password=None, module=None):
return __execute_cmd('deploy -v SNMPv2 {0} ro'.format(snmp),
host=host,
admin_username=admin_username,
admin_password=admin_password,
... | Change the QuickDeploy SNMP community string, used for switches as well
CLI Example:
.. code-block:: bash
salt dell dracr.deploy_snmp SNMP_STRING
host=<remote DRAC or CMC> admin_username=<DRAC user>
admin_password=<DRAC PW>
salt dell dracr.deploy_password diana secret |
def loadJson(d):
    """Return JSON data loaded from the file named by ``jsonFn(d)``."""
    with codecs.open(jsonFn(d), 'r', 'utf-8') as handle:
        return json.load(handle)
def extract_keywords(cls, line, items):
unprocessed = list(reversed(line.split('=')))
while unprocessed:
chunk = unprocessed.pop()
key = None
if chunk.strip() in cls.allowed:
key = chunk.strip()
else:
raise SyntaxError("Inva... | Given the keyword string, parse a dictionary of options. |
def _send_and_wait(self, **kwargs):
frame_id = self.next_frame_id
kwargs.update(dict(frame_id=frame_id))
self._send(**kwargs)
timeout = datetime.now() + const.RX_TIMEOUT
while datetime.now() < timeout:
try:
frame = self._rx_frames.pop(frame_id)
... | Send a frame to either the local ZigBee or a remote device and wait
for a pre-defined amount of time for its response. |
def get(self, url, ignore_access_time=False):
key = hashlib.md5(url).hexdigest()
accessed = self._cache_meta_get(key)
if not accessed:
self.debug("From inet {}".format(url))
return None, None
if isinstance(accessed, dict):
cached = CacheInfo.from_dict(... | Try to retrieve url from cache if available
:param url: Url to retrieve
:type url: str | unicode
:param ignore_access_time: Should ignore the access time
:type ignore_access_time: bool
:return: (data, CacheInfo)
None, None -> not found in cache
None, Cach... |
def django_url(step, url=None):
    """Return the URL for a page from the test server.

    :param step: A Gherkin step
    :param url: If specified, the relative URL to append.
    """
    base_url = step.test.live_server_url
    # An empty/None relative URL means the server root.
    return urljoin(base_url, url) if url else base_url
def read_be_array(fmt, count, fp):
arr = array.array(str(fmt))
if hasattr(arr, 'frombytes'):
arr.frombytes(fp.read(count * arr.itemsize))
else:
arr.fromstring(fp.read(count * arr.itemsize))
return fix_byteorder(arr) | Reads an array from a file with big-endian data. |
def _is_viable_phone_number(number):
    """Check whether a string could possibly be a phone number.

    Requires at least _MIN_LENGTH_FOR_NSN characters and a full match
    of _VALID_PHONE_NUMBER_PATTERN. Punctuation commonly found in phone
    numbers is tolerated by the pattern, so no prior normalization is
    needed.
    """
    if len(number) < _MIN_LENGTH_FOR_NSN:
        return False
    return fullmatch(_VALID_PHONE_NUMBER_PATTERN, number) is not None
def attached_socket(self, *args, **kwargs):
    """Open a raw socket in a ``with`` block to write data to Splunk.

    The arguments are identical to those for :meth:`attach`. The socket
    is shut down and closed at the end of the ``with`` block, even if
    an exception is raised in the block.
    """
    # Acquire outside the try: if attach() itself fails there is no
    # socket to clean up, and the original try/finally raised a
    # NameError on the undefined 'sock' in the finally clause.
    sock = self.attach(*args, **kwargs)
    try:
        yield sock
    finally:
        sock.shutdown(socket.SHUT_RDWR)
        sock.close()
def _get(self, *args, **kwargs):
all_messages = []
for storage in self.storages:
messages, all_retrieved = storage._get()
if messages is None:
break
if messages:
self._used_storages.add(storage)
all_messages.extend(messages)... | Gets a single list of messages from all storage backends. |
def send(self, obj_id):
    """Send email to the assigned lists.

    :param obj_id: int
    :return: dict|str
    """
    url = '{url}/{id}/send'.format(url=self.endpoint_url, id=obj_id)
    response = self._client.session.post(url)
    return self.process_response(response)
def fill_in_arguments(config, modules, args):
def work_in(config, module, name):
rkeys = getattr(module, 'runtime_keys', {})
for (attr, cname) in iteritems(rkeys):
v = args.get(attr, None)
if v is not None:
config[cname] = v
if not isinstance(args, collect... | Fill in configuration fields from command-line arguments.
`config` is a dictionary holding the initial configuration,
probably the result of :func:`assemble_default_config`. It reads
through `modules`, and for each, fills in any configuration values
that are provided in `args`.
`config` is modifi... |
def purge_bad_timestamp_files(file_list):
"Given a list of image files, find bad frames, remove them and modify file_list"
MAX_INITIAL_BAD_FRAMES = 15
bad_ts = Kinect.detect_bad_timestamps(Kinect.timestamps_from_file_list(file_list))
if not bad_ts:
return file_list
la... | Given a list of image files, find bad frames, remove them and modify file_list |
def layer_from_element(element, style_function=None):
from telluric.collections import BaseCollection
if isinstance(element, BaseCollection):
styled_element = element.map(lambda feat: style_element(feat, style_function))
else:
styled_element = style_element(element, style_function)
retur... | Return Leaflet layer from shape.
Parameters
----------
element : telluric.vectors.GeoVector, telluric.features.GeoFeature, telluric.collections.BaseCollection
Data to plot. |
def add_special(self, name):
    """Register a special name like ``loop``."""
    # Promote the name: it is now declared and no longer pending.
    self.declared.add(name)
    self.undeclared.discard(name)
def request_goto(self, tc=None):
if not tc:
tc = TextHelper(self.editor).word_under_cursor(
select_whole_word=True)
if not self._definition or isinstance(self.sender(), QAction):
self.select_word(tc)
if self._definition is not None:
QTimer.sing... | Request a go to assignment.
:param tc: Text cursor which contains the text that we must look for
its assignment. Can be None to go to the text that is under
the text cursor.
:type tc: QtGui.QTextCursor |
def _generate_encryption_data_dict(kek, cek, iv):
wrapped_cek = kek.wrap_key(cek)
wrapped_content_key = OrderedDict()
wrapped_content_key['KeyId'] = kek.get_kid()
wrapped_content_key['EncryptedKey'] = _encode_base64(wrapped_cek)
wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm()
enc... | Generates and returns the encryption metadata as a dict.
:param object kek: The key encryption key. See calling functions for more information.
:param bytes cek: The content encryption key.
:param bytes iv: The initialization vector.
:return: A dict containing all the encryption metadata.
:rtype: d... |
def commit(self):
self.logger.debug("Starting injections...")
self.logger.debug("Injections dict is:")
self.logger.debug(self.inject_dict)
self.logger.debug("Clear list is:")
self.logger.debug(self.clear_set)
for filename, content in self.inject_dict.items():
... | commit the injections desired, overwriting any previous injections in the file. |
def get_compute(self, compute=None, **kwargs):
    """Filter in the 'compute' context.

    :parameter str compute: name of the compute options (optional)
    :parameter **kwargs: any other tags to filter by
        (except compute or context)
    :return: :class:`phoebe.parameters.parameters.ParameterSet`
    """
    # Force the context tag; add the compute tag only when given.
    filters = dict(kwargs, context='compute')
    if compute is not None:
        filters['compute'] = compute
    return self.filter(**filters)
def _create_axes(filenames, file_dict):
try:
f = iter(f for tup in file_dict.itervalues()
for f in tup if f is not None).next()
except StopIteration as e:
raise (ValueError("No FITS files were found. "
"Searched filenames: '{f}'." .format(
f=filenames.valu... | Create a FitsAxes object |
def simulate(self, T):
    """Simulate state and observation processes.

    Parameters
    ----------
    T: int
        processes are simulated from time 0 to time T-1

    Returns
    -------
    x, y: lists
        lists of length T
    """
    x = []
    for t in range(T):
        # Initial distribution at t=0, transition kernel afterwards.
        if t == 0:
            law = self.PX0()
        else:
            law = self.PX(t, x[-1])
        x.append(law.rvs(size=1))
    return x, self.simulate_given_x(x)
def similar(self, address_line, max_results=None):
    """Get a list of valid addresses that are similar to the given
    term; can be used to match invalid addresses to valid addresses.

    :param address_line: free-form address term to match against
    :param max_results: cap on returned matches; falls back to
        ``self.max_results`` when not given
    """
    if max_results is None:
        # Explicit None check: the original 'max_results or ...'
        # also discarded a caller-supplied 0.
        max_results = self.max_results
    params = {"term": address_line,
              "max_results": max_results}
    return self._make_request('/address/getSimilar', params)
def delete_record(self, identifier=None, rtype=None, name=None, content=None, **kwargs):
if not rtype and kwargs.get('type'):
warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
DeprecationWarning)
rtype = kwargs.get('type')
return self... | Delete an existing record.
If record does not exist, do nothing.
If an identifier is specified, use it, otherwise do a lookup using type, name and content. |
def listFieldsFromSource(self, template_source):
ast = self.environment.parse(template_source)
return jinja2.meta.find_undeclared_variables(ast) | List all the attributes to be rendered directly from template
source
:param template_source: the template source (usually represents the
template content in string format)
:return: a :class:`set` contains all the needed attributes
:rtype: set |
def _mark_target(type, item):
if type not in ('input', 'output'):
msg = 'Error (7D74X): Type is not valid: {0}'.format(type)
raise ValueError(msg)
orig_item = item
if isinstance(item, list):
item_s = item
else:
item_s = [item]
for item in item_s:
if isinstance... | Wrap given item as input or output target that should be added to task.
Wrapper object will be handled specially in \
:paramref:`create_cmd_task.parts`.
:param type: Target type.
Allowed values:
- 'input'
- 'output'
:param item: Item to mark as input or output target.... |
def _get_select_commands(self, source, tables):
row_queries = {tbl: self.select_all(tbl, execute=False) for tbl in
tqdm(tables, total=len(tables), desc='Getting {0} select queries'.format(source))}
for tbl, command in row_queries.items():
if isinstance(command, str):
... | Create select queries for all of the tables from a source database.
:param source: Source database name
:param tables: Iterable of table names
:return: Dictionary of table keys, command values |
def lexical_parent(self):
    """Return the lexical parent for this cursor (cached after the
    first lookup)."""
    try:
        return self._lexical_parent
    except AttributeError:
        # First access: fetch from libclang and memoize.
        self._lexical_parent = conf.lib.clang_getCursorLexicalParent(self)
        return self._lexical_parent
def user_has_access(self, user):
if ROLE_ADMIN in user.roles:
return True
if self.enabled:
if not self.required_roles:
return True
for role in self.required_roles:
if role in user.roles:
return True
return Fa... | Check if a user has access to view information for the account
Args:
user (:obj:`User`): User object to check
Returns:
True if user has access to the account, else false |
def _linear_predictor(self, X=None, modelmat=None, b=None, term=-1):
if modelmat is None:
modelmat = self._modelmat(X, term=term)
if b is None:
b = self.coef_[self.terms.get_coef_indices(term)]
return modelmat.dot(b).flatten() | linear predictor
compute the linear predictor portion of the model
ie multiply the model matrix by the spline basis coefficients
Parameters
---------
at least 1 of (X, modelmat)
and
at least 1 of (b, feature)
X : array-like of shape (n_samples, m_fea... |
def set_file(name, source, template=None, context=None, defaults=None, **kwargs):
ret = {'name': name,
'changes': {},
'result': True,
'comment': ''}
if context is None:
context = {}
elif not isinstance(context, dict):
ret['result'] = False
ret['commen... | Set debconf selections from a file or a template
.. code-block:: yaml
<state_id>:
debconf.set_file:
- source: salt://pathto/pkg.selections
<state_id>:
debconf.set_file:
- source: salt://pathto/pkg.selections?saltenv=myenvironment
<state_id>:
... |
def set_attr(self, name, val, dval=None, dtype=None, reset=False):
if dval is not None and val is None:
val = dval
if dtype is not None and val is not None:
if isinstance(dtype, type):
val = dtype(val)
else:
val = dtype.type(val)
... | Set an object attribute by its name. The attribute value
can be specified as a primary value `val`, and as default
value 'dval` that will be used if the primary value is None.
This arrangement allows an attribute to be set from an entry
in an options object, passed as `val`, while specif... |
def get_verse(self, v=1):
    """Get a specific verse (1-based index).

    Returns None when ``v`` exceeds the number of verses.
    NOTE(review): ``v=0`` passes the bounds check and returns the
    *last* verse via negative indexing — quirk preserved as-is.
    """
    index = v - 1
    if index >= len(self.verses):
        return None
    return self.verses[index]
def search(session, query):
flat_query = "".join(query.split())
artists = session.query(Artist).filter(
or_(Artist.name.ilike(f"%%{query}%%"),
Artist.name.ilike(f"%%{flat_query}%%"))
).all()
albums = session.query(Album).filter(
Album.title.ilike(f"%%{q... | Naive search of the database for `query`.
:return: A dict with keys 'artists', 'albums', and 'tracks'. Each containing a list
of the respective ORM type. |
def timeout_queue_add(self, item, cache_time=0):
    """Add an item to be run at a future time.

    This must be a Module, I3statusModule or a Task.
    """
    self.timeout_add_queue.append((item, cache_time))
    # Wake the scheduler when the new item is due before anything
    # currently queued.
    due = self.timeout_due
    if due is None or cache_time < due:
        self.update_request.set()
def raw_data_engine(**kwargs):
logger.debug("cycles_engine")
raise NotImplementedError
experiments = kwargs["experiments"]
farms = []
barn = "raw_dir"
for experiment in experiments:
farms.append([])
return farms, barn | engine to extract raw data |
def lambda_A_calc(classes, table, P, POP):
try:
result = 0
maxreference = max(list(P.values()))
length = POP
for i in classes:
col = []
for col_item in table.values():
col.append(col_item[i])
result += max(col)
result = (res... | Calculate Goodman and Kruskal's lambda A.
:param classes: confusion matrix classes
:type classes : list
:param table: confusion matrix table
:type table : dict
:param P: condition positive
:type P : dict
:param POP: population
:type POP : int
:return: Goodman and Kruskal's lambda A ... |
def fix_germline_samplename(in_file, sample_name, data):
out_file = "%s-fixnames%s" % utils.splitext_plus(in_file)
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
sample_file = "%s-samples.txt" % utils.splitext_plus(tx_out_file)[0]
with o... | Replace germline sample names, originally from normal BAM file. |
def write(gmt, out_path):
with open(out_path, 'w') as f:
for _, each_dict in enumerate(gmt):
f.write(each_dict[SET_IDENTIFIER_FIELD] + '\t')
f.write(each_dict[SET_DESC_FIELD] + '\t')
f.write('\t'.join([str(entry) for entry in each_dict[SET_MEMBERS_FIELD]]))
f.... | Write a GMT to a text file.
Args:
gmt (GMT object): list of dicts
out_path (string): output path
Returns:
None |
def check_required_keys(self, required_keys):
    """Raise InsufficientGraftMPackageException if this package does
    not conform to the standard of the given package (i.e. it is
    missing any of *required_keys*)."""
    contents = self._contents_hash
    for key in required_keys:
        if key not in contents:
            raise InsufficientGraftMPackageException("Package missing key %s" % key)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.