| code (string, lengths 51–2.34k) | docstring (string, lengths 11–171) |
|---|---|
def setPololuProtocol(self):
    """Select the Pololu protocol, which disables compact mode."""
    self._compact = False
    if self._log:
        self._log.debug("Pololu protocol has been set.")
def _add_output(self, out):
self._outputs += [out]
out.node = self
out._set_as_output_of(self) | Add as destination of output data |
def getRequest(self):
    """Return the primary AR this attachment is linked to, if any."""
    linked = self.getLinkedRequests()
    if len(linked) > 1:
        ids = ", ".join(map(api.get_id, linked))
        logger.info("Attachment assigned to more than one AR: [{}]. "
                    "The first AR will be returned".format(ids))
    if linked:
        return linked[0]
    # No directly linked AR: fall back to the AR of the linked analysis
    analysis = self.getAnalysis()
    if IRequestAnalysis.providedBy(analysis):
        return analysis.getRequest()
    return None
def log2(x, context=None):
    """Return the base-two logarithm of x."""
    operand = BigFloat._implicit_convert(x)
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_log2,
        (operand,),
        context,
    )
def _check_inplace_setting(self, value):
if self._is_mixed_type:
if not self._is_numeric_mixed_type:
try:
if np.isnan(value):
return True
except Exception:
pass
raise TypeError('Cannot do inplace boolean setting on '
'mixed-types with a non np.nan value')
return True | check whether we allow in-place setting with this type of value |
def from_sgf(sgfc):
    """Convert an SGF coordinate to a Minigo (row, col) coordinate."""
    # None, '' and — on boards up to 19x19 — 'tt' all encode a pass.
    if sgfc is None or sgfc == '' or (go.N <= 19 and sgfc == 'tt'):
        return None
    row = _SGF_COLUMNS.index(sgfc[1])
    col = _SGF_COLUMNS.index(sgfc[0])
    return row, col
def server_info_cb(self, context, server_info_p, userdata):
    """Dereference the server info and request an update with *context*."""
    server_info = server_info_p.contents  # value currently unused beyond the dereference
    self.request_update(context)
def md(self, text=TEXT, float_format="%.2g"):
cols = self._data.columns
hl = pd.DataFrame([["---"] * len(cols)], index=["---"], columns=cols)
df = pd.concat([hl, self._data])
return df.to_csv(sep='|', index=True, float_format=float_format) | Generate Markdown from the table data. |
def activationFunctionASIG(self, x):
    """Sigmoid activation for each net input in x, clamped at +/-15."""
    def sigmoid(net):
        # Clamp extreme inputs so exp() cannot overflow
        if net < -15.0:
            return 0.0
        if net > 15.0:
            return 1.0
        return 1.0 / (1.0 + Numeric.exp(-net))
    return Numeric.array([sigmoid(net) for net in x], 'f')
def dtool(debug):
    """Tool to work with datasets: configure logging verbosity."""
    level = logging.DEBUG if debug else logging.WARNING
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=level)
def server(self):
    """Connection to the server, created lazily on first access."""
    cached = self._server
    if cached is None:
        cached = bugzilla.Bugzilla(url=self.parent.url)
        self._server = cached
    return cached
def _upload_none(self, upload_info, check_result):
return UploadResult(
action=None,
quickkey=check_result['duplicate_quickkey'],
hash_=upload_info.hash_info.file,
filename=upload_info.name,
size=upload_info.size,
created=None,
revision=None
) | Dummy upload function for when we don't actually upload |
def save(self):
if self.path is None:
if self.config_manager.userCodeDir is not None:
sys.path.remove(self.config_manager.userCodeDir)
self.config_manager.userCodeDir = None
logger.info("Removed custom module search path from configuration and sys.path.")
else:
if self.path != self.config_manager.userCodeDir:
if self.config_manager.userCodeDir is not None:
sys.path.remove(self.config_manager.userCodeDir)
sys.path.append(self.path)
self.config_manager.userCodeDir = self.path
logger.info("Saved custom module search path and added it to sys.path: {}".format(self.path)) | This function is called by the parent dialog window when the user selects to save the settings. |
def _build_pyramid(self, image, levels):
pyramid = [image]
for l in range(levels-1):
if any(x < 20 for x in pyramid[-1].shape[:2]):
break
pyramid.append(cv2.pyrDown(pyramid[-1]))
return list(reversed(pyramid)) | Returns a list of reduced-size images, from smallest to original size |
def user_got_role_event(user, role):
msg = 'User ({}){} got new role [{}]'
current_app.logger.info(msg.format(user.id, user.email, role.handle)) | User got new role |
def attend(x, source, hparams, name):
with tf.variable_scope(name):
x = tf.squeeze(x, axis=2)
if len(source.get_shape()) > 3:
source = tf.squeeze(source, axis=2)
source = common_attention.add_timing_signal_1d(source)
y = common_attention.multihead_attention(
common_layers.layer_preprocess(x, hparams), source, None,
hparams.attention_key_channels or hparams.hidden_size,
hparams.attention_value_channels or hparams.hidden_size,
hparams.hidden_size, hparams.num_heads,
hparams.attention_dropout)
res = common_layers.layer_postprocess(x, y, hparams)
return tf.expand_dims(res, axis=2) | Self-attention layer with source as memory antecedent. |
def send(self, send_string, newline=None):
    """Save *send_string* and write it to the channel with a newline."""
    self.current_send_string = send_string
    if newline is None:
        newline = self.newline
    self.channel.send(send_string + newline)
def feed_key(self, key_press):
assert isinstance(key_press, KeyPress)
cli = self._active_cli
if not cli.is_done:
cli.input_processor.feed(key_press)
cli.input_processor.process_keys() | Feed a key press to the CommandLineInterface. |
def reload(self, *modules):
self.notify('before_reload')
if 'configfiles' in self.config:
self.log.info('Reloading configuration...')
cfg = utils.parse_config(
self.server and 'server' or 'bot', *self.config['configfiles'])
self.config.update(cfg)
self.log.info('Reloading python code...')
if not modules:
modules = self.registry.includes
scanned = list(reversed(self.registry.scanned))
self.registry.reset()
to_scan = []
for module_name, categories in scanned:
if module_name in modules:
module = utils.maybedotted(module_name)
reload_module(module)
to_scan.append((module_name, categories))
for module_name, categories in to_scan:
self.include(module_name, venusian_categories=categories)
self.registry.reloading = {}
self.notify('after_reload') | Reload one or more plugins |
def optimise_xy(xy, *args):
z, elements, coordinates = args
window_com = np.array([xy[0], xy[1], z])
return -pore_diameter(elements, coordinates, com=window_com)[0] | Return negative pore diameter for x and y coordinates optimisation. |
def children_sum(self, children, node):
    """Calculate the total of all child values for *node*."""
    total = 0
    for child in children:
        total += self.value(child, node)
    return total
def _check_versionlock():
if _yum() == 'dnf':
if int(__grains__.get('osmajorrelease')) >= 26:
if six.PY3:
vl_plugin = 'python3-dnf-plugin-versionlock'
else:
vl_plugin = 'python2-dnf-plugin-versionlock'
else:
if six.PY3:
vl_plugin = 'python3-dnf-plugins-extras-versionlock'
else:
vl_plugin = 'python-dnf-plugins-extras-versionlock'
else:
vl_plugin = 'yum-versionlock' \
if __grains__.get('osmajorrelease') == '5' \
else 'yum-plugin-versionlock'
if vl_plugin not in list_pkgs():
raise SaltInvocationError(
'Cannot proceed, {0} is not installed.'.format(vl_plugin)
) | Ensure that the appropriate versionlock plugin is present |
def packed_checksum(self, data):
self.field.setval(self.algo(data[self.start:self.end]))
sio = BytesIO()
self.field.pack(sio)
return sio.getvalue() | Given the data of the entire packet return the checksum bytes |
def connect(self):
"Initiate the connection to a proxying hub"
log.info("connecting")
self._peer = connection.Peer(
None, self._dispatcher, self._addrs.popleft(),
backend.Socket(), reconnect=False)
self._peer.start() | Initiate the connection to a proxying hub |
def from_srt(cls, file):
parser = SRTParser().read(file)
return cls(file=file, captions=parser.captions) | Reads captions from a file in SubRip format. |
def _report_options(p):
_default_options(p, blacklist=['cache', 'log-group', 'quiet'])
p.add_argument(
'--days', type=float, default=1,
help="Number of days of history to consider")
p.add_argument(
'--raw', type=argparse.FileType('wb'),
help="Store raw json of collected records to given file path")
p.add_argument(
'--field', action='append', default=[], type=_key_val_pair,
metavar='HEADER=FIELD',
help='Repeatable. JMESPath of field to include in the output OR '
'for a tag use prefix `tag:`. Special case fields `region` and'
'`policy` are available')
p.add_argument(
'--no-default-fields', action="store_true",
help='Exclude default fields for report.')
p.add_argument(
'--format', default='csv', choices=['csv', 'grid', 'simple', 'json'],
help="Format to output data in (default: %(default)s). "
"Options include simple, grid, csv, json") | Add options specific to the report subcommand. |
def create_merge_psm_map(peptides, ns):
psmmap = {}
for peptide in peptides:
seq = reader.get_peptide_seq(peptide, ns)
psm_ids = reader.get_psm_ids_from_peptide(peptide, ns)
for psm_id in psm_ids:
try:
psmmap[seq][psm_id.text] = 1
except KeyError:
psmmap[seq] = {psm_id.text: 2}
for seq, psm_id_dict in psmmap.items():
psmmap[seq] = [x for x in psm_id_dict]
return psmmap | Loops through peptides, stores sequences mapped to PSM ids. |
def terminal_path_lengths_per_neurite(neurites, neurite_type=NeuriteType.all):
return list(sectionfunc.section_path_length(s)
for n in iter_neurites(neurites, filt=is_type(neurite_type))
for s in iter_sections(n, iterator_type=Tree.ileaf)) | Get the path lengths to each terminal point per neurite in a collection |
def stop_db_session(exc=None):
if has_db_session():
exc_type = None
tb = None
if exc:
exc_type, exc, tb = get_exc_info(exc)
db_session.__exit__(exc_type, exc, tb) | Stops the last db_session |
def drinkAdmins(self, objects=False):
    """Return the uids (or member objects) of the 'drink' admin group."""
    return self.group('drink', objects=objects)
def group_evidence_edges(edges: Iterable[EdgeTuple]) -> Iterable[Tuple[str, Iterable[EdgeTuple]]]:
return itt.groupby(edges, key=_evidence_sort_key) | Return an iterator over pairs of evidence values and their corresponding edge iterators. |
def uuid3(namespace, name):
import md5
hash = md5.md5(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=3) | Generate a UUID from the MD5 hash of a namespace UUID and a name. |
def load(klass, client, id, **kwargs):
resource = klass.RESOURCE.format(id=id)
response = Request(client, 'get', resource, params=kwargs).perform()
return klass(client).from_response(response.body['data']) | Returns an object instance for a given resource. |
def cancel_job(self, id_job, hub=None, group=None, project=None,
access_token=None, user_id=None):
if access_token:
self.req.credential.set_token(access_token)
if user_id:
self.req.credential.set_user_id(user_id)
if not self.check_credentials():
respond = {}
respond["status"] = 'Error'
respond["error"] = "Not credentials valid"
return respond
if not id_job:
respond = {}
respond["status"] = 'Error'
respond["error"] = "Job ID not specified"
return respond
url = get_job_url(self.config, hub, group, project)
url += '/{}/cancel'.format(id_job)
res = self.req.post(url)
return res | Cancel the information about a job, by its id |
def autobuild_docproject():
try:
family = utilities.get_family('module_settings.json')
autobuild_release(family)
autobuild_documentation(family.tile)
except unit_test.IOTileException as e:
print(e.format())
Exit(1) | Autobuild a project that only contains documentation |
def disconnect_entry_signals():
post_save.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PS_PING_DIRECTORIES)
post_save.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PS_PING_EXTERNAL_URLS)
post_save.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PS_FLUSH_SIMILAR_CACHE)
post_delete.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PD_FLUSH_SIMILAR_CACHE) | Disconnect all the signals on Entry model. |
def connect_input(self, wire):
    """Probe the specified wire, registering self as one of its sinks."""
    self._input = wire
    wire.sinks.append(self)
def _random_color(h_range=(0., 1.),
s_range=(.5, 1.),
v_range=(.5, 1.),
):
h, s, v = uniform(*h_range), uniform(*s_range), uniform(*v_range)
r, g, b = hsv_to_rgb(np.array([[[h, s, v]]])).flat
return r, g, b | Generate a random RGB color. |
def _remote_browser_class(env_vars, tags=None):
if tags is None:
tags = []
envs = _required_envs(env_vars)
envs.update(_optional_envs())
caps = _capabilities_dict(envs, tags)
if 'accessKey' in caps:
LOGGER.info(u"Using SauceLabs: %s %s %s", caps['platform'], caps['browserName'], caps['version'])
else:
LOGGER.info(u"Using Remote Browser: %s", caps['browserName'])
url = u"http://{0}:{1}/wd/hub".format(
envs['SELENIUM_HOST'], envs['SELENIUM_PORT'])
browser_args = []
browser_kwargs = {
'command_executor': url,
'desired_capabilities': caps,
}
if caps['browserName'] == 'firefox':
browser_kwargs['browser_profile'] = _firefox_profile()
return webdriver.Remote, browser_args, browser_kwargs | Returns class, kwargs, and args needed to instantiate the remote browser. |
def make_key(*criteria):
    """Make a ':'-joined string key out of many criteria.

    Criteria that stringify to None are dropped. Returns None (now
    explicitly, instead of falling off the end) when nothing remains.
    """
    parts = [stringify(c) for c in criteria]
    parts = [p for p in parts if p is not None]
    if parts:
        return ':'.join(parts)
    return None
def _is_big_endian(self):
if self.endian == DataTypeMixIn.ENDIAN_NATIVE:
return SYSTEM_ENDIAN == DataTypeMixIn.ENDIAN_BIG
return self.endian in (DataTypeMixIn.ENDIAN_BIG, DataTypeMixIn.ENDIAN_NETWORK) | Whether the current endian is big endian. |
def should_stop(self, result):
if result.get(DONE):
return True
for criteria, stop_value in self.stopping_criterion.items():
if criteria not in result:
raise TuneError(
"Stopping criteria {} not provided in result {}.".format(
criteria, result))
if result[criteria] >= stop_value:
return True
return False | Whether the given result meets this trial's stopping criteria. |
def _process_legend(self):
for l in self.handles['plot'].legend:
l.items[:] = []
l.border_line_alpha = 0
l.background_fill_alpha = 0 | Disables legends if show_legend is disabled. |
def append_file(self, file):
self.files.append(file)
if self.transformer:
future = asyncio.ensure_future(self.transformer.transform(file))
future.add_done_callback(self.handle_transform) | Append a new file in the stream. |
def addConnector(self, wire1, wire2):
if wire1 == wire2:
return
if wire1 > wire2:
wire1, wire2 = wire2, wire1
try:
last_level = self[-1]
except IndexError:
self.append([(wire1, wire2)])
return
for wires in last_level:
if wires[1] >= wire1 and wires[0] <= wire2:
self.append([(wire1, wire2)])
return
last_level.append((wire1, wire2)) | Add a connector between wire1 and wire2 in the network. |
def _to_bstr(l):
if isinstance(l, str):
l = l.encode('ascii', 'backslashreplace')
elif not isinstance(l, bytes):
l = str(l).encode('ascii', 'backslashreplace')
return l | Convert to byte string. |
def _get_sub_package_provider_session(self, sub_package, session_name, proxy=None):
agent_key = self._get_agent_key()
if session_name in self._provider_sessions[agent_key]:
return self._provider_sessions[agent_key][session_name]
else:
manager = self._get_sub_package_provider_manager(sub_package)
session = self._instantiate_session('get_' + session_name + '_for_bank',
proxy=self._proxy,
manager=manager)
self._set_bank_view(session)
self._set_object_view(session)
self._set_operable_view(session)
self._set_containable_view(session)
if self._session_management != DISABLED:
self._provider_sessions[agent_key][session_name] = session
return session | Gets the session from a sub-package |
def delist(target):
    """Recursively replace any single-entry list with its sole entry.

    Empty lists become None; dict values are delisted in place.
    Improvement: ``isinstance`` instead of ``type(x) is ...`` so dict/list
    subclasses are handled too (identical behavior for plain types).
    """
    if isinstance(target, dict):
        for key in target:
            target[key] = delist(target[key])
        return target
    if isinstance(target, list):
        if not target:
            return None
        if len(target) == 1:
            return delist(target[0])
        return [delist(e) for e in target]
    return target
def xslt(request):
foos = foobar_models.Foo.objects.all()
return render_xslt_to_response('xslt/model-to-xml.xsl', foos, mimetype='text/xml') | Shows xml output transformed with standard xslt |
def setup_step_out(self, frame):
self.frame_calling = None
self.frame_stop = None
self.frame_return = frame.f_back
self.frame_suspend = False
self.pending_stop = True
return | Setup debugger for a "stepOut" |
def CaptureVariable(self, value, depth, limits, can_enqueue=True):
try:
return self.CaptureVariableInternal(value, depth, limits, can_enqueue)
except BaseException as e:
return {
'status': {
'isError': True,
'refersTo': 'VARIABLE_VALUE',
'description': {
'format': ('Failed to capture variable: $0'),
'parameters': [str(e)]
}
}
} | Try-Except wrapped version of CaptureVariableInternal. |
def visit_starred(self, node, parent):
context = self._get_context(node)
newnode = nodes.Starred(
ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent
)
newnode.postinit(self.visit(node.value, newnode))
return newnode | visit a Starred node and return a new instance of it |
def coerce(self, value):
    """Coerce a cleaned value with the configured coercion callable."""
    coercer = self._coerce
    return value if coercer is None else coercer(value)
def _get_renditions(self, kwargs):
img_1x, size = self.get_rendition(
1, **utils.remap_args(kwargs, {"quality": "quality_ldpi"}))
img_2x, _ = self.get_rendition(
2, **utils.remap_args(kwargs, {"quality": "quality_hdpi"}))
return (img_1x, img_2x, size) | Get a bunch of renditions; returns a tuple of 1x, 2x, size |
def _post(url:str, params:dict, headers:dict) -> dict:
response = requests.post(url, params=params, headers=headers)
data = response.json()
if response.status_code != 200 or "error" in data:
raise GoogleApiError({"status_code": response.status_code,
"error": data.get("error", "")})
return data | Make a POST call. |
def OnBorderChoice(self, event):
    """Record which borders are affected by color and width changes."""
    items = event.GetEventObject().GetItems()
    self.borderstate = items[event.GetInt()]
def _StopMonitoringProcesses(self):
for pid in list(self._process_information_per_pid.keys()):
self._RaiseIfNotRegistered(pid)
process = self._processes_per_pid[pid]
self._StopMonitoringProcess(process) | Stops monitoring all processes. |
def close(self):
self._target_context_errors = None
self._query_context_errors = None
self._general_errors = None
for ae in self._alignment_errors:
ae.close()
self._alignment_errors = None | Set some objects to None to hopefully free up some memory. |
def _from_dict(cls, _dict):
args = {}
if 'utterance_id' in _dict:
args['utterance_id'] = _dict.get('utterance_id')
else:
raise ValueError(
'Required property \'utterance_id\' not present in UtteranceAnalysis JSON'
)
if 'utterance_text' in _dict:
args['utterance_text'] = _dict.get('utterance_text')
else:
raise ValueError(
'Required property \'utterance_text\' not present in UtteranceAnalysis JSON'
)
if 'tones' in _dict:
args['tones'] = [
ToneChatScore._from_dict(x) for x in (_dict.get('tones'))
]
else:
raise ValueError(
'Required property \'tones\' not present in UtteranceAnalysis JSON'
)
if 'error' in _dict:
args['error'] = _dict.get('error')
return cls(**args) | Initialize a UtteranceAnalysis object from a json dictionary. |
def replace_month_abbr_with_num(date_str, lang=DEFAULT_DATE_LANG):
num, abbr = get_month_from_date_str(date_str, lang)
return re.sub(abbr, str(num), date_str, flags=re.IGNORECASE) | Replace month strings occurrences with month number. |
def find_output_with_ifo(self, ifo):
ifo = ifo.upper()
return FileList([i for i in self if ifo in i.ifo_list]) | Find all files who have ifo = ifo |
def dict_row_strategy(column_names):
    """Dict row strategy: rows returned as dicts keyed by column name.

    Unnamed columns fall back to their positional index as the key.
    """
    keys = [name if name else idx for idx, name in enumerate(column_names)]

    def row_factory(row):
        return dict(zip(keys, row))

    return row_factory
def _highlight_extrema(data, color='yellow', max_=True):
attr = 'background-color: {0}'.format(color)
if data.ndim == 1:
if max_:
extrema = data == data.max()
else:
extrema = data == data.min()
return [attr if v else '' for v in extrema]
else:
if max_:
extrema = data == data.max().max()
else:
extrema = data == data.min().min()
return pd.DataFrame(np.where(extrema, attr, ''),
index=data.index, columns=data.columns) | Highlight the min or max in a Series or DataFrame. |
def _af_inv_scaled(x):
x = _transform_to_2d(x)
cov_matrix = np.atleast_2d(np.cov(x, rowvar=False))
cov_matrix_power = _mat_sqrt_inv(cov_matrix)
return x.dot(cov_matrix_power) | Scale a random vector for using the affinely invariant measures |
def _nose_tools_functions():
module = _BUILDER.string_build(
textwrap.dedent(
)
)
try:
case = next(module["a"].infer())
except astroid.InferenceError:
return
for method in case.methods():
if method.name.startswith("assert") and "_" not in method.name:
pep8_name = _pep8(method.name)
yield pep8_name, astroid.BoundMethod(method, case)
if method.name == "assertEqual":
yield "assert_equals", astroid.BoundMethod(method, case) | Get an iterator of names and bound methods. |
def expand_squeeze_to_nd(x, n, squeeze_dim=2, expand_dim=-1):
if len(x.shape) > n:
while len(x.shape) != n:
x = tf.squeeze(x, [squeeze_dim])
else:
while len(x.shape) != n:
x = tf.expand_dims(x, expand_dim)
return x | Make x n-d with squeeze and expand_dims. |
def reset(self):
if not self.request_list.conflict:
phase = _ResolvePhase(self.request_list.requirements, solver=self)
self.pr("resetting...")
self._init()
self._push_phase(phase) | Reset the solver, removing any current solve. |
def validate_search_input(self) -> bool:
    """Check if input value is empty; flag an empty box with a red border."""
    box = self._search_input
    if box.value == str():
        box.layout = Layout(border="solid 2px red", height='auto')
    else:
        box.layout = Layout()
    return box.value != str()
def log_cloud_error(client, message, **kwargs):
try:
cloud_logger = getattr(log, kwargs.get('azurearm_log_level'))
except (AttributeError, TypeError):
cloud_logger = getattr(log, 'error')
cloud_logger(
'An AzureARM %s CloudError has occurred: %s',
client.capitalize(),
message
)
return | Log an azurearm cloud error exception |
def signature(self, block_size=None):
"Requests a signature for remote file via API."
kwargs = {}
if block_size:
kwargs['block_size'] = block_size
return self.api.get('path/sync/signature', self.path, **kwargs) | Requests a signature for remote file via API. |
def commitLine(self, line) :
if self.streamBuffer is None :
raise ValueError("Commit lines is only for when you are streaming to a file")
self.streamBuffer.append(line)
if len(self.streamBuffer) % self.writeRate == 0 :
for i in xrange(len(self.streamBuffer)) :
self.streamBuffer[i] = str(self.streamBuffer[i])
self.streamFile.write("%s\n" % ('\n'.join(self.streamBuffer)))
self.streamFile.flush()
self.streamBuffer = [] | Commits a line making it ready to be streamed to a file and saves the current buffer if needed. If no stream is active, raises a ValueError |
def _X509__asn1date_to_datetime(asn1date):
bio = Membio()
libcrypto.ASN1_TIME_print(bio.bio, asn1date)
pydate = datetime.strptime(str(bio), "%b %d %H:%M:%S %Y %Z")
return pydate.replace(tzinfo=utc) | Converts openssl ASN1_TIME object to python datetime.datetime |
def pp(i, base=1024):
    """Pretty-print the integer `i` as a human-readable size string.

    Values up to `base` are rendered as integers; larger values are
    scaled down repeatedly and rendered with two decimals.

    Bug fix: the scale table skipped 'PB' (jumping from TB straight to
    EB), so petabyte-range values were labelled 'EB' and exabyte-range
    values raised IndexError.
    """
    degree = 0
    pattern = "%4d %s"
    while i > base:
        pattern = "%7.2f %s"
        i = i / float(base)
        degree += 1
    scales = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB']
    return pattern % (i, scales[degree])
def _generate_ngrams_with_context(self, tokens: List[Token]) -> chain:
chained_ngrams_iter = self._generate_ngrams_with_context_helper(iter(tokens), 1)
for n in range(2, self._ngrams + 1):
ngrams_iter = tee(tokens, n)
for j in range(1, n):
for k in range(j):
next(ngrams_iter[j], None)
ngrams_iter_with_context = self._generate_ngrams_with_context_helper(zip(*ngrams_iter), n)
chained_ngrams_iter = chain(chained_ngrams_iter, ngrams_iter_with_context)
return chained_ngrams_iter | Generates the 1-gram to n-grams tuples of the list of tokens |
def RawData(self):
result = collections.OrderedDict()
i = 0
while True:
try:
name, value, value_type = winreg.EnumValue(self._AccessRootKey(), i)
if value_type == winreg.REG_SZ:
precondition.AssertType(value, Text)
result[name] = value
except OSError:
break
i += 1
return result | Yields the valus in each section. |
def destroy(ctx, app, expire_hit, sandbox):
if expire_hit:
ctx.invoke(expire, app=app, sandbox=sandbox, exit=False)
HerokuApp(app).destroy() | Tear down an experiment server. |
def proj4_to_wkt(projection):
srs = osgeo.osr.SpatialReference()
srs.ImportFromProj4(projection.srs)
return srs.ExportToWkt() | Converts a pyproj.Proj object to a well-known text string |
def log_similarity_result(logfile, result):
assert result['task'] == 'similarity'
if not logfile:
return
with open(logfile, 'a') as f:
f.write('\t'.join([
str(result['global_step']),
result['task'],
result['dataset_name'],
json.dumps(result['dataset_kwargs']),
result['similarity_function'],
str(result['spearmanr']),
str(result['num_dropped']),
]))
f.write('\n') | Log a similarity evaluation result dictionary as TSV to logfile. |
def _get_utxos(self, address, services, **modes):
return get_unspent_outputs(
self.crypto, address, services=services,
**modes
) | Using the service fallback engine, get utxos from remote service. |
def create_roots(self, yam):
self.local_grammar = SchemaNode("grammar")
self.local_grammar.attr = {
"ns": yam.search_one("namespace").arg,
"nma:module": self.module.arg}
src_text = "YANG module '%s'" % yam.arg
revs = yam.search("revision")
if len(revs) > 0:
src_text += " revision %s" % self.current_revision(revs)
self.dc_element(self.local_grammar, "source", src_text)
start = SchemaNode("start", self.local_grammar)
self.data = SchemaNode("nma:data", start, interleave=True)
self.data.occur = 2
self.rpcs = SchemaNode("nma:rpcs", start, interleave=False)
self.notifications = SchemaNode("nma:notifications", start,
interleave=False) | Create the top-level structure for module `yam`. |
def check(ty, val):
    """Checks that `val` adheres to type `ty`.

    String type specs are parsed into type objects before enforcement.
    NOTE(review): `basestring` is Python-2 only — this snippet predates py3.
    """
    spec = Parser().parse(ty) if isinstance(ty, basestring) else ty
    return spec.enforce(val)
def _update_plot(self, _):
for param in self.model.params:
param.value = self._sliders[param].val
for indep_var, dep_var in self._projections:
self._update_specific_plot(indep_var, dep_var) | Callback to redraw the plot to reflect the new parameter values. |
def _get_package_status(package):
status = package["status_str"] or "Unknown"
stage = package["stage_str"] or "Unknown"
if stage == "Fully Synchronised":
return status
return "%(status)s / %(stage)s" % {"status": status, "stage": stage} | Get the status for a package. |
def are_ilx(self, ilx_ids):
total_data = []
for ilx_id in ilx_ids:
ilx_id = ilx_id.replace('http', '').replace('.', '').replace('/', '')
data, success = self.get_data_from_ilx(ilx_id)
if success:
total_data.append(data['data'])
else:
total_data.append({})
return total_data | Checks list of objects to see if they are usable ILX IDs |
def xpatherror(self, file, line, no):
libxml2mod.xmlXPatherror(self._o, file, line, no) | Formats an error message. |
def _extract_variables(param):
variables = set()
if isinstance(param, list):
variables.update(*[_extract_variables(x) for x in param])
elif isinstance(param, dict):
variables.update(*[_extract_variables(x) for x in param.values()])
elif isinstance(param, str):
for match in re.finditer(TEMPLATE_REGEX, param):
if match.group('short_id') is not None:
variables.add(match.group('short_id'))
else:
variables.add(match.group('long_id'))
return variables | Find all template variables in args. |
def remove_item(self, item):
index = self.items.index(item)
self.beginRemoveRows(QtCore.QModelIndex(), index, index)
self.items.remove(item)
self.endRemoveRows() | Remove item from model |
def users(self, extra_params=None):
return self.api._get_json(
User,
space=self,
rel_path=self._build_rel_path('users'),
extra_params=extra_params,
) | All Users with access to this Space |
def _set_winning_team(self):
if not self._summary['finished']:
return
for team in self._summary['diplomacy']['teams']:
team['winner'] = False
for player_number in team['player_numbers']:
for player in self._summary['players']:
if player_number == player['number']:
if player['winner']:
team['winner'] = True | Mark the winning team. |
def closest_color(requested_color):
    """Find the closest CSS3 color name for the requested RGB tuple."""
    # Temporarily silence DEBUG-level logging while diffing colors
    logging.disable(logging.DEBUG)
    candidates = [
        (color_diff(hex_to_rgb(code), requested_color), name)
        for code, name in css3_hex_to_names.items()
    ]
    logging.disable(logging.NOTSET)
    return min(candidates)[1]
def _shutdown_minions(self):
setproctitle('pyres_manager: Waiting on children to shutdown.')
for minion in self._workers.values():
minion.terminate()
minion.join() | send the SIGNINT signal to each worker in the pool. |
def _resolv_name(self, hostname):
ip = hostname
try:
ip = socket.gethostbyname(hostname)
except Exception as e:
logger.debug("{}: Cannot convert {} to IP address ({})".format(self.plugin_name, hostname, e))
return ip | Convert hostname to IP address. |
def _ratelimit(self, http_method, url, **kwargs):
def time_since_last_call():
if self.callsafety['lastcalltime'] is not None:
return int(time() - self.callsafety['lastcalltime'])
else:
return None
lastlimitremaining = self.callsafety['lastlimitremaining']
if time_since_last_call() is None or time_since_last_call() >= self.ratelimit_request_interval or \
lastlimitremaining >= self.ratelimit:
response = http_method(url, **kwargs)
else:
log.warning(
"Safety Limit Reached of %s remaining calls and time since last call is under %s seconds"
% (self.ratelimit, self.ratelimit_request_interval)
)
while time_since_last_call() < self.ratelimit_request_interval:
remaining_sleep = int(self.ratelimit_request_interval - time_since_last_call())
log.debug(" -> sleeping: %s more seconds" % remaining_sleep)
self.check_ratelimit_budget(1)
sleep(1)
response = http_method(url, **kwargs)
self.callsafety['lastcalltime'] = time()
self.callsafety['lastlimitremaining'] = int(response.headers.get('X-Rate-Limit-Remaining', 0))
return response | Ensure we do not hit the rate limit. |
def common_start(*args):
    """Return the shared starting substring of the args, comma-joined with
    any args that do not begin with it."""
    prefix_chars = []
    for position in zip(*args):
        # Keep going while at least two args share this character
        if len(set(position)) == len(args):
            break
        prefix_chars.append(position[0])
    prefix = "".join(prefix_chars).strip()
    leftover = [s for s in args if not s.startswith(prefix)]
    return ', '.join([prefix] + leftover)
def lock(self):
self.password = None
self.keyfile = None
self.groups[:] = []
self.entries[:] = []
self._group_order[:] = []
self._entry_order[:] = []
self.root_group = v1Group()
self._num_groups = 1
self._num_entries = 0
return True | This method locks the database. |
def __parse_loc_data(loc_data, result):
result[DATA] = {ATTRIBUTION: ATTRIBUTION_INFO,
FORECAST: [],
PRECIPITATION_FORECAST: None}
for key, [value, func] in SENSOR_TYPES.items():
result[DATA][key] = None
try:
sens_data = loc_data[value]
if key == CONDITION:
desc = loc_data[__WEATHERDESCRIPTION]
result[DATA][CONDITION] = __cond_from_desc(desc)
result[DATA][CONDITION][IMAGE] = loc_data[__ICONURL]
continue
if key == STATIONNAME:
result[DATA][key] = __getStationName(loc_data[__STATIONNAME],
loc_data[__STATIONID])
continue
if func is not None:
result[DATA][key] = func(sens_data)
else:
result[DATA][key] = sens_data
except KeyError:
if result[MESSAGE] is None:
result[MESSAGE] = "Missing key(s) in br data: "
result[MESSAGE] += "%s " % value
log.warning("Data element with key='%s' "
"not loaded from br data!", key)
result[SUCCESS] = True
return result | Parse the json data from selected weatherstation. |
def create_gw_response(app, wsgi_env):
    """Create an API Gateway response dict from a WSGI app and environ."""
    status_headers = []
    buf = []

    def start_response(status, headers, exc_info=None):
        status_headers[:] = [status, headers]
        return buf.append  # WSGI write() callable

    app_iter = app(wsgi_env, start_response)
    close = getattr(app_iter, 'close', None)
    try:
        buf.extend(list(app_iter))
    finally:
        if close:
            close()

    body = ''.join(buf)
    headers = dict(status_headers[1])
    headers.setdefault('Content-Length', str(len(body)))
    headers.setdefault('Content-Type', 'text/plain')
    return {
        'body': body,
        'statusCode': status_headers[0].split(' ', 1)[0],
        'headers': headers,
    }
def create(cls, receiver_id, user_id=None):
event = cls(id=uuid.uuid4(), receiver_id=receiver_id, user_id=user_id)
event.payload = event.receiver.extract_payload()
return event | Create an event instance. |
async def _maybe_release_last_part(self) -> None:
if self._last_part is not None:
if not self._last_part.at_eof():
await self._last_part.release()
self._unread.extend(self._last_part._unread)
self._last_part = None | Ensures that the last read body part is read completely. |
def _get_hyperparameters(self):
hyperparameters = {}
for key in self._hyperparameters:
hyperparameters[key] = getattr(self, key)
return hyperparameters | Get internal optimization parameters. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.